From e38f0557f26bc6f9338a9003a733347594566d16 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Tue, 26 Mar 2024 10:29:19 +0000 Subject: [PATCH 01/41] WIP --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 4 +- .../proof_system/proving_key/serialize.hpp | 16 ++ cspell.json | 1 + .../tooling/acvm_cli/src/cli/execute_cmd.rs | 7 +- .../tooling/acvm_cli/src/cli/fs/witness.rs | 41 +++- .../tooling/noirc_abi_wasm/src/lib.rs | 10 +- .../noir-protocol-circuits-types/src/index.ts | 34 +++- yarn-project/prover-client/package.json | 7 +- yarn-project/prover-client/src/bb/cli.ts | 78 ++++++++ yarn-project/prover-client/src/bb/execute.ts | 179 +++++++++++++++++ yarn-project/prover-client/src/bb/index.ts | 23 +++ .../prover-client/src/mocks/fixtures.ts | 63 ++++++ .../orchestrator/block-building-helpers.ts | 12 +- .../src/orchestrator/orchestrator.test.ts | 91 ++------- .../src/orchestrator/orchestrator.ts | 6 +- .../src/prover/bb_prover.test.ts | 112 +++++++++++ .../prover-client/src/prover/bb_prover.ts | 181 ++++++++++++++++++ .../prover-client/src/prover/empty.ts | 97 ---------- .../prover-client/src/prover/index.ts | 72 +------ .../prover-client/src/prover/interface.ts | 68 +++++++ .../src/prover/test_circuit_prover.ts | 131 +++++++++++++ .../prover-client/src/simulator/rollup.ts | 4 +- .../prover-client/src/tx-prover/tx-prover.ts | 4 +- .../simulator/src/simulator/acvm_native.ts | 28 ++- yarn-project/yarn.lock | 3 + 25 files changed, 996 insertions(+), 276 deletions(-) create mode 100644 yarn-project/prover-client/src/bb/cli.ts create mode 100644 yarn-project/prover-client/src/bb/execute.ts create mode 100644 yarn-project/prover-client/src/bb/index.ts create mode 100644 yarn-project/prover-client/src/mocks/fixtures.ts create mode 100644 yarn-project/prover-client/src/prover/bb_prover.test.ts create mode 100644 yarn-project/prover-client/src/prover/bb_prover.ts delete mode 100644 yarn-project/prover-client/src/prover/empty.ts create mode 100644 yarn-project/prover-client/src/prover/interface.ts create mode 100644 yarn-project/prover-client/src/prover/test_circuit_prover.ts diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 34e4ef0fe4c..7185ffba523 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -317,13 +317,13 @@ void write_pk(const std::string& bytecodePath, const std::string& outputPath) acir_composer.create_circuit(constraint_system); init_bn254_crs(acir_composer.get_dyadic_circuit_size()); auto pk = acir_composer.init_proving_key(); - auto serialized_pk = to_buffer(*pk); if (outputPath == "-") { + auto serialized_pk = to_buffer(*pk); writeRawBytesToStdout(serialized_pk); vinfo("pk written to stdout"); } else { - write_file(outputPath, serialized_pk); + write_to_file(outputPath, *pk); vinfo("pk written to: ", outputPath); } } diff --git a/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/serialize.hpp b/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/serialize.hpp index 5900997677d..10f539fd7c2 100644 --- a/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/serialize.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/serialize.hpp @@ -106,6 +106,14 @@ template inline void read_from_file(B& is, std::string const& path, read(is, key.memory_write_records); } +inline void read_from_file(std::string const& path, proving_key_data& key) +{ + auto filename = format(path, "/meta"); + std::ifstream key_file; + 
key_file.open(filename, std::ios::in | std::ios::binary); + read_from_file(key_file, path, key); +} + template inline void write_to_file(B& os, std::string const& path, proving_key& key) { using serialize::write; @@ -138,4 +146,12 @@ template inline void write_to_file(B& os, std::string const& path, write(os, key.memory_write_records); } +inline void write_to_file(std::string const& path, proving_key& key) +{ + auto outputPath = format(path, "/meta"); + std::ofstream key_file; + key_file.open(outputPath, std::ios::out | std::ios::trunc | std::ios::binary); + write_to_file(key_file, path, key); +} + } // namespace bb::plonk diff --git a/cspell.json b/cspell.json index b489357e0e5..d52a34e6bc8 100644 --- a/cspell.json +++ b/cspell.json @@ -18,6 +18,7 @@ "bbfree", "bbmalloc", "benesjan", + "Bincode", "bleurgh", "bodyparser", "bootnode", diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs index b76d0eccc29..c86d9360733 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -6,11 +6,10 @@ use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; -use crate::cli::fs::witness::save_witness_to_dir; use crate::errors::CliError; use nargo::ops::{execute_circuit, DefaultForeignCallExecutor}; -use super::fs::witness::create_output_witness_string; +use super::fs::witness::{create_output_witness_string, save_witness_to_dir}; /// Executes a circuit to calculate its return value #[derive(Debug, Clone, Args)] @@ -43,9 +42,9 @@ fn run_command(args: ExecuteCommand) -> Result { let output_witness_string = create_output_witness_string(&output_witness)?; if args.output_witness.is_some() { save_witness_to_dir( - &output_witness_string, - &args.working_directory, + output_witness, &args.output_witness.unwrap(), + &args.working_directory, )?; } Ok(output_witness_string) diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs index 2daaa5a3a58..1c71c2d0f40 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs @@ -9,22 +9,40 @@ use acvm::acir::native_types::WitnessMap; use crate::errors::{CliError, FilesystemError}; +fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf { + std::fs::create_dir_all(named_dir) + .unwrap_or_else(|_| panic!("could not create the `{name}` directory")); + + PathBuf::from(named_dir) +} + +fn write_to_file(bytes: &[u8], path: &Path) -> String { + let display = path.display(); + + let mut file = match File::create(path) { + Err(why) => panic!("couldn't create {display}: {why}"), + Ok(file) => file, + }; + + match file.write_all(bytes) { + Err(why) => panic!("couldn't write to {display}: {why}"), + Ok(_) => display.to_string(), + } +} + /// Saves the provided output witnesses to a toml file created at the given location -pub(crate) fn save_witness_to_dir>( +pub(crate) fn save_witness_string_to_dir>( output_witness: &String, witness_dir: P, file_name: &String, ) -> Result { let witness_path = witness_dir.as_ref().join(file_name); - let mut file = File::create(&witness_path) .map_err(|_| FilesystemError::OutputWitnessCreationFailed(file_name.clone()))?; write!(file, "{}", output_witness) .map_err(|_| FilesystemError::OutputWitnessWriteFailed(file_name.clone()))?; - Ok(witness_path) } - /// Creates a toml representation 
of the provided witness map pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result { let mut witness_map: BTreeMap = BTreeMap::new(); @@ -34,3 +52,18 @@ pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result>( + witnesses: WitnessMap, + witness_name: &str, + witness_dir: P, +) -> Result { + create_named_dir(witness_dir.as_ref(), "witness"); + let witness_path = witness_dir.as_ref().join(witness_name).with_extension("gz"); + + let buf: Vec = witnesses.try_into().map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; + println!("Writing out to {}", witness_path.display()); + write_to_file(buf.as_slice(), &witness_path); + + Ok(witness_path) +} diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs index ce15f6d502e..72806bdbaf1 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs @@ -13,7 +13,7 @@ use noirc_abi::{ Abi, MAIN_RETURN_NAME, }; use serde::Serialize; -use std::collections::BTreeMap; +use std::{collections::BTreeMap, process::Output}; use gloo_utils::format::JsValueSerdeExt; use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; @@ -113,3 +113,11 @@ pub fn abi_decode(abi: JsAbi, witness_map: JsWitnessMap) -> Result::from_serde(&return_struct) .map_err(|err| err.to_string().into()) } + +#[wasm_bindgen(js_name = serializeWitness)] +pub fn serialise_witness(witness_map: JsWitnessMap) -> Result, JsAbiError> { + console_error_panic_hook::set_once(); + let converted_witness: WitnessMap = witness_map.into(); + let output = converted_witness.try_into(); + output.map_err(|op| JsAbiError::new("Failed to convert to Vec".to_string())) +} diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index cf56cc0b7c1..ef095ff95a8 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -34,6 +34,7 @@ import PublicKernelAppLogicSimulatedJson from './target/public_kernel_app_logic_ import PublicKernelSetupSimulatedJson from './target/public_kernel_setup_simulated.json' assert { type: 'json' }; import PublicKernelTailSimulatedJson from './target/public_kernel_tail_simulated.json' assert { type: 'json' }; import PublicKernelTeardownSimulatedJson from './target/public_kernel_teardown_simulated.json' assert { type: 'json' }; +import BaseRollupJson from './target/rollup_base.json' assert { type: 'json' }; import BaseRollupSimulatedJson from './target/rollup_base_simulated.json' assert { type: 'json' }; import MergeRollupJson from './target/rollup_merge.json' assert { type: 'json' }; import RootRollupJson from './target/rollup_root.json' assert { type: 'json' }; @@ -99,12 +100,43 @@ export const BaseParityArtifact = BaseParityJson as NoirCompiledCircuit; export const RootParityArtifact = RootParityJson as NoirCompiledCircuit; -export const BaseRollupArtifact = BaseRollupSimulatedJson as NoirCompiledCircuit; +export const SimulatedBaseRollupArtifact = BaseRollupSimulatedJson as NoirCompiledCircuit; + +export const BaseRollupArtifact = BaseRollupJson as NoirCompiledCircuit; export const MergeRollupArtifact = MergeRollupJson as NoirCompiledCircuit; export const RootRollupArtifact = RootRollupJson as NoirCompiledCircuit; +export type ProtocolArtifacts = + | 'PrivateKernelInitArtifact' + | 'PrivateKernelInnerArtifact' + | 'PrivateKernelTailArtifact' + | 
'PublicKernelSetupArtifact' + | 'PublicKernelAppLogicArtifact' + | 'PublicKernelTeardownArtifact' + | 'PublicKernelTailArtifact' + | 'BaseParityArtifact' + | 'RootParityArtifact' + | 'BaseRollupArtifact' + | 'MergeRollupArtifact' + | 'RootRollupArtifact'; + +export const ProtocolCircuitArtifacts: Record = { + PrivateKernelInitArtifact: PrivateKernelInitArtifact, + PrivateKernelInnerArtifact: PrivateKernelInnerArtifact, + PrivateKernelTailArtifact: PrivateKernelTailArtifact, + PublicKernelSetupArtifact: PublicKernelSetupArtifact, + PublicKernelAppLogicArtifact: PublicKernelAppLogicArtifact, + PublicKernelTeardownArtifact: PublicKernelTeardownArtifact, + PublicKernelTailArtifact: PublicKernelTailArtifact, + BaseParityArtifact: BaseParityArtifact, + RootParityArtifact: RootParityArtifact, + BaseRollupArtifact: BaseRollupArtifact, + MergeRollupArtifact: MergeRollupArtifact, + RootRollupArtifact: RootRollupArtifact, +}; + let solver: Promise; const getSolver = (): Promise => { diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index a1b03a69775..07629e69e24 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -3,6 +3,9 @@ "version": "0.1.0", "type": "module", "exports": "./dest/index.js", + "bin": { + "bb-cli": "./dest/bb/index.js" + }, "typedocOptions": { "entryPoints": [ "./src/index.ts" @@ -16,7 +19,8 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --passWithNoTests" + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --passWithNoTests", + "bb": "node --no-warnings ./dest/bb/index.js" }, "inherits": [ "../package.common.json" @@ -37,6 +41,7 @@ "@aztec/noir-protocol-circuits-types": "workspace:^", "@aztec/simulator": "workspace:^", "@aztec/world-state": "workspace:^", + "commander": "^9.0.0", "lodash.chunk": "^4.2.0", "tslib": "^2.4.0" }, diff --git a/yarn-project/prover-client/src/bb/cli.ts b/yarn-project/prover-client/src/bb/cli.ts new file mode 100644 index 00000000000..84fb6fc2888 --- /dev/null +++ b/yarn-project/prover-client/src/bb/cli.ts @@ -0,0 +1,78 @@ +import { LogFn } from '@aztec/foundation/log'; +import { ProtocolArtifacts, ProtocolCircuitArtifacts } from '@aztec/noir-protocol-circuits-types'; + +import { Command } from 'commander'; + +import { generateProvingKeyForNoirCircuit, generateVerificationKeyForNoirCircuit } from './execute.js'; + +const { BB_WORKING_DIRECTORY, BB_BINARY_PATH } = process.env; + +/** + * Returns commander program that defines the CLI. + * @param log - Console logger. + * @param debugLogger - Debug logger. + * @returns The CLI. 
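
For orientation, a sketch of driving this program from code, mirroring src/bb/index.ts later in this patch. The circuit name is one of the ProtocolCircuitArtifacts keys added above; the -w and -b paths are illustrative placeholders, not values from this PR (BB_WORKING_DIRECTORY / BB_BINARY_PATH can be supplied via the environment instead, as the option defaults in getProgram show):

```typescript
import { createConsoleLogger } from '@aztec/foundation/log';

import { getProgram } from './cli.js';

// Generate the verification key for the BaseRollupArtifact protocol circuit.
// Paths are placeholders for wherever bb lives and where keys should be written.
const log = createConsoleLogger();
await getProgram(log).parseAsync(
  ['write_vk', '-c', 'BaseRollupArtifact', '-w', '/tmp/bb', '-b', '/usr/local/bin/bb'],
  { from: 'user' },
);
```
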
+ */ +export function getProgram(log: LogFn): Command { + const program = new Command(); + + program.name('bb-cli').description('CLI for interacting with Barretenberg.'); + + program + .command('protocol-circuits') + .description('Lists the available protocol circuit artifacts') + .action(() => { + log(Object.keys(ProtocolCircuitArtifacts).reduce((prev: string, x: string) => prev.concat(`\n${x}`))); + }); + + program + .command('write_pk') + .description('Generates the proving key for the specified circuit') + .requiredOption( + '-w, --working-directory ', + 'A directory to use for storing input/output files', + BB_WORKING_DIRECTORY, + ) + .requiredOption('-b, --bb-path ', 'The path to the BB binary', BB_BINARY_PATH) + .requiredOption('-c, --circuit ', 'The name of a protocol circuit') + .action(async options => { + const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifacts]; + if (!compiledCircuit) { + log(`Failed to find circuit ${options.circuit}`); + return; + } + await generateProvingKeyForNoirCircuit( + options.bbPath, + options.workingDirectory, + options.circuit, + compiledCircuit, + log, + ); + }); + + program + .command('write_vk') + .description('Generates the verification key for the specified circuit') + .requiredOption( + '-w, --working-directory ', + 'A directory to use for storing input/output files', + BB_WORKING_DIRECTORY, + ) + .requiredOption('-b, --bb-path ', 'The path to the BB binary', BB_BINARY_PATH) + .requiredOption('-c, --circuit ', 'The name of a protocol circuit') + .action(async options => { + const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifacts]; + if (!compiledCircuit) { + log(`Failed to find circuit ${options.circuit}`); + return; + } + await generateVerificationKeyForNoirCircuit( + options.bbPath, + options.workingDirectory, + options.circuit, + compiledCircuit, + log, + ); + }); + return program; +} diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts new file mode 100644 index 00000000000..c94ba512e73 --- /dev/null +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -0,0 +1,179 @@ +import { sha256 } from '@aztec/foundation/crypto'; +import { LogFn } from '@aztec/foundation/log'; +import { Timer } from '@aztec/foundation/timer'; +import { NoirCompiledCircuit } from '@aztec/types/noir'; + +import * as proc from 'child_process'; +import * as fs from 'fs/promises'; +import path from 'path'; + +enum BB_RESULT { + SUCCESS, + FAILURE, + ALREADY_PRESENT, +} + +export type BBSuccess = { + status: BB_RESULT.SUCCESS | BB_RESULT.ALREADY_PRESENT; +}; + +export type BBFailure = { + status: BB_RESULT.FAILURE; + reason: string; +}; + +export type BBResult = BBSuccess | BBFailure; + +/** + * Invokes the Barretenberg binary with the provided command and args + * @param pathToBB - The path to the BB binary + * @param command - The command to execute + * @param args - The arguments to pass + * @param logger - A log function + * @returns The completed partial witness outputted from the circuit + */ +export function executeBB(pathToBB: string, command: string, args: string[], logger: LogFn) { + return new Promise((resolve, reject) => { + let errorBuffer = Buffer.alloc(0); + const acvm = proc.spawn(pathToBB, [command, ...args]); + acvm.stdout.on('data', data => { + logger(data.toString('utf-8')); + }); + acvm.stderr.on('data', data => { + errorBuffer = Buffer.concat([errorBuffer, data]); + }); + acvm.on('close', code => { + if (code === 0) { + resolve({ status: 
BB_RESULT.SUCCESS }); + } else { + reject(errorBuffer.toString('utf-8')); + } + }); + }).catch((reason: string) => ({ status: BB_RESULT.FAILURE, reason })); +} + +const bytecodeHashFilename = 'bytecode_hash'; + +async function generateKeyForNoirCircuit( + pathToBB: string, + workingDirectory: string, + circuitName: string, + compiledCircuit: NoirCompiledCircuit, + key: 'vk' | 'pk', + log: LogFn, + force = false, +) { + // The bytecode is written to e.g. /workingDirectory/pk/BaseParityArtifact-bytecode + const bytecodePath = `${workingDirectory}/${key}/${circuitName}-bytecode`; + const bytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); + + // The key generation outputs are written to e.g. /workingDirectory/pk/BaseParityArtifact/ + // The bytecode hash file is also written here as /workingDirectory/pk/BaseParityArtifact/bytecode-hash + const circuitOutputDirectory = `${workingDirectory}/${key}/${circuitName}`; + const bytecodeHashPath = `${circuitOutputDirectory}/${bytecodeHashFilename}`; + const bytecodeHash = sha256(bytecode); + + let mustRegenerate = + force || + (await fs + .access(bytecodeHashPath, fs.constants.R_OK) + .then(_ => false) + .catch(_ => true)); + + if (!mustRegenerate) { + const data: Buffer = await fs.readFile(bytecodeHashPath).catch(_ => Buffer.alloc(0)); + mustRegenerate = data.length == 0 || !data.equals(bytecodeHash); + } + + if (!mustRegenerate) { + const alreadyPresent: BBSuccess = { status: BB_RESULT.ALREADY_PRESENT }; + return { result: alreadyPresent, circuitOutputDirectory }; + } + + const binaryPresent = await fs + .access(pathToBB, fs.constants.R_OK) + .then(_ => true) + .catch(_ => false); + if (!binaryPresent) { + const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; + return { result: failed, circuitOutputDirectory }; + } + + // Clear up the circuit output directory removing anything that is there + await fs.rm(circuitOutputDirectory, { recursive: true, force: true }); + await fs.mkdir(circuitOutputDirectory, { recursive: true }); + // Write the bytecode and input witness to the working directory + await fs.writeFile(bytecodePath, bytecode); + const args = ['-o', circuitOutputDirectory, '-b', bytecodePath]; + const timer = new Timer(); + const result = await executeBB(pathToBB, `write_${key}`, args, log); + const duration = timer.ms(); + await fs.rm(bytecodePath, { force: true }); + await fs.writeFile(bytecodeHashPath, bytecodeHash); + return { result, duration, circuitOutputDirectory }; +} + +const directorySize = async (directory: string, filesToOmit: string[]) => { + const files = await fs.readdir(directory); + const stats = files + .filter(f => !filesToOmit.find(file => file === f)) + .map(file => fs.stat(path.join(directory, file))); + + return (await Promise.all(stats)).reduce((accumulator, { size }) => accumulator + size, 0); +}; + +export async function generateVerificationKeyForNoirCircuit( + pathToBB: string, + workingDirectory: string, + circuitName: string, + compiledCircuit: NoirCompiledCircuit, + log: LogFn, +) { + const { + result, + duration, + circuitOutputDirectory: keyPath, + } = await generateKeyForNoirCircuit(pathToBB, workingDirectory, circuitName, compiledCircuit, 'vk', log); + if (result.status === BB_RESULT.FAILURE) { + log(`Failed to generate verification key for circuit ${circuitName}, reason: ${result.reason}`); + return; + } + if (result.status === BB_RESULT.ALREADY_PRESENT) { + log(`Verification key for circuit ${circuitName} was already present`); + return; + } + 
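
A usage sketch of the caching behaviour implemented by generateKeyForNoirCircuit above: the bytecode hash written alongside the generated key means a repeat call for an unchanged circuit reports ALREADY_PRESENT instead of re-running bb. The paths are placeholders and console.log merely stands in for a LogFn:

```typescript
import { BaseRollupArtifact } from '@aztec/noir-protocol-circuits-types';

import { generateVerificationKeyForNoirCircuit } from './execute.js';

const bb = '/usr/local/bin/bb'; // placeholder path to the bb binary
const workDir = '/tmp/bb';      // placeholder working directory

// First call invokes `bb write_vk` and records the circuit's bytecode hash.
await generateVerificationKeyForNoirCircuit(bb, workDir, 'BaseRollupArtifact', BaseRollupArtifact, console.log);

// Second call sees an unchanged bytecode hash and reports the key as already present.
await generateVerificationKeyForNoirCircuit(bb, workDir, 'BaseRollupArtifact', BaseRollupArtifact, console.log);
```
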
const size = await directorySize(keyPath, [bytecodeHashFilename]); + log( + `Verification key for circuit ${circuitName} written to ${keyPath} in ${duration} ms, size: ${ + size / (1024 * 1024) + } MB`, + ); + return result; +} + +export async function generateProvingKeyForNoirCircuit( + pathToBB: string, + workingDirectory: string, + circuitName: string, + compiledCircuit: NoirCompiledCircuit, + log: LogFn, +) { + const { + result, + duration, + circuitOutputDirectory: keyPath, + } = await generateKeyForNoirCircuit(pathToBB, workingDirectory, circuitName, compiledCircuit, 'pk', log); + if (result.status === BB_RESULT.FAILURE) { + log(`Failed to generate proving key for circuit ${circuitName}, reason: ${result.reason}`); + return; + } + if (result.status === BB_RESULT.ALREADY_PRESENT) { + log(`Proving key for circuit ${circuitName} was already present`); + return; + } + const size = await directorySize(keyPath, [bytecodeHashFilename]); + log( + `Proving key for circuit ${circuitName} written to ${keyPath} in ${duration} ms, size: ${size / (1024 * 1024)} MB`, + ); + return result; +} diff --git a/yarn-project/prover-client/src/bb/index.ts b/yarn-project/prover-client/src/bb/index.ts new file mode 100644 index 00000000000..9a19d22b742 --- /dev/null +++ b/yarn-project/prover-client/src/bb/index.ts @@ -0,0 +1,23 @@ +#!/usr/bin/env -S node --no-warnings +import { createConsoleLogger } from '@aztec/foundation/log'; + +import 'source-map-support/register.js'; + +import { getProgram } from './cli.js'; + +const log = createConsoleLogger(); + +/** CLI main entrypoint */ +async function main() { + process.once('SIGINT', () => process.exit(0)); + process.once('SIGTERM', () => process.exit(0)); + + const program = getProgram(log); + await program.parseAsync(process.argv); +} + +main().catch(err => { + log(`Error in command execution`); + log(err); + process.exit(1); +}); diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts new file mode 100644 index 00000000000..78e1f4e44eb --- /dev/null +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -0,0 +1,63 @@ +import { makeProcessedTx, mockTx } from '@aztec/circuit-types'; +import { + MAX_NEW_L2_TO_L1_MSGS_PER_TX, + MAX_NEW_NULLIFIERS_PER_TX, + MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, + MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, + MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_REVERTIBLE_NOTE_HASHES_PER_TX, + MAX_REVERTIBLE_NULLIFIERS_PER_TX, + MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + PublicDataUpdateRequest, + PublicKernelCircuitPublicInputs, + SideEffectLinkedToNoteHash, +} from '@aztec/circuits.js'; +import { fr, makeNewSideEffect, makeNewSideEffectLinkedToNoteHash, makeProof } from '@aztec/circuits.js/testing'; +import { makeTuple } from '@aztec/foundation/array'; +import { toTruncField } from '@aztec/foundation/serialize'; +import { MerkleTreeOperations } from '@aztec/world-state'; + +export const makeBloatedProcessedTx = async (builderDb: MerkleTreeOperations, seed = 0x1) => { + seed *= MAX_NEW_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds + const tx = mockTx(seed); + const kernelOutput = PublicKernelCircuitPublicInputs.empty(); + kernelOutput.constants.historicalHeader = await builderDb.buildInitialHeader(); + kernelOutput.end.publicDataUpdateRequests = makeTuple( + MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), + seed + 0x500, + ); + kernelOutput.endNonRevertibleData.publicDataUpdateRequests 
= makeTuple( + MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), + seed + 0x600, + ); + + const processedTx = makeProcessedTx(tx, kernelOutput, makeProof()); + + processedTx.data.end.newNoteHashes = makeTuple(MAX_REVERTIBLE_NOTE_HASHES_PER_TX, makeNewSideEffect, seed + 0x100); + processedTx.data.endNonRevertibleData.newNoteHashes = makeTuple( + MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, + makeNewSideEffect, + seed + 0x100, + ); + processedTx.data.end.newNullifiers = makeTuple( + MAX_REVERTIBLE_NULLIFIERS_PER_TX, + makeNewSideEffectLinkedToNoteHash, + seed + 0x100000, + ); + + processedTx.data.endNonRevertibleData.newNullifiers = makeTuple( + MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, + makeNewSideEffectLinkedToNoteHash, + seed + 0x100000 + MAX_REVERTIBLE_NULLIFIERS_PER_TX, + ); + + processedTx.data.end.newNullifiers[tx.data.end.newNullifiers.length - 1] = SideEffectLinkedToNoteHash.empty(); + + processedTx.data.end.newL2ToL1Msgs = makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x300); + processedTx.data.end.encryptedLogsHash = toTruncField(processedTx.encryptedLogs.hash()); + processedTx.data.end.unencryptedLogsHash = toTruncField(processedTx.unencryptedLogs.hash()); + + return processedTx; +}; diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 07ed89f7a6c..eb53c4dbb6a 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -49,7 +49,7 @@ import { Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize import { MerkleTreeOperations } from '@aztec/world-state'; import { VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; -import { RollupProver } from '../prover/index.js'; +import { CircuitProver } from '../prover/index.js'; import { RollupSimulator } from '../simulator/rollup.js'; // Denotes fields that are not used now, but will be in the future @@ -191,7 +191,7 @@ export function createMergeRollupInputs( export async function executeMergeRollupCircuit( mergeInputs: MergeRollupInputs, simulator: RollupSimulator, - prover: RollupProver, + prover: CircuitProver, logger?: DebugLogger, ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { logger?.debug(`Running merge rollup circuit`); @@ -206,7 +206,7 @@ export async function executeRootRollupCircuit( l1ToL2Roots: RootParityInput, newL1ToL2Messages: Tuple, simulator: RollupSimulator, - prover: RollupProver, + prover: CircuitProver, db: MerkleTreeOperations, logger?: DebugLogger, ): Promise<[RootRollupPublicInputs, Proof]> { @@ -512,7 +512,7 @@ export async function executeBaseRollupCircuit( inputs: BaseRollupInputs, treeSnapshots: Map, simulator: RollupSimulator, - prover: RollupProver, + prover: CircuitProver, logger?: DebugLogger, ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { logger?.(`Running base rollup for ${tx.hash}`); @@ -557,7 +557,7 @@ export function validateSimulatedTree( export async function executeBaseParityCircuit( inputs: BaseParityInputs, simulator: RollupSimulator, - prover: RollupProver, + prover: CircuitProver, logger?: DebugLogger, ): Promise { logger?.debug(`Running base parity circuit`); @@ -569,7 +569,7 @@ export async function executeBaseParityCircuit( export async function executeRootParityCircuit( inputs: RootParityInputs, simulator: RollupSimulator, - prover: RollupProver, + prover: CircuitProver, 
logger?: DebugLogger, ): Promise { logger?.debug(`Running root parity circuit`); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts index 73a2bd9fe2d..fecec3a070b 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts @@ -4,8 +4,6 @@ import { ProcessedTx, ProvingSuccess, makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, - makeProcessedTx, - mockTx, } from '@aztec/circuit-types'; import { AztecAddress, @@ -13,22 +11,13 @@ import { EthAddress, Fr, GlobalVariables, - MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, - MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, - MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, - MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - MAX_REVERTIBLE_NOTE_HASHES_PER_TX, - MAX_REVERTIBLE_NULLIFIERS_PER_TX, - MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NULLIFIER_SUBTREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, PUBLIC_DATA_SUBTREE_HEIGHT, Proof, PublicDataTreeLeaf, - PublicDataUpdateRequest, - PublicKernelCircuitPublicInputs, RootRollupPublicInputs, SideEffect, SideEffectLinkedToNoteHash, @@ -37,15 +26,11 @@ import { import { fr, makeBaseOrMergeRollupPublicInputs, - makeNewSideEffect, - makeNewSideEffectLinkedToNoteHash, makeParityPublicInputs, - makeProof, makeRootRollupPublicInputs, } from '@aztec/circuits.js/testing'; -import { makeTuple, range } from '@aztec/foundation/array'; +import { range } from '@aztec/foundation/array'; import { padArrayEnd, times } from '@aztec/foundation/collection'; -import { toTruncField } from '@aztec/foundation/serialize'; import { sleep } from '@aztec/foundation/sleep'; import { openTmpStore } from '@aztec/kv-store/utils'; import { WASMSimulator } from '@aztec/simulator'; @@ -54,8 +39,9 @@ import { MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import { MockProxy, mock } from 'jest-mock-extended'; import { type MemDown, default as memdown } from 'memdown'; +import { makeBloatedProcessedTx } from '../mocks/fixtures.js'; import { getVerificationKeys } from '../mocks/verification_keys.js'; -import { RollupProver } from '../prover/index.js'; +import { CircuitProver } from '../prover/index.js'; import { RollupSimulator } from '../simulator/rollup.js'; import { ProvingOrchestrator } from './orchestrator.js'; @@ -67,7 +53,7 @@ describe('prover/tx-prover', () => { let expectsDb: MerkleTreeOperations; let simulator: MockProxy; - let prover: MockProxy; + let prover: MockProxy; let blockNumber: number; let baseRollupOutputLeft: BaseOrMergeRollupPublicInputs; @@ -91,7 +77,7 @@ describe('prover/tx-prover', () => { builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); simulator = mock(); - prover = mock(); + prover = mock(); builder = new ProvingOrchestrator(builderDb, new WASMSimulator(), getVerificationKeys(), prover); // Create mock l1 to L2 messages @@ -322,55 +308,6 @@ describe('prover/tx-prover', () => { await builder.stop(); }); - const makeBloatedProcessedTx = async (seed = 0x1) => { - seed *= MAX_NEW_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds - const tx = mockTx(seed); - const kernelOutput = PublicKernelCircuitPublicInputs.empty(); - kernelOutput.constants.historicalHeader = await builderDb.buildInitialHeader(); - kernelOutput.end.publicDataUpdateRequests = makeTuple( - 
MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), - seed + 0x500, - ); - kernelOutput.endNonRevertibleData.publicDataUpdateRequests = makeTuple( - MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), - seed + 0x600, - ); - - const processedTx = makeProcessedTx(tx, kernelOutput, makeProof()); - - processedTx.data.end.newNoteHashes = makeTuple( - MAX_REVERTIBLE_NOTE_HASHES_PER_TX, - makeNewSideEffect, - seed + 0x100, - ); - processedTx.data.endNonRevertibleData.newNoteHashes = makeTuple( - MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, - makeNewSideEffect, - seed + 0x100, - ); - processedTx.data.end.newNullifiers = makeTuple( - MAX_REVERTIBLE_NULLIFIERS_PER_TX, - makeNewSideEffectLinkedToNoteHash, - seed + 0x100000, - ); - - processedTx.data.endNonRevertibleData.newNullifiers = makeTuple( - MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, - makeNewSideEffectLinkedToNoteHash, - seed + 0x100000 + MAX_REVERTIBLE_NULLIFIERS_PER_TX, - ); - - processedTx.data.end.newNullifiers[tx.data.end.newNullifiers.length - 1] = SideEffectLinkedToNoteHash.empty(); - - processedTx.data.end.newL2ToL1Msgs = makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x300); - processedTx.data.end.encryptedLogsHash = toTruncField(processedTx.encryptedLogs.hash()); - processedTx.data.end.unencryptedLogsHash = toTruncField(processedTx.unencryptedLogs.hash()); - - return processedTx; - }; - it.each([ [0, 4], [1, 4], @@ -382,7 +319,7 @@ describe('prover/tx-prover', () => { async (bloatedCount: number, totalCount: number) => { const noteHashTreeBefore = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); const txs = [ - ...(await Promise.all(times(bloatedCount, makeBloatedProcessedTx))), + ...(await Promise.all(times(bloatedCount, () => makeBloatedProcessedTx(builderDb)))), ...(await Promise.all(times(totalCount - bloatedCount, makeEmptyProcessedTx))), ]; @@ -450,10 +387,10 @@ describe('prover/tx-prover', () => { it('builds a mixed L2 block', async () => { const txs = await Promise.all([ - makeBloatedProcessedTx(1), - makeBloatedProcessedTx(2), - makeBloatedProcessedTx(3), - makeBloatedProcessedTx(4), + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + makeBloatedProcessedTx(builderDb, 4), ]); const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); @@ -476,10 +413,10 @@ describe('prover/tx-prover', () => { it('builds a block concurrently with transactions', async () => { const txs = await Promise.all([ - makeBloatedProcessedTx(1), - makeBloatedProcessedTx(2), - makeBloatedProcessedTx(3), - makeBloatedProcessedTx(4), + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + makeBloatedProcessedTx(builderDb, 4), ]); const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index d813a9dc0d1..2e483f4d457 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -26,7 +26,7 @@ import { MerkleTreeOperations } from '@aztec/world-state'; import { inspect } from 'util'; import { VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; -import { RollupProver } from '../prover/index.js'; +import { CircuitProver } 
from '../prover/index.js'; import { RealRollupCircuitSimulator, RollupSimulator } from '../simulator/rollup.js'; import { buildBaseRollupInput, @@ -75,13 +75,13 @@ export class ProvingOrchestrator { private db: MerkleTreeOperations, simulationProvider: SimulationProvider, protected vks: VerificationKeys, - private prover: RollupProver, + private prover: CircuitProver, private maxConcurrentJobs = MAX_CONCURRENT_JOBS, ) { this.simulator = new RealRollupCircuitSimulator(simulationProvider); } - public static new(db: MerkleTreeOperations, simulationProvider: SimulationProvider, prover: RollupProver) { + public static new(db: MerkleTreeOperations, simulationProvider: SimulationProvider, prover: CircuitProver) { const orchestrator = new ProvingOrchestrator(db, simulationProvider, getVerificationKeys(), prover); orchestrator.start(); return Promise.resolve(orchestrator); diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts new file mode 100644 index 00000000000..306e71f9907 --- /dev/null +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -0,0 +1,112 @@ +import { AztecAddress, EthAddress, Fr, GlobalVariables, RootRollupPublicInputs } from '@aztec/circuits.js'; +import { makeRootRollupPublicInputs } from '@aztec/circuits.js/testing'; +import { randomBytes } from '@aztec/foundation/crypto'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { fileURLToPath } from '@aztec/foundation/url'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; + +import * as fs from 'fs/promises'; +import { type MemDown, default as memdown } from 'memdown'; +import path from 'path'; + +import { makeBloatedProcessedTx } from '../mocks/fixtures.js'; +import { buildBaseRollupInput } from '../orchestrator/block-building-helpers.js'; +import { BBNativeRollupProver, BBProverConfig } from './bb_prover.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + +const logger = createDebugLogger('aztec:bb-prover-test'); + +const { + BB_RELEASE_DIR = 'cpp/build/bin', + TEMP_DIR = '/tmp', + BB_BINARY_PATH = '', + BB_WORKING_DIRECTORY = '', + NOIR_RELEASE_DIR = 'noir-repo/target/release', + ACVM_BINARY_PATH = '', + ACVM_WORKING_DIRECTORY = '', +} = process.env; + +// Determines if we have access to the bb binary and a tmp folder for temp files +const getConfig = async () => { + try { + const expectedBBPath = BB_BINARY_PATH + ? BB_BINARY_PATH + : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../barretenberg/', BB_RELEASE_DIR)}/bb`; + await fs.access(expectedBBPath, fs.constants.R_OK); + const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; + const bbWorkingDirectory = BB_WORKING_DIRECTORY ? BB_WORKING_DIRECTORY : `${tempWorkingDirectory}/bb`; + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + logger(`Using native BB binary at ${expectedBBPath} with working directory ${bbWorkingDirectory}`); + + const expectedAcvmPath = ACVM_BINARY_PATH + ? ACVM_BINARY_PATH + : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../noir/', NOIR_RELEASE_DIR)}/acvm`; + await fs.access(expectedAcvmPath, fs.constants.R_OK); + const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? 
ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`; + await fs.mkdir(acvmWorkingDirectory, { recursive: true }); + logger(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); + return { + acvmWorkingDirectory, + bbWorkingDirectory, + expectedAcvmPath, + expectedBBPath, + directoryToCleanup: ACVM_WORKING_DIRECTORY && BB_WORKING_DIRECTORY ? undefined : tempWorkingDirectory, + }; + } catch (err) { + logger(`Native BB not available, error: ${err}`); + return undefined; + } +}; + +describe('prover/bb_prover', () => { + let builderDb: MerkleTreeOperations; + let prover: BBNativeRollupProver; + let directoryToCleanup: string | undefined; + + let blockNumber: number; + let rootRollupOutput: RootRollupPublicInputs; + + let globalVariables: GlobalVariables; + + const chainId = Fr.ZERO; + const version = Fr.ZERO; + const coinbase = EthAddress.ZERO; + const feeRecipient = AztecAddress.ZERO; + + beforeEach(async () => { + blockNumber = 3; + globalVariables = new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + rootRollupOutput = makeRootRollupPublicInputs(0); + rootRollupOutput.header.globalVariables = globalVariables; + + const config = await getConfig(); + if (!config) { + throw new Error(`BB binary must be present to test the BB Prover`); + } + directoryToCleanup = config.directoryToCleanup; + const bbConfig: BBProverConfig = { + acvmBinaryPath: config.expectedAcvmPath, + acvmWorkingDirectory: config.acvmWorkingDirectory, + bbBinaryPath: config.expectedBBPath, + bbWorkingDirectory: config.bbWorkingDirectory, + }; + prover = await BBNativeRollupProver.new(bbConfig); + }, 20_000); + + afterEach(async () => { + if (directoryToCleanup) { + await fs.rm(directoryToCleanup, { recursive: true, force: true }); + } + }, 5000); + + it('proves the base rollup circuit', async () => { + const tx = await makeBloatedProcessedTx(builderDb); + + const inputs = await buildBaseRollupInput(tx, globalVariables, builderDb); + await prover.getBaseRollupProof(inputs); + }, 30_000); +}); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts new file mode 100644 index 00000000000..67e2423a977 --- /dev/null +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -0,0 +1,181 @@ +/* eslint-disable require-await */ +import { CircuitSimulationStats } from '@aztec/circuit-types/stats'; +import { + BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BaseRollupInputs, + MergeRollupInputs, + ParityPublicInputs, + Proof, + RootParityInputs, + RootRollupInputs, + RootRollupPublicInputs, + makeEmptyProof, +} from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { elapsed } from '@aztec/foundation/timer'; +import { + BaseParityArtifact, + MergeRollupArtifact, + ProtocolArtifacts, + ProtocolCircuitArtifacts, + RootParityArtifact, + RootRollupArtifact, + SimulatedBaseRollupArtifact, + convertBaseParityInputsToWitnessMap, + convertBaseParityOutputsFromWitnessMap, + convertBaseRollupInputsToWitnessMap, + convertBaseRollupOutputsFromWitnessMap, + convertMergeRollupInputsToWitnessMap, + convertMergeRollupOutputsFromWitnessMap, + convertRootParityInputsToWitnessMap, + convertRootParityOutputsFromWitnessMap, + convertRootRollupInputsToWitnessMap, + convertRootRollupOutputsFromWitnessMap, +} from '@aztec/noir-protocol-circuits-types'; +import { NativeACVMSimulator } from 
'@aztec/simulator'; + +import * as fs from 'fs/promises'; + +import { generateProvingKeyForNoirCircuit, generateVerificationKeyForNoirCircuit } from '../bb/execute.js'; +import { CircuitProver } from './interface.js'; + +const logger = createDebugLogger('aztec:bb-prover'); + +async function ensureAllKeys(bbBinaryPath: string, bbWorkingDirectory: string) { + const realCircuits = Object.keys(ProtocolCircuitArtifacts).filter((n: string) => !n.includes('Simulated')); + for (const circuitName of realCircuits) { + logger.info(`Generating proving key for circuit ${circuitName}`); + await generateProvingKeyForNoirCircuit( + bbBinaryPath, + bbWorkingDirectory, + circuitName, + ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], + logger, + ); + logger.info(`Generating verification key for circuit ${circuitName}`); + await generateVerificationKeyForNoirCircuit( + bbBinaryPath, + bbWorkingDirectory, + circuitName, + ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], + logger, + ); + } +} + +export type BBProverConfig = { + bbBinaryPath: string; + bbWorkingDirectory: string; + acvmBinaryPath: string; + acvmWorkingDirectory: string; +}; + +/** + * Prover implementation that uses barretenberg native proving + */ +export class BBNativeRollupProver implements CircuitProver { + constructor( + private simulator: NativeACVMSimulator, + private bbBinaryPath: string, + private bbWorkingDirectory: string, + ) {} + + static async new(config: BBProverConfig) { + await fs.access(config.acvmBinaryPath, fs.constants.R_OK); + await fs.mkdir(config.acvmWorkingDirectory, { recursive: true }); + await fs.access(config.bbBinaryPath, fs.constants.R_OK); + await fs.mkdir(config.bbWorkingDirectory, { recursive: true }); + logger.info(`Using native BB at ${config.bbBinaryPath} and working directory ${config.bbWorkingDirectory}`); + logger.info(`Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`); + + await ensureAllKeys(config.bbBinaryPath, config.bbWorkingDirectory); + + const simulator = new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath, true); + + return new BBNativeRollupProver(simulator, config.bbBinaryPath, config.bbWorkingDirectory); + } + + /** + * Simulates the base parity circuit from its inputs. + * @param inputs - Inputs to the circuit. + * @returns The public inputs of the parity circuit. + */ + public async getBaseParityProof(inputs: BaseParityInputs): Promise<[Proof, ParityPublicInputs]> { + const witnessMap = convertBaseParityInputsToWitnessMap(inputs); + + const witness = await this.simulator.simulateCircuit(witnessMap, BaseParityArtifact); + + const result = convertBaseParityOutputsFromWitnessMap(witness); + + return Promise.resolve([makeEmptyProof(), result]); + } + + /** + * Simulates the root parity circuit from its inputs. + * @param inputs - Inputs to the circuit. + * @returns The public inputs of the parity circuit. + */ + public async getRootParityProof(inputs: RootParityInputs): Promise<[Proof, ParityPublicInputs]> { + const witnessMap = convertRootParityInputsToWitnessMap(inputs); + + const witness = await this.simulator.simulateCircuit(witnessMap, RootParityArtifact); + + const result = convertRootParityOutputsFromWitnessMap(witness); + + return Promise.resolve([makeEmptyProof(), result]); + } + + /** + * Simulates the base rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. 
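
As exercised by bb_prover.test.ts further down, this prover is built through its async factory. A minimal construction sketch, where the binary and directory paths are placeholders and baseRollupInputs is assumed to come from buildBaseRollupInput as in that test:

```typescript
import { BaseRollupInputs } from '@aztec/circuits.js';

import { BBNativeRollupProver, BBProverConfig } from './bb_prover.js';

declare const baseRollupInputs: BaseRollupInputs; // assumed: built via buildBaseRollupInput

const config: BBProverConfig = {
  bbBinaryPath: '/usr/local/bin/bb',     // placeholder
  bbWorkingDirectory: '/tmp/bb',         // placeholder
  acvmBinaryPath: '/usr/local/bin/acvm', // placeholder
  acvmWorkingDirectory: '/tmp/acvm',     // placeholder
};

// new() verifies both binaries are readable, pre-generates any missing proving and
// verification keys, then wraps a NativeACVMSimulator for witness generation.
const prover = await BBNativeRollupProver.new(config);

// At this stage of the patch the methods still return empty proofs alongside the simulated outputs.
const [proof, outputs] = await prover.getBaseRollupProof(baseRollupInputs);
```
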
+ */ + public async getBaseRollupProof(input: BaseRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]> { + const witnessMap = convertBaseRollupInputsToWitnessMap(input); + + const witness = await this.simulator.simulateCircuit(witnessMap, SimulatedBaseRollupArtifact); + + const result = convertBaseRollupOutputsFromWitnessMap(witness); + + return Promise.resolve([makeEmptyProof(), result]); + } + /** + * Simulates the merge rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + public async getMergeRollupProof(input: MergeRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]> { + const witnessMap = convertMergeRollupInputsToWitnessMap(input); + + // use WASM here as it is faster for small circuits + const witness = await this.simulator.simulateCircuit(witnessMap, MergeRollupArtifact); + + const result = convertMergeRollupOutputsFromWitnessMap(witness); + + return Promise.resolve([makeEmptyProof(), result]); + } + + /** + * Simulates the root rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + public async getRootRollupProof(input: RootRollupInputs): Promise<[Proof, RootRollupPublicInputs]> { + const witnessMap = convertRootRollupInputsToWitnessMap(input); + + // use WASM here as it is faster for small circuits + const [duration, witness] = await elapsed(() => this.simulator.simulateCircuit(witnessMap, RootRollupArtifact)); + + const result = convertRootRollupOutputsFromWitnessMap(witness); + + logger(`Simulated root rollup circuit`, { + eventName: 'circuit-simulation', + circuitName: 'root-rollup', + duration, + inputSize: input.toBuffer().length, + outputSize: result.toBuffer().length, + } satisfies CircuitSimulationStats); + return Promise.resolve([makeEmptyProof(), result]); + } +} diff --git a/yarn-project/prover-client/src/prover/empty.ts b/yarn-project/prover-client/src/prover/empty.ts deleted file mode 100644 index 7ca043c8f8f..00000000000 --- a/yarn-project/prover-client/src/prover/empty.ts +++ /dev/null @@ -1,97 +0,0 @@ -/* eslint-disable require-await */ -import { - AggregationObject, - BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BaseRollupInputs, - MergeRollupInputs, - ParityPublicInputs, - Proof, - PublicCircuitPublicInputs, - PublicKernelCircuitPublicInputs, - RootParityInputs, - RootRollupInputs, - RootRollupPublicInputs, -} from '@aztec/circuits.js'; - -import { PublicProver, RollupProver } from './index.js'; - -const EMPTY_PROOF_SIZE = 42; - -// TODO: Silently modifying one of the inputs to inject the aggregation object is horrible. -// We should rethink these interfaces. - -/** - * Prover implementation that returns empty proofs and overrides aggregation objects. - */ -export class EmptyRollupProver implements RollupProver { - /** - * Creates an empty proof for the given input. - * @param inputs - Inputs to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - async getBaseParityProof(inputs: BaseParityInputs, publicInputs: ParityPublicInputs): Promise { - publicInputs.aggregationObject = AggregationObject.makeFake(); - return new Proof(Buffer.alloc(EMPTY_PROOF_SIZE, 0)); - } - - /** - * Creates an empty proof for the given input. - * @param inputs - Inputs to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. 
- */ - async getRootParityProof(inputs: RootParityInputs, publicInputs: ParityPublicInputs): Promise { - publicInputs.aggregationObject = AggregationObject.makeFake(); - return new Proof(Buffer.alloc(EMPTY_PROOF_SIZE, 0)); - } - - /** - * Creates an empty proof for the given input. - * @param _input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - async getBaseRollupProof(_input: BaseRollupInputs, publicInputs: BaseOrMergeRollupPublicInputs): Promise { - publicInputs.aggregationObject = AggregationObject.makeFake(); - return new Proof(Buffer.alloc(EMPTY_PROOF_SIZE, 0)); - } - - /** - * Creates an empty proof for the given input. - * @param _input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - async getMergeRollupProof(_input: MergeRollupInputs, publicInputs: BaseOrMergeRollupPublicInputs): Promise { - publicInputs.aggregationObject = AggregationObject.makeFake(); - return new Proof(Buffer.alloc(EMPTY_PROOF_SIZE, 0)); - } - /** - * Creates an empty proof for the given input. - * @param _input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - async getRootRollupProof(_input: RootRollupInputs, publicInputs: RootRollupPublicInputs): Promise { - publicInputs.aggregationObject = AggregationObject.makeFake(); - return new Proof(Buffer.alloc(EMPTY_PROOF_SIZE, 0)); - } -} - -/** - * Prover implementation that returns empty proofs. - */ -export class EmptyPublicProver implements PublicProver { - /** - * Creates an empty proof for the given input. - * @param _publicInputs - Public inputs obtained via simulation. - */ - async getPublicCircuitProof(_publicInputs: PublicCircuitPublicInputs): Promise { - return new Proof(Buffer.alloc(EMPTY_PROOF_SIZE, 0)); - } - - /** - * Creates an empty proof for the given input. - * @param _publicInputs - Public inputs obtained via simulation. - */ - async getPublicKernelCircuitProof(_publicInputs: PublicKernelCircuitPublicInputs): Promise { - return new Proof(Buffer.alloc(EMPTY_PROOF_SIZE, 0)); - } -} diff --git a/yarn-project/prover-client/src/prover/index.ts b/yarn-project/prover-client/src/prover/index.ts index 333d872cb96..dbccb681019 100644 --- a/yarn-project/prover-client/src/prover/index.ts +++ b/yarn-project/prover-client/src/prover/index.ts @@ -1,70 +1,2 @@ -import { - BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BaseRollupInputs, - MergeRollupInputs, - ParityPublicInputs, - Proof, - PublicCircuitPublicInputs, - PublicKernelCircuitPublicInputs, - RootParityInputs, - RootRollupInputs, - RootRollupPublicInputs, -} from '@aztec/circuits.js'; - -/** - * Generates proofs for parity and rollup circuits. - */ -export interface RollupProver { - /** - * Creates a proof for the given input. - * @param input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - getBaseParityProof(inputs: BaseParityInputs, publicInputs: ParityPublicInputs): Promise; - - /** - * Creates a proof for the given input. - * @param input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - getRootParityProof(inputs: RootParityInputs, publicInputs: ParityPublicInputs): Promise; - - /** - * Creates a proof for the given input. - * @param input - Input to the circuit. 
- * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - getBaseRollupProof(input: BaseRollupInputs, publicInputs: BaseOrMergeRollupPublicInputs): Promise; - - /** - * Creates a proof for the given input. - * @param input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - getMergeRollupProof(input: MergeRollupInputs, publicInputs: BaseOrMergeRollupPublicInputs): Promise; - - /** - * Creates a proof for the given input. - * @param input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. - */ - getRootRollupProof(input: RootRollupInputs, publicInputs: RootRollupPublicInputs): Promise; -} - -/** - * Generates proofs for the public and public kernel circuits. - */ -export interface PublicProver { - /** - * Creates a proof for the given input. - * @param publicInputs - Public inputs obtained via simulation. - */ - getPublicCircuitProof(publicInputs: PublicCircuitPublicInputs): Promise; - - /** - * Creates a proof for the given input. - * @param publicInputs - Public inputs obtained via simulation. - */ - getPublicKernelCircuitProof(publicInputs: PublicKernelCircuitPublicInputs): Promise; -} +export * from './interface.js'; +export * from './empty.js'; \ No newline at end of file diff --git a/yarn-project/prover-client/src/prover/interface.ts b/yarn-project/prover-client/src/prover/interface.ts new file mode 100644 index 00000000000..00ba7419b92 --- /dev/null +++ b/yarn-project/prover-client/src/prover/interface.ts @@ -0,0 +1,68 @@ +import { + BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BaseRollupInputs, + MergeRollupInputs, + ParityPublicInputs, + Proof, + PublicCircuitPublicInputs, + PublicKernelCircuitPublicInputs, + RootParityInputs, + RootRollupInputs, + RootRollupPublicInputs, +} from '@aztec/circuits.js'; + +/** + * Generates proofs for parity and rollup circuits. + */ +export interface CircuitProver { + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getBaseParityProof(inputs: BaseParityInputs): Promise<[Proof, ParityPublicInputs]>; + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. + */ + getRootParityProof(inputs: RootParityInputs): Promise<[Proof, ParityPublicInputs]>; + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getBaseRollupProof(input: BaseRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]>; + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. + */ + getMergeRollupProof(input: MergeRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]>; + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. + */ + getRootRollupProof(input: RootRollupInputs): Promise<[Proof, RootRollupPublicInputs]>; +} + +/** + * Generates proofs for the public and public kernel circuits. + */ +export interface PublicProver { + /** + * Creates a proof for the given input. + * @param publicInputs - Public inputs obtained via simulation. 
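
In contrast to the removed RollupProver, whose methods took the simulated public inputs and mutated them in place, the CircuitProver interface above returns the proof and the public inputs together. A small caller sketch:

```typescript
import { BaseParityInputs } from '@aztec/circuits.js';

import { CircuitProver } from './interface.js';

// No in-place mutation of the simulated outputs: both values come back from the call.
async function proveBaseParity(prover: CircuitProver, inputs: BaseParityInputs) {
  const [proof, publicInputs] = await prover.getBaseParityProof(inputs);
  return { proof, publicInputs };
}
```
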
+ */ + getPublicCircuitProof(publicInputs: PublicCircuitPublicInputs): Promise; + + /** + * Creates a proof for the given input. + * @param publicInputs - Public inputs obtained via simulation. + */ + getPublicKernelCircuitProof(publicInputs: PublicKernelCircuitPublicInputs): Promise; +} diff --git a/yarn-project/prover-client/src/prover/test_circuit_prover.ts b/yarn-project/prover-client/src/prover/test_circuit_prover.ts new file mode 100644 index 00000000000..63fb5aabde2 --- /dev/null +++ b/yarn-project/prover-client/src/prover/test_circuit_prover.ts @@ -0,0 +1,131 @@ +import { CircuitSimulationStats } from '@aztec/circuit-types/stats'; +import { + BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BaseRollupInputs, + MergeRollupInputs, + ParityPublicInputs, + Proof, + RootParityInputs, + RootRollupInputs, + RootRollupPublicInputs, + makeEmptyProof, +} from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { elapsed } from '@aztec/foundation/timer'; +import { + BaseParityArtifact, + MergeRollupArtifact, + RootParityArtifact, + RootRollupArtifact, + SimulatedBaseRollupArtifact, + convertBaseParityInputsToWitnessMap, + convertBaseParityOutputsFromWitnessMap, + convertBaseRollupInputsToWitnessMap, + convertBaseRollupOutputsFromWitnessMap, + convertMergeRollupInputsToWitnessMap, + convertMergeRollupOutputsFromWitnessMap, + convertRootParityInputsToWitnessMap, + convertRootParityOutputsFromWitnessMap, + convertRootRollupInputsToWitnessMap, + convertRootRollupOutputsFromWitnessMap, +} from '@aztec/noir-protocol-circuits-types'; +import { SimulationProvider, WASMSimulator } from '@aztec/simulator'; + +import { CircuitProver } from './interface.js'; + +/** + * A class for use in testing situations (e2e, unit test etc) + * Simulates circuits using the most efficient method and performs no proving + */ +export class TestCircuitProver implements CircuitProver { + private wasmSimulator = new WASMSimulator(); + + constructor( + private simulationProvider: SimulationProvider, + private logger = createDebugLogger('aztec:test-prover'), + ) {} + + /** + * Simulates the base parity circuit from its inputs. + * @param inputs - Inputs to the circuit. + * @returns The public inputs of the parity circuit. + */ + public async getBaseParityProof(inputs: BaseParityInputs): Promise<[Proof, ParityPublicInputs]> { + const witnessMap = convertBaseParityInputsToWitnessMap(inputs); + + const witness = await this.simulationProvider.simulateCircuit(witnessMap, BaseParityArtifact); + + const result = convertBaseParityOutputsFromWitnessMap(witness); + + return Promise.resolve([makeEmptyProof(), result]); + } + + /** + * Simulates the root parity circuit from its inputs. + * @param inputs - Inputs to the circuit. + * @returns The public inputs of the parity circuit. + */ + public async getRootParityProof(inputs: RootParityInputs): Promise<[Proof, ParityPublicInputs]> { + const witnessMap = convertRootParityInputsToWitnessMap(inputs); + + const witness = await this.simulationProvider.simulateCircuit(witnessMap, RootParityArtifact); + + const result = convertRootParityOutputsFromWitnessMap(witness); + + return Promise.resolve([makeEmptyProof(), result]); + } + + /** + * Simulates the base rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. 
+ */ + public async getBaseRollupProof(input: BaseRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]> { + const witnessMap = convertBaseRollupInputsToWitnessMap(input); + + const witness = await this.simulationProvider.simulateCircuit(witnessMap, SimulatedBaseRollupArtifact); + + const result = convertBaseRollupOutputsFromWitnessMap(witness); + + return Promise.resolve([makeEmptyProof(), result]); + } + /** + * Simulates the merge rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + public async getMergeRollupProof(input: MergeRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]> { + const witnessMap = convertMergeRollupInputsToWitnessMap(input); + + // use WASM here as it is faster for small circuits + const witness = await this.wasmSimulator.simulateCircuit(witnessMap, MergeRollupArtifact); + + const result = convertMergeRollupOutputsFromWitnessMap(witness); + + return Promise.resolve([makeEmptyProof(), result]); + } + + /** + * Simulates the root rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + public async getRootRollupProof(input: RootRollupInputs): Promise<[Proof, RootRollupPublicInputs]> { + const witnessMap = convertRootRollupInputsToWitnessMap(input); + + // use WASM here as it is faster for small circuits + const [duration, witness] = await elapsed(() => this.wasmSimulator.simulateCircuit(witnessMap, RootRollupArtifact)); + + const result = convertRootRollupOutputsFromWitnessMap(witness); + + this.logger(`Simulated root rollup circuit`, { + eventName: 'circuit-simulation', + circuitName: 'root-rollup', + duration, + inputSize: input.toBuffer().length, + outputSize: result.toBuffer().length, + } satisfies CircuitSimulationStats); + return Promise.resolve([makeEmptyProof(), result]); + } +} diff --git a/yarn-project/prover-client/src/simulator/rollup.ts b/yarn-project/prover-client/src/simulator/rollup.ts index 5101c07b2d9..bfb8312a364 100644 --- a/yarn-project/prover-client/src/simulator/rollup.ts +++ b/yarn-project/prover-client/src/simulator/rollup.ts @@ -13,10 +13,10 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { elapsed } from '@aztec/foundation/timer'; import { BaseParityArtifact, - BaseRollupArtifact, MergeRollupArtifact, RootParityArtifact, RootRollupArtifact, + SimulatedBaseRollupArtifact, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, convertBaseRollupInputsToWitnessMap, @@ -115,7 +115,7 @@ export class RealRollupCircuitSimulator implements RollupSimulator { public async baseRollupCircuit(input: BaseRollupInputs): Promise { const witnessMap = convertBaseRollupInputsToWitnessMap(input); - const witness = await this.simulationProvider.simulateCircuit(witnessMap, BaseRollupArtifact); + const witness = await this.simulationProvider.simulateCircuit(witnessMap, SimulatedBaseRollupArtifact); const result = convertBaseRollupOutputsFromWitnessMap(witness); diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index 782b65d14c2..a7b22289144 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -7,7 +7,7 @@ import { WorldStateSynchronizer } from '@aztec/world-state'; import { ProverConfig } from '../config.js'; import { VerificationKeys, getVerificationKeys } from 
'../mocks/verification_keys.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; -import { EmptyRollupProver } from '../prover/empty.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; /** * A prover accepting individual transaction requests @@ -23,7 +23,7 @@ export class TxProver implements ProverClient { worldStateSynchronizer.getLatest(), simulationProvider, getVerificationKeys(), - new EmptyRollupProver(), + new TestCircuitProver(simulationProvider), ); } diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index ec777bdea39..848a5d7d15b 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -29,7 +29,8 @@ function parseIntoWitnessMap(outputString: string) { * @param inputWitness - The circuit's input witness * @param bytecode - The circuit bytecode * @param workingDirectory - A directory to use for temporary files by the ACVM - * @param pathToAcvm - The path to the ACVm binary + * @param pathToAcvm - The path to the ACVM binary + * @param outputFilename - If specified, the output will be stored as a file, encoded using Bincode, instead of being streamed back over stdout * @returns The completed partial witness outputted from the circuit */ export async function executeNativeCircuit( @@ -37,6 +38,7 @@ export async function executeNativeCircuit( bytecode: Buffer, workingDirectory: string, pathToAcvm: string, + outputFilename?: string, ) { const bytecodeFilename = 'bytecode'; const witnessFilename = 'input_witness.toml'; @@ -56,7 +58,7 @@ export async function executeNativeCircuit( await fs.writeFile(`${workingDirectory}/${witnessFilename}`, witnessMap); // Execute the ACVM using the given args - const args = [ + let args = [ `execute`, `--working-directory`, `${workingDirectory}`, @@ -64,8 +66,12 @@ export async function executeNativeCircuit( `${bytecodeFilename}`, `--input-witness`, `${witnessFilename}`, - `--print`, ]; + if (!outputFilename) { + args = args.concat(['--print']); + } else { + args = args.concat([`--output-witness`, `${outputFilename}`]); + } const processPromise = new Promise((resolve, reject) => { let outputWitness = Buffer.alloc(0); let errorBuffer = Buffer.alloc(0); @@ -87,6 +93,9 @@ export async function executeNativeCircuit( try { const output = await processPromise; + if (outputFilename) { + return new Map(); + } return parseIntoWitnessMap(output); } finally { // Clean up the working directory before we leave @@ -95,7 +104,7 @@ export async function executeNativeCircuit( } export class NativeACVMSimulator implements SimulationProvider { - constructor(private workingDirectory: string, private pathToAcvm: string) {} + constructor(private workingDirectory: string, private pathToAcvm: string, private outputAsBincode = false) {} async simulateCircuit(input: WitnessMap, compiledCircuit: NoirCompiledCircuit): Promise { // Execute the circuit on those initial witness values @@ -103,9 +112,16 @@ export class NativeACVMSimulator implements SimulationProvider { const decodedBytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); // Provide a unique working directory so we don't get clashes with parallel executions - const directory = `${this.workingDirectory}/${randomBytes(32).toString('hex')}`; + const directory = `${this.workingDirectory}/${randomBytes(8).toString('hex')}`; + // Execute the circuit - const _witnessMap = await executeNativeCircuit(input, decodedBytecode, directory, 
this.pathToAcvm); + const _witnessMap = await executeNativeCircuit( + input, + decodedBytecode, + directory, + this.pathToAcvm, + this.outputAsBincode ? 'output-witness' : undefined, + ); return _witnessMap; } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index a7437ba5451..34c1e83d7dc 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -766,6 +766,7 @@ __metadata: "@types/jest": ^29.5.0 "@types/memdown": ^3.0.0 "@types/node": ^18.7.23 + commander: ^9.0.0 jest: ^29.5.0 jest-mock-extended: ^3.0.3 lodash.chunk: ^4.2.0 @@ -773,6 +774,8 @@ __metadata: ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 + bin: + bb-cli: ./dest/bb/index.js languageName: unknown linkType: soft From 78b36ca9befe33177d18b0fb01109469b7860826 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Tue, 26 Mar 2024 10:31:40 +0000 Subject: [PATCH 02/41] Merge fix --- .../prover-client/src/mocks/fixtures.ts | 6 +-- .../src/orchestrator/orchestrator.test.ts | 49 ------------------- 2 files changed, 3 insertions(+), 52 deletions(-) diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index 78e1f4e44eb..0fde07dfaad 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -1,5 +1,6 @@ import { makeProcessedTx, mockTx } from '@aztec/circuit-types'; import { + Fr, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, @@ -14,7 +15,6 @@ import { } from '@aztec/circuits.js'; import { fr, makeNewSideEffect, makeNewSideEffectLinkedToNoteHash, makeProof } from '@aztec/circuits.js/testing'; import { makeTuple } from '@aztec/foundation/array'; -import { toTruncField } from '@aztec/foundation/serialize'; import { MerkleTreeOperations } from '@aztec/world-state'; export const makeBloatedProcessedTx = async (builderDb: MerkleTreeOperations, seed = 0x1) => { @@ -56,8 +56,8 @@ export const makeBloatedProcessedTx = async (builderDb: MerkleTreeOperations, se processedTx.data.end.newNullifiers[tx.data.end.newNullifiers.length - 1] = SideEffectLinkedToNoteHash.empty(); processedTx.data.end.newL2ToL1Msgs = makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x300); - processedTx.data.end.encryptedLogsHash = toTruncField(processedTx.encryptedLogs.hash()); - processedTx.data.end.unencryptedLogsHash = toTruncField(processedTx.unencryptedLogs.hash()); + processedTx.data.end.encryptedLogsHash = Fr.fromBuffer(processedTx.encryptedLogs.hash()); + processedTx.data.end.unencryptedLogsHash = Fr.fromBuffer(processedTx.unencryptedLogs.hash()); return processedTx; }; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts index 5369101e250..fecec3a070b 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts @@ -308,55 +308,6 @@ describe('prover/tx-prover', () => { await builder.stop(); }); - const makeBloatedProcessedTx = async (seed = 0x1) => { - seed *= MAX_NEW_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds - const tx = mockTx(seed); - const kernelOutput = PublicKernelCircuitPublicInputs.empty(); - kernelOutput.constants.historicalHeader = await builderDb.buildInitialHeader(); - kernelOutput.end.publicDataUpdateRequests = makeTuple( - MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), - seed + 0x500, - ); - 
kernelOutput.endNonRevertibleData.publicDataUpdateRequests = makeTuple( - MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), - seed + 0x600, - ); - - const processedTx = makeProcessedTx(tx, kernelOutput, makeProof()); - - processedTx.data.end.newNoteHashes = makeTuple( - MAX_REVERTIBLE_NOTE_HASHES_PER_TX, - makeNewSideEffect, - seed + 0x100, - ); - processedTx.data.endNonRevertibleData.newNoteHashes = makeTuple( - MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, - makeNewSideEffect, - seed + 0x100, - ); - processedTx.data.end.newNullifiers = makeTuple( - MAX_REVERTIBLE_NULLIFIERS_PER_TX, - makeNewSideEffectLinkedToNoteHash, - seed + 0x100000, - ); - - processedTx.data.endNonRevertibleData.newNullifiers = makeTuple( - MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, - makeNewSideEffectLinkedToNoteHash, - seed + 0x100000 + MAX_REVERTIBLE_NULLIFIERS_PER_TX, - ); - - processedTx.data.end.newNullifiers[tx.data.end.newNullifiers.length - 1] = SideEffectLinkedToNoteHash.empty(); - - processedTx.data.end.newL2ToL1Msgs = makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x300); - processedTx.data.end.encryptedLogsHash = Fr.fromBuffer(processedTx.encryptedLogs.hash()); - processedTx.data.end.unencryptedLogsHash = Fr.fromBuffer(processedTx.unencryptedLogs.hash()); - - return processedTx; - }; - it.each([ [0, 4], [1, 4], From 0a07ffb060184f1c8548c1681ec6db9762e51b73 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Tue, 26 Mar 2024 11:37:44 +0000 Subject: [PATCH 03/41] WIP --- yarn-project/prover-client/src/index.ts | 1 - .../orchestrator/block-building-helpers.ts | 17 +++++----------- .../prover-client/src/prover/bb_prover.ts | 20 +++++++++---------- .../prover-client/src/prover/interface.ts | 10 +++++----- .../src/prover/test_circuit_prover.ts | 20 +++++++++---------- 5 files changed, 30 insertions(+), 38 deletions(-) diff --git a/yarn-project/prover-client/src/index.ts b/yarn-project/prover-client/src/index.ts index 46368c53575..c47f1852f99 100644 --- a/yarn-project/prover-client/src/index.ts +++ b/yarn-project/prover-client/src/index.ts @@ -4,5 +4,4 @@ export * from './dummy-prover.js'; // Exported for integration_l1_publisher.test.ts export { getVerificationKeys } from './mocks/verification_keys.js'; -export { EmptyRollupProver } from './prover/empty.js'; export { RealRollupCircuitSimulator } from './simulator/rollup.js'; diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index d6ef39071e2..fb152c089f6 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -195,9 +195,7 @@ export async function executeMergeRollupCircuit( logger?: DebugLogger, ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { logger?.debug(`Running merge rollup circuit`); - const output = await simulator.mergeRollupCircuit(mergeInputs); - const proof = await prover.getMergeRollupProof(mergeInputs, output); - return [output, proof]; + return await prover.getMergeRollupProof(mergeInputs); } export async function executeRootRollupCircuit( @@ -220,9 +218,7 @@ export async function executeRootRollupCircuit( ); // Simulate and get proof for the root circuit - const rootOutput = await simulator.rootRollupCircuit(rootInput); - - const rootProof = await prover.getRootRollupProof(rootInput, rootOutput); + const [rootOutput, rootProof] = await prover.getRootRollupProof(rootInput); 
//TODO(@PhilWindle) Move this to orchestrator to ensure that we are still on the same block // Update the archive with the latest block header @@ -517,9 +513,8 @@ export async function executeBaseRollupCircuit( logger?: DebugLogger, ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { logger?.(`Running base rollup for ${tx.hash}`); - const rollupOutput = await simulator.baseRollupCircuit(inputs); + const [rollupOutput, proof] = await prover.getBaseRollupProof(inputs); validatePartialState(rollupOutput.end, treeSnapshots); - const proof = await prover.getBaseRollupProof(inputs, rollupOutput); return [rollupOutput, proof]; } @@ -562,8 +557,7 @@ export async function executeBaseParityCircuit( logger?: DebugLogger, ): Promise { logger?.debug(`Running base parity circuit`); - const parityPublicInputs = await simulator.baseParityCircuit(inputs); - const proof = await prover.getBaseParityProof(inputs, parityPublicInputs); + const [parityPublicInputs, proof] = await prover.getBaseParityProof(inputs); return new RootParityInput(proof, parityPublicInputs); } @@ -574,8 +568,7 @@ export async function executeRootParityCircuit( logger?: DebugLogger, ): Promise { logger?.debug(`Running root parity circuit`); - const parityPublicInputs = await simulator.rootParityCircuit(inputs); - const proof = await prover.getRootParityProof(inputs, parityPublicInputs); + const [parityPublicInputs, proof] = await prover.getRootParityProof(inputs); return new RootParityInput(proof, parityPublicInputs); } diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 67e2423a977..d75da77b11b 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -101,14 +101,14 @@ export class BBNativeRollupProver implements CircuitProver { * @param inputs - Inputs to the circuit. * @returns The public inputs of the parity circuit. */ - public async getBaseParityProof(inputs: BaseParityInputs): Promise<[Proof, ParityPublicInputs]> { + public async getBaseParityProof(inputs: BaseParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertBaseParityInputsToWitnessMap(inputs); const witness = await this.simulator.simulateCircuit(witnessMap, BaseParityArtifact); const result = convertBaseParityOutputsFromWitnessMap(witness); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } /** @@ -116,14 +116,14 @@ export class BBNativeRollupProver implements CircuitProver { * @param inputs - Inputs to the circuit. * @returns The public inputs of the parity circuit. */ - public async getRootParityProof(inputs: RootParityInputs): Promise<[Proof, ParityPublicInputs]> { + public async getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertRootParityInputsToWitnessMap(inputs); const witness = await this.simulator.simulateCircuit(witnessMap, RootParityArtifact); const result = convertRootParityOutputsFromWitnessMap(witness); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } /** @@ -131,21 +131,21 @@ export class BBNativeRollupProver implements CircuitProver { * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. 
*/ - public async getBaseRollupProof(input: BaseRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]> { + public async getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertBaseRollupInputsToWitnessMap(input); const witness = await this.simulator.simulateCircuit(witnessMap, SimulatedBaseRollupArtifact); const result = convertBaseRollupOutputsFromWitnessMap(witness); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } /** * Simulates the merge rollup circuit from its inputs. * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. */ - public async getMergeRollupProof(input: MergeRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]> { + public async getMergeRollupProof(input: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertMergeRollupInputsToWitnessMap(input); // use WASM here as it is faster for small circuits @@ -153,7 +153,7 @@ export class BBNativeRollupProver implements CircuitProver { const result = convertMergeRollupOutputsFromWitnessMap(witness); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } /** @@ -161,7 +161,7 @@ export class BBNativeRollupProver implements CircuitProver { * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. */ - public async getRootRollupProof(input: RootRollupInputs): Promise<[Proof, RootRollupPublicInputs]> { + public async getRootRollupProof(input: RootRollupInputs): Promise<[RootRollupPublicInputs, Proof]> { const witnessMap = convertRootRollupInputsToWitnessMap(input); // use WASM here as it is faster for small circuits @@ -176,6 +176,6 @@ export class BBNativeRollupProver implements CircuitProver { inputSize: input.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } } diff --git a/yarn-project/prover-client/src/prover/interface.ts b/yarn-project/prover-client/src/prover/interface.ts index 00ba7419b92..0f0d124ccdd 100644 --- a/yarn-project/prover-client/src/prover/interface.ts +++ b/yarn-project/prover-client/src/prover/interface.ts @@ -20,34 +20,34 @@ export interface CircuitProver { * Creates a proof for the given input. * @param input - Input to the circuit. */ - getBaseParityProof(inputs: BaseParityInputs): Promise<[Proof, ParityPublicInputs]>; + getBaseParityProof(inputs: BaseParityInputs): Promise<[ParityPublicInputs, Proof]>; /** * Creates a proof for the given input. * @param input - Input to the circuit. * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. */ - getRootParityProof(inputs: RootParityInputs): Promise<[Proof, ParityPublicInputs]>; + getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]>; /** * Creates a proof for the given input. * @param input - Input to the circuit. */ - getBaseRollupProof(input: BaseRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]>; + getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]>; /** * Creates a proof for the given input. * @param input - Input to the circuit. * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. 
*/ - getMergeRollupProof(input: MergeRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]>; + getMergeRollupProof(input: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]>; /** * Creates a proof for the given input. * @param input - Input to the circuit. * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. */ - getRootRollupProof(input: RootRollupInputs): Promise<[Proof, RootRollupPublicInputs]>; + getRootRollupProof(input: RootRollupInputs): Promise<[RootRollupPublicInputs, Proof]>; } /** diff --git a/yarn-project/prover-client/src/prover/test_circuit_prover.ts b/yarn-project/prover-client/src/prover/test_circuit_prover.ts index 63fb5aabde2..a8cda3fdcf8 100644 --- a/yarn-project/prover-client/src/prover/test_circuit_prover.ts +++ b/yarn-project/prover-client/src/prover/test_circuit_prover.ts @@ -51,14 +51,14 @@ export class TestCircuitProver implements CircuitProver { * @param inputs - Inputs to the circuit. * @returns The public inputs of the parity circuit. */ - public async getBaseParityProof(inputs: BaseParityInputs): Promise<[Proof, ParityPublicInputs]> { + public async getBaseParityProof(inputs: BaseParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertBaseParityInputsToWitnessMap(inputs); const witness = await this.simulationProvider.simulateCircuit(witnessMap, BaseParityArtifact); const result = convertBaseParityOutputsFromWitnessMap(witness); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } /** @@ -66,14 +66,14 @@ export class TestCircuitProver implements CircuitProver { * @param inputs - Inputs to the circuit. * @returns The public inputs of the parity circuit. */ - public async getRootParityProof(inputs: RootParityInputs): Promise<[Proof, ParityPublicInputs]> { + public async getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertRootParityInputsToWitnessMap(inputs); const witness = await this.simulationProvider.simulateCircuit(witnessMap, RootParityArtifact); const result = convertRootParityOutputsFromWitnessMap(witness); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } /** @@ -81,21 +81,21 @@ export class TestCircuitProver implements CircuitProver { * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. */ - public async getBaseRollupProof(input: BaseRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]> { + public async getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertBaseRollupInputsToWitnessMap(input); const witness = await this.simulationProvider.simulateCircuit(witnessMap, SimulatedBaseRollupArtifact); const result = convertBaseRollupOutputsFromWitnessMap(witness); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } /** * Simulates the merge rollup circuit from its inputs. * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. 
*/ - public async getMergeRollupProof(input: MergeRollupInputs): Promise<[Proof, BaseOrMergeRollupPublicInputs]> { + public async getMergeRollupProof(input: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertMergeRollupInputsToWitnessMap(input); // use WASM here as it is faster for small circuits @@ -103,7 +103,7 @@ export class TestCircuitProver implements CircuitProver { const result = convertMergeRollupOutputsFromWitnessMap(witness); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } /** @@ -111,7 +111,7 @@ export class TestCircuitProver implements CircuitProver { * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. */ - public async getRootRollupProof(input: RootRollupInputs): Promise<[Proof, RootRollupPublicInputs]> { + public async getRootRollupProof(input: RootRollupInputs): Promise<[RootRollupPublicInputs, Proof]> { const witnessMap = convertRootRollupInputsToWitnessMap(input); // use WASM here as it is faster for small circuits @@ -126,6 +126,6 @@ export class TestCircuitProver implements CircuitProver { inputSize: input.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - return Promise.resolve([makeEmptyProof(), result]); + return Promise.resolve([result, makeEmptyProof()]); } } From 029912a260493a00de41a53fddb5b979c3e5b55e Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Tue, 26 Mar 2024 16:52:25 +0000 Subject: [PATCH 04/41] WIP --- .../noir-protocol-circuits-types/src/index.ts | 28 +++++- yarn-project/prover-client/package.json | 2 +- yarn-project/prover-client/src/bb/execute.ts | 92 +++++++++++++++---- .../src/prover/bb_prover.test.ts | 5 +- .../prover-client/src/prover/bb_prover.ts | 40 ++++++-- .../src/prover/test_circuit_prover.ts | 6 +- .../prover-client/src/simulator/rollup.ts | 8 +- .../simulator/src/simulator/acvm_native.ts | 12 +-- 8 files changed, 150 insertions(+), 43 deletions(-) diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index ef095ff95a8..4906782f100 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -224,6 +224,17 @@ export function convertRootParityInputsToWitnessMap(inputs: RootParityInputs): W * @returns The witness map */ export function convertBaseRollupInputsToWitnessMap(inputs: BaseRollupInputs): WitnessMap { + const mapped = mapBaseRollupInputsToNoir(inputs); + const initialWitnessMap = abiEncode(BaseRollupJson.abi as Abi, { inputs: mapped as any }); + return initialWitnessMap; +} + +/** + * Converts the inputs of the simulated base rollup circuit into a witness map. + * @param inputs - The base rollup inputs. + * @returns The witness map + */ +export function convertSimulatedBaseRollupInputsToWitnessMap(inputs: BaseRollupInputs): WitnessMap { const mapped = mapBaseRollupInputsToNoir(inputs); const initialWitnessMap = abiEncode(BaseRollupSimulatedJson.abi as Abi, { inputs: mapped as any }); return initialWitnessMap; @@ -294,6 +305,21 @@ export function convertPublicTailInputsToWitnessMap(inputs: PublicKernelTailCirc return initialWitnessMap; } +/** + * Converts the outputs of the simulated base rollup circuit from a witness map. + * @param outputs - The base rollup outputs as a witness map. + * @returns The public inputs. 
+ */ +export function convertSimulatedBaseRollupOutputsFromWitnessMap(outputs: WitnessMap): BaseOrMergeRollupPublicInputs { + // Decode the witness map into two fields, the return values and the inputs + const decodedInputs: DecodedInputs = abiDecode(BaseRollupSimulatedJson.abi as Abi, outputs); + + // Cast the inputs as the return type + const returnType = decodedInputs.return_value as BaseRollupReturnType; + + return mapBaseOrMergeRollupPublicInputsFromNoir(returnType); +} + /** * Converts the outputs of the base rollup circuit from a witness map. * @param outputs - The base rollup outputs as a witness map. @@ -301,7 +327,7 @@ export function convertPublicTailInputsToWitnessMap(inputs: PublicKernelTailCirc */ export function convertBaseRollupOutputsFromWitnessMap(outputs: WitnessMap): BaseOrMergeRollupPublicInputs { // Decode the witness map into two fields, the return values and the inputs - const decodedInputs: DecodedInputs = abiDecode(BaseRollupSimulatedJson.abi as Abi, outputs); + const decodedInputs: DecodedInputs = abiDecode(BaseRollupJson.abi as Abi, outputs); // Cast the inputs as the return type const returnType = decodedInputs.return_value as BaseRollupReturnType; diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 07629e69e24..4be53e506dc 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -19,7 +19,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --passWithNoTests", + "test": "DEBUG='aztec:*' NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --passWithNoTests", "bb": "node --no-warnings ./dest/bb/index.js" }, "inherits": [ diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index c94ba512e73..a92ca560d63 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -7,7 +7,7 @@ import * as proc from 'child_process'; import * as fs from 'fs/promises'; import path from 'path'; -enum BB_RESULT { +export enum BB_RESULT { SUCCESS, FAILURE, ALREADY_PRESENT, @@ -87,7 +87,7 @@ async function generateKeyForNoirCircuit( if (!mustRegenerate) { const alreadyPresent: BBSuccess = { status: BB_RESULT.ALREADY_PRESENT }; - return { result: alreadyPresent, circuitOutputDirectory }; + return { result: alreadyPresent, outputPath: circuitOutputDirectory }; } const binaryPresent = await fs @@ -96,7 +96,7 @@ async function generateKeyForNoirCircuit( .catch(_ => false); if (!binaryPresent) { const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; - return { result: failed, circuitOutputDirectory }; + return { result: failed, outputPath: circuitOutputDirectory }; } // Clear up the circuit output directory removing anything that is there @@ -104,13 +104,16 @@ async function generateKeyForNoirCircuit( await fs.mkdir(circuitOutputDirectory, { recursive: true }); // Write the bytecode and input witness to the working directory await fs.writeFile(bytecodePath, bytecode); - const args = ['-o', circuitOutputDirectory, '-b', bytecodePath]; + + // For verification keys, the argument is the full file path + const outputPath = key === 'pk' ? 
circuitOutputDirectory : `${circuitOutputDirectory}/vk`; + const args = ['-o', outputPath, '-b', bytecodePath]; const timer = new Timer(); const result = await executeBB(pathToBB, `write_${key}`, args, log); const duration = timer.ms(); await fs.rm(bytecodePath, { force: true }); await fs.writeFile(bytecodeHashPath, bytecodeHash); - return { result, duration, circuitOutputDirectory }; + return { result, duration, outputPath }; } const directorySize = async (directory: string, filesToOmit: string[]) => { @@ -129,11 +132,14 @@ export async function generateVerificationKeyForNoirCircuit( compiledCircuit: NoirCompiledCircuit, log: LogFn, ) { - const { - result, - duration, - circuitOutputDirectory: keyPath, - } = await generateKeyForNoirCircuit(pathToBB, workingDirectory, circuitName, compiledCircuit, 'vk', log); + const { result, duration, outputPath } = await generateKeyForNoirCircuit( + pathToBB, + workingDirectory, + circuitName, + compiledCircuit, + 'vk', + log, + ); if (result.status === BB_RESULT.FAILURE) { log(`Failed to generate verification key for circuit ${circuitName}, reason: ${result.reason}`); return; @@ -142,10 +148,10 @@ export async function generateVerificationKeyForNoirCircuit( log(`Verification key for circuit ${circuitName} was already present`); return; } - const size = await directorySize(keyPath, [bytecodeHashFilename]); + const stats = await fs.stat(outputPath); log( - `Verification key for circuit ${circuitName} written to ${keyPath} in ${duration} ms, size: ${ - size / (1024 * 1024) + `Verification key for circuit ${circuitName} written to ${outputPath} in ${duration} ms, size: ${ + stats.size / (1024 * 1024) } MB`, ); return result; @@ -158,11 +164,14 @@ export async function generateProvingKeyForNoirCircuit( compiledCircuit: NoirCompiledCircuit, log: LogFn, ) { - const { - result, - duration, - circuitOutputDirectory: keyPath, - } = await generateKeyForNoirCircuit(pathToBB, workingDirectory, circuitName, compiledCircuit, 'pk', log); + const { result, duration, outputPath } = await generateKeyForNoirCircuit( + pathToBB, + workingDirectory, + circuitName, + compiledCircuit, + 'pk', + log, + ); if (result.status === BB_RESULT.FAILURE) { log(`Failed to generate proving key for circuit ${circuitName}, reason: ${result.reason}`); return; @@ -171,9 +180,52 @@ export async function generateProvingKeyForNoirCircuit( log(`Proving key for circuit ${circuitName} was already present`); return; } - const size = await directorySize(keyPath, [bytecodeHashFilename]); + const size = await directorySize(outputPath, [bytecodeHashFilename]); log( - `Proving key for circuit ${circuitName} written to ${keyPath} in ${duration} ms, size: ${size / (1024 * 1024)} MB`, + `Proving key for circuit ${circuitName} written to ${outputPath} in ${duration} ms, size: ${ + size / (1024 * 1024) + } MB`, ); return result; } + +export async function generateProof( + pathToBB: string, + workingDirectory: string, + circuitName: string, + compiledCircuit: NoirCompiledCircuit, + inputWitnessFile: string, + log: LogFn, +) { + // The bytecode is written to e.g. /workingDirectory/pk/BaseParityArtifact-bytecode + const bytecodePath = `${workingDirectory}/proof/${circuitName}-bytecode`; + const bytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); + + // The key generation outputs are written to e.g. 
/workingDirectory/pk/BaseParityArtifact/ + // The bytecode hash file is also written here as /workingDirectory/pk/BaseParityArtifact/bytecode-hash + const circuitOutputDirectory = `${workingDirectory}/proof/${circuitName}`; + + const binaryPresent = await fs + .access(pathToBB, fs.constants.R_OK) + .then(_ => true) + .catch(_ => false); + if (!binaryPresent) { + const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; + return { result: failed, outputPath: circuitOutputDirectory }; + } + + // Clear up the circuit output directory removing anything that is there + await fs.rm(circuitOutputDirectory, { recursive: true, force: true }); + await fs.mkdir(circuitOutputDirectory, { recursive: true }); + // Write the bytecode and input witness to the working directory + await fs.writeFile(bytecodePath, bytecode); + + // For verification keys, the argument is the full file path + const outputPath = `${circuitOutputDirectory}/proof`; + const args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; + const timer = new Timer(); + const result = await executeBB(pathToBB, `prove`, args, log); + const duration = timer.ms(); + await fs.rm(bytecodePath, { force: true }); + return { result, duration, outputPath }; +} diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 306e71f9907..1acb6287aba 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -95,7 +95,8 @@ describe('prover/bb_prover', () => { bbWorkingDirectory: config.bbWorkingDirectory, }; prover = await BBNativeRollupProver.new(bbConfig); - }, 20_000); + logger('AFTER PROVER START'); + }, 200_000); afterEach(async () => { if (directoryToCleanup) { @@ -106,6 +107,8 @@ describe('prover/bb_prover', () => { it('proves the base rollup circuit', async () => { const tx = await makeBloatedProcessedTx(builderDb); + logger('Starting Test!!'); + const inputs = await buildBaseRollupInput(tx, globalVariables, builderDb); await prover.getBaseRollupProof(inputs); }, 30_000); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index d75da77b11b..00e35da5219 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -16,12 +16,12 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { elapsed } from '@aztec/foundation/timer'; import { BaseParityArtifact, + BaseRollupArtifact, MergeRollupArtifact, ProtocolArtifacts, ProtocolCircuitArtifacts, RootParityArtifact, RootRollupArtifact, - SimulatedBaseRollupArtifact, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, convertBaseRollupInputsToWitnessMap, @@ -37,31 +37,39 @@ import { NativeACVMSimulator } from '@aztec/simulator'; import * as fs from 'fs/promises'; -import { generateProvingKeyForNoirCircuit, generateVerificationKeyForNoirCircuit } from '../bb/execute.js'; +import { + BB_RESULT, + generateProof, + generateProvingKeyForNoirCircuit, + generateVerificationKeyForNoirCircuit, +} from '../bb/execute.js'; import { CircuitProver } from './interface.js'; const logger = createDebugLogger('aztec:bb-prover'); async function ensureAllKeys(bbBinaryPath: string, bbWorkingDirectory: string) { - const realCircuits = Object.keys(ProtocolCircuitArtifacts).filter((n: string) => !n.includes('Simulated')); + const realCircuits = 
Object.keys(ProtocolCircuitArtifacts).filter( + (n: string) => !n.includes('Simulated') && !n.includes('PrivateKernel'), + ); + const promises = []; for (const circuitName of realCircuits) { - logger.info(`Generating proving key for circuit ${circuitName}`); - await generateProvingKeyForNoirCircuit( + const provingKeyPromise = generateProvingKeyForNoirCircuit( bbBinaryPath, bbWorkingDirectory, circuitName, ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], logger, ); - logger.info(`Generating verification key for circuit ${circuitName}`); - await generateVerificationKeyForNoirCircuit( + const verificationKeyPromise = generateVerificationKeyForNoirCircuit( bbBinaryPath, bbWorkingDirectory, circuitName, ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], logger, ); + promises.push(...[provingKeyPromise, verificationKeyPromise]); } + await Promise.all(promises); } export type BBProverConfig = { @@ -134,7 +142,23 @@ export class BBNativeRollupProver implements CircuitProver { public async getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertBaseRollupInputsToWitnessMap(input); - const witness = await this.simulator.simulateCircuit(witnessMap, SimulatedBaseRollupArtifact); + const witness = await this.simulator.simulateCircuit(witnessMap, BaseRollupArtifact); + + const inputWitness = ''; + + const provingResult = await generateProof( + this.bbBinaryPath, + this.bbWorkingDirectory, + 'Base Rollup', + BaseRollupArtifact, + inputWitness, + logger, + ); + + if (provingResult.result.status === BB_RESULT.FAILURE) { + logger.error(`Failed to generate base rollup proof: ${provingResult.result.reason}`); + throw new Error(provingResult.result.reason); + } const result = convertBaseRollupOutputsFromWitnessMap(witness); diff --git a/yarn-project/prover-client/src/prover/test_circuit_prover.ts b/yarn-project/prover-client/src/prover/test_circuit_prover.ts index a8cda3fdcf8..f2a73fac716 100644 --- a/yarn-project/prover-client/src/prover/test_circuit_prover.ts +++ b/yarn-project/prover-client/src/prover/test_circuit_prover.ts @@ -29,6 +29,8 @@ import { convertRootParityOutputsFromWitnessMap, convertRootRollupInputsToWitnessMap, convertRootRollupOutputsFromWitnessMap, + convertSimulatedBaseRollupInputsToWitnessMap, + convertSimulatedBaseRollupOutputsFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; import { SimulationProvider, WASMSimulator } from '@aztec/simulator'; @@ -82,11 +84,11 @@ export class TestCircuitProver implements CircuitProver { * @returns The public inputs as outputs of the simulation. 
*/ public async getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { - const witnessMap = convertBaseRollupInputsToWitnessMap(input); + const witnessMap = convertSimulatedBaseRollupInputsToWitnessMap(input); const witness = await this.simulationProvider.simulateCircuit(witnessMap, SimulatedBaseRollupArtifact); - const result = convertBaseRollupOutputsFromWitnessMap(witness); + const result = convertSimulatedBaseRollupOutputsFromWitnessMap(witness); return Promise.resolve([result, makeEmptyProof()]); } diff --git a/yarn-project/prover-client/src/simulator/rollup.ts b/yarn-project/prover-client/src/simulator/rollup.ts index bfb8312a364..240681598fe 100644 --- a/yarn-project/prover-client/src/simulator/rollup.ts +++ b/yarn-project/prover-client/src/simulator/rollup.ts @@ -19,14 +19,14 @@ import { SimulatedBaseRollupArtifact, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, - convertBaseRollupInputsToWitnessMap, - convertBaseRollupOutputsFromWitnessMap, convertMergeRollupInputsToWitnessMap, convertMergeRollupOutputsFromWitnessMap, convertRootParityInputsToWitnessMap, convertRootParityOutputsFromWitnessMap, convertRootRollupInputsToWitnessMap, convertRootRollupOutputsFromWitnessMap, + convertSimulatedBaseRollupInputsToWitnessMap, + convertSimulatedBaseRollupOutputsFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; import { SimulationProvider, WASMSimulator } from '@aztec/simulator'; @@ -113,11 +113,11 @@ export class RealRollupCircuitSimulator implements RollupSimulator { * @returns The public inputs as outputs of the simulation. */ public async baseRollupCircuit(input: BaseRollupInputs): Promise { - const witnessMap = convertBaseRollupInputsToWitnessMap(input); + const witnessMap = convertSimulatedBaseRollupInputsToWitnessMap(input); const witness = await this.simulationProvider.simulateCircuit(witnessMap, SimulatedBaseRollupArtifact); - const result = convertBaseRollupOutputsFromWitnessMap(witness); + const result = convertSimulatedBaseRollupOutputsFromWitnessMap(witness); return Promise.resolve(result); } diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index 848a5d7d15b..0240ef2fa78 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -30,7 +30,7 @@ function parseIntoWitnessMap(outputString: string) { * @param bytecode - The circuit bytecode * @param workingDirectory - A directory to use for temporary files by the ACVM * @param pathToAcvm - The path to the ACVM binary - * @param outputFilename - If specified, the output will be stored as a file, encoded using Bincode, instead of being streamed back over stdout + * @param outputDirectory - If specified, the output will be stored as a file, encoded using Bincode, instead of being streamed back over stdout * @returns The completed partial witness outputted from the circuit */ export async function executeNativeCircuit( @@ -38,7 +38,7 @@ export async function executeNativeCircuit( bytecode: Buffer, workingDirectory: string, pathToAcvm: string, - outputFilename?: string, + outputDirectory?: string, ) { const bytecodeFilename = 'bytecode'; const witnessFilename = 'input_witness.toml'; @@ -67,10 +67,10 @@ export async function executeNativeCircuit( `--input-witness`, `${witnessFilename}`, ]; - if (!outputFilename) { + if (!outputDirectory) { args = args.concat(['--print']); } else { - args = args.concat([`--output-witness`, 
`${outputFilename}`]); + args = args.concat([`--output-witness`, `${outputDirectory}/output-witness`]); } const processPromise = new Promise((resolve, reject) => { let outputWitness = Buffer.alloc(0); @@ -93,7 +93,7 @@ export async function executeNativeCircuit( try { const output = await processPromise; - if (outputFilename) { + if (outputDirectory) { return new Map(); } return parseIntoWitnessMap(output); @@ -104,7 +104,7 @@ export async function executeNativeCircuit( } export class NativeACVMSimulator implements SimulationProvider { - constructor(private workingDirectory: string, private pathToAcvm: string, private outputAsBincode = false) {} + constructor(private workingDirectory: string, private pathToAcvm: string, private bincodeDirectory?) {} async simulateCircuit(input: WitnessMap, compiledCircuit: NoirCompiledCircuit): Promise { // Execute the circuit on those initial witness values From 04e5bcb894c9d480f7a2206def77bcec956091eb Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Tue, 26 Mar 2024 20:50:44 +0000 Subject: [PATCH 05/41] WIP --- yarn-project/prover-client/src/prover/bb_prover.ts | 12 ++++-------- yarn-project/simulator/src/simulator/acvm_native.ts | 4 ++-- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 00e35da5219..9028e9ed3a8 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -83,11 +83,7 @@ export type BBProverConfig = { * Prover implementation that uses barretenberg native proving */ export class BBNativeRollupProver implements CircuitProver { - constructor( - private simulator: NativeACVMSimulator, - private bbBinaryPath: string, - private bbWorkingDirectory: string, - ) {} + constructor(private config: BBProverConfig) {} static async new(config: BBProverConfig) { await fs.access(config.acvmBinaryPath, fs.constants.R_OK); @@ -99,9 +95,7 @@ export class BBNativeRollupProver implements CircuitProver { await ensureAllKeys(config.bbBinaryPath, config.bbWorkingDirectory); - const simulator = new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath, true); - - return new BBNativeRollupProver(simulator, config.bbBinaryPath, config.bbWorkingDirectory); + return new BBNativeRollupProver(config); } /** @@ -142,6 +136,8 @@ export class BBNativeRollupProver implements CircuitProver { public async getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertBaseRollupInputsToWitnessMap(input); + const simulator = new NativeACVMSimulator(this.config.acvmWorkingDirectory, this.config.acvmBinaryPath, true); + const witness = await this.simulator.simulateCircuit(witnessMap, BaseRollupArtifact); const inputWitness = ''; diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index 0240ef2fa78..fded9aea087 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -104,7 +104,7 @@ export async function executeNativeCircuit( } export class NativeACVMSimulator implements SimulationProvider { - constructor(private workingDirectory: string, private pathToAcvm: string, private bincodeDirectory?) 
{} + constructor(private workingDirectory: string, private pathToAcvm: string, private bincodeDirectory?: string) {} async simulateCircuit(input: WitnessMap, compiledCircuit: NoirCompiledCircuit): Promise { // Execute the circuit on those initial witness values @@ -120,7 +120,7 @@ export class NativeACVMSimulator implements SimulationProvider { decodedBytecode, directory, this.pathToAcvm, - this.outputAsBincode ? 'output-witness' : undefined, + this.bincodeDirectory, ); return _witnessMap; From 43022c1137542bc1b05d26ad906b6f1d09ad3711 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 28 Mar 2024 10:41:21 +0000 Subject: [PATCH 06/41] WIP --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 3 +- .../tooling/acvm_cli/src/cli/fs/witness.rs | 1 - .../noir-protocol-circuits-types/src/index.ts | 6 +- .../prover-client/src/prover/bb_prover.ts | 70 ++++++++++++++++--- .../simulator/src/simulator/acvm_native.ts | 36 ++++++---- 5 files changed, 90 insertions(+), 26 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 7185ffba523..f9f602c2166 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -219,8 +219,9 @@ bool proveAndVerifyGoblin(const std::string& bytecodePath, const std::string& wi void prove(const std::string& bytecodePath, const std::string& witnessPath, const std::string& outputPath) { auto constraint_system = get_constraint_system(bytecodePath); + std::cout << "Read bytecode" << std::endl; auto witness = get_witness(witnessPath); - + std::cout << "Read witness" << std::endl; acir_proofs::AcirComposer acir_composer{ 0, verbose }; acir_composer.create_circuit(constraint_system, witness); init_bn254_crs(acir_composer.get_dyadic_circuit_size()); diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs index 1c71c2d0f40..35f1f5b2b14 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs @@ -62,7 +62,6 @@ pub(crate) fn save_witness_to_dir>( let witness_path = witness_dir.as_ref().join(witness_name).with_extension("gz"); let buf: Vec = witnesses.try_into().map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; - println!("Writing out to {}", witness_path.display()); write_to_file(buf.as_slice(), &witness_path); Ok(witness_path) diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index 4906782f100..10358f36724 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -19,7 +19,7 @@ import { import { NoirCompiledCircuit } from '@aztec/types/noir'; import { WasmBlackBoxFunctionSolver, createBlackBoxSolver, executeCircuitWithBlackBoxSolver } from '@noir-lang/acvm_js'; -import { Abi, abiDecode, abiEncode } from '@noir-lang/noirc_abi'; +import { Abi, abiDecode, abiEncode, serializeWitness } from '@noir-lang/noirc_abi'; import { WitnessMap } from '@noir-lang/types'; import BaseParityJson from './target/parity_base.json' assert { type: 'json' }; @@ -146,6 +146,10 @@ const getSolver = (): Promise => { return solver; }; +export async function serialiseInputWitness(witness: WitnessMap) { + return await serializeWitness(witness); +} + /** * Executes the init private kernel. 
* @param privateKernelInitCircuitPrivateInputs - The private inputs to the initial private kernel. diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 9028e9ed3a8..bba8d33cd15 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -12,6 +12,7 @@ import { RootRollupPublicInputs, makeEmptyProof, } from '@aztec/circuits.js'; +import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { elapsed } from '@aztec/foundation/timer'; import { @@ -32,6 +33,7 @@ import { convertRootParityOutputsFromWitnessMap, convertRootRollupInputsToWitnessMap, convertRootRollupOutputsFromWitnessMap, + serialiseInputWitness, } from '@aztec/noir-protocol-circuits-types'; import { NativeACVMSimulator } from '@aztec/simulator'; @@ -106,7 +108,16 @@ export class BBNativeRollupProver implements CircuitProver { public async getBaseParityProof(inputs: BaseParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertBaseParityInputsToWitnessMap(inputs); - const witness = await this.simulator.simulateCircuit(witnessMap, BaseParityArtifact); + const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + const outputWitnessFile = `${bbWorkingDirectory}/partial-witness`; + + const simulator = new NativeACVMSimulator( + this.config.acvmWorkingDirectory, + this.config.acvmBinaryPath, + outputWitnessFile, + ); + + const witness = await simulator.simulateCircuit(witnessMap, BaseParityArtifact); const result = convertBaseParityOutputsFromWitnessMap(witness); @@ -121,7 +132,16 @@ export class BBNativeRollupProver implements CircuitProver { public async getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertRootParityInputsToWitnessMap(inputs); - const witness = await this.simulator.simulateCircuit(witnessMap, RootParityArtifact); + const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + const outputWitnessFile = `${bbWorkingDirectory}/partial-witness`; + + const simulator = new NativeACVMSimulator( + this.config.acvmWorkingDirectory, + this.config.acvmBinaryPath, + outputWitnessFile, + ); + + const witness = await simulator.simulateCircuit(witnessMap, RootParityArtifact); const result = convertRootParityOutputsFromWitnessMap(witness); @@ -136,18 +156,30 @@ export class BBNativeRollupProver implements CircuitProver { public async getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertBaseRollupInputsToWitnessMap(input); - const simulator = new NativeACVMSimulator(this.config.acvmWorkingDirectory, this.config.acvmBinaryPath, true); + const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + logger(`Using bb working directory ${bbWorkingDirectory}`); + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + const outputWitnessFile = `${bbWorkingDirectory}/partial-witness.gz`; + const outputWitnessFile2 = `${bbWorkingDirectory}/partial-witness2.gz`; + + const simulator = new NativeACVMSimulator( + this.config.acvmWorkingDirectory, + this.config.acvmBinaryPath, + outputWitnessFile, + ); + + const witness = await simulator.simulateCircuit(witnessMap, BaseRollupArtifact); - const witness = await this.simulator.simulateCircuit(witnessMap, BaseRollupArtifact); + const 
binaryWitness = await serialiseInputWitness(witness); - const inputWitness = ''; + await fs.writeFile(outputWitnessFile2, binaryWitness); const provingResult = await generateProof( - this.bbBinaryPath, - this.bbWorkingDirectory, + this.config.bbBinaryPath, + bbWorkingDirectory, 'Base Rollup', BaseRollupArtifact, - inputWitness, + outputWitnessFile2, logger, ); @@ -168,8 +200,17 @@ export class BBNativeRollupProver implements CircuitProver { public async getMergeRollupProof(input: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertMergeRollupInputsToWitnessMap(input); + const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + const outputWitnessFile = `${bbWorkingDirectory}/partial-witness`; + + const simulator = new NativeACVMSimulator( + this.config.acvmWorkingDirectory, + this.config.acvmBinaryPath, + outputWitnessFile, + ); + // use WASM here as it is faster for small circuits - const witness = await this.simulator.simulateCircuit(witnessMap, MergeRollupArtifact); + const witness = await simulator.simulateCircuit(witnessMap, MergeRollupArtifact); const result = convertMergeRollupOutputsFromWitnessMap(witness); @@ -184,8 +225,17 @@ export class BBNativeRollupProver implements CircuitProver { public async getRootRollupProof(input: RootRollupInputs): Promise<[RootRollupPublicInputs, Proof]> { const witnessMap = convertRootRollupInputsToWitnessMap(input); + const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + const outputWitnessFile = `${bbWorkingDirectory}/partial-witness`; + + const simulator = new NativeACVMSimulator( + this.config.acvmWorkingDirectory, + this.config.acvmBinaryPath, + outputWitnessFile, + ); + // use WASM here as it is faster for small circuits - const [duration, witness] = await elapsed(() => this.simulator.simulateCircuit(witnessMap, RootRollupArtifact)); + const [duration, witness] = await elapsed(() => simulator.simulateCircuit(witnessMap, RootRollupArtifact)); const result = convertRootRollupOutputsFromWitnessMap(witness); diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index fded9aea087..53906edf487 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -1,4 +1,5 @@ import { randomBytes } from '@aztec/foundation/crypto'; +import { createDebugLogger } from '@aztec/foundation/log'; import { NoirCompiledCircuit } from '@aztec/types/noir'; import { WitnessMap } from '@noir-lang/types'; @@ -7,6 +8,8 @@ import fs from 'fs/promises'; import { SimulationProvider } from './simulation_provider.js'; +const logger = createDebugLogger('aztec:acvm_native'); + /** * Parses a TOML format witness map string into a Map structure * @param outputString - The witness map in TOML format @@ -19,7 +22,12 @@ function parseIntoWitnessMap(outputString: string) { .filter((line: string) => line.length) .map((line: string) => { const pair = line.replaceAll(' ', '').split('='); - return [Number(pair[0]), pair[1].replaceAll('"', '')]; + try { + return [Number(pair[0]), pair[1].replaceAll('"', '')]; + } catch (err) { + logger(`Error: ${pair[1]}, line: ${line}`); + return [0, '']; + } }), ); } @@ -30,7 +38,7 @@ function parseIntoWitnessMap(outputString: string) { * @param bytecode - The circuit bytecode * @param workingDirectory - A directory to use for temporary files by the ACVM * @param pathToAcvm - The path to the ACVM 
binary - * @param outputDirectory - If specified, the output will be stored as a file, encoded using Bincode, instead of being streamed back over stdout + * @param outputFilename - If specified, the output will be stored as a file, encoded using Bincode, instead of being streamed back over stdout * @returns The completed partial witness outputted from the circuit */ export async function executeNativeCircuit( @@ -38,7 +46,7 @@ export async function executeNativeCircuit( bytecode: Buffer, workingDirectory: string, pathToAcvm: string, - outputDirectory?: string, + outputFilename?: string, ) { const bytecodeFilename = 'bytecode'; const witnessFilename = 'input_witness.toml'; @@ -58,7 +66,7 @@ export async function executeNativeCircuit( await fs.writeFile(`${workingDirectory}/${witnessFilename}`, witnessMap); // Execute the ACVM using the given args - let args = [ + const args = [ `execute`, `--working-directory`, `${workingDirectory}`, @@ -66,12 +74,11 @@ export async function executeNativeCircuit( `${bytecodeFilename}`, `--input-witness`, `${witnessFilename}`, + '--print', + '--output-witness', + 'output-witness', ]; - if (!outputDirectory) { - args = args.concat(['--print']); - } else { - args = args.concat([`--output-witness`, `${outputDirectory}/output-witness`]); - } + const processPromise = new Promise((resolve, reject) => { let outputWitness = Buffer.alloc(0); let errorBuffer = Buffer.alloc(0); @@ -93,8 +100,9 @@ export async function executeNativeCircuit( try { const output = await processPromise; - if (outputDirectory) { - return new Map(); + if (outputFilename) { + logger(`Copying file ${workingDirectory}/output-witness to ${outputFilename}`); + await fs.copyFile(`${workingDirectory}/output-witness.gz`, outputFilename); } return parseIntoWitnessMap(output); } finally { @@ -104,7 +112,7 @@ export async function executeNativeCircuit( } export class NativeACVMSimulator implements SimulationProvider { - constructor(private workingDirectory: string, private pathToAcvm: string, private bincodeDirectory?: string) {} + constructor(private workingDirectory: string, private pathToAcvm: string, private witnessFilename?: string) {} async simulateCircuit(input: WitnessMap, compiledCircuit: NoirCompiledCircuit): Promise { // Execute the circuit on those initial witness values @@ -114,13 +122,15 @@ export class NativeACVMSimulator implements SimulationProvider { // Provide a unique working directory so we don't get clashes with parallel executions const directory = `${this.workingDirectory}/${randomBytes(8).toString('hex')}`; + logger(`Using working directory ${directory}`); + // Execute the circuit const _witnessMap = await executeNativeCircuit( input, decodedBytecode, directory, this.pathToAcvm, - this.bincodeDirectory, + this.witnessFilename, ); return _witnessMap; From e16c2452d7aa98c1e1126e7171f142087330e059 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 28 Mar 2024 14:22:30 +0000 Subject: [PATCH 07/41] WIP --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 40 +++++++++-- .../dsl/acir_proofs/acir_composer.cpp | 8 +++ .../dsl/acir_proofs/acir_composer.hpp | 3 + .../tooling/acvm_cli/src/cli/fs/witness.rs | 6 +- .../tooling/noirc_abi_wasm/src/lib.rs | 5 +- yarn-project/prover-client/src/bb/execute.ts | 17 +++-- .../src/prover/bb_prover.test.ts | 2 +- .../prover-client/src/prover/bb_prover.ts | 69 +++++++++---------- 8 files changed, 98 insertions(+), 52 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 
f9f602c2166..dc677000fb1 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -215,18 +215,43 @@ bool proveAndVerifyGoblin(const std::string& bytecodePath, const std::string& wi * @param witnessPath Path to the file containing the serialized witness * @param recursive Whether to use recursive proof generation or non-recursive * @param outputPath Path to write the proof to + * @param pkPath Optional path containing the proving key data */ -void prove(const std::string& bytecodePath, const std::string& witnessPath, const std::string& outputPath) +void prove(const std::string& bytecodePath, + const std::string& witnessPath, + const std::string& outputPath, + const std::string& pkPath) { auto constraint_system = get_constraint_system(bytecodePath); - std::cout << "Read bytecode" << std::endl; auto witness = get_witness(witnessPath); - std::cout << "Read witness" << std::endl; acir_proofs::AcirComposer acir_composer{ 0, verbose }; acir_composer.create_circuit(constraint_system, witness); - init_bn254_crs(acir_composer.get_dyadic_circuit_size()); - acir_composer.init_proving_key(); + size_t circuit_size = acir_composer.get_dyadic_circuit_size(); + init_bn254_crs(circuit_size); + if (pkPath == "") { + Timer pk_timer; + acir_composer.init_proving_key(); + std::cout << "Generated proving key in " << pk_timer.milliseconds() << "ms" << std::endl; + } else { + std::cout << "Loading CRS for circuit size " << circuit_size + 1 << " from " << CRS_PATH << std::endl; + Timer crs_timer; + auto crs_factory = + std::make_shared>(CRS_PATH, circuit_size + 1); + auto prover_crs = crs_factory->get_prover_crs(circuit_size + 1); + std::cout << "CRS loaded in " << crs_timer.milliseconds() << "ms" << std::endl; + + std::cout << "Loading proving key data from: " << pkPath << std::endl; + bb::plonk::proving_key_data key_data; + Timer pk_timer; + read_from_file(pkPath, key_data); + acir_composer.init_proving_key(std::move(key_data), prover_crs); + std::cout << "Proving key loaded in " << pk_timer.milliseconds() << "ms" << std::endl; + } + + std::cout << "Generating proof..." 
<< std::endl; + Timer proof_timer; auto proof = acir_composer.create_proof(); + std::cout << "Generated proof in " << proof_timer.milliseconds() << "ms" << std::endl; if (outputPath == "-") { writeRawBytesToStdout(proof); @@ -583,7 +608,10 @@ int main(int argc, char* argv[]) if (command == "prove") { std::string output_path = get_option(args, "-o", "./proofs/proof"); - prove(bytecode_path, witness_path, output_path); + prove(bytecode_path, witness_path, output_path, ""); + } else if (command == "prove_with_key") { + std::string output_path = get_option(args, "-o", "./proofs/proof"); + prove(bytecode_path, witness_path, output_path, pk_path); } else if (command == "gates") { gateCount(bytecode_path); } else if (command == "verify") { diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp index 07bcd68137a..f29ba8c77b1 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp @@ -41,6 +41,13 @@ std::shared_ptr AcirComposer::init_proving_key() return proving_key_; } +std::shared_ptr AcirComposer::init_proving_key( + bb::plonk::proving_key_data&& data, std::shared_ptr> const& crs) +{ + proving_key_ = std::make_shared(std::move(data), crs); + return proving_key_; +} + std::vector AcirComposer::create_proof() { if (!proving_key_) { @@ -56,6 +63,7 @@ std::vector AcirComposer::create_proof() proof = prover.construct_proof().proof_data; } else { auto prover = composer.create_ultra_with_keccak_prover(builder_); + std::cout << "Proving..." << std::endl; proof = prover.construct_proof().proof_data; } vinfo("done."); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp index 9ff9b51ace3..920beefb259 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp @@ -21,6 +21,9 @@ class AcirComposer { std::shared_ptr init_proving_key(); + std::shared_ptr init_proving_key( + bb::plonk::proving_key_data&& data, std::shared_ptr> const& crs); + std::vector create_proof(); void load_verification_key(bb::plonk::verification_key_data&& data); diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs index 35f1f5b2b14..d3a4488f3cd 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs @@ -5,7 +5,7 @@ use std::{ path::{Path, PathBuf}, }; -use acvm::acir::native_types::WitnessMap; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use crate::errors::{CliError, FilesystemError}; @@ -61,7 +61,9 @@ pub(crate) fn save_witness_to_dir>( create_named_dir(witness_dir.as_ref(), "witness"); let witness_path = witness_dir.as_ref().join(witness_name).with_extension("gz"); - let buf: Vec = witnesses.try_into().map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; + let witness_stack: WitnessStack = witnesses.into(); + + let buf: Vec = witness_stack.try_into().map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; write_to_file(buf.as_slice(), &witness_path); Ok(witness_path) diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs index 72806bdbaf1..d84ff8e6d42 100644 --- 
a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs @@ -5,7 +5,7 @@ // See Cargo.toml for explanation. use getrandom as _; -use acvm::acir::native_types::WitnessMap; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use iter_extended::try_btree_map; use noirc_abi::{ errors::InputParserError, @@ -118,6 +118,7 @@ pub fn abi_decode(abi: JsAbi, witness_map: JsWitnessMap) -> Result Result, JsAbiError> { console_error_panic_hook::set_once(); let converted_witness: WitnessMap = witness_map.into(); - let output = converted_witness.try_into(); + let witness_stack: WitnessStack = witnesses.into(); + let output = witness_stack.try_into(); output.map_err(|op| JsAbiError::new("Failed to convert to Vec".to_string())) } diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index a92ca560d63..89b3656cd00 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -146,7 +146,7 @@ export async function generateVerificationKeyForNoirCircuit( } if (result.status === BB_RESULT.ALREADY_PRESENT) { log(`Verification key for circuit ${circuitName} was already present`); - return; + return outputPath; } const stats = await fs.stat(outputPath); log( @@ -154,7 +154,7 @@ export async function generateVerificationKeyForNoirCircuit( stats.size / (1024 * 1024) } MB`, ); - return result; + return outputPath; } export async function generateProvingKeyForNoirCircuit( @@ -178,7 +178,7 @@ export async function generateProvingKeyForNoirCircuit( } if (result.status === BB_RESULT.ALREADY_PRESENT) { log(`Proving key for circuit ${circuitName} was already present`); - return; + return outputPath; } const size = await directorySize(outputPath, [bytecodeHashFilename]); log( @@ -186,7 +186,7 @@ export async function generateProvingKeyForNoirCircuit( size / (1024 * 1024) } MB`, ); - return result; + return outputPath; } export async function generateProof( @@ -196,6 +196,7 @@ export async function generateProof( compiledCircuit: NoirCompiledCircuit, inputWitnessFile: string, log: LogFn, + provingKeyDirectory?: string, ) { // The bytecode is written to e.g. /workingDirectory/pk/BaseParityArtifact-bytecode const bytecodePath = `${workingDirectory}/proof/${circuitName}-bytecode`; @@ -222,9 +223,13 @@ export async function generateProof( // For verification keys, the argument is the full file path const outputPath = `${circuitOutputDirectory}/proof`; - const args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; + let args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; + if (provingKeyDirectory) { + args = args.concat(...['-r', provingKeyDirectory!]); + } + const command = provingKeyDirectory ? 
'prove_with_key' : 'prove'; const timer = new Timer(); - const result = await executeBB(pathToBB, `prove`, args, log); + const result = await executeBB(pathToBB, command, args, log); const duration = timer.ms(); await fs.rm(bytecodePath, { force: true }); return { result, duration, outputPath }; diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 1acb6287aba..1b4e1a8e3ea 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -111,5 +111,5 @@ describe('prover/bb_prover', () => { const inputs = await buildBaseRollupInput(tx, globalVariables, builderDb); await prover.getBaseRollupProof(inputs); - }, 30_000); + }, 300_000); }); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index bba8d33cd15..7cfa5e2dd2a 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -33,7 +33,6 @@ import { convertRootParityOutputsFromWitnessMap, convertRootRollupInputsToWitnessMap, convertRootRollupOutputsFromWitnessMap, - serialiseInputWitness, } from '@aztec/noir-protocol-circuits-types'; import { NativeACVMSimulator } from '@aztec/simulator'; @@ -49,31 +48,6 @@ import { CircuitProver } from './interface.js'; const logger = createDebugLogger('aztec:bb-prover'); -async function ensureAllKeys(bbBinaryPath: string, bbWorkingDirectory: string) { - const realCircuits = Object.keys(ProtocolCircuitArtifacts).filter( - (n: string) => !n.includes('Simulated') && !n.includes('PrivateKernel'), - ); - const promises = []; - for (const circuitName of realCircuits) { - const provingKeyPromise = generateProvingKeyForNoirCircuit( - bbBinaryPath, - bbWorkingDirectory, - circuitName, - ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], - logger, - ); - const verificationKeyPromise = generateVerificationKeyForNoirCircuit( - bbBinaryPath, - bbWorkingDirectory, - circuitName, - ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], - logger, - ); - promises.push(...[provingKeyPromise, verificationKeyPromise]); - } - await Promise.all(promises); -} - export type BBProverConfig = { bbBinaryPath: string; bbWorkingDirectory: string; @@ -85,6 +59,7 @@ export type BBProverConfig = { * Prover implementation that uses barretenberg native proving */ export class BBNativeRollupProver implements CircuitProver { + private provingKeyDirectories: Map = new Map(); constructor(private config: BBProverConfig) {} static async new(config: BBProverConfig) { @@ -95,9 +70,9 @@ export class BBNativeRollupProver implements CircuitProver { logger.info(`Using native BB at ${config.bbBinaryPath} and working directory ${config.bbWorkingDirectory}`); logger.info(`Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`); - await ensureAllKeys(config.bbBinaryPath, config.bbWorkingDirectory); - - return new BBNativeRollupProver(config); + const prover = new BBNativeRollupProver(config); + await prover.init(); + return prover; } /** @@ -160,7 +135,6 @@ export class BBNativeRollupProver implements CircuitProver { logger(`Using bb working directory ${bbWorkingDirectory}`); await fs.mkdir(bbWorkingDirectory, { recursive: true }); const outputWitnessFile = `${bbWorkingDirectory}/partial-witness.gz`; - const outputWitnessFile2 = `${bbWorkingDirectory}/partial-witness2.gz`; const simulator = new NativeACVMSimulator( 
this.config.acvmWorkingDirectory, @@ -170,16 +144,12 @@ export class BBNativeRollupProver implements CircuitProver { const witness = await simulator.simulateCircuit(witnessMap, BaseRollupArtifact); - const binaryWitness = await serialiseInputWitness(witness); - - await fs.writeFile(outputWitnessFile2, binaryWitness); - const provingResult = await generateProof( this.config.bbBinaryPath, bbWorkingDirectory, 'Base Rollup', BaseRollupArtifact, - outputWitnessFile2, + outputWitnessFile, logger, ); @@ -248,4 +218,33 @@ export class BBNativeRollupProver implements CircuitProver { } satisfies CircuitSimulationStats); return Promise.resolve([result, makeEmptyProof()]); } + + private async init() { + const realCircuits = Object.keys(ProtocolCircuitArtifacts).filter( + (n: string) => !n.includes('Simulated') && !n.includes('PrivateKernel'), + ); + const promises = []; + for (const circuitName of realCircuits) { + const provingKeyPromise = generateProvingKeyForNoirCircuit( + this.config.bbBinaryPath, + this.config.bbWorkingDirectory, + circuitName, + ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], + logger, + ).then(result => { + if (result) { + this.provingKeyDirectories.set(circuitName, result); + } + }); + const verificationKeyPromise = generateVerificationKeyForNoirCircuit( + this.config.bbBinaryPath, + this.config.bbWorkingDirectory, + circuitName, + ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], + logger, + ); + promises.push(...[provingKeyPromise, verificationKeyPromise]); + } + await Promise.all(promises); + } } From 41cd8723f419d17e55b224ed768349d19b9af80c Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 28 Mar 2024 22:53:00 +0000 Subject: [PATCH 08/41] WIP --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 5 +- .../dsl/acir_proofs/acir_composer.cpp | 1 - .../tooling/noirc_abi_wasm/src/lib.rs | 6 +- yarn-project/prover-client/src/bb/execute.ts | 32 ++- .../orchestrator/block-building-helpers.ts | 6 - .../src/orchestrator/orchestrator.ts | 29 +-- .../src/prover/bb_prover.test.ts | 116 +++++++++- .../prover-client/src/prover/bb_prover.ts | 214 +++++++++--------- .../simulator/src/simulator/acvm_native.ts | 3 - 9 files changed, 257 insertions(+), 155 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index dc677000fb1..0dfd24c9fe6 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -231,7 +231,8 @@ void prove(const std::string& bytecodePath, if (pkPath == "") { Timer pk_timer; acir_composer.init_proving_key(); - std::cout << "Generated proving key in " << pk_timer.milliseconds() << "ms" << std::endl; + std::cout << "Generated proving key for circuit size " << circuit_size << " in " << pk_timer.milliseconds() + << "ms" << std::endl; } else { std::cout << "Loading CRS for circuit size " << circuit_size + 1 << " from " << CRS_PATH << std::endl; Timer crs_timer; @@ -248,7 +249,6 @@ void prove(const std::string& bytecodePath, std::cout << "Proving key loaded in " << pk_timer.milliseconds() << "ms" << std::endl; } - std::cout << "Generating proof..." 
<< std::endl; Timer proof_timer; auto proof = acir_composer.create_proof(); std::cout << "Generated proof in " << proof_timer.milliseconds() << "ms" << std::endl; @@ -303,7 +303,6 @@ bool verify(const std::string& proof_path, const std::string& vk_path) auto vk_data = from_buffer(read_file(vk_path)); acir_composer.load_verification_key(std::move(vk_data)); auto verified = acir_composer.verify_proof(read_file(proof_path)); - vinfo("verified: ", verified); return verified; } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp index f29ba8c77b1..8e57081549f 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp @@ -63,7 +63,6 @@ std::vector AcirComposer::create_proof() proof = prover.construct_proof().proof_data; } else { auto prover = composer.create_ultra_with_keccak_prover(builder_); - std::cout << "Proving..." << std::endl; proof = prover.construct_proof().proof_data; } vinfo("done."); diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs index d84ff8e6d42..50ce9ae34ae 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs @@ -13,7 +13,7 @@ use noirc_abi::{ Abi, MAIN_RETURN_NAME, }; use serde::Serialize; -use std::{collections::BTreeMap, process::Output}; +use std::{collections::BTreeMap}; use gloo_utils::format::JsValueSerdeExt; use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; @@ -118,7 +118,7 @@ pub fn abi_decode(abi: JsAbi, witness_map: JsWitnessMap) -> Result Result, JsAbiError> { console_error_panic_hook::set_once(); let converted_witness: WitnessMap = witness_map.into(); - let witness_stack: WitnessStack = witnesses.into(); + let witness_stack: WitnessStack = converted_witness.into(); let output = witness_stack.try_into(); - output.map_err(|op| JsAbiError::new("Failed to convert to Vec".to_string())) + output.map_err(|| JsAbiError::new("Failed to convert to Vec".to_string())) } diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index 89b3656cd00..b7c3f3c3322 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -30,20 +30,27 @@ export type BBResult = BBSuccess | BBFailure; * @param command - The command to execute * @param args - The arguments to pass * @param logger - A log function + * @param resultParser - An optional handler for detecting success or failure * @returns The completed partial witness outputted from the circuit */ -export function executeBB(pathToBB: string, command: string, args: string[], logger: LogFn) { +export function executeBB( + pathToBB: string, + command: string, + args: string[], + logger: LogFn, + resultParser = (code: number) => code === 0, +) { return new Promise((resolve, reject) => { let errorBuffer = Buffer.alloc(0); const acvm = proc.spawn(pathToBB, [command, ...args]); acvm.stdout.on('data', data => { - logger(data.toString('utf-8')); + logger(data.toString('utf-8').replace(/\n$/, '')); }); acvm.stderr.on('data', data => { errorBuffer = Buffer.concat([errorBuffer, data]); }); - acvm.on('close', code => { - if (code === 0) { + acvm.on('close', (code: number) => { + if (resultParser(code)) { resolve({ status: BB_RESULT.SUCCESS }); } else { reject(errorBuffer.toString('utf-8')); @@ -234,3 +241,20 @@ export async function 
generateProof( await fs.rm(bytecodePath, { force: true }); return { result, duration, outputPath }; } + +export async function verifyProof(pathToBB: string, proofFullPath: string, verificationKeyPath: string, log: LogFn) { + const binaryPresent = await fs + .access(pathToBB, fs.constants.R_OK) + .then(_ => true) + .catch(_ => false); + if (!binaryPresent) { + const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; + return { result: failed }; + } + + const args = ['-p', proofFullPath, '-k', verificationKeyPath]; + const timer = new Timer(); + const result = await executeBB(pathToBB, 'verify', args, log, (code: number) => code === 1); + const duration = timer.ms(); + return { result, duration }; +} diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 29ff4ead188..1748ff3481b 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -50,7 +50,6 @@ import { MerkleTreeOperations } from '@aztec/world-state'; import { VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; import { CircuitProver } from '../prover/index.js'; -import { RollupSimulator } from '../simulator/rollup.js'; // Denotes fields that are not used now, but will be in the future const FUTURE_FR = new Fr(0n); @@ -190,7 +189,6 @@ export function createMergeRollupInputs( export async function executeMergeRollupCircuit( mergeInputs: MergeRollupInputs, - simulator: RollupSimulator, prover: CircuitProver, logger?: DebugLogger, ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { @@ -203,7 +201,6 @@ export async function executeRootRollupCircuit( right: [BaseOrMergeRollupPublicInputs, Proof], l1ToL2Roots: RootParityInput, newL1ToL2Messages: Tuple, - simulator: RollupSimulator, prover: CircuitProver, db: MerkleTreeOperations, logger?: DebugLogger, @@ -505,7 +502,6 @@ export async function executeBaseRollupCircuit( tx: ProcessedTx, inputs: BaseRollupInputs, treeSnapshots: Map, - simulator: RollupSimulator, prover: CircuitProver, logger?: DebugLogger, ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { @@ -549,7 +545,6 @@ export function validateSimulatedTree( export async function executeBaseParityCircuit( inputs: BaseParityInputs, - simulator: RollupSimulator, prover: CircuitProver, logger?: DebugLogger, ): Promise { @@ -560,7 +555,6 @@ export async function executeBaseParityCircuit( export async function executeRootParityCircuit( inputs: RootParityInputs, - simulator: RollupSimulator, prover: CircuitProver, logger?: DebugLogger, ): Promise { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 2e483f4d457..c2af91bf0cb 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -20,14 +20,11 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { Tuple } from '@aztec/foundation/serialize'; import { sleep } from '@aztec/foundation/sleep'; import { elapsed } from '@aztec/foundation/timer'; -import { SimulationProvider } from '@aztec/simulator'; import { MerkleTreeOperations } from '@aztec/world-state'; import { inspect } from 'util'; -import { VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; import { CircuitProver } from '../prover/index.js'; 
-import { RealRollupCircuitSimulator, RollupSimulator } from '../simulator/rollup.js'; import { buildBaseRollupInput, createMergeRollupInputs, @@ -68,21 +65,16 @@ enum PROMISE_RESULT { export class ProvingOrchestrator { private provingState: ProvingState | undefined = undefined; private jobQueue: MemoryFifo = new MemoryFifo(); - private simulator: RollupSimulator; private jobProcessPromise?: Promise; private stopped = false; constructor( private db: MerkleTreeOperations, - simulationProvider: SimulationProvider, - protected vks: VerificationKeys, private prover: CircuitProver, private maxConcurrentJobs = MAX_CONCURRENT_JOBS, - ) { - this.simulator = new RealRollupCircuitSimulator(simulationProvider); - } + ) {} - public static new(db: MerkleTreeOperations, simulationProvider: SimulationProvider, prover: CircuitProver) { - const orchestrator = new ProvingOrchestrator(db, simulationProvider, getVerificationKeys(), prover); + public static new(db: MerkleTreeOperations, prover: CircuitProver) { + const orchestrator = new ProvingOrchestrator(db, prover); orchestrator.start(); return Promise.resolve(orchestrator); } @@ -169,7 +161,7 @@ export class ProvingOrchestrator { validateTx(tx); - logger.info(`Received transaction :${tx.hash}`); + logger.info(`Received transaction: ${tx.hash}`); // We start the transaction by enqueueing the state updates @@ -272,7 +264,7 @@ export class ProvingOrchestrator { stateIdentifier: string, ) { const [duration, baseRollupOutputs] = await elapsed(() => - executeBaseRollupCircuit(tx, inputs, treeSnapshots, this.simulator, this.prover, logger), + executeBaseRollupCircuit(tx, inputs, treeSnapshots, this.prover, logger), ); logger.debug(`Simulated base rollup circuit`, { eventName: 'circuit-simulation', @@ -303,7 +295,7 @@ export class ProvingOrchestrator { [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!], ); const [duration, circuitOutputs] = await elapsed(() => - executeMergeRollupCircuit(circuitInputs, this.simulator, this.prover, logger), + executeMergeRollupCircuit(circuitInputs, this.prover, logger), ); logger.debug(`Simulated merge rollup circuit`, { eventName: 'circuit-simulation', @@ -331,7 +323,6 @@ export class ProvingOrchestrator { [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!], rootParityInput, this.provingState!.newL1ToL2Messages, - this.simulator, this.prover, this.db, logger, @@ -369,9 +360,7 @@ export class ProvingOrchestrator { // Executes the base parity circuit and stores the intermediate state for the root parity circuit // Enqueues the root parity circuit if all inputs are available private async runBaseParityCircuit(inputs: BaseParityInputs, index: number, stateIdentifier: string) { - const [duration, circuitOutputs] = await elapsed(() => - executeBaseParityCircuit(inputs, this.simulator, this.prover, logger), - ); + const [duration, circuitOutputs] = await elapsed(() => executeBaseParityCircuit(inputs, this.prover, logger)); logger.debug(`Simulated base parity circuit`, { eventName: 'circuit-simulation', circuitName: 'base-parity', @@ -400,9 +389,7 @@ export class ProvingOrchestrator { // Runs the root parity circuit and stores the outputs // Enqueues the root rollup proof if all inputs are available private async runRootParityCircuit(inputs: RootParityInputs, stateIdentifier: string) { - const [duration, circuitOutputs] = await elapsed(() => - executeRootParityCircuit(inputs, this.simulator, this.prover, logger), - ); + const [duration, circuitOutputs] = await elapsed(() => executeRootParityCircuit(inputs, this.prover, logger));
logger.debug(`Simulated root parity circuit`, { eventName: 'circuit-simulation', circuitName: 'root-parity', diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 1b4e1a8e3ea..bbd4c842514 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -1,7 +1,22 @@ -import { AztecAddress, EthAddress, Fr, GlobalVariables, RootRollupPublicInputs } from '@aztec/circuits.js'; +import { PROVING_STATUS, makeEmptyProcessedTx } from '@aztec/circuit-types'; +import { + AztecAddress, + BaseParityInputs, + EthAddress, + Fr, + GlobalVariables, + Header, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + NUM_BASE_PARITY_PER_ROOT_PARITY, + RootParityInput, + RootParityInputs, + RootRollupPublicInputs, +} from '@aztec/circuits.js'; import { makeRootRollupPublicInputs } from '@aztec/circuits.js/testing'; +import { padArrayEnd } from '@aztec/foundation/collection'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; +import { Tuple } from '@aztec/foundation/serialize'; import { fileURLToPath } from '@aztec/foundation/url'; import { openTmpStore } from '@aztec/kv-store/utils'; import { MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; @@ -11,7 +26,12 @@ import { type MemDown, default as memdown } from 'memdown'; import path from 'path'; import { makeBloatedProcessedTx } from '../mocks/fixtures.js'; -import { buildBaseRollupInput } from '../orchestrator/block-building-helpers.js'; +import { + buildBaseRollupInput, + createMergeRollupInputs, + executeRootRollupCircuit, +} from '../orchestrator/block-building-helpers.js'; +import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; import { BBNativeRollupProver, BBProverConfig } from './bb_prover.js'; export const createMemDown = () => (memdown as any)() as MemDown; @@ -105,11 +125,95 @@ describe('prover/bb_prover', () => { }, 5000); it('proves the base rollup circuit', async () => { - const tx = await makeBloatedProcessedTx(builderDb); + const txs = await Promise.all([ + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + makeBloatedProcessedTx(builderDb, 4), + ]); logger('Starting Test!!'); - const inputs = await buildBaseRollupInput(tx, globalVariables, builderDb); - await prover.getBaseRollupProof(inputs); - }, 300_000); + logger('Building base rollup inputs'); + const baseRollupInputs = []; + for (const tx of txs) { + baseRollupInputs.push(await buildBaseRollupInput(tx, globalVariables, builderDb)); + } + logger('Proving base rollups'); + const baseRollupOutputs = await Promise.all(baseRollupInputs.map(inputs => prover.getBaseRollupProof(inputs))); + logger('Proving merge rollups'); + const mergeRollupInputs = []; + for (let i = 0; i < 4; i += 2) { + mergeRollupInputs.push( + createMergeRollupInputs( + [baseRollupOutputs[i][0]!, baseRollupOutputs[i][1]!], + [baseRollupOutputs[i + 1][0]!, baseRollupOutputs[i + 1][1]!], + ), + ); + } + const mergeRollupOutputs = await Promise.all(mergeRollupInputs.map(inputs => prover.getMergeRollupProof(inputs))); + + let baseParityInputs: BaseParityInputs[] = []; + let l1ToL2MessagesPadded: Tuple; + try { + l1ToL2MessagesPadded = padArrayEnd([], Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + } catch (err) { + throw new Error('Too many L1 to L2 messages'); + } + baseParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY 
}, (_, i) => + BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i), + ); + + logger('Proving base parity circuits'); + const baseParityOutputs = await Promise.all(baseParityInputs.map(inputs => prover.getBaseParityProof(inputs))); + + const rootParityInputs = new RootParityInputs( + baseParityOutputs.map(([publicInputs, proof]) => new RootParityInput(proof, publicInputs)) as Tuple< + RootParityInput, + typeof NUM_BASE_PARITY_PER_ROOT_PARITY + >, + ); + logger('Proving root parity circuit'); + const rootParityCircuitOutput = await prover.getRootParityProof(rootParityInputs); + + const rootParityInput = new RootParityInput(rootParityCircuitOutput[1], rootParityCircuitOutput[0]); + + logger('Proving root rollup circuit')!; + await executeRootRollupCircuit( + [mergeRollupOutputs[0][0]!, mergeRollupOutputs[0][1]!], + [mergeRollupOutputs[1][0]!, mergeRollupOutputs[1][1]!], + rootParityInput, + l1ToL2MessagesPadded, + prover, + builderDb, + logger, + ); + logger('Completed!!'); + }, 600_000); + + it('proves all circuits', async () => { + const txs = await Promise.all([ + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + makeBloatedProcessedTx(builderDb, 4), + ]); + + const orchestrator = await ProvingOrchestrator.new(builderDb, prover); + + const provingTicket = await orchestrator.startNewBlock( + 4, + globalVariables, + [], + makeEmptyProcessedTx(Header.empty(), new Fr(1234), new Fr(1)), + ); + + for (const tx of txs) { + await orchestrator.addNewTx(tx); + } + + const provingResult = await provingTicket.provingPromise; + + expect(provingResult.status).toBe(PROVING_STATUS.SUCCESS); + }, 600_000); }); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 7cfa5e2dd2a..0689a570022 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -1,28 +1,23 @@ /* eslint-disable require-await */ -import { CircuitSimulationStats } from '@aztec/circuit-types/stats'; import { BaseOrMergeRollupPublicInputs, BaseParityInputs, BaseRollupInputs, MergeRollupInputs, ParityPublicInputs, + PreviousRollupData, Proof, + RollupTypes, RootParityInputs, RootRollupInputs, RootRollupPublicInputs, - makeEmptyProof, } from '@aztec/circuits.js'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; -import { elapsed } from '@aztec/foundation/timer'; import { - BaseParityArtifact, BaseRollupArtifact, - MergeRollupArtifact, ProtocolArtifacts, ProtocolCircuitArtifacts, - RootParityArtifact, - RootRollupArtifact, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, convertBaseRollupInputsToWitnessMap, @@ -36,14 +31,10 @@ import { } from '@aztec/noir-protocol-circuits-types'; import { NativeACVMSimulator } from '@aztec/simulator'; +import { WitnessMap } from '@noir-lang/types'; import * as fs from 'fs/promises'; -import { - BB_RESULT, - generateProof, - generateProvingKeyForNoirCircuit, - generateVerificationKeyForNoirCircuit, -} from '../bb/execute.js'; +import { BB_RESULT, generateProof, generateVerificationKeyForNoirCircuit, verifyProof } from '../bb/execute.js'; import { CircuitProver } from './interface.js'; const logger = createDebugLogger('aztec:bb-prover'); @@ -60,6 +51,7 @@ export type BBProverConfig = { */ export class BBNativeRollupProver implements CircuitProver { private provingKeyDirectories: Map = new Map(); + private 
verificationKeyDirectories: Map = new Map(); constructor(private config: BBProverConfig) {} static async new(config: BBProverConfig) { @@ -83,20 +75,11 @@ export class BBNativeRollupProver implements CircuitProver { public async getBaseParityProof(inputs: BaseParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertBaseParityInputsToWitnessMap(inputs); - const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; - const outputWitnessFile = `${bbWorkingDirectory}/partial-witness`; - - const simulator = new NativeACVMSimulator( - this.config.acvmWorkingDirectory, - this.config.acvmBinaryPath, - outputWitnessFile, - ); - - const witness = await simulator.simulateCircuit(witnessMap, BaseParityArtifact); + const [outputWitness, proof] = await this.createProof(witnessMap, 'BaseParityArtifact'); - const result = convertBaseParityOutputsFromWitnessMap(witness); + const result = convertBaseParityOutputsFromWitnessMap(outputWitness); - return Promise.resolve([result, makeEmptyProof()]); + return Promise.resolve([result, proof]); } /** @@ -105,22 +88,16 @@ export class BBNativeRollupProver implements CircuitProver { * @returns The public inputs of the parity circuit. */ public async getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]> { - const witnessMap = convertRootParityInputsToWitnessMap(inputs); + // verify all base parity inputs + await Promise.all(inputs.children.map(child => this.verifyProof('BaseParityInput', child.proof))); - const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; - const outputWitnessFile = `${bbWorkingDirectory}/partial-witness`; - - const simulator = new NativeACVMSimulator( - this.config.acvmWorkingDirectory, - this.config.acvmBinaryPath, - outputWitnessFile, - ); + const witnessMap = convertRootParityInputsToWitnessMap(inputs); - const witness = await simulator.simulateCircuit(witnessMap, RootParityArtifact); + const [outputWitness, proof] = await this.createProof(witnessMap, 'RootParityArtifact'); - const result = convertRootParityOutputsFromWitnessMap(witness); + const result = convertRootParityOutputsFromWitnessMap(outputWitness); - return Promise.resolve([result, makeEmptyProof()]); + return Promise.resolve([result, proof]); } /** @@ -131,36 +108,11 @@ export class BBNativeRollupProver implements CircuitProver { public async getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { const witnessMap = convertBaseRollupInputsToWitnessMap(input); - const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; - logger(`Using bb working directory ${bbWorkingDirectory}`); - await fs.mkdir(bbWorkingDirectory, { recursive: true }); - const outputWitnessFile = `${bbWorkingDirectory}/partial-witness.gz`; - - const simulator = new NativeACVMSimulator( - this.config.acvmWorkingDirectory, - this.config.acvmBinaryPath, - outputWitnessFile, - ); + const [outputWitness, proof] = await this.createProof(witnessMap, 'BaseRollupArtifact'); - const witness = await simulator.simulateCircuit(witnessMap, BaseRollupArtifact); + const result = convertBaseRollupOutputsFromWitnessMap(outputWitness); - const provingResult = await generateProof( - this.config.bbBinaryPath, - bbWorkingDirectory, - 'Base Rollup', - BaseRollupArtifact, - outputWitnessFile, - logger, - ); - - if (provingResult.result.status === BB_RESULT.FAILURE) { - logger.error(`Failed to generate base rollup proof: 
${provingResult.result.reason}`); - throw new Error(provingResult.result.reason); - } - - const result = convertBaseRollupOutputsFromWitnessMap(witness); - - return Promise.resolve([result, makeEmptyProof()]); + return Promise.resolve([result, proof]); } /** * Simulates the merge rollup circuit from its inputs. @@ -168,23 +120,16 @@ export class BBNativeRollupProver implements CircuitProver { * @returns The public inputs as outputs of the simulation. */ public async getMergeRollupProof(input: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { - const witnessMap = convertMergeRollupInputsToWitnessMap(input); + // verify both inputs + await Promise.all(input.previousRollupData.map(prev => this.verifyPreviousRollupProof(prev))); - const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; - const outputWitnessFile = `${bbWorkingDirectory}/partial-witness`; - - const simulator = new NativeACVMSimulator( - this.config.acvmWorkingDirectory, - this.config.acvmBinaryPath, - outputWitnessFile, - ); + const witnessMap = convertMergeRollupInputsToWitnessMap(input); - // use WASM here as it is faster for small circuits - const witness = await simulator.simulateCircuit(witnessMap, MergeRollupArtifact); + const [outputWitness, proof] = await this.createProof(witnessMap, 'MergeRollupArtifact'); - const result = convertMergeRollupOutputsFromWitnessMap(witness); + const result = convertMergeRollupOutputsFromWitnessMap(outputWitness); - return Promise.resolve([result, makeEmptyProof()]); + return Promise.resolve([result, proof]); } /** @@ -193,30 +138,17 @@ export class BBNativeRollupProver implements CircuitProver { * @returns The public inputs as outputs of the simulation. */ public async getRootRollupProof(input: RootRollupInputs): Promise<[RootRollupPublicInputs, Proof]> { - const witnessMap = convertRootRollupInputsToWitnessMap(input); - - const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; - const outputWitnessFile = `${bbWorkingDirectory}/partial-witness`; + // verify the inputs + await Promise.all(input.previousRollupData.map(prev => this.verifyPreviousRollupProof(prev))); - const simulator = new NativeACVMSimulator( - this.config.acvmWorkingDirectory, - this.config.acvmBinaryPath, - outputWitnessFile, - ); + const witnessMap = convertRootRollupInputsToWitnessMap(input); - // use WASM here as it is faster for small circuits - const [duration, witness] = await elapsed(() => simulator.simulateCircuit(witnessMap, RootRollupArtifact)); + const [outputWitness, proof] = await this.createProof(witnessMap, 'BaseRollupArtifact'); - const result = convertRootRollupOutputsFromWitnessMap(witness); + await this.verifyProof('RootRollupArtifact', proof); - logger(`Simulated root rollup circuit`, { - eventName: 'circuit-simulation', - circuitName: 'root-rollup', - duration, - inputSize: input.toBuffer().length, - outputSize: result.toBuffer().length, - } satisfies CircuitSimulationStats); - return Promise.resolve([result, makeEmptyProof()]); + const result = convertRootRollupOutputsFromWitnessMap(outputWitness); + return Promise.resolve([result, proof]); } private async init() { @@ -225,7 +157,7 @@ export class BBNativeRollupProver implements CircuitProver { ); const promises = []; for (const circuitName of realCircuits) { - const provingKeyPromise = generateProvingKeyForNoirCircuit( + const verificationKeyPromise = generateVerificationKeyForNoirCircuit( this.config.bbBinaryPath, this.config.bbWorkingDirectory, 
circuitName, @@ -233,18 +165,84 @@ export class BBNativeRollupProver implements CircuitProver { logger, ).then(result => { if (result) { - this.provingKeyDirectories.set(circuitName, result); + this.verificationKeyDirectories.set(circuitName, result); } }); - const verificationKeyPromise = generateVerificationKeyForNoirCircuit( - this.config.bbBinaryPath, - this.config.bbWorkingDirectory, - circuitName, - ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], - logger, - ); - promises.push(...[provingKeyPromise, verificationKeyPromise]); + promises.push(verificationKeyPromise); } await Promise.all(promises); } + + private async createProof(witnessMap: WitnessMap, circuitType: string): Promise<[WitnessMap, Proof]> { + // Create random directory to be used for temp files + const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + const outputWitnessFile = `${bbWorkingDirectory}/partial-witness.gz`; + + const simulator = new NativeACVMSimulator( + this.config.acvmWorkingDirectory, + this.config.acvmBinaryPath, + outputWitnessFile, + ); + + const artifact = ProtocolCircuitArtifacts[circuitType as ProtocolArtifacts]; + + logger(`Generating witness data for ${circuitType}`); + + const outputWitness = await simulator.simulateCircuit(witnessMap, artifact); + + logger(`Proving ${circuitType}...`); + + const provingResult = await generateProof( + this.config.bbBinaryPath, + bbWorkingDirectory, + circuitType, + BaseRollupArtifact, + outputWitnessFile, + logger, + ); + + if (provingResult.result.status === BB_RESULT.FAILURE) { + logger.error(`Failed to generate proof for ${circuitType}: ${provingResult.result.reason}`); + throw new Error(provingResult.result.reason); + } + + const proofBuffer = await fs.readFile(provingResult.outputPath); + + await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); + + logger(`Generated proof for ${circuitType}, size: ${proofBuffer.length} bytes`); + + return [outputWitness, Proof.fromBuffer(proofBuffer)]; + } + + private async verifyProof(circuitType: string, proof: Proof) { + // Create random directory to be used for temp files + const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + + const proofFileName = `${bbWorkingDirectory}/proof`; + const verificationKeyPath = this.verificationKeyDirectories.get(circuitType); + + await fs.writeFile(proofFileName, proof.toBuffer()); + + const result = await verifyProof(this.config.bbBinaryPath, proofFileName, verificationKeyPath!, logger); + + await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); + + if (result.result.status === BB_RESULT.FAILURE) { + throw new Error(`Failed to verify ${circuitType} proof!`); + } + + logger(`Successfully verified ${circuitType} proof!`); + } + + private async verifyPreviousRollupProof(previousRollupData: PreviousRollupData) { + const proof = previousRollupData.proof; + const circuitType = + previousRollupData.baseOrMergeRollupPublicInputs.rollupType === RollupTypes.Base + ? 
'BaseRollupArtifact' + : 'MergeRollupArtifact'; + await this.verifyProof(circuitType, proof); + } } diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index 53906edf487..08f58ed8688 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -101,7 +101,6 @@ export async function executeNativeCircuit( try { const output = await processPromise; if (outputFilename) { - logger(`Copying file ${workingDirectory}/output-witness to ${outputFilename}`); await fs.copyFile(`${workingDirectory}/output-witness.gz`, outputFilename); } return parseIntoWitnessMap(output); @@ -122,8 +121,6 @@ export class NativeACVMSimulator implements SimulationProvider { // Provide a unique working directory so we don't get clashes with parallel executions const directory = `${this.workingDirectory}/${randomBytes(8).toString('hex')}`; - logger(`Using working directory ${directory}`); - // Execute the circuit const _witnessMap = await executeNativeCircuit( input, From 5050114b670cb9594c497adb31943f764f1e8d99 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Mon, 8 Apr 2024 19:33:01 +0000 Subject: [PATCH 09/41] WIP --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 29 ++++----- .../dsl/acir_proofs/acir_composer.cpp | 6 ++ .../dsl/acir_proofs/acir_composer.hpp | 2 + .../tooling/noirc_abi_wasm/src/lib.rs | 2 +- yarn-project/prover-client/src/bb/execute.ts | 47 +++++++------- .../src/prover/bb_prover.test.ts | 62 +------------------ .../prover-client/src/prover/bb_prover.ts | 15 ++++- 7 files changed, 63 insertions(+), 100 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 0dfd24c9fe6..763b0cbd864 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -225,33 +225,24 @@ void prove(const std::string& bytecodePath, auto constraint_system = get_constraint_system(bytecodePath); auto witness = get_witness(witnessPath); acir_proofs::AcirComposer acir_composer{ 0, verbose }; + Timer circuit_timer; acir_composer.create_circuit(constraint_system, witness); size_t circuit_size = acir_composer.get_dyadic_circuit_size(); init_bn254_crs(circuit_size); if (pkPath == "") { - Timer pk_timer; acir_composer.init_proving_key(); - std::cout << "Generated proving key for circuit size " << circuit_size << " in " << pk_timer.milliseconds() - << "ms" << std::endl; } else { - std::cout << "Loading CRS for circuit size " << circuit_size + 1 << " from " << CRS_PATH << std::endl; - Timer crs_timer; - auto crs_factory = - std::make_shared>(CRS_PATH, circuit_size + 1); - auto prover_crs = crs_factory->get_prover_crs(circuit_size + 1); - std::cout << "CRS loaded in " << crs_timer.milliseconds() << "ms" << std::endl; - - std::cout << "Loading proving key data from: " << pkPath << std::endl; - bb::plonk::proving_key_data key_data; Timer pk_timer; - read_from_file(pkPath, key_data); - acir_composer.init_proving_key(std::move(key_data), prover_crs); - std::cout << "Proving key loaded in " << pk_timer.milliseconds() << "ms" << std::endl; + bb::plonk::proving_key_data key_data; + auto pk_data = from_buffer(read_file(pkPath)); + auto crs = std::make_unique>(CRS_PATH); + auto proving_key = + std::make_shared(std::move(pk_data), crs->get_prover_crs(pk_data.circuit_size + 1)); + acir_composer.init_proving_key(proving_key); } Timer proof_timer; auto proof = acir_composer.create_proof(); - std::cout << 
"Generated proof in " << proof_timer.milliseconds() << "ms" << std::endl; if (outputPath == "-") { writeRawBytesToStdout(proof); @@ -299,6 +290,7 @@ void gateCount(const std::string& bytecodePath) */ bool verify(const std::string& proof_path, const std::string& vk_path) { + std::cout << "Verifying " << proof_path << " with key at " << vk_path << std::endl; auto acir_composer = verifier_init(); auto vk_data = from_buffer(read_file(vk_path)); acir_composer.load_verification_key(std::move(vk_data)); @@ -342,13 +334,14 @@ void write_pk(const std::string& bytecodePath, const std::string& outputPath) acir_composer.create_circuit(constraint_system); init_bn254_crs(acir_composer.get_dyadic_circuit_size()); auto pk = acir_composer.init_proving_key(); + auto serialized_pk = to_buffer(*pk); if (outputPath == "-") { - auto serialized_pk = to_buffer(*pk); writeRawBytesToStdout(serialized_pk); vinfo("pk written to stdout"); } else { - write_to_file(outputPath, *pk); + auto serialized_pk = to_buffer(*pk); + write_file(outputPath, serialized_pk); vinfo("pk written to: ", outputPath); } } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp index 8e57081549f..e8619a27402 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp @@ -41,6 +41,12 @@ std::shared_ptr AcirComposer::init_proving_key() return proving_key_; } +std::shared_ptr AcirComposer::init_proving_key(std::shared_ptr pk) +{ + proving_key_ = pk; + return proving_key_; +} + std::shared_ptr AcirComposer::init_proving_key( bb::plonk::proving_key_data&& data, std::shared_ptr> const& crs) { diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp index 920beefb259..90d646877e0 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp @@ -21,6 +21,8 @@ class AcirComposer { std::shared_ptr init_proving_key(); + std::shared_ptr init_proving_key(std::shared_ptr pk); + std::shared_ptr init_proving_key( bb::plonk::proving_key_data&& data, std::shared_ptr> const& crs); diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs index 50ce9ae34ae..9874a4664ec 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs @@ -120,5 +120,5 @@ pub fn serialise_witness(witness_map: JsWitnessMap) -> Result, JsAbiErro let converted_witness: WitnessMap = witness_map.into(); let witness_stack: WitnessStack = converted_witness.into(); let output = witness_stack.try_into(); - output.map_err(|| JsAbiError::new("Failed to convert to Vec".to_string())) + output.map_err(|_| JsAbiError::new("Failed to convert to Vec".to_string())) } diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index b7c3f3c3322..7a5b4ca8366 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -41,19 +41,20 @@ export function executeBB( resultParser = (code: number) => code === 0, ) { return new Promise((resolve, reject) => { - let errorBuffer = Buffer.alloc(0); const acvm = proc.spawn(pathToBB, [command, ...args]); acvm.stdout.on('data', data => { - logger(data.toString('utf-8').replace(/\n$/, '')); + 
const message = data.toString('utf-8').replace(/\n$/, ''); + logger(message); }); acvm.stderr.on('data', data => { - errorBuffer = Buffer.concat([errorBuffer, data]); + const message = data.toString('utf-8').replace(/\n$/, ''); + logger(message); }); acvm.on('close', (code: number) => { if (resultParser(code)) { resolve({ status: BB_RESULT.SUCCESS }); } else { - reject(errorBuffer.toString('utf-8')); + reject('BB execution failed'); } }); }).catch((reason: string) => ({ status: BB_RESULT.FAILURE, reason })); @@ -80,6 +81,8 @@ async function generateKeyForNoirCircuit( const bytecodeHashPath = `${circuitOutputDirectory}/${bytecodeHashFilename}`; const bytecodeHash = sha256(bytecode); + const outputPath = `${circuitOutputDirectory}/${key}`; + let mustRegenerate = force || (await fs @@ -94,7 +97,7 @@ async function generateKeyForNoirCircuit( if (!mustRegenerate) { const alreadyPresent: BBSuccess = { status: BB_RESULT.ALREADY_PRESENT }; - return { result: alreadyPresent, outputPath: circuitOutputDirectory }; + return { result: alreadyPresent, outputPath: outputPath }; } const binaryPresent = await fs @@ -103,7 +106,7 @@ async function generateKeyForNoirCircuit( .catch(_ => false); if (!binaryPresent) { const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; - return { result: failed, outputPath: circuitOutputDirectory }; + return { result: failed, outputPath: outputPath }; } // Clear up the circuit output directory removing anything that is there @@ -112,9 +115,7 @@ async function generateKeyForNoirCircuit( // Write the bytecode and input witness to the working directory await fs.writeFile(bytecodePath, bytecode); - // For verification keys, the argument is the full file path - const outputPath = key === 'pk' ? 
circuitOutputDirectory : `${circuitOutputDirectory}/vk`; - const args = ['-o', outputPath, '-b', bytecodePath]; + const args = ['-o', outputPath, '-b', bytecodePath, '-v']; const timer = new Timer(); const result = await executeBB(pathToBB, `write_${key}`, args, log); const duration = timer.ms(); @@ -123,14 +124,14 @@ async function generateKeyForNoirCircuit( return { result, duration, outputPath }; } -const directorySize = async (directory: string, filesToOmit: string[]) => { - const files = await fs.readdir(directory); - const stats = files - .filter(f => !filesToOmit.find(file => file === f)) - .map(file => fs.stat(path.join(directory, file))); +// const directorySize = async (directory: string, filesToOmit: string[]) => { +// const files = await fs.readdir(directory); +// const stats = files +// .filter(f => !filesToOmit.find(file => file === f)) +// .map(file => fs.stat(path.join(directory, file))); - return (await Promise.all(stats)).reduce((accumulator, { size }) => accumulator + size, 0); -}; +// return (await Promise.all(stats)).reduce((accumulator, { size }) => accumulator + size, 0); +// }; export async function generateVerificationKeyForNoirCircuit( pathToBB: string, @@ -187,10 +188,10 @@ export async function generateProvingKeyForNoirCircuit( log(`Proving key for circuit ${circuitName} was already present`); return outputPath; } - const size = await directorySize(outputPath, [bytecodeHashFilename]); + const stats = await fs.stat(outputPath); log( `Proving key for circuit ${circuitName} written to ${outputPath} in ${duration} ms, size: ${ - size / (1024 * 1024) + stats.size / (1024 * 1024) } MB`, ); return outputPath; @@ -230,13 +231,16 @@ export async function generateProof( // For verification keys, the argument is the full file path const outputPath = `${circuitOutputDirectory}/proof`; - let args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; + let args = ['-v', '-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; if (provingKeyDirectory) { args = args.concat(...['-r', provingKeyDirectory!]); } const command = provingKeyDirectory ? 
'prove_with_key' : 'prove'; const timer = new Timer(); - const result = await executeBB(pathToBB, command, args, log); + const logFunction = (message: string) => { + log(`${circuitName} BB out - ${message}`); + }; + const result = await executeBB(pathToBB, command, args, logFunction); const duration = timer.ms(); await fs.rm(bytecodePath, { force: true }); return { result, duration, outputPath }; @@ -252,7 +256,8 @@ export async function verifyProof(pathToBB: string, proofFullPath: string, verif return { result: failed }; } - const args = ['-p', proofFullPath, '-k', verificationKeyPath]; + log(`Verifying proof at ${proofFullPath} with key at ${verificationKeyPath}`); + const args = ['-p', proofFullPath, '-k', verificationKeyPath, '-v']; const timer = new Timer(); const result = await executeBB(pathToBB, 'verify', args, log, (code: number) => code === 1); const duration = timer.ms(); diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index bbd4c842514..4bd76083384 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -115,7 +115,6 @@ describe('prover/bb_prover', () => { bbWorkingDirectory: config.bbWorkingDirectory, }; prover = await BBNativeRollupProver.new(bbConfig); - logger('AFTER PROVER START'); }, 200_000); afterEach(async () => { @@ -124,15 +123,8 @@ describe('prover/bb_prover', () => { } }, 5000); - it('proves the base rollup circuit', async () => { - const txs = await Promise.all([ - makeBloatedProcessedTx(builderDb, 1), - makeBloatedProcessedTx(builderDb, 2), - makeBloatedProcessedTx(builderDb, 3), - makeBloatedProcessedTx(builderDb, 4), - ]); - - logger('Starting Test!!'); + it('proves the base rollup', async () => { + const txs = await Promise.all([makeBloatedProcessedTx(builderDb, 1)]); logger('Building base rollup inputs'); const baseRollupInputs = []; @@ -140,55 +132,7 @@ describe('prover/bb_prover', () => { baseRollupInputs.push(await buildBaseRollupInput(tx, globalVariables, builderDb)); } logger('Proving base rollups'); - const baseRollupOutputs = await Promise.all(baseRollupInputs.map(inputs => prover.getBaseRollupProof(inputs))); - logger('Proving merge rollups'); - const mergeRollupInputs = []; - for (let i = 0; i < 4; i += 2) { - mergeRollupInputs.push( - createMergeRollupInputs( - [baseRollupOutputs[i][0]!, baseRollupOutputs[i][1]!], - [baseRollupOutputs[i + 1][0]!, baseRollupOutputs[i + 1][1]!], - ), - ); - } - const mergeRollupOutputs = await Promise.all(mergeRollupInputs.map(inputs => prover.getMergeRollupProof(inputs))); - - let baseParityInputs: BaseParityInputs[] = []; - let l1ToL2MessagesPadded: Tuple; - try { - l1ToL2MessagesPadded = padArrayEnd([], Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - } catch (err) { - throw new Error('Too many L1 to L2 messages'); - } - baseParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }, (_, i) => - BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i), - ); - - logger('Proving base parity circuits'); - const baseParityOutputs = await Promise.all(baseParityInputs.map(inputs => prover.getBaseParityProof(inputs))); - - const rootParityInputs = new RootParityInputs( - baseParityOutputs.map(([publicInputs, proof]) => new RootParityInput(proof, publicInputs)) as Tuple< - RootParityInput, - typeof NUM_BASE_PARITY_PER_ROOT_PARITY - >, - ); - logger('Proving root parity circuit'); - const rootParityCircuitOutput = await prover.getRootParityProof(rootParityInputs); 
- - const rootParityInput = new RootParityInput(rootParityCircuitOutput[1], rootParityCircuitOutput[0]); - - logger('Proving root rollup circuit')!; - await executeRootRollupCircuit( - [mergeRollupOutputs[0][0]!, mergeRollupOutputs[0][1]!], - [mergeRollupOutputs[1][0]!, mergeRollupOutputs[1][1]!], - rootParityInput, - l1ToL2MessagesPadded, - prover, - builderDb, - logger, - ); - logger('Completed!!'); + await Promise.all(baseRollupInputs.map(inputs => prover.getBaseRollupProof(inputs))); }, 600_000); it('proves all circuits', async () => { diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 0689a570022..5d9913a385a 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -89,7 +89,7 @@ export class BBNativeRollupProver implements CircuitProver { */ public async getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]> { // verify all base parity inputs - await Promise.all(inputs.children.map(child => this.verifyProof('BaseParityInput', child.proof))); + await Promise.all(inputs.children.map(child => this.verifyProof('BaseParityArtifact', child.proof))); const witnessMap = convertRootParityInputsToWitnessMap(inputs); @@ -168,7 +168,19 @@ export class BBNativeRollupProver implements CircuitProver { this.verificationKeyDirectories.set(circuitName, result); } }); + // const provingKeyPromise = generateProvingKeyForNoirCircuit( + // this.config.bbBinaryPath, + // this.config.bbWorkingDirectory, + // circuitName, + // ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], + // logger, + // ).then(result => { + // if (result) { + // this.provingKeyDirectories.set(circuitName, result); + // } + // }); promises.push(verificationKeyPromise); + //promises.push(provingKeyPromise); } await Promise.all(promises); } @@ -200,6 +212,7 @@ export class BBNativeRollupProver implements CircuitProver { BaseRollupArtifact, outputWitnessFile, logger, + //this.provingKeyDirectories.get(circuitType)!, ); if (provingResult.result.status === BB_RESULT.FAILURE) { From 64e6a393e21bc1e06f887ea198a58029dc5d0b50 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 14:38:56 +0000 Subject: [PATCH 10/41] WIP --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 27 +---- .../tooling/acvm_cli/src/cli/fs/witness.rs | 6 +- .../circuit-types/src/tx/processed_tx.ts | 22 +++++ .../src/integration_l1_publisher.test.ts | 2 +- .../noir-protocol-circuits-types/src/index.ts | 10 +- yarn-project/prover-client/src/bb/cli.ts | 6 +- yarn-project/prover-client/src/bb/execute.ts | 34 ++----- .../prover-client/src/mocks/fixtures.ts | 44 +++------ .../src/orchestrator/orchestrator.test.ts | 98 ++++++++++++++----- .../src/orchestrator/orchestrator.ts | 78 +++++++++++---- .../src/orchestrator/proving-state.ts | 20 +++- .../src/prover/bb_prover.test.ts | 6 ++ .../prover-client/src/prover/bb_prover.ts | 39 +++----- .../prover-client/src/prover/index.ts | 1 - .../prover-client/src/tx-prover/tx-prover.ts | 2 - .../src/sequencer/abstract_phase_manager.ts | 42 +++++--- .../src/sequencer/app_logic_phase_manager.ts | 33 +++++-- .../src/sequencer/public_processor.test.ts | 1 + .../src/sequencer/public_processor.ts | 7 +- .../src/sequencer/sequencer.test.ts | 2 +- .../src/sequencer/setup_phase_manager.ts | 21 +++- .../src/sequencer/tail_phase_manager.ts | 22 +++-- .../src/sequencer/teardown_phase_manager.ts | 21 +++- 23 files changed, 334 insertions(+), 210 deletions(-) diff --git 
a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index a59d118611e..5360bf7b66e 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -261,33 +261,17 @@ bool proveAndVerifyGoblin(const std::string& bytecodePath, const std::string& wi * @param witnessPath Path to the file containing the serialized witness * @param recursive Whether to use recursive proof generation of non-recursive * @param outputPath Path to write the proof to - * @param pkPath Optional path containing the proving key data */ -void prove(const std::string& bytecodePath, - const std::string& witnessPath, - const std::string& outputPath, - const std::string& pkPath) +void prove(const std::string& bytecodePath, const std::string& witnessPath, const std::string& outputPath) { auto constraint_system = get_constraint_system(bytecodePath); auto witness = get_witness(witnessPath); acir_proofs::AcirComposer acir_composer{ 0, verbose }; - Timer circuit_timer; acir_composer.create_circuit(constraint_system, witness); size_t circuit_size = acir_composer.get_dyadic_circuit_size(); init_bn254_crs(circuit_size); - if (pkPath == "") { - acir_composer.init_proving_key(); - } else { - Timer pk_timer; - bb::plonk::proving_key_data key_data; - auto pk_data = from_buffer(read_file(pkPath)); - auto crs = std::make_unique>(CRS_PATH); - auto proving_key = - std::make_shared(std::move(pk_data), crs->get_prover_crs(pk_data.circuit_size + 1)); - acir_composer.init_proving_key(proving_key); - } + acir_composer.init_proving_key(); - Timer proof_timer; auto proof = acir_composer.create_proof(); if (outputPath == "-") { @@ -336,7 +320,6 @@ void gateCount(const std::string& bytecodePath) */ bool verify(const std::string& proof_path, const std::string& vk_path) { - std::cout << "Verifying " << proof_path << " with key at " << vk_path << std::endl; auto acir_composer = verifier_init(); auto vk_data = from_buffer(read_file(vk_path)); acir_composer.load_verification_key(std::move(vk_data)); @@ -386,7 +369,6 @@ void write_pk(const std::string& bytecodePath, const std::string& outputPath) writeRawBytesToStdout(serialized_pk); vinfo("pk written to stdout"); } else { - auto serialized_pk = to_buffer(*pk); write_file(outputPath, serialized_pk); vinfo("pk written to: ", outputPath); } @@ -649,10 +631,7 @@ int main(int argc, char* argv[]) if (command == "prove") { std::string output_path = get_option(args, "-o", "./proofs/proof"); - prove(bytecode_path, witness_path, output_path, ""); - } else if (command == "prove_with_key") { - std::string output_path = get_option(args, "-o", "./proofs/proof"); - prove(bytecode_path, witness_path, output_path, pk_path); + prove(bytecode_path, witness_path, output_path); } else if (command == "gates") { gateCount(bytecode_path); } else if (command == "verify") { diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs index d3a4488f3cd..cf2fcdb1c05 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs @@ -54,16 +54,14 @@ pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result>( - witnesses: WitnessMap, + witnesses: WitnessStack, witness_name: &str, witness_dir: P, ) -> Result { create_named_dir(witness_dir.as_ref(), "witness"); let witness_path = witness_dir.as_ref().join(witness_name).with_extension("gz"); - let witness_stack: WitnessStack = witnesses.into(); - - let 
buf: Vec = witness_stack.try_into().map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; + let buf: Vec = witnesses.try_into().map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; write_to_file(buf.as_slice(), &witness_path); Ok(witness_path) diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index a70335cb85f..3782ebfe5d5 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -12,10 +12,24 @@ import { type Header, KernelCircuitPublicInputs, type Proof, + PublicKernelCircuitPrivateInputs, type PublicKernelCircuitPublicInputs, + PublicKernelTailCircuitPrivateInputs, makeEmptyProof, } from '@aztec/circuits.js'; +export enum PublicKernelType { + SETUP, + APP_LOGIC, + TEARDOWN, + TAIL, +} + +export type PublicKernelRequest = { + type: PublicKernelType; + inputs: PublicKernelCircuitPrivateInputs | PublicKernelTailCircuitPrivateInputs; +}; + /** * Represents a tx that has been processed by the sequencer public processor, * so its kernel circuit public inputs are filled in. @@ -38,6 +52,11 @@ export type ProcessedTx = Pick { seed + 0x500, ); - const processedTx = makeProcessedTx(tx, kernelOutput, makeProof()); + const processedTx = makeProcessedTx(tx, kernelOutput, makeProof(), []); processedTx.data.end.newNoteHashes = makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, fr, seed + 0x100); processedTx.data.end.newNullifiers = makeTuple(MAX_NEW_NULLIFIERS_PER_TX, fr, seed + 0x200); diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index 4b20491265b..622de548952 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -29,7 +29,7 @@ import { createBlackBoxSolver, executeCircuitWithBlackBoxSolver, } from '@noir-lang/acvm_js'; -import { type Abi, abiDecode, abiEncode } from '@noir-lang/noirc_abi'; +import { type Abi, abiDecode, abiEncode, serializeWitness } from '@noir-lang/noirc_abi'; import { type WitnessMap } from '@noir-lang/types'; import BaseParityJson from './target/parity_base.json' assert { type: 'json' }; @@ -135,7 +135,7 @@ export const MergeRollupArtifact = MergeRollupJson as NoirCompiledCircuit; export const RootRollupArtifact = RootRollupJson as NoirCompiledCircuit; -export type ProtocolArtifacts = +export type ProtocolArtifact = | 'PrivateKernelInitArtifact' | 'PrivateKernelInnerArtifact' | 'PrivateKernelTailArtifact' @@ -149,7 +149,7 @@ export type ProtocolArtifacts = | 'MergeRollupArtifact' | 'RootRollupArtifact'; -export const ProtocolCircuitArtifacts: Record = { +export const ProtocolCircuitArtifacts: Record = { PrivateKernelInitArtifact: PrivateKernelInitArtifact, PrivateKernelInnerArtifact: PrivateKernelInnerArtifact, PrivateKernelTailArtifact: PrivateKernelTailArtifact, @@ -173,8 +173,8 @@ const getSolver = (): Promise => { return solver; }; -export async function serialiseInputWitness(witness: WitnessMap) { - return await serializeWitness(witness); +export function serializeInputWitness(witness: WitnessMap) { + return serializeWitness(witness); } /** diff --git a/yarn-project/prover-client/src/bb/cli.ts b/yarn-project/prover-client/src/bb/cli.ts index 84fb6fc2888..e081d4fe362 100644 --- a/yarn-project/prover-client/src/bb/cli.ts +++ b/yarn-project/prover-client/src/bb/cli.ts @@ -1,5 +1,5 @@ import { LogFn } from '@aztec/foundation/log'; -import { 
ProtocolArtifacts, ProtocolCircuitArtifacts } from '@aztec/noir-protocol-circuits-types'; +import { ProtocolArtifact, ProtocolCircuitArtifacts } from '@aztec/noir-protocol-circuits-types'; import { Command } from 'commander'; @@ -36,7 +36,7 @@ export function getProgram(log: LogFn): Command { .requiredOption('-b, --bb-path ', 'The path to the BB binary', BB_BINARY_PATH) .requiredOption('-c, --circuit ', 'The name of a protocol circuit') .action(async options => { - const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifacts]; + const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifact]; if (!compiledCircuit) { log(`Failed to find circuit ${options.circuit}`); return; @@ -61,7 +61,7 @@ export function getProgram(log: LogFn): Command { .requiredOption('-b, --bb-path ', 'The path to the BB binary', BB_BINARY_PATH) .requiredOption('-c, --circuit ', 'The name of a protocol circuit') .action(async options => { - const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifacts]; + const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifact]; if (!compiledCircuit) { log(`Failed to find circuit ${options.circuit}`); return; diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index 7a5b4ca8366..dc71358555c 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -5,7 +5,6 @@ import { NoirCompiledCircuit } from '@aztec/types/noir'; import * as proc from 'child_process'; import * as fs from 'fs/promises'; -import path from 'path'; export enum BB_RESULT { SUCCESS, @@ -115,7 +114,7 @@ async function generateKeyForNoirCircuit( // Write the bytecode and input witness to the working directory await fs.writeFile(bytecodePath, bytecode); - const args = ['-o', outputPath, '-b', bytecodePath, '-v']; + const args = ['-o', outputPath, '-b', bytecodePath]; const timer = new Timer(); const result = await executeBB(pathToBB, `write_${key}`, args, log); const duration = timer.ms(); @@ -124,15 +123,6 @@ async function generateKeyForNoirCircuit( return { result, duration, outputPath }; } -// const directorySize = async (directory: string, filesToOmit: string[]) => { -// const files = await fs.readdir(directory); -// const stats = files -// .filter(f => !filesToOmit.find(file => file === f)) -// .map(file => fs.stat(path.join(directory, file))); - -// return (await Promise.all(stats)).reduce((accumulator, { size }) => accumulator + size, 0); -// }; - export async function generateVerificationKeyForNoirCircuit( pathToBB: string, workingDirectory: string, @@ -158,9 +148,7 @@ export async function generateVerificationKeyForNoirCircuit( } const stats = await fs.stat(outputPath); log( - `Verification key for circuit ${circuitName} written to ${outputPath} in ${duration} ms, size: ${ - stats.size / (1024 * 1024) - } MB`, + `Verification key for circuit ${circuitName} generated in ${duration} ms, size: ${stats.size / (1024 * 1024)} MB`, ); return outputPath; } @@ -204,7 +192,6 @@ export async function generateProof( compiledCircuit: NoirCompiledCircuit, inputWitnessFile: string, log: LogFn, - provingKeyDirectory?: string, ) { // The bytecode is written to e.g. 
/workingDirectory/pk/BaseParityArtifact-bytecode const bytecodePath = `${workingDirectory}/proof/${circuitName}-bytecode`; @@ -220,7 +207,7 @@ export async function generateProof( .catch(_ => false); if (!binaryPresent) { const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; - return { result: failed, outputPath: circuitOutputDirectory }; + return { result: failed, outputPath: circuitOutputDirectory, duration: 0 }; } // Clear up the circuit output directory removing anything that is there @@ -229,13 +216,9 @@ export async function generateProof( // Write the bytecode and input witness to the working directory await fs.writeFile(bytecodePath, bytecode); - // For verification keys, the argument is the full file path const outputPath = `${circuitOutputDirectory}/proof`; - let args = ['-v', '-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; - if (provingKeyDirectory) { - args = args.concat(...['-r', provingKeyDirectory!]); - } - const command = provingKeyDirectory ? 'prove_with_key' : 'prove'; + const args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; + const command = 'prove'; const timer = new Timer(); const logFunction = (message: string) => { log(`${circuitName} BB out - ${message}`); @@ -253,13 +236,12 @@ export async function verifyProof(pathToBB: string, proofFullPath: string, verif .catch(_ => false); if (!binaryPresent) { const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; - return { result: failed }; + return { result: failed, duration: 0 }; } - log(`Verifying proof at ${proofFullPath} with key at ${verificationKeyPath}`); - const args = ['-p', proofFullPath, '-k', verificationKeyPath, '-v']; + const args = ['-p', proofFullPath, '-k', verificationKeyPath]; const timer = new Timer(); - const result = await executeBB(pathToBB, 'verify', args, log, (code: number) => code === 1); + const result = await executeBB(pathToBB, 'verify', args, log); const duration = timer.ms(); return { result, duration }; } diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index 0fde07dfaad..29009fbe688 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -1,59 +1,39 @@ import { makeProcessedTx, mockTx } from '@aztec/circuit-types'; import { Fr, + KernelCircuitPublicInputs, MAX_NEW_L2_TO_L1_MSGS_PER_TX, + MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, - MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, - MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, - MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - MAX_REVERTIBLE_NOTE_HASHES_PER_TX, - MAX_REVERTIBLE_NULLIFIERS_PER_TX, - MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicDataUpdateRequest, - PublicKernelCircuitPublicInputs, - SideEffectLinkedToNoteHash, } from '@aztec/circuits.js'; -import { fr, makeNewSideEffect, makeNewSideEffectLinkedToNoteHash, makeProof } from '@aztec/circuits.js/testing'; +import { fr, makeProof } from '@aztec/circuits.js/testing'; import { makeTuple } from '@aztec/foundation/array'; import { MerkleTreeOperations } from '@aztec/world-state'; export const makeBloatedProcessedTx = async (builderDb: MerkleTreeOperations, seed = 0x1) => { seed *= MAX_NEW_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds const tx = mockTx(seed); - const kernelOutput = PublicKernelCircuitPublicInputs.empty(); + const kernelOutput = 
KernelCircuitPublicInputs.empty(); kernelOutput.constants.historicalHeader = await builderDb.buildInitialHeader(); kernelOutput.end.publicDataUpdateRequests = makeTuple( - MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), seed + 0x500, ); - kernelOutput.endNonRevertibleData.publicDataUpdateRequests = makeTuple( - MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + kernelOutput.end.publicDataUpdateRequests = makeTuple( + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), seed + 0x600, ); - const processedTx = makeProcessedTx(tx, kernelOutput, makeProof()); + const processedTx = makeProcessedTx(tx, kernelOutput, makeProof(), []); - processedTx.data.end.newNoteHashes = makeTuple(MAX_REVERTIBLE_NOTE_HASHES_PER_TX, makeNewSideEffect, seed + 0x100); - processedTx.data.endNonRevertibleData.newNoteHashes = makeTuple( - MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, - makeNewSideEffect, - seed + 0x100, - ); - processedTx.data.end.newNullifiers = makeTuple( - MAX_REVERTIBLE_NULLIFIERS_PER_TX, - makeNewSideEffectLinkedToNoteHash, - seed + 0x100000, - ); - - processedTx.data.endNonRevertibleData.newNullifiers = makeTuple( - MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, - makeNewSideEffectLinkedToNoteHash, - seed + 0x100000 + MAX_REVERTIBLE_NULLIFIERS_PER_TX, - ); + processedTx.data.end.newNoteHashes = makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, fr, seed + 0x100); + processedTx.data.end.newNullifiers = makeTuple(MAX_NEW_NULLIFIERS_PER_TX, fr, seed + 0x100000); - processedTx.data.end.newNullifiers[tx.data.end.newNullifiers.length - 1] = SideEffectLinkedToNoteHash.empty(); + processedTx.data.end.newNullifiers[tx.data.forPublic!.end.newNullifiers.length - 1] = Fr.zero(); processedTx.data.end.newL2ToL1Msgs = makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x300); processedTx.data.end.encryptedLogsHash = Fr.fromBuffer(processedTx.encryptedLogs.hash()); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts index b3f0e852f6b..42bae0e480f 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts @@ -1,12 +1,15 @@ import { MerkleTreeId, PROVING_STATUS, - makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, type ProcessedTx, type ProvingFailure, + PublicKernelRequest, + PublicKernelType, + makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, } from '@aztec/circuit-types'; import { AztecAddress, + type BaseOrMergeRollupPublicInputs, EthAddress, Fr, GlobalVariables, @@ -17,26 +20,28 @@ import { PUBLIC_DATA_SUBTREE_HEIGHT, Proof, PublicDataTreeLeaf, - type BaseOrMergeRollupPublicInputs, - type RootRollupPublicInputs + type RootRollupPublicInputs, } from '@aztec/circuits.js'; import { fr, makeBaseOrMergeRollupPublicInputs, makeParityPublicInputs, - makeRootRollupPublicInputs + makePublicKernelCircuitPrivateInputs, + makeRootRollupPublicInputs, } from '@aztec/circuits.js/testing'; import { range } from '@aztec/foundation/array'; import { padArrayEnd, times } from '@aztec/foundation/collection'; import { sleep } from '@aztec/foundation/sleep'; import { openTmpStore } from '@aztec/kv-store/utils'; -import { MerkleTrees, type MerkleTreeOperations } from '@aztec/world-state'; +import { WASMSimulator } from '@aztec/simulator'; +import { type MerkleTreeOperations, MerkleTrees } from 
'@aztec/world-state'; -import { mock, type MockProxy } from 'jest-mock-extended'; -import { default as memdown, type MemDown } from 'memdown'; +import { type MockProxy, mock } from 'jest-mock-extended'; +import { type MemDown, default as memdown } from 'memdown'; import { makeBloatedProcessedTx } from '../mocks/fixtures.js'; import { type CircuitProver } from '../prover/index.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; import { type RollupSimulator } from '../simulator/rollup.js'; import { ProvingOrchestrator } from './orchestrator.js'; @@ -48,7 +53,7 @@ describe('prover/tx-prover', () => { let expectsDb: MerkleTreeOperations; let simulator: MockProxy; - let prover: MockProxy; + const prover = new TestCircuitProver(new WASMSimulator()); let blockNumber: number; let baseRollupOutputLeft: BaseOrMergeRollupPublicInputs; @@ -58,8 +63,6 @@ describe('prover/tx-prover', () => { let globalVariables: GlobalVariables; - const emptyProof = new Proof(Buffer.alloc(32, 0)); - const chainId = Fr.ZERO; const version = Fr.ZERO; const coinbase = EthAddress.ZERO; @@ -76,8 +79,7 @@ describe('prover/tx-prover', () => { builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); simulator = mock(); - prover = mock(); - builder = new ProvingOrchestrator(builderDb, prover); + builder = new ProvingOrchestrator(builderDb, prover, 1); // Create mock l1 to L2 messages mockL1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)); @@ -89,11 +91,6 @@ describe('prover/tx-prover', () => { rootRollupOutput.header.globalVariables = globalVariables; // Set up mocks - prover.getBaseParityProof.mockResolvedValue([makeParityPublicInputs(), emptyProof]); - prover.getRootParityProof.mockResolvedValue([makeParityPublicInputs(), emptyProof]); - prover.getBaseRollupProof.mockResolvedValue([makeBaseOrMergeRollupPublicInputs(), emptyProof]); - prover.getMergeRollupProof.mockResolvedValue([makeBaseOrMergeRollupPublicInputs(), emptyProof]); - prover.getRootRollupProof.mockResolvedValue([makeRootRollupPublicInputs(), emptyProof]); simulator.baseParityCircuit .mockResolvedValueOnce(makeParityPublicInputs(1)) .mockResolvedValue(makeParityPublicInputs(2)) @@ -146,39 +143,40 @@ describe('prover/tx-prover', () => { }; describe('error handling', () => { + const mockProver: MockProxy = mock(); beforeEach(async () => { - builder = await ProvingOrchestrator.new(builderDb, prover); + builder = await ProvingOrchestrator.new(builderDb, mockProver); }); it.each([ [ 'Base Rollup Failed', () => { - prover.getBaseRollupProof.mockRejectedValue('Base Rollup Failed'); + mockProver.getBaseRollupProof.mockRejectedValue('Base Rollup Failed'); }, ], [ 'Merge Rollup Failed', () => { - prover.getMergeRollupProof.mockRejectedValue('Merge Rollup Failed'); + mockProver.getMergeRollupProof.mockRejectedValue('Merge Rollup Failed'); }, ], [ 'Root Rollup Failed', () => { - prover.getRootRollupProof.mockRejectedValue('Root Rollup Failed'); + mockProver.getRootRollupProof.mockRejectedValue('Root Rollup Failed'); }, ], [ 'Base Parity Failed', () => { - prover.getBaseParityProof.mockRejectedValue('Base Parity Failed'); + mockProver.getBaseParityProof.mockRejectedValue('Base Parity Failed'); }, ], [ 'Root Parity Failed', () => { - prover.getRootParityProof.mockRejectedValue('Root Parity Failed'); + mockProver.getRootParityProof.mockRejectedValue('Root Parity Failed'); }, ], ] as const)( @@ -279,7 +277,9 @@ 
describe('prover/tx-prover', () => { }, 30_000); it('builds a block with 1 transaction', async () => { - const txs = await Promise.all([makeEmptyProcessedTx()]); + const txs = await Promise.all([makeBloatedProcessedTx(builderDb, 1)]); + + await updateExpectedTreesFromTxs(txs); // This will need to be a 2 tx block const blockTicket = await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTx()); @@ -298,6 +298,46 @@ describe('prover/tx-prover', () => { expect(finalisedBlock.block.number).toEqual(blockNumber); }, 30_000); + it('builds a block with a transaction with public functions', async () => { + const tx = await makeBloatedProcessedTx(builderDb, 1); + + const setup: PublicKernelRequest = { + type: PublicKernelType.SETUP, + inputs: makePublicKernelCircuitPrivateInputs(2), + }; + + const app: PublicKernelRequest = { + type: PublicKernelType.APP_LOGIC, + inputs: makePublicKernelCircuitPrivateInputs(3), + }; + + const teardown: PublicKernelRequest = { + type: PublicKernelType.TEARDOWN, + inputs: makePublicKernelCircuitPrivateInputs(4), + }; + + const tail: PublicKernelRequest = { + type: PublicKernelType.TAIL, + inputs: makePublicKernelCircuitPrivateInputs(5), + }; + + tx.publicKernelRequests = [setup, app, teardown, tail]; + + // This will need to be a 2 tx block + const blockTicket = await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTx()); + + await builder.addNewTx(tx); + + // we need to complete the block as we have not added a full set of txs + await builder.setBlockCompleted(); + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + }, 30_000); + it('builds multiple blocks in sequence', async () => { const numBlocks = 5; let header = await builderDb.buildInitialHeader(); @@ -461,7 +501,11 @@ describe('prover/tx-prover', () => { }, 10000); it('builds an unbalanced L2 block', async () => { - const txs = await Promise.all([makeBloatedProcessedTx(builderDb, 1), makeBloatedProcessedTx(builderDb, 2), makeBloatedProcessedTx(builderDb, 3)]); + const txs = await Promise.all([ + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + ]); const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); @@ -518,7 +562,7 @@ describe('prover/tx-prover', () => { ); }, 1000); - it('throws if finalising an incompletre block', async () => { + it('throws if finalising an incomplete block', async () => { await expect(async () => await builder.finaliseBlock()).rejects.toThrow( 'Invalid proving state, a block must be proven before it can be finalised', ); @@ -551,7 +595,7 @@ describe('prover/tx-prover', () => { }, 10000); it.each([[-4], [0], [1], [3], [8.1], [7]] as const)( - 'fails to start a block with %i transaxctions', + 'fails to start a block with %i transactions', async (blockSize: number) => { await expect( async () => await builder.startNewBlock(blockSize, globalVariables, [], await makeEmptyProcessedTx()), diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index a8e5d9cdb00..aafa0b37ba7 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -1,4 +1,12 @@ -import { Body, L2Block, MerkleTreeId, type ProcessedTx, type TxEffect, 
toTxEffect } from '@aztec/circuit-types'; +import { + Body, + L2Block, + MerkleTreeId, + type ProcessedTx, + PublicKernelType, + type TxEffect, + toTxEffect, +} from '@aztec/circuit-types'; import { type BlockResult, PROVING_STATUS, @@ -18,6 +26,7 @@ import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY, type Proof, + PublicCallRequest, type RootParityInput, RootParityInputs, } from '@aztec/circuits.js'; @@ -79,6 +88,7 @@ export enum PROVING_JOB_TYPE { ROOT_ROLLUP, BASE_PARITY, ROOT_PARITY, + PUBLIC_KERNEL, } export type ProvingJob = { @@ -100,9 +110,9 @@ export class ProvingOrchestrator { private maxConcurrentJobs = MAX_CONCURRENT_JOBS, ) {} - public static new(db: MerkleTreeOperations, prover: CircuitProver) { + public static async new(db: MerkleTreeOperations, prover: CircuitProver) { const orchestrator = new ProvingOrchestrator(db, prover); - orchestrator.start(); + await orchestrator.start(); return Promise.resolve(orchestrator); } @@ -217,10 +227,39 @@ export class ProvingOrchestrator { logger.info(`Received transaction: ${tx.hash}`); // We start the transaction by enqueueing the state updates - const txIndex = this.provingState.addNewTx(tx); - // we start this transaction off by performing it's tree insertions and - await this.prepareBaseRollupInputs(this.provingState, BigInt(txIndex), tx); + await this.prepareBaseRollupInputs(this.provingState, tx); + this.enqueueJob(this.provingState, PROVING_JOB_TYPE.PUBLIC_KERNEL, () => + this.proveNextPublicFunction(this.provingState, txIndex, 0), + ); + } + + public async proveNextPublicFunction( + provingState: ProvingState | undefined, + txIndex: number, + nextFunctionIndex: number, + ) { + if (!provingState?.verifyState()) { + logger(`Not executing public function, state invalid`); + return; + } + const request = provingState.getNextPublicFunction(txIndex, nextFunctionIndex); + if (!request) { + logger(`No Public Functions`); + const tx = provingState.allTxs[txIndex]; + const inputs = provingState.baseRollupInputs[txIndex]; + const treeSnapshots = provingState.txTreeSnapshots[txIndex]; + logger(`Running tx at index ${txIndex}, hash ${tx.hash.toString()}`); + this.enqueueJob(provingState, PROVING_JOB_TYPE.BASE_ROLLUP, () => + this.runBaseRollup(provingState, BigInt(txIndex), tx, inputs, treeSnapshots), + ); + return; + } + logger(`Executing Public Kernel ${PublicKernelType[request.type]}`); + await sleep(100); + this.enqueueJob(provingState, PROVING_JOB_TYPE.PUBLIC_KERNEL, () => + this.proveNextPublicFunction(provingState, txIndex, nextFunctionIndex + 1), + ); } /** @@ -239,7 +278,13 @@ export class ProvingOrchestrator { ); for (let i = this.provingState.transactionsReceived; i < this.provingState.totalNumTxs; i++) { const paddingTxIndex = this.provingState.addNewTx(this.provingState.emptyTx); - await this.prepareBaseRollupInputs(this.provingState, BigInt(paddingTxIndex), this.provingState!.emptyTx); + await this.prepareBaseRollupInputs(this.provingState, this.provingState!.emptyTx); + const tx = this.provingState.allTxs[paddingTxIndex]; + const inputs = this.provingState.baseRollupInputs[paddingTxIndex]; + const treeSnapshots = this.provingState.txTreeSnapshots[paddingTxIndex]; + this.enqueueJob(this.provingState, PROVING_JOB_TYPE.BASE_ROLLUP, () => + this.runBaseRollup(this.provingState, BigInt(paddingTxIndex), tx, inputs, treeSnapshots), + ); } } @@ -331,7 +376,7 @@ export class ProvingOrchestrator { } // Updates the merkle trees for a transaction. 
The first enqueued job for a transaction - private async prepareBaseRollupInputs(provingState: ProvingState | undefined, index: bigint, tx: ProcessedTx) { + private async prepareBaseRollupInputs(provingState: ProvingState | undefined, tx: ProcessedTx) { if (!provingState?.verifyState()) { logger('Not preparing base rollup inputs, state invalid'); return; @@ -350,9 +395,9 @@ export class ProvingOrchestrator { logger(`Discarding proving job, state no longer valid`); return; } - this.enqueueJob(provingState, PROVING_JOB_TYPE.BASE_ROLLUP, () => - this.runBaseRollup(provingState, index, tx, inputs, treeSnapshots), - ); + provingState!.baseRollupInputs.push(inputs); + provingState!.txTreeSnapshots.push(treeSnapshots); + logger(`Added root ${treeSnapshots.get(MerkleTreeId.NOTE_HASH_TREE)?.root.toString()}`); } // Stores the intermediate inputs prepared for a merge proof @@ -384,6 +429,7 @@ export class ProvingOrchestrator { logger('Not running base rollup, state invalid'); return; } + logger(`Running base at index ${index}, ${inputs.start.noteHashTree.root.toString()}`); const [duration, baseRollupOutputs] = await elapsed(() => executeBaseRollupCircuit(tx, inputs, treeSnapshots, this.prover, logger), ); @@ -450,6 +496,8 @@ export class ProvingOrchestrator { [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!], rootParityInput, provingState.newL1ToL2Messages, + provingState.messageTreeSnapshot, + provingState.messageTreeRootSiblingPath, this.prover, this.db, logger, @@ -472,9 +520,7 @@ export class ProvingOrchestrator { logger('Not running base parity, state no longer valid'); return; } - const [duration, circuitOutputs] = await elapsed(() => - executeBaseParityCircuit(inputs, this.prover, logger), - ); + const [duration, circuitOutputs] = await elapsed(() => executeBaseParityCircuit(inputs, this.prover, logger)); logger.debug(`Simulated base parity circuit`, { eventName: 'circuit-simulation', circuitName: 'base-parity', @@ -508,9 +554,7 @@ export class ProvingOrchestrator { logger(`Not running root parity circuit as state is no longer valid`); return; } - const [duration, circuitOutputs] = await elapsed(() => - executeRootParityCircuit(inputs, this.prover, logger), - ); + const [duration, circuitOutputs] = await elapsed(() => executeRootParityCircuit(inputs, this.prover, logger)); logger.debug(`Simulated root parity circuit`, { eventName: 'circuit-simulation', circuitName: 'root-parity', diff --git a/yarn-project/prover-client/src/orchestrator/proving-state.ts b/yarn-project/prover-client/src/orchestrator/proving-state.ts index 4201de80a35..6c41cf2a54e 100644 --- a/yarn-project/prover-client/src/orchestrator/proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/proving-state.ts @@ -1,7 +1,8 @@ -import { type L2Block, type ProcessedTx, type ProvingResult } from '@aztec/circuit-types'; +import { type L2Block, MerkleTreeId, type ProcessedTx, type ProvingResult } from '@aztec/circuit-types'; import { type AppendOnlyTreeSnapshot, type BaseOrMergeRollupPublicInputs, + BaseRollupInputs, type Fr, type GlobalVariables, type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, @@ -38,6 +39,8 @@ export class ProvingState { public finalProof: Proof | undefined; public block: L2Block | undefined; private txs: ProcessedTx[] = []; + public baseRollupInputs: BaseRollupInputs[] = []; + public txTreeSnapshots: Map[] = []; constructor( public readonly totalNumTxs: number, private completionCallback: (result: ProvingResult) => void, @@ -140,6 +143,21 @@ export class ProvingState { return 
this.rootParityInputs.findIndex(p => !p) === -1; } + public txHasPublicFunctions(index: number) { + return index >= 0 && this.txs.length > index && this.txs[index].publicKernelRequests.length; + } + + public getNextPublicFunction(txIndex: number, nextIndex: number) { + if (txIndex < 0 || txIndex >= this.txs.length) { + return undefined; + } + const tx = this.txs[txIndex]; + if (nextIndex < 0 || nextIndex >= tx.publicKernelRequests.length) { + return undefined; + } + return tx.publicKernelRequests[nextIndex]; + } + public cancel() { this.reject('Proving cancelled'); } diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 4bd76083384..2cb5dd8f98e 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -156,8 +156,14 @@ describe('prover/bb_prover', () => { await orchestrator.addNewTx(tx); } + await orchestrator.setBlockCompleted(); + const provingResult = await provingTicket.provingPromise; expect(provingResult.status).toBe(PROVING_STATUS.SUCCESS); + + await orchestrator.finaliseBlock(); + + await orchestrator.stop(); }, 600_000); }); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 5d9913a385a..cf241b163e5 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -16,7 +16,7 @@ import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { BaseRollupArtifact, - ProtocolArtifacts, + ProtocolArtifact, ProtocolCircuitArtifacts, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, @@ -50,7 +50,6 @@ export type BBProverConfig = { * Prover implementation that uses barretenberg native proving */ export class BBNativeRollupProver implements CircuitProver { - private provingKeyDirectories: Map = new Map(); private verificationKeyDirectories: Map = new Map(); constructor(private config: BBProverConfig) {} @@ -143,7 +142,7 @@ export class BBNativeRollupProver implements CircuitProver { const witnessMap = convertRootRollupInputsToWitnessMap(input); - const [outputWitness, proof] = await this.createProof(witnessMap, 'BaseRollupArtifact'); + const [outputWitness, proof] = await this.createProof(witnessMap, 'RootRollupArtifact'); await this.verifyProof('RootRollupArtifact', proof); @@ -161,31 +160,19 @@ export class BBNativeRollupProver implements CircuitProver { this.config.bbBinaryPath, this.config.bbWorkingDirectory, circuitName, - ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], + ProtocolCircuitArtifacts[circuitName as ProtocolArtifact], logger, ).then(result => { if (result) { this.verificationKeyDirectories.set(circuitName, result); } }); - // const provingKeyPromise = generateProvingKeyForNoirCircuit( - // this.config.bbBinaryPath, - // this.config.bbWorkingDirectory, - // circuitName, - // ProtocolCircuitArtifacts[circuitName as ProtocolArtifacts], - // logger, - // ).then(result => { - // if (result) { - // this.provingKeyDirectories.set(circuitName, result); - // } - // }); promises.push(verificationKeyPromise); - //promises.push(provingKeyPromise); } await Promise.all(promises); } - private async createProof(witnessMap: WitnessMap, circuitType: string): Promise<[WitnessMap, Proof]> { + private async createProof(witnessMap: WitnessMap, circuitType: ProtocolArtifact): Promise<[WitnessMap, Proof]> { // Create 
random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); @@ -197,7 +184,7 @@ export class BBNativeRollupProver implements CircuitProver { outputWitnessFile, ); - const artifact = ProtocolCircuitArtifacts[circuitType as ProtocolArtifacts]; + const artifact = ProtocolCircuitArtifacts[circuitType]; logger(`Generating witness data for ${circuitType}`); @@ -209,10 +196,9 @@ export class BBNativeRollupProver implements CircuitProver { this.config.bbBinaryPath, bbWorkingDirectory, circuitType, - BaseRollupArtifact, + artifact, outputWitnessFile, logger, - //this.provingKeyDirectories.get(circuitType)!, ); if (provingResult.result.status === BB_RESULT.FAILURE) { @@ -224,12 +210,12 @@ export class BBNativeRollupProver implements CircuitProver { await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); - logger(`Generated proof for ${circuitType}, size: ${proofBuffer.length} bytes`); + logger(`Generated proof for ${circuitType} in ${provingResult.duration} ms, size: ${proofBuffer.length} bytes`); - return [outputWitness, Proof.fromBuffer(proofBuffer)]; + return [outputWitness, new Proof(proofBuffer)]; } - private async verifyProof(circuitType: string, proof: Proof) { + private async verifyProof(circuitType: ProtocolArtifact, proof: Proof) { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); @@ -237,17 +223,18 @@ export class BBNativeRollupProver implements CircuitProver { const proofFileName = `${bbWorkingDirectory}/proof`; const verificationKeyPath = this.verificationKeyDirectories.get(circuitType); - await fs.writeFile(proofFileName, proof.toBuffer()); + await fs.writeFile(proofFileName, proof.buffer); const result = await verifyProof(this.config.bbBinaryPath, proofFileName, verificationKeyPath!, logger); await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); if (result.result.status === BB_RESULT.FAILURE) { - throw new Error(`Failed to verify ${circuitType} proof!`); + const errorMessage = `Failed to verify ${circuitType} proof!`; + throw new Error(errorMessage); } - logger(`Successfully verified ${circuitType} proof!`); + logger(`Successfully verified ${circuitType} proof in ${result.duration} ms`); } private async verifyPreviousRollupProof(previousRollupData: PreviousRollupData) { diff --git a/yarn-project/prover-client/src/prover/index.ts b/yarn-project/prover-client/src/prover/index.ts index d2c4991b936..8a595f1c973 100644 --- a/yarn-project/prover-client/src/prover/index.ts +++ b/yarn-project/prover-client/src/prover/index.ts @@ -1,2 +1 @@ export * from './interface.js'; -export * from './empty.js'; diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index be7b1b5a8d8..daa7259f65c 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -21,8 +21,6 @@ export class TxProver implements ProverClient { ) { this.orchestrator = new ProvingOrchestrator( worldStateSynchronizer.getLatest(), - simulationProvider, - getVerificationKeys(), new TestCircuitProver(simulationProvider), ); } diff --git a/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts index 
da083c15b97..3a0a245aa8d 100644 --- a/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts @@ -1,4 +1,10 @@ -import { MerkleTreeId, type SimulationError, type Tx, type UnencryptedFunctionL2Logs } from '@aztec/circuit-types'; +import { + MerkleTreeId, + PublicKernelRequest, + type SimulationError, + type Tx, + type UnencryptedFunctionL2Logs, +} from '@aztec/circuit-types'; import { AztecAddress, CallRequest, @@ -104,6 +110,10 @@ export abstract class AbstractPhaseManager { publicKernelPublicInputs: PublicKernelCircuitPublicInputs, previousPublicKernelProof: Proof, ): Promise<{ + /** + * The collection of public kernel requests + */ + kernelRequests: PublicKernelRequest[]; /** * the output of the public kernel circuit for this phase */ @@ -195,6 +205,7 @@ export abstract class AbstractPhaseManager { previousPublicKernelProof: Proof, ): Promise< [ + PublicKernelCircuitPrivateInputs[], PublicKernelCircuitPublicInputs, Proof, UnencryptedFunctionL2Logs[], @@ -203,12 +214,13 @@ export abstract class AbstractPhaseManager { ] > { let kernelOutput = previousPublicKernelOutput; - let kernelProof = previousPublicKernelProof; + const kernelProof = previousPublicKernelProof; + const publicKernelInputs: PublicKernelCircuitPrivateInputs[] = []; const enqueuedCalls = this.extractEnqueuedPublicCalls(tx); if (!enqueuedCalls || !enqueuedCalls.length) { - return [kernelOutput, kernelProof, [], undefined, undefined]; + return [[], kernelOutput, kernelProof, [], undefined, undefined]; } const newUnencryptedFunctionLogs: UnencryptedFunctionL2Logs[] = []; @@ -254,7 +266,10 @@ export abstract class AbstractPhaseManager { executionStack.push(...result.nestedExecutions); const callData = await this.getPublicCallData(result, isExecutionRequest); - [kernelOutput, kernelProof] = await this.runKernelCircuit(kernelOutput, kernelProof, callData); + const circuitResult = await this.runKernelCircuit(kernelOutput, kernelProof, callData); + kernelOutput = circuitResult[1]; + + publicKernelInputs.push(circuitResult[0]); // sanity check. Note we can't expect them to just be equal, because e.g. 
// if the simulator reverts in app logic, it "resets" and result.reverted will be false when we run teardown, @@ -273,7 +288,7 @@ export abstract class AbstractPhaseManager { result.revertReason }`, ); - return [kernelOutput, kernelProof, [], result.revertReason, undefined]; + return [[], kernelOutput, kernelProof, [], result.revertReason, undefined]; } if (!enqueuedExecutionResult) { @@ -300,33 +315,32 @@ export abstract class AbstractPhaseManager { // TODO(#3675): This should be done in a public kernel circuit removeRedundantPublicDataWrites(kernelOutput, this.phase); - return [kernelOutput, kernelProof, newUnencryptedFunctionLogs, undefined, returns]; + return [publicKernelInputs, kernelOutput, kernelProof, newUnencryptedFunctionLogs, undefined, returns]; } protected async runKernelCircuit( previousOutput: PublicKernelCircuitPublicInputs, previousProof: Proof, callData: PublicCallData, - ): Promise<[PublicKernelCircuitPublicInputs, Proof]> { - const output = await this.getKernelCircuitOutput(previousOutput, previousProof, callData); - return [output, makeEmptyProof()]; + ): Promise<[PublicKernelCircuitPrivateInputs, PublicKernelCircuitPublicInputs]> { + return await this.getKernelCircuitOutput(previousOutput, previousProof, callData); } - protected getKernelCircuitOutput( + protected async getKernelCircuitOutput( previousOutput: PublicKernelCircuitPublicInputs, previousProof: Proof, callData: PublicCallData, - ): Promise { + ): Promise<[PublicKernelCircuitPrivateInputs, PublicKernelCircuitPublicInputs]> { const previousKernel = this.getPreviousKernelData(previousOutput, previousProof); const inputs = new PublicKernelCircuitPrivateInputs(previousKernel, callData); switch (this.phase) { case PublicKernelPhase.SETUP: - return this.publicKernel.publicKernelCircuitSetup(inputs); + return [inputs, await this.publicKernel.publicKernelCircuitSetup(inputs)]; case PublicKernelPhase.APP_LOGIC: - return this.publicKernel.publicKernelCircuitAppLogic(inputs); + return [inputs, await this.publicKernel.publicKernelCircuitAppLogic(inputs)]; case PublicKernelPhase.TEARDOWN: - return this.publicKernel.publicKernelCircuitTeardown(inputs); + return [inputs, await this.publicKernel.publicKernelCircuitTeardown(inputs)]; default: throw new Error(`No public kernel circuit for inputs`); } diff --git a/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts index 007910a0572..1fbb003a5fd 100644 --- a/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts @@ -1,4 +1,4 @@ -import { type Tx } from '@aztec/circuit-types'; +import { PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, @@ -40,14 +40,20 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { // TODO(@spalladino): Should we allow emitting contracts in the fee preparation phase? 
this.log(`Processing tx ${tx.getTxHash()}`); await this.publicContractsDB.addNewContracts(tx); - const [publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason, returnValues] = - await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( - // if we throw for any reason other than simulation, we need to rollback and drop the TX - async err => { - await this.publicStateDB.rollbackToCommit(); - throw err; - }, - ); + const [ + kernelInputs, + publicKernelOutput, + publicKernelProof, + newUnencryptedFunctionLogs, + revertReason, + returnValues, + ] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( + // if we throw for any reason other than simulation, we need to rollback and drop the TX + async err => { + await this.publicStateDB.rollbackToCommit(); + throw err; + }, + ); if (revertReason) { await this.publicContractsDB.removeNewContracts(tx); @@ -57,6 +63,13 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { await this.publicStateDB.checkpoint(); } - return { publicKernelOutput, publicKernelProof, revertReason, returnValues }; + const kernelRequests = kernelInputs.map(input => { + const request: PublicKernelRequest = { + type: PublicKernelType.APP_LOGIC, + inputs: input, + }; + return request; + }); + return { kernelRequests, publicKernelOutput, publicKernelProof, revertReason, returnValues }; } } diff --git a/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts b/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts index 0e572d0a1f9..01059216cfb 100644 --- a/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts @@ -115,6 +115,7 @@ describe('public_processor', () => { unencryptedLogs: tx.unencryptedLogs, isEmpty: false, revertReason: undefined, + publicKernelRequests: [], }; // Jest is complaining that the two objects are not equal, but they are. diff --git a/yarn-project/sequencer-client/src/sequencer/public_processor.ts b/yarn-project/sequencer-client/src/sequencer/public_processor.ts index 0d03d43d6d9..2b6f2c48337 100644 --- a/yarn-project/sequencer-client/src/sequencer/public_processor.ts +++ b/yarn-project/sequencer-client/src/sequencer/public_processor.ts @@ -2,6 +2,7 @@ import { type BlockProver, type FailedTx, type ProcessedTx, + PublicKernelRequest, type SimulationError, Tx, makeEmptyProcessedTx, @@ -105,7 +106,7 @@ export class PublicProcessor { } try { const [processedTx, returnValues] = !tx.hasPublicCalls() - ? [makeProcessedTx(tx, tx.data.toKernelCircuitPublicInputs(), tx.proof)] + ? 
[makeProcessedTx(tx, tx.data.toKernelCircuitPublicInputs(), tx.proof, [])] : await this.processTxWithPublicCalls(tx); validateProcessedTx(processedTx); // Re-validate the transaction @@ -151,6 +152,7 @@ export class PublicProcessor { private async processTxWithPublicCalls(tx: Tx): Promise<[ProcessedTx, ProcessReturnValues | undefined]> { let returnValues: ProcessReturnValues = undefined; + const publicRequests: PublicKernelRequest[] = []; let phase: AbstractPhaseManager | undefined = PhaseManagerFactory.phaseFromTx( tx, this.db, @@ -172,6 +174,7 @@ export class PublicProcessor { if (phase.phase === PublicKernelPhase.APP_LOGIC) { returnValues = output.returnValues; } + publicRequests.push(...output.kernelRequests); publicKernelPublicInput = output.publicKernelOutput; finalKernelOutput = output.finalKernelOutput; proof = output.publicKernelProof; @@ -193,7 +196,7 @@ export class PublicProcessor { throw new Error('Final public kernel was not executed.'); } - const processedTx = makeProcessedTx(tx, finalKernelOutput, proof, revertReason); + const processedTx = makeProcessedTx(tx, finalKernelOutput, proof, publicRequests, revertReason); this.log(`Processed public part of ${tx.getTxHash()}`, { eventName: 'tx-sequencer-processing', diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 76d6fb6a050..9071f6d7e64 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -73,7 +73,7 @@ describe('sequencer', () => { publicProcessor = mock({ process: async txs => [ - await Promise.all(txs.map(tx => makeProcessedTx(tx, tx.data.toKernelCircuitPublicInputs(), makeProof()))), + await Promise.all(txs.map(tx => makeProcessedTx(tx, tx.data.toKernelCircuitPublicInputs(), makeProof(), []))), [], [], ], diff --git a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts index 43e434d9b02..cd9e412a005 100644 --- a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts @@ -1,4 +1,4 @@ -import { type Tx } from '@aztec/circuit-types'; +import { PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, @@ -35,7 +35,7 @@ export class SetupPhaseManager extends AbstractPhaseManager { previousPublicKernelProof: Proof, ) { this.log(`Processing tx ${tx.getTxHash()}`); - const [publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = + const [kernelInputs, publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( // the abstract phase manager throws if simulation gives error in a non-revertible phase async err => { @@ -45,6 +45,21 @@ export class SetupPhaseManager extends AbstractPhaseManager { ); tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs); await this.publicStateDB.checkpoint(); - return { publicKernelOutput, publicKernelProof, revertReason, returnValues: undefined }; + + const kernelRequests = kernelInputs.map(input => { + const request: PublicKernelRequest = { + type: PublicKernelType.SETUP, + inputs: input, + }; + return request; + }); + return { + kernelRequests, + kernelInputs, + publicKernelOutput, + publicKernelProof, + revertReason, + 
returnValues: undefined, + }; } } diff --git a/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts index 748dca637cd..06a850f1053 100644 --- a/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts @@ -1,4 +1,4 @@ -import { type Tx } from '@aztec/circuit-types'; +import { PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type Fr, type GlobalVariables, @@ -37,7 +37,7 @@ export class TailPhaseManager extends AbstractPhaseManager { async handle(tx: Tx, previousPublicKernelOutput: PublicKernelCircuitPublicInputs, previousPublicKernelProof: Proof) { this.log(`Processing tx ${tx.getTxHash()}`); - const [finalKernelOutput, publicKernelProof] = await this.runTailKernelCircuit( + const [inputs, finalKernelOutput] = await this.runTailKernelCircuit( previousPublicKernelOutput, previousPublicKernelProof, ).catch( @@ -51,10 +51,16 @@ // commit the state updates from this transaction await this.publicStateDB.commit(); + const request: PublicKernelRequest = { + type: PublicKernelType.TAIL, + inputs: inputs, + }; + return { + kernelRequests: [request], publicKernelOutput: previousPublicKernelOutput, finalKernelOutput, - publicKernelProof, + publicKernelProof: makeEmptyProof(), revertReason: undefined, returnValues: undefined, }; @@ -63,8 +69,8 @@ private async runTailKernelCircuit( previousOutput: PublicKernelCircuitPublicInputs, previousProof: Proof, - ): Promise<[KernelCircuitPublicInputs, Proof]> { - const output = await this.simulate(previousOutput, previousProof); + ): Promise<[PublicKernelTailCircuitPrivateInputs, KernelCircuitPublicInputs]> { + const [inputs, output] = await this.simulate(previousOutput, previousProof); // Temporary hack. Should sort them in the tail circuit.
const noteHashes = mergeAccumulatedData( @@ -74,13 +80,13 @@ export class TailPhaseManager extends AbstractPhaseManager { ); output.end.newNoteHashes = this.sortNoteHashes(noteHashes); - return [output, makeEmptyProof()]; + return [inputs, output]; } private async simulate( previousOutput: PublicKernelCircuitPublicInputs, previousProof: Proof, - ): Promise { + ): Promise<[PublicKernelTailCircuitPrivateInputs, KernelCircuitPublicInputs]> { const previousKernel = this.getPreviousKernelData(previousOutput, previousProof); const { validationRequests, endNonRevertibleData, end } = previousOutput; @@ -99,7 +105,7 @@ export class TailPhaseManager extends AbstractPhaseManager { nullifierReadRequestHints, nullifierNonExistentReadRequestHints, ); - return this.publicKernel.publicKernelCircuitTail(inputs); + return [inputs, await this.publicKernel.publicKernelCircuitTail(inputs)]; } private sortNoteHashes(noteHashes: Tuple): Tuple { diff --git a/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts index 825d2de2410..bc5c80dc8e4 100644 --- a/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts @@ -1,4 +1,4 @@ -import { type Tx } from '@aztec/circuit-types'; +import { PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, @@ -35,7 +35,7 @@ export class TeardownPhaseManager extends AbstractPhaseManager { previousPublicKernelProof: Proof, ) { this.log(`Processing tx ${tx.getTxHash()}`); - const [publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = + const [kernelInputs, publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( // the abstract phase manager throws if simulation gives error in a non-revertible phase async err => { @@ -45,6 +45,21 @@ export class TeardownPhaseManager extends AbstractPhaseManager { ); tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs); await this.publicStateDB.checkpoint(); - return { publicKernelOutput, publicKernelProof, revertReason, returnValues: undefined }; + + const kernelRequests = kernelInputs.map(input => { + const request: PublicKernelRequest = { + type: PublicKernelType.TEARDOWN, + inputs: input, + }; + return request; + }); + return { + kernelRequests, + kernelInputs, + publicKernelOutput, + publicKernelProof, + revertReason, + returnValues: undefined, + }; } } From 660a21b22f9ce4107e8e01be96527bcd7af76b7e Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 14:43:06 +0000 Subject: [PATCH 11/41] Fixes --- .../circuit-types/src/tx/processed_tx.ts | 4 +-- yarn-project/prover-client/src/bb/cli.ts | 4 +-- yarn-project/prover-client/src/bb/execute.ts | 4 +-- .../prover-client/src/mocks/fixtures.ts | 2 +- .../orchestrator/block-building-helpers.ts | 2 +- .../src/orchestrator/orchestrator.test.ts | 3 +-- .../src/orchestrator/orchestrator.ts | 5 ++-- .../src/orchestrator/proving-state.ts | 4 +-- .../src/prover/bb_prover.test.ts | 26 +++---------------- .../prover-client/src/prover/bb_prover.ts | 25 +++++++++--------- .../prover-client/src/prover/interface.ts | 22 ++++++++-------- .../src/prover/test_circuit_prover.ts | 26 +++++++++---------- .../src/sequencer/abstract_phase_manager.ts | 2 +- .../src/sequencer/app_logic_phase_manager.ts | 2 +-
.../src/sequencer/public_processor.ts | 2 +- .../src/sequencer/setup_phase_manager.ts | 2 +- .../src/sequencer/tail_phase_manager.ts | 2 +- .../src/sequencer/teardown_phase_manager.ts | 2 +- 18 files changed, 58 insertions(+), 81 deletions(-) diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index 3782ebfe5d5..9b12f386ab0 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -12,9 +12,9 @@ import { type Header, KernelCircuitPublicInputs, type Proof, - PublicKernelCircuitPrivateInputs, + type PublicKernelCircuitPrivateInputs, type PublicKernelCircuitPublicInputs, - PublicKernelTailCircuitPrivateInputs, + type PublicKernelTailCircuitPrivateInputs, makeEmptyProof, } from '@aztec/circuits.js'; diff --git a/yarn-project/prover-client/src/bb/cli.ts b/yarn-project/prover-client/src/bb/cli.ts index e081d4fe362..564ab1039f4 100644 --- a/yarn-project/prover-client/src/bb/cli.ts +++ b/yarn-project/prover-client/src/bb/cli.ts @@ -1,5 +1,5 @@ -import { LogFn } from '@aztec/foundation/log'; -import { ProtocolArtifact, ProtocolCircuitArtifacts } from '@aztec/noir-protocol-circuits-types'; +import { type LogFn } from '@aztec/foundation/log'; +import { type ProtocolArtifact, ProtocolCircuitArtifacts } from '@aztec/noir-protocol-circuits-types'; import { Command } from 'commander'; diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index dc71358555c..1700044d15c 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -1,7 +1,7 @@ import { sha256 } from '@aztec/foundation/crypto'; -import { LogFn } from '@aztec/foundation/log'; +import { type LogFn } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; -import { NoirCompiledCircuit } from '@aztec/types/noir'; +import { type NoirCompiledCircuit } from '@aztec/types/noir'; import * as proc from 'child_process'; import * as fs from 'fs/promises'; diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index 29009fbe688..be699c58c21 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -10,7 +10,7 @@ import { } from '@aztec/circuits.js'; import { fr, makeProof } from '@aztec/circuits.js/testing'; import { makeTuple } from '@aztec/foundation/array'; -import { MerkleTreeOperations } from '@aztec/world-state'; +import { type MerkleTreeOperations } from '@aztec/world-state'; export const makeBloatedProcessedTx = async (builderDb: MerkleTreeOperations, seed = 0x1) => { seed *= MAX_NEW_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index fb8259b03fa..5a9e8162888 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -46,7 +46,7 @@ import { type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/seri import { type MerkleTreeOperations } from '@aztec/world-state'; import { type VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; -import { CircuitProver } from '../prover/interface.js'; +import { type CircuitProver } from '../prover/interface.js'; // Denotes fields that are 
not used now, but will be in the future const FUTURE_FR = new Fr(0n); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts index 42bae0e480f..290d0c463b5 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts @@ -3,7 +3,7 @@ import { PROVING_STATUS, type ProcessedTx, type ProvingFailure, - PublicKernelRequest, + type PublicKernelRequest, PublicKernelType, makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, } from '@aztec/circuit-types'; @@ -18,7 +18,6 @@ import { NULLIFIER_SUBTREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, PUBLIC_DATA_SUBTREE_HEIGHT, - Proof, PublicDataTreeLeaf, type RootRollupPublicInputs, } from '@aztec/circuits.js'; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index aafa0b37ba7..62913b329ea 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -26,7 +26,6 @@ import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY, type Proof, - PublicCallRequest, type RootParityInput, RootParityInputs, } from '@aztec/circuits.js'; @@ -37,11 +36,11 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { type Tuple } from '@aztec/foundation/serialize'; import { sleep } from '@aztec/foundation/sleep'; import { elapsed } from '@aztec/foundation/timer'; -import { MerkleTreeOperations } from '@aztec/world-state'; +import { type MerkleTreeOperations } from '@aztec/world-state'; import { inspect } from 'util'; -import { CircuitProver } from '../prover/index.js'; +import { type CircuitProver } from '../prover/index.js'; import { buildBaseRollupInput, createMergeRollupInputs, diff --git a/yarn-project/prover-client/src/orchestrator/proving-state.ts b/yarn-project/prover-client/src/orchestrator/proving-state.ts index 6c41cf2a54e..157813cccd4 100644 --- a/yarn-project/prover-client/src/orchestrator/proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/proving-state.ts @@ -1,8 +1,8 @@ -import { type L2Block, MerkleTreeId, type ProcessedTx, type ProvingResult } from '@aztec/circuit-types'; +import { type L2Block, type MerkleTreeId, type ProcessedTx, type ProvingResult } from '@aztec/circuit-types'; import { type AppendOnlyTreeSnapshot, type BaseOrMergeRollupPublicInputs, - BaseRollupInputs, + type BaseRollupInputs, type Fr, type GlobalVariables, type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 2cb5dd8f98e..27ed3edaf7a 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -1,38 +1,20 @@ import { PROVING_STATUS, makeEmptyProcessedTx } from '@aztec/circuit-types'; -import { - AztecAddress, - BaseParityInputs, - EthAddress, - Fr, - GlobalVariables, - Header, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, - NUM_BASE_PARITY_PER_ROOT_PARITY, - RootParityInput, - RootParityInputs, - RootRollupPublicInputs, -} from '@aztec/circuits.js'; +import { AztecAddress, EthAddress, Fr, GlobalVariables, Header, type RootRollupPublicInputs } from '@aztec/circuits.js'; import { makeRootRollupPublicInputs } from '@aztec/circuits.js/testing'; -import { padArrayEnd } from 
'@aztec/foundation/collection'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; -import { Tuple } from '@aztec/foundation/serialize'; import { fileURLToPath } from '@aztec/foundation/url'; import { openTmpStore } from '@aztec/kv-store/utils'; -import { MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; +import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import * as fs from 'fs/promises'; import { type MemDown, default as memdown } from 'memdown'; import path from 'path'; import { makeBloatedProcessedTx } from '../mocks/fixtures.js'; -import { - buildBaseRollupInput, - createMergeRollupInputs, - executeRootRollupCircuit, -} from '../orchestrator/block-building-helpers.js'; +import { buildBaseRollupInput } from '../orchestrator/block-building-helpers.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; -import { BBNativeRollupProver, BBProverConfig } from './bb_prover.js'; +import { BBNativeRollupProver, type BBProverConfig } from './bb_prover.js'; export const createMemDown = () => (memdown as any)() as MemDown; diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index cf241b163e5..33967de2534 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -1,22 +1,21 @@ /* eslint-disable require-await */ import { - BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BaseRollupInputs, - MergeRollupInputs, - ParityPublicInputs, - PreviousRollupData, + type BaseOrMergeRollupPublicInputs, + type BaseParityInputs, + type BaseRollupInputs, + type MergeRollupInputs, + type ParityPublicInputs, + type PreviousRollupData, Proof, RollupTypes, - RootParityInputs, - RootRollupInputs, - RootRollupPublicInputs, + type RootParityInputs, + type RootRollupInputs, + type RootRollupPublicInputs, } from '@aztec/circuits.js'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { - BaseRollupArtifact, - ProtocolArtifact, + type ProtocolArtifact, ProtocolCircuitArtifacts, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, @@ -31,11 +30,11 @@ import { } from '@aztec/noir-protocol-circuits-types'; import { NativeACVMSimulator } from '@aztec/simulator'; -import { WitnessMap } from '@noir-lang/types'; +import { type WitnessMap } from '@noir-lang/types'; import * as fs from 'fs/promises'; import { BB_RESULT, generateProof, generateVerificationKeyForNoirCircuit, verifyProof } from '../bb/execute.js'; -import { CircuitProver } from './interface.js'; +import { type CircuitProver } from './interface.js'; const logger = createDebugLogger('aztec:bb-prover'); diff --git a/yarn-project/prover-client/src/prover/interface.ts b/yarn-project/prover-client/src/prover/interface.ts index 0f0d124ccdd..1d84f91bef6 100644 --- a/yarn-project/prover-client/src/prover/interface.ts +++ b/yarn-project/prover-client/src/prover/interface.ts @@ -1,15 +1,15 @@ import { - BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BaseRollupInputs, - MergeRollupInputs, - ParityPublicInputs, - Proof, - PublicCircuitPublicInputs, - PublicKernelCircuitPublicInputs, - RootParityInputs, - RootRollupInputs, - RootRollupPublicInputs, + type BaseOrMergeRollupPublicInputs, + type BaseParityInputs, + type BaseRollupInputs, + type MergeRollupInputs, + type ParityPublicInputs, + type Proof, + type 
PublicCircuitPublicInputs, + type PublicKernelCircuitPublicInputs, + type RootParityInputs, + type RootRollupInputs, + type RootRollupPublicInputs, } from '@aztec/circuits.js'; /** diff --git a/yarn-project/prover-client/src/prover/test_circuit_prover.ts b/yarn-project/prover-client/src/prover/test_circuit_prover.ts index f2a73fac716..0b8338997d9 100644 --- a/yarn-project/prover-client/src/prover/test_circuit_prover.ts +++ b/yarn-project/prover-client/src/prover/test_circuit_prover.ts @@ -1,14 +1,14 @@ -import { CircuitSimulationStats } from '@aztec/circuit-types/stats'; +import { type CircuitSimulationStats } from '@aztec/circuit-types/stats'; import { - BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BaseRollupInputs, - MergeRollupInputs, - ParityPublicInputs, - Proof, - RootParityInputs, - RootRollupInputs, - RootRollupPublicInputs, + type BaseOrMergeRollupPublicInputs, + type BaseParityInputs, + type BaseRollupInputs, + type MergeRollupInputs, + type ParityPublicInputs, + type Proof, + type RootParityInputs, + type RootRollupInputs, + type RootRollupPublicInputs, makeEmptyProof, } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -21,8 +21,6 @@ import { SimulatedBaseRollupArtifact, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, - convertBaseRollupInputsToWitnessMap, - convertBaseRollupOutputsFromWitnessMap, convertMergeRollupInputsToWitnessMap, convertMergeRollupOutputsFromWitnessMap, convertRootParityInputsToWitnessMap, @@ -32,9 +30,9 @@ import { convertSimulatedBaseRollupInputsToWitnessMap, convertSimulatedBaseRollupOutputsFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; -import { SimulationProvider, WASMSimulator } from '@aztec/simulator'; +import { type SimulationProvider, WASMSimulator } from '@aztec/simulator'; -import { CircuitProver } from './interface.js'; +import { type CircuitProver } from './interface.js'; /** * A class for use in testing situations (e2e, unit test etc) diff --git a/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts index 3a0a245aa8d..05dd3868797 100644 --- a/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts @@ -1,6 +1,6 @@ import { MerkleTreeId, - PublicKernelRequest, + type PublicKernelRequest, type SimulationError, type Tx, type UnencryptedFunctionL2Logs, diff --git a/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts index 1fbb003a5fd..73eeec978bf 100644 --- a/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts @@ -1,4 +1,4 @@ -import { PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; +import { type PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, diff --git a/yarn-project/sequencer-client/src/sequencer/public_processor.ts b/yarn-project/sequencer-client/src/sequencer/public_processor.ts index 2b6f2c48337..c28df3c7db5 100644 --- a/yarn-project/sequencer-client/src/sequencer/public_processor.ts +++ b/yarn-project/sequencer-client/src/sequencer/public_processor.ts @@ -2,7 +2,7 @@ import { type BlockProver, type FailedTx, type ProcessedTx, - PublicKernelRequest, + type PublicKernelRequest, type 
SimulationError, Tx, makeEmptyProcessedTx, diff --git a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts index cd9e412a005..1b748f446cb 100644 --- a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts @@ -1,4 +1,4 @@ -import { PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; +import { type PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, diff --git a/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts index 06a850f1053..72a91d81f54 100644 --- a/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts @@ -1,4 +1,4 @@ -import { PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; +import { type PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type Fr, type GlobalVariables, diff --git a/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts index bc5c80dc8e4..ed698140b5b 100644 --- a/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts @@ -1,4 +1,4 @@ -import { PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; +import { type PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, From d0af6b7c10adfc10e02657435deb01513dcebcc4 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 16:04:22 +0000 Subject: [PATCH 12/41] Fixes --- yarn-project/prover-client/package.json | 6 ++++-- yarn-project/prover-client/src/bb/cli.ts | 1 - 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 436e84a14d8..086c67ce0b9 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -50,7 +50,8 @@ "@aztec/world-state": "workspace:^", "commander": "^9.0.0", "lodash.chunk": "^4.2.0", - "tslib": "^2.4.0" + "tslib": "^2.4.0", + "source-map-support": "^0.5.21" }, "devDependencies": { "@jest/globals": "^29.5.0", @@ -60,7 +61,8 @@ "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", "ts-node": "^10.9.1", - "typescript": "^5.0.4" + "typescript": "^5.0.4", + "@types/source-map-support": "^0.5.10" }, "files": [ "dest", diff --git a/yarn-project/prover-client/src/bb/cli.ts b/yarn-project/prover-client/src/bb/cli.ts index 564ab1039f4..f5c761b5527 100644 --- a/yarn-project/prover-client/src/bb/cli.ts +++ b/yarn-project/prover-client/src/bb/cli.ts @@ -10,7 +10,6 @@ const { BB_WORKING_DIRECTORY, BB_BINARY_PATH } = process.env; /** * Returns commander program that defines the CLI. * @param log - Console logger. - * @param debugLogger - Debug logger. * @returns The CLI. 
*/ export function getProgram(log: LogFn): Command { From 5bb7de269f303e70fae05c520c15d95ce5dfbf85 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 16:06:28 +0000 Subject: [PATCH 13/41] Cleanup --- .../tooling/acvm_cli/src/cli/fs/witness.rs | 17 +++-------------- noir/noir-repo/tooling/acvm_cli/src/errors.rs | 3 --- .../noir-repo/tooling/noirc_abi_wasm/src/lib.rs | 2 +- 3 files changed, 4 insertions(+), 18 deletions(-) diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs index cf2fcdb1c05..30ef4278f4b 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs @@ -30,19 +30,6 @@ fn write_to_file(bytes: &[u8], path: &Path) -> String { } } -/// Saves the provided output witnesses to a toml file created at the given location -pub(crate) fn save_witness_string_to_dir>( - output_witness: &String, - witness_dir: P, - file_name: &String, -) -> Result { - let witness_path = witness_dir.as_ref().join(file_name); - let mut file = File::create(&witness_path) - .map_err(|_| FilesystemError::OutputWitnessCreationFailed(file_name.clone()))?; - write!(file, "{}", output_witness) - .map_err(|_| FilesystemError::OutputWitnessWriteFailed(file_name.clone()))?; - Ok(witness_path) -} /// Creates a toml representation of the provided witness map pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result { let mut witness_map: BTreeMap = BTreeMap::new(); @@ -61,7 +48,9 @@ pub(crate) fn save_witness_to_dir>( create_named_dir(witness_dir.as_ref(), "witness"); let witness_path = witness_dir.as_ref().join(witness_name).with_extension("gz"); - let buf: Vec = witnesses.try_into().map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; + let buf: Vec = witnesses + .try_into() + .map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; write_to_file(buf.as_slice(), &witness_path); Ok(witness_path) diff --git a/noir/noir-repo/tooling/acvm_cli/src/errors.rs b/noir/noir-repo/tooling/acvm_cli/src/errors.rs index 923046410ea..8bc79347159 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/errors.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/errors.rs @@ -20,9 +20,6 @@ pub(crate) enum FilesystemError { #[error(" Error: failed to create output witness file {0}.")] OutputWitnessCreationFailed(String), - - #[error(" Error: failed to write output witness file {0}.")] - OutputWitnessWriteFailed(String), } #[derive(Debug, Error)] diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs index 9874a4664ec..fad5abaebba 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi_wasm/src/lib.rs @@ -13,7 +13,7 @@ use noirc_abi::{ Abi, MAIN_RETURN_NAME, }; use serde::Serialize; -use std::{collections::BTreeMap}; +use std::collections::BTreeMap; use gloo_utils::format::JsValueSerdeExt; use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; From 18b93b5e786b3d56dffbe553d51a0c66e8181b99 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 16:14:56 +0000 Subject: [PATCH 14/41] Revert bb changes --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 5360bf7b66e..bab9d9dfae1 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ 
b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -266,12 +266,11 @@ void prove(const std::string& bytecodePath, const std::string& witnessPath, cons { auto constraint_system = get_constraint_system(bytecodePath); auto witness = get_witness(witnessPath); + acir_proofs::AcirComposer acir_composer{ 0, verbose }; acir_composer.create_circuit(constraint_system, witness); - size_t circuit_size = acir_composer.get_dyadic_circuit_size(); - init_bn254_crs(circuit_size); + init_bn254_crs(acir_composer.get_dyadic_circuit_size()); acir_composer.init_proving_key(); - auto proof = acir_composer.create_proof(); if (outputPath == "-") { @@ -324,6 +323,7 @@ bool verify(const std::string& proof_path, const std::string& vk_path) auto vk_data = from_buffer(read_file(vk_path)); acir_composer.load_verification_key(std::move(vk_data)); auto verified = acir_composer.verify_proof(read_file(proof_path)); + vinfo("verified: ", verified); return verified; } From 2966163fa4108d835dc6b31225e7c4735bae3e37 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 16:17:17 +0000 Subject: [PATCH 15/41] Revert further bb changes --- .../dsl/acir_proofs/acir_composer.cpp | 13 ------------- .../dsl/acir_proofs/acir_composer.hpp | 5 ----- .../plonk/proof_system/proving_key/serialize.hpp | 16 ---------------- 3 files changed, 34 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp index e8619a27402..07bcd68137a 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp @@ -41,19 +41,6 @@ std::shared_ptr AcirComposer::init_proving_key() return proving_key_; } -std::shared_ptr AcirComposer::init_proving_key(std::shared_ptr pk) -{ - proving_key_ = pk; - return proving_key_; -} - -std::shared_ptr AcirComposer::init_proving_key( - bb::plonk::proving_key_data&& data, std::shared_ptr> const& crs) -{ - proving_key_ = std::make_shared(std::move(data), crs); - return proving_key_; -} - std::vector AcirComposer::create_proof() { if (!proving_key_) { diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp index 90d646877e0..9ff9b51ace3 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp @@ -21,11 +21,6 @@ class AcirComposer { std::shared_ptr init_proving_key(); - std::shared_ptr init_proving_key(std::shared_ptr pk); - - std::shared_ptr init_proving_key( - bb::plonk::proving_key_data&& data, std::shared_ptr> const& crs); - std::vector create_proof(); void load_verification_key(bb::plonk::verification_key_data&& data); diff --git a/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/serialize.hpp b/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/serialize.hpp index 10f539fd7c2..5900997677d 100644 --- a/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/serialize.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/serialize.hpp @@ -106,14 +106,6 @@ template inline void read_from_file(B& is, std::string const& path, read(is, key.memory_write_records); } -inline void read_from_file(std::string const& path, proving_key_data& key) -{ - auto filename = format(path, "/meta"); - std::ifstream key_file; - key_file.open(filename, std::ios::in | std::ios::binary); - 
read_from_file(key_file, path, key); -} - template inline void write_to_file(B& os, std::string const& path, proving_key& key) { using serialize::write; @@ -146,12 +138,4 @@ template inline void write_to_file(B& os, std::string const& path, write(os, key.memory_write_records); } -inline void write_to_file(std::string const& path, proving_key& key) -{ - auto outputPath = format(path, "/meta"); - std::ofstream key_file; - key_file.open(outputPath, std::ios::out | std::ios::trunc | std::ios::binary); - write_to_file(key_file, path, key); -} - } // namespace bb::plonk From cb54097bd9ae4d1e2d9494b8a222ec1a7883f103 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 17:26:29 +0000 Subject: [PATCH 16/41] Some more cleanup --- .../circuit-types/src/tx/processed_tx.ts | 6 + yarn-project/prover-client/src/bb/cli.ts | 8 +- yarn-project/prover-client/src/bb/execute.ts | 279 ++++++++++-------- .../src/orchestrator/orchestrator.ts | 61 ++-- .../src/prover/bb_prover.test.ts | 10 +- .../prover-client/src/prover/bb_prover.ts | 38 ++- .../src/prover/test_circuit_prover.ts | 2 +- .../simulator/src/simulator/acvm_native.ts | 9 +- 8 files changed, 225 insertions(+), 188 deletions(-) diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index 9b12f386ab0..22d0c589fe8 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -18,6 +18,9 @@ import { makeEmptyProof, } from '@aztec/circuits.js'; +/** + * Used to communicate to the prover which type of circuit to prove + */ export enum PublicKernelType { SETUP, APP_LOGIC, @@ -25,6 +28,9 @@ export enum PublicKernelType { TAIL, } +/** + * Request to the prover to prove a public kernel circuit + */ export type PublicKernelRequest = { type: PublicKernelType; inputs: PublicKernelCircuitPrivateInputs | PublicKernelTailCircuitPrivateInputs; diff --git a/yarn-project/prover-client/src/bb/cli.ts b/yarn-project/prover-client/src/bb/cli.ts index f5c761b5527..b6db780de96 100644 --- a/yarn-project/prover-client/src/bb/cli.ts +++ b/yarn-project/prover-client/src/bb/cli.ts @@ -3,7 +3,7 @@ import { type ProtocolArtifact, ProtocolCircuitArtifacts } from '@aztec/noir-pro import { Command } from 'commander'; -import { generateProvingKeyForNoirCircuit, generateVerificationKeyForNoirCircuit } from './execute.js'; +import { generateKeyForNoirCircuit } from './execute.js'; const { BB_WORKING_DIRECTORY, BB_BINARY_PATH } = process.env; @@ -40,11 +40,12 @@ export function getProgram(log: LogFn): Command { log(`Failed to find circuit ${options.circuit}`); return; } - await generateProvingKeyForNoirCircuit( + await generateKeyForNoirCircuit( options.bbPath, options.workingDirectory, options.circuit, compiledCircuit, + 'pk', log, ); }); @@ -65,11 +66,12 @@ export function getProgram(log: LogFn): Command { log(`Failed to find circuit ${options.circuit}`); return; } - await generateVerificationKeyForNoirCircuit( + await generateKeyForNoirCircuit( options.bbPath, options.workingDirectory, options.circuit, compiledCircuit, + 'vk', log, ); }); diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index 1700044d15c..28c28d6b274 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -14,6 +14,8 @@ export enum BB_RESULT { export type BBSuccess = { status: BB_RESULT.SUCCESS | BB_RESULT.ALREADY_PRESENT; + duration: number; + path?: string; }; export type 
BBFailure = { @@ -39,7 +41,8 @@ export function executeBB( logger: LogFn, resultParser = (code: number) => code === 0, ) { - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { + // spawn the bb process const acvm = proc.spawn(pathToBB, [command, ...args]); acvm.stdout.on('data', data => { const message = data.toString('utf-8').replace(/\n$/, ''); @@ -51,17 +54,30 @@ export function executeBB( }); acvm.on('close', (code: number) => { if (resultParser(code)) { - resolve({ status: BB_RESULT.SUCCESS }); + resolve(BB_RESULT.SUCCESS); } else { - reject('BB execution failed'); + reject(); } }); - }).catch((reason: string) => ({ status: BB_RESULT.FAILURE, reason })); + }).catch(_ => BB_RESULT.FAILURE); } const bytecodeHashFilename = 'bytecode_hash'; -async function generateKeyForNoirCircuit( +/** + * Used for generating either a proving or verification key, will exit early if the key already exists + * It assumes the provided working directory is one where the caller wishes to maintain a permanent set of keys + * It is not considered a temporary directory + * @param pathToBB - The full path to the bb binary + * @param workingDirectory - The directory into which the key should be created + * @param circuitName - An identifier for the circuit + * @param compiledCircuit - The compiled circuit + * @param key - The type of key, either 'pk' or 'vk' + * @param log - A logging function + * @param force - Force the key to be regenerated even if it already exists + * @returns An instance of BBResult + */ +export async function generateKeyForNoirCircuit( pathToBB: string, workingDirectory: string, circuitName: string, @@ -69,12 +85,12 @@ async function generateKeyForNoirCircuit( key: 'vk' | 'pk', log: LogFn, force = false, -) { +): Promise { // The bytecode is written to e.g. /workingDirectory/pk/BaseParityArtifact-bytecode const bytecodePath = `${workingDirectory}/${key}/${circuitName}-bytecode`; const bytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); - // The key generation outputs are written to e.g. /workingDirectory/pk/BaseParityArtifact/ + // The key generation is written to e.g. 
/workingDirectory/pk/BaseParityArtifact/pk // The bytecode hash file is also written here as /workingDirectory/pk/BaseParityArtifact/bytecode-hash const circuitOutputDirectory = `${workingDirectory}/${key}/${circuitName}`; const bytecodeHashPath = `${circuitOutputDirectory}/${bytecodeHashFilename}`; @@ -82,6 +98,7 @@ async function generateKeyForNoirCircuit( const outputPath = `${circuitOutputDirectory}/${key}`; + // Generate the key if we have been told to, or there is no bytecode hash let mustRegenerate = force || (await fs @@ -90,101 +107,105 @@ async function generateKeyForNoirCircuit( .catch(_ => true)); if (!mustRegenerate) { + // Check to see if the bytecode hash has changed from the stored value const data: Buffer = await fs.readFile(bytecodeHashPath).catch(_ => Buffer.alloc(0)); mustRegenerate = data.length == 0 || !data.equals(bytecodeHash); } if (!mustRegenerate) { - const alreadyPresent: BBSuccess = { status: BB_RESULT.ALREADY_PRESENT }; - return { result: alreadyPresent, outputPath: outputPath }; + // No need to generate, early out + return { status: BB_RESULT.ALREADY_PRESENT, duration: 0 }; } + // Check we have access to bb const binaryPresent = await fs .access(pathToBB, fs.constants.R_OK) .then(_ => true) .catch(_ => false); if (!binaryPresent) { - const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; - return { result: failed, outputPath: outputPath }; + return { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; } - // Clear up the circuit output directory removing anything that is there - await fs.rm(circuitOutputDirectory, { recursive: true, force: true }); - await fs.mkdir(circuitOutputDirectory, { recursive: true }); - // Write the bytecode and input witness to the working directory - await fs.writeFile(bytecodePath, bytecode); - - const args = ['-o', outputPath, '-b', bytecodePath]; - const timer = new Timer(); - const result = await executeBB(pathToBB, `write_${key}`, args, log); - const duration = timer.ms(); - await fs.rm(bytecodePath, { force: true }); - await fs.writeFile(bytecodeHashPath, bytecodeHash); - return { result, duration, outputPath }; -} + // We are now going to generate the key + try { + // Clear up the circuit output directory removing anything that is there + await fs.rm(circuitOutputDirectory, { recursive: true, force: true }); + await fs.mkdir(circuitOutputDirectory, { recursive: true }); -export async function generateVerificationKeyForNoirCircuit( - pathToBB: string, - workingDirectory: string, - circuitName: string, - compiledCircuit: NoirCompiledCircuit, - log: LogFn, -) { - const { result, duration, outputPath } = await generateKeyForNoirCircuit( - pathToBB, - workingDirectory, - circuitName, - compiledCircuit, - 'vk', - log, - ); - if (result.status === BB_RESULT.FAILURE) { - log(`Failed to generate verification key for circuit ${circuitName}, reason: ${result.reason}`); - return; - } - if (result.status === BB_RESULT.ALREADY_PRESENT) { - log(`Verification key for circuit ${circuitName} was already present`); - return outputPath; - } - const stats = await fs.stat(outputPath); - log( - `Verification key for circuit ${circuitName} generated in ${duration} ms, size: ${stats.size / (1024 * 1024)} MB`, - ); - return outputPath; -} + // Write the bytecode to the working directory + await fs.writeFile(bytecodePath, bytecode); -export async function generateProvingKeyForNoirCircuit( - pathToBB: string, - workingDirectory: string, - circuitName: string, - 
compiledCircuit: NoirCompiledCircuit, - log: LogFn, -) { - const { result, duration, outputPath } = await generateKeyForNoirCircuit( - pathToBB, - workingDirectory, - circuitName, - compiledCircuit, - 'pk', - log, - ); - if (result.status === BB_RESULT.FAILURE) { - log(`Failed to generate proving key for circuit ${circuitName}, reason: ${result.reason}`); - return; - } - if (result.status === BB_RESULT.ALREADY_PRESENT) { - log(`Proving key for circuit ${circuitName} was already present`); - return outputPath; + // args are the output path and the input bytecode path + const args = ['-o', outputPath, '-b', bytecodePath]; + const timer = new Timer(); + const result = await executeBB(pathToBB, `write_${key}`, args, log); + const duration = timer.ms(); + // Cleanup the bytecode file + await fs.rm(bytecodePath, { force: true }); + if (result == BB_RESULT.SUCCESS) { + // Store the bytecode hash so we don't need to regenerate at a later time + await fs.writeFile(bytecodeHashPath, bytecodeHash); + return { status: BB_RESULT.SUCCESS, duration, path: outputPath }; + } + // Not a great error message here but it is difficult to decipher what comes from bb + return { status: BB_RESULT.FAILURE, reason: `Failed to generate key` }; + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `${error}` }; } - const stats = await fs.stat(outputPath); - log( - `Proving key for circuit ${circuitName} written to ${outputPath} in ${duration} ms, size: ${ - stats.size / (1024 * 1024) - } MB`, - ); - return outputPath; } +/** + * Used for generating either a proving or verification key, will exit early if the key already exists + * It assumes the provided working directory is one where the caller wishes to maintain a permanent set of keys + * It is not considered a temporary directory + * @param pathToBB - The full path to the bb binary + * @param workingDirectory - The directory into which the key should be created + * @param circuitName - An identifier for the circuit + * @param compiledCircuit - The compiled circuit + * @param key - The type of key, either 'pk' or 'vk' + * @param log - A logging function + * @returns The path to the key, or undefined. + */ +// export async function generateKey( +// pathToBB: string, +// workingDirectory: string, +// circuitName: string, +// compiledCircuit: NoirCompiledCircuit, +// key: 'pk' | 'vk', +// log: LogFn, +// ) { +// const { result, duration, outputPath } = await generateKeyForNoirCircuit( +// pathToBB, +// workingDirectory, +// circuitName, +// compiledCircuit, +// key, +// log, +// ); +// if (result.status === BB_RESULT.FAILURE) { +// log(`Failed to generate ${key} key for circuit ${circuitName}, reason: ${result.reason}`); +// return; +// } +// if (result.status === BB_RESULT.ALREADY_PRESENT) { +// log(`Key ${key} for circuit ${circuitName} was already present`); +// return outputPath; +// } +// const stats = await fs.stat(outputPath); +// log(`Key ${key} for circuit ${circuitName} generated in ${duration} ms, size: ${stats.size / (1024 * 1024)} MB`); +// return outputPath; +// } + +/** + * Used for generating proofs of noir circuits. + * It is assumed that the working directory is a temporary and/or random directory used solely for generating this proof. 
+ * @param pathToBB - The full path to the bb binary + * @param workingDirectory - A working directory for use by bb + * @param circuitName - An identifier for the circuit + * @param compiledCircuit - The compiled circuit + * @param inputWitnessFile - The circuit input witness + * @param log - A logging function + * @returns An object containing a result indication, the location of the proof and the duration taken + */ export async function generateProof( pathToBB: string, workingDirectory: string, @@ -192,56 +213,82 @@ export async function generateProof( compiledCircuit: NoirCompiledCircuit, inputWitnessFile: string, log: LogFn, -) { - // The bytecode is written to e.g. /workingDirectory/pk/BaseParityArtifact-bytecode - const bytecodePath = `${workingDirectory}/proof/${circuitName}-bytecode`; +): Promise { + // Clear up the circuit output directory removing anything that is there + await fs.rm(workingDirectory, { recursive: true, force: true }); + await fs.mkdir(workingDirectory, { recursive: true }); + + // The bytecode is written to e.g. /workingDirectory/BaseParityArtifact-bytecode + const bytecodePath = `${workingDirectory}/${circuitName}-bytecode`; const bytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); - // The key generation outputs are written to e.g. /workingDirectory/pk/BaseParityArtifact/ - // The bytecode hash file is also written here as /workingDirectory/pk/BaseParityArtifact/bytecode-hash - const circuitOutputDirectory = `${workingDirectory}/proof/${circuitName}`; + // The proof is written to e.g. /workingDirectory/proof + const outputPath = `${workingDirectory}/proof`; const binaryPresent = await fs .access(pathToBB, fs.constants.R_OK) .then(_ => true) .catch(_ => false); if (!binaryPresent) { - const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; - return { result: failed, outputPath: circuitOutputDirectory, duration: 0 }; + return { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; } - // Clear up the circuit output directory removing anything that is there - await fs.rm(circuitOutputDirectory, { recursive: true, force: true }); - await fs.mkdir(circuitOutputDirectory, { recursive: true }); - // Write the bytecode and input witness to the working directory - await fs.writeFile(bytecodePath, bytecode); - - const outputPath = `${circuitOutputDirectory}/proof`; - const args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; - const command = 'prove'; - const timer = new Timer(); - const logFunction = (message: string) => { - log(`${circuitName} BB out - ${message}`); - }; - const result = await executeBB(pathToBB, command, args, logFunction); - const duration = timer.ms(); - await fs.rm(bytecodePath, { force: true }); - return { result, duration, outputPath }; + try { + // Write the bytecode to the working directory + await fs.writeFile(bytecodePath, bytecode); + const args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile]; + const command = 'prove'; + const timer = new Timer(); + const logFunction = (message: string) => { + log(`${circuitName} BB out - ${message}`); + }; + const result = await executeBB(pathToBB, command, args, logFunction); + const duration = timer.ms(); + // cleanup the bytecode + await fs.rm(bytecodePath, { force: true }); + if (result == BB_RESULT.SUCCESS) { + return { status: BB_RESULT.SUCCESS, duration, path: outputPath }; + } + // Not a great error message here but it is difficult to decipher what comes from bb + return { 
status: BB_RESULT.FAILURE, reason: `Failed to generate proof` }; + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `${error}` }; + } } -export async function verifyProof(pathToBB: string, proofFullPath: string, verificationKeyPath: string, log: LogFn) { +/** + * Used for verifying proofs of noir circuits + * @param pathToBB - The full path to the bb binary + * @param proofFullPath - The full path to the proof to be verified + * @param verificationKeyPath - The full path to the circuit verification key + * @param log - A logging function + * @returns An object containing a result indication and duration taken + */ +export async function verifyProof( + pathToBB: string, + proofFullPath: string, + verificationKeyPath: string, + log: LogFn, +): Promise { const binaryPresent = await fs .access(pathToBB, fs.constants.R_OK) .then(_ => true) .catch(_ => false); if (!binaryPresent) { - const failed: BBFailure = { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; - return { result: failed, duration: 0 }; + return { status: BB_RESULT.FAILURE, reason: `Failed to find bb binary at ${pathToBB}` }; } - const args = ['-p', proofFullPath, '-k', verificationKeyPath]; - const timer = new Timer(); - const result = await executeBB(pathToBB, 'verify', args, log); - const duration = timer.ms(); - return { result, duration }; + try { + const args = ['-p', proofFullPath, '-k', verificationKeyPath]; + const timer = new Timer(); + const result = await executeBB(pathToBB, 'verify', args, log); + const duration = timer.ms(); + if (result == BB_RESULT.SUCCESS) { + return { status: BB_RESULT.SUCCESS, duration }; + } + // Not a great error message here but it is difficult to decipher what comes from bb + return { status: BB_RESULT.FAILURE, reason: `Failed to verify proof` }; + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `${error}` }; + } } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 4d2ccfc2951..c5ea3e41c52 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -1,12 +1,4 @@ -import { - Body, - L2Block, - MerkleTreeId, - type ProcessedTx, - PublicKernelType, - type TxEffect, - toTxEffect, -} from '@aztec/circuit-types'; +import { Body, L2Block, MerkleTreeId, type ProcessedTx, type TxEffect, toTxEffect } from '@aztec/circuit-types'; import { type BlockResult, PROVING_STATUS, @@ -233,34 +225,6 @@ export class ProvingOrchestrator { ); } - public async proveNextPublicFunction( - provingState: ProvingState | undefined, - txIndex: number, - nextFunctionIndex: number, - ) { - if (!provingState?.verifyState()) { - logger(`Not executing public function, state invalid`); - return; - } - const request = provingState.getNextPublicFunction(txIndex, nextFunctionIndex); - if (!request) { - logger(`No Public Functions`); - const tx = provingState.allTxs[txIndex]; - const inputs = provingState.baseRollupInputs[txIndex]; - const treeSnapshots = provingState.txTreeSnapshots[txIndex]; - logger(`Running tx at index ${txIndex}, hash ${tx.hash.toString()}`); - this.enqueueJob(provingState, PROVING_JOB_TYPE.BASE_ROLLUP, () => - this.runBaseRollup(provingState, BigInt(txIndex), tx, inputs, treeSnapshots), - ); - return; - } - logger(`Executing Public Kernel ${PublicKernelType[request.type]}`); - await sleep(100); - this.enqueueJob(provingState, PROVING_JOB_TYPE.PUBLIC_KERNEL, () => - 
this.proveNextPublicFunction(provingState, txIndex, nextFunctionIndex + 1), - ); - } - /** * Marks the block as full and pads it to the full power of 2 block size, no more transactions will be accepted. */ @@ -374,6 +338,27 @@ export class ProvingOrchestrator { this.jobQueue.put(provingJob); } + private proveNextPublicFunction(provingState: ProvingState | undefined, txIndex: number, nextFunctionIndex: number) { + if (!provingState?.verifyState()) { + logger.debug(`Not executing public function, state invalid`); + return Promise.resolve(); + } + const request = provingState.getNextPublicFunction(txIndex, nextFunctionIndex); + if (!request) { + const tx = provingState.allTxs[txIndex]; + const inputs = provingState.baseRollupInputs[txIndex]; + const treeSnapshots = provingState.txTreeSnapshots[txIndex]; + this.enqueueJob(provingState, PROVING_JOB_TYPE.BASE_ROLLUP, () => + this.runBaseRollup(provingState, BigInt(txIndex), tx, inputs, treeSnapshots), + ); + return Promise.resolve(); + } + this.enqueueJob(provingState, PROVING_JOB_TYPE.PUBLIC_KERNEL, () => + this.proveNextPublicFunction(provingState, txIndex, nextFunctionIndex + 1), + ); + return Promise.resolve(); + } + // Updates the merkle trees for a transaction. The first enqueued job for a transaction private async prepareBaseRollupInputs(provingState: ProvingState | undefined, tx: ProcessedTx) { if (!provingState?.verifyState()) { @@ -396,7 +381,6 @@ export class ProvingOrchestrator { } provingState!.baseRollupInputs.push(inputs); provingState!.txTreeSnapshots.push(treeSnapshots); - logger(`Added root ${treeSnapshots.get(MerkleTreeId.NOTE_HASH_TREE)?.root.toString()}`); } // Stores the intermediate inputs prepared for a merge proof @@ -428,7 +412,6 @@ export class ProvingOrchestrator { logger.debug('Not running base rollup, state invalid'); return; } - logger(`Running base at index ${index}, ${inputs.start.noteHashTree.root.toString()}`); const [duration, baseRollupOutputs] = await elapsed(() => executeBaseRollupCircuit(tx, inputs, treeSnapshots, this.prover, logger), ); diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 27ed3edaf7a..05063a2d910 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -40,7 +40,7 @@ const getConfig = async () => { const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; const bbWorkingDirectory = BB_WORKING_DIRECTORY ? BB_WORKING_DIRECTORY : `${tempWorkingDirectory}/bb`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); - logger(`Using native BB binary at ${expectedBBPath} with working directory ${bbWorkingDirectory}`); + logger.verbose(`Using native BB binary at ${expectedBBPath} with working directory ${bbWorkingDirectory}`); const expectedAcvmPath = ACVM_BINARY_PATH ? ACVM_BINARY_PATH @@ -48,7 +48,7 @@ const getConfig = async () => { await fs.access(expectedAcvmPath, fs.constants.R_OK); const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? 
ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`; await fs.mkdir(acvmWorkingDirectory, { recursive: true }); - logger(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); + logger.verbose(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); return { acvmWorkingDirectory, bbWorkingDirectory, @@ -57,7 +57,7 @@ const getConfig = async () => { directoryToCleanup: ACVM_WORKING_DIRECTORY && BB_WORKING_DIRECTORY ? undefined : tempWorkingDirectory, }; } catch (err) { - logger(`Native BB not available, error: ${err}`); + logger.verbose(`Native BB not available, error: ${err}`); return undefined; } }; @@ -108,12 +108,12 @@ describe('prover/bb_prover', () => { it('proves the base rollup', async () => { const txs = await Promise.all([makeBloatedProcessedTx(builderDb, 1)]); - logger('Building base rollup inputs'); + logger.verbose('Building base rollup inputs'); const baseRollupInputs = []; for (const tx of txs) { baseRollupInputs.push(await buildBaseRollupInput(tx, globalVariables, builderDb)); } - logger('Proving base rollups'); + logger.verbose('Proving base rollups'); await Promise.all(baseRollupInputs.map(inputs => prover.getBaseRollupProof(inputs))); }, 600_000); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 33967de2534..1179e4ab114 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -33,7 +33,7 @@ import { NativeACVMSimulator } from '@aztec/simulator'; import { type WitnessMap } from '@noir-lang/types'; import * as fs from 'fs/promises'; -import { BB_RESULT, generateProof, generateVerificationKeyForNoirCircuit, verifyProof } from '../bb/execute.js'; +import { BB_RESULT, generateKeyForNoirCircuit, generateProof, verifyProof } from '../bb/execute.js'; import { type CircuitProver } from './interface.js'; const logger = createDebugLogger('aztec:bb-prover'); @@ -155,16 +155,20 @@ export class BBNativeRollupProver implements CircuitProver { ); const promises = []; for (const circuitName of realCircuits) { - const verificationKeyPromise = generateVerificationKeyForNoirCircuit( + const verificationKeyPromise = generateKeyForNoirCircuit( this.config.bbBinaryPath, this.config.bbWorkingDirectory, circuitName, ProtocolCircuitArtifacts[circuitName as ProtocolArtifact], - logger, + 'vk', + logger.debug, ).then(result => { - if (result) { - this.verificationKeyDirectories.set(circuitName, result); + if (result.status == BB_RESULT.FAILURE) { + logger.error(`Failed to generate verification key for circuit ${circuitName}`); + return; } + logger.info(`Generated verification key for circuit ${circuitName} at ${result.path!}`); + this.verificationKeyDirectories.set(circuitName, result.path!); }); promises.push(verificationKeyPromise); } @@ -185,11 +189,11 @@ export class BBNativeRollupProver implements CircuitProver { const artifact = ProtocolCircuitArtifacts[circuitType]; - logger(`Generating witness data for ${circuitType}`); + logger.debug(`Generating witness data for ${circuitType}`); const outputWitness = await simulator.simulateCircuit(witnessMap, artifact); - logger(`Proving ${circuitType}...`); + logger.debug(`Proving ${circuitType}...`); const provingResult = await generateProof( this.config.bbBinaryPath, @@ -197,19 +201,21 @@ export class BBNativeRollupProver implements CircuitProver { circuitType, artifact, outputWitnessFile, - logger, + logger.debug, ); - if 
(provingResult.result.status === BB_RESULT.FAILURE) { - logger.error(`Failed to generate proof for ${circuitType}: ${provingResult.result.reason}`); - throw new Error(provingResult.result.reason); + if (provingResult.status === BB_RESULT.FAILURE) { + logger.error(`Failed to generate proof for ${circuitType}: ${provingResult.reason}`); + throw new Error(provingResult.reason); } - const proofBuffer = await fs.readFile(provingResult.outputPath); + const proofBuffer = await fs.readFile(provingResult.path!); await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); - logger(`Generated proof for ${circuitType} in ${provingResult.duration} ms, size: ${proofBuffer.length} bytes`); + logger.debug( + `Generated proof for ${circuitType} in ${provingResult.duration} ms, size: ${proofBuffer.length} bytes`, + ); return [outputWitness, new Proof(proofBuffer)]; } @@ -224,16 +230,16 @@ export class BBNativeRollupProver implements CircuitProver { await fs.writeFile(proofFileName, proof.buffer); - const result = await verifyProof(this.config.bbBinaryPath, proofFileName, verificationKeyPath!, logger); + const result = await verifyProof(this.config.bbBinaryPath, proofFileName, verificationKeyPath!, logger.debug); await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); - if (result.result.status === BB_RESULT.FAILURE) { + if (result.status === BB_RESULT.FAILURE) { const errorMessage = `Failed to verify ${circuitType} proof!`; throw new Error(errorMessage); } - logger(`Successfully verified ${circuitType} proof in ${result.duration} ms`); + logger.info(`Successfully verified ${circuitType} proof in ${result.duration} ms`); } private async verifyPreviousRollupProof(previousRollupData: PreviousRollupData) { diff --git a/yarn-project/prover-client/src/prover/test_circuit_prover.ts b/yarn-project/prover-client/src/prover/test_circuit_prover.ts index 0b8338997d9..9231a225bea 100644 --- a/yarn-project/prover-client/src/prover/test_circuit_prover.ts +++ b/yarn-project/prover-client/src/prover/test_circuit_prover.ts @@ -119,7 +119,7 @@ export class TestCircuitProver implements CircuitProver { const result = convertRootRollupOutputsFromWitnessMap(witness); - this.logger(`Simulated root rollup circuit`, { + this.logger.debug(`Simulated root rollup circuit`, { eventName: 'circuit-simulation', circuitName: 'root-rollup', duration, diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index 5e53db8b0c7..4f99d406d12 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -8,8 +8,6 @@ import fs from 'fs/promises'; import { type SimulationProvider } from './simulation_provider.js'; -const logger = createDebugLogger('aztec:acvm_native'); - /** * Parses a TOML format witness map string into a Map structure * @param outputString - The witness map in TOML format @@ -22,12 +20,7 @@ function parseIntoWitnessMap(outputString: string) { .filter((line: string) => line.length) .map((line: string) => { const pair = line.replaceAll(' ', '').split('='); - try { - return [Number(pair[0]), pair[1].replaceAll('"', '')]; - } catch (err) { - logger(`Error: ${pair[1]}, line: ${line}`); - return [0, '']; - } + return [Number(pair[0]), pair[1].replaceAll('"', '')]; }), ); } From 2352c243ac35ac6a5fe4a7e11c9e0596447e211b Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 17:48:31 +0000 Subject: [PATCH 17/41] Further cleanup --- .../noir-protocol-circuits-types/src/index.ts | 30 
++++++++++++++++--- .../src/orchestrator/orchestrator.ts | 3 ++ .../prover-client/src/prover/bb_prover.ts | 14 ++++----- .../prover-client/src/prover/interface.ts | 3 -- .../src/sequencer/abstract_phase_manager.ts | 1 + .../src/sequencer/app_logic_phase_manager.ts | 1 + .../src/sequencer/setup_phase_manager.ts | 1 + .../src/sequencer/tail_phase_manager.ts | 3 +- .../src/sequencer/teardown_phase_manager.ts | 3 +- .../simulator/src/simulator/acvm_native.ts | 2 +- 10 files changed, 43 insertions(+), 18 deletions(-) diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index 7d4c3d72277..568a5071356 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -135,10 +135,7 @@ export const MergeRollupArtifact = MergeRollupJson as NoirCompiledCircuit; export const RootRollupArtifact = RootRollupJson as NoirCompiledCircuit; -export type ProtocolArtifact = - | 'PrivateKernelInitArtifact' - | 'PrivateKernelInnerArtifact' - | 'PrivateKernelTailArtifact' +export type ServerProtocolArtifact = | 'PublicKernelSetupArtifact' | 'PublicKernelAppLogicArtifact' | 'PublicKernelTeardownArtifact' @@ -149,6 +146,31 @@ export type ProtocolArtifact = | 'MergeRollupArtifact' | 'RootRollupArtifact'; +export type ClientProtocolArtifact = + | 'PrivateKernelInitArtifact' + | 'PrivateKernelInnerArtifact' + | 'PrivateKernelTailArtifact'; + +export type ProtocolArtifact = ServerProtocolArtifact | ClientProtocolArtifact; + +export const ServerCircuitArtifacts: Record = { + PublicKernelSetupArtifact: PublicKernelSetupArtifact, + PublicKernelAppLogicArtifact: PublicKernelAppLogicArtifact, + PublicKernelTeardownArtifact: PublicKernelTeardownArtifact, + PublicKernelTailArtifact: PublicKernelTailArtifact, + BaseParityArtifact: BaseParityArtifact, + RootParityArtifact: RootParityArtifact, + BaseRollupArtifact: BaseRollupArtifact, + MergeRollupArtifact: MergeRollupArtifact, + RootRollupArtifact: RootRollupArtifact, +}; + +export const ClientCircuitArtifacts: Record = { + PrivateKernelInitArtifact: PrivateKernelInitArtifact, + PrivateKernelInnerArtifact: PrivateKernelInnerArtifact, + PrivateKernelTailArtifact: PrivateKernelTailArtifact, +}; + export const ProtocolCircuitArtifacts: Record = { PrivateKernelInitArtifact: PrivateKernelInitArtifact, PrivateKernelInnerArtifact: PrivateKernelInnerArtifact, diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index c5ea3e41c52..926d6bc5ccb 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -242,6 +242,7 @@ export class ProvingOrchestrator { for (let i = this.provingState.transactionsReceived; i < this.provingState.totalNumTxs; i++) { const paddingTxIndex = this.provingState.addNewTx(this.provingState.emptyTx); await this.prepareBaseRollupInputs(this.provingState, this.provingState!.emptyTx); + // TODO(@Phil): Properly encapsulate this stuff const tx = this.provingState.allTxs[paddingTxIndex]; const inputs = this.provingState.baseRollupInputs[paddingTxIndex]; const treeSnapshots = this.provingState.txTreeSnapshots[paddingTxIndex]; @@ -345,6 +346,7 @@ export class ProvingOrchestrator { } const request = provingState.getNextPublicFunction(txIndex, nextFunctionIndex); if (!request) { + // TODO(@Phil): Properly encapsulate this stuff const tx = provingState.allTxs[txIndex]; const 
inputs = provingState.baseRollupInputs[txIndex]; const treeSnapshots = provingState.txTreeSnapshots[txIndex]; @@ -379,6 +381,7 @@ export class ProvingOrchestrator { logger.debug(`Discarding proving job, state no longer valid`); return; } + // TODO(@Phil): Properly encapsulate this stuff provingState!.baseRollupInputs.push(inputs); provingState!.txTreeSnapshots.push(treeSnapshots); } diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 1179e4ab114..5ada57d6d75 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -15,8 +15,9 @@ import { import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { - type ProtocolArtifact, ProtocolCircuitArtifacts, + ServerCircuitArtifacts, + type ServerProtocolArtifact, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, convertBaseRollupInputsToWitnessMap, @@ -150,16 +151,13 @@ export class BBNativeRollupProver implements CircuitProver { } private async init() { - const realCircuits = Object.keys(ProtocolCircuitArtifacts).filter( - (n: string) => !n.includes('Simulated') && !n.includes('PrivateKernel'), - ); const promises = []; - for (const circuitName of realCircuits) { + for (const circuitName in ServerCircuitArtifacts) { const verificationKeyPromise = generateKeyForNoirCircuit( this.config.bbBinaryPath, this.config.bbWorkingDirectory, circuitName, - ProtocolCircuitArtifacts[circuitName as ProtocolArtifact], + ServerCircuitArtifacts[circuitName as ServerProtocolArtifact], 'vk', logger.debug, ).then(result => { @@ -175,7 +173,7 @@ export class BBNativeRollupProver implements CircuitProver { await Promise.all(promises); } - private async createProof(witnessMap: WitnessMap, circuitType: ProtocolArtifact): Promise<[WitnessMap, Proof]> { + private async createProof(witnessMap: WitnessMap, circuitType: ServerProtocolArtifact): Promise<[WitnessMap, Proof]> { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); @@ -220,7 +218,7 @@ export class BBNativeRollupProver implements CircuitProver { return [outputWitness, new Proof(proofBuffer)]; } - private async verifyProof(circuitType: ProtocolArtifact, proof: Proof) { + private async verifyProof(circuitType: ServerProtocolArtifact, proof: Proof) { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); diff --git a/yarn-project/prover-client/src/prover/interface.ts b/yarn-project/prover-client/src/prover/interface.ts index 1d84f91bef6..2764f76dbf3 100644 --- a/yarn-project/prover-client/src/prover/interface.ts +++ b/yarn-project/prover-client/src/prover/interface.ts @@ -25,7 +25,6 @@ export interface CircuitProver { /** * Creates a proof for the given input. * @param input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. */ getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]>; @@ -38,14 +37,12 @@ export interface CircuitProver { /** * Creates a proof for the given input. * @param input - Input to the circuit. 
- * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. */ getMergeRollupProof(input: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]>; /** * Creates a proof for the given input. * @param input - Input to the circuit. - * @param publicInputs - Public inputs of the circuit obtained via simulation, modified by this call. */ getRootRollupProof(input: RootRollupInputs): Promise<[RootRollupPublicInputs, Proof]>; } diff --git a/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts index ea675193955..7aa1bf6bd85 100644 --- a/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts @@ -269,6 +269,7 @@ export abstract class AbstractPhaseManager { const circuitResult = await this.runKernelCircuit(kernelOutput, kernelProof, callData); kernelOutput = circuitResult[1]; + // Capture the inputs to the kernel circuit for later proving publicKernelInputs.push(circuitResult[0]); // sanity check. Note we can't expect them to just be equal, because e.g. diff --git a/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts index 3cbf661ed2e..831714e4468 100644 --- a/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts @@ -63,6 +63,7 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { await this.publicStateDB.checkpoint(); } + // Return a list of app logic proving requests const kernelRequests = kernelInputs.map(input => { const request: PublicKernelRequest = { type: PublicKernelType.APP_LOGIC, diff --git a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts index 7ecd7ff9195..d1735e52500 100644 --- a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts @@ -46,6 +46,7 @@ export class SetupPhaseManager extends AbstractPhaseManager { tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs); await this.publicStateDB.checkpoint(); + // Return a list of setup proving requests const kernelRequests = kernelInputs.map(input => { const request: PublicKernelRequest = { type: PublicKernelType.SETUP, diff --git a/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts index acb343fac5d..87fbf9b460e 100644 --- a/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts @@ -51,8 +51,9 @@ export class TailPhaseManager extends AbstractPhaseManager { // commit the state updates from this transaction await this.publicStateDB.commit(); + // Return a tail proving request const request: PublicKernelRequest = { - type: PublicKernelType.APP_LOGIC, + type: PublicKernelType.TAIL, inputs: inputs, }; diff --git a/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts index 0c67b030a63..0fd37bb9340 100644 --- a/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts @@ -46,9 +46,10 @@ export class 
TeardownPhaseManager extends AbstractPhaseManager { tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs); await this.publicStateDB.checkpoint(); + // Return a list of teardown proving requests const kernelRequests = kernelInputs.map(input => { const request: PublicKernelRequest = { - type: PublicKernelType.APP_LOGIC, + type: PublicKernelType.TEARDOWN, inputs: input, }; return request; diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index 4f99d406d12..f64c28f4471 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -31,7 +31,7 @@ function parseIntoWitnessMap(outputString: string) { * @param bytecode - The circuit bytecode * @param workingDirectory - A directory to use for temporary files by the ACVM * @param pathToAcvm - The path to the ACVM binary - * @param outputFilename - If specified, the output will be stored as a file, encoded using Bincode, instead of being streamed back over stdout + * @param outputFilename - If specified, the output will be stored as a file, encoded using Bincode * @returns The completed partial witness outputted from the circuit */ export async function executeNativeCircuit( From e0b33cee03091a1a598e53717ecbb3ddb728e34e Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 17:54:08 +0000 Subject: [PATCH 18/41] Comments and cleanup --- yarn-project/prover-client/package.json | 8 ++++---- yarn-project/prover-client/src/prover/bb_prover.ts | 8 +++++++- yarn-project/yarn.lock | 2 ++ 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 086c67ce0b9..a26c6f79243 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -50,19 +50,19 @@ "@aztec/world-state": "workspace:^", "commander": "^9.0.0", "lodash.chunk": "^4.2.0", - "tslib": "^2.4.0", - "source-map-support": "^0.5.21" + "source-map-support": "^0.5.21", + "tslib": "^2.4.0" }, "devDependencies": { "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", "@types/memdown": "^3.0.0", "@types/node": "^18.7.23", + "@types/source-map-support": "^0.5.10", "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", "ts-node": "^10.9.1", - "typescript": "^5.0.4", - "@types/source-map-support": "^0.5.10" + "typescript": "^5.0.4" }, "files": [ "dest", diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 5ada57d6d75..ba42ab28f63 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -177,20 +177,25 @@ export class BBNativeRollupProver implements CircuitProver { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); + + // Have the ACVM write the partial witness here const outputWitnessFile = `${bbWorkingDirectory}/partial-witness.gz`; + // Generate the partial witness using the ACVM + // A further temp directory will be created beneath ours and then cleaned up after the partial witness has been copied to our specified location const simulator = new NativeACVMSimulator( this.config.acvmWorkingDirectory, this.config.acvmBinaryPath, outputWitnessFile, ); - const artifact = ProtocolCircuitArtifacts[circuitType]; + const artifact = 
ServerCircuitArtifacts[circuitType]; logger.debug(`Generating witness data for ${circuitType}`); const outputWitness = await simulator.simulateCircuit(witnessMap, artifact); + // Now prove the circuit from the generated witness logger.debug(`Proving ${circuitType}...`); const provingResult = await generateProof( @@ -207,6 +212,7 @@ export class BBNativeRollupProver implements CircuitProver { throw new Error(provingResult.reason); } + // Read the proof and then cleanup up our temporary directory const proofBuffer = await fs.readFile(provingResult.path!); await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 2107d924932..a956bbe91b0 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -755,10 +755,12 @@ __metadata: "@types/jest": ^29.5.0 "@types/memdown": ^3.0.0 "@types/node": ^18.7.23 + "@types/source-map-support": ^0.5.10 commander: ^9.0.0 jest: ^29.5.0 jest-mock-extended: ^3.0.3 lodash.chunk: ^4.2.0 + source-map-support: ^0.5.21 ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 From ff861d131bdc9aaa60e63e682232cf86c3e59cab Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Wed, 10 Apr 2024 19:46:35 +0000 Subject: [PATCH 19/41] Fixes --- yarn-project/prover-client/package.json | 2 +- yarn-project/prover-client/src/prover/bb_prover.ts | 5 ++--- yarn-project/simulator/src/simulator/acvm_native.ts | 1 - 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index a26c6f79243..a831caf6783 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -20,7 +20,7 @@ "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "bb": "node --no-warnings ./dest/bb/index.js", - "test": "DEBUG='aztec:*' NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" }, "inherits": [ "../package.common.json" ], diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index ba42ab28f63..5af751c7814 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -15,7 +15,6 @@ import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { - ProtocolCircuitArtifacts, ServerCircuitArtifacts, type ServerProtocolArtifact, convertBaseParityInputsToWitnessMap, @@ -50,7 +49,7 @@ export type BBProverConfig = { * Prover implementation that uses barretenberg native proving */ export class BBNativeRollupProver implements CircuitProver { - private verificationKeyDirectories: Map<string, string> = new Map(); + private verificationKeyDirectories: Map<ServerProtocolArtifact, string> = new Map(); constructor(private config: BBProverConfig) {} static async new(config: BBProverConfig) { @@ -166,7 +165,7 @@ export class BBNativeRollupProver implements CircuitProver { return; } logger.info(`Generated verification key for circuit ${circuitName} at ${result.path!}`); - this.verificationKeyDirectories.set(circuitName, result.path!); + this.verificationKeyDirectories.set(circuitName as ServerProtocolArtifact, result.path!); }); promises.push(verificationKeyPromise); } diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts 
b/yarn-project/simulator/src/simulator/acvm_native.ts index f64c28f4471..b70cdab2ebb 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -1,5 +1,4 @@ import { randomBytes } from '@aztec/foundation/crypto'; -import { createDebugLogger } from '@aztec/foundation/log'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; import { type WitnessMap } from '@noir-lang/types'; From 4d263c8936a8699f320ca67bfc45627469b1288b Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 10:14:58 +0000 Subject: [PATCH 20/41] Test fixes --- .../src/orchestrator/orchestrator.test.ts | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts index 290d0c463b5..128ddbbc46f 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts @@ -20,6 +20,7 @@ import { PUBLIC_DATA_SUBTREE_HEIGHT, PublicDataTreeLeaf, type RootRollupPublicInputs, + makeEmptyProof, } from '@aztec/circuits.js'; import { fr, @@ -35,6 +36,7 @@ import { openTmpStore } from '@aztec/kv-store/utils'; import { WASMSimulator } from '@aztec/simulator'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; +import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type MemDown, default as memdown } from 'memdown'; @@ -142,8 +144,10 @@ describe('prover/tx-prover', () => { }; describe('error handling', () => { - const mockProver: MockProxy = mock(); + let mockProver: CircuitProver; + beforeEach(async () => { + mockProver = new TestCircuitProver(new WASMSimulator()); builder = await ProvingOrchestrator.new(builderDb, mockProver); }); @@ -151,31 +155,31 @@ describe('prover/tx-prover', () => { [ 'Base Rollup Failed', () => { - mockProver.getBaseRollupProof.mockRejectedValue('Base Rollup Failed'); + jest.spyOn(mockProver, 'getBaseRollupProof').mockRejectedValue('Base Rollup Failed'); }, ], [ 'Merge Rollup Failed', () => { - mockProver.getMergeRollupProof.mockRejectedValue('Merge Rollup Failed'); + jest.spyOn(mockProver, 'getMergeRollupProof').mockRejectedValue('Merge Rollup Failed'); }, ], [ 'Root Rollup Failed', () => { - mockProver.getRootRollupProof.mockRejectedValue('Root Rollup Failed'); + jest.spyOn(mockProver, 'getRootRollupProof').mockRejectedValue('Root Rollup Failed'); }, ], [ 'Base Parity Failed', () => { - mockProver.getBaseParityProof.mockRejectedValue('Base Parity Failed'); + jest.spyOn(mockProver, 'getBaseParityProof').mockRejectedValue('Base Parity Failed'); }, ], [ 'Root Parity Failed', () => { - mockProver.getRootParityProof.mockRejectedValue('Root Parity Failed'); + jest.spyOn(mockProver, 'getRootParityProof').mockRejectedValue('Root Parity Failed'); }, ], ] as const)( From 2ea7278997fb5cf6652843b5d9f55c2fb82660be Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 14:22:04 +0000 Subject: [PATCH 21/41] Cleanup and breaking up test files --- yarn-project/prover-client/src/bb/cli.ts | 13 + yarn-project/prover-client/src/bb/execute.ts | 68 +- .../prover-client/src/mocks/fixtures.ts | 120 +++- .../src/orchestrator/orchestrator.test.ts | 618 ------------------ .../orchestrator/orchestrator_errors.test.ts | 160 +++++ .../orchestrator_failures.test.ts | 127 ++++ .../orchestrator_lifecycle.test.ts | 131 ++++ 
.../orchestrator_mixed_blocks.test.ts | 183 ++++++ .../orchestrator_multiple_blocks.test.ts | 99 +++ .../orchestrator_single_blocks.test.ts | 187 ++++++ .../src/prover/bb_prover.test.ts | 71 +- .../prover-client/src/prover/bb_prover.ts | 4 +- .../src/prover/test_circuit_prover.ts | 6 +- .../simulator/src/simulator/acvm_native.ts | 133 ++-- 14 files changed, 1135 insertions(+), 785 deletions(-) delete mode 100644 yarn-project/prover-client/src/orchestrator/orchestrator.test.ts create mode 100644 yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts create mode 100644 yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts create mode 100644 yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts create mode 100644 yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts create mode 100644 yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts create mode 100644 yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts diff --git a/yarn-project/prover-client/src/bb/cli.ts b/yarn-project/prover-client/src/bb/cli.ts index b6db780de96..5358355d6ae 100644 --- a/yarn-project/prover-client/src/bb/cli.ts +++ b/yarn-project/prover-client/src/bb/cli.ts @@ -2,6 +2,7 @@ import { type LogFn } from '@aztec/foundation/log'; import { type ProtocolArtifact, ProtocolCircuitArtifacts } from '@aztec/noir-protocol-circuits-types'; import { Command } from 'commander'; +import * as fs from 'fs/promises'; import { generateKeyForNoirCircuit } from './execute.js'; @@ -40,6 +41,12 @@ export function getProgram(log: LogFn): Command { log(`Failed to find circuit ${options.circuit}`); return; } + try { + await fs.access(options.workingDirectory); + } catch (error) { + log(`Working directory does not exist`); + return; + } await generateKeyForNoirCircuit( options.bbPath, options.workingDirectory, @@ -66,6 +73,12 @@ export function getProgram(log: LogFn): Command { log(`Failed to find circuit ${options.circuit}`); return; } + try { + await fs.access(options.workingDirectory); + } catch (error) { + log(`Working directory does not exist`); + return; + } await generateKeyForNoirCircuit( options.bbPath, options.workingDirectory, diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index 28c28d6b274..87cfba93af7 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -63,6 +63,8 @@ export function executeBB( } const bytecodeHashFilename = 'bytecode_hash'; +const bytecodeFilename = 'bytecode'; +const proofFileName = 'proof'; /** * Used for generating either a proving or verification key, will exit early if the key already exists @@ -86,18 +88,22 @@ export async function generateKeyForNoirCircuit( log: LogFn, force = false, ): Promise { - // The bytecode is written to e.g. /workingDirectory/pk/BaseParityArtifact-bytecode - const bytecodePath = `${workingDirectory}/${key}/${circuitName}-bytecode`; const bytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); // The key generation is written to e.g. /workingDirectory/pk/BaseParityArtifact/pk // The bytecode hash file is also written here as /workingDirectory/pk/BaseParityArtifact/bytecode-hash + // The bytecode is written to e.g. 
/workingDirectory/pk/BaseParityArtifact/bytecode + // The bytecode is removed after the key is generated, leaving just the hash file const circuitOutputDirectory = `${workingDirectory}/${key}/${circuitName}`; const bytecodeHashPath = `${circuitOutputDirectory}/${bytecodeHashFilename}`; + const bytecodePath = `${circuitOutputDirectory}/${bytecodeFilename}`; const bytecodeHash = sha256(bytecode); const outputPath = `${circuitOutputDirectory}/${key}`; + // ensure the directory exists + await fs.mkdir(circuitOutputDirectory, { recursive: true }); + // Generate the key if we have been told to, or there is no bytecode hash let mustRegenerate = force || @@ -114,7 +120,7 @@ export async function generateKeyForNoirCircuit( if (!mustRegenerate) { // No need to generate, early out - return { status: BB_RESULT.ALREADY_PRESENT, duration: 0 }; + return { status: BB_RESULT.ALREADY_PRESENT, duration: 0, path: outputPath }; } // Check we have access to bb @@ -128,10 +134,6 @@ export async function generateKeyForNoirCircuit( // We are now going to generate the key try { - // Clear up the circuit output directory removing anything that is there - await fs.rm(circuitOutputDirectory, { recursive: true, force: true }); - await fs.mkdir(circuitOutputDirectory, { recursive: true }); - // Write the bytecode to the working directory await fs.writeFile(bytecodePath, bytecode); @@ -154,47 +156,6 @@ export async function generateKeyForNoirCircuit( } } -/** - * Used for generating either a proving or verification key, will exit early if the key already exists - * It assumes the provided working directory is one where the caller wishes to maintain a permanent set of keys - * It is not considered a temporary directory - * @param pathToBB - The full path to the bb binary - * @param workingDirectory - The directory into which the key should be created - * @param circuitName - An identifier for the circuit - * @param compiledCircuit - The compiled circuit - * @param key - The type of key, either 'pk' or 'vk' - * @param log - A logging function - * @returns The path to the key, or undefined. - */ -// export async function generateKey( -// pathToBB: string, -// workingDirectory: string, -// circuitName: string, -// compiledCircuit: NoirCompiledCircuit, -// key: 'pk' | 'vk', -// log: LogFn, -// ) { -// const { result, duration, outputPath } = await generateKeyForNoirCircuit( -// pathToBB, -// workingDirectory, -// circuitName, -// compiledCircuit, -// key, -// log, -// ); -// if (result.status === BB_RESULT.FAILURE) { -// log(`Failed to generate ${key} key for circuit ${circuitName}, reason: ${result.reason}`); -// return; -// } -// if (result.status === BB_RESULT.ALREADY_PRESENT) { -// log(`Key ${key} for circuit ${circuitName} was already present`); -// return outputPath; -// } -// const stats = await fs.stat(outputPath); -// log(`Key ${key} for circuit ${circuitName} generated in ${duration} ms, size: ${stats.size / (1024 * 1024)} MB`); -// return outputPath; -// } - /** * Used for generating proofs of noir circuits. * It is assumed that the working directory is a temporary and/or random directory used solely for generating this proof. 
@@ -214,16 +175,19 @@ export async function generateProof( inputWitnessFile: string, log: LogFn, ): Promise { - // Clear up the circuit output directory removing anything that is there - await fs.rm(workingDirectory, { recursive: true, force: true }); - await fs.mkdir(workingDirectory, { recursive: true }); + // Check that the working directory exists + try { + await fs.access(workingDirectory); + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `Working directory ${workingDirectory} does not exist` }; + } // The bytecode is written to e.g. /workingDirectory/BaseParityArtifact-bytecode const bytecodePath = `${workingDirectory}/${circuitName}-bytecode`; const bytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); // The proof is written to e.g. /workingDirectory/proof - const outputPath = `${workingDirectory}/proof`; + const outputPath = `${workingDirectory}/${proofFileName}`; const binaryPresent = await fs .access(pathToBB, fs.constants.R_OK) diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index be699c58c21..deaa19daf7a 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -1,4 +1,10 @@ -import { makeProcessedTx, mockTx } from '@aztec/circuit-types'; +import { + MerkleTreeId, + ProcessedTx, + makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, + makeProcessedTx, + mockTx, +} from '@aztec/circuit-types'; import { Fr, KernelCircuitPublicInputs, @@ -6,12 +12,85 @@ import { MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + NULLIFIER_TREE_HEIGHT, + PUBLIC_DATA_SUBTREE_HEIGHT, + PublicDataTreeLeaf, PublicDataUpdateRequest, } from '@aztec/circuits.js'; import { fr, makeProof } from '@aztec/circuits.js/testing'; import { makeTuple } from '@aztec/foundation/array'; +import { padArrayEnd } from '@aztec/foundation/collection'; +import { randomBytes } from '@aztec/foundation/crypto'; +import { DebugLogger } from '@aztec/foundation/log'; +import { fileURLToPath } from '@aztec/foundation/url'; +import { NativeACVMSimulator, SimulationProvider, WASMSimulator } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; +import * as fs from 'fs/promises'; +import path from 'path'; + +const { + BB_RELEASE_DIR = 'cpp/build/bin', + TEMP_DIR = '/tmp', + BB_BINARY_PATH = '', + BB_WORKING_DIRECTORY = '', + NOIR_RELEASE_DIR = 'noir-repo/target/release', + ACVM_BINARY_PATH = '', + ACVM_WORKING_DIRECTORY = '', +} = process.env; + +// Determines if we have access to the bb binary and a tmp folder for temp files +export const getConfig = async (logger: DebugLogger) => { + try { + const expectedBBPath = BB_BINARY_PATH + ? BB_BINARY_PATH + : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../barretenberg/', BB_RELEASE_DIR)}/bb`; + await fs.access(expectedBBPath, fs.constants.R_OK); + const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; + const bbWorkingDirectory = BB_WORKING_DIRECTORY ? BB_WORKING_DIRECTORY : `${tempWorkingDirectory}/bb`; + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + logger.verbose(`Using native BB binary at ${expectedBBPath} with working directory ${bbWorkingDirectory}`); + + const expectedAcvmPath = ACVM_BINARY_PATH + ? 
ACVM_BINARY_PATH + : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../noir/', NOIR_RELEASE_DIR)}/acvm`; + await fs.access(expectedAcvmPath, fs.constants.R_OK); + const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`; + await fs.mkdir(acvmWorkingDirectory, { recursive: true }); + logger.verbose(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); + return { + acvmWorkingDirectory, + bbWorkingDirectory, + expectedAcvmPath, + expectedBBPath, + directoryToCleanup: ACVM_WORKING_DIRECTORY && BB_WORKING_DIRECTORY ? undefined : tempWorkingDirectory, + }; + } catch (err) { + logger.verbose(`Native BB not available, error: ${err}`); + return undefined; + } +}; + +export async function getSimulationProvider( + config: { acvmWorkingDirectory: string | undefined; acvmBinaryPath: string | undefined }, + logger?: DebugLogger, +): Promise { + if (config.acvmBinaryPath && config.acvmWorkingDirectory) { + try { + await fs.access(config.acvmBinaryPath, fs.constants.R_OK); + await fs.mkdir(config.acvmWorkingDirectory, { recursive: true }); + logger?.info( + `Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`, + ); + return new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath); + } catch { + logger?.warn(`Failed to access ACVM at ${config.acvmBinaryPath}, falling back to WASM`); + } + } + logger?.info('Using WASM ACVM simulation'); + return new WASMSimulator(); +} + export const makeBloatedProcessedTx = async (builderDb: MerkleTreeOperations, seed = 0x1) => { seed *= MAX_NEW_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds const tx = mockTx(seed); @@ -41,3 +120,42 @@ export const makeBloatedProcessedTx = async (builderDb: MerkleTreeOperations, se return processedTx; }; + +export const makeEmptyProcessedTx = async (builderDb: MerkleTreeOperations, chainId: Fr, version: Fr) => { + const header = await builderDb.buildInitialHeader(); + return makeEmptyProcessedTxFromHistoricalTreeRoots(header, chainId, version); +}; + +// Updates the expectedDb trees based on the new note hashes, contracts, and nullifiers from these txs +export const updateExpectedTreesFromTxs = async (db: MerkleTreeOperations, txs: ProcessedTx[]) => { + await db.appendLeaves( + MerkleTreeId.NOTE_HASH_TREE, + txs.flatMap(tx => + padArrayEnd( + tx.data.end.newNoteHashes.filter(x => !x.isZero()), + Fr.zero(), + MAX_NEW_NOTE_HASHES_PER_TX, + ), + ), + ); + await db.batchInsert( + MerkleTreeId.NULLIFIER_TREE, + txs.flatMap(tx => + padArrayEnd( + tx.data.end.newNullifiers.filter(x => !x.isZero()), + Fr.zero(), + MAX_NEW_NULLIFIERS_PER_TX, + ).map(x => x.toBuffer()), + ), + NULLIFIER_TREE_HEIGHT, + ); + for (const tx of txs) { + await db.batchInsert( + MerkleTreeId.PUBLIC_DATA_TREE, + tx.data.end.publicDataUpdateRequests.map(write => { + return new PublicDataTreeLeaf(write.leafSlot, write.newValue).toBuffer(); + }), + PUBLIC_DATA_SUBTREE_HEIGHT, + ); + } +}; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts deleted file mode 100644 index 128ddbbc46f..00000000000 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts +++ /dev/null @@ -1,618 +0,0 @@ -import { - MerkleTreeId, - PROVING_STATUS, - type ProcessedTx, - type ProvingFailure, - type PublicKernelRequest, - PublicKernelType, - makeEmptyProcessedTx as 
makeEmptyProcessedTxFromHistoricalTreeRoots, -} from '@aztec/circuit-types'; -import { - AztecAddress, - type BaseOrMergeRollupPublicInputs, - EthAddress, - Fr, - GlobalVariables, - MAX_NEW_NOTE_HASHES_PER_TX, - MAX_NEW_NULLIFIERS_PER_TX, - NULLIFIER_SUBTREE_HEIGHT, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, - PUBLIC_DATA_SUBTREE_HEIGHT, - PublicDataTreeLeaf, - type RootRollupPublicInputs, - makeEmptyProof, -} from '@aztec/circuits.js'; -import { - fr, - makeBaseOrMergeRollupPublicInputs, - makeParityPublicInputs, - makePublicKernelCircuitPrivateInputs, - makeRootRollupPublicInputs, -} from '@aztec/circuits.js/testing'; -import { range } from '@aztec/foundation/array'; -import { padArrayEnd, times } from '@aztec/foundation/collection'; -import { sleep } from '@aztec/foundation/sleep'; -import { openTmpStore } from '@aztec/kv-store/utils'; -import { WASMSimulator } from '@aztec/simulator'; -import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; - -import { jest } from '@jest/globals'; -import { type MockProxy, mock } from 'jest-mock-extended'; -import { type MemDown, default as memdown } from 'memdown'; - -import { makeBloatedProcessedTx } from '../mocks/fixtures.js'; -import { type CircuitProver } from '../prover/index.js'; -import { TestCircuitProver } from '../prover/test_circuit_prover.js'; -import { type RollupSimulator } from '../simulator/rollup.js'; -import { ProvingOrchestrator } from './orchestrator.js'; - -export const createMemDown = () => (memdown as any)() as MemDown; - -describe('prover/tx-prover', () => { - let builder: ProvingOrchestrator; - let builderDb: MerkleTreeOperations; - let expectsDb: MerkleTreeOperations; - - let simulator: MockProxy; - const prover = new TestCircuitProver(new WASMSimulator()); - - let blockNumber: number; - let baseRollupOutputLeft: BaseOrMergeRollupPublicInputs; - let baseRollupOutputRight: BaseOrMergeRollupPublicInputs; - let rootRollupOutput: RootRollupPublicInputs; - let mockL1ToL2Messages: Fr[]; - - let globalVariables: GlobalVariables; - - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const feeRecipient = AztecAddress.ZERO; - - const makeGlobals = (blockNumber: number) => { - return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - }; - - beforeEach(async () => { - blockNumber = 3; - globalVariables = makeGlobals(blockNumber); - - builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); - expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); - simulator = mock(); - builder = new ProvingOrchestrator(builderDb, prover, 1); - - // Create mock l1 to L2 messages - mockL1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)); - - // Create mock outputs for simulator - baseRollupOutputLeft = makeBaseOrMergeRollupPublicInputs(0, globalVariables); - baseRollupOutputRight = makeBaseOrMergeRollupPublicInputs(0, globalVariables); - rootRollupOutput = makeRootRollupPublicInputs(0); - rootRollupOutput.header.globalVariables = globalVariables; - - // Set up mocks - simulator.baseParityCircuit - .mockResolvedValueOnce(makeParityPublicInputs(1)) - .mockResolvedValue(makeParityPublicInputs(2)) - .mockResolvedValue(makeParityPublicInputs(3)) - .mockResolvedValueOnce(makeParityPublicInputs(4)); - simulator.rootParityCircuit.mockResolvedValueOnce(makeParityPublicInputs(5)); - simulator.baseRollupCircuit - .mockResolvedValueOnce(baseRollupOutputLeft) - 
.mockResolvedValueOnce(baseRollupOutputRight); - simulator.rootRollupCircuit.mockResolvedValue(rootRollupOutput); - }, 20_000); - - const makeEmptyProcessedTx = async () => { - const header = await builderDb.buildInitialHeader(); - return makeEmptyProcessedTxFromHistoricalTreeRoots(header, chainId, version); - }; - - // Updates the expectedDb trees based on the new note hashes, contracts, and nullifiers from these txs - const updateExpectedTreesFromTxs = async (txs: ProcessedTx[]) => { - await expectsDb.appendLeaves( - MerkleTreeId.NOTE_HASH_TREE, - txs.flatMap(tx => - padArrayEnd( - tx.data.end.newNoteHashes.filter(x => !x.isZero()), - Fr.zero(), - MAX_NEW_NOTE_HASHES_PER_TX, - ), - ), - ); - await expectsDb.batchInsert( - MerkleTreeId.NULLIFIER_TREE, - txs.flatMap(tx => - padArrayEnd( - tx.data.end.newNullifiers.filter(x => !x.isZero()), - Fr.zero(), - MAX_NEW_NULLIFIERS_PER_TX, - ).map(x => x.toBuffer()), - ), - NULLIFIER_SUBTREE_HEIGHT, - ); - for (const tx of txs) { - await expectsDb.batchInsert( - MerkleTreeId.PUBLIC_DATA_TREE, - tx.data.end.publicDataUpdateRequests.map(write => { - return new PublicDataTreeLeaf(write.leafSlot, write.newValue).toBuffer(); - }), - PUBLIC_DATA_SUBTREE_HEIGHT, - ); - } - }; - - describe('error handling', () => { - let mockProver: CircuitProver; - - beforeEach(async () => { - mockProver = new TestCircuitProver(new WASMSimulator()); - builder = await ProvingOrchestrator.new(builderDb, mockProver); - }); - - it.each([ - [ - 'Base Rollup Failed', - () => { - jest.spyOn(mockProver, 'getBaseRollupProof').mockRejectedValue('Base Rollup Failed'); - }, - ], - [ - 'Merge Rollup Failed', - () => { - jest.spyOn(mockProver, 'getMergeRollupProof').mockRejectedValue('Merge Rollup Failed'); - }, - ], - [ - 'Root Rollup Failed', - () => { - jest.spyOn(mockProver, 'getRootRollupProof').mockRejectedValue('Root Rollup Failed'); - }, - ], - [ - 'Base Parity Failed', - () => { - jest.spyOn(mockProver, 'getBaseParityProof').mockRejectedValue('Base Parity Failed'); - }, - ], - [ - 'Root Parity Failed', - () => { - jest.spyOn(mockProver, 'getRootParityProof').mockRejectedValue('Root Parity Failed'); - }, - ], - ] as const)( - 'handles a %s error', - async (message: string, fn: () => void) => { - fn(); - const txs = await Promise.all([ - makeEmptyProcessedTx(), - makeEmptyProcessedTx(), - makeEmptyProcessedTx(), - makeEmptyProcessedTx(), - ]); - - const blockTicket = await builder.startNewBlock(txs.length, globalVariables, [], await makeEmptyProcessedTx()); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - await expect(blockTicket.provingPromise).resolves.toEqual({ status: PROVING_STATUS.FAILURE, reason: message }); - }, - 60000, - ); - - afterEach(async () => { - await builder.stop(); - }); - }); - - describe('circuits simulator', () => { - beforeEach(async () => { - builder = await ProvingOrchestrator.new(builderDb, prover); - }); - - afterEach(async () => { - await builder.stop(); - }); - - it.each([ - [0, 2], - [1, 2], - [4, 4], - [5, 8], - [9, 16], - ] as const)( - 'builds an L2 block with %i bloated txs and %i txs total', - async (bloatedCount: number, totalCount: number) => { - const noteHashTreeBefore = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); - const txs = [ - ...(await Promise.all(times(bloatedCount, () => makeBloatedProcessedTx(builderDb)))), - ...(await Promise.all(times(totalCount - bloatedCount, makeEmptyProcessedTx))), - ]; - - const blockTicket = await builder.startNewBlock( - txs.length, - globalVariables, - mockL1ToL2Messages, - 
await makeEmptyProcessedTx(), - ); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - - await updateExpectedTreesFromTxs(txs); - const noteHashTreeAfter = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); - - if (bloatedCount > 0) { - expect(noteHashTreeAfter.root).not.toEqual(noteHashTreeBefore.root); - } - - const expectedNoteHashTreeAfter = await expectsDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE).then(t => t.root); - expect(noteHashTreeAfter.root).toEqual(expectedNoteHashTreeAfter); - }, - 60000, - ); - - it('builds an empty L2 block', async () => { - const txs = await Promise.all([makeEmptyProcessedTx(), makeEmptyProcessedTx()]); - - const blockTicket = await builder.startNewBlock(txs.length, globalVariables, [], await makeEmptyProcessedTx()); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); - - it('builds a block with 1 transaction', async () => { - const txs = await Promise.all([makeBloatedProcessedTx(builderDb, 1)]); - - await updateExpectedTreesFromTxs(txs); - - // This will need to be a 2 tx block - const blockTicket = await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTx()); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - // we need to complete the block as we have not added a full set of txs - await builder.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); - - it('builds a block with a transaction with public functions', async () => { - const tx = await makeBloatedProcessedTx(builderDb, 1); - - const setup: PublicKernelRequest = { - type: PublicKernelType.SETUP, - inputs: makePublicKernelCircuitPrivateInputs(2), - }; - - const app: PublicKernelRequest = { - type: PublicKernelType.APP_LOGIC, - inputs: makePublicKernelCircuitPrivateInputs(3), - }; - - const teardown: PublicKernelRequest = { - type: PublicKernelType.TEARDOWN, - inputs: makePublicKernelCircuitPrivateInputs(4), - }; - - const tail: PublicKernelRequest = { - type: PublicKernelType.TAIL, - inputs: makePublicKernelCircuitPrivateInputs(5), - }; - - tx.publicKernelRequests = [setup, app, teardown, tail]; - - // This will need to be a 2 tx block - const blockTicket = await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTx()); - - await builder.addNewTx(tx); - - // we need to complete the block as we have not added a full set of txs - await builder.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); - - it('builds multiple blocks in sequence', async () => { - const numBlocks = 5; - let header = await builderDb.buildInitialHeader(); - - for (let i = 0; i < numBlocks; i++) { - const tx = await makeBloatedProcessedTx(builderDb, i + 1); - const emptyTx = await 
makeEmptyProcessedTx(); - tx.data.constants.historicalHeader = header; - emptyTx.data.constants.historicalHeader = header; - - const blockNum = i + 1000; - - const globals = makeGlobals(blockNum); - - // This will need to be a 2 tx block - const blockTicket = await builder.startNewBlock(2, globals, [], emptyTx); - - await builder.addNewTx(tx); - - // we need to complete the block as we have not added a full set of txs - await builder.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNum); - header = finalisedBlock.block.header; - - await builderDb.commit(); - } - }, 60_000); - - it('builds a mixed L2 block', async () => { - const txs = await Promise.all([ - makeBloatedProcessedTx(builderDb, 1), - makeBloatedProcessedTx(builderDb, 2), - makeBloatedProcessedTx(builderDb, 3), - makeBloatedProcessedTx(builderDb, 4), - ]); - - const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - - const blockTicket = await builder.startNewBlock( - txs.length, - globalVariables, - l1ToL2Messages, - await makeEmptyProcessedTx(), - ); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 200_000); - - it('builds a block concurrently with transactions', async () => { - const txs = await Promise.all([ - makeBloatedProcessedTx(builderDb, 1), - makeBloatedProcessedTx(builderDb, 2), - makeBloatedProcessedTx(builderDb, 3), - makeBloatedProcessedTx(builderDb, 4), - ]); - - const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - - const blockTicket = await builder.startNewBlock( - txs.length, - globalVariables, - l1ToL2Messages, - await makeEmptyProcessedTx(), - ); - - for (const tx of txs) { - await builder.addNewTx(tx); - await sleep(1000); - } - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 200_000); - - it('cancels current block and switches to new ones', async () => { - const txs1 = await Promise.all([makeBloatedProcessedTx(builderDb, 1), makeBloatedProcessedTx(builderDb, 2)]); - - const txs2 = await Promise.all([makeBloatedProcessedTx(builderDb, 3), makeBloatedProcessedTx(builderDb, 4)]); - - const globals1: GlobalVariables = makeGlobals(100); - const globals2: GlobalVariables = makeGlobals(101); - - const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - - const blockTicket1 = await builder.startNewBlock(2, globals1, l1ToL2Messages, await makeEmptyProcessedTx()); - - await builder.addNewTx(txs1[0]); - await builder.addNewTx(txs1[1]); - - // Now we cancel the block. 
The first block will come to a stop as and when current proofs complete - builder.cancelBlock(); - - const result1 = await blockTicket1.provingPromise; - - // in all likelihood, the block will have a failure code as we cancelled it - // however it may have actually completed proving before we cancelled in which case it could be a succes code - if (result1.status === PROVING_STATUS.FAILURE) { - expect((result1 as ProvingFailure).reason).toBe('Proving cancelled'); - } - - await builderDb.rollback(); - - const blockTicket2 = await builder.startNewBlock(2, globals2, l1ToL2Messages, await makeEmptyProcessedTx()); - - await builder.addNewTx(txs2[0]); - await builder.addNewTx(txs2[1]); - - const result2 = await blockTicket2.provingPromise; - expect(result2.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(101); - }, 10000); - - it('automatically cancels an incomplete block when starting a new one', async () => { - const txs1 = await Promise.all([makeBloatedProcessedTx(builderDb, 1), makeBloatedProcessedTx(builderDb, 2)]); - - const txs2 = await Promise.all([makeBloatedProcessedTx(builderDb, 3), makeBloatedProcessedTx(builderDb, 4)]); - - const globals1: GlobalVariables = makeGlobals(100); - const globals2: GlobalVariables = makeGlobals(101); - - const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - - const blockTicket1 = await builder.startNewBlock(2, globals1, l1ToL2Messages, await makeEmptyProcessedTx()); - - await builder.addNewTx(txs1[0]); - - await builderDb.rollback(); - - const blockTicket2 = await builder.startNewBlock(2, globals2, l1ToL2Messages, await makeEmptyProcessedTx()); - - await builder.addNewTx(txs2[0]); - await builder.addNewTx(txs2[1]); - - const result1 = await blockTicket1.provingPromise; - expect(result1.status).toBe(PROVING_STATUS.FAILURE); - expect((result1 as ProvingFailure).reason).toBe('Proving cancelled'); - - const result2 = await blockTicket2.provingPromise; - expect(result2.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(101); - }, 10000); - - it('builds an unbalanced L2 block', async () => { - const txs = await Promise.all([ - makeBloatedProcessedTx(builderDb, 1), - makeBloatedProcessedTx(builderDb, 2), - makeBloatedProcessedTx(builderDb, 3), - ]); - - const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - - // this needs to be a 4 tx block that will need to be completed - const blockTicket = await builder.startNewBlock(4, globalVariables, l1ToL2Messages, await makeEmptyProcessedTx()); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - await builder.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 200_000); - - it('throws if adding too many transactions', async () => { - const txs = await Promise.all([ - makeBloatedProcessedTx(builderDb, 1), - makeBloatedProcessedTx(builderDb, 2), - makeBloatedProcessedTx(builderDb, 3), - makeBloatedProcessedTx(builderDb, 4), - ]); - - const blockTicket = await builder.startNewBlock(txs.length, globalVariables, [], await makeEmptyProcessedTx()); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - await expect(async () => await builder.addNewTx(await 
makeEmptyProcessedTx())).rejects.toThrow( - 'Rollup not accepting further transactions', - ); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); - - it('throws if adding a transaction before start', async () => { - await expect(async () => await builder.addNewTx(await makeEmptyProcessedTx())).rejects.toThrow( - `Invalid proving state, call startNewBlock before adding transactions`, - ); - }, 1000); - - it('throws if completing a block before start', async () => { - await expect(async () => await builder.setBlockCompleted()).rejects.toThrow( - 'Invalid proving state, call startNewBlock before adding transactions or completing the block', - ); - }, 1000); - - it('throws if finalising an incomplete block', async () => { - await expect(async () => await builder.finaliseBlock()).rejects.toThrow( - 'Invalid proving state, a block must be proven before it can be finalised', - ); - }, 1000); - - it('throws if finalising an already finalised block', async () => { - const txs = await Promise.all([makeEmptyProcessedTx(), makeEmptyProcessedTx()]); - - const blockTicket = await builder.startNewBlock(txs.length, globalVariables, [], await makeEmptyProcessedTx()); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - expect(finalisedBlock.block.number).toEqual(blockNumber); - await expect(async () => await builder.finaliseBlock()).rejects.toThrow('Block already finalised'); - }, 20000); - - it('throws if adding to a cancelled block', async () => { - await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTx()); - - builder.cancelBlock(); - - await expect(async () => await builder.addNewTx(await makeEmptyProcessedTx())).rejects.toThrow( - 'Rollup not accepting further transactions', - ); - }, 10000); - - it.each([[-4], [0], [1], [3], [8.1], [7]] as const)( - 'fails to start a block with %i transactions', - async (blockSize: number) => { - await expect( - async () => await builder.startNewBlock(blockSize, globalVariables, [], await makeEmptyProcessedTx()), - ).rejects.toThrow(`Length of txs for the block should be a power of two and at least two (got ${blockSize})`); - }, - 10000, - ); - - it('rejects if too many l1 to l2 messages are provided', async () => { - // Assemble a fake transaction - const l1ToL2Messages = new Array(100).fill(new Fr(0n)); - await expect( - async () => await builder.startNewBlock(2, globalVariables, l1ToL2Messages, await makeEmptyProcessedTx()), - ).rejects.toThrow('Too many L1 to L2 messages'); - }); - }); -}); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts new file mode 100644 index 00000000000..6d2f5477cd0 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts @@ -0,0 +1,160 @@ +import { PROVING_STATUS, type ProcessedTx } from '@aztec/circuit-types'; +import { AztecAddress, EthAddress, Fr, GlobalVariables } from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; + +import { type MemDown, default 
as memdown } from 'memdown'; + +import { getConfig, getSimulationProvider, makeBloatedProcessedTx, makeEmptyProcessedTx } from '../mocks/fixtures.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; +import { ProvingOrchestrator } from './orchestrator.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + +const logger = createDebugLogger('aztec:orchestrator-test'); + +describe('prover/orchestrator', () => { + let builder: ProvingOrchestrator; + let builderDb: MerkleTreeOperations; + + let prover: TestCircuitProver; + + let blockNumber: number; + + let globalVariables: GlobalVariables; + + const chainId = Fr.ZERO; + const version = Fr.ZERO; + const coinbase = EthAddress.ZERO; + const feeRecipient = AztecAddress.ZERO; + + const makeGlobals = (blockNumber: number) => { + return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + }; + + const makeEmptyProcessedTestTx = (): Promise => { + return makeEmptyProcessedTx(builderDb, chainId, version); + }; + + beforeEach(async () => { + blockNumber = 3; + globalVariables = makeGlobals(blockNumber); + + const acvmConfig = await getConfig(logger); + const simulationProvider = await getSimulationProvider({ + acvmWorkingDirectory: acvmConfig?.acvmWorkingDirectory, + acvmBinaryPath: acvmConfig?.expectedAcvmPath, + }); + prover = new TestCircuitProver(simulationProvider); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + builder = new ProvingOrchestrator(builderDb, prover, 1); + }, 20_000); + + describe('errors', () => { + beforeEach(async () => { + builder = await ProvingOrchestrator.new(builderDb, prover); + }); + + afterEach(async () => { + await builder.stop(); + }); + + it('throws if adding too many transactions', async () => { + const txs = await Promise.all([ + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + makeBloatedProcessedTx(builderDb, 4), + ]); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + [], + await makeEmptyProcessedTestTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx())).rejects.toThrow( + 'Rollup not accepting further transactions', + ); + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + }, 30_000); + + it('throws if adding a transaction before start', async () => { + await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx())).rejects.toThrow( + `Invalid proving state, call startNewBlock before adding transactions`, + ); + }, 1000); + + it('throws if completing a block before start', async () => { + await expect(async () => await builder.setBlockCompleted()).rejects.toThrow( + 'Invalid proving state, call startNewBlock before adding transactions or completing the block', + ); + }, 1000); + + it('throws if finalising an incomplete block', async () => { + await expect(async () => await builder.finaliseBlock()).rejects.toThrow( + 'Invalid proving state, a block must be proven before it can be finalised', + ); + }, 1000); + + it('throws if finalising an already finalised block', async () => { + const txs = await Promise.all([makeEmptyProcessedTestTx(), makeEmptyProcessedTestTx()]); + + const blockTicket = await 
builder.startNewBlock( + txs.length, + globalVariables, + [], + await makeEmptyProcessedTestTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + expect(finalisedBlock.block.number).toEqual(blockNumber); + await expect(async () => await builder.finaliseBlock()).rejects.toThrow('Block already finalised'); + }, 60000); + + it('throws if adding to a cancelled block', async () => { + await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTestTx()); + + builder.cancelBlock(); + + await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx())).rejects.toThrow( + 'Rollup not accepting further transactions', + ); + }, 10000); + + it.each([[-4], [0], [1], [3], [8.1], [7]] as const)( + 'fails to start a block with %i transactions', + async (blockSize: number) => { + await expect( + async () => await builder.startNewBlock(blockSize, globalVariables, [], await makeEmptyProcessedTestTx()), + ).rejects.toThrow(`Length of txs for the block should be a power of two and at least two (got ${blockSize})`); + }, + ); + + it('rejects if too many l1 to l2 messages are provided', async () => { + // Assemble a fake transaction + const l1ToL2Messages = new Array(100).fill(new Fr(0n)); + await expect( + async () => await builder.startNewBlock(2, globalVariables, l1ToL2Messages, await makeEmptyProcessedTestTx()), + ).rejects.toThrow('Too many L1 to L2 messages'); + }); + }); +}); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts new file mode 100644 index 00000000000..717076d486e --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts @@ -0,0 +1,127 @@ +import { PROVING_STATUS, type ProcessedTx } from '@aztec/circuit-types'; +import { AztecAddress, EthAddress, Fr, GlobalVariables } from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { WASMSimulator } from '@aztec/simulator'; +import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; + +import { jest } from '@jest/globals'; +import { type MemDown, default as memdown } from 'memdown'; + +import { getConfig, getSimulationProvider, makeEmptyProcessedTx } from '../mocks/fixtures.js'; +import { type CircuitProver } from '../prover/index.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; +import { ProvingOrchestrator } from './orchestrator.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + +const logger = createDebugLogger('aztec:orchestrator-test'); + +describe('prover/orchestrator', () => { + let builder: ProvingOrchestrator; + let builderDb: MerkleTreeOperations; + + let prover: TestCircuitProver; + + let blockNumber: number; + + let globalVariables: GlobalVariables; + + const chainId = Fr.ZERO; + const version = Fr.ZERO; + const coinbase = EthAddress.ZERO; + const feeRecipient = AztecAddress.ZERO; + + const makeGlobals = (blockNumber: number) => { + return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + }; + + const makeEmptyProcessedTestTx = (): Promise => { + return makeEmptyProcessedTx(builderDb, chainId, version); + }; + + beforeEach(async () => { + blockNumber = 3; + globalVariables = 
makeGlobals(blockNumber); + + const acvmConfig = await getConfig(logger); + const simulationProvider = await getSimulationProvider({ + acvmWorkingDirectory: acvmConfig?.acvmWorkingDirectory, + acvmBinaryPath: acvmConfig?.expectedAcvmPath, + }); + prover = new TestCircuitProver(simulationProvider); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + builder = new ProvingOrchestrator(builderDb, prover, 1); + }, 20_000); + + describe('error handling', () => { + let mockProver: CircuitProver; + + beforeEach(async () => { + mockProver = new TestCircuitProver(new WASMSimulator()); + builder = await ProvingOrchestrator.new(builderDb, mockProver); + }); + + it.each([ + [ + 'Base Rollup Failed', + () => { + jest.spyOn(mockProver, 'getBaseRollupProof').mockRejectedValue('Base Rollup Failed'); + }, + ], + [ + 'Merge Rollup Failed', + () => { + jest.spyOn(mockProver, 'getMergeRollupProof').mockRejectedValue('Merge Rollup Failed'); + }, + ], + [ + 'Root Rollup Failed', + () => { + jest.spyOn(mockProver, 'getRootRollupProof').mockRejectedValue('Root Rollup Failed'); + }, + ], + [ + 'Base Parity Failed', + () => { + jest.spyOn(mockProver, 'getBaseParityProof').mockRejectedValue('Base Parity Failed'); + }, + ], + [ + 'Root Parity Failed', + () => { + jest.spyOn(mockProver, 'getRootParityProof').mockRejectedValue('Root Parity Failed'); + }, + ], + ] as const)( + 'handles a %s error', + async (message: string, fn: () => void) => { + fn(); + const txs = await Promise.all([ + makeEmptyProcessedTestTx(), + makeEmptyProcessedTestTx(), + makeEmptyProcessedTestTx(), + makeEmptyProcessedTestTx(), + ]); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + [], + await makeEmptyProcessedTestTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + await expect(blockTicket.provingPromise).resolves.toEqual({ status: PROVING_STATUS.FAILURE, reason: message }); + }, + 60000, + ); + + afterEach(async () => { + await builder.stop(); + }); + }); +}); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts new file mode 100644 index 00000000000..3bbd66b8ddf --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts @@ -0,0 +1,131 @@ +import { PROVING_STATUS, type ProcessedTx, type ProvingFailure } from '@aztec/circuit-types'; +import { AztecAddress, EthAddress, Fr, GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { fr } from '@aztec/circuits.js/testing'; +import { range } from '@aztec/foundation/array'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; + +import { type MemDown, default as memdown } from 'memdown'; + +import { getConfig, getSimulationProvider, makeBloatedProcessedTx, makeEmptyProcessedTx } from '../mocks/fixtures.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; +import { ProvingOrchestrator } from './orchestrator.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + +const logger = createDebugLogger('aztec:orchestrator-test'); + +describe('prover/orchestrator', () => { + let builder: ProvingOrchestrator; + let builderDb: MerkleTreeOperations; + + let prover: TestCircuitProver; + + const chainId = Fr.ZERO; + const version = Fr.ZERO; + const coinbase = 
EthAddress.ZERO; + const feeRecipient = AztecAddress.ZERO; + + const makeGlobals = (blockNumber: number) => { + return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + }; + + const makeEmptyProcessedTestTx = (): Promise => { + return makeEmptyProcessedTx(builderDb, chainId, version); + }; + + beforeEach(async () => { + const acvmConfig = await getConfig(logger); + const simulationProvider = await getSimulationProvider({ + acvmWorkingDirectory: acvmConfig?.acvmWorkingDirectory, + acvmBinaryPath: acvmConfig?.expectedAcvmPath, + }); + prover = new TestCircuitProver(simulationProvider); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + builder = new ProvingOrchestrator(builderDb, prover, 1); + }, 20_000); + + describe('lifecycle', () => { + beforeEach(async () => { + builder = await ProvingOrchestrator.new(builderDb, prover); + }); + + afterEach(async () => { + await builder.stop(); + }); + + it('cancels current block and switches to new ones', async () => { + const txs1 = await Promise.all([makeBloatedProcessedTx(builderDb, 1), makeBloatedProcessedTx(builderDb, 2)]); + + const txs2 = await Promise.all([makeBloatedProcessedTx(builderDb, 3), makeBloatedProcessedTx(builderDb, 4)]); + + const globals1: GlobalVariables = makeGlobals(100); + const globals2: GlobalVariables = makeGlobals(101); + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + const blockTicket1 = await builder.startNewBlock(2, globals1, l1ToL2Messages, await makeEmptyProcessedTestTx()); + + await builder.addNewTx(txs1[0]); + await builder.addNewTx(txs1[1]); + + // Now we cancel the block. The first block will come to a stop as and when current proofs complete + builder.cancelBlock(); + + const result1 = await blockTicket1.provingPromise; + + // in all likelihood, the block will have a failure code as we cancelled it + // however it may have actually completed proving before we cancelled in which case it could be a success code + if (result1.status === PROVING_STATUS.FAILURE) { + expect((result1 as ProvingFailure).reason).toBe('Proving cancelled'); + } + + await builderDb.rollback(); + + const blockTicket2 = await builder.startNewBlock(2, globals2, l1ToL2Messages, await makeEmptyProcessedTestTx()); + + await builder.addNewTx(txs2[0]); + await builder.addNewTx(txs2[1]); + + const result2 = await blockTicket2.provingPromise; + expect(result2.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(101); + }, 20000); + + it('automatically cancels an incomplete block when starting a new one', async () => { + const txs1 = await Promise.all([makeBloatedProcessedTx(builderDb, 1), makeBloatedProcessedTx(builderDb, 2)]); + + const txs2 = await Promise.all([makeBloatedProcessedTx(builderDb, 3), makeBloatedProcessedTx(builderDb, 4)]); + + const globals1: GlobalVariables = makeGlobals(100); + const globals2: GlobalVariables = makeGlobals(101); + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + const blockTicket1 = await builder.startNewBlock(2, globals1, l1ToL2Messages, await makeEmptyProcessedTestTx()); + + await builder.addNewTx(txs1[0]); + + await builderDb.rollback(); + + const blockTicket2 = await builder.startNewBlock(2, globals2, l1ToL2Messages, await makeEmptyProcessedTestTx()); + + await builder.addNewTx(txs2[0]); + await builder.addNewTx(txs2[1]); + + const result1 = await 
blockTicket1.provingPromise; + expect(result1.status).toBe(PROVING_STATUS.FAILURE); + expect((result1 as ProvingFailure).reason).toBe('Proving cancelled'); + + const result2 = await blockTicket2.provingPromise; + expect(result2.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(101); + }, 20000); + }); +}); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts new file mode 100644 index 00000000000..a82bad42d14 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts @@ -0,0 +1,183 @@ +import { MerkleTreeId, PROVING_STATUS, type ProcessedTx } from '@aztec/circuit-types'; +import { AztecAddress, EthAddress, Fr, GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { fr } from '@aztec/circuits.js/testing'; +import { range } from '@aztec/foundation/array'; +import { times } from '@aztec/foundation/collection'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; + +import { type MemDown, default as memdown } from 'memdown'; + +import { + getConfig, + getSimulationProvider, + makeBloatedProcessedTx, + makeEmptyProcessedTx, + updateExpectedTreesFromTxs, +} from '../mocks/fixtures.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; +import { ProvingOrchestrator } from './orchestrator.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + +const logger = createDebugLogger('aztec:orchestrator-test'); + +describe('prover/orchestrator', () => { + let builder: ProvingOrchestrator; + let builderDb: MerkleTreeOperations; + let expectsDb: MerkleTreeOperations; + + let prover: TestCircuitProver; + + let blockNumber: number; + let mockL1ToL2Messages: Fr[]; + + let globalVariables: GlobalVariables; + + const chainId = Fr.ZERO; + const version = Fr.ZERO; + const coinbase = EthAddress.ZERO; + const feeRecipient = AztecAddress.ZERO; + + const makeGlobals = (blockNumber: number) => { + return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + }; + + const makeEmptyProcessedTestTx = (): Promise => { + return makeEmptyProcessedTx(builderDb, chainId, version); + }; + + beforeEach(async () => { + blockNumber = 3; + globalVariables = makeGlobals(blockNumber); + + const acvmConfig = await getConfig(logger); + const simulationProvider = await getSimulationProvider({ + acvmWorkingDirectory: acvmConfig?.acvmWorkingDirectory, + acvmBinaryPath: acvmConfig?.expectedAcvmPath, + }); + prover = new TestCircuitProver(simulationProvider); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + builder = new ProvingOrchestrator(builderDb, prover, 1); + + // Create mock l1 to L2 messages + mockL1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)); + }, 20_000); + + describe('blocks', () => { + beforeEach(async () => { + builder = await ProvingOrchestrator.new(builderDb, prover); + }); + + afterEach(async () => { + await builder.stop(); + }); + + it.each([ + [0, 2], + [1, 2], + [4, 4], + [5, 8], + [9, 16], + ] as const)( + 'builds an L2 block with %i bloated txs and %i txs total', + async 
(bloatedCount: number, totalCount: number) => { + const noteHashTreeBefore = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); + const txs = [ + ...(await Promise.all(times(bloatedCount, (i: number) => makeBloatedProcessedTx(builderDb, i)))), + ...(await Promise.all(times(totalCount - bloatedCount, makeEmptyProcessedTestTx))), + ]; + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + mockL1ToL2Messages, + await makeEmptyProcessedTestTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + + await updateExpectedTreesFromTxs(expectsDb, txs); + const noteHashTreeAfter = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); + + if (bloatedCount > 0) { + expect(noteHashTreeAfter.root).not.toEqual(noteHashTreeBefore.root); + } + + const expectedNoteHashTreeAfter = await expectsDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE).then(t => t.root); + expect(noteHashTreeAfter.root).toEqual(expectedNoteHashTreeAfter); + }, + 60000, + ); + + it('builds a mixed L2 block', async () => { + const txs = await Promise.all([ + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + makeBloatedProcessedTx(builderDb, 4), + ]); + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + l1ToL2Messages, + await makeEmptyProcessedTestTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + }, 30_000); + + it('builds an unbalanced L2 block', async () => { + const txs = await Promise.all([ + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + ]); + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + // this needs to be a 4 tx block that will need to be completed + const blockTicket = await builder.startNewBlock( + 4, + globalVariables, + l1ToL2Messages, + await makeEmptyProcessedTestTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + await builder.setBlockCompleted(); + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + }, 30_000); + }); +}); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts new file mode 100644 index 00000000000..e51e8944243 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts @@ -0,0 +1,99 @@ +import { + PROVING_STATUS, + type ProcessedTx +} from '@aztec/circuit-types'; +import { AztecAddress, EthAddress, Fr, GlobalVariables } from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { MerkleTrees, type MerkleTreeOperations } from 
'@aztec/world-state'; + +import { default as memdown, type MemDown } from 'memdown'; + +import { + getConfig, + getSimulationProvider, + makeBloatedProcessedTx, + makeEmptyProcessedTx +} from '../mocks/fixtures.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; +import { ProvingOrchestrator } from './orchestrator.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + +const logger = createDebugLogger('aztec:orchestrator-test'); + +describe('prover/orchestrator', () => { + let builder: ProvingOrchestrator; + let builderDb: MerkleTreeOperations; + + let prover: TestCircuitProver; + + const chainId = Fr.ZERO; + const version = Fr.ZERO; + const coinbase = EthAddress.ZERO; + const feeRecipient = AztecAddress.ZERO; + + const makeGlobals = (blockNumber: number) => { + return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + }; + + const makeEmptyProcessedTestTx = (): Promise => { + return makeEmptyProcessedTx(builderDb, chainId, version); + }; + + beforeEach(async () => { + const acvmConfig = await getConfig(logger); + const simulationProvider = await getSimulationProvider({ + acvmWorkingDirectory: acvmConfig?.acvmWorkingDirectory, + acvmBinaryPath: acvmConfig?.expectedAcvmPath, + }); + prover = new TestCircuitProver(simulationProvider); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + builder = new ProvingOrchestrator(builderDb, prover, 1); + }, 20_000); + + + describe('multiple blocks', () => { + beforeEach(async () => { + builder = await ProvingOrchestrator.new(builderDb, prover); + }); + + afterEach(async () => { + await builder.stop(); + }); + + it('builds multiple blocks in sequence', async () => { + const numBlocks = 5; + let header = await builderDb.buildInitialHeader(); + + for (let i = 0; i < numBlocks; i++) { + const tx = await makeBloatedProcessedTx(builderDb, i + 1); + const emptyTx = await makeEmptyProcessedTestTx(); + tx.data.constants.historicalHeader = header; + emptyTx.data.constants.historicalHeader = header; + + const blockNum = i + 1000; + + const globals = makeGlobals(blockNum); + + // This will need to be a 2 tx block + const blockTicket = await builder.startNewBlock(2, globals, [], emptyTx); + + await builder.addNewTx(tx); + + // we need to complete the block as we have not added a full set of txs + await builder.setBlockCompleted(); + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNum); + header = finalisedBlock.block.header; + + await builderDb.commit(); + } + }, 60_000); + }); +}); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts new file mode 100644 index 00000000000..931d9da6649 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts @@ -0,0 +1,187 @@ +import { PROVING_STATUS, type ProcessedTx, type PublicKernelRequest, PublicKernelType } from '@aztec/circuit-types'; +import { AztecAddress, EthAddress, Fr, GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { fr, makePublicKernelCircuitPrivateInputs } from '@aztec/circuits.js/testing'; +import { range } from '@aztec/foundation/array'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { sleep } from 
'@aztec/foundation/sleep'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; + +import { type MemDown, default as memdown } from 'memdown'; + +import { + getConfig, + getSimulationProvider, + makeBloatedProcessedTx, + makeEmptyProcessedTx, + updateExpectedTreesFromTxs, +} from '../mocks/fixtures.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; +import { ProvingOrchestrator } from './orchestrator.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + +const logger = createDebugLogger('aztec:orchestrator-test'); + +describe('prover/orchestrator', () => { + let builder: ProvingOrchestrator; + let builderDb: MerkleTreeOperations; + let expectsDb: MerkleTreeOperations; + + let prover: TestCircuitProver; + + let blockNumber: number; + + let globalVariables: GlobalVariables; + + const chainId = Fr.ZERO; + const version = Fr.ZERO; + const coinbase = EthAddress.ZERO; + const feeRecipient = AztecAddress.ZERO; + + const makeGlobals = (blockNumber: number) => { + return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + }; + + const makeEmptyProcessedTestTx = (): Promise => { + return makeEmptyProcessedTx(builderDb, chainId, version); + }; + + beforeEach(async () => { + blockNumber = 3; + globalVariables = makeGlobals(blockNumber); + + const acvmConfig = await getConfig(logger); + const simulationProvider = await getSimulationProvider({ + acvmWorkingDirectory: acvmConfig?.acvmWorkingDirectory, + acvmBinaryPath: acvmConfig?.expectedAcvmPath, + }); + prover = new TestCircuitProver(simulationProvider); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + builder = new ProvingOrchestrator(builderDb, prover, 1); + }, 20_000); + + describe('blocks', () => { + beforeEach(async () => { + builder = await ProvingOrchestrator.new(builderDb, prover); + }); + + afterEach(async () => { + await builder.stop(); + }); + + it('builds an empty L2 block', async () => { + const txs = await Promise.all([makeEmptyProcessedTestTx(), makeEmptyProcessedTestTx()]); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + [], + await makeEmptyProcessedTestTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + }, 30_000); + + it('builds a block with 1 transaction', async () => { + const txs = await Promise.all([makeBloatedProcessedTx(builderDb, 1)]); + + await updateExpectedTreesFromTxs(expectsDb, txs); + + // This will need to be a 2 tx block + const blockTicket = await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTestTx()); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + // we need to complete the block as we have not added a full set of txs + await builder.setBlockCompleted(); + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + }, 30_000); + + it('builds a block with a transaction with public functions', async () => { + const tx = await 
makeBloatedProcessedTx(builderDb, 1); + + const setup: PublicKernelRequest = { + type: PublicKernelType.SETUP, + inputs: makePublicKernelCircuitPrivateInputs(2), + }; + + const app: PublicKernelRequest = { + type: PublicKernelType.APP_LOGIC, + inputs: makePublicKernelCircuitPrivateInputs(3), + }; + + const teardown: PublicKernelRequest = { + type: PublicKernelType.TEARDOWN, + inputs: makePublicKernelCircuitPrivateInputs(4), + }; + + const tail: PublicKernelRequest = { + type: PublicKernelType.TAIL, + inputs: makePublicKernelCircuitPrivateInputs(5), + }; + + tx.publicKernelRequests = [setup, app, teardown, tail]; + + // This will need to be a 2 tx block + const blockTicket = await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTestTx()); + + await builder.addNewTx(tx); + + // we need to complete the block as we have not added a full set of txs + await builder.setBlockCompleted(); + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + }, 30_000); + + it('builds a block concurrently with transaction simulation', async () => { + const txs = await Promise.all([ + makeBloatedProcessedTx(builderDb, 1), + makeBloatedProcessedTx(builderDb, 2), + makeBloatedProcessedTx(builderDb, 3), + makeBloatedProcessedTx(builderDb, 4), + ]); + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + l1ToL2Messages, + await makeEmptyProcessedTestTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + await sleep(1000); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + }, 30_000); + }); +}); diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 05063a2d910..300dd859288 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -1,17 +1,14 @@ import { PROVING_STATUS, makeEmptyProcessedTx } from '@aztec/circuit-types'; import { AztecAddress, EthAddress, Fr, GlobalVariables, Header, type RootRollupPublicInputs } from '@aztec/circuits.js'; import { makeRootRollupPublicInputs } from '@aztec/circuits.js/testing'; -import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; -import { fileURLToPath } from '@aztec/foundation/url'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import * as fs from 'fs/promises'; import { type MemDown, default as memdown } from 'memdown'; -import path from 'path'; -import { makeBloatedProcessedTx } from '../mocks/fixtures.js'; +import { getConfig, makeBloatedProcessedTx } from '../mocks/fixtures.js'; import { buildBaseRollupInput } from '../orchestrator/block-building-helpers.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; import { BBNativeRollupProver, type BBProverConfig } from './bb_prover.js'; @@ -20,48 +17,6 @@ export const createMemDown = () => (memdown as any)() as MemDown; const logger = createDebugLogger('aztec:bb-prover-test'); -const { - BB_RELEASE_DIR = 
'cpp/build/bin', - TEMP_DIR = '/tmp', - BB_BINARY_PATH = '', - BB_WORKING_DIRECTORY = '', - NOIR_RELEASE_DIR = 'noir-repo/target/release', - ACVM_BINARY_PATH = '', - ACVM_WORKING_DIRECTORY = '', -} = process.env; - -// Determines if we have access to the bb binary and a tmp folder for temp files -const getConfig = async () => { - try { - const expectedBBPath = BB_BINARY_PATH - ? BB_BINARY_PATH - : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../barretenberg/', BB_RELEASE_DIR)}/bb`; - await fs.access(expectedBBPath, fs.constants.R_OK); - const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; - const bbWorkingDirectory = BB_WORKING_DIRECTORY ? BB_WORKING_DIRECTORY : `${tempWorkingDirectory}/bb`; - await fs.mkdir(bbWorkingDirectory, { recursive: true }); - logger.verbose(`Using native BB binary at ${expectedBBPath} with working directory ${bbWorkingDirectory}`); - - const expectedAcvmPath = ACVM_BINARY_PATH - ? ACVM_BINARY_PATH - : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../noir/', NOIR_RELEASE_DIR)}/acvm`; - await fs.access(expectedAcvmPath, fs.constants.R_OK); - const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`; - await fs.mkdir(acvmWorkingDirectory, { recursive: true }); - logger.verbose(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); - return { - acvmWorkingDirectory, - bbWorkingDirectory, - expectedAcvmPath, - expectedBBPath, - directoryToCleanup: ACVM_WORKING_DIRECTORY && BB_WORKING_DIRECTORY ? undefined : tempWorkingDirectory, - }; - } catch (err) { - logger.verbose(`Native BB not available, error: ${err}`); - return undefined; - } -}; - describe('prover/bb_prover', () => { let builderDb: MerkleTreeOperations; let prover: BBNativeRollupProver; @@ -77,15 +32,8 @@ describe('prover/bb_prover', () => { const coinbase = EthAddress.ZERO; const feeRecipient = AztecAddress.ZERO; - beforeEach(async () => { - blockNumber = 3; - globalVariables = new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - - builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); - rootRollupOutput = makeRootRollupPublicInputs(0); - rootRollupOutput.header.globalVariables = globalVariables; - - const config = await getConfig(); + beforeAll(async () => { + const config = await getConfig(logger); if (!config) { throw new Error(`BB binary must be present to test the BB Prover`); } @@ -97,7 +45,16 @@ describe('prover/bb_prover', () => { bbWorkingDirectory: config.bbWorkingDirectory, }; prover = await BBNativeRollupProver.new(bbConfig); - }, 200_000); + }, 60_000); + + beforeEach(async () => { + blockNumber = 3; + globalVariables = new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + rootRollupOutput = makeRootRollupPublicInputs(0); + rootRollupOutput.header.globalVariables = globalVariables; + }, 60_000); afterEach(async () => { if (directoryToCleanup) { @@ -115,7 +72,7 @@ describe('prover/bb_prover', () => { } logger.verbose('Proving base rollups'); await Promise.all(baseRollupInputs.map(inputs => prover.getBaseRollupProof(inputs))); - }, 600_000); + }, 60_000); it('proves all circuits', async () => { const txs = await Promise.all([ diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 
5af751c7814..a97ee7a6c19 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -177,6 +177,8 @@ export class BBNativeRollupProver implements CircuitProver { const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); + logger.error(`Created directory ${bbWorkingDirectory}`); + // Have the ACVM write the partial witness here const outputWitnessFile = `${bbWorkingDirectory}/partial-witness.gz`; @@ -216,7 +218,7 @@ export class BBNativeRollupProver implements CircuitProver { await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); - logger.debug( + logger.info( `Generated proof for ${circuitType} in ${provingResult.duration} ms, size: ${proofBuffer.length} bytes`, ); diff --git a/yarn-project/prover-client/src/prover/test_circuit_prover.ts b/yarn-project/prover-client/src/prover/test_circuit_prover.ts index 9231a225bea..8be054a6e9a 100644 --- a/yarn-project/prover-client/src/prover/test_circuit_prover.ts +++ b/yarn-project/prover-client/src/prover/test_circuit_prover.ts @@ -54,7 +54,8 @@ export class TestCircuitProver implements CircuitProver { public async getBaseParityProof(inputs: BaseParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertBaseParityInputsToWitnessMap(inputs); - const witness = await this.simulationProvider.simulateCircuit(witnessMap, BaseParityArtifact); + // use WASM here as it is faster for small circuits + const witness = await this.wasmSimulator.simulateCircuit(witnessMap, BaseParityArtifact); const result = convertBaseParityOutputsFromWitnessMap(witness); @@ -69,7 +70,8 @@ export class TestCircuitProver implements CircuitProver { public async getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]> { const witnessMap = convertRootParityInputsToWitnessMap(inputs); - const witness = await this.simulationProvider.simulateCircuit(witnessMap, RootParityArtifact); + // use WASM here as it is faster for small circuits + const witness = await this.wasmSimulator.simulateCircuit(witnessMap, RootParityArtifact); const result = convertRootParityOutputsFromWitnessMap(witness); diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index b70cdab2ebb..ad13509b37a 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -1,4 +1,5 @@ import { randomBytes } from '@aztec/foundation/crypto'; +import { Timer } from '@aztec/foundation/timer'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; import { type WitnessMap } from '@noir-lang/types'; @@ -7,6 +8,24 @@ import fs from 'fs/promises'; import { type SimulationProvider } from './simulation_provider.js'; +export enum ACVM_RESULT { + SUCCESS, + FAILURE, +} + +export type ACVMSuccess = { + status: ACVM_RESULT.SUCCESS; + duration: number; + witness: Map; +}; + +export type ACVMFailure = { + status: ACVM_RESULT.FAILURE; + reason: string; +}; + +export type ACVMResult = ACVMSuccess | ACVMFailure; + /** * Parses a TOML format witness map string into a Map structure * @param outputString - The witness map in TOML format @@ -39,7 +58,7 @@ export async function executeNativeCircuit( workingDirectory: string, pathToAcvm: string, outputFilename?: string, -) { +): Promise { const bytecodeFilename = 'bytecode'; const witnessFilename = 'input_witness.toml'; @@ -49,56 +68,60 @@ export 
async function executeNativeCircuit( witnessMap = witnessMap.concat(`${key} = '${value}'\n`); }); - // In case the directory is still around from some time previously, remove it - await fs.rm(workingDirectory, { recursive: true, force: true }); - // Create the new working directory - await fs.mkdir(workingDirectory, { recursive: true }); - // Write the bytecode and input witness to the working directory - await fs.writeFile(`${workingDirectory}/${bytecodeFilename}`, bytecode); - await fs.writeFile(`${workingDirectory}/${witnessFilename}`, witnessMap); - - // Execute the ACVM using the given args - const args = [ - `execute`, - `--working-directory`, - `${workingDirectory}`, - `--bytecode`, - `${bytecodeFilename}`, - `--input-witness`, - `${witnessFilename}`, - '--print', - '--output-witness', - 'output-witness', - ]; - - const processPromise = new Promise((resolve, reject) => { - let outputWitness = Buffer.alloc(0); - let errorBuffer = Buffer.alloc(0); - const acvm = proc.spawn(pathToAcvm, args); - acvm.stdout.on('data', data => { - outputWitness = Buffer.concat([outputWitness, data]); - }); - acvm.stderr.on('data', data => { - errorBuffer = Buffer.concat([errorBuffer, data]); - }); - acvm.on('close', code => { - if (code === 0) { - resolve(outputWitness.toString('utf-8')); - } else { - reject(errorBuffer.toString('utf-8')); - } - }); - }); + try { + // Check that the directory exists + await fs.access(workingDirectory); + } catch (error) { + return { status: ACVM_RESULT.FAILURE, reason: `Working directory ${workingDirectory} does not exist` }; + } try { + // Write the bytecode and input witness to the working directory + await fs.writeFile(`${workingDirectory}/${bytecodeFilename}`, bytecode); + await fs.writeFile(`${workingDirectory}/${witnessFilename}`, witnessMap); + + // Execute the ACVM using the given args + const args = [ + `execute`, + `--working-directory`, + `${workingDirectory}`, + `--bytecode`, + `${bytecodeFilename}`, + `--input-witness`, + `${witnessFilename}`, + '--print', + '--output-witness', + 'output-witness', + ]; + + const processPromise = new Promise((resolve, reject) => { + let outputWitness = Buffer.alloc(0); + let errorBuffer = Buffer.alloc(0); + const acvm = proc.spawn(pathToAcvm, args); + acvm.stdout.on('data', data => { + outputWitness = Buffer.concat([outputWitness, data]); + }); + acvm.stderr.on('data', data => { + errorBuffer = Buffer.concat([errorBuffer, data]); + }); + acvm.on('close', code => { + if (code === 0) { + resolve(outputWitness.toString('utf-8')); + } else { + reject(errorBuffer.toString('utf-8')); + } + }); + }); + + const duration = new Timer(); const output = await processPromise; if (outputFilename) { await fs.copyFile(`${workingDirectory}/output-witness.gz`, outputFilename); } - return parseIntoWitnessMap(output); - } finally { - // Clean up the working directory before we leave - await fs.rm(workingDirectory, { recursive: true, force: true }); + const witness = parseIntoWitnessMap(output); + return { status: ACVM_RESULT.SUCCESS, witness, duration: duration.ms() }; + } catch (error) { + return { status: ACVM_RESULT.FAILURE, reason: `${error}` }; } } @@ -113,15 +136,17 @@ export class NativeACVMSimulator implements SimulationProvider { // Provide a unique working directory so we don't get clashes with parallel executions const directory = `${this.workingDirectory}/${randomBytes(8).toString('hex')}`; + await fs.mkdir(directory, { recursive: true }); + // Execute the circuit - const _witnessMap = await executeNativeCircuit( - input, - 
decodedBytecode, - directory, - this.pathToAcvm, - this.witnessFilename, - ); - - return _witnessMap; + const result = await executeNativeCircuit(input, decodedBytecode, directory, this.pathToAcvm, this.witnessFilename); + + await fs.rm(directory, { force: true, recursive: true }); + + if (result.status == ACVM_RESULT.FAILURE) { + throw new Error(`Failed to generate witness: ${result.reason}`); + } + + return result.witness; } } From 1ba88455d058783767e45fc5d47bb84c87d2fc42 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 14:26:23 +0000 Subject: [PATCH 22/41] Formatting --- .../prover-client/src/mocks/fixtures.ts | 6 +++--- .../orchestrator_multiple_blocks.test.ts | 17 ++++------------- .../prover-client/src/prover/bb_prover.ts | 2 -- 3 files changed, 7 insertions(+), 18 deletions(-) diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index deaa19daf7a..68f7f83e22a 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -1,6 +1,6 @@ import { MerkleTreeId, - ProcessedTx, + type ProcessedTx, makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, makeProcessedTx, mockTx, @@ -21,9 +21,9 @@ import { fr, makeProof } from '@aztec/circuits.js/testing'; import { makeTuple } from '@aztec/foundation/array'; import { padArrayEnd } from '@aztec/foundation/collection'; import { randomBytes } from '@aztec/foundation/crypto'; -import { DebugLogger } from '@aztec/foundation/log'; +import { type DebugLogger } from '@aztec/foundation/log'; import { fileURLToPath } from '@aztec/foundation/url'; -import { NativeACVMSimulator, SimulationProvider, WASMSimulator } from '@aztec/simulator'; +import { NativeACVMSimulator, type SimulationProvider, WASMSimulator } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; import * as fs from 'fs/promises'; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts index e51e8944243..6f005ac276a 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts @@ -1,20 +1,12 @@ -import { - PROVING_STATUS, - type ProcessedTx -} from '@aztec/circuit-types'; +import { PROVING_STATUS, type ProcessedTx } from '@aztec/circuit-types'; import { AztecAddress, EthAddress, Fr, GlobalVariables } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; -import { MerkleTrees, type MerkleTreeOperations } from '@aztec/world-state'; +import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; -import { default as memdown, type MemDown } from 'memdown'; +import { type MemDown, default as memdown } from 'memdown'; -import { - getConfig, - getSimulationProvider, - makeBloatedProcessedTx, - makeEmptyProcessedTx -} from '../mocks/fixtures.js'; +import { getConfig, getSimulationProvider, makeBloatedProcessedTx, makeEmptyProcessedTx } from '../mocks/fixtures.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; import { ProvingOrchestrator } from './orchestrator.js'; @@ -53,7 +45,6 @@ describe('prover/orchestrator', () => { builder = new ProvingOrchestrator(builderDb, prover, 1); }, 20_000); - describe('multiple blocks', () => { beforeEach(async () => { builder = 
await ProvingOrchestrator.new(builderDb, prover); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index a97ee7a6c19..0ea797e8a3b 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -177,8 +177,6 @@ export class BBNativeRollupProver implements CircuitProver { const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); - logger.error(`Created directory ${bbWorkingDirectory}`); - // Have the ACVM write the partial witness here const outputWitnessFile = `${bbWorkingDirectory}/partial-witness.gz`; From 3533c3a2079f65505f4023dbe09f10cd7200dd35 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 14:34:52 +0000 Subject: [PATCH 23/41] Review fixes --- yarn-project/circuit-types/src/tx/processed_tx.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index 22d0c589fe8..f8553952845 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -115,7 +115,7 @@ export function makeProcessedTx( tx: Tx, kernelOutput: KernelCircuitPublicInputs, proof: Proof, - PublicKernelRequest: PublicKernelRequest[], + publicKernelRequests: PublicKernelRequest[], revertReason?: SimulationError, ): ProcessedTx { return { @@ -126,7 +126,7 @@ export function makeProcessedTx( unencryptedLogs: revertReason ? UnencryptedTxL2Logs.empty() : tx.unencryptedLogs, isEmpty: false, revertReason, - publicKernelRequests: [], + publicKernelRequests, }; } From ef6f80fb394f6b4b672f5efcd96e276aa8e8a611 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 14:34:52 +0000 Subject: [PATCH 24/41] Review fixes --- yarn-project/circuit-types/src/tx/processed_tx.ts | 4 ++-- yarn-project/prover-client/src/orchestrator/orchestrator.ts | 2 +- yarn-project/prover-client/src/orchestrator/proving-state.ts | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index 22d0c589fe8..f8553952845 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -115,7 +115,7 @@ export function makeProcessedTx( tx: Tx, kernelOutput: KernelCircuitPublicInputs, proof: Proof, - PublicKernelRequest: PublicKernelRequest[], + publicKernelRequests: PublicKernelRequest[], revertReason?: SimulationError, ): ProcessedTx { return { @@ -126,7 +126,7 @@ export function makeProcessedTx( unencryptedLogs: revertReason ? 
UnencryptedTxL2Logs.empty() : tx.unencryptedLogs, isEmpty: false, revertReason, - publicKernelRequests: [], + publicKernelRequests, }; } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 926d6bc5ccb..99648724d00 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -344,7 +344,7 @@ export class ProvingOrchestrator { logger.debug(`Not executing public function, state invalid`); return Promise.resolve(); } - const request = provingState.getNextPublicFunction(txIndex, nextFunctionIndex); + const request = provingState.getPublicFunction(txIndex, nextFunctionIndex); if (!request) { // TODO(@Phil): Properly encapsulate this stuff const tx = provingState.allTxs[txIndex]; diff --git a/yarn-project/prover-client/src/orchestrator/proving-state.ts b/yarn-project/prover-client/src/orchestrator/proving-state.ts index 157813cccd4..488b1c3fe06 100644 --- a/yarn-project/prover-client/src/orchestrator/proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/proving-state.ts @@ -147,7 +147,7 @@ export class ProvingState { return index >= 0 && this.txs.length > index && this.txs[index].publicKernelRequests.length; } - public getNextPublicFunction(txIndex: number, nextIndex: number) { + public getPublicFunction(txIndex: number, nextIndex: number) { if (txIndex < 0 || txIndex >= this.txs.length) { return undefined; } From 868a94fec33a521fa536a4fe16c86c89c62ac4e1 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 16:12:05 +0000 Subject: [PATCH 25/41] Review changes --- .../circuit-types/src/tx/processed_tx.ts | 16 ++-- .../orchestrator/block-building-helpers.ts | 76 +------------------ .../src/orchestrator/orchestrator.ts | 57 ++++++++------ .../src/orchestrator/proving-state.ts | 18 +++-- 4 files changed, 56 insertions(+), 111 deletions(-) diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index f8553952845..4c041c1d5f5 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -28,14 +28,18 @@ export enum PublicKernelType { TAIL, } -/** - * Request to the prover to prove a public kernel circuit - */ -export type PublicKernelRequest = { - type: PublicKernelType; - inputs: PublicKernelCircuitPrivateInputs | PublicKernelTailCircuitPrivateInputs; +export type PublicKernelTailRequest = { + type: PublicKernelType.TAIL; + inputs: PublicKernelTailCircuitPrivateInputs; }; +export type PublicKernelNonTailRequest = { + type: PublicKernelType.SETUP | PublicKernelType.APP_LOGIC | PublicKernelType.TEARDOWN; + inputs: PublicKernelCircuitPrivateInputs; +}; + +export type PublicKernelRequest = PublicKernelTailRequest | PublicKernelNonTailRequest; + /** * Represents a tx that has been processed by the sequencer public processor, * so its kernel circuit public inputs are filled in. 
diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 957d5fbae91..fd727af7ed6 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -3,7 +3,6 @@ import { ARCHIVE_HEIGHT, AppendOnlyTreeSnapshot, type BaseOrMergeRollupPublicInputs, - type BaseParityInputs, BaseRollupInputs, ConstantRollupData, Fr, @@ -31,8 +30,7 @@ import { type PublicDataTreeLeafPreimage, ROLLUP_VK_TREE_HEIGHT, RollupTypes, - RootParityInput, - type RootParityInputs, + type RootParityInput, RootRollupInputs, type RootRollupPublicInputs, StateDiffHints, @@ -41,12 +39,10 @@ import { type VerificationKey, } from '@aztec/circuits.js'; import { assertPermutation, makeTuple } from '@aztec/foundation/array'; -import { type DebugLogger } from '@aztec/foundation/log'; import { type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { type VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; -import { type CircuitProver } from '../prover/interface.js'; // Denotes fields that are not used now, but will be in the future const FUTURE_FR = new Fr(0n); @@ -181,43 +177,6 @@ export function createMergeRollupInputs( return mergeInputs; } -export async function executeMergeRollupCircuit( - mergeInputs: MergeRollupInputs, - prover: CircuitProver, - logger?: DebugLogger, -): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { - logger?.debug(`Running merge rollup circuit`); - return await prover.getMergeRollupProof(mergeInputs); -} - -export async function executeRootRollupCircuit( - left: [BaseOrMergeRollupPublicInputs, Proof], - right: [BaseOrMergeRollupPublicInputs, Proof], - l1ToL2Roots: RootParityInput, - newL1ToL2Messages: Tuple, - messageTreeSnapshot: AppendOnlyTreeSnapshot, - messageTreeRootSiblingPath: Tuple, - prover: CircuitProver, - db: MerkleTreeOperations, - logger?: DebugLogger, -): Promise<[RootRollupPublicInputs, Proof]> { - logger?.debug(`Running root rollup circuit`); - const rootInput = await getRootRollupInput( - ...left, - ...right, - l1ToL2Roots, - newL1ToL2Messages, - messageTreeSnapshot, - messageTreeRootSiblingPath, - db, - ); - - // Simulate and get proof for the root circuit - const [rootOutput, rootProof] = await prover.getRootRollupProof(rootInput); - - return [rootOutput, rootProof]; -} - // Validate that the roots of all local trees match the output of the root circuit simulation export async function validateRootOutput(rootOutput: RootRollupPublicInputs, db: MerkleTreeOperations) { await Promise.all([ @@ -441,19 +400,6 @@ export async function getMembershipWitnessFor( return new MembershipWitness(height, index, assertLength(path.toFields(), height)); } -export async function executeBaseRollupCircuit( - tx: ProcessedTx, - inputs: BaseRollupInputs, - treeSnapshots: Map, - prover: CircuitProver, - logger?: DebugLogger, -): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { - logger?.debug(`Running base rollup for ${tx.hash}`); - const [rollupOutput, proof] = await prover.getBaseRollupProof(inputs); - validatePartialState(rollupOutput.end, treeSnapshots); - return [rollupOutput, proof]; -} - export function validatePartialState( partialState: PartialStateReference, treeSnapshots: Map, @@ -486,26 +432,6 @@ export function validateSimulatedTree( } } -export async function 
executeBaseParityCircuit( - inputs: BaseParityInputs, - prover: CircuitProver, - logger?: DebugLogger, -): Promise { - logger?.debug(`Running base parity circuit`); - const [parityPublicInputs, proof] = await prover.getBaseParityProof(inputs); - return new RootParityInput(proof, parityPublicInputs); -} - -export async function executeRootParityCircuit( - inputs: RootParityInputs, - prover: CircuitProver, - logger?: DebugLogger, -): Promise { - logger?.debug(`Running root parity circuit`); - const [parityPublicInputs, proof] = await prover.getRootParityProof(inputs); - return new RootParityInput(proof, parityPublicInputs); -} - export function validateTx(tx: ProcessedTx) { const txHeader = tx.data.constants.historicalHeader; if (txHeader.state.l1ToL2MessageTree.isZero()) { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 99648724d00..6090461179f 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -18,7 +18,7 @@ import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY, type Proof, - type RootParityInput, + RootParityInput, RootParityInputs, } from '@aztec/circuits.js'; import { makeTuple } from '@aztec/foundation/array'; @@ -36,13 +36,10 @@ import { type CircuitProver } from '../prover/index.js'; import { buildBaseRollupInput, createMergeRollupInputs, - executeBaseParityCircuit, - executeBaseRollupCircuit, - executeMergeRollupCircuit, - executeRootParityCircuit, - executeRootRollupCircuit, + getRootRollupInput, getSubtreeSiblingPath, getTreeSnapshot, + validatePartialState, validateRootOutput, validateTx, } from './block-building-helpers.js'; @@ -415,15 +412,17 @@ export class ProvingOrchestrator { logger.debug('Not running base rollup, state invalid'); return; } - const [duration, baseRollupOutputs] = await elapsed(() => - executeBaseRollupCircuit(tx, inputs, treeSnapshots, this.prover, logger), - ); + const [duration, baseRollupOutputs] = await elapsed(async () => { + const [rollupOutput, proof] = await this.prover.getBaseRollupProof(inputs); + validatePartialState(rollupOutput.end, treeSnapshots); + return { rollupOutput, proof }; + }); logger.debug(`Simulated base rollup circuit`, { eventName: 'circuit-simulation', circuitName: 'base-rollup', duration, inputSize: inputs.toBuffer().length, - outputSize: baseRollupOutputs[0].toBuffer().length, + outputSize: baseRollupOutputs.rollupOutput.toBuffer().length, } satisfies CircuitSimulationStats); if (!provingState?.verifyState()) { logger.debug(`Discarding job as state no longer valid`); @@ -431,7 +430,10 @@ export class ProvingOrchestrator { } const currentLevel = provingState.numMergeLevels + 1n; logger.info(`Completed base rollup at index ${index}, current level ${currentLevel}`); - this.storeAndExecuteNextMergeLevel(provingState, currentLevel, index, baseRollupOutputs); + this.storeAndExecuteNextMergeLevel(provingState, currentLevel, index, [ + baseRollupOutputs.rollupOutput, + baseRollupOutputs.proof, + ]); } // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/root circuit @@ -450,9 +452,7 @@ export class ProvingOrchestrator { [mergeInputData.inputs[0]!, mergeInputData.proofs[0]!], [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!], ); - const [duration, circuitOutputs] = await elapsed(() => - executeMergeRollupCircuit(circuitInputs, this.prover, logger), - ); + const [duration, 
circuitOutputs] = await elapsed(() => this.prover.getMergeRollupProof(circuitInputs)); logger.debug(`Simulated merge rollup circuit`, { eventName: 'circuit-simulation', circuitName: 'merge-rollup', @@ -476,21 +476,26 @@ export class ProvingOrchestrator { } const mergeInputData = provingState.getMergeInputs(0); const rootParityInput = provingState.finalRootParityInput!; - const [circuitsOutput, proof] = await executeRootRollupCircuit( - [mergeInputData.inputs[0]!, mergeInputData.proofs[0]!], - [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!], + + const rootInput = await getRootRollupInput( + mergeInputData.inputs[0]!, + mergeInputData.proofs[0]!, + mergeInputData.inputs[1]!, + mergeInputData.proofs[1]!, rootParityInput, provingState.newL1ToL2Messages, provingState.messageTreeSnapshot, provingState.messageTreeRootSiblingPath, - this.prover, this.db, - logger, ); + + // Simulate and get proof for the root circuit + const [rootOutput, rootProof] = await this.prover.getRootRollupProof(rootInput); + logger.info(`Completed root rollup`); - provingState.rootRollupPublicInputs = circuitsOutput; - provingState.finalProof = proof; + provingState.rootRollupPublicInputs = rootOutput; + provingState.finalProof = rootProof; const provingResult: ProvingResult = { status: PROVING_STATUS.SUCCESS, @@ -505,7 +510,10 @@ export class ProvingOrchestrator { logger.debug('Not running base parity, state no longer valid'); return; } - const [duration, circuitOutputs] = await elapsed(() => executeBaseParityCircuit(inputs, this.prover, logger)); + const [duration, circuitOutputs] = await elapsed(async () => { + const [parityPublicInputs, proof] = await this.prover.getBaseParityProof(inputs); + return new RootParityInput(proof, parityPublicInputs); + }); logger.debug(`Simulated base parity circuit`, { eventName: 'circuit-simulation', circuitName: 'base-parity', @@ -539,7 +547,10 @@ export class ProvingOrchestrator { logger.debug(`Not running root parity circuit as state is no longer valid`); return; } - const [duration, circuitOutputs] = await elapsed(() => executeRootParityCircuit(inputs, this.prover, logger)); + const [duration, circuitOutputs] = await elapsed(async () => { + const [parityPublicInputs, proof] = await this.prover.getRootParityProof(inputs); + return new RootParityInput(proof, parityPublicInputs); + }); logger.debug(`Simulated root parity circuit`, { eventName: 'circuit-simulation', circuitName: 'root-parity', diff --git a/yarn-project/prover-client/src/orchestrator/proving-state.ts b/yarn-project/prover-client/src/orchestrator/proving-state.ts index 488b1c3fe06..becb6342c22 100644 --- a/yarn-project/prover-client/src/orchestrator/proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/proving-state.ts @@ -25,13 +25,17 @@ enum PROVING_STATE_LIFECYCLE { PROVING_STATE_REJECTED, } +class TxProvingState { + processedTx: ProcessedTx; +} + /** * The current state of the proving schedule. Contains the raw inputs (txs) and intermediate state to generate every constituent proof in the tree. * Carries an identifier so we can identify if the proving state is discarded and a new one started. * Captures resolve and reject callbacks to provide a promise base interface to the consumer of our proving. 
*/ export class ProvingState { - private provingStateLifecyle = PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED; + private provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED; private mergeRollupInputs: MergeRollupInputData[] = []; private rootParityInputs: Array = []; private finalRootParityInputs: RootParityInput | undefined; @@ -66,7 +70,7 @@ export class ProvingState { public addNewTx(tx: ProcessedTx) { this.txs.push(tx); if (this.txs.length === this.totalNumTxs) { - this.provingStateLifecyle = PROVING_STATE_LIFECYCLE.PROVING_STATE_FULL; + this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_FULL; } return this.txs.length - 1; } @@ -89,13 +93,13 @@ export class ProvingState { public verifyState() { return ( - this.provingStateLifecyle === PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED || - this.provingStateLifecyle === PROVING_STATE_LIFECYCLE.PROVING_STATE_FULL + this.provingStateLifecycle === PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED || + this.provingStateLifecycle === PROVING_STATE_LIFECYCLE.PROVING_STATE_FULL ); } public isAcceptingTransactions() { - return this.provingStateLifecyle === PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED; + return this.provingStateLifecycle === PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED; } public get allTxs() { @@ -166,7 +170,7 @@ export class ProvingState { if (!this.verifyState()) { return; } - this.provingStateLifecyle = PROVING_STATE_LIFECYCLE.PROVING_STATE_REJECTED; + this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_REJECTED; this.rejectionCallback(reason); } @@ -174,7 +178,7 @@ export class ProvingState { if (!this.verifyState()) { return; } - this.provingStateLifecyle = PROVING_STATE_LIFECYCLE.PROVING_STATE_RESOLVED; + this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_RESOLVED; this.completionCallback(result); } } From a6986f5aece2d0ce18a5a71f1335db07714ed480 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 16:17:00 +0000 Subject: [PATCH 26/41] Reverted breakage --- yarn-project/prover-client/src/orchestrator/proving-state.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/yarn-project/prover-client/src/orchestrator/proving-state.ts b/yarn-project/prover-client/src/orchestrator/proving-state.ts index becb6342c22..3f70a88b450 100644 --- a/yarn-project/prover-client/src/orchestrator/proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/proving-state.ts @@ -25,10 +25,6 @@ enum PROVING_STATE_LIFECYCLE { PROVING_STATE_REJECTED, } -class TxProvingState { - processedTx: ProcessedTx; -} - /** * The current state of the proving schedule. Contains the raw inputs (txs) and intermediate state to generate every constituent proof in the tree. * Carries an identifier so we can identify if the proving state is discarded and a new one started. 
From eba7378f19fd11ce2e5ee0942958f958fed5d2e1 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 16:34:10 +0000 Subject: [PATCH 27/41] Verify proof in test --- yarn-project/prover-client/src/prover/bb_prover.test.ts | 4 +++- yarn-project/prover-client/src/prover/bb_prover.ts | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 300dd859288..4461f038f87 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -101,7 +101,9 @@ describe('prover/bb_prover', () => { expect(provingResult.status).toBe(PROVING_STATUS.SUCCESS); - await orchestrator.finaliseBlock(); + const blockResult = await orchestrator.finaliseBlock(); + + await expect(prover.verifyProof('RootRollupArtifact', blockResult.proof)).resolves.not.toThrow(); await orchestrator.stop(); }, 600_000); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 0ea797e8a3b..8bdcde8b231 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -172,7 +172,7 @@ export class BBNativeRollupProver implements CircuitProver { await Promise.all(promises); } - private async createProof(witnessMap: WitnessMap, circuitType: ServerProtocolArtifact): Promise<[WitnessMap, Proof]> { + public async createProof(witnessMap: WitnessMap, circuitType: ServerProtocolArtifact): Promise<[WitnessMap, Proof]> { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); @@ -223,7 +223,7 @@ export class BBNativeRollupProver implements CircuitProver { return [outputWitness, new Proof(proofBuffer)]; } - private async verifyProof(circuitType: ServerProtocolArtifact, proof: Proof) { + public async verifyProof(circuitType: ServerProtocolArtifact, proof: Proof) { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); From 9b07debcf5bf3409fbcd58b20730688346fbf0fe Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 16:34:10 +0000 Subject: [PATCH 28/41] Verify proof in test --- .../prover-client/src/prover/bb_prover.test.ts | 14 ++++++-------- yarn-project/prover-client/src/prover/bb_prover.ts | 4 ++-- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 300dd859288..2e691618e8b 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -1,6 +1,5 @@ import { PROVING_STATUS, makeEmptyProcessedTx } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress, Fr, GlobalVariables, Header, type RootRollupPublicInputs } from '@aztec/circuits.js'; -import { makeRootRollupPublicInputs } from '@aztec/circuits.js/testing'; +import { AztecAddress, EthAddress, Fr, GlobalVariables, Header } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; @@ -23,7 +22,6 @@ 
describe('prover/bb_prover', () => { let directoryToCleanup: string | undefined; let blockNumber: number; - let rootRollupOutput: RootRollupPublicInputs; let globalVariables: GlobalVariables; @@ -52,11 +50,9 @@ describe('prover/bb_prover', () => { globalVariables = new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); - rootRollupOutput = makeRootRollupPublicInputs(0); - rootRollupOutput.header.globalVariables = globalVariables; }, 60_000); - afterEach(async () => { + afterAll(async () => { if (directoryToCleanup) { await fs.rm(directoryToCleanup, { recursive: true, force: true }); } @@ -101,8 +97,10 @@ describe('prover/bb_prover', () => { expect(provingResult.status).toBe(PROVING_STATUS.SUCCESS); - await orchestrator.finaliseBlock(); + const blockResult = await orchestrator.finaliseBlock(); + + await expect(prover.verifyProof('RootRollupArtifact', blockResult.proof)).resolves.not.toThrow(); await orchestrator.stop(); - }, 600_000); + }, 300_000); }); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 0ea797e8a3b..8bdcde8b231 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -172,7 +172,7 @@ export class BBNativeRollupProver implements CircuitProver { await Promise.all(promises); } - private async createProof(witnessMap: WitnessMap, circuitType: ServerProtocolArtifact): Promise<[WitnessMap, Proof]> { + public async createProof(witnessMap: WitnessMap, circuitType: ServerProtocolArtifact): Promise<[WitnessMap, Proof]> { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); @@ -223,7 +223,7 @@ export class BBNativeRollupProver implements CircuitProver { return [outputWitness, new Proof(proofBuffer)]; } - private async verifyProof(circuitType: ServerProtocolArtifact, proof: Proof) { + public async verifyProof(circuitType: ServerProtocolArtifact, proof: Proof) { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(bbWorkingDirectory, { recursive: true }); From 1d2cd6cef034f493175d5faadb75b315e41952b8 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 16:59:51 +0000 Subject: [PATCH 29/41] Merge fixes --- .../prover-client/src/mocks/fixtures.ts | 20 +++++++ .../orchestrator/orchestrator_errors.test.ts | 45 +++++++--------- .../orchestrator_failures.test.ts | 15 ++---- .../orchestrator_lifecycle.test.ts | 53 ++++++++++++------- .../orchestrator_mixed_blocks.test.ts | 28 +++------- .../orchestrator_multiple_blocks.test.ts | 26 ++++----- .../src/prover/bb_prover.test.ts | 11 ++-- 7 files changed, 96 insertions(+), 102 deletions(-) diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index 68f7f83e22a..6d691fe4ce7 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -6,7 +6,11 @@ import { mockTx, } from '@aztec/circuit-types'; import { + AztecAddress, + EthAddress, Fr, + GasFees, + GlobalVariables, KernelCircuitPublicInputs, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, @@ -159,3 +163,19 @@ export const updateExpectedTreesFromTxs = 
async (db: MerkleTreeOperations, txs: ); } }; + +export const makeGlobals = (blockNumber: number) => { + return new GlobalVariables( + Fr.ZERO, + Fr.ZERO, + new Fr(blockNumber), + Fr.ZERO, + EthAddress.ZERO, + AztecAddress.ZERO, + GasFees.empty(), + ); +}; + +export const makeEmptyProcessedTestTx = (builderDb: MerkleTreeOperations): Promise => { + return makeEmptyProcessedTx(builderDb, Fr.ZERO, Fr.ZERO); +}; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts index 6d2f5477cd0..1cd597e61a9 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts @@ -1,12 +1,18 @@ -import { PROVING_STATUS, type ProcessedTx } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress, Fr, GlobalVariables } from '@aztec/circuits.js'; +import { PROVING_STATUS } from '@aztec/circuit-types'; +import { Fr, type GlobalVariables } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import { type MemDown, default as memdown } from 'memdown'; -import { getConfig, getSimulationProvider, makeBloatedProcessedTx, makeEmptyProcessedTx } from '../mocks/fixtures.js'; +import { + getConfig, + getSimulationProvider, + makeBloatedProcessedTx, + makeEmptyProcessedTestTx, + makeGlobals, +} from '../mocks/fixtures.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; import { ProvingOrchestrator } from './orchestrator.js'; @@ -24,19 +30,6 @@ describe('prover/orchestrator', () => { let globalVariables: GlobalVariables; - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const feeRecipient = AztecAddress.ZERO; - - const makeGlobals = (blockNumber: number) => { - return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - }; - - const makeEmptyProcessedTestTx = (): Promise => { - return makeEmptyProcessedTx(builderDb, chainId, version); - }; - beforeEach(async () => { blockNumber = 3; globalVariables = makeGlobals(blockNumber); @@ -73,14 +66,14 @@ describe('prover/orchestrator', () => { txs.length, globalVariables, [], - await makeEmptyProcessedTestTx(), + await makeEmptyProcessedTestTx(builderDb), ); for (const tx of txs) { await builder.addNewTx(tx); } - await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx())).rejects.toThrow( + await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx(builderDb))).rejects.toThrow( 'Rollup not accepting further transactions', ); @@ -92,7 +85,7 @@ describe('prover/orchestrator', () => { }, 30_000); it('throws if adding a transaction before start', async () => { - await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx())).rejects.toThrow( + await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx(builderDb))).rejects.toThrow( `Invalid proving state, call startNewBlock before adding transactions`, ); }, 1000); @@ -110,13 +103,13 @@ describe('prover/orchestrator', () => { }, 1000); it('throws if finalising an already finalised block', async () => { - const txs = await Promise.all([makeEmptyProcessedTestTx(), makeEmptyProcessedTestTx()]); + const txs = await Promise.all([makeEmptyProcessedTestTx(builderDb), 
makeEmptyProcessedTestTx(builderDb)]); const blockTicket = await builder.startNewBlock( txs.length, globalVariables, [], - await makeEmptyProcessedTestTx(), + await makeEmptyProcessedTestTx(builderDb), ); for (const tx of txs) { @@ -131,11 +124,11 @@ describe('prover/orchestrator', () => { }, 60000); it('throws if adding to a cancelled block', async () => { - await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTestTx()); + await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTestTx(builderDb)); builder.cancelBlock(); - await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx())).rejects.toThrow( + await expect(async () => await builder.addNewTx(await makeEmptyProcessedTestTx(builderDb))).rejects.toThrow( 'Rollup not accepting further transactions', ); }, 10000); @@ -144,7 +137,8 @@ describe('prover/orchestrator', () => { 'fails to start a block with %i transactions', async (blockSize: number) => { await expect( - async () => await builder.startNewBlock(blockSize, globalVariables, [], await makeEmptyProcessedTestTx()), + async () => + await builder.startNewBlock(blockSize, globalVariables, [], await makeEmptyProcessedTestTx(builderDb)), ).rejects.toThrow(`Length of txs for the block should be a power of two and at least two (got ${blockSize})`); }, ); @@ -153,7 +147,8 @@ describe('prover/orchestrator', () => { // Assemble a fake transaction const l1ToL2Messages = new Array(100).fill(new Fr(0n)); await expect( - async () => await builder.startNewBlock(2, globalVariables, l1ToL2Messages, await makeEmptyProcessedTestTx()), + async () => + await builder.startNewBlock(2, globalVariables, l1ToL2Messages, await makeEmptyProcessedTestTx(builderDb)), ).rejects.toThrow('Too many L1 to L2 messages'); }); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts index 717076d486e..15c6210f44e 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts @@ -1,5 +1,5 @@ import { PROVING_STATUS, type ProcessedTx } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress, Fr, GlobalVariables } from '@aztec/circuits.js'; +import { Fr, type GlobalVariables } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { WASMSimulator } from '@aztec/simulator'; @@ -8,7 +8,7 @@ import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import { jest } from '@jest/globals'; import { type MemDown, default as memdown } from 'memdown'; -import { getConfig, getSimulationProvider, makeEmptyProcessedTx } from '../mocks/fixtures.js'; +import { getConfig, getSimulationProvider, makeEmptyProcessedTx, makeGlobals } from '../mocks/fixtures.js'; import { type CircuitProver } from '../prover/index.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; import { ProvingOrchestrator } from './orchestrator.js'; @@ -27,17 +27,8 @@ describe('prover/orchestrator', () => { let globalVariables: GlobalVariables; - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const feeRecipient = AztecAddress.ZERO; - - const makeGlobals = (blockNumber: number) => { - return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - }; - const 
makeEmptyProcessedTestTx = (): Promise => { - return makeEmptyProcessedTx(builderDb, chainId, version); + return makeEmptyProcessedTx(builderDb, Fr.ZERO, Fr.ZERO); }; beforeEach(async () => { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts index 3bbd66b8ddf..27108f89e67 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts @@ -1,5 +1,5 @@ -import { PROVING_STATUS, type ProcessedTx, type ProvingFailure } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress, Fr, GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { PROVING_STATUS, type ProvingFailure } from '@aztec/circuit-types'; +import { type GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; import { fr } from '@aztec/circuits.js/testing'; import { range } from '@aztec/foundation/array'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -8,7 +8,13 @@ import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import { type MemDown, default as memdown } from 'memdown'; -import { getConfig, getSimulationProvider, makeBloatedProcessedTx, makeEmptyProcessedTx } from '../mocks/fixtures.js'; +import { + getConfig, + getSimulationProvider, + makeBloatedProcessedTx, + makeEmptyProcessedTestTx, + makeGlobals, +} from '../mocks/fixtures.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; import { ProvingOrchestrator } from './orchestrator.js'; @@ -22,19 +28,6 @@ describe('prover/orchestrator', () => { let prover: TestCircuitProver; - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const feeRecipient = AztecAddress.ZERO; - - const makeGlobals = (blockNumber: number) => { - return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - }; - - const makeEmptyProcessedTestTx = (): Promise => { - return makeEmptyProcessedTx(builderDb, chainId, version); - }; - beforeEach(async () => { const acvmConfig = await getConfig(logger); const simulationProvider = await getSimulationProvider({ @@ -66,7 +59,12 @@ describe('prover/orchestrator', () => { const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - const blockTicket1 = await builder.startNewBlock(2, globals1, l1ToL2Messages, await makeEmptyProcessedTestTx()); + const blockTicket1 = await builder.startNewBlock( + 2, + globals1, + l1ToL2Messages, + await makeEmptyProcessedTestTx(builderDb), + ); await builder.addNewTx(txs1[0]); await builder.addNewTx(txs1[1]); @@ -84,7 +82,12 @@ describe('prover/orchestrator', () => { await builderDb.rollback(); - const blockTicket2 = await builder.startNewBlock(2, globals2, l1ToL2Messages, await makeEmptyProcessedTestTx()); + const blockTicket2 = await builder.startNewBlock( + 2, + globals2, + l1ToL2Messages, + await makeEmptyProcessedTestTx(builderDb), + ); await builder.addNewTx(txs2[0]); await builder.addNewTx(txs2[1]); @@ -106,13 +109,23 @@ describe('prover/orchestrator', () => { const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - const blockTicket1 = await builder.startNewBlock(2, globals1, l1ToL2Messages, await makeEmptyProcessedTestTx()); + const blockTicket1 = await builder.startNewBlock( + 2, + globals1, + l1ToL2Messages, + await 
makeEmptyProcessedTestTx(builderDb), + ); await builder.addNewTx(txs1[0]); await builderDb.rollback(); - const blockTicket2 = await builder.startNewBlock(2, globals2, l1ToL2Messages, await makeEmptyProcessedTestTx()); + const blockTicket2 = await builder.startNewBlock( + 2, + globals2, + l1ToL2Messages, + await makeEmptyProcessedTestTx(builderDb), + ); await builder.addNewTx(txs2[0]); await builder.addNewTx(txs2[1]); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts index a82bad42d14..d619d00f546 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts @@ -1,5 +1,5 @@ -import { MerkleTreeId, PROVING_STATUS, type ProcessedTx } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress, Fr, GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { MerkleTreeId, PROVING_STATUS } from '@aztec/circuit-types'; +import { Fr, type GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; import { fr } from '@aztec/circuits.js/testing'; import { range } from '@aztec/foundation/array'; import { times } from '@aztec/foundation/collection'; @@ -13,7 +13,8 @@ import { getConfig, getSimulationProvider, makeBloatedProcessedTx, - makeEmptyProcessedTx, + makeEmptyProcessedTestTx, + makeGlobals, updateExpectedTreesFromTxs, } from '../mocks/fixtures.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; @@ -35,19 +36,6 @@ describe('prover/orchestrator', () => { let globalVariables: GlobalVariables; - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const feeRecipient = AztecAddress.ZERO; - - const makeGlobals = (blockNumber: number) => { - return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - }; - - const makeEmptyProcessedTestTx = (): Promise => { - return makeEmptyProcessedTx(builderDb, chainId, version); - }; - beforeEach(async () => { blockNumber = 3; globalVariables = makeGlobals(blockNumber); @@ -88,14 +76,14 @@ describe('prover/orchestrator', () => { const noteHashTreeBefore = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); const txs = [ ...(await Promise.all(times(bloatedCount, (i: number) => makeBloatedProcessedTx(builderDb, i)))), - ...(await Promise.all(times(totalCount - bloatedCount, makeEmptyProcessedTestTx))), + ...(await Promise.all(times(totalCount - bloatedCount, _ => makeEmptyProcessedTestTx(builderDb)))), ]; const blockTicket = await builder.startNewBlock( txs.length, globalVariables, mockL1ToL2Messages, - await makeEmptyProcessedTestTx(), + await makeEmptyProcessedTestTx(builderDb), ); for (const tx of txs) { @@ -136,7 +124,7 @@ describe('prover/orchestrator', () => { txs.length, globalVariables, l1ToL2Messages, - await makeEmptyProcessedTestTx(), + await makeEmptyProcessedTestTx(builderDb), ); for (const tx of txs) { @@ -164,7 +152,7 @@ describe('prover/orchestrator', () => { 4, globalVariables, l1ToL2Messages, - await makeEmptyProcessedTestTx(), + await makeEmptyProcessedTestTx(builderDb), ); for (const tx of txs) { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts index 6f005ac276a..3a2be210c63 100644 --- 
a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts @@ -1,12 +1,17 @@ -import { PROVING_STATUS, type ProcessedTx } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress, Fr, GlobalVariables } from '@aztec/circuits.js'; +import { PROVING_STATUS } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import { type MemDown, default as memdown } from 'memdown'; -import { getConfig, getSimulationProvider, makeBloatedProcessedTx, makeEmptyProcessedTx } from '../mocks/fixtures.js'; +import { + getConfig, + getSimulationProvider, + makeBloatedProcessedTx, + makeEmptyProcessedTestTx, + makeGlobals, +} from '../mocks/fixtures.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; import { ProvingOrchestrator } from './orchestrator.js'; @@ -20,19 +25,6 @@ describe('prover/orchestrator', () => { let prover: TestCircuitProver; - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const feeRecipient = AztecAddress.ZERO; - - const makeGlobals = (blockNumber: number) => { - return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - }; - - const makeEmptyProcessedTestTx = (): Promise => { - return makeEmptyProcessedTx(builderDb, chainId, version); - }; - beforeEach(async () => { const acvmConfig = await getConfig(logger); const simulationProvider = await getSimulationProvider({ @@ -60,7 +52,7 @@ describe('prover/orchestrator', () => { for (let i = 0; i < numBlocks; i++) { const tx = await makeBloatedProcessedTx(builderDb, i + 1); - const emptyTx = await makeEmptyProcessedTestTx(); + const emptyTx = await makeEmptyProcessedTestTx(builderDb); tx.data.constants.historicalHeader = header; emptyTx.data.constants.historicalHeader = header; diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 2e691618e8b..0d3573bc570 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -1,5 +1,5 @@ import { PROVING_STATUS, makeEmptyProcessedTx } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress, Fr, GlobalVariables, Header } from '@aztec/circuits.js'; +import { Fr, type GlobalVariables, Header } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; @@ -7,7 +7,7 @@ import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import * as fs from 'fs/promises'; import { type MemDown, default as memdown } from 'memdown'; -import { getConfig, makeBloatedProcessedTx } from '../mocks/fixtures.js'; +import { getConfig, makeBloatedProcessedTx, makeGlobals } from '../mocks/fixtures.js'; import { buildBaseRollupInput } from '../orchestrator/block-building-helpers.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; import { BBNativeRollupProver, type BBProverConfig } from './bb_prover.js'; @@ -25,11 +25,6 @@ describe('prover/bb_prover', () => { let globalVariables: GlobalVariables; - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const 
feeRecipient = AztecAddress.ZERO; - beforeAll(async () => { const config = await getConfig(logger); if (!config) { @@ -47,7 +42,7 @@ describe('prover/bb_prover', () => { beforeEach(async () => { blockNumber = 3; - globalVariables = new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + globalVariables = makeGlobals(blockNumber); builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); }, 60_000); From 82035e3b7be5d3d1dee6808dc65ff8468f7555ac Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 17:30:43 +0000 Subject: [PATCH 30/41] Merge fixes --- .../circuits.js/src/tests/factories.ts | 16 +++++++ .../orchestrator_single_blocks.test.ts | 48 ++++++++++--------- 2 files changed, 41 insertions(+), 23 deletions(-) diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 753c7ca0db4..b7a27ac83b2 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -75,6 +75,8 @@ import { NullifierKeyValidationRequest, NullifierKeyValidationRequestContext, NullifierLeafPreimage, + NullifierNonExistentReadRequestHintsBuilder, + NullifierReadRequestHintsBuilder, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_TREE_HEIGHT, ParityPublicInputs, @@ -103,6 +105,7 @@ import { PublicKernelCircuitPrivateInputs, PublicKernelCircuitPublicInputs, PublicKernelData, + PublicKernelTailCircuitPrivateInputs, ROLLUP_VK_TREE_HEIGHT, ReadRequest, ReadRequestContext, @@ -790,6 +793,19 @@ export function makePublicKernelCircuitPrivateInputs(seed = 1): PublicKernelCirc return new PublicKernelCircuitPrivateInputs(makePublicKernelData(seed), makePublicCallData(seed + 0x1000)); } +/** + * Makes arbitrary public kernel tail inputs. + * @param seed - The seed to use for generating the public kernel inputs. + * @returns Public kernel inputs. + */ +export function makePublicKernelTailCircuitPrivateInputs(seed = 1): PublicKernelTailCircuitPrivateInputs { + return new PublicKernelTailCircuitPrivateInputs( + makePublicKernelData(seed), + NullifierReadRequestHintsBuilder.empty(), + NullifierNonExistentReadRequestHintsBuilder.empty(), + ); +} + /** * Makes arbitrary public kernel private inputs. * @param seed - The seed to use for generating the public kernel inputs. 
diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts index 931d9da6649..b926ce79780 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts @@ -1,6 +1,10 @@ -import { PROVING_STATUS, type ProcessedTx, type PublicKernelRequest, PublicKernelType } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress, Fr, GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; -import { fr, makePublicKernelCircuitPrivateInputs } from '@aztec/circuits.js/testing'; +import { PROVING_STATUS, type PublicKernelRequest, PublicKernelType } from '@aztec/circuit-types'; +import { type GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { + fr, + makePublicKernelCircuitPrivateInputs, + makePublicKernelTailCircuitPrivateInputs, +} from '@aztec/circuits.js/testing'; import { range } from '@aztec/foundation/array'; import { createDebugLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; @@ -13,7 +17,8 @@ import { getConfig, getSimulationProvider, makeBloatedProcessedTx, - makeEmptyProcessedTx, + makeEmptyProcessedTestTx, + makeGlobals, updateExpectedTreesFromTxs, } from '../mocks/fixtures.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; @@ -34,19 +39,6 @@ describe('prover/orchestrator', () => { let globalVariables: GlobalVariables; - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const feeRecipient = AztecAddress.ZERO; - - const makeGlobals = (blockNumber: number) => { - return new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - }; - - const makeEmptyProcessedTestTx = (): Promise => { - return makeEmptyProcessedTx(builderDb, chainId, version); - }; - beforeEach(async () => { blockNumber = 3; globalVariables = makeGlobals(blockNumber); @@ -73,13 +65,13 @@ describe('prover/orchestrator', () => { }); it('builds an empty L2 block', async () => { - const txs = await Promise.all([makeEmptyProcessedTestTx(), makeEmptyProcessedTestTx()]); + const txs = await Promise.all([makeEmptyProcessedTestTx(builderDb), makeEmptyProcessedTestTx(builderDb)]); const blockTicket = await builder.startNewBlock( txs.length, globalVariables, [], - await makeEmptyProcessedTestTx(), + await makeEmptyProcessedTestTx(builderDb), ); for (const tx of txs) { @@ -99,7 +91,12 @@ describe('prover/orchestrator', () => { await updateExpectedTreesFromTxs(expectsDb, txs); // This will need to be a 2 tx block - const blockTicket = await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTestTx()); + const blockTicket = await builder.startNewBlock( + 2, + globalVariables, + [], + await makeEmptyProcessedTestTx(builderDb), + ); for (const tx of txs) { await builder.addNewTx(tx); @@ -135,13 +132,18 @@ describe('prover/orchestrator', () => { const tail: PublicKernelRequest = { type: PublicKernelType.TAIL, - inputs: makePublicKernelCircuitPrivateInputs(5), + inputs: makePublicKernelTailCircuitPrivateInputs(5), }; tx.publicKernelRequests = [setup, app, teardown, tail]; // This will need to be a 2 tx block - const blockTicket = await builder.startNewBlock(2, globalVariables, [], await makeEmptyProcessedTestTx()); + const blockTicket = await builder.startNewBlock( + 2, + globalVariables, + [], + 
await makeEmptyProcessedTestTx(builderDb), + ); await builder.addNewTx(tx); @@ -169,7 +171,7 @@ describe('prover/orchestrator', () => { txs.length, globalVariables, l1ToL2Messages, - await makeEmptyProcessedTestTx(), + await makeEmptyProcessedTestTx(builderDb), ); for (const tx of txs) { From cfc6ea146305e3e1290145c131c66b8866f5e6da Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 17:56:14 +0000 Subject: [PATCH 31/41] Copy the ACVM in test dockerfile --- yarn-project/Dockerfile | 2 +- yarn-project/Dockerfile.test | 2 ++ yarn-project/prover-client/src/prover/bb_prover.test.ts | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/yarn-project/Dockerfile b/yarn-project/Dockerfile index bf2b43f1f0f..7223c9b057a 100644 --- a/yarn-project/Dockerfile +++ b/yarn-project/Dockerfile @@ -13,7 +13,7 @@ COPY --from=bb.js /usr/src/barretenberg/ts /usr/src/barretenberg/ts COPY --from=noir-packages /usr/src/noir/packages /usr/src/noir/packages COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts COPY --from=noir-projects /usr/src/noir-projects /usr/src/noir-projects -# We want the native ACVM binary +# We want the native ACVM and BB binaries COPY --from=noir /usr/src/noir/noir-repo/target/release/acvm /usr/src/noir/noir-repo/target/release/acvm COPY --from=barretenberg /usr/src/barretenberg/cpp/build/bin/bb /usr/src/barretenberg/cpp/build/bin/bb diff --git a/yarn-project/Dockerfile.test b/yarn-project/Dockerfile.test index c5b9b4ab89b..ef76106f4c3 100644 --- a/yarn-project/Dockerfile.test +++ b/yarn-project/Dockerfile.test @@ -12,6 +12,8 @@ COPY --from=bb.js /usr/src/barretenberg/ts /usr/src/barretenberg/ts COPY --from=noir-packages /usr/src/noir/packages /usr/src/noir/packages COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts COPY --from=noir-projects /usr/src/noir-projects /usr/src/noir-projects +# We want the native ACVM and BB binaries +COPY --from=noir /usr/src/noir/noir-repo/target/release/acvm /usr/src/noir/noir-repo/target/release/acvm COPY --from=barretenberg /usr/src/barretenberg/cpp/build/bin/bb /usr/src/barretenberg/cpp/build/bin/bb WORKDIR /usr/src/yarn-project diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 0d3573bc570..51ce8cbddec 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -28,7 +28,7 @@ describe('prover/bb_prover', () => { beforeAll(async () => { const config = await getConfig(logger); if (!config) { - throw new Error(`BB binary must be present to test the BB Prover`); + throw new Error(`BB and ACVM binaries must be present to test the BB Prover`); } directoryToCleanup = config.directoryToCleanup; const bbConfig: BBProverConfig = { From cfeb8e1c947d0589ccf974887e844b8b0bb5d04d Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 18:00:23 +0000 Subject: [PATCH 32/41] Fixed dockerfile --- yarn-project/Dockerfile.test | 1 + 1 file changed, 1 insertion(+) diff --git a/yarn-project/Dockerfile.test b/yarn-project/Dockerfile.test index ef76106f4c3..d5999b264ca 100644 --- a/yarn-project/Dockerfile.test +++ b/yarn-project/Dockerfile.test @@ -3,6 +3,7 @@ FROM --platform=linux/amd64 aztecprotocol/noir-packages as noir-packages FROM --platform=linux/amd64 aztecprotocol/l1-contracts as contracts FROM --platform=linux/amd64 aztecprotocol/noir-projects as noir-projects FROM --platform=linux/amd64 aztecprotocol/barretenberg-x86_64-linux-clang as barretenberg 
+FROM aztecprotocol/noir as noir FROM node:18.19.0 as builder RUN apt update && apt install -y jq curl perl && rm -rf /var/lib/apt/lists/* && apt-get clean From 6bb1942b18dc0141cf52603878b8b75a375cc1f9 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 18:08:43 +0000 Subject: [PATCH 33/41] Increased test timeouts --- .../src/orchestrator/orchestrator_mixed_blocks.test.ts | 4 ++-- .../src/orchestrator/orchestrator_single_blocks.test.ts | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts index d619d00f546..7bf7d88d1b4 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts @@ -136,7 +136,7 @@ describe('prover/orchestrator', () => { const finalisedBlock = await builder.finaliseBlock(); expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); + }, 60_000); it('builds an unbalanced L2 block', async () => { const txs = await Promise.all([ @@ -166,6 +166,6 @@ describe('prover/orchestrator', () => { const finalisedBlock = await builder.finaliseBlock(); expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); + }, 60_000); }); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts index b926ce79780..87babf686e1 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts @@ -83,7 +83,7 @@ describe('prover/orchestrator', () => { const finalisedBlock = await builder.finaliseBlock(); expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); + }, 60_000); it('builds a block with 1 transaction', async () => { const txs = await Promise.all([makeBloatedProcessedTx(builderDb, 1)]); @@ -110,7 +110,7 @@ describe('prover/orchestrator', () => { const finalisedBlock = await builder.finaliseBlock(); expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); + }, 60_000); it('builds a block with a transaction with public functions', async () => { const tx = await makeBloatedProcessedTx(builderDb, 1); @@ -155,7 +155,7 @@ describe('prover/orchestrator', () => { const finalisedBlock = await builder.finaliseBlock(); expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); + }, 60_000); it('builds a block concurrently with transaction simulation', async () => { const txs = await Promise.all([ @@ -184,6 +184,6 @@ describe('prover/orchestrator', () => { const finalisedBlock = await builder.finaliseBlock(); expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 30_000); + }, 60_000); }); }); From 3838a0c85951ef38874c421a21b73910ea314a34 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Thu, 11 Apr 2024 19:21:35 +0000 Subject: [PATCH 34/41] Test fixes --- .../orchestrator_mixed_blocks.test.ts | 86 +------------- .../orchestrator_mixed_blocks_2.test.ts | 111 ++++++++++++++++++ .../src/prover/bb_prover.test.ts | 4 +- 3 files changed, 115 insertions(+), 86 deletions(-) create mode 100644 yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks_2.test.ts diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts 
b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts index 7bf7d88d1b4..0277ae0664c 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts @@ -1,8 +1,7 @@ -import { MerkleTreeId, PROVING_STATUS } from '@aztec/circuit-types'; -import { Fr, type GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { PROVING_STATUS } from '@aztec/circuit-types'; +import { type GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; import { fr } from '@aztec/circuits.js/testing'; import { range } from '@aztec/foundation/array'; -import { times } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; @@ -15,7 +14,6 @@ import { makeBloatedProcessedTx, makeEmptyProcessedTestTx, makeGlobals, - updateExpectedTreesFromTxs, } from '../mocks/fixtures.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; import { ProvingOrchestrator } from './orchestrator.js'; @@ -27,12 +25,10 @@ const logger = createDebugLogger('aztec:orchestrator-test'); describe('prover/orchestrator', () => { let builder: ProvingOrchestrator; let builderDb: MerkleTreeOperations; - let expectsDb: MerkleTreeOperations; let prover: TestCircuitProver; let blockNumber: number; - let mockL1ToL2Messages: Fr[]; let globalVariables: GlobalVariables; @@ -48,11 +44,7 @@ describe('prover/orchestrator', () => { prover = new TestCircuitProver(simulationProvider); builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); - expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); builder = new ProvingOrchestrator(builderDb, prover, 1); - - // Create mock l1 to L2 messages - mockL1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)); }, 20_000); describe('blocks', () => { @@ -64,80 +56,6 @@ describe('prover/orchestrator', () => { await builder.stop(); }); - it.each([ - [0, 2], - [1, 2], - [4, 4], - [5, 8], - [9, 16], - ] as const)( - 'builds an L2 block with %i bloated txs and %i txs total', - async (bloatedCount: number, totalCount: number) => { - const noteHashTreeBefore = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); - const txs = [ - ...(await Promise.all(times(bloatedCount, (i: number) => makeBloatedProcessedTx(builderDb, i)))), - ...(await Promise.all(times(totalCount - bloatedCount, _ => makeEmptyProcessedTestTx(builderDb)))), - ]; - - const blockTicket = await builder.startNewBlock( - txs.length, - globalVariables, - mockL1ToL2Messages, - await makeEmptyProcessedTestTx(builderDb), - ); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - - await updateExpectedTreesFromTxs(expectsDb, txs); - const noteHashTreeAfter = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); - - if (bloatedCount > 0) { - expect(noteHashTreeAfter.root).not.toEqual(noteHashTreeBefore.root); - } - - const expectedNoteHashTreeAfter = await expectsDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE).then(t => t.root); - expect(noteHashTreeAfter.root).toEqual(expectedNoteHashTreeAfter); - 
}, - 60000, - ); - - it('builds a mixed L2 block', async () => { - const txs = await Promise.all([ - makeBloatedProcessedTx(builderDb, 1), - makeBloatedProcessedTx(builderDb, 2), - makeBloatedProcessedTx(builderDb, 3), - makeBloatedProcessedTx(builderDb, 4), - ]); - - const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - - const blockTicket = await builder.startNewBlock( - txs.length, - globalVariables, - l1ToL2Messages, - await makeEmptyProcessedTestTx(builderDb), - ); - - for (const tx of txs) { - await builder.addNewTx(tx); - } - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await builder.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNumber); - }, 60_000); - it('builds an unbalanced L2 block', async () => { const txs = await Promise.all([ makeBloatedProcessedTx(builderDb, 1), diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks_2.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks_2.test.ts new file mode 100644 index 00000000000..353dd87fa9a --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks_2.test.ts @@ -0,0 +1,111 @@ +import { MerkleTreeId, PROVING_STATUS } from '@aztec/circuit-types'; +import { type GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { fr } from '@aztec/circuits.js/testing'; +import { range } from '@aztec/foundation/array'; +import { times } from '@aztec/foundation/collection'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; + +import { type MemDown, default as memdown } from 'memdown'; + +import { + getConfig, + getSimulationProvider, + makeBloatedProcessedTx, + makeEmptyProcessedTestTx, + makeGlobals, + updateExpectedTreesFromTxs, +} from '../mocks/fixtures.js'; +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; +import { ProvingOrchestrator } from './orchestrator.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + +const logger = createDebugLogger('aztec:orchestrator-test'); + +describe('prover/orchestrator', () => { + let builder: ProvingOrchestrator; + let builderDb: MerkleTreeOperations; + let expectsDb: MerkleTreeOperations; + + let prover: TestCircuitProver; + + let blockNumber: number; + + let globalVariables: GlobalVariables; + + beforeEach(async () => { + blockNumber = 3; + globalVariables = makeGlobals(blockNumber); + + const acvmConfig = await getConfig(logger); + const simulationProvider = await getSimulationProvider({ + acvmWorkingDirectory: acvmConfig?.acvmWorkingDirectory, + acvmBinaryPath: acvmConfig?.expectedAcvmPath, + }); + prover = new TestCircuitProver(simulationProvider); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + builder = new ProvingOrchestrator(builderDb, prover, 1); + }, 20_000); + + describe('blocks', () => { + beforeEach(async () => { + builder = await ProvingOrchestrator.new(builderDb, prover); + }); + + afterEach(async () => { + await builder.stop(); + }); + + it.each([ + [0, 2], + [1, 2], + [4, 4], + [5, 8], + [9, 16], + ] as const)( + 'builds an L2 block with %i bloated txs and %i txs total', + async (bloatedCount: number, totalCount: number) => { + const 
noteHashTreeBefore = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); + const txs = [ + ...(await Promise.all(times(bloatedCount, (i: number) => makeBloatedProcessedTx(builderDb, i)))), + ...(await Promise.all(times(totalCount - bloatedCount, _ => makeEmptyProcessedTestTx(builderDb)))), + ]; + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + l1ToL2Messages, + await makeEmptyProcessedTestTx(builderDb), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + + const finalisedBlock = await builder.finaliseBlock(); + + expect(finalisedBlock.block.number).toEqual(blockNumber); + + await updateExpectedTreesFromTxs(expectsDb, txs); + const noteHashTreeAfter = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); + + if (bloatedCount > 0) { + expect(noteHashTreeAfter.root).not.toEqual(noteHashTreeBefore.root); + } + + const expectedNoteHashTreeAfter = await expectsDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE).then(t => t.root); + expect(noteHashTreeAfter.root).toEqual(expectedNoteHashTreeAfter); + }, + 60000, + ); + }); +}); diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index 51ce8cbddec..a16d5e095f0 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -63,7 +63,7 @@ describe('prover/bb_prover', () => { } logger.verbose('Proving base rollups'); await Promise.all(baseRollupInputs.map(inputs => prover.getBaseRollupProof(inputs))); - }, 60_000); + }, 600_000); it('proves all circuits', async () => { const txs = await Promise.all([ @@ -97,5 +97,5 @@ describe('prover/bb_prover', () => { await expect(prover.verifyProof('RootRollupArtifact', blockResult.proof)).resolves.not.toThrow(); await orchestrator.stop(); - }, 300_000); + }, 600_000); }); From 608063548b77cb3751887ab7042fded466a29f2e Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Fri, 12 Apr 2024 08:36:33 +0000 Subject: [PATCH 35/41] Debugging CI --- yarn-project/prover-client/package.json | 2 +- .../prover-client/src/prover/bb_prover.test.ts | 1 + yarn-project/prover-client/src/prover/bb_prover.ts | 3 +++ yarn-project/simulator/src/simulator/acvm_native.ts | 12 ++++++++++++ 4 files changed, 17 insertions(+), 1 deletion(-) diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index a831caf6783..a26c6f79243 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -20,7 +20,7 @@ "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "bb": "node --no-warnings ./dest/bb/index.js", - "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + "test": "DEBUG='aztec:*' NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" }, "inherits": [ "../package.common.json" diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts index a16d5e095f0..8fc243b50c3 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts @@ -49,6 +49,7 @@ 
describe('prover/bb_prover', () => { afterAll(async () => { if (directoryToCleanup) { + logger.info(`Cleaning up ${directoryToCleanup}`); await fs.rm(directoryToCleanup, { recursive: true, force: true }); } }, 5000); diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts index 8bdcde8b231..49a00744825 100644 --- a/yarn-project/prover-client/src/prover/bb_prover.ts +++ b/yarn-project/prover-client/src/prover/bb_prover.ts @@ -175,8 +175,11 @@ export class BBNativeRollupProver implements CircuitProver { public async createProof(witnessMap: WitnessMap, circuitType: ServerProtocolArtifact): Promise<[WitnessMap, Proof]> { // Create random directory to be used for temp files const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + logger.info(`Creating directory ${bbWorkingDirectory}`); await fs.mkdir(bbWorkingDirectory, { recursive: true }); + await fs.access(bbWorkingDirectory); + // Have the ACVM write the partial witness here const outputWitnessFile = `${bbWorkingDirectory}/partial-witness.gz`; diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts index ad13509b37a..c77d304eccd 100644 --- a/yarn-project/simulator/src/simulator/acvm_native.ts +++ b/yarn-project/simulator/src/simulator/acvm_native.ts @@ -1,4 +1,5 @@ import { randomBytes } from '@aztec/foundation/crypto'; +import { createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; @@ -8,6 +9,8 @@ import fs from 'fs/promises'; import { type SimulationProvider } from './simulation_provider.js'; +const logger = createDebugLogger('aztec:acvm-native'); + export enum ACVM_RESULT { SUCCESS, FAILURE, @@ -116,6 +119,13 @@ export async function executeNativeCircuit( const duration = new Timer(); const output = await processPromise; if (outputFilename) { + const outputWitnessFileName = `${workingDirectory}/output-witness.gz`; + try { + await fs.access(outputWitnessFileName); + } catch (error) { + logger.error(`Output witness not present at ${outputWitnessFileName}`); + return { status: ACVM_RESULT.FAILURE, reason: `Output witness not present at ${outputWitnessFileName}` }; + } await fs.copyFile(`${workingDirectory}/output-witness.gz`, outputFilename); } const witness = parseIntoWitnessMap(output); @@ -136,11 +146,13 @@ export class NativeACVMSimulator implements SimulationProvider { // Provide a unique working directory so we don't get clashes with parallel executions const directory = `${this.workingDirectory}/${randomBytes(8).toString('hex')}`; + logger.error(`Creating directory ${directory}`); await fs.mkdir(directory, { recursive: true }); // Execute the circuit const result = await executeNativeCircuit(input, decodedBytecode, directory, this.pathToAcvm, this.witnessFilename); + logger.error(`Removing directory ${directory}`); await fs.rm(directory, { force: true, recursive: true }); if (result.status == ACVM_RESULT.FAILURE) { From e6a9bd387957574cf184ec676ef7382fe3b07901 Mon Sep 17 00:00:00 2001 From: PhilWindle Date: Fri, 12 Apr 2024 09:07:12 +0000 Subject: [PATCH 36/41] Move prover client testing to it's own job --- .circleci/config.yml | 14 ++++++++ build_manifest.yml | 14 ++++++++ yarn-project/package.json | 2 +- yarn-project/prover-client/Dockerfile.test | 39 ++++++++++++++++++++++ yarn-project/prover-client/package.json | 2 +- 5 files changed, 69 insertions(+), 2 
deletions(-) create mode 100644 yarn-project/prover-client/Dockerfile.test diff --git a/.circleci/config.yml b/.circleci/config.yml index 9a6268b2c75..9253b77a6be 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -521,6 +521,18 @@ jobs: command: cond_spot_run_build yarn-project-test 64 aztec_manifest_key: yarn-project-test + prover-client-test: + docker: + - image: aztecprotocol/alpine-build-image + resource_class: small + steps: + - *checkout + - *setup_env + - run: + name: "Build and test" + command: cond_spot_run_build prover-client-test 128 + aztec_manifest_key: prover-client-test + aztec-package: machine: image: default @@ -1429,6 +1441,7 @@ workflows: - end-to-end: *defaults_yarn_project - aztec-faucet: *defaults_yarn_project_pre_join - build-docs: *defaults_yarn_project_pre_join + - prover-client-test: *defaults_yarn_project - yarn-project-test: *defaults_yarn_project - yarn-project-x86_64: *defaults_yarn_project_pre_join - yarn-project-arm64: *defaults_yarn_project_pre_join @@ -1581,6 +1594,7 @@ workflows: - yellow-paper - noir-packages-tests - yarn-project-test + - prover-client-test <<: *defaults # Benchmark jobs. diff --git a/build_manifest.yml b/build_manifest.yml index e650233f6d1..e4864281a37 100644 --- a/build_manifest.yml +++ b/build_manifest.yml @@ -161,6 +161,20 @@ yarn-project-test: - noir-projects - barretenberg-x86_64-linux-clang +# Runs all prover-client checks and tests. +prover-client-test: + buildDir: prover-client + dockerfile: Dockerfile.test + rebuildPatterns: + - ^yarn-project/.*\.(ts|tsx|js|cjs|mjs|json|html|md|sh|nr|toml|snap)$ + - ^yarn-project/Dockerfile$ + dependencies: + - bb.js + - noir-packages + - l1-contracts + - noir-projects + - barretenberg-x86_64-linux-clang + # Builds all of yarn-project, with all developer dependencies. # Creates a runnable container used to run tests and formatting checks. 
yarn-project: diff --git a/yarn-project/package.json b/yarn-project/package.json index a74c1f2f628..4d50aaa7af2 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -10,7 +10,7 @@ "formatting:fix": "FORCE_COLOR=true yarn workspaces foreach -p -v run formatting:fix", "lint": "yarn eslint --cache --ignore-pattern l1-artifacts .", "format": "yarn prettier --cache -w .", - "test": "FORCE_COLOR=true yarn workspaces foreach --exclude @aztec/aztec3-packages --exclude @aztec/end-to-end -p -v run test", + "test": "FORCE_COLOR=true yarn workspaces foreach --exclude @aztec/aztec3-packages --exclude @aztec/end-to-end @aztec/prover-client -p -v run test", "build": "FORCE_COLOR=true yarn workspaces foreach --parallel --topological-dev --verbose --exclude @aztec/aztec3-packages --exclude @aztec/docs run build", "build:fast": "yarn generate && tsc -b", "build:dev": "./watch.sh", diff --git a/yarn-project/prover-client/Dockerfile.test b/yarn-project/prover-client/Dockerfile.test new file mode 100644 index 00000000000..a2c5e3ee3af --- /dev/null +++ b/yarn-project/prover-client/Dockerfile.test @@ -0,0 +1,39 @@ +FROM --platform=linux/amd64 aztecprotocol/bb.js as bb.js +FROM --platform=linux/amd64 aztecprotocol/noir-packages as noir-packages +FROM --platform=linux/amd64 aztecprotocol/l1-contracts as contracts +FROM --platform=linux/amd64 aztecprotocol/noir-projects as noir-projects +FROM --platform=linux/amd64 aztecprotocol/barretenberg-x86_64-linux-clang as barretenberg +FROM aztecprotocol/noir as noir + +FROM node:18.19.0 as builder +RUN apt update && apt install -y jq curl perl && rm -rf /var/lib/apt/lists/* && apt-get clean + +# Copy in portalled packages. +COPY --from=bb.js /usr/src/barretenberg/ts /usr/src/barretenberg/ts +COPY --from=noir-packages /usr/src/noir/packages /usr/src/noir/packages +COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts +COPY --from=noir-projects /usr/src/noir-projects /usr/src/noir-projects +# We want the native ACVM and BB binaries +COPY --from=noir /usr/src/noir/noir-repo/target/release/acvm /usr/src/noir/noir-repo/target/release/acvm +COPY --from=barretenberg /usr/src/barretenberg/cpp/build/bin/bb /usr/src/barretenberg/cpp/build/bin/bb + +WORKDIR /usr/src/yarn-project +COPY . . + +# We install a symlink to yarn-project's node_modules at a location that all portalled packages can find as they +# walk up the tree as part of module resolution. The supposedly idiomatic way of supporting module resolution +# correctly for portalled packages, is to use --preserve-symlinks when running node. +# This does kind of work, but jest doesn't honor it correctly, so this seems like a neat workaround. +# Also, --preserve-symlinks causes duplication of portalled instances such as bb.js, and breaks the singleton logic +# by initialising the module more than once. So at present I don't see a viable alternative. +RUN ln -s /usr/src/yarn-project/node_modules /usr/src/node_modules + +# TODO: Replace puppeteer with puppeteer-core to avoid this. +ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true + +RUN ./bootstrap.sh +RUN cd prover-client && yarn prepare:check && yarn formatting && yarn test + +# Avoid pushing some huge container back to ecr. 
+FROM scratch
+COPY --from=builder /usr/src/yarn-project/README.md /usr/src/yarn-project/README.md
diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json
index a26c6f79243..1e3b55e2770 100644
--- a/yarn-project/prover-client/package.json
+++ b/yarn-project/prover-client/package.json
@@ -20,7 +20,7 @@
     "formatting": "run -T prettier --check ./src && run -T eslint ./src",
     "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src",
     "bb": "node --no-warnings ./dest/bb/index.js",
-    "test": "DEBUG='aztec:*' NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests"
+    "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests"
   },
   "inherits": [
     "../package.common.json"

From 960b70940780725f3e55444e9b72c3f541201824 Mon Sep 17 00:00:00 2001
From: PhilWindle
Date: Fri, 12 Apr 2024 09:29:18 +0000
Subject: [PATCH 37/41] Fixes

---
 build_manifest.yml                         | 3 ++-
 yarn-project/package.json                  | 2 +-
 yarn-project/prover-client/Dockerfile.test | 2 +-
 yarn-project/prover-client/package.json    | 2 +-
 4 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/build_manifest.yml b/build_manifest.yml
index e4864281a37..512375a7c0e 100644
--- a/build_manifest.yml
+++ b/build_manifest.yml
@@ -163,7 +163,8 @@ yarn-project-test:
 
 # Runs all prover-client checks and tests.
 prover-client-test:
-  buildDir: prover-client
+  buildDir: yarn-project
+  projectDir: yarn-project/prover-client
   dockerfile: Dockerfile.test
   rebuildPatterns:
     - ^yarn-project/.*\.(ts|tsx|js|cjs|mjs|json|html|md|sh|nr|toml|snap)$
diff --git a/yarn-project/package.json b/yarn-project/package.json
index 4d50aaa7af2..70f765df28f 100644
--- a/yarn-project/package.json
+++ b/yarn-project/package.json
@@ -10,7 +10,7 @@
     "formatting:fix": "FORCE_COLOR=true yarn workspaces foreach -p -v run formatting:fix",
     "lint": "yarn eslint --cache --ignore-pattern l1-artifacts .",
     "format": "yarn prettier --cache -w .",
-    "test": "FORCE_COLOR=true yarn workspaces foreach --exclude @aztec/aztec3-packages --exclude @aztec/end-to-end @aztec/prover-client -p -v run test",
+    "test": "FORCE_COLOR=true yarn workspaces foreach --exclude @aztec/aztec3-packages --exclude @aztec/end-to-end --exclude @aztec/prover-client -p -v run test",
     "build": "FORCE_COLOR=true yarn workspaces foreach --parallel --topological-dev --verbose --exclude @aztec/aztec3-packages --exclude @aztec/docs run build",
     "build:fast": "yarn generate && tsc -b",
     "build:dev": "./watch.sh",
diff --git a/yarn-project/prover-client/Dockerfile.test b/yarn-project/prover-client/Dockerfile.test
index a2c5e3ee3af..ce65510209b 100644
--- a/yarn-project/prover-client/Dockerfile.test
+++ b/yarn-project/prover-client/Dockerfile.test
@@ -32,7 +32,7 @@ RUN ln -s /usr/src/yarn-project/node_modules /usr/src/node_modules
 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
 
 RUN ./bootstrap.sh
-RUN cd prover-client && yarn prepare:check && yarn formatting && yarn test
+RUN cd prover-client && LOG_LEVEL=verbose yarn test
 
 # Avoid pushing some huge container back to ecr.
 FROM scratch
diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json
index 1e3b55e2770..a831caf6783 100644
--- a/yarn-project/prover-client/package.json
+++ b/yarn-project/prover-client/package.json
@@ -20,7 +20,7 @@
     "formatting": "run -T prettier --check ./src && run -T eslint ./src",
     "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src",
     "bb": "node --no-warnings ./dest/bb/index.js",
-    "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests"
+    "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests"
   },
   "inherits": [
     "../package.common.json"

From 0a049bba4bab5b03f3ec22968057bbd6053a95e6 Mon Sep 17 00:00:00 2001
From: PhilWindle
Date: Fri, 12 Apr 2024 10:06:20 +0000
Subject: [PATCH 38/41] Further debugging

---
 yarn-project/simulator/src/simulator/acvm_native.ts | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts
index c77d304eccd..7209d9f882a 100644
--- a/yarn-project/simulator/src/simulator/acvm_native.ts
+++ b/yarn-project/simulator/src/simulator/acvm_native.ts
@@ -97,6 +97,8 @@ export async function executeNativeCircuit(
     'output-witness',
   ];
 
+  logger.info(`Calling ACVM with ${args.join(' ')}`);
+
   const processPromise = new Promise((resolve, reject) => {
     let outputWitness = Buffer.alloc(0);
     let errorBuffer = Buffer.alloc(0);
@@ -111,6 +113,7 @@ export async function executeNativeCircuit(
       if (code === 0) {
         resolve(outputWitness.toString('utf-8'));
       } else {
+        logger.error(`From ACVM: ${errorBuffer.toString('utf-8')}`);
         reject(errorBuffer.toString('utf-8'));
       }
     });

From dcf6755bc6eb6204dfb37b5bb8d1210718986307 Mon Sep 17 00:00:00 2001
From: PhilWindle
Date: Fri, 12 Apr 2024 10:38:02 +0000
Subject: [PATCH 39/41] More debugging

---
 yarn-project/prover-client/Dockerfile.test |  2 +-
 .../simulator/src/simulator/acvm_native.ts | 22 ++++++++++++++++++-
 2 files changed, 22 insertions(+), 2 deletions(-)

diff --git a/yarn-project/prover-client/Dockerfile.test b/yarn-project/prover-client/Dockerfile.test
index ce65510209b..45895a749e8 100644
--- a/yarn-project/prover-client/Dockerfile.test
+++ b/yarn-project/prover-client/Dockerfile.test
@@ -32,7 +32,7 @@ RUN ln -s /usr/src/yarn-project/node_modules /usr/src/node_modules
 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
 
 RUN ./bootstrap.sh
-RUN cd prover-client && LOG_LEVEL=verbose yarn test
+RUN cd prover-client && LOG_LEVEL=verbose ACVM_WORKING_DIRECTORY='/tmp/acvm' BB_WORKING_DIRECTORY='/tmp/bb' yarn test
 
 # Avoid pushing some huge container back to ecr.
 FROM scratch
diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts
index 7209d9f882a..64152a35776 100644
--- a/yarn-project/simulator/src/simulator/acvm_native.ts
+++ b/yarn-project/simulator/src/simulator/acvm_native.ts
@@ -97,7 +97,7 @@ export async function executeNativeCircuit(
     'output-witness',
   ];
 
-  logger.info(`Calling ACVM with ${args.join(' ')}`);
+  logger.debug(`Calling ACVM with ${args.join(' ')}`);
 
   const processPromise = new Promise((resolve, reject) => {
     let outputWitness = Buffer.alloc(0);
     let errorBuffer = Buffer.alloc(0);
@@ -121,6 +121,26 @@ export async function executeNativeCircuit(
 
   const duration = new Timer();
   const output = await processPromise;
+
+  const lsPromise = new Promise(resolve => {
+    let outputWitness = Buffer.alloc(0);
+    let errorBuffer = Buffer.alloc(0);
+    const acvm = proc.spawn('ls', ['-lh', `${workingDirectory}`]);
+    acvm.stdout.on('data', data => {
+      outputWitness = Buffer.concat([outputWitness, data]);
+      logger.info(`From LS at ${workingDirectory}: ${outputWitness.toString('utf-8')}`);
+    });
+    acvm.stderr.on('data', data => {
+      errorBuffer = Buffer.concat([errorBuffer, data]);
+      logger.info(`Error LS at ${workingDirectory}: ${errorBuffer.toString('utf-8')}`);
+    });
+    acvm.on('close', _ => {
+      resolve();
+    });
+  });
+
+  await lsPromise;
+
   if (outputFilename) {
     const outputWitnessFileName = `${workingDirectory}/output-witness.gz`;
     try {

From 460c4e49eed0b6004d365458adb26b10c8b6306c Mon Sep 17 00:00:00 2001
From: PhilWindle
Date: Fri, 12 Apr 2024 11:43:35 +0000
Subject: [PATCH 40/41] Update build manifest dependencies

---
 build_manifest.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/build_manifest.yml b/build_manifest.yml
index 512375a7c0e..afba6e3820f 100644
--- a/build_manifest.yml
+++ b/build_manifest.yml
@@ -160,6 +160,7 @@ yarn-project-test:
     - l1-contracts
     - noir-projects
     - barretenberg-x86_64-linux-clang
+    - noir
 
 # Runs all prover-client checks and tests.
 prover-client-test:
@@ -175,6 +176,7 @@ prover-client-test:
     - l1-contracts
     - noir-projects
     - barretenberg-x86_64-linux-clang
+    - noir
 
 # Builds all of yarn-project, with all developer dependencies.
 # Creates a runnable container used to run tests and formatting checks.

From f28abe817d2155d5a46c79c30c89787e822620cd Mon Sep 17 00:00:00 2001
From: PhilWindle
Date: Fri, 12 Apr 2024 12:06:08 +0000
Subject: [PATCH 41/41] Cleanup

---
 .../src/prover/bb_prover.test.ts           |  7 +++--
 .../prover-client/src/prover/bb_prover.ts  |  1 -
 .../simulator/src/simulator/acvm_native.ts | 30 +------------------
 3 files changed, 6 insertions(+), 32 deletions(-)

diff --git a/yarn-project/prover-client/src/prover/bb_prover.test.ts b/yarn-project/prover-client/src/prover/bb_prover.test.ts
index 8fc243b50c3..ad8ddd160b0 100644
--- a/yarn-project/prover-client/src/prover/bb_prover.test.ts
+++ b/yarn-project/prover-client/src/prover/bb_prover.test.ts
@@ -49,7 +49,6 @@ describe('prover/bb_prover', () => {
 
   afterAll(async () => {
     if (directoryToCleanup) {
-      logger.info(`Cleaning up ${directoryToCleanup}`);
       await fs.rm(directoryToCleanup, { recursive: true, force: true });
     }
   }, 5000);
@@ -63,7 +62,11 @@ describe('prover/bb_prover', () => {
       baseRollupInputs.push(await buildBaseRollupInput(tx, globalVariables, builderDb));
     }
     logger.verbose('Proving base rollups');
-    await Promise.all(baseRollupInputs.map(inputs => prover.getBaseRollupProof(inputs)));
+    const proofOutputs = await Promise.all(baseRollupInputs.map(inputs => prover.getBaseRollupProof(inputs)));
+    logger.verbose('Verifying base rollups');
+    await expect(
+      Promise.all(proofOutputs.map(output => prover.verifyProof('BaseRollupArtifact', output[1]))),
+    ).resolves.not.toThrow();
   }, 600_000);
 
   it('proves all circuits', async () => {
diff --git a/yarn-project/prover-client/src/prover/bb_prover.ts b/yarn-project/prover-client/src/prover/bb_prover.ts
index 49a00744825..29105d370ca 100644
--- a/yarn-project/prover-client/src/prover/bb_prover.ts
+++ b/yarn-project/prover-client/src/prover/bb_prover.ts
@@ -175,7 +175,6 @@ export class BBNativeRollupProver implements CircuitProver {
   public async createProof(witnessMap: WitnessMap, circuitType: ServerProtocolArtifact): Promise<[WitnessMap, Proof]> {
     // Create random directory to be used for temp files
     const bbWorkingDirectory = `${this.config.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`;
-    logger.info(`Creating directory ${bbWorkingDirectory}`);
     await fs.mkdir(bbWorkingDirectory, { recursive: true });
 
     await fs.access(bbWorkingDirectory);
diff --git a/yarn-project/simulator/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts
index 64152a35776..070e77ad114 100644
--- a/yarn-project/simulator/src/simulator/acvm_native.ts
+++ b/yarn-project/simulator/src/simulator/acvm_native.ts
@@ -121,35 +121,9 @@ export async function executeNativeCircuit(
 
   const duration = new Timer();
   const output = await processPromise;
-
-  const lsPromise = new Promise(resolve => {
-    let outputWitness = Buffer.alloc(0);
-    let errorBuffer = Buffer.alloc(0);
-    const acvm = proc.spawn('ls', ['-lh', `${workingDirectory}`]);
-    acvm.stdout.on('data', data => {
-      outputWitness = Buffer.concat([outputWitness, data]);
-      logger.info(`From LS at ${workingDirectory}: ${outputWitness.toString('utf-8')}`);
-    });
-    acvm.stderr.on('data', data => {
-      errorBuffer = Buffer.concat([errorBuffer, data]);
-      logger.info(`Error LS at ${workingDirectory}: ${errorBuffer.toString('utf-8')}`);
-    });
-    acvm.on('close', _ => {
-      resolve();
-    });
-  });
-
-  await lsPromise;
-
   if (outputFilename) {
     const outputWitnessFileName = `${workingDirectory}/output-witness.gz`;
-    try {
-      await fs.access(outputWitnessFileName);
-    } catch (error) {
-      logger.error(`Output witness not present at ${outputWitnessFileName}`);
-      return { status: ACVM_RESULT.FAILURE, reason: `Output witness not present at ${outputWitnessFileName}` };
-    }
-    await fs.copyFile(`${workingDirectory}/output-witness.gz`, outputFilename);
+    await fs.copyFile(outputWitnessFileName, outputFilename);
   }
   const witness = parseIntoWitnessMap(output);
   return { status: ACVM_RESULT.SUCCESS, witness, duration: duration.ms() };
@@ -169,13 +143,11 @@ export class NativeACVMSimulator implements SimulationProvider {
 
     // Provide a unique working directory so we don't get clashes with parallel executions
     const directory = `${this.workingDirectory}/${randomBytes(8).toString('hex')}`;
-    logger.error(`Creating directory ${directory}`);
     await fs.mkdir(directory, { recursive: true });
 
     // Execute the circuit
     const result = await executeNativeCircuit(input, decodedBytecode, directory, this.pathToAcvm, this.witnessFilename);
 
-    logger.error(`Removing directory ${directory}`);
     await fs.rm(directory, { force: true, recursive: true });
 
     if (result.status == ACVM_RESULT.FAILURE) {