From acc1c8f7f7f79e751053e267bd4d29c77b251f48 Mon Sep 17 00:00:00 2001 From: spypsy Date: Thu, 20 Jun 2024 18:53:15 +0100 Subject: [PATCH 01/21] chore: 1st attempt at automatic devnet deployment --- .github/workflows/devnet-deploys.yml | 69 + .gitmodules | 4 + barretenberg/cpp/pil/avm/kernel.pil | 8 +- barretenberg/cpp/scripts/compile_avm.sh | 21 +- barretenberg/cpp/scripts/rebuild_avm.sh | 11 + barretenberg/cpp/src/barretenberg/bb/main.cpp | 76 +- .../relations/generated/avm/alu.hpp | 406 ++-- .../relations/generated/avm/binary.hpp | 28 +- .../relations/generated/avm/conversion.hpp | 2 +- .../relations/generated/avm/declare_views.hpp | 8 +- .../relations/generated/avm/keccakf1600.hpp | 2 +- .../relations/generated/avm/kernel.hpp | 62 +- .../relations/generated/avm/main.hpp | 577 ++--- .../relations/generated/avm/mem.hpp | 84 +- .../relations/generated/avm/pedersen.hpp | 2 +- .../relations/generated/avm/poseidon2.hpp | 2 +- .../relations/generated/avm/sha256.hpp | 2 +- .../stdlib_circuit_builders/mock_circuits.hpp | 1 - .../vm/generated/avm_circuit_builder.cpp | 41 +- .../vm/generated/avm_circuit_builder.hpp | 16 +- .../barretenberg/vm/generated/avm_flavor.hpp | 76 +- .../barretenberg/vm/generated/avm_prover.cpp | 22 +- .../vm/generated/avm_verifier.cpp | 16 +- bb-pilcom/.gitignore | 1 + bb-pilcom/Cargo.lock | 1880 +++++++++++++++++ bb-pilcom/Cargo.toml | 28 + bb-pilcom/bb-pil-backend/Cargo.toml | 16 + .../bb-pil-backend/src/circuit_builder.rs | 374 ++++ .../bb-pil-backend/src/composer_builder.rs | 210 ++ bb-pilcom/bb-pil-backend/src/file_writer.rs | 58 + .../bb-pil-backend/src/flavor_builder.rs | 629 ++++++ bb-pilcom/bb-pil-backend/src/lib.rs | 11 + .../bb-pil-backend/src/lookup_builder.rs | 369 ++++ .../bb-pil-backend/src/permutation_builder.rs | 254 +++ .../bb-pil-backend/src/prover_builder.rs | 331 +++ .../bb-pil-backend/src/relation_builder.rs | 562 +++++ bb-pilcom/bb-pil-backend/src/utils.rs | 145 ++ .../bb-pil-backend/src/verifier_builder.rs | 286 +++ 
bb-pilcom/bb-pil-backend/src/vm_builder.rs | 236 +++ bb-pilcom/bootstrap.sh | 3 + bb-pilcom/cli/Cargo.toml | 26 + bb-pilcom/cli/README.md | 3 + bb-pilcom/cli/src/main.rs | 52 + bb-pilcom/powdr | 1 + docs/docs/migration_notes.md | 13 +- iac/mainnet-fork/Dockerfile | 18 - iac/mainnet-fork/Earthfile | 29 + iac/mainnet-fork/nginx/nginx.conf | 1 + .../aztec/src/context/private_context.nr | 2 +- .../aztec/src/encrypted_logs/incoming_body.nr | 2 +- .../aztec-nr/aztec/src/note/lifecycle.nr | 10 +- .../aztec-nr/aztec/src/note/utils.nr | 93 +- .../src/test/helpers/test_environment.nr | 4 +- .../crates/types/src/hash.nr | 19 +- yarn-project/Earthfile | 34 + .../aztec-node/src/aztec-node/server.ts | 8 +- yarn-project/aztec/terraform/node/main.tf | 14 +- .../aztec/terraform/node/variables.tf | 5 +- yarn-project/end-to-end/Earthfile | 3 + ...etwork.test.ts => e2e_p2p_network.test.ts} | 164 +- yarn-project/p2p-bootstrap/terraform/main.tf | 2 +- .../p2p-bootstrap/terraform/variables.tf | 8 +- yarn-project/p2p/src/client/index.ts | 10 +- .../p2p/src/client/p2p_client.test.ts | 1 - yarn-project/p2p/src/client/p2p_client.ts | 3 +- .../p2p/src/service/discV5_service.ts | 39 +- .../p2p/src/service/discv5_service.test.ts | 16 +- yarn-project/p2p/src/service/dummy_service.ts | 11 +- .../p2p/src/service/known_txs.test.ts | 42 - yarn-project/p2p/src/service/known_txs.ts | 56 - .../p2p/src/service/libp2p_service.ts | 153 +- yarn-project/p2p/src/service/peer_manager.ts | 199 +- yarn-project/p2p/src/service/service.ts | 21 +- 73 files changed, 6936 insertions(+), 1055 deletions(-) create mode 100644 .github/workflows/devnet-deploys.yml create mode 100755 barretenberg/cpp/scripts/rebuild_avm.sh create mode 100644 bb-pilcom/.gitignore create mode 100644 bb-pilcom/Cargo.lock create mode 100644 bb-pilcom/Cargo.toml create mode 100644 bb-pilcom/bb-pil-backend/Cargo.toml create mode 100644 bb-pilcom/bb-pil-backend/src/circuit_builder.rs create mode 100644 
bb-pilcom/bb-pil-backend/src/composer_builder.rs create mode 100644 bb-pilcom/bb-pil-backend/src/file_writer.rs create mode 100644 bb-pilcom/bb-pil-backend/src/flavor_builder.rs create mode 100644 bb-pilcom/bb-pil-backend/src/lib.rs create mode 100644 bb-pilcom/bb-pil-backend/src/lookup_builder.rs create mode 100644 bb-pilcom/bb-pil-backend/src/permutation_builder.rs create mode 100644 bb-pilcom/bb-pil-backend/src/prover_builder.rs create mode 100644 bb-pilcom/bb-pil-backend/src/relation_builder.rs create mode 100644 bb-pilcom/bb-pil-backend/src/utils.rs create mode 100644 bb-pilcom/bb-pil-backend/src/verifier_builder.rs create mode 100644 bb-pilcom/bb-pil-backend/src/vm_builder.rs create mode 100755 bb-pilcom/bootstrap.sh create mode 100644 bb-pilcom/cli/Cargo.toml create mode 100644 bb-pilcom/cli/README.md create mode 100644 bb-pilcom/cli/src/main.rs create mode 160000 bb-pilcom/powdr delete mode 100644 iac/mainnet-fork/Dockerfile create mode 100644 iac/mainnet-fork/Earthfile rename yarn-project/end-to-end/src/{flakey_e2e_p2p_network.test.ts => e2e_p2p_network.test.ts} (52%) delete mode 100644 yarn-project/p2p/src/service/known_txs.test.ts delete mode 100644 yarn-project/p2p/src/service/known_txs.ts diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml new file mode 100644 index 00000000000..a2b09291c1e --- /dev/null +++ b/.github/workflows/devnet-deploys.yml @@ -0,0 +1,69 @@ +name: Deploy to devnet +on: + push: + branches: [devnet] + +env: + DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} + GIT_COMMIT: ${{ github.sha }} + # TF Vars + TF_VAR_DOCKERHUB_ACCOUNT: aztecprotocol + TF_VAR_CHAIN_ID: 31337 + TF_VAR_BOOTNODE_1_PRIVATE_KEY: ${{ secrets.BOOTNODE_1_PRIVATE_KEY }} + TF_VAR_BOOTNODE_2_PRIVATE_KEY: ${{ secrets.BOOTNODE_2_PRIVATE_KEY }} + TF_VAR_SEQ_1_PUBLISHER_PRIVATE_KEY: ${{ secrets.SEQ_1_PUBLISHER_PRIVATE_KEY }} + TF_VAR_SEQ_2_PUBLISHER_PRIVATE_KEY: ${{ secrets.SEQ_2_PUBLISHER_PRIVATE_KEY }} + TF_VAR_DEPLOY_TAG: 
devnet + TF_VAR_API_KEY: ${{ secrets.FORK_API_KEY }} + +jobs: + setup: + uses: ./.github/workflows/setup-runner.yml + with: + username: master + runner_type: builder-x86 + secrets: inherit + + build: + runs-on: ${{ github.actor }}-x86 + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + with: + dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" + concurrency_key: build-release-artifacts-${{ github.actor }} + - name: "Build & Push images" + timeout-minutes: 40 + # Run the build steps for each image with version and arch, push to dockerhub + run: | + earthly-ci --no-output --push ./yarn-project+export-aztec-arch --DIST_TAG=devnet + + terraform_deploy: + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: hashicorp/setup-terraform@v3 + with: + terraform_version: 1.7.5 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + + - name: Deploy Bootstrap Nodes + working-directory: ./yarn-project/aztec/terraform/node + run: | + terraform init -input=false -backend-config="key=devnet/aztec-node" + terraform apply -input=false -auto-approve + + - name: Deploy Aztec Nodes + working-directory: ./yarn-project/aztec/terraform/node + run: | + terraform init -input=false -backend-config="key=devnet/aztec-node" + terraform apply -input=false -auto-approve diff --git a/.gitmodules b/.gitmodules index f643e34f4ca..9a73cfa279a 100644 --- a/.gitmodules +++ b/.gitmodules @@ -13,3 +13,7 @@ [submodule "barretenberg/sol/lib/openzeppelin-contracts"] path = barretenberg/sol/lib/openzeppelin-contracts url = https://github.com/OpenZeppelin/openzeppelin-contracts +[submodule "bb-pilcom/powdr"] + path = bb-pilcom/powdr + url = https://github.com/AztecProtocol/powdr + branch = 
avm-minimal diff --git a/barretenberg/cpp/pil/avm/kernel.pil b/barretenberg/cpp/pil/avm/kernel.pil index f7d402826eb..dff09f08d77 100644 --- a/barretenberg/cpp/pil/avm/kernel.pil +++ b/barretenberg/cpp/pil/avm/kernel.pil @@ -2,11 +2,11 @@ include "main.pil"; include "constants.pil"; namespace kernel(256); - pol public(/*idx=*/0) kernel_inputs; + pol public kernel_inputs; - pol public(/*idx=*/1) kernel_value_out; - pol public(/*idx=*/2) kernel_side_effect_out; - pol public(/*idx=*/3) kernel_metadata_out; + pol public kernel_value_out; + pol public kernel_side_effect_out; + pol public kernel_metadata_out; // TODO(https://github.com/AztecProtocol/aztec-packages/issues/6463): just use one col for both of these pol commit kernel_in_offset; diff --git a/barretenberg/cpp/scripts/compile_avm.sh b/barretenberg/cpp/scripts/compile_avm.sh index 01f422e229f..6ecc8817ab2 100755 --- a/barretenberg/cpp/scripts/compile_avm.sh +++ b/barretenberg/cpp/scripts/compile_avm.sh @@ -1,23 +1,4 @@ #!/bin/bash -use_zsh_alias() { - # Run Zsh command, source .zshrc, and then execute the alias - zsh -i -c "$1" -} # Compile -use_zsh_alias "bb_pil pil/avm/avm_main.pil --name Avm" - -# Format generated folders -root_dir="src" - -# Find all directories named 'generate' under the specified root directory -find "$root_dir" -type d -name 'generate' | while read dir_path; do - echo "Processing directory: $dir_path" - - # Find all C/C++ source files in these directories and format them - find "$dir_path" -type f \( -iname '*.hpp' -o -iname '*.cpp' \) -exec clang-format -i {} + -done - - -# Build vm tests -cmake --build --preset clang16 --target vm_tests \ No newline at end of file +../../bb-pilcom/target/release/bb_pil pil/avm/main.pil --name Avm diff --git a/barretenberg/cpp/scripts/rebuild_avm.sh b/barretenberg/cpp/scripts/rebuild_avm.sh new file mode 100755 index 00000000000..b6fa11dfc23 --- /dev/null +++ b/barretenberg/cpp/scripts/rebuild_avm.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +# Rebuild 
+./scripts/compile_avm.sh + +# Format generated folders +git add **/generated/* +./format.sh staged + +# Build vm tests +cmake --build --preset clang16 --target vm_tests \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index d62b3326310..114e2f0caf2 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -819,7 +819,7 @@ template bool verify_honk(const std::string& proof_path, } /** - * @brief Writes a verification key for an ACIR circuit to a file + * @brief Writes a Honk verification key for an ACIR circuit to a file * * Communication: * - stdout: The verification key is written to stdout as a byte array @@ -965,6 +965,74 @@ void prove_output_all(const std::string& bytecodePath, const std::string& witnes vinfo("vk as fields written to: ", vkFieldsOutputPath); } +/** + * @brief Creates a Honk proof for an ACIR circuit, outputs the proof and verification key in binary and 'field' format + * + * Communication: + * - Filesystem: The proof is written to the path specified by outputPath + * + * @param bytecodePath Path to the file containing the serialized circuit + * @param witnessPath Path to the file containing the serialized witness + * @param outputPath Directory into which we write the proof and verification key data + */ +template +void prove_honk_output_all(const std::string& bytecodePath, + const std::string& witnessPath, + const std::string& outputPath) +{ + using Builder = Flavor::CircuitBuilder; + using Prover = UltraProver_; + using VerificationKey = Flavor::VerificationKey; + + bool honk_recursion = false; + if constexpr (IsAnyOf) { + honk_recursion = true; + } + + auto constraint_system = get_constraint_system(bytecodePath, honk_recursion); + auto witness = get_witness(witnessPath); + + auto builder = acir_format::create_circuit(constraint_system, 0, witness, honk_recursion); + + auto num_extra_gates = 
builder.get_num_gates_added_to_ensure_nonzero_polynomials(); + size_t srs_size = builder.get_circuit_subgroup_size(builder.get_total_circuit_size() + num_extra_gates); + init_bn254_crs(srs_size); + + // Construct Honk proof + Prover prover{ builder }; + auto proof = prover.construct_proof(); + + // We have been given a directory, we will write the proof and verification key + // into the directory in both 'binary' and 'fields' formats + std::string vkOutputPath = outputPath + "/vk"; + std::string proofPath = outputPath + "/proof"; + std::string vkFieldsOutputPath = outputPath + "/vk_fields.json"; + std::string proofFieldsPath = outputPath + "/proof_fields.json"; + + VerificationKey vk( + prover.instance->proving_key); // uses a partial form of the proving key which only has precomputed entities + + // Write the 'binary' proof + write_file(proofPath, to_buffer(proof)); + vinfo("binary proof written to: ", proofPath); + + // Write the proof as fields + std::string proofJson = to_json(proof); + write_file(proofFieldsPath, { proofJson.begin(), proofJson.end() }); + vinfo("proof as fields written to: ", proofFieldsPath); + + // Write the vk as binary + auto serialized_vk = to_buffer(vk); + write_file(vkOutputPath, serialized_vk); + vinfo("vk written to: ", vkOutputPath); + + // Write the vk as fields + std::vector vk_data = vk.to_field_elements(); + auto vk_json = honk_vk_to_json(vk_data); + write_file(vkFieldsOutputPath, { vk_json.begin(), vk_json.end() }); + vinfo("vk as fields written to: ", vkFieldsOutputPath); +} + bool flag_present(std::vector& args, const std::string& flag) { return std::find(args.begin(), args.end(), flag) != args.end(); @@ -1027,6 +1095,12 @@ int main(int argc, char* argv[]) } else if (command == "prove_output_all") { std::string output_path = get_option(args, "-o", "./proofs"); prove_output_all(bytecode_path, witness_path, output_path); + } else if (command == "prove_ultra_honk_output_all") { + std::string output_path = get_option(args, "-o", 
"./proofs"); + prove_honk_output_all(bytecode_path, witness_path, output_path); + } else if (command == "prove_mega_honk_output_all") { + std::string output_path = get_option(args, "-o", "./proofs"); + prove_honk_output_all(bytecode_path, witness_path, output_path); } else if (command == "client_ivc_prove_output_all") { std::string output_path = get_option(args, "-o", "./proofs"); client_ivc_prove_output_all(bytecode_path, witness_path, output_path); diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/alu.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/alu.hpp index eda58900a6e..ff431393770 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/alu.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/alu.hpp @@ -326,7 +326,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(3); - auto tmp = (alu_cf * (-alu_cf + FF(1))); + auto tmp = ((alu_cf * (-alu_cf + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<3>(evals) += tmp; } @@ -334,7 +334,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(4); - auto tmp = (alu_ff_tag * (-alu_ff_tag + FF(1))); + auto tmp = ((alu_ff_tag * (-alu_ff_tag + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<4>(evals) += tmp; } @@ -342,7 +342,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(5); - auto tmp = (alu_u8_tag * (-alu_u8_tag + FF(1))); + auto tmp = ((alu_u8_tag * (-alu_u8_tag + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<5>(evals) += tmp; } @@ -350,7 +350,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(6); - auto tmp = (alu_u16_tag * (-alu_u16_tag + FF(1))); + auto tmp = ((alu_u16_tag * (-alu_u16_tag + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<6>(evals) += tmp; } @@ -358,7 +358,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(7); - auto tmp = (alu_u32_tag * (-alu_u32_tag + FF(1))); + auto tmp = ((alu_u32_tag * (-alu_u32_tag + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<7>(evals) += tmp; } @@ -366,7 +366,7 @@ template class aluImpl { { 
Avm_DECLARE_VIEWS(8); - auto tmp = (alu_u64_tag * (-alu_u64_tag + FF(1))); + auto tmp = ((alu_u64_tag * (-alu_u64_tag + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<8>(evals) += tmp; } @@ -374,7 +374,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(9); - auto tmp = (alu_u128_tag * (-alu_u128_tag + FF(1))); + auto tmp = ((alu_u128_tag * (-alu_u128_tag + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<9>(evals) += tmp; } @@ -382,9 +382,10 @@ template class aluImpl { { Avm_DECLARE_VIEWS(10); - auto tmp = - (alu_sel_alu * - ((((((alu_ff_tag + alu_u8_tag) + alu_u16_tag) + alu_u32_tag) + alu_u64_tag) + alu_u128_tag) - FF(1))); + auto tmp = ((alu_sel_alu * + ((((((alu_ff_tag + alu_u8_tag) + alu_u16_tag) + alu_u32_tag) + alu_u64_tag) + alu_u128_tag) - + FF(1))) - + FF(0)); tmp *= scaling_factor; std::get<10>(evals) += tmp; } @@ -403,17 +404,18 @@ template class aluImpl { { Avm_DECLARE_VIEWS(12); - auto tmp = - (((alu_op_add + alu_op_sub) * ((((((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - (alu_u16_r2 * FF(281474976710656UL))) + - (alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - (alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + - (alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + - (alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) - - alu_ia) + - (alu_ff_tag * alu_ic))) + - ((alu_op_add - alu_op_sub) * ((alu_cf * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL })) - alu_ib))); + auto tmp = ((((alu_op_add + alu_op_sub) * + ((((((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))) + + (alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + (alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + + (alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + + (alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) - + alu_ia) + + (alu_ff_tag * alu_ic))) + + ((alu_op_add - 
alu_op_sub) * ((alu_cf * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL })) - alu_ib))) - + FF(0)); tmp *= scaling_factor; std::get<12>(evals) += tmp; } @@ -421,22 +423,23 @@ template class aluImpl { { Avm_DECLARE_VIEWS(13); - auto tmp = (((alu_op_add + alu_op_sub) * - (((((((alu_u8_tag * alu_u8_r0) + (alu_u16_tag * (alu_u8_r0 + (alu_u8_r1 * FF(256))))) + - (alu_u32_tag * ((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))))) + - (alu_u64_tag * ((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - (alu_u16_r2 * FF(281474976710656UL))))) + - (alu_u128_tag * ((((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - (alu_u16_r2 * FF(281474976710656UL))) + - (alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - (alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + - (alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + - (alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))))) + - (alu_ff_tag * alu_ia)) - - alu_ic)) + - ((alu_ff_tag * (alu_op_add - alu_op_sub)) * alu_ib)); + auto tmp = ((((alu_op_add + alu_op_sub) * + (((((((alu_u8_tag * alu_u8_r0) + (alu_u16_tag * (alu_u8_r0 + (alu_u8_r1 * FF(256))))) + + (alu_u32_tag * ((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))))) + + (alu_u64_tag * ((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))))) + + (alu_u128_tag * ((((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))) + + (alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + (alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + + (alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + + (alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))))) + + (alu_ff_tag * alu_ia)) - + alu_ic)) + + ((alu_ff_tag * (alu_op_add - 
alu_op_sub)) * alu_ib)) - + FF(0)); tmp *= scaling_factor; std::get<13>(evals) += tmp; } @@ -444,7 +447,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(14); - auto tmp = ((alu_ff_tag * alu_op_mul) * ((alu_ia * alu_ib) - alu_ic)); + auto tmp = (((alu_ff_tag * alu_op_mul) * ((alu_ia * alu_ib) - alu_ic)) - FF(0)); tmp *= scaling_factor; std::get<14>(evals) += tmp; } @@ -452,15 +455,16 @@ template class aluImpl { { Avm_DECLARE_VIEWS(15); - auto tmp = ((((-alu_ff_tag + FF(1)) - alu_u128_tag) * alu_op_mul) * - (((((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - (alu_u16_r2 * FF(281474976710656UL))) + - (alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - (alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + - (alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + - (alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) - - (alu_ia * alu_ib))); + auto tmp = (((((-alu_ff_tag + FF(1)) - alu_u128_tag) * alu_op_mul) * + (((((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))) + + (alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + (alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + + (alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + + (alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) - + (alu_ia * alu_ib))) - + FF(0)); tmp *= scaling_factor; std::get<15>(evals) += tmp; } @@ -469,12 +473,13 @@ template class aluImpl { Avm_DECLARE_VIEWS(16); auto tmp = - (alu_op_mul * (((((alu_u8_tag * alu_u8_r0) + (alu_u16_tag * (alu_u8_r0 + (alu_u8_r1 * FF(256))))) + - (alu_u32_tag * ((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))))) + - (alu_u64_tag * ((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - (alu_u16_r2 * FF(281474976710656UL))))) - - (((-alu_ff_tag + FF(1)) - alu_u128_tag) * alu_ic))); + 
((alu_op_mul * (((((alu_u8_tag * alu_u8_r0) + (alu_u16_tag * (alu_u8_r0 + (alu_u8_r1 * FF(256))))) + + (alu_u32_tag * ((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))))) + + (alu_u64_tag * ((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))))) - + (((-alu_ff_tag + FF(1)) - alu_u128_tag) * alu_ic))) - + FF(0)); tmp *= scaling_factor; std::get<16>(evals) += tmp; } @@ -482,14 +487,15 @@ template class aluImpl { { Avm_DECLARE_VIEWS(17); - auto tmp = ((alu_u128_tag * alu_op_mul) * - ((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - (alu_u16_r2 * FF(281474976710656UL))) + - ((((alu_u16_r3 + (alu_u16_r4 * FF(65536))) + (alu_u16_r5 * FF(4294967296UL))) + - (alu_u16_r6 * FF(281474976710656UL))) * - FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) - - alu_ia)); + auto tmp = (((alu_u128_tag * alu_op_mul) * + ((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))) + + ((((alu_u16_r3 + (alu_u16_r4 * FF(65536))) + (alu_u16_r5 * FF(4294967296UL))) + + (alu_u16_r6 * FF(281474976710656UL))) * + FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) - + alu_ia)) - + FF(0)); tmp *= scaling_factor; std::get<17>(evals) += tmp; } @@ -498,14 +504,15 @@ template class aluImpl { Avm_DECLARE_VIEWS(18); auto tmp = - ((alu_u128_tag * alu_op_mul) * - ((((((alu_u8_r0_shift + (alu_u8_r1_shift * FF(256))) + (alu_u16_r0_shift * FF(65536))) + - (alu_u16_r1_shift * FF(4294967296UL))) + - (alu_u16_r2_shift * FF(281474976710656UL))) + - ((((alu_u16_r3_shift + (alu_u16_r4_shift * FF(65536))) + (alu_u16_r5_shift * FF(4294967296UL))) + - (alu_u16_r6_shift * FF(281474976710656UL))) * - FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) - - alu_ib)); + (((alu_u128_tag * alu_op_mul) * + ((((((alu_u8_r0_shift + (alu_u8_r1_shift * FF(256))) + (alu_u16_r0_shift * FF(65536))) + + 
(alu_u16_r1_shift * FF(4294967296UL))) + + (alu_u16_r2_shift * FF(281474976710656UL))) + + ((((alu_u16_r3_shift + (alu_u16_r4_shift * FF(65536))) + (alu_u16_r5_shift * FF(4294967296UL))) + + (alu_u16_r6_shift * FF(281474976710656UL))) * + FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) - + alu_ib)) - + FF(0)); tmp *= scaling_factor; std::get<18>(evals) += tmp; } @@ -514,21 +521,22 @@ template class aluImpl { Avm_DECLARE_VIEWS(19); auto tmp = - ((alu_u128_tag * alu_op_mul) * - ((((alu_ia * ((((alu_u8_r0_shift + (alu_u8_r1_shift * FF(256))) + (alu_u16_r0_shift * FF(65536))) + - (alu_u16_r1_shift * FF(4294967296UL))) + - (alu_u16_r2_shift * FF(281474976710656UL)))) + - ((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - (alu_u16_r2 * FF(281474976710656UL))) * - (((alu_u16_r3_shift + (alu_u16_r4_shift * FF(65536))) + (alu_u16_r5_shift * FF(4294967296UL))) + - (alu_u16_r6_shift * FF(281474976710656UL)))) * - FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) - - (((alu_cf * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })) + - (((alu_u16_r7 + (alu_u16_r8 * FF(65536))) + (alu_u16_r9 * FF(4294967296UL))) + - (alu_u16_r10 * FF(281474976710656UL)))) * - FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - - alu_ic)); + (((alu_u128_tag * alu_op_mul) * + ((((alu_ia * ((((alu_u8_r0_shift + (alu_u8_r1_shift * FF(256))) + (alu_u16_r0_shift * FF(65536))) + + (alu_u16_r1_shift * FF(4294967296UL))) + + (alu_u16_r2_shift * FF(281474976710656UL)))) + + ((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))) * + (((alu_u16_r3_shift + (alu_u16_r4_shift * FF(65536))) + (alu_u16_r5_shift * FF(4294967296UL))) + + (alu_u16_r6_shift * FF(281474976710656UL)))) * + FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) - + (((alu_cf * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })) + + (((alu_u16_r7 + (alu_u16_r8 * FF(65536))) + (alu_u16_r9 * FF(4294967296UL))) + + (alu_u16_r10 * FF(281474976710656UL)))) * + 
FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - + alu_ic)) - + FF(0)); tmp *= scaling_factor; std::get<19>(evals) += tmp; } @@ -536,7 +544,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(20); - auto tmp = (alu_op_not * alu_ff_tag); + auto tmp = ((alu_op_not * alu_ff_tag) - FF(0)); tmp *= scaling_factor; std::get<20>(evals) += tmp; } @@ -544,12 +552,12 @@ template class aluImpl { { Avm_DECLARE_VIEWS(21); - auto tmp = (alu_op_not * - ((alu_ia + alu_ic) - - ((((((alu_u8_tag * FF(256)) + (alu_u16_tag * FF(65536))) + (alu_u32_tag * FF(4294967296UL))) + - (alu_u64_tag * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - (alu_u128_tag * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - - FF(1)))); + auto tmp = ((alu_op_not * ((alu_ia + alu_ic) - ((((((alu_u8_tag * FF(256)) + (alu_u16_tag * FF(65536))) + + (alu_u32_tag * FF(4294967296UL))) + + (alu_u64_tag * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + (alu_u128_tag * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - + FF(1)))) - + FF(0)); tmp *= scaling_factor; std::get<21>(evals) += tmp; } @@ -557,7 +565,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(22); - auto tmp = ((alu_sel_cmp + alu_op_eq) * (alu_ic * (-alu_ic + FF(1)))); + auto tmp = (((alu_sel_cmp + alu_op_eq) * (alu_ic * (-alu_ic + FF(1)))) - FF(0)); tmp *= scaling_factor; std::get<22>(evals) += tmp; } @@ -566,9 +574,10 @@ template class aluImpl { Avm_DECLARE_VIEWS(23); auto tmp = - (alu_op_eq * - ((((alu_ia - alu_ib) * ((alu_ic * (-alu_op_eq_diff_inv + FF(1))) + alu_op_eq_diff_inv)) - FF(1)) + - alu_ic)); + ((alu_op_eq * + ((((alu_ia - alu_ib) * ((alu_ic * (-alu_op_eq_diff_inv + FF(1))) + alu_op_eq_diff_inv)) - FF(1)) + + alu_ic)) - + FF(0)); tmp *= scaling_factor; std::get<23>(evals) += tmp; } @@ -594,7 +603,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(26); - auto tmp = (alu_p_a_borrow * (-alu_p_a_borrow + FF(1))); + auto tmp = ((alu_p_a_borrow * (-alu_p_a_borrow + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<26>(evals) += tmp; } @@ -602,10 +611,11 @@ template class aluImpl { { 
Avm_DECLARE_VIEWS(27); - auto tmp = ((alu_p_sub_a_lo - - ((-alu_a_lo + FF(uint256_t{ 4891460686036598784UL, 2896914383306846353UL, 0UL, 0UL })) + - (alu_p_a_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL })))) * - ((alu_sel_cmp + alu_op_cast) + alu_op_div_std)); + auto tmp = (((alu_p_sub_a_lo - + ((-alu_a_lo + FF(uint256_t{ 4891460686036598784UL, 2896914383306846353UL, 0UL, 0UL })) + + (alu_p_a_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL })))) * + ((alu_sel_cmp + alu_op_cast) + alu_op_div_std)) - + FF(0)); tmp *= scaling_factor; std::get<27>(evals) += tmp; } @@ -613,10 +623,11 @@ template class aluImpl { { Avm_DECLARE_VIEWS(28); - auto tmp = ((alu_p_sub_a_hi - - ((-alu_a_hi + FF(uint256_t{ 13281191951274694749UL, 3486998266802970665UL, 0UL, 0UL })) - - alu_p_a_borrow)) * - ((alu_sel_cmp + alu_op_cast) + alu_op_div_std)); + auto tmp = (((alu_p_sub_a_hi - + ((-alu_a_hi + FF(uint256_t{ 13281191951274694749UL, 3486998266802970665UL, 0UL, 0UL })) - + alu_p_a_borrow)) * + ((alu_sel_cmp + alu_op_cast) + alu_op_div_std)) - + FF(0)); tmp *= scaling_factor; std::get<28>(evals) += tmp; } @@ -624,7 +635,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(29); - auto tmp = (alu_p_b_borrow * (-alu_p_b_borrow + FF(1))); + auto tmp = ((alu_p_b_borrow * (-alu_p_b_borrow + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<29>(evals) += tmp; } @@ -632,10 +643,11 @@ template class aluImpl { { Avm_DECLARE_VIEWS(30); - auto tmp = ((alu_p_sub_b_lo - - ((-alu_b_lo + FF(uint256_t{ 4891460686036598784UL, 2896914383306846353UL, 0UL, 0UL })) + - (alu_p_b_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL })))) * - alu_sel_cmp); + auto tmp = (((alu_p_sub_b_lo - + ((-alu_b_lo + FF(uint256_t{ 4891460686036598784UL, 2896914383306846353UL, 0UL, 0UL })) + + (alu_p_b_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL })))) * + alu_sel_cmp) - + FF(0)); tmp *= scaling_factor; std::get<30>(evals) += tmp; } @@ -643,10 +655,11 @@ template class aluImpl { { Avm_DECLARE_VIEWS(31); - auto tmp = ((alu_p_sub_b_hi - - ((-alu_b_hi + 
FF(uint256_t{ 13281191951274694749UL, 3486998266802970665UL, 0UL, 0UL })) - - alu_p_b_borrow)) * - alu_sel_cmp); + auto tmp = (((alu_p_sub_b_hi - + ((-alu_b_hi + FF(uint256_t{ 13281191951274694749UL, 3486998266802970665UL, 0UL, 0UL })) - + alu_p_b_borrow)) * + alu_sel_cmp) - + FF(0)); tmp *= scaling_factor; std::get<31>(evals) += tmp; } @@ -654,12 +667,13 @@ template class aluImpl { { Avm_DECLARE_VIEWS(32); - auto tmp = ((alu_res_lo - - (((((alu_a_lo - alu_b_lo) - FF(1)) + (alu_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) * - ((alu_op_lt * alu_ic) + ((-alu_ic + FF(1)) * alu_op_lte))) + - (((alu_b_lo - alu_a_lo) + (alu_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) * - (-((alu_op_lt * alu_ic) + ((-alu_ic + FF(1)) * alu_op_lte)) + FF(1))))) * - alu_sel_cmp); + auto tmp = (((alu_res_lo - + (((((alu_a_lo - alu_b_lo) - FF(1)) + (alu_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) * + ((alu_op_lt * alu_ic) + ((-alu_ic + FF(1)) * alu_op_lte))) + + (((alu_b_lo - alu_a_lo) + (alu_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) * + (-((alu_op_lt * alu_ic) + ((-alu_ic + FF(1)) * alu_op_lte)) + FF(1))))) * + alu_sel_cmp) - + FF(0)); tmp *= scaling_factor; std::get<32>(evals) += tmp; } @@ -668,11 +682,12 @@ template class aluImpl { Avm_DECLARE_VIEWS(33); auto tmp = - ((alu_res_hi - - ((((alu_a_hi - alu_b_hi) - alu_borrow) * ((alu_op_lt * alu_ic) + ((-alu_ic + FF(1)) * alu_op_lte))) + - (((alu_b_hi - alu_a_hi) - alu_borrow) * - (-((alu_op_lt * alu_ic) + ((-alu_ic + FF(1)) * alu_op_lte)) + FF(1))))) * - alu_sel_cmp); + (((alu_res_hi - + ((((alu_a_hi - alu_b_hi) - alu_borrow) * ((alu_op_lt * alu_ic) + ((-alu_ic + FF(1)) * alu_op_lte))) + + (((alu_b_hi - alu_a_hi) - alu_borrow) * + (-((alu_op_lt * alu_ic) + ((-alu_ic + FF(1)) * alu_op_lte)) + FF(1))))) * + alu_sel_cmp) - + FF(0)); tmp *= scaling_factor; std::get<33>(evals) += tmp; } @@ -680,7 +695,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(34); - auto tmp = (((alu_cmp_rng_ctr_shift - alu_cmp_rng_ctr) + FF(1)) * 
alu_cmp_rng_ctr); + auto tmp = ((((alu_cmp_rng_ctr_shift - alu_cmp_rng_ctr) + FF(1)) * alu_cmp_rng_ctr) - FF(0)); tmp *= scaling_factor; std::get<34>(evals) += tmp; } @@ -688,7 +703,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(35); - auto tmp = ((alu_cmp_rng_ctr_shift - FF(4)) * alu_sel_cmp); + auto tmp = (((alu_cmp_rng_ctr_shift - FF(4)) * alu_sel_cmp) - FF(0)); tmp *= scaling_factor; std::get<35>(evals) += tmp; } @@ -696,7 +711,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(36); - auto tmp = (alu_sel_rng_chk * (-alu_sel_rng_chk + FF(1))); + auto tmp = ((alu_sel_rng_chk * (-alu_sel_rng_chk + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<36>(evals) += tmp; } @@ -704,7 +719,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(37); - auto tmp = (alu_sel_rng_chk * alu_sel_cmp); + auto tmp = ((alu_sel_rng_chk * alu_sel_cmp) - FF(0)); tmp *= scaling_factor; std::get<37>(evals) += tmp; } @@ -712,9 +727,10 @@ template class aluImpl { { Avm_DECLARE_VIEWS(38); - auto tmp = ((alu_cmp_rng_ctr * - (((-alu_sel_rng_chk + FF(1)) * (-alu_op_eq_diff_inv + FF(1))) + alu_op_eq_diff_inv)) - - alu_sel_rng_chk); + auto tmp = (((alu_cmp_rng_ctr * + (((-alu_sel_rng_chk + FF(1)) * (-alu_op_eq_diff_inv + FF(1))) + alu_op_eq_diff_inv)) - + alu_sel_rng_chk) - + FF(0)); tmp *= scaling_factor; std::get<38>(evals) += tmp; } @@ -773,7 +789,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(42); - auto tmp = ((alu_a_lo_shift - alu_b_lo) * alu_sel_rng_chk_shift); + auto tmp = (((alu_a_lo_shift - alu_b_lo) * alu_sel_rng_chk_shift) - FF(0)); tmp *= scaling_factor; std::get<42>(evals) += tmp; } @@ -781,7 +797,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(43); - auto tmp = ((alu_a_hi_shift - alu_b_hi) * alu_sel_rng_chk_shift); + auto tmp = (((alu_a_hi_shift - alu_b_hi) * alu_sel_rng_chk_shift) - FF(0)); tmp *= scaling_factor; std::get<43>(evals) += tmp; } @@ -789,7 +805,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(44); - auto tmp = ((alu_b_lo_shift - alu_p_sub_a_lo) * 
alu_sel_rng_chk_shift); + auto tmp = (((alu_b_lo_shift - alu_p_sub_a_lo) * alu_sel_rng_chk_shift) - FF(0)); tmp *= scaling_factor; std::get<44>(evals) += tmp; } @@ -797,7 +813,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(45); - auto tmp = ((alu_b_hi_shift - alu_p_sub_a_hi) * alu_sel_rng_chk_shift); + auto tmp = (((alu_b_hi_shift - alu_p_sub_a_hi) * alu_sel_rng_chk_shift) - FF(0)); tmp *= scaling_factor; std::get<45>(evals) += tmp; } @@ -805,7 +821,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(46); - auto tmp = ((alu_p_sub_a_lo_shift - alu_p_sub_b_lo) * alu_sel_rng_chk_shift); + auto tmp = (((alu_p_sub_a_lo_shift - alu_p_sub_b_lo) * alu_sel_rng_chk_shift) - FF(0)); tmp *= scaling_factor; std::get<46>(evals) += tmp; } @@ -813,7 +829,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(47); - auto tmp = ((alu_p_sub_a_hi_shift - alu_p_sub_b_hi) * alu_sel_rng_chk_shift); + auto tmp = (((alu_p_sub_a_hi_shift - alu_p_sub_b_hi) * alu_sel_rng_chk_shift) - FF(0)); tmp *= scaling_factor; std::get<47>(evals) += tmp; } @@ -821,7 +837,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(48); - auto tmp = ((alu_p_sub_b_lo_shift - alu_res_lo) * alu_sel_rng_chk_shift); + auto tmp = (((alu_p_sub_b_lo_shift - alu_res_lo) * alu_sel_rng_chk_shift) - FF(0)); tmp *= scaling_factor; std::get<48>(evals) += tmp; } @@ -829,7 +845,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(49); - auto tmp = ((alu_p_sub_b_hi_shift - alu_res_hi) * alu_sel_rng_chk_shift); + auto tmp = (((alu_p_sub_b_hi_shift - alu_res_hi) * alu_sel_rng_chk_shift) - FF(0)); tmp *= scaling_factor; std::get<49>(evals) += tmp; } @@ -845,21 +861,22 @@ template class aluImpl { { Avm_DECLARE_VIEWS(51); - auto tmp = (alu_op_cast * - (((((((alu_u8_tag * alu_u8_r0) + (alu_u16_tag * (alu_u8_r0 + (alu_u8_r1 * FF(256))))) + - (alu_u32_tag * ((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))))) + - (alu_u64_tag * ((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - 
(alu_u16_r2 * FF(281474976710656UL))))) + - (alu_u128_tag * ((((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + - (alu_u16_r1 * FF(4294967296UL))) + - (alu_u16_r2 * FF(281474976710656UL))) + - (alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - (alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + - (alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + - (alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))))) + - (alu_ff_tag * alu_ia)) - - alu_ic)); + auto tmp = ((alu_op_cast * + (((((((alu_u8_tag * alu_u8_r0) + (alu_u16_tag * (alu_u8_r0 + (alu_u8_r1 * FF(256))))) + + (alu_u32_tag * ((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))))) + + (alu_u64_tag * ((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))))) + + (alu_u128_tag * ((((((((alu_u8_r0 + (alu_u8_r1 * FF(256))) + (alu_u16_r0 * FF(65536))) + + (alu_u16_r1 * FF(4294967296UL))) + + (alu_u16_r2 * FF(281474976710656UL))) + + (alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + (alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + + (alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + + (alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))))) + + (alu_ff_tag * alu_ia)) - + alu_ic)) - + FF(0)); tmp *= scaling_factor; std::get<51>(evals) += tmp; } @@ -867,7 +884,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(52); - auto tmp = (alu_op_cast * (alu_a_lo_shift - alu_p_sub_a_lo)); + auto tmp = ((alu_op_cast * (alu_a_lo_shift - alu_p_sub_a_lo)) - FF(0)); tmp *= scaling_factor; std::get<52>(evals) += tmp; } @@ -875,7 +892,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(53); - auto tmp = (alu_op_cast * (alu_a_hi_shift - alu_p_sub_a_hi)); + auto tmp = ((alu_op_cast * (alu_a_hi_shift - alu_p_sub_a_hi)) - FF(0)); tmp *= scaling_factor; std::get<53>(evals) += tmp; } @@ -883,7 +900,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(54); - 
auto tmp = (((alu_op_mul * alu_u128_tag) + alu_op_cast) * alu_sel_alu_shift); + auto tmp = ((((alu_op_mul * alu_u128_tag) + alu_op_cast) * alu_sel_alu_shift) - FF(0)); tmp *= scaling_factor; std::get<54>(evals) += tmp; } @@ -891,7 +908,8 @@ template class aluImpl { { Avm_DECLARE_VIEWS(55); - auto tmp = ((alu_shift_lt_bit_len * alu_op_shr) * (alu_a_lo - ((alu_two_pow_s - alu_b_lo) - FF(1)))); + auto tmp = + (((alu_shift_lt_bit_len * alu_op_shr) * (alu_a_lo - ((alu_two_pow_s - alu_b_lo) - FF(1)))) - FF(0)); tmp *= scaling_factor; std::get<55>(evals) += tmp; } @@ -899,7 +917,9 @@ template class aluImpl { { Avm_DECLARE_VIEWS(56); - auto tmp = ((alu_shift_lt_bit_len * alu_op_shr) * (alu_a_hi - ((alu_two_pow_t_sub_s - alu_b_hi) - FF(1)))); + auto tmp = + (((alu_shift_lt_bit_len * alu_op_shr) * (alu_a_hi - ((alu_two_pow_t_sub_s - alu_b_hi) - FF(1)))) - + FF(0)); tmp *= scaling_factor; std::get<56>(evals) += tmp; } @@ -907,7 +927,9 @@ template class aluImpl { { Avm_DECLARE_VIEWS(57); - auto tmp = ((alu_shift_lt_bit_len * alu_op_shl) * (alu_a_lo - ((alu_two_pow_t_sub_s - alu_b_lo) - FF(1)))); + auto tmp = + (((alu_shift_lt_bit_len * alu_op_shl) * (alu_a_lo - ((alu_two_pow_t_sub_s - alu_b_lo) - FF(1)))) - + FF(0)); tmp *= scaling_factor; std::get<57>(evals) += tmp; } @@ -915,7 +937,8 @@ template class aluImpl { { Avm_DECLARE_VIEWS(58); - auto tmp = ((alu_shift_lt_bit_len * alu_op_shl) * (alu_a_hi - ((alu_two_pow_s - alu_b_hi) - FF(1)))); + auto tmp = + (((alu_shift_lt_bit_len * alu_op_shl) * (alu_a_hi - ((alu_two_pow_s - alu_b_hi) - FF(1)))) - FF(0)); tmp *= scaling_factor; std::get<58>(evals) += tmp; } @@ -923,7 +946,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(59); - auto tmp = (alu_shift_lt_bit_len * (-alu_shift_lt_bit_len + FF(1))); + auto tmp = ((alu_shift_lt_bit_len * (-alu_shift_lt_bit_len + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<59>(evals) += tmp; } @@ -949,7 +972,8 @@ template class aluImpl { { Avm_DECLARE_VIEWS(61); - auto tmp = 
((alu_shift_lt_bit_len * alu_op_shr) * (((alu_b_hi * alu_two_pow_s) + alu_b_lo) - alu_ia)); + auto tmp = + (((alu_shift_lt_bit_len * alu_op_shr) * (((alu_b_hi * alu_two_pow_s) + alu_b_lo) - alu_ia)) - FF(0)); tmp *= scaling_factor; std::get<61>(evals) += tmp; } @@ -957,7 +981,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(62); - auto tmp = (alu_op_shr * (alu_ic - (alu_b_hi * alu_shift_lt_bit_len))); + auto tmp = ((alu_op_shr * (alu_ic - (alu_b_hi * alu_shift_lt_bit_len))) - FF(0)); tmp *= scaling_factor; std::get<62>(evals) += tmp; } @@ -965,7 +989,9 @@ template class aluImpl { { Avm_DECLARE_VIEWS(63); - auto tmp = ((alu_shift_lt_bit_len * alu_op_shl) * (((alu_b_hi * alu_two_pow_t_sub_s) + alu_b_lo) - alu_ia)); + auto tmp = + (((alu_shift_lt_bit_len * alu_op_shl) * (((alu_b_hi * alu_two_pow_t_sub_s) + alu_b_lo) - alu_ia)) - + FF(0)); tmp *= scaling_factor; std::get<63>(evals) += tmp; } @@ -973,7 +999,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(64); - auto tmp = (alu_op_shl * (alu_ic - ((alu_b_lo * alu_two_pow_s) * alu_shift_lt_bit_len))); + auto tmp = ((alu_op_shl * (alu_ic - ((alu_b_lo * alu_two_pow_s) * alu_shift_lt_bit_len))) - FF(0)); tmp *= scaling_factor; std::get<64>(evals) += tmp; } @@ -989,7 +1015,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(66); - auto tmp = (alu_op_div_a_lt_b * (-alu_op_div_a_lt_b + FF(1))); + auto tmp = ((alu_op_div_a_lt_b * (-alu_op_div_a_lt_b + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<66>(evals) += tmp; } @@ -997,7 +1023,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(67); - auto tmp = (alu_op_div_a_lt_b * (alu_a_lo - ((alu_ib - alu_ia) - FF(1)))); + auto tmp = ((alu_op_div_a_lt_b * (alu_a_lo - ((alu_ib - alu_ia) - FF(1)))) - FF(0)); tmp *= scaling_factor; std::get<67>(evals) += tmp; } @@ -1005,7 +1031,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(68); - auto tmp = (alu_op_div_a_lt_b * alu_ic); + auto tmp = ((alu_op_div_a_lt_b * alu_ic) - FF(0)); tmp *= scaling_factor; std::get<68>(evals) += tmp; } @@ 
-1013,7 +1039,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(69); - auto tmp = (alu_op_div_a_lt_b * (alu_ia - alu_remainder)); + auto tmp = ((alu_op_div_a_lt_b * (alu_ia - alu_remainder)) - FF(0)); tmp *= scaling_factor; std::get<69>(evals) += tmp; } @@ -1021,7 +1047,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(70); - auto tmp = (alu_op_div_std * (-alu_op_div_std + FF(1))); + auto tmp = ((alu_op_div_std * (-alu_op_div_std + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<70>(evals) += tmp; } @@ -1029,8 +1055,9 @@ template class aluImpl { { Avm_DECLARE_VIEWS(71); - auto tmp = - (alu_op_div_std * ((alu_ib - alu_divisor_lo) - (alu_divisor_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))); + auto tmp = ((alu_op_div_std * + ((alu_ib - alu_divisor_lo) - (alu_divisor_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))) - + FF(0)); tmp *= scaling_factor; std::get<71>(evals) += tmp; } @@ -1038,8 +1065,9 @@ template class aluImpl { { Avm_DECLARE_VIEWS(72); - auto tmp = (alu_op_div_std * - ((alu_ic - alu_quotient_lo) - (alu_quotient_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))); + auto tmp = ((alu_op_div_std * + ((alu_ic - alu_quotient_lo) - (alu_quotient_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))) - + FF(0)); tmp *= scaling_factor; std::get<72>(evals) += tmp; } @@ -1057,10 +1085,12 @@ template class aluImpl { Avm_DECLARE_VIEWS(74); auto tmp = - (alu_op_div_std * - ((((alu_divisor_lo * alu_quotient_lo) + (alu_partial_prod_lo * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - ((alu_partial_prod_hi + (alu_divisor_hi * alu_quotient_hi)) * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - - (alu_a_lo + (alu_a_hi * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))))); + ((alu_op_div_std * + ((((alu_divisor_lo * alu_quotient_lo) + (alu_partial_prod_lo * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + ((alu_partial_prod_hi + (alu_divisor_hi * alu_quotient_hi)) * + FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - + (alu_a_lo + (alu_a_hi * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))))) - + FF(0)); tmp *= scaling_factor; std::get<74>(evals) += 
tmp; } @@ -1068,7 +1098,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(75); - auto tmp = (alu_op_div_std * (alu_b_hi - ((alu_ib - alu_remainder) - FF(1)))); + auto tmp = ((alu_op_div_std * (alu_b_hi - ((alu_ib - alu_remainder) - FF(1)))) - FF(0)); tmp *= scaling_factor; std::get<75>(evals) += tmp; } @@ -1076,7 +1106,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(76); - auto tmp = ((alu_cmp_rng_ctr_shift - FF(2)) * alu_op_div_std); + auto tmp = (((alu_cmp_rng_ctr_shift - FF(2)) * alu_op_div_std) - FF(0)); tmp *= scaling_factor; std::get<76>(evals) += tmp; } @@ -1084,7 +1114,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(77); - auto tmp = (alu_sel_rng_chk * alu_op_div_std); + auto tmp = ((alu_sel_rng_chk * alu_op_div_std) - FF(0)); tmp *= scaling_factor; std::get<77>(evals) += tmp; } @@ -1093,10 +1123,12 @@ template class aluImpl { Avm_DECLARE_VIEWS(78); auto tmp = - (alu_op_div_std * - ((((alu_divisor_lo * alu_quotient_lo) + (alu_partial_prod_lo * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - ((alu_partial_prod_hi + (alu_divisor_hi * alu_quotient_hi)) * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - - (alu_ia - alu_remainder))); + ((alu_op_div_std * + ((((alu_divisor_lo * alu_quotient_lo) + (alu_partial_prod_lo * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + ((alu_partial_prod_hi + (alu_divisor_hi * alu_quotient_hi)) * + FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - + (alu_ia - alu_remainder))) - + FF(0)); tmp *= scaling_factor; std::get<78>(evals) += tmp; } @@ -1104,7 +1136,7 @@ template class aluImpl { { Avm_DECLARE_VIEWS(79); - auto tmp = (alu_sel_div_rng_chk * (-alu_sel_div_rng_chk + FF(1))); + auto tmp = ((alu_sel_div_rng_chk * (-alu_sel_div_rng_chk + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<79>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/binary.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/binary.hpp index 98260f37337..615e12a21c9 100644 --- 
a/barretenberg/cpp/src/barretenberg/relations/generated/avm/binary.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/binary.hpp @@ -69,7 +69,7 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(0); - auto tmp = (binary_sel_bin * (-binary_sel_bin + FF(1))); + auto tmp = ((binary_sel_bin * (-binary_sel_bin + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } @@ -77,7 +77,7 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(1); - auto tmp = ((binary_op_id_shift - binary_op_id) * binary_mem_tag_ctr); + auto tmp = (((binary_op_id_shift - binary_op_id) * binary_mem_tag_ctr) - FF(0)); tmp *= scaling_factor; std::get<1>(evals) += tmp; } @@ -85,7 +85,7 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(2); - auto tmp = (((binary_mem_tag_ctr_shift - binary_mem_tag_ctr) + FF(1)) * binary_mem_tag_ctr); + auto tmp = ((((binary_mem_tag_ctr_shift - binary_mem_tag_ctr) + FF(1)) * binary_mem_tag_ctr) - FF(0)); tmp *= scaling_factor; std::get<2>(evals) += tmp; } @@ -93,9 +93,10 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(3); - auto tmp = ((binary_mem_tag_ctr * - (((-binary_sel_bin + FF(1)) * (-binary_mem_tag_ctr_inv + FF(1))) + binary_mem_tag_ctr_inv)) - - binary_sel_bin); + auto tmp = (((binary_mem_tag_ctr * + (((-binary_sel_bin + FF(1)) * (-binary_mem_tag_ctr_inv + FF(1))) + binary_mem_tag_ctr_inv)) - + binary_sel_bin) - + FF(0)); tmp *= scaling_factor; std::get<3>(evals) += tmp; } @@ -103,7 +104,7 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(4); - auto tmp = ((-binary_sel_bin + FF(1)) * binary_acc_ia); + auto tmp = (((-binary_sel_bin + FF(1)) * binary_acc_ia) - FF(0)); tmp *= scaling_factor; std::get<4>(evals) += tmp; } @@ -111,7 +112,7 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(5); - auto tmp = ((-binary_sel_bin + FF(1)) * binary_acc_ib); + auto tmp = (((-binary_sel_bin + FF(1)) * binary_acc_ib) - FF(0)); tmp *= scaling_factor; std::get<5>(evals) += tmp; } @@ -119,7 +120,7 @@ template class binaryImpl { { 
Avm_DECLARE_VIEWS(6); - auto tmp = ((-binary_sel_bin + FF(1)) * binary_acc_ic); + auto tmp = (((-binary_sel_bin + FF(1)) * binary_acc_ic) - FF(0)); tmp *= scaling_factor; std::get<6>(evals) += tmp; } @@ -127,7 +128,8 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(7); - auto tmp = (((binary_acc_ia - binary_ia_bytes) - (binary_acc_ia_shift * FF(256))) * binary_mem_tag_ctr); + auto tmp = + ((((binary_acc_ia - binary_ia_bytes) - (binary_acc_ia_shift * FF(256))) * binary_mem_tag_ctr) - FF(0)); tmp *= scaling_factor; std::get<7>(evals) += tmp; } @@ -135,7 +137,8 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(8); - auto tmp = (((binary_acc_ib - binary_ib_bytes) - (binary_acc_ib_shift * FF(256))) * binary_mem_tag_ctr); + auto tmp = + ((((binary_acc_ib - binary_ib_bytes) - (binary_acc_ib_shift * FF(256))) * binary_mem_tag_ctr) - FF(0)); tmp *= scaling_factor; std::get<8>(evals) += tmp; } @@ -143,7 +146,8 @@ template class binaryImpl { { Avm_DECLARE_VIEWS(9); - auto tmp = (((binary_acc_ic - binary_ic_bytes) - (binary_acc_ic_shift * FF(256))) * binary_mem_tag_ctr); + auto tmp = + ((((binary_acc_ic - binary_ic_bytes) - (binary_acc_ic_shift * FF(256))) * binary_mem_tag_ctr) - FF(0)); tmp *= scaling_factor; std::get<9>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/conversion.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/conversion.hpp index b83fb6bf7ac..a51605c8f18 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/conversion.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/conversion.hpp @@ -37,7 +37,7 @@ template class conversionImpl { { Avm_DECLARE_VIEWS(0); - auto tmp = (conversion_sel_to_radix_le * (-conversion_sel_to_radix_le + FF(1))); + auto tmp = ((conversion_sel_to_radix_le * (-conversion_sel_to_radix_le + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp 
b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp index ef1db050b75..9dd3eb86948 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp @@ -4,6 +4,10 @@ using View = typename Accumulator::View; \ [[maybe_unused]] auto main_clk = View(new_term.main_clk); \ [[maybe_unused]] auto main_sel_first = View(new_term.main_sel_first); \ + [[maybe_unused]] auto kernel_kernel_inputs = View(new_term.kernel_kernel_inputs); \ + [[maybe_unused]] auto kernel_kernel_value_out = View(new_term.kernel_kernel_value_out); \ + [[maybe_unused]] auto kernel_kernel_side_effect_out = View(new_term.kernel_kernel_side_effect_out); \ + [[maybe_unused]] auto kernel_kernel_metadata_out = View(new_term.kernel_kernel_metadata_out); \ [[maybe_unused]] auto alu_a_hi = View(new_term.alu_a_hi); \ [[maybe_unused]] auto alu_a_lo = View(new_term.alu_a_lo); \ [[maybe_unused]] auto alu_b_hi = View(new_term.alu_b_hi); \ @@ -126,11 +130,7 @@ [[maybe_unused]] auto kernel_emit_unencrypted_log_write_offset = \ View(new_term.kernel_emit_unencrypted_log_write_offset); \ [[maybe_unused]] auto kernel_kernel_in_offset = View(new_term.kernel_kernel_in_offset); \ - [[maybe_unused]] auto kernel_kernel_inputs = View(new_term.kernel_kernel_inputs); \ - [[maybe_unused]] auto kernel_kernel_metadata_out = View(new_term.kernel_kernel_metadata_out); \ [[maybe_unused]] auto kernel_kernel_out_offset = View(new_term.kernel_kernel_out_offset); \ - [[maybe_unused]] auto kernel_kernel_side_effect_out = View(new_term.kernel_kernel_side_effect_out); \ - [[maybe_unused]] auto kernel_kernel_value_out = View(new_term.kernel_kernel_value_out); \ [[maybe_unused]] auto kernel_l1_to_l2_msg_exists_write_offset = \ View(new_term.kernel_l1_to_l2_msg_exists_write_offset); \ [[maybe_unused]] auto kernel_note_hash_exist_write_offset = View(new_term.kernel_note_hash_exist_write_offset); \ diff --git 
a/barretenberg/cpp/src/barretenberg/relations/generated/avm/keccakf1600.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/keccakf1600.hpp index 56e0a9e6a5d..18989c0e836 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/keccakf1600.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/keccakf1600.hpp @@ -37,7 +37,7 @@ template class keccakf1600Impl { { Avm_DECLARE_VIEWS(0); - auto tmp = (keccakf1600_sel_keccakf1600 * (-keccakf1600_sel_keccakf1600 + FF(1))); + auto tmp = ((keccakf1600_sel_keccakf1600 * (-keccakf1600_sel_keccakf1600 + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/kernel.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/kernel.hpp index a53770f6481..e9e0d0e1748 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/kernel.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/kernel.hpp @@ -98,8 +98,9 @@ template class kernelImpl { Avm_DECLARE_VIEWS(0); auto tmp = - ((-main_sel_last + FF(1)) * (kernel_note_hash_exist_write_offset_shift - - (kernel_note_hash_exist_write_offset + main_sel_op_note_hash_exists))); + (((-main_sel_last + FF(1)) * (kernel_note_hash_exist_write_offset_shift - + (kernel_note_hash_exist_write_offset + main_sel_op_note_hash_exists))) - + FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } @@ -107,8 +108,10 @@ template class kernelImpl { { Avm_DECLARE_VIEWS(1); - auto tmp = ((-main_sel_last + FF(1)) * (kernel_emit_note_hash_write_offset_shift - - (kernel_emit_note_hash_write_offset + main_sel_op_emit_note_hash))); + auto tmp = + (((-main_sel_last + FF(1)) * (kernel_emit_note_hash_write_offset_shift - + (kernel_emit_note_hash_write_offset + main_sel_op_emit_note_hash))) - + FF(0)); tmp *= scaling_factor; std::get<1>(evals) += tmp; } @@ -116,9 +119,10 @@ template class kernelImpl { { Avm_DECLARE_VIEWS(2); - auto tmp = ((-main_sel_last + 
FF(1)) * - (kernel_nullifier_exists_write_offset_shift - - (kernel_nullifier_exists_write_offset + (main_sel_op_nullifier_exists * main_ib)))); + auto tmp = (((-main_sel_last + FF(1)) * + (kernel_nullifier_exists_write_offset_shift - + (kernel_nullifier_exists_write_offset + (main_sel_op_nullifier_exists * main_ib)))) - + FF(0)); tmp *= scaling_factor; std::get<2>(evals) += tmp; } @@ -127,9 +131,10 @@ template class kernelImpl { Avm_DECLARE_VIEWS(3); auto tmp = - ((-main_sel_last + FF(1)) * - (kernel_nullifier_non_exists_write_offset_shift - - (kernel_nullifier_non_exists_write_offset + (main_sel_op_nullifier_exists * (-main_ib + FF(1)))))); + (((-main_sel_last + FF(1)) * + (kernel_nullifier_non_exists_write_offset_shift - + (kernel_nullifier_non_exists_write_offset + (main_sel_op_nullifier_exists * (-main_ib + FF(1)))))) - + FF(0)); tmp *= scaling_factor; std::get<3>(evals) += tmp; } @@ -137,8 +142,10 @@ template class kernelImpl { { Avm_DECLARE_VIEWS(4); - auto tmp = ((-main_sel_last + FF(1)) * (kernel_emit_nullifier_write_offset_shift - - (kernel_emit_nullifier_write_offset + main_sel_op_emit_nullifier))); + auto tmp = + (((-main_sel_last + FF(1)) * (kernel_emit_nullifier_write_offset_shift - + (kernel_emit_nullifier_write_offset + main_sel_op_emit_nullifier))) - + FF(0)); tmp *= scaling_factor; std::get<4>(evals) += tmp; } @@ -146,9 +153,10 @@ template class kernelImpl { { Avm_DECLARE_VIEWS(5); - auto tmp = ((-main_sel_last + FF(1)) * - (kernel_l1_to_l2_msg_exists_write_offset_shift - - (kernel_l1_to_l2_msg_exists_write_offset + main_sel_op_l1_to_l2_msg_exists))); + auto tmp = (((-main_sel_last + FF(1)) * + (kernel_l1_to_l2_msg_exists_write_offset_shift - + (kernel_l1_to_l2_msg_exists_write_offset + main_sel_op_l1_to_l2_msg_exists))) - + FF(0)); tmp *= scaling_factor; std::get<5>(evals) += tmp; } @@ -156,9 +164,10 @@ template class kernelImpl { { Avm_DECLARE_VIEWS(6); - auto tmp = ((-main_sel_last + FF(1)) * - (kernel_emit_unencrypted_log_write_offset_shift - - 
(kernel_emit_unencrypted_log_write_offset + main_sel_op_emit_unencrypted_log))); + auto tmp = (((-main_sel_last + FF(1)) * + (kernel_emit_unencrypted_log_write_offset_shift - + (kernel_emit_unencrypted_log_write_offset + main_sel_op_emit_unencrypted_log))) - + FF(0)); tmp *= scaling_factor; std::get<6>(evals) += tmp; } @@ -166,9 +175,10 @@ template class kernelImpl { { Avm_DECLARE_VIEWS(7); - auto tmp = - ((-main_sel_last + FF(1)) * (kernel_emit_l2_to_l1_msg_write_offset_shift - - (kernel_emit_l2_to_l1_msg_write_offset + main_sel_op_emit_l2_to_l1_msg))); + auto tmp = (((-main_sel_last + FF(1)) * + (kernel_emit_l2_to_l1_msg_write_offset_shift - + (kernel_emit_l2_to_l1_msg_write_offset + main_sel_op_emit_l2_to_l1_msg))) - + FF(0)); tmp *= scaling_factor; std::get<7>(evals) += tmp; } @@ -176,8 +186,9 @@ template class kernelImpl { { Avm_DECLARE_VIEWS(8); - auto tmp = ((-main_sel_last + FF(1)) * - (kernel_sload_write_offset_shift - (kernel_sload_write_offset + main_sel_op_sload))); + auto tmp = (((-main_sel_last + FF(1)) * + (kernel_sload_write_offset_shift - (kernel_sload_write_offset + main_sel_op_sload))) - + FF(0)); tmp *= scaling_factor; std::get<8>(evals) += tmp; } @@ -185,8 +196,9 @@ template class kernelImpl { { Avm_DECLARE_VIEWS(9); - auto tmp = ((-main_sel_last + FF(1)) * - (kernel_sstore_write_offset_shift - (kernel_sstore_write_offset + main_sel_op_sstore))); + auto tmp = (((-main_sel_last + FF(1)) * + (kernel_sstore_write_offset_shift - (kernel_sstore_write_offset + main_sel_op_sstore))) - + FF(0)); tmp *= scaling_factor; std::get<9>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/main.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/main.hpp index 38835bdab22..1517e106ca7 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/main.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/main.hpp @@ -307,7 +307,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(0); - auto tmp 
= (main_l2_out_of_gas * (-main_l2_out_of_gas + FF(1))); + auto tmp = ((main_l2_out_of_gas * (-main_l2_out_of_gas + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } @@ -315,7 +315,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(1); - auto tmp = (main_da_out_of_gas * (-main_da_out_of_gas + FF(1))); + auto tmp = ((main_da_out_of_gas * (-main_da_out_of_gas + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<1>(evals) += tmp; } @@ -323,8 +323,9 @@ template class mainImpl { { Avm_DECLARE_VIEWS(2); - auto tmp = (main_sel_gas_accounting_active * - ((main_l2_gas_remaining_shift - main_l2_gas_remaining) + main_l2_gas_op_cost)); + auto tmp = ((main_sel_gas_accounting_active * + ((main_l2_gas_remaining_shift - main_l2_gas_remaining) + main_l2_gas_op_cost)) - + FF(0)); tmp *= scaling_factor; std::get<2>(evals) += tmp; } @@ -332,8 +333,9 @@ template class mainImpl { { Avm_DECLARE_VIEWS(3); - auto tmp = (main_sel_gas_accounting_active * - ((main_da_gas_remaining_shift - main_da_gas_remaining) + main_da_gas_op_cost)); + auto tmp = ((main_sel_gas_accounting_active * + ((main_da_gas_remaining_shift - main_da_gas_remaining) + main_da_gas_op_cost)) - + FF(0)); tmp *= scaling_factor; std::get<3>(evals) += tmp; } @@ -341,7 +343,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(4); - auto tmp = ((-main_sel_gas_accounting_active + FF(1)) * main_l2_gas_op_cost); + auto tmp = (((-main_sel_gas_accounting_active + FF(1)) * main_l2_gas_op_cost) - FF(0)); tmp *= scaling_factor; std::get<4>(evals) += tmp; } @@ -349,7 +351,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(5); - auto tmp = ((-main_sel_gas_accounting_active + FF(1)) * main_da_gas_op_cost); + auto tmp = (((-main_sel_gas_accounting_active + FF(1)) * main_da_gas_op_cost) - FF(0)); tmp *= scaling_factor; std::get<5>(evals) += tmp; } @@ -357,10 +359,11 @@ template class mainImpl { { Avm_DECLARE_VIEWS(6); - auto tmp = (main_sel_gas_accounting_active * - ((((-(main_l2_out_of_gas * FF(2)) + FF(1)) * 
main_l2_gas_remaining_shift) - - (main_abs_l2_rem_gas_hi * FF(65536))) - - main_abs_l2_rem_gas_lo)); + auto tmp = ((main_sel_gas_accounting_active * + ((((-(main_l2_out_of_gas * FF(2)) + FF(1)) * main_l2_gas_remaining_shift) - + (main_abs_l2_rem_gas_hi * FF(65536))) - + main_abs_l2_rem_gas_lo)) - + FF(0)); tmp *= scaling_factor; std::get<6>(evals) += tmp; } @@ -368,10 +371,11 @@ template class mainImpl { { Avm_DECLARE_VIEWS(7); - auto tmp = (main_sel_gas_accounting_active * - ((((-(main_da_out_of_gas * FF(2)) + FF(1)) * main_da_gas_remaining_shift) - - (main_abs_da_rem_gas_hi * FF(65536))) - - main_abs_da_rem_gas_lo)); + auto tmp = ((main_sel_gas_accounting_active * + ((((-(main_da_out_of_gas * FF(2)) + FF(1)) * main_da_gas_remaining_shift) - + (main_abs_da_rem_gas_hi * FF(65536))) - + main_abs_da_rem_gas_lo)) - + FF(0)); tmp *= scaling_factor; std::get<7>(evals) += tmp; } @@ -379,7 +383,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(8); - auto tmp = (main_sel_op_sender * (-main_sel_op_sender + FF(1))); + auto tmp = ((main_sel_op_sender * (-main_sel_op_sender + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<8>(evals) += tmp; } @@ -387,7 +391,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(9); - auto tmp = (main_sel_op_address * (-main_sel_op_address + FF(1))); + auto tmp = ((main_sel_op_address * (-main_sel_op_address + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<9>(evals) += tmp; } @@ -395,7 +399,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(10); - auto tmp = (main_sel_op_storage_address * (-main_sel_op_storage_address + FF(1))); + auto tmp = ((main_sel_op_storage_address * (-main_sel_op_storage_address + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<10>(evals) += tmp; } @@ -403,7 +407,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(11); - auto tmp = (main_sel_op_chain_id * (-main_sel_op_chain_id + FF(1))); + auto tmp = ((main_sel_op_chain_id * (-main_sel_op_chain_id + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<11>(evals) += tmp; 
} @@ -411,7 +415,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(12); - auto tmp = (main_sel_op_version * (-main_sel_op_version + FF(1))); + auto tmp = ((main_sel_op_version * (-main_sel_op_version + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<12>(evals) += tmp; } @@ -419,7 +423,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(13); - auto tmp = (main_sel_op_block_number * (-main_sel_op_block_number + FF(1))); + auto tmp = ((main_sel_op_block_number * (-main_sel_op_block_number + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<13>(evals) += tmp; } @@ -427,7 +431,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(14); - auto tmp = (main_sel_op_coinbase * (-main_sel_op_coinbase + FF(1))); + auto tmp = ((main_sel_op_coinbase * (-main_sel_op_coinbase + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<14>(evals) += tmp; } @@ -435,7 +439,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(15); - auto tmp = (main_sel_op_timestamp * (-main_sel_op_timestamp + FF(1))); + auto tmp = ((main_sel_op_timestamp * (-main_sel_op_timestamp + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<15>(evals) += tmp; } @@ -443,7 +447,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(16); - auto tmp = (main_sel_op_fee_per_l2_gas * (-main_sel_op_fee_per_l2_gas + FF(1))); + auto tmp = ((main_sel_op_fee_per_l2_gas * (-main_sel_op_fee_per_l2_gas + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<16>(evals) += tmp; } @@ -451,7 +455,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(17); - auto tmp = (main_sel_op_fee_per_da_gas * (-main_sel_op_fee_per_da_gas + FF(1))); + auto tmp = ((main_sel_op_fee_per_da_gas * (-main_sel_op_fee_per_da_gas + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<17>(evals) += tmp; } @@ -459,7 +463,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(18); - auto tmp = (main_sel_op_transaction_fee * (-main_sel_op_transaction_fee + FF(1))); + auto tmp = ((main_sel_op_transaction_fee * (-main_sel_op_transaction_fee + FF(1))) - FF(0)); tmp *= scaling_factor; 
std::get<18>(evals) += tmp; } @@ -467,7 +471,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(19); - auto tmp = (main_sel_op_l2gasleft * (-main_sel_op_l2gasleft + FF(1))); + auto tmp = ((main_sel_op_l2gasleft * (-main_sel_op_l2gasleft + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<19>(evals) += tmp; } @@ -475,7 +479,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(20); - auto tmp = (main_sel_op_dagasleft * (-main_sel_op_dagasleft + FF(1))); + auto tmp = ((main_sel_op_dagasleft * (-main_sel_op_dagasleft + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<20>(evals) += tmp; } @@ -483,7 +487,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(21); - auto tmp = (main_sel_op_note_hash_exists * (-main_sel_op_note_hash_exists + FF(1))); + auto tmp = ((main_sel_op_note_hash_exists * (-main_sel_op_note_hash_exists + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<21>(evals) += tmp; } @@ -491,7 +495,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(22); - auto tmp = (main_sel_op_emit_note_hash * (-main_sel_op_emit_note_hash + FF(1))); + auto tmp = ((main_sel_op_emit_note_hash * (-main_sel_op_emit_note_hash + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<22>(evals) += tmp; } @@ -499,7 +503,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(23); - auto tmp = (main_sel_op_nullifier_exists * (-main_sel_op_nullifier_exists + FF(1))); + auto tmp = ((main_sel_op_nullifier_exists * (-main_sel_op_nullifier_exists + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<23>(evals) += tmp; } @@ -507,7 +511,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(24); - auto tmp = (main_sel_op_emit_nullifier * (-main_sel_op_emit_nullifier + FF(1))); + auto tmp = ((main_sel_op_emit_nullifier * (-main_sel_op_emit_nullifier + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<24>(evals) += tmp; } @@ -515,7 +519,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(25); - auto tmp = (main_sel_op_l1_to_l2_msg_exists * (-main_sel_op_l1_to_l2_msg_exists + FF(1))); + auto tmp = 
((main_sel_op_l1_to_l2_msg_exists * (-main_sel_op_l1_to_l2_msg_exists + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<25>(evals) += tmp; } @@ -523,7 +527,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(26); - auto tmp = (main_sel_op_emit_unencrypted_log * (-main_sel_op_emit_unencrypted_log + FF(1))); + auto tmp = ((main_sel_op_emit_unencrypted_log * (-main_sel_op_emit_unencrypted_log + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<26>(evals) += tmp; } @@ -531,7 +535,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(27); - auto tmp = (main_sel_op_emit_l2_to_l1_msg * (-main_sel_op_emit_l2_to_l1_msg + FF(1))); + auto tmp = ((main_sel_op_emit_l2_to_l1_msg * (-main_sel_op_emit_l2_to_l1_msg + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<27>(evals) += tmp; } @@ -539,7 +543,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(28); - auto tmp = (main_sel_op_get_contract_instance * (-main_sel_op_get_contract_instance + FF(1))); + auto tmp = ((main_sel_op_get_contract_instance * (-main_sel_op_get_contract_instance + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<28>(evals) += tmp; } @@ -547,7 +551,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(29); - auto tmp = (main_sel_op_sload * (-main_sel_op_sload + FF(1))); + auto tmp = ((main_sel_op_sload * (-main_sel_op_sload + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<29>(evals) += tmp; } @@ -555,7 +559,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(30); - auto tmp = (main_sel_op_sstore * (-main_sel_op_sstore + FF(1))); + auto tmp = ((main_sel_op_sstore * (-main_sel_op_sstore + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<30>(evals) += tmp; } @@ -563,7 +567,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(31); - auto tmp = (main_sel_op_radix_le * (-main_sel_op_radix_le + FF(1))); + auto tmp = ((main_sel_op_radix_le * (-main_sel_op_radix_le + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<31>(evals) += tmp; } @@ -571,7 +575,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(32); - auto tmp = 
(main_sel_op_sha256 * (-main_sel_op_sha256 + FF(1))); + auto tmp = ((main_sel_op_sha256 * (-main_sel_op_sha256 + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<32>(evals) += tmp; } @@ -579,7 +583,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(33); - auto tmp = (main_sel_op_poseidon2 * (-main_sel_op_poseidon2 + FF(1))); + auto tmp = ((main_sel_op_poseidon2 * (-main_sel_op_poseidon2 + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<33>(evals) += tmp; } @@ -587,7 +591,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(34); - auto tmp = (main_sel_op_keccak * (-main_sel_op_keccak + FF(1))); + auto tmp = ((main_sel_op_keccak * (-main_sel_op_keccak + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<34>(evals) += tmp; } @@ -595,7 +599,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(35); - auto tmp = (main_sel_op_pedersen * (-main_sel_op_pedersen + FF(1))); + auto tmp = ((main_sel_op_pedersen * (-main_sel_op_pedersen + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<35>(evals) += tmp; } @@ -603,7 +607,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(36); - auto tmp = (main_sel_op_add * (-main_sel_op_add + FF(1))); + auto tmp = ((main_sel_op_add * (-main_sel_op_add + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<36>(evals) += tmp; } @@ -611,7 +615,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(37); - auto tmp = (main_sel_op_sub * (-main_sel_op_sub + FF(1))); + auto tmp = ((main_sel_op_sub * (-main_sel_op_sub + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<37>(evals) += tmp; } @@ -619,7 +623,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(38); - auto tmp = (main_sel_op_mul * (-main_sel_op_mul + FF(1))); + auto tmp = ((main_sel_op_mul * (-main_sel_op_mul + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<38>(evals) += tmp; } @@ -627,7 +631,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(39); - auto tmp = (main_sel_op_div * (-main_sel_op_div + FF(1))); + auto tmp = ((main_sel_op_div * (-main_sel_op_div + FF(1))) - FF(0)); tmp *= 
scaling_factor; std::get<39>(evals) += tmp; } @@ -635,7 +639,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(40); - auto tmp = (main_sel_op_fdiv * (-main_sel_op_fdiv + FF(1))); + auto tmp = ((main_sel_op_fdiv * (-main_sel_op_fdiv + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<40>(evals) += tmp; } @@ -643,7 +647,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(41); - auto tmp = (main_sel_op_not * (-main_sel_op_not + FF(1))); + auto tmp = ((main_sel_op_not * (-main_sel_op_not + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<41>(evals) += tmp; } @@ -651,7 +655,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(42); - auto tmp = (main_sel_op_eq * (-main_sel_op_eq + FF(1))); + auto tmp = ((main_sel_op_eq * (-main_sel_op_eq + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<42>(evals) += tmp; } @@ -659,7 +663,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(43); - auto tmp = (main_sel_op_and * (-main_sel_op_and + FF(1))); + auto tmp = ((main_sel_op_and * (-main_sel_op_and + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<43>(evals) += tmp; } @@ -667,7 +671,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(44); - auto tmp = (main_sel_op_or * (-main_sel_op_or + FF(1))); + auto tmp = ((main_sel_op_or * (-main_sel_op_or + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<44>(evals) += tmp; } @@ -675,7 +679,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(45); - auto tmp = (main_sel_op_xor * (-main_sel_op_xor + FF(1))); + auto tmp = ((main_sel_op_xor * (-main_sel_op_xor + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<45>(evals) += tmp; } @@ -683,7 +687,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(46); - auto tmp = (main_sel_op_cast * (-main_sel_op_cast + FF(1))); + auto tmp = ((main_sel_op_cast * (-main_sel_op_cast + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<46>(evals) += tmp; } @@ -691,7 +695,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(47); - auto tmp = (main_sel_op_lt * (-main_sel_op_lt + FF(1))); + auto tmp = ((main_sel_op_lt * 
(-main_sel_op_lt + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<47>(evals) += tmp; } @@ -699,7 +703,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(48); - auto tmp = (main_sel_op_lte * (-main_sel_op_lte + FF(1))); + auto tmp = ((main_sel_op_lte * (-main_sel_op_lte + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<48>(evals) += tmp; } @@ -707,7 +711,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(49); - auto tmp = (main_sel_op_shl * (-main_sel_op_shl + FF(1))); + auto tmp = ((main_sel_op_shl * (-main_sel_op_shl + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<49>(evals) += tmp; } @@ -715,7 +719,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(50); - auto tmp = (main_sel_op_shr * (-main_sel_op_shr + FF(1))); + auto tmp = ((main_sel_op_shr * (-main_sel_op_shr + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<50>(evals) += tmp; } @@ -723,7 +727,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(51); - auto tmp = (main_sel_op_internal_call * (-main_sel_op_internal_call + FF(1))); + auto tmp = ((main_sel_op_internal_call * (-main_sel_op_internal_call + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<51>(evals) += tmp; } @@ -731,7 +735,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(52); - auto tmp = (main_sel_op_internal_return * (-main_sel_op_internal_return + FF(1))); + auto tmp = ((main_sel_op_internal_return * (-main_sel_op_internal_return + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<52>(evals) += tmp; } @@ -739,7 +743,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(53); - auto tmp = (main_sel_op_jump * (-main_sel_op_jump + FF(1))); + auto tmp = ((main_sel_op_jump * (-main_sel_op_jump + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<53>(evals) += tmp; } @@ -747,7 +751,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(54); - auto tmp = (main_sel_op_jumpi * (-main_sel_op_jumpi + FF(1))); + auto tmp = ((main_sel_op_jumpi * (-main_sel_op_jumpi + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<54>(evals) += tmp; } @@ -755,7 +759,7 
@@ template class mainImpl { { Avm_DECLARE_VIEWS(55); - auto tmp = (main_sel_op_halt * (-main_sel_op_halt + FF(1))); + auto tmp = ((main_sel_op_halt * (-main_sel_op_halt + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<55>(evals) += tmp; } @@ -763,7 +767,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(56); - auto tmp = (main_sel_op_external_call * (-main_sel_op_external_call + FF(1))); + auto tmp = ((main_sel_op_external_call * (-main_sel_op_external_call + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<56>(evals) += tmp; } @@ -771,7 +775,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(57); - auto tmp = (main_sel_op_mov * (-main_sel_op_mov + FF(1))); + auto tmp = ((main_sel_op_mov * (-main_sel_op_mov + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<57>(evals) += tmp; } @@ -779,7 +783,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(58); - auto tmp = (main_sel_op_cmov * (-main_sel_op_cmov + FF(1))); + auto tmp = ((main_sel_op_cmov * (-main_sel_op_cmov + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<58>(evals) += tmp; } @@ -787,7 +791,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(59); - auto tmp = (main_op_err * (-main_op_err + FF(1))); + auto tmp = ((main_op_err * (-main_op_err + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<59>(evals) += tmp; } @@ -795,7 +799,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(60); - auto tmp = (main_tag_err * (-main_tag_err + FF(1))); + auto tmp = ((main_tag_err * (-main_tag_err + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<60>(evals) += tmp; } @@ -803,7 +807,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(61); - auto tmp = (main_id_zero * (-main_id_zero + FF(1))); + auto tmp = ((main_id_zero * (-main_id_zero + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<61>(evals) += tmp; } @@ -811,7 +815,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(62); - auto tmp = (main_sel_mem_op_a * (-main_sel_mem_op_a + FF(1))); + auto tmp = ((main_sel_mem_op_a * (-main_sel_mem_op_a + FF(1))) - FF(0)); tmp 
*= scaling_factor; std::get<62>(evals) += tmp; } @@ -819,7 +823,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(63); - auto tmp = (main_sel_mem_op_b * (-main_sel_mem_op_b + FF(1))); + auto tmp = ((main_sel_mem_op_b * (-main_sel_mem_op_b + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<63>(evals) += tmp; } @@ -827,7 +831,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(64); - auto tmp = (main_sel_mem_op_c * (-main_sel_mem_op_c + FF(1))); + auto tmp = ((main_sel_mem_op_c * (-main_sel_mem_op_c + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<64>(evals) += tmp; } @@ -835,7 +839,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(65); - auto tmp = (main_sel_mem_op_d * (-main_sel_mem_op_d + FF(1))); + auto tmp = ((main_sel_mem_op_d * (-main_sel_mem_op_d + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<65>(evals) += tmp; } @@ -843,7 +847,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(66); - auto tmp = (main_rwa * (-main_rwa + FF(1))); + auto tmp = ((main_rwa * (-main_rwa + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<66>(evals) += tmp; } @@ -851,7 +855,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(67); - auto tmp = (main_rwb * (-main_rwb + FF(1))); + auto tmp = ((main_rwb * (-main_rwb + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<67>(evals) += tmp; } @@ -859,7 +863,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(68); - auto tmp = (main_rwc * (-main_rwc + FF(1))); + auto tmp = ((main_rwc * (-main_rwc + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<68>(evals) += tmp; } @@ -867,7 +871,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(69); - auto tmp = (main_rwd * (-main_rwd + FF(1))); + auto tmp = ((main_rwd * (-main_rwd + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<69>(evals) += tmp; } @@ -875,7 +879,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(70); - auto tmp = (main_sel_resolve_ind_addr_a * (-main_sel_resolve_ind_addr_a + FF(1))); + auto tmp = ((main_sel_resolve_ind_addr_a * (-main_sel_resolve_ind_addr_a + FF(1))) - 
FF(0)); tmp *= scaling_factor; std::get<70>(evals) += tmp; } @@ -883,7 +887,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(71); - auto tmp = (main_sel_resolve_ind_addr_b * (-main_sel_resolve_ind_addr_b + FF(1))); + auto tmp = ((main_sel_resolve_ind_addr_b * (-main_sel_resolve_ind_addr_b + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<71>(evals) += tmp; } @@ -891,7 +895,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(72); - auto tmp = (main_sel_resolve_ind_addr_c * (-main_sel_resolve_ind_addr_c + FF(1))); + auto tmp = ((main_sel_resolve_ind_addr_c * (-main_sel_resolve_ind_addr_c + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<72>(evals) += tmp; } @@ -899,7 +903,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(73); - auto tmp = (main_sel_resolve_ind_addr_d * (-main_sel_resolve_ind_addr_d + FF(1))); + auto tmp = ((main_sel_resolve_ind_addr_d * (-main_sel_resolve_ind_addr_d + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<73>(evals) += tmp; } @@ -907,7 +911,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(74); - auto tmp = (((main_sel_op_eq + main_sel_op_lte) + main_sel_op_lt) * (main_w_in_tag - FF(1))); + auto tmp = ((((main_sel_op_eq + main_sel_op_lte) + main_sel_op_lt) * (main_w_in_tag - FF(1))) - FF(0)); tmp *= scaling_factor; std::get<74>(evals) += tmp; } @@ -915,7 +919,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(75); - auto tmp = ((main_sel_op_fdiv * (-main_op_err + FF(1))) * ((main_ic * main_ib) - main_ia)); + auto tmp = (((main_sel_op_fdiv * (-main_op_err + FF(1))) * ((main_ic * main_ib) - main_ia)) - FF(0)); tmp *= scaling_factor; std::get<75>(evals) += tmp; } @@ -923,7 +927,8 @@ template class mainImpl { { Avm_DECLARE_VIEWS(76); - auto tmp = ((main_sel_op_fdiv + main_sel_op_div) * (((main_ib * main_inv) - FF(1)) + main_op_err)); + auto tmp = + (((main_sel_op_fdiv + main_sel_op_div) * (((main_ib * main_inv) - FF(1)) + main_op_err)) - FF(0)); tmp *= scaling_factor; std::get<76>(evals) += tmp; } @@ -931,7 +936,7 @@ template class 
mainImpl { { Avm_DECLARE_VIEWS(77); - auto tmp = (((main_sel_op_fdiv + main_sel_op_div) * main_op_err) * (-main_inv + FF(1))); + auto tmp = ((((main_sel_op_fdiv + main_sel_op_div) * main_op_err) * (-main_inv + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<77>(evals) += tmp; } @@ -939,7 +944,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(78); - auto tmp = (main_sel_op_fdiv * (main_r_in_tag - FF(6))); + auto tmp = ((main_sel_op_fdiv * (main_r_in_tag - FF(6))) - FF(0)); tmp *= scaling_factor; std::get<78>(evals) += tmp; } @@ -947,7 +952,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(79); - auto tmp = (main_sel_op_fdiv * (main_w_in_tag - FF(6))); + auto tmp = ((main_sel_op_fdiv * (main_w_in_tag - FF(6))) - FF(0)); tmp *= scaling_factor; std::get<79>(evals) += tmp; } @@ -955,7 +960,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(80); - auto tmp = (main_op_err * ((main_sel_op_fdiv + main_sel_op_div) - FF(1))); + auto tmp = ((main_op_err * ((main_sel_op_fdiv + main_sel_op_div) - FF(1))) - FF(0)); tmp *= scaling_factor; std::get<80>(evals) += tmp; } @@ -963,16 +968,17 @@ template class mainImpl { { Avm_DECLARE_VIEWS(81); - auto tmp = (((((((((((main_sel_op_sender + main_sel_op_address) + main_sel_op_storage_address) + - main_sel_op_chain_id) + - main_sel_op_version) + - main_sel_op_block_number) + - main_sel_op_coinbase) + - main_sel_op_timestamp) + - main_sel_op_fee_per_l2_gas) + - main_sel_op_fee_per_da_gas) + - main_sel_op_transaction_fee) * - (-main_sel_q_kernel_lookup + FF(1))); + auto tmp = ((((((((((((main_sel_op_sender + main_sel_op_address) + main_sel_op_storage_address) + + main_sel_op_chain_id) + + main_sel_op_version) + + main_sel_op_block_number) + + main_sel_op_coinbase) + + main_sel_op_timestamp) + + main_sel_op_fee_per_l2_gas) + + main_sel_op_fee_per_da_gas) + + main_sel_op_transaction_fee) * + (-main_sel_q_kernel_lookup + FF(1))) - + FF(0)); tmp *= scaling_factor; std::get<81>(evals) += tmp; } @@ -981,12 +987,13 @@ template class mainImpl { 
Avm_DECLARE_VIEWS(82); auto tmp = - (((((((main_sel_op_note_hash_exists + main_sel_op_emit_note_hash) + main_sel_op_nullifier_exists) + - main_sel_op_emit_nullifier) + - main_sel_op_l1_to_l2_msg_exists) + - main_sel_op_emit_unencrypted_log) + - main_sel_op_emit_l2_to_l1_msg) * - (-main_sel_q_kernel_output_lookup + FF(1))); + ((((((((main_sel_op_note_hash_exists + main_sel_op_emit_note_hash) + main_sel_op_nullifier_exists) + + main_sel_op_emit_nullifier) + + main_sel_op_l1_to_l2_msg_exists) + + main_sel_op_emit_unencrypted_log) + + main_sel_op_emit_l2_to_l1_msg) * + (-main_sel_q_kernel_output_lookup + FF(1))) - + FF(0)); tmp *= scaling_factor; std::get<82>(evals) += tmp; } @@ -994,7 +1001,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(83); - auto tmp = (main_sel_op_jump * (main_pc_shift - main_ia)); + auto tmp = ((main_sel_op_jump * (main_pc_shift - main_ia)) - FF(0)); tmp *= scaling_factor; std::get<83>(evals) += tmp; } @@ -1002,8 +1009,9 @@ template class mainImpl { { Avm_DECLARE_VIEWS(84); - auto tmp = (main_sel_op_jumpi * (((-main_id_zero + FF(1)) * (main_pc_shift - main_ia)) + - (main_id_zero * ((main_pc_shift - main_pc) - FF(1))))); + auto tmp = ((main_sel_op_jumpi * (((-main_id_zero + FF(1)) * (main_pc_shift - main_ia)) + + (main_id_zero * ((main_pc_shift - main_pc) - FF(1))))) - + FF(0)); tmp *= scaling_factor; std::get<84>(evals) += tmp; } @@ -1012,7 +1020,8 @@ template class mainImpl { Avm_DECLARE_VIEWS(85); auto tmp = - (main_sel_op_internal_call * (main_internal_return_ptr_shift - (main_internal_return_ptr + FF(1)))); + ((main_sel_op_internal_call * (main_internal_return_ptr_shift - (main_internal_return_ptr + FF(1)))) - + FF(0)); tmp *= scaling_factor; std::get<85>(evals) += tmp; } @@ -1020,7 +1029,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(86); - auto tmp = (main_sel_op_internal_call * (main_internal_return_ptr - main_mem_addr_b)); + auto tmp = ((main_sel_op_internal_call * (main_internal_return_ptr - main_mem_addr_b)) - FF(0)); tmp *= 
scaling_factor; std::get<86>(evals) += tmp; } @@ -1028,7 +1037,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(87); - auto tmp = (main_sel_op_internal_call * (main_pc_shift - main_ia)); + auto tmp = ((main_sel_op_internal_call * (main_pc_shift - main_ia)) - FF(0)); tmp *= scaling_factor; std::get<87>(evals) += tmp; } @@ -1036,7 +1045,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(88); - auto tmp = (main_sel_op_internal_call * ((main_pc + FF(1)) - main_ib)); + auto tmp = ((main_sel_op_internal_call * ((main_pc + FF(1)) - main_ib)) - FF(0)); tmp *= scaling_factor; std::get<88>(evals) += tmp; } @@ -1044,7 +1053,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(89); - auto tmp = (main_sel_op_internal_call * (main_rwb - FF(1))); + auto tmp = ((main_sel_op_internal_call * (main_rwb - FF(1))) - FF(0)); tmp *= scaling_factor; std::get<89>(evals) += tmp; } @@ -1052,7 +1061,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(90); - auto tmp = (main_sel_op_internal_call * (main_sel_mem_op_b - FF(1))); + auto tmp = ((main_sel_op_internal_call * (main_sel_mem_op_b - FF(1))) - FF(0)); tmp *= scaling_factor; std::get<90>(evals) += tmp; } @@ -1061,7 +1070,8 @@ template class mainImpl { Avm_DECLARE_VIEWS(91); auto tmp = - (main_sel_op_internal_return * (main_internal_return_ptr_shift - (main_internal_return_ptr - FF(1)))); + ((main_sel_op_internal_return * (main_internal_return_ptr_shift - (main_internal_return_ptr - FF(1)))) - + FF(0)); tmp *= scaling_factor; std::get<91>(evals) += tmp; } @@ -1069,7 +1079,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(92); - auto tmp = (main_sel_op_internal_return * ((main_internal_return_ptr - FF(1)) - main_mem_addr_a)); + auto tmp = ((main_sel_op_internal_return * ((main_internal_return_ptr - FF(1)) - main_mem_addr_a)) - FF(0)); tmp *= scaling_factor; std::get<92>(evals) += tmp; } @@ -1077,7 +1087,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(93); - auto tmp = (main_sel_op_internal_return * (main_pc_shift - main_ia)); + auto tmp 
= ((main_sel_op_internal_return * (main_pc_shift - main_ia)) - FF(0)); tmp *= scaling_factor; std::get<93>(evals) += tmp; } @@ -1085,7 +1095,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(94); - auto tmp = (main_sel_op_internal_return * main_rwa); + auto tmp = ((main_sel_op_internal_return * main_rwa) - FF(0)); tmp *= scaling_factor; std::get<94>(evals) += tmp; } @@ -1093,7 +1103,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(95); - auto tmp = (main_sel_op_internal_return * (main_sel_mem_op_a - FF(1))); + auto tmp = ((main_sel_op_internal_return * (main_sel_mem_op_a - FF(1))) - FF(0)); tmp *= scaling_factor; std::get<95>(evals) += tmp; } @@ -1101,42 +1111,43 @@ template class mainImpl { { Avm_DECLARE_VIEWS(96); - auto tmp = - (((((main_sel_gas_accounting_active - - (((((((main_sel_op_fdiv + - ((((((((((main_sel_op_add + main_sel_op_sub) + main_sel_op_mul) + main_sel_op_div) + - main_sel_op_not) + - main_sel_op_eq) + - main_sel_op_lt) + - main_sel_op_lte) + - main_sel_op_shr) + - main_sel_op_shl) + - main_sel_op_cast)) + - ((main_sel_op_and + main_sel_op_or) + main_sel_op_xor)) + - (main_sel_op_cmov + main_sel_op_mov)) + - ((((main_sel_op_radix_le + main_sel_op_sha256) + main_sel_op_poseidon2) + main_sel_op_keccak) + - main_sel_op_pedersen)) + - ((((((((((main_sel_op_sender + main_sel_op_address) + main_sel_op_storage_address) + - main_sel_op_chain_id) + - main_sel_op_version) + - main_sel_op_block_number) + - main_sel_op_coinbase) + - main_sel_op_timestamp) + - main_sel_op_fee_per_l2_gas) + - main_sel_op_fee_per_da_gas) + - main_sel_op_transaction_fee)) + - ((((((main_sel_op_note_hash_exists + main_sel_op_emit_note_hash) + - main_sel_op_nullifier_exists) + - main_sel_op_emit_nullifier) + - main_sel_op_l1_to_l2_msg_exists) + - main_sel_op_emit_unencrypted_log) + - main_sel_op_emit_l2_to_l1_msg)) + - (main_sel_op_dagasleft + main_sel_op_l2gasleft))) - - (((main_sel_op_jump + main_sel_op_jumpi) + main_sel_op_internal_call) + - main_sel_op_internal_return)) - - 
main_sel_op_sload) - - main_sel_op_sstore) - - main_sel_mem_op_activate_gas); + auto tmp = ((((((main_sel_gas_accounting_active - + (((((((main_sel_op_fdiv + + ((((((((((main_sel_op_add + main_sel_op_sub) + main_sel_op_mul) + main_sel_op_div) + + main_sel_op_not) + + main_sel_op_eq) + + main_sel_op_lt) + + main_sel_op_lte) + + main_sel_op_shr) + + main_sel_op_shl) + + main_sel_op_cast)) + + ((main_sel_op_and + main_sel_op_or) + main_sel_op_xor)) + + (main_sel_op_cmov + main_sel_op_mov)) + + ((((main_sel_op_radix_le + main_sel_op_sha256) + main_sel_op_poseidon2) + + main_sel_op_keccak) + + main_sel_op_pedersen)) + + ((((((((((main_sel_op_sender + main_sel_op_address) + main_sel_op_storage_address) + + main_sel_op_chain_id) + + main_sel_op_version) + + main_sel_op_block_number) + + main_sel_op_coinbase) + + main_sel_op_timestamp) + + main_sel_op_fee_per_l2_gas) + + main_sel_op_fee_per_da_gas) + + main_sel_op_transaction_fee)) + + ((((((main_sel_op_note_hash_exists + main_sel_op_emit_note_hash) + + main_sel_op_nullifier_exists) + + main_sel_op_emit_nullifier) + + main_sel_op_l1_to_l2_msg_exists) + + main_sel_op_emit_unencrypted_log) + + main_sel_op_emit_l2_to_l1_msg)) + + (main_sel_op_dagasleft + main_sel_op_l2gasleft))) - + (((main_sel_op_jump + main_sel_op_jumpi) + main_sel_op_internal_call) + + main_sel_op_internal_return)) - + main_sel_op_sload) - + main_sel_op_sstore) - + main_sel_mem_op_activate_gas) - + FF(0)); tmp *= scaling_factor; std::get<96>(evals) += tmp; } @@ -1145,8 +1156,67 @@ template class mainImpl { Avm_DECLARE_VIEWS(97); auto tmp = - ((((-main_sel_first + FF(1)) * (-main_sel_op_halt + FF(1))) * - (((((((main_sel_op_fdiv + + (((((-main_sel_first + FF(1)) * (-main_sel_op_halt + FF(1))) * + (((((((main_sel_op_fdiv + + ((((((((((main_sel_op_add + main_sel_op_sub) + main_sel_op_mul) + main_sel_op_div) + + main_sel_op_not) + + main_sel_op_eq) + + main_sel_op_lt) + + main_sel_op_lte) + + main_sel_op_shr) + + main_sel_op_shl) + + main_sel_op_cast)) + + 
((main_sel_op_and + main_sel_op_or) + main_sel_op_xor)) + + (main_sel_op_cmov + main_sel_op_mov)) + + ((((main_sel_op_radix_le + main_sel_op_sha256) + main_sel_op_poseidon2) + main_sel_op_keccak) + + main_sel_op_pedersen)) + + ((((((((((main_sel_op_sender + main_sel_op_address) + main_sel_op_storage_address) + + main_sel_op_chain_id) + + main_sel_op_version) + + main_sel_op_block_number) + + main_sel_op_coinbase) + + main_sel_op_timestamp) + + main_sel_op_fee_per_l2_gas) + + main_sel_op_fee_per_da_gas) + + main_sel_op_transaction_fee)) + + ((((((main_sel_op_note_hash_exists + main_sel_op_emit_note_hash) + main_sel_op_nullifier_exists) + + main_sel_op_emit_nullifier) + + main_sel_op_l1_to_l2_msg_exists) + + main_sel_op_emit_unencrypted_log) + + main_sel_op_emit_l2_to_l1_msg)) + + (main_sel_op_dagasleft + main_sel_op_l2gasleft))) * + (main_pc_shift - (main_pc + FF(1)))) - + FF(0)); + tmp *= scaling_factor; + std::get<97>(evals) += tmp; + } + // Contribution 98 + { + Avm_DECLARE_VIEWS(98); + + auto tmp = + (((-(((main_sel_first + main_sel_op_internal_call) + main_sel_op_internal_return) + main_sel_op_halt) + + FF(1)) * + (main_internal_return_ptr_shift - main_internal_return_ptr)) - + FF(0)); + tmp *= scaling_factor; + std::get<98>(evals) += tmp; + } + // Contribution 99 + { + Avm_DECLARE_VIEWS(99); + + auto tmp = + (((main_sel_op_internal_call + main_sel_op_internal_return) * (main_space_id - FF(255))) - FF(0)); + tmp *= scaling_factor; + std::get<99>(evals) += tmp; + } + // Contribution 100 + { + Avm_DECLARE_VIEWS(100); + + auto tmp = + (((((((((main_sel_op_fdiv + ((((((((((main_sel_op_add + main_sel_op_sub) + main_sel_op_mul) + main_sel_op_div) + main_sel_op_not) + main_sel_op_eq) + @@ -1173,64 +1243,9 @@ template class mainImpl { main_sel_op_l1_to_l2_msg_exists) + main_sel_op_emit_unencrypted_log) + main_sel_op_emit_l2_to_l1_msg)) + - (main_sel_op_dagasleft + main_sel_op_l2gasleft))) * - (main_pc_shift - (main_pc + FF(1)))); - tmp *= scaling_factor; - 
std::get<97>(evals) += tmp; - } - // Contribution 98 - { - Avm_DECLARE_VIEWS(98); - - auto tmp = - ((-(((main_sel_first + main_sel_op_internal_call) + main_sel_op_internal_return) + main_sel_op_halt) + - FF(1)) * - (main_internal_return_ptr_shift - main_internal_return_ptr)); - tmp *= scaling_factor; - std::get<98>(evals) += tmp; - } - // Contribution 99 - { - Avm_DECLARE_VIEWS(99); - - auto tmp = ((main_sel_op_internal_call + main_sel_op_internal_return) * (main_space_id - FF(255))); - tmp *= scaling_factor; - std::get<99>(evals) += tmp; - } - // Contribution 100 - { - Avm_DECLARE_VIEWS(100); - - auto tmp = - ((((((((main_sel_op_fdiv + - ((((((((((main_sel_op_add + main_sel_op_sub) + main_sel_op_mul) + main_sel_op_div) + - main_sel_op_not) + - main_sel_op_eq) + - main_sel_op_lt) + - main_sel_op_lte) + - main_sel_op_shr) + - main_sel_op_shl) + - main_sel_op_cast)) + - ((main_sel_op_and + main_sel_op_or) + main_sel_op_xor)) + - (main_sel_op_cmov + main_sel_op_mov)) + - ((((main_sel_op_radix_le + main_sel_op_sha256) + main_sel_op_poseidon2) + main_sel_op_keccak) + - main_sel_op_pedersen)) + - ((((((((((main_sel_op_sender + main_sel_op_address) + main_sel_op_storage_address) + - main_sel_op_chain_id) + - main_sel_op_version) + - main_sel_op_block_number) + - main_sel_op_coinbase) + - main_sel_op_timestamp) + - main_sel_op_fee_per_l2_gas) + - main_sel_op_fee_per_da_gas) + - main_sel_op_transaction_fee)) + - ((((((main_sel_op_note_hash_exists + main_sel_op_emit_note_hash) + main_sel_op_nullifier_exists) + - main_sel_op_emit_nullifier) + - main_sel_op_l1_to_l2_msg_exists) + - main_sel_op_emit_unencrypted_log) + - main_sel_op_emit_l2_to_l1_msg)) + - (main_sel_op_dagasleft + main_sel_op_l2gasleft)) * - (main_call_ptr - main_space_id)); + (main_sel_op_dagasleft + main_sel_op_l2gasleft)) * + (main_call_ptr - main_space_id)) - + FF(0)); tmp *= scaling_factor; std::get<100>(evals) += tmp; } @@ -1238,7 +1253,8 @@ template class mainImpl { { Avm_DECLARE_VIEWS(101); - auto tmp = 
((main_sel_op_cmov + main_sel_op_jumpi) * (((main_id * main_inv) - FF(1)) + main_id_zero)); + auto tmp = + (((main_sel_op_cmov + main_sel_op_jumpi) * (((main_id * main_inv) - FF(1)) + main_id_zero)) - FF(0)); tmp *= scaling_factor; std::get<101>(evals) += tmp; } @@ -1246,7 +1262,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(102); - auto tmp = (((main_sel_op_cmov + main_sel_op_jumpi) * main_id_zero) * (-main_inv + FF(1))); + auto tmp = ((((main_sel_op_cmov + main_sel_op_jumpi) * main_id_zero) * (-main_inv + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<102>(evals) += tmp; } @@ -1270,7 +1286,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(105); - auto tmp = (main_sel_mov_ia_to_ic * (main_ia - main_ic)); + auto tmp = ((main_sel_mov_ia_to_ic * (main_ia - main_ic)) - FF(0)); tmp *= scaling_factor; std::get<105>(evals) += tmp; } @@ -1278,7 +1294,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(106); - auto tmp = (main_sel_mov_ib_to_ic * (main_ib - main_ic)); + auto tmp = ((main_sel_mov_ib_to_ic * (main_ib - main_ic)) - FF(0)); tmp *= scaling_factor; std::get<106>(evals) += tmp; } @@ -1286,7 +1302,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(107); - auto tmp = ((main_sel_op_mov + main_sel_op_cmov) * (main_r_in_tag - main_w_in_tag)); + auto tmp = (((main_sel_op_mov + main_sel_op_cmov) * (main_r_in_tag - main_w_in_tag)) - FF(0)); tmp *= scaling_factor; std::get<107>(evals) += tmp; } @@ -1313,13 +1329,14 @@ template class mainImpl { Avm_DECLARE_VIEWS(109); auto tmp = - ((((((((((main_sel_op_add + main_sel_op_sub) + main_sel_op_mul) + main_sel_op_div) + main_sel_op_not) + - main_sel_op_eq) + - main_sel_op_lt) + - main_sel_op_lte) + - main_sel_op_shr) + - main_sel_op_shl) * - (main_alu_in_tag - main_r_in_tag)); + (((((((((((main_sel_op_add + main_sel_op_sub) + main_sel_op_mul) + main_sel_op_div) + main_sel_op_not) + + main_sel_op_eq) + + main_sel_op_lt) + + main_sel_op_lte) + + main_sel_op_shr) + + main_sel_op_shl) * + (main_alu_in_tag - main_r_in_tag)) - + 
FF(0)); tmp *= scaling_factor; std::get<109>(evals) += tmp; } @@ -1327,7 +1344,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(110); - auto tmp = (main_sel_op_cast * (main_alu_in_tag - main_w_in_tag)); + auto tmp = ((main_sel_op_cast * (main_alu_in_tag - main_w_in_tag)) - FF(0)); tmp *= scaling_factor; std::get<110>(evals) += tmp; } @@ -1335,7 +1352,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(111); - auto tmp = (main_sel_op_l2gasleft * (main_ia - main_l2_gas_remaining_shift)); + auto tmp = ((main_sel_op_l2gasleft * (main_ia - main_l2_gas_remaining_shift)) - FF(0)); tmp *= scaling_factor; std::get<111>(evals) += tmp; } @@ -1343,7 +1360,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(112); - auto tmp = (main_sel_op_dagasleft * (main_ia - main_da_gas_remaining_shift)); + auto tmp = ((main_sel_op_dagasleft * (main_ia - main_da_gas_remaining_shift)) - FF(0)); tmp *= scaling_factor; std::get<112>(evals) += tmp; } @@ -1351,7 +1368,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(113); - auto tmp = (main_sel_op_sender * (kernel_kernel_in_offset - FF(0))); + auto tmp = ((main_sel_op_sender * (kernel_kernel_in_offset - FF(0))) - FF(0)); tmp *= scaling_factor; std::get<113>(evals) += tmp; } @@ -1359,7 +1376,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(114); - auto tmp = (main_sel_op_address * (kernel_kernel_in_offset - FF(1))); + auto tmp = ((main_sel_op_address * (kernel_kernel_in_offset - FF(1))) - FF(0)); tmp *= scaling_factor; std::get<114>(evals) += tmp; } @@ -1367,7 +1384,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(115); - auto tmp = (main_sel_op_storage_address * (kernel_kernel_in_offset - FF(2))); + auto tmp = ((main_sel_op_storage_address * (kernel_kernel_in_offset - FF(2))) - FF(0)); tmp *= scaling_factor; std::get<115>(evals) += tmp; } @@ -1375,7 +1392,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(116); - auto tmp = (main_sel_op_fee_per_da_gas * (kernel_kernel_in_offset - FF(35))); + auto tmp = ((main_sel_op_fee_per_da_gas * 
(kernel_kernel_in_offset - FF(35))) - FF(0)); tmp *= scaling_factor; std::get<116>(evals) += tmp; } @@ -1383,7 +1400,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(117); - auto tmp = (main_sel_op_fee_per_l2_gas * (kernel_kernel_in_offset - FF(36))); + auto tmp = ((main_sel_op_fee_per_l2_gas * (kernel_kernel_in_offset - FF(36))) - FF(0)); tmp *= scaling_factor; std::get<117>(evals) += tmp; } @@ -1391,7 +1408,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(118); - auto tmp = (main_sel_op_transaction_fee * (kernel_kernel_in_offset - FF(40))); + auto tmp = ((main_sel_op_transaction_fee * (kernel_kernel_in_offset - FF(40))) - FF(0)); tmp *= scaling_factor; std::get<118>(evals) += tmp; } @@ -1399,7 +1416,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(119); - auto tmp = (main_sel_op_chain_id * (kernel_kernel_in_offset - FF(29))); + auto tmp = ((main_sel_op_chain_id * (kernel_kernel_in_offset - FF(29))) - FF(0)); tmp *= scaling_factor; std::get<119>(evals) += tmp; } @@ -1407,7 +1424,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(120); - auto tmp = (main_sel_op_version * (kernel_kernel_in_offset - FF(30))); + auto tmp = ((main_sel_op_version * (kernel_kernel_in_offset - FF(30))) - FF(0)); tmp *= scaling_factor; std::get<120>(evals) += tmp; } @@ -1415,7 +1432,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(121); - auto tmp = (main_sel_op_block_number * (kernel_kernel_in_offset - FF(31))); + auto tmp = ((main_sel_op_block_number * (kernel_kernel_in_offset - FF(31))) - FF(0)); tmp *= scaling_factor; std::get<121>(evals) += tmp; } @@ -1423,7 +1440,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(122); - auto tmp = (main_sel_op_coinbase * (kernel_kernel_in_offset - FF(33))); + auto tmp = ((main_sel_op_coinbase * (kernel_kernel_in_offset - FF(33))) - FF(0)); tmp *= scaling_factor; std::get<122>(evals) += tmp; } @@ -1431,7 +1448,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(123); - auto tmp = (main_sel_op_timestamp * (kernel_kernel_in_offset - FF(32))); + 
auto tmp = ((main_sel_op_timestamp * (kernel_kernel_in_offset - FF(32))) - FF(0)); tmp *= scaling_factor; std::get<123>(evals) += tmp; } @@ -1439,8 +1456,9 @@ template class mainImpl { { Avm_DECLARE_VIEWS(124); - auto tmp = (main_sel_op_note_hash_exists * - (kernel_kernel_out_offset - (kernel_note_hash_exist_write_offset + FF(0)))); + auto tmp = ((main_sel_op_note_hash_exists * + (kernel_kernel_out_offset - (kernel_note_hash_exist_write_offset + FF(0)))) - + FF(0)); tmp *= scaling_factor; std::get<124>(evals) += tmp; } @@ -1448,7 +1466,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(125); - auto tmp = (main_sel_first * kernel_note_hash_exist_write_offset); + auto tmp = ((main_sel_first * kernel_note_hash_exist_write_offset) - FF(0)); tmp *= scaling_factor; std::get<125>(evals) += tmp; } @@ -1456,8 +1474,9 @@ template class mainImpl { { Avm_DECLARE_VIEWS(126); - auto tmp = (main_sel_op_emit_note_hash * - (kernel_kernel_out_offset - (kernel_emit_note_hash_write_offset + FF(176)))); + auto tmp = ((main_sel_op_emit_note_hash * + (kernel_kernel_out_offset - (kernel_emit_note_hash_write_offset + FF(176)))) - + FF(0)); tmp *= scaling_factor; std::get<126>(evals) += tmp; } @@ -1465,7 +1484,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(127); - auto tmp = (main_sel_first * kernel_emit_note_hash_write_offset); + auto tmp = ((main_sel_first * kernel_emit_note_hash_write_offset) - FF(0)); tmp *= scaling_factor; std::get<127>(evals) += tmp; } @@ -1473,10 +1492,11 @@ template class mainImpl { { Avm_DECLARE_VIEWS(128); - auto tmp = (main_sel_op_nullifier_exists * - (kernel_kernel_out_offset - - ((main_ib * (kernel_nullifier_exists_write_offset + FF(32))) + - ((-main_ib + FF(1)) * (kernel_nullifier_non_exists_write_offset + FF(64)))))); + auto tmp = ((main_sel_op_nullifier_exists * + (kernel_kernel_out_offset - + ((main_ib * (kernel_nullifier_exists_write_offset + FF(32))) + + ((-main_ib + FF(1)) * (kernel_nullifier_non_exists_write_offset + FF(64)))))) - + FF(0)); tmp *= 
scaling_factor; std::get<128>(evals) += tmp; } @@ -1484,7 +1504,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(129); - auto tmp = (main_sel_first * kernel_nullifier_exists_write_offset); + auto tmp = ((main_sel_first * kernel_nullifier_exists_write_offset) - FF(0)); tmp *= scaling_factor; std::get<129>(evals) += tmp; } @@ -1492,7 +1512,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(130); - auto tmp = (main_sel_first * kernel_nullifier_non_exists_write_offset); + auto tmp = ((main_sel_first * kernel_nullifier_non_exists_write_offset) - FF(0)); tmp *= scaling_factor; std::get<130>(evals) += tmp; } @@ -1500,8 +1520,9 @@ template class mainImpl { { Avm_DECLARE_VIEWS(131); - auto tmp = (main_sel_op_emit_nullifier * - (kernel_kernel_out_offset - (kernel_emit_nullifier_write_offset + FF(192)))); + auto tmp = ((main_sel_op_emit_nullifier * + (kernel_kernel_out_offset - (kernel_emit_nullifier_write_offset + FF(192)))) - + FF(0)); tmp *= scaling_factor; std::get<131>(evals) += tmp; } @@ -1509,7 +1530,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(132); - auto tmp = (main_sel_first * kernel_emit_nullifier_write_offset); + auto tmp = ((main_sel_first * kernel_emit_nullifier_write_offset) - FF(0)); tmp *= scaling_factor; std::get<132>(evals) += tmp; } @@ -1517,8 +1538,9 @@ template class mainImpl { { Avm_DECLARE_VIEWS(133); - auto tmp = (main_sel_op_l1_to_l2_msg_exists * - (kernel_kernel_out_offset - (kernel_l1_to_l2_msg_exists_write_offset + FF(96)))); + auto tmp = ((main_sel_op_l1_to_l2_msg_exists * + (kernel_kernel_out_offset - (kernel_l1_to_l2_msg_exists_write_offset + FF(96)))) - + FF(0)); tmp *= scaling_factor; std::get<133>(evals) += tmp; } @@ -1526,7 +1548,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(134); - auto tmp = (main_sel_first * kernel_l1_to_l2_msg_exists_write_offset); + auto tmp = ((main_sel_first * kernel_l1_to_l2_msg_exists_write_offset) - FF(0)); tmp *= scaling_factor; std::get<134>(evals) += tmp; } @@ -1534,8 +1556,9 @@ template class 
mainImpl { { Avm_DECLARE_VIEWS(135); - auto tmp = (main_sel_op_emit_unencrypted_log * - (kernel_kernel_out_offset - (kernel_emit_unencrypted_log_write_offset + FF(210)))); + auto tmp = ((main_sel_op_emit_unencrypted_log * + (kernel_kernel_out_offset - (kernel_emit_unencrypted_log_write_offset + FF(210)))) - + FF(0)); tmp *= scaling_factor; std::get<135>(evals) += tmp; } @@ -1543,7 +1566,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(136); - auto tmp = (main_sel_first * kernel_emit_unencrypted_log_write_offset); + auto tmp = ((main_sel_first * kernel_emit_unencrypted_log_write_offset) - FF(0)); tmp *= scaling_factor; std::get<136>(evals) += tmp; } @@ -1551,8 +1574,9 @@ template class mainImpl { { Avm_DECLARE_VIEWS(137); - auto tmp = (main_sel_op_emit_l2_to_l1_msg * - (kernel_kernel_out_offset - (kernel_emit_l2_to_l1_msg_write_offset + FF(208)))); + auto tmp = ((main_sel_op_emit_l2_to_l1_msg * + (kernel_kernel_out_offset - (kernel_emit_l2_to_l1_msg_write_offset + FF(208)))) - + FF(0)); tmp *= scaling_factor; std::get<137>(evals) += tmp; } @@ -1560,7 +1584,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(138); - auto tmp = (main_sel_first * kernel_emit_l2_to_l1_msg_write_offset); + auto tmp = ((main_sel_first * kernel_emit_l2_to_l1_msg_write_offset) - FF(0)); tmp *= scaling_factor; std::get<138>(evals) += tmp; } @@ -1568,7 +1592,8 @@ template class mainImpl { { Avm_DECLARE_VIEWS(139); - auto tmp = (main_sel_op_sload * (kernel_kernel_out_offset - (kernel_sload_write_offset + FF(144)))); + auto tmp = + ((main_sel_op_sload * (kernel_kernel_out_offset - (kernel_sload_write_offset + FF(144)))) - FF(0)); tmp *= scaling_factor; std::get<139>(evals) += tmp; } @@ -1576,7 +1601,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(140); - auto tmp = (main_sel_first * kernel_sload_write_offset); + auto tmp = ((main_sel_first * kernel_sload_write_offset) - FF(0)); tmp *= scaling_factor; std::get<140>(evals) += tmp; } @@ -1584,7 +1609,8 @@ template class mainImpl { { 
Avm_DECLARE_VIEWS(141); - auto tmp = (main_sel_op_sstore * (kernel_kernel_out_offset - (kernel_sstore_write_offset + FF(112)))); + auto tmp = + ((main_sel_op_sstore * (kernel_kernel_out_offset - (kernel_sstore_write_offset + FF(112)))) - FF(0)); tmp *= scaling_factor; std::get<141>(evals) += tmp; } @@ -1592,7 +1618,7 @@ template class mainImpl { { Avm_DECLARE_VIEWS(142); - auto tmp = (main_sel_first * kernel_sstore_write_offset); + auto tmp = ((main_sel_first * kernel_sstore_write_offset) - FF(0)); tmp *= scaling_factor; std::get<142>(evals) += tmp; } @@ -1601,12 +1627,13 @@ template class mainImpl { Avm_DECLARE_VIEWS(143); auto tmp = - (((((((main_sel_op_note_hash_exists + main_sel_op_emit_note_hash) + main_sel_op_nullifier_exists) + - main_sel_op_emit_nullifier) + - main_sel_op_l1_to_l2_msg_exists) + - main_sel_op_emit_unencrypted_log) + - main_sel_op_emit_l2_to_l1_msg) * - (kernel_side_effect_counter_shift - (kernel_side_effect_counter + FF(1)))); + ((((((((main_sel_op_note_hash_exists + main_sel_op_emit_note_hash) + main_sel_op_nullifier_exists) + + main_sel_op_emit_nullifier) + + main_sel_op_l1_to_l2_msg_exists) + + main_sel_op_emit_unencrypted_log) + + main_sel_op_emit_l2_to_l1_msg) * + (kernel_side_effect_counter_shift - (kernel_side_effect_counter + FF(1)))) - + FF(0)); tmp *= scaling_factor; std::get<143>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/mem.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/mem.hpp index 8b1c6dcfe57..ea5a125887f 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/mem.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/mem.hpp @@ -126,7 +126,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(0); - auto tmp = (mem_lastAccess * (-mem_lastAccess + FF(1))); + auto tmp = ((mem_lastAccess * (-mem_lastAccess + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } @@ -134,7 +134,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(1); 
- auto tmp = (mem_last * (-mem_last + FF(1))); + auto tmp = ((mem_last * (-mem_last + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<1>(evals) += tmp; } @@ -142,7 +142,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(2); - auto tmp = (mem_rw * (-mem_rw + FF(1))); + auto tmp = ((mem_rw * (-mem_rw + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<2>(evals) += tmp; } @@ -150,7 +150,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(3); - auto tmp = (mem_tag_err * (-mem_tag_err + FF(1))); + auto tmp = ((mem_tag_err * (-mem_tag_err + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<3>(evals) += tmp; } @@ -158,7 +158,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(4); - auto tmp = (mem_sel_op_a * (-mem_sel_op_a + FF(1))); + auto tmp = ((mem_sel_op_a * (-mem_sel_op_a + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<4>(evals) += tmp; } @@ -166,7 +166,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(5); - auto tmp = (mem_sel_op_b * (-mem_sel_op_b + FF(1))); + auto tmp = ((mem_sel_op_b * (-mem_sel_op_b + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<5>(evals) += tmp; } @@ -174,7 +174,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(6); - auto tmp = (mem_sel_op_c * (-mem_sel_op_c + FF(1))); + auto tmp = ((mem_sel_op_c * (-mem_sel_op_c + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<6>(evals) += tmp; } @@ -182,7 +182,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(7); - auto tmp = (mem_sel_op_d * (-mem_sel_op_d + FF(1))); + auto tmp = ((mem_sel_op_d * (-mem_sel_op_d + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<7>(evals) += tmp; } @@ -190,7 +190,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(8); - auto tmp = (mem_sel_resolve_ind_addr_a * (-mem_sel_resolve_ind_addr_a + FF(1))); + auto tmp = ((mem_sel_resolve_ind_addr_a * (-mem_sel_resolve_ind_addr_a + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<8>(evals) += tmp; } @@ -198,7 +198,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(9); - auto tmp = (mem_sel_resolve_ind_addr_b * 
(-mem_sel_resolve_ind_addr_b + FF(1))); + auto tmp = ((mem_sel_resolve_ind_addr_b * (-mem_sel_resolve_ind_addr_b + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<9>(evals) += tmp; } @@ -206,7 +206,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(10); - auto tmp = (mem_sel_resolve_ind_addr_c * (-mem_sel_resolve_ind_addr_c + FF(1))); + auto tmp = ((mem_sel_resolve_ind_addr_c * (-mem_sel_resolve_ind_addr_c + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<10>(evals) += tmp; } @@ -214,7 +214,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(11); - auto tmp = (mem_sel_resolve_ind_addr_d * (-mem_sel_resolve_ind_addr_d + FF(1))); + auto tmp = ((mem_sel_resolve_ind_addr_d * (-mem_sel_resolve_ind_addr_d + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<11>(evals) += tmp; } @@ -235,7 +235,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(13); - auto tmp = (mem_sel_mem * (mem_sel_mem - FF(1))); + auto tmp = ((mem_sel_mem * (mem_sel_mem - FF(1))) - FF(0)); tmp *= scaling_factor; std::get<13>(evals) += tmp; } @@ -243,7 +243,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(14); - auto tmp = (((-main_sel_first + FF(1)) * mem_sel_mem_shift) * (-mem_sel_mem + FF(1))); + auto tmp = ((((-main_sel_first + FF(1)) * mem_sel_mem_shift) * (-mem_sel_mem + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<14>(evals) += tmp; } @@ -251,7 +251,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(15); - auto tmp = (main_sel_first * mem_sel_mem); + auto tmp = ((main_sel_first * mem_sel_mem) - FF(0)); tmp *= scaling_factor; std::get<15>(evals) += tmp; } @@ -259,7 +259,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(16); - auto tmp = (((-mem_last + FF(1)) * mem_sel_mem) * (-mem_sel_mem_shift + FF(1))); + auto tmp = ((((-mem_last + FF(1)) * mem_sel_mem) * (-mem_sel_mem_shift + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<16>(evals) += tmp; } @@ -302,7 +302,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(20); - auto tmp = (main_sel_first * (-mem_lastAccess + FF(1))); + auto tmp = 
((main_sel_first * (-mem_lastAccess + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<20>(evals) += tmp; } @@ -310,7 +310,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(21); - auto tmp = ((-mem_lastAccess + FF(1)) * (mem_glob_addr_shift - mem_glob_addr)); + auto tmp = (((-mem_lastAccess + FF(1)) * (mem_glob_addr_shift - mem_glob_addr)) - FF(0)); tmp *= scaling_factor; std::get<21>(evals) += tmp; } @@ -318,11 +318,12 @@ template class memImpl { { Avm_DECLARE_VIEWS(22); - auto tmp = (mem_sel_rng_chk * (((((mem_lastAccess * (mem_glob_addr_shift - mem_glob_addr)) + - ((-mem_lastAccess + FF(1)) * (mem_tsp_shift - mem_tsp))) - - (mem_diff_hi * FF(4294967296UL))) - - (mem_diff_mid * FF(65536))) - - mem_diff_lo)); + auto tmp = ((mem_sel_rng_chk * (((((mem_lastAccess * (mem_glob_addr_shift - mem_glob_addr)) + + ((-mem_lastAccess + FF(1)) * (mem_tsp_shift - mem_tsp))) - + (mem_diff_hi * FF(4294967296UL))) - + (mem_diff_mid * FF(65536))) - + mem_diff_lo)) - + FF(0)); tmp *= scaling_factor; std::get<22>(evals) += tmp; } @@ -330,7 +331,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(23); - auto tmp = (((-mem_lastAccess + FF(1)) * (-mem_rw_shift + FF(1))) * (mem_val_shift - mem_val)); + auto tmp = ((((-mem_lastAccess + FF(1)) * (-mem_rw_shift + FF(1))) * (mem_val_shift - mem_val)) - FF(0)); tmp *= scaling_factor; std::get<23>(evals) += tmp; } @@ -338,7 +339,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(24); - auto tmp = (((-mem_lastAccess + FF(1)) * (-mem_rw_shift + FF(1))) * (mem_tag_shift - mem_tag)); + auto tmp = ((((-mem_lastAccess + FF(1)) * (-mem_rw_shift + FF(1))) * (mem_tag_shift - mem_tag)) - FF(0)); tmp *= scaling_factor; std::get<24>(evals) += tmp; } @@ -346,7 +347,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(25); - auto tmp = ((mem_lastAccess * (-mem_rw_shift + FF(1))) * mem_val_shift); + auto tmp = (((mem_lastAccess * (-mem_rw_shift + FF(1))) * mem_val_shift) - FF(0)); tmp *= scaling_factor; std::get<25>(evals) += tmp; } @@ -364,8 +365,9 @@ template 
class memImpl { { Avm_DECLARE_VIEWS(27); - auto tmp = (((-mem_skip_check_tag + FF(1)) * (-mem_rw + FF(1))) * - (((mem_r_in_tag - mem_tag) * (-mem_one_min_inv + FF(1))) - mem_tag_err)); + auto tmp = ((((-mem_skip_check_tag + FF(1)) * (-mem_rw + FF(1))) * + (((mem_r_in_tag - mem_tag) * (-mem_one_min_inv + FF(1))) - mem_tag_err)) - + FF(0)); tmp *= scaling_factor; std::get<27>(evals) += tmp; } @@ -373,7 +375,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(28); - auto tmp = ((-mem_tag_err + FF(1)) * mem_one_min_inv); + auto tmp = (((-mem_tag_err + FF(1)) * mem_one_min_inv) - FF(0)); tmp *= scaling_factor; std::get<28>(evals) += tmp; } @@ -381,7 +383,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(29); - auto tmp = ((mem_skip_check_tag + mem_rw) * mem_tag_err); + auto tmp = (((mem_skip_check_tag + mem_rw) * mem_tag_err) - FF(0)); tmp *= scaling_factor; std::get<29>(evals) += tmp; } @@ -389,7 +391,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(30); - auto tmp = (mem_rw * (mem_w_in_tag - mem_tag)); + auto tmp = ((mem_rw * (mem_w_in_tag - mem_tag)) - FF(0)); tmp *= scaling_factor; std::get<30>(evals) += tmp; } @@ -397,7 +399,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(31); - auto tmp = (mem_rw * mem_tag_err); + auto tmp = ((mem_rw * mem_tag_err) - FF(0)); tmp *= scaling_factor; std::get<31>(evals) += tmp; } @@ -405,7 +407,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(32); - auto tmp = (mem_sel_resolve_ind_addr_a * (mem_r_in_tag - FF(3))); + auto tmp = ((mem_sel_resolve_ind_addr_a * (mem_r_in_tag - FF(3))) - FF(0)); tmp *= scaling_factor; std::get<32>(evals) += tmp; } @@ -413,7 +415,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(33); - auto tmp = (mem_sel_resolve_ind_addr_b * (mem_r_in_tag - FF(3))); + auto tmp = ((mem_sel_resolve_ind_addr_b * (mem_r_in_tag - FF(3))) - FF(0)); tmp *= scaling_factor; std::get<33>(evals) += tmp; } @@ -421,7 +423,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(34); - auto tmp = (mem_sel_resolve_ind_addr_c * (mem_r_in_tag 
- FF(3))); + auto tmp = ((mem_sel_resolve_ind_addr_c * (mem_r_in_tag - FF(3))) - FF(0)); tmp *= scaling_factor; std::get<34>(evals) += tmp; } @@ -429,7 +431,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(35); - auto tmp = (mem_sel_resolve_ind_addr_d * (mem_r_in_tag - FF(3))); + auto tmp = ((mem_sel_resolve_ind_addr_d * (mem_r_in_tag - FF(3))) - FF(0)); tmp *= scaling_factor; std::get<35>(evals) += tmp; } @@ -437,7 +439,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(36); - auto tmp = (mem_sel_resolve_ind_addr_a * mem_rw); + auto tmp = ((mem_sel_resolve_ind_addr_a * mem_rw) - FF(0)); tmp *= scaling_factor; std::get<36>(evals) += tmp; } @@ -445,7 +447,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(37); - auto tmp = (mem_sel_resolve_ind_addr_b * mem_rw); + auto tmp = ((mem_sel_resolve_ind_addr_b * mem_rw) - FF(0)); tmp *= scaling_factor; std::get<37>(evals) += tmp; } @@ -453,7 +455,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(38); - auto tmp = (mem_sel_resolve_ind_addr_c * mem_rw); + auto tmp = ((mem_sel_resolve_ind_addr_c * mem_rw) - FF(0)); tmp *= scaling_factor; std::get<38>(evals) += tmp; } @@ -461,7 +463,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(39); - auto tmp = (mem_sel_resolve_ind_addr_d * mem_rw); + auto tmp = ((mem_sel_resolve_ind_addr_d * mem_rw) - FF(0)); tmp *= scaling_factor; std::get<39>(evals) += tmp; } @@ -469,7 +471,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(40); - auto tmp = ((mem_sel_mov_ia_to_ic + mem_sel_mov_ib_to_ic) * mem_tag_err); + auto tmp = (((mem_sel_mov_ia_to_ic + mem_sel_mov_ib_to_ic) * mem_tag_err) - FF(0)); tmp *= scaling_factor; std::get<40>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/pedersen.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/pedersen.hpp index d09722720e9..eb73ea8dfdd 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/pedersen.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/pedersen.hpp @@ 
-37,7 +37,7 @@ template class pedersenImpl { { Avm_DECLARE_VIEWS(0); - auto tmp = (pedersen_sel_pedersen * (-pedersen_sel_pedersen + FF(1))); + auto tmp = ((pedersen_sel_pedersen * (-pedersen_sel_pedersen + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/poseidon2.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/poseidon2.hpp index a758f8333b2..1f6d384a3f7 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/poseidon2.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/poseidon2.hpp @@ -37,7 +37,7 @@ template class poseidon2Impl { { Avm_DECLARE_VIEWS(0); - auto tmp = (poseidon2_sel_poseidon_perm * (-poseidon2_sel_poseidon_perm + FF(1))); + auto tmp = ((poseidon2_sel_poseidon_perm * (-poseidon2_sel_poseidon_perm + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/sha256.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/sha256.hpp index 189cbb7c699..d77d4d69f67 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/sha256.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/sha256.hpp @@ -37,7 +37,7 @@ template class sha256Impl { { Avm_DECLARE_VIEWS(0); - auto tmp = (sha256_sel_sha256_compression * (-sha256_sel_sha256_compression + FF(1))); + auto tmp = ((sha256_sel_sha256_compression * (-sha256_sel_sha256_compression + FF(1))) - FF(0)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp index 183d951f070..5d5b3c03953 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp @@ -44,7 +44,6 @@ class MockCircuits { */ 
template static void add_arithmetic_gates(Builder& builder, const size_t num_gates = 4) { - // For good measure, include a gate with some public inputs for (size_t i = 0; i < num_gates; ++i) { FF a = FF::random_element(&engine); FF b = FF::random_element(&engine); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp index a0db02764b3..7d58df8071a 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp @@ -20,6 +20,10 @@ template std::vector AvmFullRow::names() { return { "main_clk", "main_sel_first", + "kernel_kernel_inputs", + "kernel_kernel_value_out", + "kernel_kernel_side_effect_out", + "kernel_kernel_metadata_out", "alu_a_hi", "alu_a_lo", "alu_b_hi", @@ -140,11 +144,7 @@ template std::vector AvmFullRow::names() "kernel_emit_nullifier_write_offset", "kernel_emit_unencrypted_log_write_offset", "kernel_kernel_in_offset", - "kernel_kernel_inputs", - "kernel_kernel_metadata_out", "kernel_kernel_out_offset", - "kernel_kernel_side_effect_out", - "kernel_kernel_value_out", "kernel_l1_to_l2_msg_exists_write_offset", "kernel_note_hash_exist_write_offset", "kernel_nullifier_exists_write_offset", @@ -410,18 +410,20 @@ template std::ostream& operator<<(std::ostream& os, AvmFullRow { return os << field_to_string(row.main_clk) << "," << field_to_string(row.main_sel_first) << "," - << field_to_string(row.alu_a_hi) << "," << field_to_string(row.alu_a_lo) << "," - << field_to_string(row.alu_b_hi) << "," << field_to_string(row.alu_b_lo) << "," - << field_to_string(row.alu_borrow) << "," << field_to_string(row.alu_cf) << "," - << field_to_string(row.alu_clk) << "," << field_to_string(row.alu_cmp_rng_ctr) << "," - << field_to_string(row.alu_div_u16_r0) << "," << field_to_string(row.alu_div_u16_r1) << "," - << field_to_string(row.alu_div_u16_r2) << "," << 
field_to_string(row.alu_div_u16_r3) << "," - << field_to_string(row.alu_div_u16_r4) << "," << field_to_string(row.alu_div_u16_r5) << "," - << field_to_string(row.alu_div_u16_r6) << "," << field_to_string(row.alu_div_u16_r7) << "," - << field_to_string(row.alu_divisor_hi) << "," << field_to_string(row.alu_divisor_lo) << "," - << field_to_string(row.alu_ff_tag) << "," << field_to_string(row.alu_ia) << "," - << field_to_string(row.alu_ib) << "," << field_to_string(row.alu_ic) << "," - << field_to_string(row.alu_in_tag) << "," << field_to_string(row.alu_op_add) << "," + << field_to_string(row.kernel_kernel_inputs) << "," << field_to_string(row.kernel_kernel_value_out) << "," + << field_to_string(row.kernel_kernel_side_effect_out) << "," + << field_to_string(row.kernel_kernel_metadata_out) << "," << field_to_string(row.alu_a_hi) << "," + << field_to_string(row.alu_a_lo) << "," << field_to_string(row.alu_b_hi) << "," + << field_to_string(row.alu_b_lo) << "," << field_to_string(row.alu_borrow) << "," + << field_to_string(row.alu_cf) << "," << field_to_string(row.alu_clk) << "," + << field_to_string(row.alu_cmp_rng_ctr) << "," << field_to_string(row.alu_div_u16_r0) << "," + << field_to_string(row.alu_div_u16_r1) << "," << field_to_string(row.alu_div_u16_r2) << "," + << field_to_string(row.alu_div_u16_r3) << "," << field_to_string(row.alu_div_u16_r4) << "," + << field_to_string(row.alu_div_u16_r5) << "," << field_to_string(row.alu_div_u16_r6) << "," + << field_to_string(row.alu_div_u16_r7) << "," << field_to_string(row.alu_divisor_hi) << "," + << field_to_string(row.alu_divisor_lo) << "," << field_to_string(row.alu_ff_tag) << "," + << field_to_string(row.alu_ia) << "," << field_to_string(row.alu_ib) << "," << field_to_string(row.alu_ic) + << "," << field_to_string(row.alu_in_tag) << "," << field_to_string(row.alu_op_add) << "," << field_to_string(row.alu_op_cast) << "," << field_to_string(row.alu_op_cast_prev) << "," << field_to_string(row.alu_op_div) << "," << 
field_to_string(row.alu_op_div_a_lt_b) << "," << field_to_string(row.alu_op_div_std) << "," << field_to_string(row.alu_op_eq) << "," @@ -473,11 +475,8 @@ template std::ostream& operator<<(std::ostream& os, AvmFullRow << field_to_string(row.kernel_emit_note_hash_write_offset) << "," << field_to_string(row.kernel_emit_nullifier_write_offset) << "," << field_to_string(row.kernel_emit_unencrypted_log_write_offset) << "," - << field_to_string(row.kernel_kernel_in_offset) << "," << field_to_string(row.kernel_kernel_inputs) << "," - << field_to_string(row.kernel_kernel_metadata_out) << "," << field_to_string(row.kernel_kernel_out_offset) - << "," << field_to_string(row.kernel_kernel_side_effect_out) << "," - << field_to_string(row.kernel_kernel_value_out) << "," - << field_to_string(row.kernel_l1_to_l2_msg_exists_write_offset) << "," + << field_to_string(row.kernel_kernel_in_offset) << "," << field_to_string(row.kernel_kernel_out_offset) + << "," << field_to_string(row.kernel_l1_to_l2_msg_exists_write_offset) << "," << field_to_string(row.kernel_note_hash_exist_write_offset) << "," << field_to_string(row.kernel_nullifier_exists_write_offset) << "," << field_to_string(row.kernel_nullifier_non_exists_write_offset) << "," diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp index 83e3cd74b16..b8f8a107901 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp @@ -87,6 +87,10 @@ namespace bb { template struct AvmFullRow { FF main_clk{}; FF main_sel_first{}; + FF kernel_kernel_inputs{}; + FF kernel_kernel_value_out{}; + FF kernel_kernel_side_effect_out{}; + FF kernel_kernel_metadata_out{}; FF alu_a_hi{}; FF alu_a_lo{}; FF alu_b_hi{}; @@ -207,11 +211,7 @@ template struct AvmFullRow { FF kernel_emit_nullifier_write_offset{}; FF kernel_emit_unencrypted_log_write_offset{}; FF 
kernel_kernel_in_offset{}; - FF kernel_kernel_inputs{}; - FF kernel_kernel_metadata_out{}; FF kernel_kernel_out_offset{}; - FF kernel_kernel_side_effect_out{}; - FF kernel_kernel_value_out{}; FF kernel_l1_to_l2_msg_exists_write_offset{}; FF kernel_note_hash_exist_write_offset{}; FF kernel_nullifier_exists_write_offset{}; @@ -570,6 +570,10 @@ class AvmCircuitBuilder { for (size_t i = 0; i < rows.size(); i++) { polys.main_clk[i] = rows[i].main_clk; polys.main_sel_first[i] = rows[i].main_sel_first; + polys.kernel_kernel_inputs[i] = rows[i].kernel_kernel_inputs; + polys.kernel_kernel_value_out[i] = rows[i].kernel_kernel_value_out; + polys.kernel_kernel_side_effect_out[i] = rows[i].kernel_kernel_side_effect_out; + polys.kernel_kernel_metadata_out[i] = rows[i].kernel_kernel_metadata_out; polys.alu_a_hi[i] = rows[i].alu_a_hi; polys.alu_a_lo[i] = rows[i].alu_a_lo; polys.alu_b_hi[i] = rows[i].alu_b_hi; @@ -690,11 +694,7 @@ class AvmCircuitBuilder { polys.kernel_emit_nullifier_write_offset[i] = rows[i].kernel_emit_nullifier_write_offset; polys.kernel_emit_unencrypted_log_write_offset[i] = rows[i].kernel_emit_unencrypted_log_write_offset; polys.kernel_kernel_in_offset[i] = rows[i].kernel_kernel_in_offset; - polys.kernel_kernel_inputs[i] = rows[i].kernel_kernel_inputs; - polys.kernel_kernel_metadata_out[i] = rows[i].kernel_kernel_metadata_out; polys.kernel_kernel_out_offset[i] = rows[i].kernel_kernel_out_offset; - polys.kernel_kernel_side_effect_out[i] = rows[i].kernel_kernel_side_effect_out; - polys.kernel_kernel_value_out[i] = rows[i].kernel_kernel_value_out; polys.kernel_l1_to_l2_msg_exists_write_offset[i] = rows[i].kernel_l1_to_l2_msg_exists_write_offset; polys.kernel_note_hash_exist_write_offset[i] = rows[i].kernel_note_hash_exist_write_offset; polys.kernel_nullifier_exists_write_offset[i] = rows[i].kernel_nullifier_exists_write_offset; diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp 
b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp index 41cae773d42..283812ece0f 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp @@ -256,6 +256,10 @@ class AvmFlavor { template class WitnessEntities { public: DEFINE_FLAVOR_MEMBERS(DataType, + kernel_kernel_inputs, + kernel_kernel_value_out, + kernel_kernel_side_effect_out, + kernel_kernel_metadata_out, alu_a_hi, alu_a_lo, alu_b_hi, @@ -376,11 +380,7 @@ class AvmFlavor { kernel_emit_nullifier_write_offset, kernel_emit_unencrypted_log_write_offset, kernel_kernel_in_offset, - kernel_kernel_inputs, - kernel_kernel_metadata_out, kernel_kernel_out_offset, - kernel_kernel_side_effect_out, - kernel_kernel_value_out, kernel_l1_to_l2_msg_exists_write_offset, kernel_note_hash_exist_write_offset, kernel_nullifier_exists_write_offset, @@ -642,7 +642,11 @@ class AvmFlavor { RefVector get_wires() { - return { alu_a_hi, + return { kernel_kernel_inputs, + kernel_kernel_value_out, + kernel_kernel_side_effect_out, + kernel_kernel_metadata_out, + alu_a_hi, alu_a_lo, alu_b_hi, alu_b_lo, @@ -762,11 +766,7 @@ class AvmFlavor { kernel_emit_nullifier_write_offset, kernel_emit_unencrypted_log_write_offset, kernel_kernel_in_offset, - kernel_kernel_inputs, - kernel_kernel_metadata_out, kernel_kernel_out_offset, - kernel_kernel_side_effect_out, - kernel_kernel_value_out, kernel_l1_to_l2_msg_exists_write_offset, kernel_note_hash_exist_write_offset, kernel_nullifier_exists_write_offset, @@ -1033,6 +1033,10 @@ class AvmFlavor { DEFINE_FLAVOR_MEMBERS(DataType, main_clk, main_sel_first, + kernel_kernel_inputs, + kernel_kernel_value_out, + kernel_kernel_side_effect_out, + kernel_kernel_metadata_out, alu_a_hi, alu_a_lo, alu_b_hi, @@ -1153,11 +1157,7 @@ class AvmFlavor { kernel_emit_nullifier_write_offset, kernel_emit_unencrypted_log_write_offset, kernel_kernel_in_offset, - kernel_kernel_inputs, - kernel_kernel_metadata_out, 
kernel_kernel_out_offset, - kernel_kernel_side_effect_out, - kernel_kernel_value_out, kernel_l1_to_l2_msg_exists_write_offset, kernel_note_hash_exist_write_offset, kernel_nullifier_exists_write_offset, @@ -1486,6 +1486,10 @@ class AvmFlavor { { return { main_clk, main_sel_first, + kernel_kernel_inputs, + kernel_kernel_value_out, + kernel_kernel_side_effect_out, + kernel_kernel_metadata_out, alu_a_hi, alu_a_lo, alu_b_hi, @@ -1606,11 +1610,7 @@ class AvmFlavor { kernel_emit_nullifier_write_offset, kernel_emit_unencrypted_log_write_offset, kernel_kernel_in_offset, - kernel_kernel_inputs, - kernel_kernel_metadata_out, kernel_kernel_out_offset, - kernel_kernel_side_effect_out, - kernel_kernel_value_out, kernel_l1_to_l2_msg_exists_write_offset, kernel_note_hash_exist_write_offset, kernel_nullifier_exists_write_offset, @@ -1939,6 +1939,10 @@ class AvmFlavor { { return { main_clk, main_sel_first, + kernel_kernel_inputs, + kernel_kernel_value_out, + kernel_kernel_side_effect_out, + kernel_kernel_metadata_out, alu_a_hi, alu_a_lo, alu_b_hi, @@ -2059,11 +2063,7 @@ class AvmFlavor { kernel_emit_nullifier_write_offset, kernel_emit_unencrypted_log_write_offset, kernel_kernel_in_offset, - kernel_kernel_inputs, - kernel_kernel_metadata_out, kernel_kernel_out_offset, - kernel_kernel_side_effect_out, - kernel_kernel_value_out, kernel_l1_to_l2_msg_exists_write_offset, kernel_note_hash_exist_write_offset, kernel_nullifier_exists_write_offset, @@ -2686,7 +2686,7 @@ class AvmFlavor { } } - [[nodiscard]] size_t get_polynomial_size() const { return alu_a_hi.size(); } + [[nodiscard]] size_t get_polynomial_size() const { return kernel_kernel_inputs.size(); } /** * @brief Returns the evaluations of all prover polynomials at one point on the boolean hypercube, which * represents one row in the execution trace. 
@@ -2748,6 +2748,10 @@ class AvmFlavor { { Base::main_clk = "MAIN_CLK"; Base::main_sel_first = "MAIN_SEL_FIRST"; + Base::kernel_kernel_inputs = "KERNEL_KERNEL_INPUTS"; + Base::kernel_kernel_value_out = "KERNEL_KERNEL_VALUE_OUT"; + Base::kernel_kernel_side_effect_out = "KERNEL_KERNEL_SIDE_EFFECT_OUT"; + Base::kernel_kernel_metadata_out = "KERNEL_KERNEL_METADATA_OUT"; Base::alu_a_hi = "ALU_A_HI"; Base::alu_a_lo = "ALU_A_LO"; Base::alu_b_hi = "ALU_B_HI"; @@ -2868,11 +2872,7 @@ class AvmFlavor { Base::kernel_emit_nullifier_write_offset = "KERNEL_EMIT_NULLIFIER_WRITE_OFFSET"; Base::kernel_emit_unencrypted_log_write_offset = "KERNEL_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET"; Base::kernel_kernel_in_offset = "KERNEL_KERNEL_IN_OFFSET"; - Base::kernel_kernel_inputs = "KERNEL_KERNEL_INPUTS"; - Base::kernel_kernel_metadata_out = "KERNEL_KERNEL_METADATA_OUT"; Base::kernel_kernel_out_offset = "KERNEL_KERNEL_OUT_OFFSET"; - Base::kernel_kernel_side_effect_out = "KERNEL_KERNEL_SIDE_EFFECT_OUT"; - Base::kernel_kernel_value_out = "KERNEL_KERNEL_VALUE_OUT"; Base::kernel_l1_to_l2_msg_exists_write_offset = "KERNEL_L1_TO_L2_MSG_EXISTS_WRITE_OFFSET"; Base::kernel_note_hash_exist_write_offset = "KERNEL_NOTE_HASH_EXIST_WRITE_OFFSET"; Base::kernel_nullifier_exists_write_offset = "KERNEL_NULLIFIER_EXISTS_WRITE_OFFSET"; @@ -3150,6 +3150,10 @@ class AvmFlavor { public: uint32_t circuit_size; + Commitment kernel_kernel_inputs; + Commitment kernel_kernel_value_out; + Commitment kernel_kernel_side_effect_out; + Commitment kernel_kernel_metadata_out; Commitment alu_a_hi; Commitment alu_a_lo; Commitment alu_b_hi; @@ -3270,11 +3274,7 @@ class AvmFlavor { Commitment kernel_emit_nullifier_write_offset; Commitment kernel_emit_unencrypted_log_write_offset; Commitment kernel_kernel_in_offset; - Commitment kernel_kernel_inputs; - Commitment kernel_kernel_metadata_out; Commitment kernel_kernel_out_offset; - Commitment kernel_kernel_side_effect_out; - Commitment kernel_kernel_value_out; Commitment 
kernel_l1_to_l2_msg_exists_write_offset; Commitment kernel_note_hash_exist_write_offset; Commitment kernel_nullifier_exists_write_offset; @@ -3552,6 +3552,10 @@ class AvmFlavor { circuit_size = deserialize_from_buffer(proof_data, num_frs_read); size_t log_n = numeric::get_msb(circuit_size); + kernel_kernel_inputs = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + kernel_kernel_value_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + kernel_kernel_side_effect_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + kernel_kernel_metadata_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); alu_a_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); alu_a_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); alu_b_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -3676,11 +3680,7 @@ class AvmFlavor { kernel_emit_unencrypted_log_write_offset = deserialize_from_buffer(Transcript::proof_data, num_frs_read); kernel_kernel_in_offset = deserialize_from_buffer(Transcript::proof_data, num_frs_read); - kernel_kernel_inputs = deserialize_from_buffer(Transcript::proof_data, num_frs_read); - kernel_kernel_metadata_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); kernel_kernel_out_offset = deserialize_from_buffer(Transcript::proof_data, num_frs_read); - kernel_kernel_side_effect_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); - kernel_kernel_value_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); kernel_l1_to_l2_msg_exists_write_offset = deserialize_from_buffer(Transcript::proof_data, num_frs_read); kernel_note_hash_exist_write_offset = @@ -3970,6 +3970,10 @@ class AvmFlavor { serialize_to_buffer(circuit_size, Transcript::proof_data); + serialize_to_buffer(kernel_kernel_inputs, Transcript::proof_data); + serialize_to_buffer(kernel_kernel_value_out, Transcript::proof_data); + 
serialize_to_buffer(kernel_kernel_side_effect_out, Transcript::proof_data); + serialize_to_buffer(kernel_kernel_metadata_out, Transcript::proof_data); serialize_to_buffer(alu_a_hi, Transcript::proof_data); serialize_to_buffer(alu_a_lo, Transcript::proof_data); serialize_to_buffer(alu_b_hi, Transcript::proof_data); @@ -4090,11 +4094,7 @@ class AvmFlavor { serialize_to_buffer(kernel_emit_nullifier_write_offset, Transcript::proof_data); serialize_to_buffer(kernel_emit_unencrypted_log_write_offset, Transcript::proof_data); serialize_to_buffer(kernel_kernel_in_offset, Transcript::proof_data); - serialize_to_buffer(kernel_kernel_inputs, Transcript::proof_data); - serialize_to_buffer(kernel_kernel_metadata_out, Transcript::proof_data); serialize_to_buffer(kernel_kernel_out_offset, Transcript::proof_data); - serialize_to_buffer(kernel_kernel_side_effect_out, Transcript::proof_data); - serialize_to_buffer(kernel_kernel_value_out, Transcript::proof_data); serialize_to_buffer(kernel_l1_to_l2_msg_exists_write_offset, Transcript::proof_data); serialize_to_buffer(kernel_note_hash_exist_write_offset, Transcript::proof_data); serialize_to_buffer(kernel_nullifier_exists_write_offset, Transcript::proof_data); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp index 2bdb385ef3f..c6a7622629b 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp @@ -59,6 +59,10 @@ void AvmProver::execute_wire_commitments_round() // Commit to all polynomials (apart from logderivative inverse polynomials, which are committed to in the later // logderivative phase) + witness_commitments.kernel_kernel_inputs = commitment_key->commit(key->kernel_kernel_inputs); + witness_commitments.kernel_kernel_value_out = commitment_key->commit(key->kernel_kernel_value_out); + witness_commitments.kernel_kernel_side_effect_out = 
commitment_key->commit(key->kernel_kernel_side_effect_out); + witness_commitments.kernel_kernel_metadata_out = commitment_key->commit(key->kernel_kernel_metadata_out); witness_commitments.alu_a_hi = commitment_key->commit(key->alu_a_hi); witness_commitments.alu_a_lo = commitment_key->commit(key->alu_a_lo); witness_commitments.alu_b_hi = commitment_key->commit(key->alu_b_hi); @@ -183,11 +187,7 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.kernel_emit_unencrypted_log_write_offset = commitment_key->commit(key->kernel_emit_unencrypted_log_write_offset); witness_commitments.kernel_kernel_in_offset = commitment_key->commit(key->kernel_kernel_in_offset); - witness_commitments.kernel_kernel_inputs = commitment_key->commit(key->kernel_kernel_inputs); - witness_commitments.kernel_kernel_metadata_out = commitment_key->commit(key->kernel_kernel_metadata_out); witness_commitments.kernel_kernel_out_offset = commitment_key->commit(key->kernel_kernel_out_offset); - witness_commitments.kernel_kernel_side_effect_out = commitment_key->commit(key->kernel_kernel_side_effect_out); - witness_commitments.kernel_kernel_value_out = commitment_key->commit(key->kernel_kernel_value_out); witness_commitments.kernel_l1_to_l2_msg_exists_write_offset = commitment_key->commit(key->kernel_l1_to_l2_msg_exists_write_offset); witness_commitments.kernel_note_hash_exist_write_offset = @@ -402,6 +402,13 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.lookup_div_u16_7_counts = commitment_key->commit(key->lookup_div_u16_7_counts); // Send all commitments to the verifier + transcript->send_to_verifier(commitment_labels.kernel_kernel_inputs, witness_commitments.kernel_kernel_inputs); + transcript->send_to_verifier(commitment_labels.kernel_kernel_value_out, + witness_commitments.kernel_kernel_value_out); + transcript->send_to_verifier(commitment_labels.kernel_kernel_side_effect_out, + witness_commitments.kernel_kernel_side_effect_out); + 
transcript->send_to_verifier(commitment_labels.kernel_kernel_metadata_out, + witness_commitments.kernel_kernel_metadata_out); transcript->send_to_verifier(commitment_labels.alu_a_hi, witness_commitments.alu_a_hi); transcript->send_to_verifier(commitment_labels.alu_a_lo, witness_commitments.alu_a_lo); transcript->send_to_verifier(commitment_labels.alu_b_hi, witness_commitments.alu_b_hi); @@ -535,15 +542,8 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.kernel_emit_unencrypted_log_write_offset); transcript->send_to_verifier(commitment_labels.kernel_kernel_in_offset, witness_commitments.kernel_kernel_in_offset); - transcript->send_to_verifier(commitment_labels.kernel_kernel_inputs, witness_commitments.kernel_kernel_inputs); - transcript->send_to_verifier(commitment_labels.kernel_kernel_metadata_out, - witness_commitments.kernel_kernel_metadata_out); transcript->send_to_verifier(commitment_labels.kernel_kernel_out_offset, witness_commitments.kernel_kernel_out_offset); - transcript->send_to_verifier(commitment_labels.kernel_kernel_side_effect_out, - witness_commitments.kernel_kernel_side_effect_out); - transcript->send_to_verifier(commitment_labels.kernel_kernel_value_out, - witness_commitments.kernel_kernel_value_out); transcript->send_to_verifier(commitment_labels.kernel_l1_to_l2_msg_exists_write_offset, witness_commitments.kernel_l1_to_l2_msg_exists_write_offset); transcript->send_to_verifier(commitment_labels.kernel_note_hash_exist_write_offset, diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp index 32cb192f93a..24a8b6c6f0b 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp @@ -70,6 +70,14 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vectortemplate receive_from_prover(commitment_labels.kernel_kernel_inputs); + 
commitments.kernel_kernel_value_out = + transcript->template receive_from_prover(commitment_labels.kernel_kernel_value_out); + commitments.kernel_kernel_side_effect_out = + transcript->template receive_from_prover(commitment_labels.kernel_kernel_side_effect_out); + commitments.kernel_kernel_metadata_out = + transcript->template receive_from_prover(commitment_labels.kernel_kernel_metadata_out); commitments.alu_a_hi = transcript->template receive_from_prover(commitment_labels.alu_a_hi); commitments.alu_a_lo = transcript->template receive_from_prover(commitment_labels.alu_a_lo); commitments.alu_b_hi = transcript->template receive_from_prover(commitment_labels.alu_b_hi); @@ -233,16 +241,8 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vectortemplate receive_from_prover(commitment_labels.kernel_kernel_in_offset); - commitments.kernel_kernel_inputs = - transcript->template receive_from_prover(commitment_labels.kernel_kernel_inputs); - commitments.kernel_kernel_metadata_out = - transcript->template receive_from_prover(commitment_labels.kernel_kernel_metadata_out); commitments.kernel_kernel_out_offset = transcript->template receive_from_prover(commitment_labels.kernel_kernel_out_offset); - commitments.kernel_kernel_side_effect_out = - transcript->template receive_from_prover(commitment_labels.kernel_kernel_side_effect_out); - commitments.kernel_kernel_value_out = - transcript->template receive_from_prover(commitment_labels.kernel_kernel_value_out); commitments.kernel_l1_to_l2_msg_exists_write_offset = transcript->template receive_from_prover(commitment_labels.kernel_l1_to_l2_msg_exists_write_offset); commitments.kernel_note_hash_exist_write_offset = diff --git a/bb-pilcom/.gitignore b/bb-pilcom/.gitignore new file mode 100644 index 00000000000..41bfb440101 --- /dev/null +++ b/bb-pilcom/.gitignore @@ -0,0 +1 @@ +**/target/* \ No newline at end of file diff --git a/bb-pilcom/Cargo.lock b/bb-pilcom/Cargo.lock new file mode 100644 index 
00000000000..d1179b177b9 --- /dev/null +++ b/bb-pilcom/Cargo.lock @@ -0,0 +1,1880 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" + +[[package]] +name = "anstyle-parse" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +dependencies = [ + "anstyle", + "windows-sys", +] + +[[package]] +name = "ark-bn254" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-std", +] + +[[package]] +name = "ark-ec" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defd9a439d56ac24968cca0571f598a61bc8c55f71d50a89cda591cb750670ba" +dependencies = [ + "ark-ff", + "ark-poly", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.13.2", + "itertools 0.10.5", + "num-traits", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" +dependencies = [ + "ark-ff-asm", + "ark-ff-macros", + "ark-serialize", + "ark-std", + "derivative", + "digest", + "itertools 0.10.5", + "num-bigint", + "num-traits", + "paste", + "rustc_version", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + 
"syn 1.0.109", +] + +[[package]] +name = "ark-poly" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d320bfc44ee185d899ccbadfa8bc31aab923ce1558716e1997a1e74057fe86bf" +dependencies = [ + "ark-ff", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.13.2", +] + +[[package]] +name = "ark-serialize" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" +dependencies = [ + "ark-serialize-derive", + "ark-std", + "digest", + "num-bigint", +] + +[[package]] +name = "ark-serialize-derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-std" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand", +] + +[[package]] +name = "ascii-canvas" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" +dependencies = [ + "term", +] + +[[package]] +name = "auto_enums" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1899bfcfd9340ceea3533ea157360ba8fa864354eccbceab58e1006ecab35393" +dependencies = [ + "derive_utils", + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bb-pil-backend" +version = "0.1.0" +dependencies = [ + "itertools 0.10.5", + "log", + "num-bigint", + "num-integer", + "num-traits", + "powdr-ast", + "powdr-number", + "rand", +] + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "cc" +version = "1.0.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets", +] + +[[package]] +name = "clap" +version = "4.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5db83dced34638ad474f39f250d7fea9598bdd239eaced1bdf45d597da0f433f" +dependencies = 
[ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7e204572485eb3fbf28f871612191521df159bc3e15a9f5064c66dba3a8c05f" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "clap_lex" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" + +[[package]] +name = "cli" +version = "0.1.0" +dependencies = [ + "bb-pil-backend", + "clap", + "itertools 0.10.5", + "log", + "num-bigint", + "num-integer", + "num-traits", + "powdr-ast", + "powdr-number", + "powdr-pil-analyzer", + "rand", +] + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "colorchoice" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "crunchy" +version = "0.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "csv" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" +dependencies = [ + "memchr", +] + +[[package]] +name = "darling" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83b2eb4d90d12bdda5ed17de686c2acb4c57914f8f921b8da7e112b5a36f3fe1" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.66", +] + +[[package]] +name = "darling_macro" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derivative" 
+version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.66", +] + +[[package]] +name = "derive_utils" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61bb5a1014ce6dfc2a378578509abe775a5aa06bff584a547555d9efdb81b926" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "crypto-common", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" + +[[package]] +name = "either" +version = 
"1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" + +[[package]] +name = "ena" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" +dependencies = [ + "log", +] + +[[package]] +name = "env_filter" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea" +dependencies = [ + "log", +] + +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "env_logger" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "log", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", 
+ "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "half" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ibig" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1fcc7f316b2c079dde77564a1360639c1a956a23fa96122732e416cb10717bb" +dependencies = [ + "cfg-if", + "num-traits", + "rand", + "serde", + "static_assertions", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown 0.14.5", + "serde", +] + +[[package]] +name = "is-terminal" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lalrpop" +version = "0.19.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" +dependencies = [ + "ascii-canvas", + "bit-set", + "diff", + "ena", + "is-terminal", + "itertools 0.10.5", + "lalrpop-util", + "petgraph", + "regex", + "regex-syntax 0.6.29", + "string_cache", + "term", + "tiny-keccak", + "unicode-xid", +] + +[[package]] +name = "lalrpop-util" +version = "0.19.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" +dependencies = [ + "regex", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags", + "libc", +] + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" +dependencies = [ + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", 
+ "indexmap 2.2.6", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "powdr-ast" +version = "0.0.1" +dependencies = [ + "auto_enums", + "derive_more", + "itertools 0.11.0", + "num-traits", + "powdr-number", + "powdr-parser", + "powdr-parser-util", + "powdr-pil-analyzer", + "pretty_assertions", + "schemars", + "serde", + "serde_cbor", + "test-log", +] + +[[package]] +name = "powdr-number" +version = "0.0.1" +dependencies = [ + "ark-bn254", + "ark-ff", + "ark-serialize", + "csv", + "env_logger 0.10.2", + "ibig", + "num-bigint", + "num-traits", + "schemars", + "serde", + "serde_with", + "test-log", +] + +[[package]] +name = "powdr-parser" +version = "0.0.1" +dependencies = [ + "derive_more", + "env_logger 0.10.2", + "lalrpop", + "lalrpop-util", + "lazy_static", + "num-traits", + "powdr-ast", + "powdr-number", + "powdr-parser-util", + "pretty_assertions", + "similar", + "test-log", + "walkdir", +] + +[[package]] +name = "powdr-parser-util" +version = "0.0.1" +dependencies = [ + "codespan-reporting", + "env_logger 0.10.2", + "lalrpop-util", + "schemars", + "serde", + "test-log", +] + +[[package]] +name = "powdr-pil-analyzer" +version = "0.0.1" +dependencies = [ + "env_logger 0.10.2", + "itertools 0.10.5", + "lazy_static", + "num-traits", + "powdr-ast", + "powdr-number", + "powdr-parser", + "powdr-parser-util", + "pretty_assertions", + "test-log", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + 
+[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "pretty_assertions" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +dependencies = [ + "diff", + "yansi", +] + +[[package]] +name = "proc-macro2" +version = "1.0.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_users" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustversion" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schemars" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" +dependencies = [ + "dyn-clone", + "indexmap 1.9.3", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.66", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" + +[[package]] +name = "serde" +version = "1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_cbor" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" +dependencies = [ + "half", + "serde", +] + +[[package]] +name = 
"serde_derive" +version = "1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "serde_json" +version = "1.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ad483d2ab0149d5a5ebcd9972a3852711e0153d863bf5a5d0391d28883c4a20" +dependencies = [ + "base64", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.2.6", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65569b702f41443e8bc8bbb1c5779bd0450bbe723b56198980e80ec45780bce2" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "similar" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared", + "precomputed-hash", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "term" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies 
= [ + "winapi-util", +] + +[[package]] +name = "test-log" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dffced63c2b5c7be278154d76b479f9f9920ed34e7574201407f0b14e2bbb93" +dependencies = [ + "env_logger 0.11.3", + "test-log-macros", + "tracing-subscriber", +] + +[[package]] +name = "test-log-macros" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "thiserror" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-width" +version = "0.1.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.66", + "wasm-bindgen-shared", 
+] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + 
"windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = 
"windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "zerocopy" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", +] diff --git a/bb-pilcom/Cargo.toml b/bb-pilcom/Cargo.toml new file mode 100644 index 00000000000..5df0e982d9d --- /dev/null +++ b/bb-pilcom/Cargo.toml @@ -0,0 +1,28 @@ +[workspace] +resolver = "2" +members = [ + "cli", + "bb-pil-backend", + "powdr/ast", + "powdr/number", + "powdr/parser", + "powdr/parser-util", + "powdr/pil-analyzer" +] + +[workspace.package] +version = "0.0.1" 
+edition = "2021" +license = "MIT" +homepage = "https://powdr.org" +repository = "https://github.com/powdr-labs/powdr" + + +[workspace.dependencies] +bb-pil-backend = { path = "./bb-pil-backend" } +cli = { path = "./cli" } +powdr-ast = { path = "./powdr/ast" } +powdr-number = { path = "./powdr/number" } +powdr-parser = { path = "./powdr/parser" } +powdr-parser-util = { path = "./powdr/parser-util" } +powdr-pil-analyzer = { path = "./powdr/pil-analyzer" } diff --git a/bb-pilcom/bb-pil-backend/Cargo.toml b/bb-pilcom/bb-pil-backend/Cargo.toml new file mode 100644 index 00000000000..0904cd7f445 --- /dev/null +++ b/bb-pilcom/bb-pil-backend/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "bb-pil-backend" +version = "0.1.0" +authors = ["Aztec Labs"] +edition = "2021" + +[dependencies] +num-bigint = "0.4.3" + +powdr-number = { path = "../powdr/number" } +num-traits = "0.2.15" +num-integer = "0.1.45" +itertools = "^0.10" +log = "0.4.17" +rand = "0.8.5" +powdr-ast = { path = "../powdr/ast" } diff --git a/bb-pilcom/bb-pil-backend/src/circuit_builder.rs b/bb-pilcom/bb-pil-backend/src/circuit_builder.rs new file mode 100644 index 00000000000..1f93f0367e8 --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/circuit_builder.rs @@ -0,0 +1,374 @@ +use crate::{ + file_writer::BBFiles, + relation_builder::create_row_type, + utils::{get_relations_imports, map_with_newline, snake_case}, +}; + +pub trait CircuitBuilder { + fn create_circuit_builder_hpp( + &mut self, + name: &str, + relations: &[String], + permutations: &[String], + all_cols_without_inverses: &[String], + all_cols: &[String], + to_be_shifted: &[String], + all_cols_with_shifts: &[String], + ); + + fn create_circuit_builder_cpp(&mut self, name: &str, all_cols: &[String]); +} + +fn circuit_hpp_includes(name: &str, relations: &[String], permutations: &[String]) -> String { + let relation_imports = get_relations_imports(name, relations, permutations); + format!( + " + // AUTOGENERATED FILE + #pragma once + + #include +#ifndef 
__wasm__ + #include +#endif + + #include \"barretenberg/common/constexpr_utils.hpp\" + #include \"barretenberg/common/throw_or_abort.hpp\" + #include \"barretenberg/ecc/curves/bn254/fr.hpp\" + #include \"barretenberg/stdlib_circuit_builders/circuit_builder_base.hpp\" + #include \"barretenberg/relations/generic_permutation/generic_permutation_relation.hpp\" + #include \"barretenberg/relations/generic_lookup/generic_lookup_relation.hpp\" + #include \"barretenberg/honk/proof_system/logderivative_library.hpp\" + + #include \"barretenberg/vm/generated/{name}_flavor.hpp\" + {relation_imports} +" + ) +} + +fn get_params() -> &'static str { + r#" + const FF gamma = FF::random_element(); + const FF beta = FF::random_element(); + bb::RelationParameters params{ + .eta = 0, + .beta = beta, + .gamma = gamma, + .public_input_delta = 0, + .lookup_grand_product_delta = 0, + .beta_sqr = 0, + .beta_cube = 0, + .eccvm_set_permutation_delta = 0, + }; + "# +} + +impl CircuitBuilder for BBFiles { + // Create circuit builder + // Generate some code that can read a commits.bin and constants.bin into data structures that bberg understands + fn create_circuit_builder_hpp( + &mut self, + name: &str, + relations: &[String], + permutations: &[String], + all_cols_without_inverses: &[String], + all_cols: &[String], + to_be_shifted: &[String], + all_cols_with_shifts: &[String], + ) { + let includes = circuit_hpp_includes(&snake_case(name), relations, permutations); + + let row_with_all_included = create_row_type(&format!("{name}Full"), all_cols_with_shifts); + + let num_polys = all_cols.len(); + let num_cols = all_cols.len() + to_be_shifted.len(); + + // Declare mapping transformations + let compute_polys_transformation = + |name: &String| format!("polys.{name}[i] = rows[i].{name};"); + let all_polys_transformation = + |name: &String| format!("polys.{name}_shift = Polynomial(polys.{name}.shifted());"); + let check_circuit_transformation = |relation_name: &String| { + format!( + "auto 
{relation_name} = [=]() {{ + return evaluate_relation.template operator()<{name}_vm::{relation_name}>(\"{relation_name}\", {name}_vm::get_relation_label_{relation_name}); + }}; + ", + name = name, + relation_name = relation_name + ) + }; + let check_lookup_transformation = |lookup_name: &String| { + let lookup_name_upper = lookup_name.to_uppercase(); + format!( + "auto {lookup_name} = [=]() {{ + return evaluate_logderivative.template operator()<{lookup_name}_relation>(\"{lookup_name_upper}\"); + }}; + " + ) + }; + + // When we are running natively, we want check circuit to run as futures; however, futures are not supported in wasm, so we must provide an + // alternative codepath that will execute the closures in serial + let emplace_future_transformation = |relation_name: &String| { + format!( + " + relation_futures.emplace_back(std::async(std::launch::async, {relation_name})); + " + ) + }; + + let execute_serial_transformation = |relation_name: &String| { + format!( + " + {relation_name}(); + " + ) + }; + + // Apply transformations + let compute_polys_assignemnt = + map_with_newline(all_cols_without_inverses, compute_polys_transformation); + let all_poly_shifts = map_with_newline(to_be_shifted, all_polys_transformation); + let check_circuit_for_each_relation = + map_with_newline(relations, check_circuit_transformation); + let check_circuit_for_each_lookup = + map_with_newline(permutations, check_lookup_transformation); + + // With futures + let emplace_future_relations = map_with_newline(relations, emplace_future_transformation); + let emplace_future_lookups = map_with_newline(permutations, emplace_future_transformation); + + // With threads + let serial_relations = map_with_newline(relations, execute_serial_transformation); + let serial_lookups = map_with_newline(permutations, execute_serial_transformation); + + let (params, lookup_check_closure) = if !permutations.is_empty() { + (get_params(), get_lookup_check_closure()) + } else { + ("", "".to_owned()) + }; + 
let relation_check_closure = if !relations.is_empty() { + get_relation_check_closure() + } else { + "".to_owned() + }; + + let circuit_hpp = format!(" +{includes} + +namespace bb {{ + +{row_with_all_included}; + +template std::ostream& operator<<(std::ostream& os, {name}FullRow const& row); + +class {name}CircuitBuilder {{ + public: + using Flavor = bb::{name}Flavor; + using FF = Flavor::FF; + using Row = {name}FullRow; + + // TODO: template + using Polynomial = Flavor::Polynomial; + using ProverPolynomials = Flavor::ProverPolynomials; + + static constexpr size_t num_fixed_columns = {num_cols}; + static constexpr size_t num_polys = {num_polys}; + std::vector rows; + + void set_trace(std::vector&& trace) {{ rows = std::move(trace); }} + + ProverPolynomials compute_polynomials() {{ + const auto num_rows = get_circuit_subgroup_size(); + ProverPolynomials polys; + + // Allocate mem for each column + for (auto& poly : polys.get_all()) {{ + poly = Polynomial(num_rows); + }} + + for (size_t i = 0; i < rows.size(); i++) {{ + {compute_polys_assignemnt} + }} + + {all_poly_shifts } + + return polys; + }} + + [[maybe_unused]] bool check_circuit() + {{ + {params} + + auto polys = compute_polynomials(); + const size_t num_rows = polys.get_polynomial_size(); + + {relation_check_closure} + + {lookup_check_closure} + + {check_circuit_for_each_relation} + + {check_circuit_for_each_lookup} + +#ifndef __wasm__ + + // Evaluate check circuit closures as futures + std::vector> relation_futures; + + {emplace_future_relations} + {emplace_future_lookups} + + + // Wait for lookup evaluations to complete + for (auto& future : relation_futures) {{ + int result = future.get(); + if (!result) {{ + return false; + }} + }} +#else + {serial_relations} + {serial_lookups} + +#endif + + return true; + }} + + + [[nodiscard]] size_t get_num_gates() const {{ return rows.size(); }} + + [[nodiscard]] size_t get_circuit_subgroup_size() const + {{ + const size_t num_rows = get_num_gates(); + const auto 
num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); + size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 0 : 1)); + return num_rows_pow2; + }} + + +}}; +}} + "); + + self.write_file( + &self.circuit, + &format!("{}_circuit_builder.hpp", snake_case(name)), + &circuit_hpp, + ); + } + + fn create_circuit_builder_cpp(&mut self, name: &str, all_cols: &[String]) { + let names_list = map_with_newline(all_cols, |name: &String| format!("\"{}\",", name)); + let stream_all_relations = map_with_newline(all_cols, |name: &String| { + format!("<< field_to_string(row.{}) << \",\"", name) + }); + let snake_name = snake_case(name); + + let circuit_cpp = format!( + " +#include \"barretenberg/vm/generated/{snake_name}_circuit_builder.hpp\" + +namespace bb {{ +namespace {{ + +template std::string field_to_string(const FF& ff) +{{ + std::ostringstream os; + os << ff; + std::string raw = os.str(); + auto first_not_zero = raw.find_first_not_of('0', 2); + std::string result = \"0x\" + (first_not_zero != std::string::npos ? raw.substr(first_not_zero) : \"0\"); + return result; +}} + +}} // namespace + +template std::vector {name}FullRow::names() {{ + return {{ + {names_list} + \"\" + }}; +}} + +template std::ostream& operator<<(std::ostream& os, {name}FullRow const& row) {{ + return os {stream_all_relations} + \"\"; +}} + +// Explicit template instantiation. 
+template std::ostream& operator<<(std::ostream& os, AvmFullRow const& row); +template std::vector AvmFullRow::names(); + +}} // namespace bb" + ); + + self.write_file( + &self.circuit, + &format!("{}_circuit_builder.cpp", snake_case(name)), + &circuit_cpp, + ); + } +} + +fn get_lookup_check_closure() -> String { + " + const auto evaluate_logderivative = [&](const std::string& lookup_name) { + + // Check the logderivative relation + bb::compute_logderivative_inverse< + Flavor, + LogDerivativeSettings>( + polys, params, num_rows); + + typename LogDerivativeSettings::SumcheckArrayOfValuesOverSubrelations + lookup_result; + + for (auto& r : lookup_result) { + r = 0; + } + for (size_t i = 0; i < num_rows; ++i) { + LogDerivativeSettings::accumulate(lookup_result, polys.get_row(i), params, 1); + } + for (auto r : lookup_result) { + if (r != 0) { + throw_or_abort(format(\"Lookup \", lookup_name, \" failed.\")); + return false; + } + } + return true; + }; + ".to_string() +} + +fn get_relation_check_closure() -> String { + " + const auto evaluate_relation = [&](const std::string& relation_name, + std::string (*debug_label)(int)) { + typename Relation::SumcheckArrayOfValuesOverSubrelations result; + for (auto& r : result) { + r = 0; + } + constexpr size_t NUM_SUBRELATIONS = result.size(); + + for (size_t i = 0; i < num_rows; ++i) { + Relation::accumulate(result, polys.get_row(i), {}, 1); + + bool x = true; + for (size_t j = 0; j < NUM_SUBRELATIONS; ++j) { + if (result[j] != 0) { + std::string row_name = debug_label(static_cast(j)); + throw_or_abort( + format(\"Relation \", relation_name, \", subrelation index \", row_name, \" failed at row \", i)); + x = false; + } + } + if (!x) { + return false; + } + } + return true; + }; + ".to_string() +} diff --git a/bb-pilcom/bb-pil-backend/src/composer_builder.rs b/bb-pilcom/bb-pil-backend/src/composer_builder.rs new file mode 100644 index 00000000000..2cbd8576d1a --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/composer_builder.rs @@ 
-0,0 +1,210 @@ +use crate::file_writer::BBFiles; +use crate::utils::snake_case; + +pub trait ComposerBuilder { + fn create_composer_cpp(&mut self, name: &str); + fn create_composer_hpp(&mut self, name: &str); +} + +impl ComposerBuilder for BBFiles { + fn create_composer_cpp(&mut self, name: &str) { + // Create a composer file, this is used to a prover and verifier for our flavour + let include_str = cpp_includes(&snake_case(name)); + + let composer_cpp = format!( + " +{include_str} + +namespace bb {{ + +using Flavor = {name}Flavor; +void {name}Composer::compute_witness(CircuitConstructor& circuit) +{{ + if (computed_witness) {{ + return; + }} + + auto polynomials = circuit.compute_polynomials(); + + for (auto [key_poly, prover_poly] : zip_view(proving_key->get_all(), polynomials.get_unshifted())) {{ + ASSERT(flavor_get_label(*proving_key, key_poly) == flavor_get_label(polynomials, prover_poly)); + key_poly = prover_poly; + }} + + computed_witness = true; +}} + +{name}Prover {name}Composer::create_prover(CircuitConstructor& circuit_constructor) +{{ + compute_proving_key(circuit_constructor); + compute_witness(circuit_constructor); + compute_commitment_key(circuit_constructor.get_circuit_subgroup_size()); + + {name}Prover output_state(proving_key, proving_key->commitment_key); + + return output_state; +}} + +{name}Verifier {name}Composer::create_verifier( + CircuitConstructor& circuit_constructor) +{{ + auto verification_key = compute_verification_key(circuit_constructor); + + {name}Verifier output_state(verification_key); + + auto pcs_verification_key = std::make_unique(); + + output_state.pcs_verification_key = std::move(pcs_verification_key); + + return output_state; +}} + +std::shared_ptr {name}Composer::compute_proving_key( + CircuitConstructor& circuit_constructor) +{{ + if (proving_key) {{ + return proving_key; + }} + + // Initialize proving_key + {{ + const size_t subgroup_size = circuit_constructor.get_circuit_subgroup_size(); + proving_key = 
std::make_shared(subgroup_size, 0); + }} + + proving_key->contains_recursive_proof = false; + + return proving_key; +}} + +std::shared_ptr {name}Composer::compute_verification_key( + CircuitConstructor& circuit_constructor) +{{ + if (verification_key) {{ + return verification_key; + }} + + if (!proving_key) {{ + compute_proving_key(circuit_constructor); + }} + + verification_key = + std::make_shared(proving_key->circuit_size, proving_key->num_public_inputs); + + return verification_key; +}} + +}} +"); + self.write_file( + &self.composer, + &format!("{}_composer.cpp", snake_case(name)), + &composer_cpp, + ); + } + + fn create_composer_hpp(&mut self, name: &str) { + let include_str = hpp_includes(&snake_case(name)); + + let composer_hpp = format!( + " +{include_str} + +namespace bb {{ +class {name}Composer {{ + public: + using Flavor = {name}Flavor; + using CircuitConstructor = {name}CircuitBuilder; + using ProvingKey = Flavor::ProvingKey; + using VerificationKey = Flavor::VerificationKey; + using PCS = Flavor::PCS; + using CommitmentKey = Flavor::CommitmentKey; + using VerifierCommitmentKey = Flavor::VerifierCommitmentKey; + + // TODO: which of these will we really need + static constexpr std::string_view NAME_STRING = \"{name}\"; + static constexpr size_t NUM_RESERVED_GATES = 0; + static constexpr size_t NUM_WIRES = Flavor::NUM_WIRES; + + std::shared_ptr proving_key; + std::shared_ptr verification_key; + + // The crs_factory holds the path to the srs and exposes methods to extract the srs elements + std::shared_ptr> crs_factory_; + + // The commitment key is passed to the prover but also used herein to compute the verfication key commitments + std::shared_ptr commitment_key; + + std::vector recursive_proof_public_input_indices; + bool contains_recursive_proof = false; + bool computed_witness = false; + + {name}Composer() + {{ + crs_factory_ = bb::srs::get_bn254_crs_factory(); + }} + + {name}Composer(std::shared_ptr p_key, std::shared_ptr v_key) + : 
proving_key(std::move(p_key)) + , verification_key(std::move(v_key)) + {{}} + + {name}Composer({name}Composer&& other) noexcept = default; + {name}Composer({name}Composer const& other) noexcept = default; + {name}Composer& operator=({name}Composer&& other) noexcept = default; + {name}Composer& operator=({name}Composer const& other) noexcept = default; + ~{name}Composer() = default; + + std::shared_ptr compute_proving_key(CircuitConstructor& circuit_constructor); + std::shared_ptr compute_verification_key(CircuitConstructor& circuit_constructor); + + void compute_witness(CircuitConstructor& circuit_constructor); + + {name}Prover create_prover(CircuitConstructor& circuit_constructor); + {name}Verifier create_verifier(CircuitConstructor& circuit_constructor); + + void add_table_column_selector_poly_to_proving_key(bb::polynomial& small, const std::string& tag); + + void compute_commitment_key(size_t circuit_size) + {{ + proving_key->commitment_key = std::make_shared(circuit_size); + }}; +}}; + +}} // namespace bb +" + ); + + self.write_file( + &self.composer, + &format!("{}_composer.hpp", snake_case(name)), + &composer_hpp, + ); + } +} + +fn cpp_includes(name: &str) -> String { + format!( + " +#include \"./{name}_composer.hpp\" +#include \"barretenberg/plonk_honk_shared/composer/composer_lib.hpp\" +#include \"barretenberg/plonk_honk_shared/composer/permutation_lib.hpp\" +#include \"barretenberg/vm/generated/{name}_circuit_builder.hpp\" +#include \"barretenberg/vm/generated/{name}_verifier.hpp\" +" + ) +} + +pub fn hpp_includes(name: &str) -> String { + format!( + " +#pragma once + +#include \"barretenberg/plonk_honk_shared/composer/composer_lib.hpp\" +#include \"barretenberg/srs/global_crs.hpp\" +#include \"barretenberg/vm/generated/{name}_circuit_builder.hpp\" +#include \"barretenberg/vm/generated/{name}_prover.hpp\" +#include \"barretenberg/vm/generated/{name}_verifier.hpp\" + " + ) +} diff --git a/bb-pilcom/bb-pil-backend/src/file_writer.rs 
b/bb-pilcom/bb-pil-backend/src/file_writer.rs new file mode 100644 index 00000000000..a4ed8e1e55e --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/file_writer.rs @@ -0,0 +1,58 @@ +use std::fs::File; +use std::io::Write; + +pub struct BBFiles { + // Relative paths + pub file_name: String, + pub base: String, + pub rel: String, + pub circuit: String, + pub flavor: String, + pub composer: String, + pub prover: String, // path for both prover and verifier files +} + +impl BBFiles { + pub fn default(file_name: String) -> Self { + Self::new(file_name, None, None, None, None, None, None) + } + + #[allow(clippy::too_many_arguments)] + pub fn new( + file_name: String, + base: Option, + rel: Option, + circuit: Option, + flavor: Option, + composer: Option, + prover: Option, + ) -> Self { + let base = base.unwrap_or("src/barretenberg".to_owned()); + let rel = rel.unwrap_or("relations/generated".to_owned()); + let circuit = circuit.unwrap_or("vm/generated".to_owned()); + let flavor = flavor.unwrap_or("vm/generated".to_owned()); + let composer = composer.unwrap_or("vm/generated".to_owned()); + let prover = prover.unwrap_or("vm/generated".to_owned()); + + Self { + file_name, + + base, + rel, + circuit, + flavor, + composer, + prover, + } + } + + pub fn write_file(&self, folder: &str, filename: &str, contents: &String) { + // attempt to create dir + let base_path = format!("{}/{}", self.base, folder); + let _ = std::fs::create_dir_all(&base_path); + + let joined = format!("{}/{}", base_path, filename); + let mut file = File::create(joined).unwrap(); + file.write_all(contents.as_bytes()).unwrap(); + } +} diff --git a/bb-pilcom/bb-pil-backend/src/flavor_builder.rs b/bb-pilcom/bb-pil-backend/src/flavor_builder.rs new file mode 100644 index 00000000000..dab4058bdfe --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/flavor_builder.rs @@ -0,0 +1,629 @@ +use crate::{ + file_writer::BBFiles, + utils::{get_relations_imports, map_with_newline, snake_case}, +}; + +pub trait FlavorBuilder { + 
#[allow(clippy::too_many_arguments)] + fn create_flavor_hpp( + &mut self, + name: &str, + relation_file_names: &[String], + lookups: &[String], + fixed: &[String], + witness: &[String], + all_cols: &[String], + to_be_shifted: &[String], + shifted: &[String], + all_cols_and_shifts: &[String], + ); +} + +/// Build the boilerplate for the flavor file +impl FlavorBuilder for BBFiles { + fn create_flavor_hpp( + &mut self, + name: &str, + relation_file_names: &[String], + lookups: &[String], + fixed: &[String], + witness: &[String], + all_cols: &[String], + to_be_shifted: &[String], + shifted: &[String], + all_cols_and_shifts: &[String], + ) { + let first_poly = &witness[0]; + let includes = flavor_includes(&snake_case(name), relation_file_names, lookups); + let num_precomputed = fixed.len(); + let num_witness = witness.len(); + let num_all = all_cols_and_shifts.len(); + + // Top of file boilerplate + let class_aliases = create_class_aliases(); + let relation_definitions = create_relation_definitions(name, relation_file_names, lookups); + let container_size_definitions = + container_size_definitions(num_precomputed, num_witness, num_all); + + // Entities classes + let precomputed_entities = create_precomputed_entities(fixed); + let witness_entities = create_witness_entities(witness); + let all_entities = + create_all_entities(all_cols, to_be_shifted, shifted, all_cols_and_shifts); + + let proving_and_verification_key = + create_proving_and_verification_key(name, lookups, to_be_shifted); + let polynomial_views = create_polynomial_views(first_poly); + + let commitment_labels_class = create_commitment_labels(all_cols); + + let verification_commitments = create_verifier_commitments(fixed); + + let transcript = generate_transcript(witness); + + let flavor_hpp = format!( + " +{includes} + +namespace bb {{ + +class {name}Flavor {{ + public: + {class_aliases} + + {container_size_definitions} + + {relation_definitions} + + static constexpr bool has_zero_row = true; + + private: + 
{precomputed_entities} + + {witness_entities} + + {all_entities} + + + {proving_and_verification_key} + + + {polynomial_views} + + {commitment_labels_class} + + {verification_commitments} + + {transcript} +}}; + +}} // namespace bb + + + " + ); + + self.write_file( + &self.flavor, + &format!("{}_flavor.hpp", snake_case(name)), + &flavor_hpp, + ); + } +} + +/// Imports located at the top of the flavor files +fn flavor_includes(name: &str, relation_file_names: &[String], lookups: &[String]) -> String { + let relation_imports = get_relations_imports(name, relation_file_names, lookups); + + format!( + "#pragma once + +#include \"barretenberg/commitment_schemes/kzg/kzg.hpp\" +#include \"barretenberg/ecc/curves/bn254/g1.hpp\" +#include \"barretenberg/flavor/relation_definitions.hpp\" +#include \"barretenberg/polynomials/barycentric.hpp\" +#include \"barretenberg/polynomials/univariate.hpp\" + +#include \"barretenberg/relations/generic_permutation/generic_permutation_relation.hpp\" + +#include \"barretenberg/flavor/flavor_macros.hpp\" +#include \"barretenberg/transcript/transcript.hpp\" +#include \"barretenberg/polynomials/evaluation_domain.hpp\" +#include \"barretenberg/polynomials/polynomial.hpp\" +#include \"barretenberg/flavor/flavor.hpp\" +{relation_imports} +" + ) +} + +/// Creates comma separated relations tuple file +fn create_relations_tuple(master_name: &str, relation_file_names: &[String]) -> String { + relation_file_names + .iter() + .map(|name| format!("{master_name}_vm::{name}")) + .collect::>() + .join(", ") +} + +/// Creates comma separated relations tuple file +fn create_lookups_tuple(lookups: &[String]) -> Option { + if lookups.is_empty() { + return None; + } + Some( + lookups + .iter() + .map(|lookup| format!("{}_relation", lookup.clone())) + .collect::>() + .join(", "), + ) +} + +/// Create Class Aliases +/// +/// Contains boilerplate defining key characteristics of the flavor class +fn create_class_aliases() -> &'static str { + r#" + using Curve = 
curve::BN254; + using G1 = Curve::Group; + using PCS = KZG; + + using FF = G1::subgroup_field; + using Polynomial = bb::Polynomial; + using PolynomialHandle = std::span; + using GroupElement = G1::element; + using Commitment = G1::affine_element; + using CommitmentHandle = G1::affine_element; + using CommitmentKey = bb::CommitmentKey; + using VerifierCommitmentKey = bb::VerifierCommitmentKey; + using RelationSeparator = FF; + "# +} + +/// Create relation definitions +/// +/// Contains all of the boilerplate code required to generate relation definitions. +/// We instantiate the Relations container, which contains a tuple of all of the separate relation file +/// definitions. +/// +/// We then also define some constants, making use of the preprocessor. +fn create_relation_definitions( + name: &str, + relation_file_names: &[String], + lookups: &[String], +) -> String { + // Relations tuple = ns::relation_name_0, ns::relation_name_1, ... ns::relation_name_n (comma speratated) + let comma_sep_relations = create_relations_tuple(name, relation_file_names); + let comma_sep_lookups: Option = create_lookups_tuple(lookups); + + // We only include the grand product relations if we are given lookups + let mut grand_product_relations = String::new(); + let mut all_relations = comma_sep_relations.to_string(); + if let Some(lookups) = comma_sep_lookups { + all_relations = all_relations + &format!(", {lookups}"); + grand_product_relations = format!("using GrandProductRelations = std::tuple<{lookups}>;"); + } + + format!(" + {grand_product_relations} + + using Relations = std::tuple<{all_relations}>; + + static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); + + // BATCHED_RELATION_PARTIAL_LENGTH = algebraic degree of sumcheck relation *after* multiplying by the `pow_zeta` + // random polynomial e.g. 
For \\sum(x) [A(x) * B(x) + C(x)] * PowZeta(X), relation length = 2 and random relation + // length = 3 + static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = MAX_PARTIAL_RELATION_LENGTH + 1; + static constexpr size_t NUM_RELATIONS = std::tuple_size_v; + + template + using ProtogalaxyTupleOfTuplesOfUnivariates = + decltype(create_protogalaxy_tuple_of_tuples_of_univariates()); + using SumcheckTupleOfTuplesOfUnivariates = decltype(create_sumcheck_tuple_of_tuples_of_univariates()); + using TupleOfArraysOfValues = decltype(create_tuple_of_arrays_of_values()); + ") +} + +/// Create the number of columns boilerplate for the flavor file +fn container_size_definitions( + num_precomputed: usize, + num_witness: usize, + num_all: usize, +) -> String { + format!(" + static constexpr size_t NUM_PRECOMPUTED_ENTITIES = {num_precomputed}; + static constexpr size_t NUM_WITNESS_ENTITIES = {num_witness}; + static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; + // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for the unshifted and one for the shifted + static constexpr size_t NUM_ALL_ENTITIES = {num_all}; + + ") +} + +/// Returns a Ref Vector with the given name, +/// +/// The vector returned will reference the columns names given +/// Used in all entities declarations +fn return_ref_vector(name: &str, columns: &[String]) -> String { + let comma_sep = create_comma_separated(columns); + + format!("RefVector {name}() {{ return {{ {comma_sep} }}; }};") +} + +/// list -> "list[0], list[1], ... 
list[n-1]" +fn create_comma_separated(list: &[String]) -> String { + list.join(", ") +} + +/// Create Precomputed Entities +/// +/// Precomputed first contains a pointer view defining all of the precomputed columns +/// As-well as any polys conforming to tables / ids / permutations +fn create_precomputed_entities(fixed: &[String]) -> String { + let pointer_view = create_flavor_members(fixed); + + let selectors = return_ref_vector("get_selectors", fixed); + let sigma_polys = return_ref_vector("get_sigma_polynomials", &[]); + let id_polys = return_ref_vector("get_id_polynomials", &[]); + let table_polys = return_ref_vector("get_table_polynomials", &[]); + + format!( + " + template + class PrecomputedEntities : public PrecomputedEntitiesBase {{ + public: + using DataType = DataType_; + + {pointer_view} + + {selectors} + {sigma_polys} + {id_polys} + {table_polys} + }}; + " + ) +} + +fn create_witness_entities(witness: &[String]) -> String { + let pointer_view = create_flavor_members(witness); + + let wires = return_ref_vector("get_wires", witness); + + format!( + " + template + class WitnessEntities {{ + public: + + {pointer_view} + + {wires} + }}; + " + ) +} + +/// Creates container of all witness entities and shifts +fn create_all_entities( + all_cols: &[String], + to_be_shifted: &[String], + shifted: &[String], + all_cols_and_shifts: &[String], +) -> String { + let all_entities_flavor_members = create_flavor_members(all_cols_and_shifts); + + let wires = return_ref_vector("get_wires", all_cols_and_shifts); + let get_unshifted = return_ref_vector("get_unshifted", all_cols); + let get_to_be_shifted = return_ref_vector("get_to_be_shifted", to_be_shifted); + let get_shifted = return_ref_vector("get_shifted", shifted); + + format!( + " + template + class AllEntities {{ + public: + + {all_entities_flavor_members} + + + {wires} + {get_unshifted} + {get_to_be_shifted} + {get_shifted} + }}; + " + ) +} + +fn create_proving_and_verification_key( + flavor_name: &str, + lookups: 
&[String], + to_be_shifted: &[String], +) -> String { + let get_to_be_shifted = return_ref_vector("get_to_be_shifted", to_be_shifted); + let compute_logderivative_inverses = + create_compute_logderivative_inverses(flavor_name, lookups); + + format!(" + public: + class ProvingKey : public ProvingKeyAvm_, WitnessEntities, CommitmentKey> {{ + public: + // Expose constructors on the base class + using Base = ProvingKeyAvm_, WitnessEntities, CommitmentKey>; + using Base::Base; + + {get_to_be_shifted} + + {compute_logderivative_inverses} + }}; + + using VerificationKey = VerificationKey_, VerifierCommitmentKey>; + ") +} + +fn create_polynomial_views(first_poly: &String) -> String { + format!(" + + class AllValues : public AllEntities {{ + public: + using Base = AllEntities; + using Base::Base; + }}; + + /** + * @brief A container for the prover polynomials handles. + */ + class ProverPolynomials : public AllEntities {{ + public: + // Define all operations as default, except copy construction/assignment + ProverPolynomials() = default; + ProverPolynomials& operator=(const ProverPolynomials&) = delete; + ProverPolynomials(const ProverPolynomials& o) = delete; + ProverPolynomials(ProverPolynomials&& o) noexcept = default; + ProverPolynomials& operator=(ProverPolynomials&& o) noexcept = default; + ~ProverPolynomials() = default; + + ProverPolynomials(ProvingKey& proving_key) + {{ + for (auto [prover_poly, key_poly] : zip_view(this->get_unshifted(), proving_key.get_all())) {{ + ASSERT(flavor_get_label(*this, prover_poly) == flavor_get_label(proving_key, key_poly)); + prover_poly = key_poly.share(); + }} + for (auto [prover_poly, key_poly] : zip_view(this->get_shifted(), proving_key.get_to_be_shifted())) {{ + ASSERT(flavor_get_label(*this, prover_poly) == (flavor_get_label(proving_key, key_poly) + \"_shift\")); + prover_poly = key_poly.shifted(); + }} + }} + + [[nodiscard]] size_t get_polynomial_size() const {{ return {first_poly}.size(); }} + /** + * @brief Returns the 
evaluations of all prover polynomials at one point on the boolean hypercube, which + * represents one row in the execution trace. + */ + [[nodiscard]] AllValues get_row(size_t row_idx) const + {{ + AllValues result; + for (auto [result_field, polynomial] : zip_view(result.get_all(), this->get_all())) {{ + result_field = polynomial[row_idx]; + }} + return result; + }} + }}; + + class PartiallyEvaluatedMultivariates : public AllEntities {{ + public: + PartiallyEvaluatedMultivariates() = default; + PartiallyEvaluatedMultivariates(const size_t circuit_size) + {{ + // Storage is only needed after the first partial evaluation, hence polynomials of size (n / 2) + for (auto& poly : get_all()) {{ + poly = Polynomial(circuit_size / 2); + }} + }} + }}; + + /** + * @brief A container for univariates used during Protogalaxy folding and sumcheck. + * @details During folding and sumcheck, the prover evaluates the relations on these univariates. + */ + template + using ProverUnivariates = AllEntities>; + + /** + * @brief A container for univariates used during Protogalaxy folding and sumcheck with some of the computation + * optimistically ignored + * @details During folding and sumcheck, the prover evaluates the relations on these univariates. + */ + template + using OptimisedProverUnivariates = AllEntities>; + + /** + * @brief A container for univariates produced during the hot loop in sumcheck. + */ + using ExtendedEdges = ProverUnivariates; + + /** + * @brief A container for the witness commitments. 
+ * + */ + using WitnessCommitments = WitnessEntities; + + ") +} + +fn create_flavor_members(entities: &[String]) -> String { + let pointer_list = create_comma_separated(entities); + + format!( + "DEFINE_FLAVOR_MEMBERS(DataType, {pointer_list})", + pointer_list = pointer_list + ) +} + +fn create_labels(all_ents: &[String]) -> String { + let mut labels = String::new(); + for name in all_ents { + labels.push_str(&format!( + "Base::{name} = \"{}\"; + ", + name.to_uppercase() + )); + } + labels +} + +fn create_commitment_labels(all_ents: &[String]) -> String { + let labels = create_labels(all_ents); + + format!( + " + class CommitmentLabels: public AllEntities {{ + private: + using Base = AllEntities; + + public: + CommitmentLabels() : AllEntities() + {{ + {labels} + }}; + }}; + " + ) +} + +/// Create the compute_logderivative_inverses function +/// +/// If we do not have any lookups, we do not need to include this round +fn create_compute_logderivative_inverses(flavor_name: &str, lookups: &[String]) -> String { + if lookups.is_empty() { + return "".to_string(); + } + + let compute_inverse_transformation = |lookup_name: &String| { + format!("bb::compute_logderivative_inverse<{flavor_name}Flavor, {lookup_name}_relation>(prover_polynomials, relation_parameters, this->circuit_size);") + }; + + let compute_inverses = map_with_newline(lookups, compute_inverse_transformation); + + format!( + " + void compute_logderivative_inverses(const RelationParameters& relation_parameters) + {{ + ProverPolynomials prover_polynomials = ProverPolynomials(*this); + + {compute_inverses} + }} + " + ) +} + +fn create_key_dereference(fixed: &[String]) -> String { + let deref_transformation = |name: &String| format!("{name} = verification_key->{name};"); + + map_with_newline(fixed, deref_transformation) +} + +fn create_verifier_commitments(fixed: &[String]) -> String { + let key_dereference = create_key_dereference(fixed); + + format!( + " + class VerifierCommitments : public AllEntities {{ + 
private: + using Base = AllEntities; + + public: + VerifierCommitments(const std::shared_ptr& verification_key) + {{ + {key_dereference} + }} + }}; +" + ) +} + +fn generate_transcript(witness: &[String]) -> String { + // Transformations + let declaration_transform = |c: &_| format!("Commitment {c};"); + let deserialize_transform = |name: &_| { + format!( + "{name} = deserialize_from_buffer(Transcript::proof_data, num_frs_read);", + ) + }; + let serialize_transform = + |name: &_| format!("serialize_to_buffer({name}, Transcript::proof_data);"); + + // Perform Transformations + let declarations = map_with_newline(witness, declaration_transform); + let deserialize_wires = map_with_newline(witness, deserialize_transform); + let serialize_wires = map_with_newline(witness, serialize_transform); + + format!(" + class Transcript : public NativeTranscript {{ + public: + uint32_t circuit_size; + + {declarations} + + std::vector> sumcheck_univariates; + std::array sumcheck_evaluations; + std::vector zm_cq_comms; + Commitment zm_cq_comm; + Commitment zm_pi_comm; + + Transcript() = default; + + Transcript(const std::vector& proof) + : NativeTranscript(proof) + {{}} + + void deserialize_full_transcript() + {{ + size_t num_frs_read = 0; + circuit_size = deserialize_from_buffer(proof_data, num_frs_read); + size_t log_n = numeric::get_msb(circuit_size); + + {deserialize_wires} + + for (size_t i = 0; i < log_n; ++i) {{ + sumcheck_univariates.emplace_back( + deserialize_from_buffer>( + Transcript::proof_data, num_frs_read)); + }} + sumcheck_evaluations = deserialize_from_buffer>( + Transcript::proof_data, num_frs_read); + for (size_t i = 0; i < log_n; ++i) {{ + zm_cq_comms.push_back(deserialize_from_buffer(proof_data, num_frs_read)); + }} + zm_cq_comm = deserialize_from_buffer(proof_data, num_frs_read); + zm_pi_comm = deserialize_from_buffer(proof_data, num_frs_read); + }} + + void serialize_full_transcript() + {{ + size_t old_proof_length = proof_data.size(); + 
Transcript::proof_data.clear(); + size_t log_n = numeric::get_msb(circuit_size); + + serialize_to_buffer(circuit_size, Transcript::proof_data); + + {serialize_wires} + + for (size_t i = 0; i < log_n; ++i) {{ + serialize_to_buffer(sumcheck_univariates[i], Transcript::proof_data); + }} + serialize_to_buffer(sumcheck_evaluations, Transcript::proof_data); + for (size_t i = 0; i < log_n; ++i) {{ + serialize_to_buffer(zm_cq_comms[i], proof_data); + }} + serialize_to_buffer(zm_cq_comm, proof_data); + serialize_to_buffer(zm_pi_comm, proof_data); + + // sanity check to make sure we generate the same length of proof as before. + ASSERT(proof_data.size() == old_proof_length); + }} + }}; + ") +} diff --git a/bb-pilcom/bb-pil-backend/src/lib.rs b/bb-pilcom/bb-pil-backend/src/lib.rs new file mode 100644 index 00000000000..7644c13d1b5 --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/lib.rs @@ -0,0 +1,11 @@ +mod circuit_builder; +mod composer_builder; +mod file_writer; +mod flavor_builder; +pub mod lookup_builder; +pub mod permutation_builder; +mod prover_builder; +mod relation_builder; +mod utils; +mod verifier_builder; +pub mod vm_builder; diff --git a/bb-pilcom/bb-pil-backend/src/lookup_builder.rs b/bb-pilcom/bb-pil-backend/src/lookup_builder.rs new file mode 100644 index 00000000000..50016404a3f --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/lookup_builder.rs @@ -0,0 +1,369 @@ +use crate::{ + file_writer::BBFiles, + utils::{create_get_const_entities, create_get_nonconst_entities, snake_case}, +}; +use itertools::Itertools; +use powdr_ast::{ + analyzed::{AlgebraicExpression, Analyzed, Identity, IdentityKind}, + parsed::SelectedExpressions, +}; +use powdr_number::FieldElement; + +use crate::utils::sanitize_name; + +#[derive(Debug)] +/// Lookup +/// +/// Contains the information required to produce a lookup relation +/// Lookup object and lookup side object are very similar in structure, however they are duplicated for +/// readability. 
+pub struct Lookup { + /// the name given to the inverse helper column + pub attribute: Option, + /// The name of the counts polynomial that stores the number of times a lookup is read + pub counts_poly: String, + /// the left side of the lookup + pub left: LookupSide, + /// the right side of the lookup + pub right: LookupSide, +} + +#[derive(Debug)] +/// LookupSide +/// +/// One side of a two sided lookup relationship +pub struct LookupSide { + /// -> Option - the selector for the lookup ( on / off toggle ) + selector: Option, + /// The columns involved in this side of the lookup + cols: Vec, +} + +pub trait LookupBuilder { + /// Takes in an AST and works out what lookup relations are needed + /// Note: returns the name of the inverse columns, such that they can be added to the prover in subsequent steps + fn create_lookup_files( + &self, + name: &str, + analyzed: &Analyzed, + ) -> Vec; +} + +impl LookupBuilder for BBFiles { + fn create_lookup_files( + &self, + project_name: &str, + analyzed: &Analyzed, + ) -> Vec { + let lookups: Vec<&Identity>> = analyzed + .identities + .iter() + .filter(|identity| matches!(identity.kind, IdentityKind::Plookup)) + .collect(); + let new_lookups = lookups + .iter() + .map(|lookup| Lookup { + attribute: lookup.attribute.clone().map(|att| att.to_lowercase()), + counts_poly: format!( + "{}_counts", + lookup.attribute.clone().unwrap().to_lowercase() + ), + left: get_lookup_side(&lookup.left), + right: get_lookup_side(&lookup.right), + }) + .collect_vec(); + + create_lookups(self, project_name, &new_lookups); + new_lookups + } +} + +/// The attributes of a lookup contain the name of the inverse, we collect all of these to create the inverse column +pub fn get_inverses_from_lookups(lookups: &[Lookup]) -> Vec { + lookups + .iter() + .map(|lookup| lookup.attribute.clone().unwrap()) + .collect() +} + +pub fn get_counts_from_lookups(lookups: &[Lookup]) -> Vec { + lookups + .iter() + .map(|lookup| lookup.counts_poly.clone()) + .collect() +} 
+ +/// Write the lookup settings files to disk +fn create_lookups(bb_files: &BBFiles, project_name: &str, lookups: &Vec) { + for lookup in lookups { + let lookup_settings = create_lookup_settings_file(lookup); + + let folder = format!("{}/{}", bb_files.rel, &snake_case(project_name)); + let file_name = format!( + "{}{}", + lookup.attribute.clone().unwrap_or("NONAME".to_owned()), + ".hpp".to_owned() + ); + bb_files.write_file(&folder, &file_name, &lookup_settings); + } +} + +/// All relation types eventually get wrapped in the relation type +/// This function creates the export for the relation type so that it can be added to the flavor +fn create_relation_exporter(lookup_name: &str) -> String { + let settings_name = format!("{}_lookup_settings", lookup_name); + let lookup_export = format!("template using {lookup_name}_relation = GenericLookupRelation<{settings_name}, FF_>;"); + let relation_export = format!( + "template using {lookup_name} = GenericLookup<{settings_name}, FF_>;" + ); + + format!( + " + {lookup_export} + {relation_export} + " + ) +} + +fn lookup_settings_includes() -> &'static str { + r#" + #pragma once + + #include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + + #include + #include + "# +} + +fn create_lookup_settings_file(lookup: &Lookup) -> String { + let columns_per_set = lookup.left.cols.len(); + let lookup_name = lookup + .attribute + .clone() + .expect("Inverse column name must be provided within lookup attribute - #[]"); + let counts_poly_name = lookup.counts_poly.to_owned(); + + // NOTE: https://github.com/AztecProtocol/aztec-packages/issues/3879 + // Settings are not flexible enough to combine inverses + + let lhs_selector = lookup + .left + .selector + .clone() + .expect("Left hand side selector for lookup required"); + let rhs_selector = lookup + .right + .selector + .clone() + .expect("Right hand side selector for lookup required"); + let lhs_cols = lookup.left.cols.clone(); + let rhs_cols = 
lookup.right.cols.clone(); + + assert!( + lhs_cols.len() == rhs_cols.len(), + "Lookup columns lhs must be the same length as rhs" + ); + + // 0. The polynomial containing the inverse products -> taken from the attributes + // 1. The polynomial with the counts! + // 2. lhs selector + // 3. rhs selector + // 4.. + columns per set. lhs cols + // 4 + columns per set.. . rhs cols + let mut lookup_entities: Vec = [ + lookup_name.clone(), + counts_poly_name.clone(), + lhs_selector.clone(), + rhs_selector.clone(), + ] + .to_vec(); + + lookup_entities.extend(lhs_cols); + lookup_entities.extend(rhs_cols); + + // NOTE: these are hardcoded as 1 for now until more optimizations are required + let read_terms = 1; + let write_terms = 1; + let lookup_tuple_size = columns_per_set; + + // NOTE: hardcoded until optimizations required + let inverse_degree = 4; + let read_term_degree = 0; + let write_term_degree = 0; + let read_term_types = "{0}"; + let write_term_types = "{0}"; + + let lookup_settings_includes = lookup_settings_includes(); + let inverse_polynomial_is_computed_at_row = + create_inverse_computed_at(&lhs_selector, &rhs_selector); + let compute_inverse_exists = create_compute_inverse_exist(&lhs_selector, &rhs_selector); + let const_entities = create_get_const_entities(&lookup_entities); + let nonconst_entities = create_get_nonconst_entities(&lookup_entities); + let relation_exporter = create_relation_exporter(&lookup_name); + + format!( + " + {lookup_settings_includes} + + namespace bb {{ + + /** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update \"DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS\" and \"DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS\" to + * include the new settings + * 4) Add the 
relation with the chosen settings to Relations in the flavor (for example,\"` + * using Relations = std::tuple>;)` + * + */ + class {lookup_name}_lookup_settings {{ + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = {read_terms}; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = {write_terms}; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = {read_term_types}; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = {write_term_types}; + + /** + * @brief How many values represent a single lookup object. This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = {lookup_tuple_size}; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = {inverse_degree}; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = {read_term_degree}; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = {write_term_degree}; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. 
+ * Otherwise the value needs to be set to zero. + * + * @details If this is true then the lookup takes place in this row + * + */ + {inverse_polynomial_is_computed_at_row} + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + {compute_inverse_exists} + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + {const_entities} + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in 
get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + {nonconst_entities} + }}; + + {relation_exporter} + }} + " + ) +} + +fn create_inverse_computed_at(lhs_selector: &String, rhs_selector: &String) -> String { + let lhs_computed_selector = format!("in.{lhs_selector}"); + let rhs_computed_selector = format!("in.{rhs_selector}"); + format!(" + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) {{ + return ({lhs_computed_selector } == 1 || {rhs_computed_selector} == 1); + }}") +} + +fn create_compute_inverse_exist(lhs_selector: &String, rhs_selector: &String) -> String { + let lhs_computed_selector = format!("in.{lhs_selector}"); + let rhs_computed_selector = format!("in.{rhs_selector}"); + format!(" + template static inline auto compute_inverse_exists(const AllEntities& in) {{ + using View = typename Accumulator::View; + const auto is_operation = View({lhs_computed_selector}); + const auto is_table_entry = View({rhs_computed_selector}); + return (is_operation + is_table_entry - is_operation * is_table_entry); + }}") +} + +fn get_lookup_side( + def: &SelectedExpressions>, +) -> LookupSide { + let get_name = |expr: &AlgebraicExpression| match expr { + AlgebraicExpression::Reference(a_ref) => sanitize_name(&a_ref.name), + _ => panic!("Expected reference"), + }; + + LookupSide { + selector: def.selector.as_ref().map(get_name), + cols: def.expressions.iter().map(get_name).collect_vec(), + } +} diff --git a/bb-pilcom/bb-pil-backend/src/permutation_builder.rs b/bb-pilcom/bb-pil-backend/src/permutation_builder.rs new file mode 100644 index 00000000000..b9dbeb0130e --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/permutation_builder.rs @@ -0,0 +1,254 @@ +use crate::{ + file_writer::BBFiles, + utils::{create_get_const_entities, create_get_nonconst_entities, snake_case}, +}; +use itertools::Itertools; +use powdr_ast::{ + analyzed::{AlgebraicExpression, Analyzed, Identity, IdentityKind}, + 
parsed::SelectedExpressions, +}; +use powdr_number::FieldElement; + +use crate::utils::sanitize_name; + +#[derive(Debug)] +/// Permutation +/// +/// Contains the information required to produce a permutation relation +pub struct Permutation { + /// -> Attribute - the name given to the inverse helper column + pub attribute: Option, + /// -> PermSide - the left side of the permutation + pub left: PermutationSide, + /// -> PermSide - the right side of the permutation + pub right: PermutationSide, +} + +#[derive(Debug)] +/// PermSide +/// +/// One side of a two sided permutation relationship +pub struct PermutationSide { + /// -> Option - the selector for the permutation ( on / off toggle ) + selector: Option, + /// The columns involved in this side of the permutation + cols: Vec, +} + +pub trait PermutationBuilder { + /// Takes in an AST and works out what permutation relations are needed + /// Note: returns the name of the inverse columns, such that they can be added to he prover in subsequent steps + fn create_permutation_files( + &self, + name: &str, + analyzed: &Analyzed, + ) -> Vec; +} + +impl PermutationBuilder for BBFiles { + fn create_permutation_files( + &self, + project_name: &str, + analyzed: &Analyzed, + ) -> Vec { + let perms: Vec<&Identity>> = analyzed + .identities + .iter() + .filter(|identity| matches!(identity.kind, IdentityKind::Permutation)) + .collect(); + let new_perms = perms + .iter() + .map(|perm| Permutation { + attribute: perm.attribute.clone().map(|att| att.to_lowercase()), + left: get_perm_side(&perm.left), + right: get_perm_side(&perm.right), + }) + .collect_vec(); + + create_permutations(self, project_name, &new_perms); + new_perms + } +} + +/// The attributes of a permutation contain the name of the inverse, we collect all of these to create the inverse column +pub fn get_inverses_from_permutations(permutations: &[Permutation]) -> Vec { + permutations + .iter() + .map(|perm| perm.attribute.clone().unwrap()) + .collect() +} + +/// Write 
the permutation settings files to disk +fn create_permutations(bb_files: &BBFiles, project_name: &str, permutations: &Vec) { + for permutation in permutations { + let perm_settings = create_permutation_settings_file(permutation); + + let folder = format!("{}/{}", bb_files.rel, &snake_case(project_name)); + let file_name = format!( + "{}{}", + permutation.attribute.clone().unwrap_or("NONAME".to_owned()), + ".hpp".to_owned() + ); + bb_files.write_file(&folder, &file_name, &perm_settings); + } +} + +/// All relation types eventually get wrapped in the relation type +/// This function creates the export for the relation type so that it can be added to the flavor +fn create_relation_exporter(permutation_name: &str) -> String { + let settings_name = format!("{}_permutation_settings", permutation_name); + let permutation_export = format!("template using {permutation_name}_relation = GenericPermutationRelation<{settings_name}, FF_>;"); + let relation_export = format!("template using {permutation_name} = GenericPermutation<{settings_name}, FF_>;"); + + format!( + " + {permutation_export} + {relation_export} + " + ) +} + +fn permutation_settings_includes() -> &'static str { + r#" + #pragma once + + #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" + + #include + #include + "# +} + +fn create_permutation_settings_file(permutation: &Permutation) -> String { + log::trace!("Permutation: {:?}", permutation); + let columns_per_set = permutation.left.cols.len(); + // TODO(md): In the future we will need to condense off the back of this - combining those with the same inverse column + let permutation_name = permutation + .attribute + .clone() + .expect("Inverse column name must be provided using attribute syntax"); + + // This also will need to work for both sides of this ! 
+ let lhs_selector = permutation + .left + .selector + .clone() + .expect("At least one selector must be provided"); + // If a rhs selector is not present, then we use the rhs selector -- TODO(md): maybe we want the default to be always on? + let rhs_selector = permutation + .right + .selector + .clone() + .unwrap_or(lhs_selector.clone()); + + let lhs_cols = permutation.left.cols.clone(); + let rhs_cols = permutation.right.cols.clone(); + + // 0. The polynomial containing the inverse products -> taken from the attributes + // 1. The polynomial enabling the relation (the selector) + // 2. lhs selector + // 3. rhs selector + // 4.. + columns per set. lhs cols + // 4 + columns per set.. . rhs cols + let mut perm_entities: Vec = [ + permutation_name.clone(), + lhs_selector.clone(), + lhs_selector.clone(), + rhs_selector.clone(), + ] + .to_vec(); + + perm_entities.extend(lhs_cols); + perm_entities.extend(rhs_cols); + + let permutation_settings_includes = permutation_settings_includes(); + + let inverse_computed_at = create_inverse_computed_at(&lhs_selector, &rhs_selector); + let const_entities = create_get_const_entities(&perm_entities); + let nonconst_entities = create_get_nonconst_entities(&perm_entities); + let relation_exporter = create_relation_exporter(&permutation_name); + + format!( + " + {permutation_settings_includes} + + namespace bb {{ + + class {permutation_name}_permutation_settings {{ + public: + // This constant defines how many columns are bundled together to form each set. + constexpr static size_t COLUMNS_PER_SET = {columns_per_set}; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial at this index. Otherwise the + * value needs to be set to zero. 
+ * + * @details If this is true then permutation takes place in this row + */ + {inverse_computed_at} + + /** + * @brief Get all the entities for the permutation when we don't need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!) + * + * @return All the entities needed for the permutation + */ + {const_entities} + + /** + * @brief Get all the entities for the permutation when need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!) 
+ * + * @return All the entities needed for the permutation + */ + {nonconst_entities} + }}; + + {relation_exporter} + }} + " + ) +} + +fn create_inverse_computed_at(lhs_selector: &String, rhs_selector: &String) -> String { + let lhs_computed_selector = format!("in.{lhs_selector}"); + let rhs_computed_selector = format!("in.{rhs_selector}"); + format!(" + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) {{ + return ({lhs_computed_selector } == 1 || {rhs_computed_selector} == 1); + }}") +} + +fn get_perm_side( + def: &SelectedExpressions>, +) -> PermutationSide { + let get_name = |expr: &AlgebraicExpression| match expr { + AlgebraicExpression::Reference(a_ref) => sanitize_name(&a_ref.name), + _ => panic!("Expected reference"), + }; + + PermutationSide { + selector: def.selector.as_ref().map(get_name), + cols: def.expressions.iter().map(get_name).collect_vec(), + } +} diff --git a/bb-pilcom/bb-pil-backend/src/prover_builder.rs b/bb-pilcom/bb-pil-backend/src/prover_builder.rs new file mode 100644 index 00000000000..be412cf0d57 --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/prover_builder.rs @@ -0,0 +1,331 @@ +use crate::file_writer::BBFiles; +use crate::utils::{map_with_newline, snake_case}; + +pub trait ProverBuilder { + fn create_prover_hpp(&mut self, name: &str); + + fn create_prover_cpp( + &mut self, + name: &str, + commitment_polys: &[String], + lookup_names: &[String], + ); +} + +impl ProverBuilder for BBFiles { + fn create_prover_hpp(&mut self, name: &str) { + let include_str = includes_hpp(&snake_case(name)); + + let prover_hpp = format!(" + {include_str} + namespace bb {{ + + class {name}Prover {{ + + using Flavor = {name}Flavor; + using FF = Flavor::FF; + using PCS = Flavor::PCS; + using PCSCommitmentKey = Flavor::CommitmentKey; + using ProvingKey = Flavor::ProvingKey; + using Polynomial = Flavor::Polynomial; + using ProverPolynomials = Flavor::ProverPolynomials; + using CommitmentLabels = Flavor::CommitmentLabels; + 
using Transcript = Flavor::Transcript; + + public: + explicit {name}Prover(std::shared_ptr input_key, std::shared_ptr commitment_key); + + void execute_preamble_round(); + void execute_wire_commitments_round(); + void execute_log_derivative_inverse_round(); + void execute_relation_check_rounds(); + void execute_zeromorph_rounds(); + + HonkProof export_proof(); + HonkProof construct_proof(); + + std::shared_ptr transcript = std::make_shared(); + + std::vector public_inputs; + + bb::RelationParameters relation_parameters; + + std::shared_ptr key; + + // Container for spans of all polynomials required by the prover (i.e. all multivariates evaluated by Sumcheck). + ProverPolynomials prover_polynomials; + + CommitmentLabels commitment_labels; + typename Flavor::WitnessCommitments witness_commitments; + + Polynomial quotient_W; + + SumcheckOutput sumcheck_output; + + std::shared_ptr commitment_key; + + using ZeroMorph = ZeroMorphProver_; + + private: + HonkProof proof; + }}; + + }} // namespace bb + + "); + self.write_file( + &self.prover, + &format!("{}_prover.hpp", snake_case(name)), + &prover_hpp, + ); + } + + /// Create the prover cpp file + /// + /// Committed polys are included as we manually unroll all commitments, as we do not commit to everything + fn create_prover_cpp( + &mut self, + name: &str, + commitment_polys: &[String], + lookup_names: &[String], + ) { + let include_str = includes_cpp(&snake_case(name)); + + let polynomial_commitment_phase = create_commitments_phase(commitment_polys); + + let (call_log_derivative_phase, log_derivative_inverse_phase): (String, String) = + if lookup_names.is_empty() { + ("".to_owned(), "".to_owned()) + } else { + ( + "execute_log_derivative_inverse_round();".to_owned(), + create_log_derivative_inverse_round(lookup_names), + ) + }; + + let prover_cpp = format!(" + {include_str} + + namespace bb {{ + + using Flavor = {name}Flavor; + using FF = Flavor::FF; + + /** + * Create {name}Prover from proving key, witness and manifest. 
+ * + * @param input_key Proving key. + * @param input_manifest Input manifest + * + * @tparam settings Settings class. + * */ + {name}Prover::{name}Prover(std::shared_ptr input_key, + std::shared_ptr commitment_key) + : key(input_key) + , commitment_key(commitment_key) + {{ + for (auto [prover_poly, key_poly] : zip_view(prover_polynomials.get_unshifted(), key->get_all())) {{ + ASSERT(bb::flavor_get_label(prover_polynomials, prover_poly) == + bb::flavor_get_label(*key, key_poly)); + prover_poly = key_poly.share(); + }} + for (auto [prover_poly, key_poly] : zip_view(prover_polynomials.get_shifted(), key->get_to_be_shifted())) {{ + ASSERT(bb::flavor_get_label(prover_polynomials, prover_poly) == + bb::flavor_get_label(*key, key_poly) + \"_shift\"); + prover_poly = key_poly.shifted(); + }} + }} + + + /** + * @brief Add circuit size, public input size, and public inputs to transcript + * + */ + void {name}Prover::execute_preamble_round() + {{ + const auto circuit_size = static_cast(key->circuit_size); + + transcript->send_to_verifier(\"circuit_size\", circuit_size); + }} + + /** + * @brief Compute commitments to all of the witness wires (apart from the logderivative inverse wires) + * + */ + void {name}Prover::execute_wire_commitments_round() + {{ + + {polynomial_commitment_phase} + + }} + + void {name}Prover::execute_log_derivative_inverse_round() + {{ + + {log_derivative_inverse_phase} + }} + + /** + * @brief Run Sumcheck resulting in u = (u_1,...,u_d) challenges and all evaluations at u being calculated. 
+ * + */ + void {name}Prover::execute_relation_check_rounds() + {{ + using Sumcheck = SumcheckProver; + + auto sumcheck = Sumcheck(key->circuit_size, transcript); + + FF alpha = transcript->template get_challenge(\"Sumcheck:alpha\"); + std::vector gate_challenges(numeric::get_msb(key->circuit_size)); + + for (size_t idx = 0; idx < gate_challenges.size(); idx++) {{ + gate_challenges[idx] = transcript->template get_challenge(\"Sumcheck:gate_challenge_\" + std::to_string(idx)); + }} + sumcheck_output = sumcheck.prove(prover_polynomials, relation_parameters, alpha, gate_challenges); + }} + + + /** + * @brief Execute the ZeroMorph protocol to prove the multilinear evaluations produced by Sumcheck + * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. + * + * */ + void {name}Prover::execute_zeromorph_rounds() + {{ + ZeroMorph::prove(prover_polynomials.get_unshifted(), + prover_polynomials.get_to_be_shifted(), + sumcheck_output.claimed_evaluations.get_unshifted(), + sumcheck_output.claimed_evaluations.get_shifted(), + sumcheck_output.challenge, + commitment_key, + transcript); + + }} + + + HonkProof {name}Prover::export_proof() + {{ + proof = transcript->proof_data; + return proof; + }} + + HonkProof {name}Prover::construct_proof() + {{ + // Add circuit size public input size and public inputs to transcript. + execute_preamble_round(); + + // Compute wire commitments + execute_wire_commitments_round(); + + // Compute sorted list accumulator and commitment + {call_log_derivative_phase} + + // Fiat-Shamir: alpha + // Run sumcheck subprotocol. 
+ execute_relation_check_rounds(); + + // Fiat-Shamir: rho, y, x, z + // Execute Zeromorph multilinear PCS + execute_zeromorph_rounds(); + + return export_proof(); + }} + + }} // namespace bb + + + "); + + self.write_file( + &self.prover, + &format!("{}_prover.cpp", snake_case(name)), + &prover_cpp, + ); + } +} + +fn includes_hpp(name: &str) -> String { + format!( + " +#pragma once +#include \"barretenberg/commitment_schemes/zeromorph/zeromorph.hpp\" +#include \"barretenberg/plonk/proof_system/types/proof.hpp\" +#include \"barretenberg/relations/relation_parameters.hpp\" +#include \"barretenberg/sumcheck/sumcheck_output.hpp\" +#include \"barretenberg/transcript/transcript.hpp\" + +#include \"barretenberg/vm/generated/{name}_flavor.hpp\" + + " + ) +} + +fn includes_cpp(name: &str) -> String { + format!( + " + + #include \"{name}_prover.hpp\" + #include \"barretenberg/commitment_schemes/claim.hpp\" + #include \"barretenberg/commitment_schemes/commitment_key.hpp\" + #include \"barretenberg/honk/proof_system/logderivative_library.hpp\" + #include \"barretenberg/honk/proof_system/permutation_library.hpp\" + #include \"barretenberg/plonk_honk_shared/library/grand_product_library.hpp\" + #include \"barretenberg/polynomials/polynomial.hpp\" + #include \"barretenberg/relations/lookup_relation.hpp\" + #include \"barretenberg/relations/permutation_relation.hpp\" + #include \"barretenberg/sumcheck/sumcheck.hpp\" + " + ) +} + +/// Commitment Transform +/// +/// Produces code to perform kzg commitment, then stores in the witness_commitments struct +fn commitment_transform(name: &String) -> String { + format!("witness_commitments.{name} = commitment_key->commit(key->{name});") +} + +/// Send to Verifier Transform +/// +/// Sends commitment produces in commitment_transform to the verifier +fn send_to_verifier_transform(name: &String) -> String { + format!("transcript->send_to_verifier(commitment_labels.{name}, witness_commitments.{name});") +} + +fn 
create_commitments_phase(polys_to_commit_to: &[String]) -> String { + let all_commit_operations = map_with_newline(polys_to_commit_to, commitment_transform); + let send_to_verifier_operations = + map_with_newline(polys_to_commit_to, send_to_verifier_transform); + + format!( + " + // Commit to all polynomials (apart from logderivative inverse polynomials, which are committed to in the later logderivative phase) + {all_commit_operations} + + // Send all commitments to the verifier + {send_to_verifier_operations} + " + ) +} + +fn create_log_derivative_inverse_round(lookup_operations: &[String]) -> String { + let all_commit_operations = map_with_newline(lookup_operations, commitment_transform); + let send_to_verifier_operations = + map_with_newline(lookup_operations, send_to_verifier_transform); + + format!( + " + auto [beta, gamm] = transcript->template get_challenges(\"beta\", \"gamma\"); + relation_parameters.beta = beta; + relation_parameters.gamma = gamm; + + key->compute_logderivative_inverses(relation_parameters); + + // Commit to all logderivative inverse polynomials + {all_commit_operations} + + // Send all commitments to the verifier + {send_to_verifier_operations} + " + ) +} diff --git a/bb-pilcom/bb-pil-backend/src/relation_builder.rs b/bb-pilcom/bb-pil-backend/src/relation_builder.rs new file mode 100644 index 00000000000..cfae7c85092 --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/relation_builder.rs @@ -0,0 +1,562 @@ +use itertools::Itertools; +use powdr_ast::analyzed::AlgebraicBinaryOperation; +use powdr_ast::analyzed::AlgebraicExpression; +use powdr_ast::analyzed::AlgebraicUnaryOperation; +use powdr_ast::analyzed::Identity; +use powdr_ast::analyzed::{ + AlgebraicBinaryOperator, AlgebraicExpression as Expression, AlgebraicUnaryOperator, + IdentityKind, +}; +use powdr_ast::parsed::SelectedExpressions; +use std::collections::HashMap; +use std::collections::HashSet; +use std::path::Path; + +use powdr_number::{BigUint, DegreeType, FieldElement}; + +use 
crate::file_writer::BBFiles; +use crate::utils::{capitalize, map_with_newline, snake_case}; + +/// Returned back to the vm builder from the create_relations call +pub struct RelationOutput { + /// A list of the names of the created relations + pub relations: Vec, + /// A list of the names of all of the 'used' shifted polys + pub shifted_polys: Vec, +} + +/// Each created bb Identity is passed around with its degree so as needs to be manually +/// provided for sumcheck +type BBIdentity = (DegreeType, String); + +pub trait RelationBuilder { + /// Create Relations + /// + /// Takes in the ast ( for relations ), groups each of them by file, and then + /// calls 'create relation' for each + /// + /// Relation output is passed back to the caller as the prover requires both: + /// - The shifted polys + /// - The names of the relations files created + fn create_relations( + &self, + root_name: &str, + identities: &[Identity>], + ) -> RelationOutput; + + /// Create Relation + /// + /// Name and root name are required to determine the file path, e.g. it will be in the bberg/relations/generated + /// followed by /root_name/name + /// - root name should be the name provided with the --name flag + /// - name will be a pil namespace + /// + /// - Identities are the identities that will be used to create the relations, they are generated within create_relations + /// - row_type contains all of the columns that the relations namespace touches. + fn create_relation( + &self, + root_name: &str, + name: &str, + sub_relations: &[String], + identities: &[BBIdentity], + row_type: &str, + labels_lookup: String, + ); + + /// Declare views + /// + /// Declare views is a macro that generates a reference for each of the columns + /// This reference will be a span into a sumcheck related object, it must be declared for EACH sub-relation + /// as the sumcheck object is sensitive to the degree of the relation. 
+ fn create_declare_views(&self, name: &str, all_cols_and_shifts: &[String]); +} + +impl RelationBuilder for BBFiles { + fn create_relations( + &self, + file_name: &str, + analyzed_identities: &[Identity>], + ) -> RelationOutput { + // Group relations per file + let grouped_relations: HashMap>>> = + group_relations_per_file(analyzed_identities); + let mut relations = grouped_relations.keys().cloned().collect_vec(); + relations.sort(); + + // Contains all of the rows in each relation, will be useful for creating composite builder types + let mut all_rows: HashMap = HashMap::new(); + let mut shifted_polys: Vec = Vec::new(); + + // ----------------------- Create the relation files ----------------------- + for (relation_name, analyzed_idents) in grouped_relations.iter() { + let IdentitiesOutput { + subrelations, + identities, + collected_cols, + collected_shifts, + expression_labels, + } = create_identities(file_name, analyzed_idents); + + // TODO: This can probably be moved into the create_identities function + let row_type = create_row_type(&capitalize(relation_name), &collected_cols); + + // Aggregate all shifted polys + shifted_polys.extend(collected_shifts); + // Aggregate all rows + all_rows.insert(relation_name.to_owned(), row_type.clone()); + + let labels_lookup = create_relation_labels(relation_name, expression_labels); + self.create_relation( + file_name, + relation_name, + &subrelations, + &identities, + &row_type, + labels_lookup, + ); + } + + shifted_polys.sort(); + relations.sort(); + + RelationOutput { + relations, + shifted_polys, + } + } + + fn create_relation( + &self, + root_name: &str, + name: &str, + sub_relations: &[String], + identities: &[BBIdentity], + row_type: &str, + labels_lookup: String, + ) { + let includes = relation_includes(); + let class_boilerplate = relation_class_boilerplate(name, sub_relations, identities); + let export = get_export(name); + + let relations = format!( + "{includes} +namespace bb::{root_name}_vm {{ + +{row_type}; 
+ +{labels_lookup} + +{class_boilerplate} + +{export} + + }}" + ); + + self.write_file( + &format!("{}/{}", &self.rel, snake_case(root_name)), + &format!("{}.hpp", snake_case(name)), + &relations, + ); + } + + fn create_declare_views(&self, name: &str, all_cols_and_shifts: &[String]) { + let view_transformation = + |name: &String| format!("[[maybe_unused]] auto {name} = View(new_term.{name}); \\"); + let make_view_per_row = map_with_newline(all_cols_and_shifts, view_transformation); + + let declare_views = format!( + " + #define {name}_DECLARE_VIEWS(index) \\ + using Accumulator = typename std::tuple_element::type; \\ + using View = typename Accumulator::View; \\ + {make_view_per_row} + + + " + ); + + self.write_file( + &format!("{}/{}", &self.rel, snake_case(name)), + "declare_views.hpp", + &declare_views, + ); + } +} + +/// Group relations per file +/// +/// The compiler returns all relations in one large vector, however we want to distinguish +/// which files .pil files the relations belong to for later code gen +/// +/// Say we have two files foo.pil and bar.pil +/// foo.pil contains the following relations: +/// - foo1 +/// - foo2 +/// bar.pil contains the following relations: +/// - bar1 +/// - bar2 +/// +/// This function will return a hashmap with the following structure: +/// { +/// "foo": [foo1, foo2], +/// "bar": [bar1, bar2] +/// } +/// +/// This allows us to generate a relation.hpp file containing ONLY the relations for that .pil file +fn group_relations_per_file( + identities: &[Identity>], +) -> HashMap>>> { + identities.iter().cloned().into_group_map_by(|identity| { + identity + .source + .file_name + .as_ref() + .and_then(|file_name| Path::new(file_name.as_ref()).file_stem()) + .map(|stem| stem.to_string_lossy().into_owned()) + .unwrap_or_default() + .replace(".pil", "") + }) +} + +fn relation_class_boilerplate( + name: &str, + sub_relations: &[String], + identities: &[BBIdentity], +) -> String { + // We add one to all degrees because we have an 
extra scaling factor + let degrees = identities.iter().map(|(d, _)| d + 1).collect(); + let degree_boilerplate = get_degree_boilerplate(degrees); + let relation_code = get_relation_code(sub_relations); + format!( + "template class {name}Impl {{ + public: + using FF = FF_; + + {degree_boilerplate} + + {relation_code} +}};", + ) +} + +fn get_export(name: &str) -> String { + format!( + "template using {name} = Relation<{name}Impl>;", + name = name + ) +} + +fn get_relation_code(ids: &[String]) -> String { + let mut relation_code = r#" + template + void static accumulate( + ContainerOverSubrelations& evals, + const AllEntities& new_term, + [[maybe_unused]] const RelationParameters&, + [[maybe_unused]] const FF& scaling_factor + ){ + + "# + .to_owned(); + for id in ids { + relation_code.push_str(&format!("{}\n", id)); + } + relation_code.push_str("}\n"); + relation_code +} + +fn get_degree_boilerplate(degrees: Vec) -> String { + let num_degrees = degrees.len(); + + let mut degree_boilerplate = format!( + "static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{{\n" + ); + for degree in °rees { + degree_boilerplate.push_str(&format!(" {},\n", degree)); + } + degree_boilerplate.push_str("};"); + + degree_boilerplate +} + +// The include statements required for a new relation file +fn relation_includes() -> &'static str { + r#" +#pragma once +#include "../../relation_parameters.hpp" +#include "../../relation_types.hpp" +#include "./declare_views.hpp" +"# +} + +// Each vm will need to have a row which is a combination of all of the witness columns +pub(crate) fn create_row_type(name: &str, all_rows: &[String]) -> String { + let row_transformation = |row: &_| format!(" FF {row} {{}};"); + let all_annotated = map_with_newline(all_rows, row_transformation); + + format!( + "template struct {name}Row {{ + {} + + [[maybe_unused]] static std::vector names(); + }}", + all_annotated, + ) +} + +fn create_identity( + expression: &SelectedExpressions>, + collected_cols: &mut HashSet, + 
collected_public_identities: &mut HashSet, +) -> Option { + // We want to read the types of operators and then create the appropiate code + + if let Some(expr) = &expression.selector { + let x = craft_expression(expr, collected_cols, collected_public_identities); + log::trace!("expression {:?}", x); + Some(x) + } else { + None + } +} + +// TODO: replace the preamble with a macro so the code looks nicer +fn create_subrelation(index: usize, preamble: String, identity: &mut BBIdentity) -> String { + // \\\ + let id = &identity.1; + + format!( + "//Contribution {index} + {{\n{preamble} + + auto tmp = {id}; + tmp *= scaling_factor; + std::get<{index}>(evals) += tmp; +}}", + ) +} + +fn craft_expression( + expr: &Expression, + // TODO: maybe make state? + collected_cols: &mut HashSet, + collected_public_identities: &mut HashSet, +) -> BBIdentity { + let var_name = match expr { + Expression::Number(n) => { + let number: BigUint = n.to_arbitrary_integer(); + if number.bit_len() < 32 { + return (1, format!("FF({})", number)); + } + if number.bit_len() < 64 { + return (1, format!("FF({}UL)", number)); + } + if number.bit_len() < 256 { + let bytes = number.to_be_bytes(); + let padding_len = 32 - bytes.len(); + + let mut padded_bytes = vec![0; padding_len]; + padded_bytes.extend_from_slice(&bytes); + + let mut chunks: Vec = padded_bytes + .chunks(8) + .map(|chunk| u64::from_be_bytes(chunk.try_into().unwrap())) + .collect(); + + chunks.resize(4, 0); + return ( + 1, + format!( + "FF(uint256_t{{{}UL, {}UL, {}UL, {}UL}})", + chunks[3], chunks[2], chunks[1], chunks[0], + ), + ); + } + unimplemented!("{:?}", expr); + } + Expression::Reference(polyref) => { + let mut poly_name = polyref.name.replace('.', "_").to_string(); + if polyref.next { + // NOTE: Naive algorithm to collect all shifted polys + poly_name = format!("{}_shift", poly_name); + } + collected_cols.insert(poly_name.clone()); + (1, poly_name) + } + Expression::BinaryOperation(AlgebraicBinaryOperation { + left: lhe, + op, 
+ right: rhe, + }) => { + let (ld, lhs) = craft_expression(lhe, collected_cols, collected_public_identities); + let (rd, rhs) = craft_expression(rhe, collected_cols, collected_public_identities); + + let degree = std::cmp::max(ld, rd); + match op { + AlgebraicBinaryOperator::Add => match lhe.as_ref() { + // BBerg hack, we do not want a field on the lhs of an expression + Expression::Number(_) => (degree, format!("({} + {})", rhs, lhs)), + _ => (degree, format!("({} + {})", lhs, rhs)), + }, + AlgebraicBinaryOperator::Sub => match lhe.as_ref() { + // BBerg hack, we do not want a field on the lhs of an expression + Expression::Number(_) => (degree, format!("(-{} + {})", rhs, lhs)), + _ => (degree, format!("({} - {})", lhs, rhs)), + }, + AlgebraicBinaryOperator::Mul => match lhe.as_ref() { + // BBerg hack, we do not want a field on the lhs of an expression + Expression::Number(_) => (ld + rd, format!("({} * {})", rhs, lhs)), + _ => (ld + rd, format!("({} * {})", lhs, rhs)), + }, + _ => unimplemented!("{:?}", expr), + } + } + Expression::UnaryOperation(AlgebraicUnaryOperation { + op: operator, + expr: expression, + }) => match operator { + AlgebraicUnaryOperator::Minus => { + let (d, e) = + craft_expression(expression, collected_cols, collected_public_identities); + (d, format!("-{}", e)) + } + }, + // TODO: for now we do nothing with calls to public identities + // These probably can be implemented as some form of copy, however im not sure how we are going to process these down the line + Expression::PublicReference(name) => { + // We collect them for now to warn the user what is going on + collected_public_identities.insert(name.clone()); + (1, "FF(0)".to_string()) + } + // Note: challenges are not being used in our current pil construction + Expression::Challenge(_) => unimplemented!("{:?}", expr), + }; + var_name +} + +pub struct IdentitiesOutput { + subrelations: Vec, + identities: Vec, + collected_cols: Vec, + collected_shifts: Vec, + expression_labels: HashMap, 
+} + +pub(crate) fn create_identities( + file_name: &str, + identities: &[Identity>], +) -> IdentitiesOutput { + // We only want the expressions for now + // When we have a poly type, we only need the left side of it + let ids = identities + .iter() + .filter(|identity| identity.kind == IdentityKind::Polynomial) + .collect::>(); + + let mut identities = Vec::new(); + let mut subrelations = Vec::new(); + let mut expression_labels: HashMap = HashMap::new(); // Each relation can be given a label, this label can be assigned here + let mut collected_cols: HashSet = HashSet::new(); + let mut collected_public_identities: HashSet = HashSet::new(); + + // Collect labels for each identity + // TODO: shite + for (i, id) in ids.iter().enumerate() { + if let Some(label) = &id.attribute { + expression_labels.insert(i, label.clone()); + } + } + + let expressions = ids.iter().map(|id| id.left.clone()).collect::>(); + for (i, expression) in expressions.iter().enumerate() { + let relation_boilerplate = format!( + "{file_name}_DECLARE_VIEWS({i}); + ", + ); + + // TODO: collected pattern is shit + let mut identity = create_identity( + expression, + &mut collected_cols, + &mut collected_public_identities, + ) + .unwrap(); + let subrelation = create_subrelation(i, relation_boilerplate, &mut identity); + + identities.push(identity); + + subrelations.push(subrelation); + } + + // Print a warning to the user about usage of public identities + if !collected_public_identities.is_empty() { + log::warn!( + "Public Identities are not supported yet in codegen, however some were collected" + ); + log::warn!("Public Identities: {:?}", collected_public_identities); + } + + let mut collected_cols: Vec = collected_cols.drain().collect(); + let mut collected_shifts: Vec = collected_cols + .clone() + .iter() + .filter_map(|col| { + if col.ends_with("shift") { + Some(col.clone()) + } else { + None + } + }) + .collect(); + + collected_cols.sort(); + collected_shifts.sort(); + + IdentitiesOutput { + 
subrelations, + identities, + collected_cols, + collected_shifts, + expression_labels, + } +} + +/// Relation labels +/// +/// To view relation labels we create a sparse switch that contains all of the collected labels +/// Whenever there is a failure, we can lookup into this mapping +/// +/// Note: this mapping will never be that big, so we are quite naive in implementation +/// It should be able to be called from else where with relation_name::get_relation_label +fn create_relation_labels(relation_name: &str, labels: HashMap) -> String { + // Sort labels by the index + let label_transformation = |(index, label)| { + format!( + "case {index}: + return \"{label}\"; + " + ) + }; + + // Sort the labels by their index + let mut sorted_labels: Vec<(usize, String)> = labels.into_iter().collect(); + sorted_labels.sort_by(|a, b| a.0.cmp(&b.0)); + + let switch_statement: String = sorted_labels + .into_iter() + .map(label_transformation) + .collect::>() + .join("\n"); + + format!( + " + inline std::string get_relation_label_{relation_name}(int index) {{ + switch (index) {{ + {switch_statement} + }} + return std::to_string(index); + }} + " + ) +} diff --git a/bb-pilcom/bb-pil-backend/src/utils.rs b/bb-pilcom/bb-pil-backend/src/utils.rs new file mode 100644 index 00000000000..f84dfac2f1d --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/utils.rs @@ -0,0 +1,145 @@ +use itertools::Itertools; + +/// Get Relations Imports +/// +/// We may have multiple relation files in the generated foler +/// This method will return all of the imports for the relation header files +pub fn get_relations_imports(name: &str, relations: &[String], permutations: &[String]) -> String { + let all_relations = flatten(&[relations.to_vec(), permutations.to_vec()]); + let transformation = |relation_name: &_| { + format!("#include \"barretenberg/relations/generated/{name}/{relation_name}.hpp\"") + }; + + map_with_newline(&all_relations, transformation) +} + +/// Sanitize Names +/// +/// Column titles that 
we get from pil contain . to distinguish which pil namespace they belong to +/// We need to replace these with _ to make them valid C++ identifiers +pub fn sanitize_name(string: &str) -> String { + string.replace(['.', '[', ']'], "_") +} + +/// Capitalize +pub fn capitalize(s: &str) -> String { + let mut c = s.chars(); + match c.next() { + None => String::new(), + Some(f) => f.to_uppercase().collect::() + c.as_str(), + } +} + +/// Map With Newline +/// This utility function is used all over the codegen pipeline +/// It takes a list, usually the names of columns in an execution trace and applies a string transformation "op" +/// to each element in the list +pub fn map_with_newline(list: &[String], op: Func) -> String +where + Func: Fn(&String) -> String, +{ + transform_map(list, op).join("\n") +} + +/// Collect Col +/// +/// Transforms columns from powdr representation ( where the witnesses are linked ) +/// Into a version where we just keep the columns +/// As this is all we are about +pub fn collect_col(list: &[String], op: Func) -> Vec +where + Func: Fn(&String) -> String, +{ + list.iter().map(op).collect::>() +} + +/// Transform Map +/// +/// Apply a transformation to a list of strings +pub fn transform_map(list: &[String], op: Func) -> Vec +where + Func: Fn(&String) -> String, +{ + list.iter().map(op).collect::>() +} + +/// Flatten +/// +/// Returns a flattened concatenation of the input arrays +pub fn flatten(list: &[Vec]) -> Vec { + let arr = list.iter().cloned(); + arr.into_iter().flatten().collect() +} + +/// Create Forward As Tuple +/// +/// Helper function to create a forward as tuple cpp statement +pub fn create_forward_as_tuple(settings: &[String]) -> String { + let adjusted = settings.iter().map(|col| format!("in.{col}")).join(",\n"); + format!( + " + return std::forward_as_tuple( + {} + ); + ", + adjusted + ) +} + +// TODO: may make sense to move the below around a bit +pub fn create_get_const_entities(settings: &[String]) -> String { + let forward = 
create_forward_as_tuple(settings); + format!( + " + template static inline auto get_const_entities(const AllEntities& in) {{ + {forward} + }} + " + ) +} + +pub fn create_get_nonconst_entities(settings: &[String]) -> String { + let forward = create_forward_as_tuple(settings); + format!( + " + template static inline auto get_nonconst_entities(AllEntities& in) {{ + {forward} + }} + " + ) +} + +/// Snake Case +/// +/// Transform camel case string into snake case, such as: RedFlower --> red_flower +pub fn snake_case(input: &str) -> String { + let mut result = String::new(); + + // Handle the first character + if input.is_empty() { + return result; // Empty input + } + let mut first_char = input.chars().next().unwrap(); + if first_char.is_uppercase() { + first_char = first_char.to_ascii_lowercase(); + } + result.push(first_char); + + // Process remaining characters + for ch in input.chars().skip(1) { + if ch.is_uppercase() { + result.push('_'); + result.push(ch.to_ascii_lowercase()); + } else { + result.push(ch); + } + } + + result +} + +pub fn sort_cols(cols: &[String]) -> Vec { + let mut cols = cols.to_vec(); + cols.sort(); + cols +} diff --git a/bb-pilcom/bb-pil-backend/src/verifier_builder.rs b/bb-pilcom/bb-pil-backend/src/verifier_builder.rs new file mode 100644 index 00000000000..710b9cafadc --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/verifier_builder.rs @@ -0,0 +1,286 @@ +use crate::{ + file_writer::BBFiles, + utils::{map_with_newline, snake_case}, +}; + +pub trait VerifierBuilder { + fn create_verifier_cpp( + &mut self, + name: &str, + witness: &[String], + inverses: &[String], + public_cols: &[(String, usize)], + ); + + fn create_verifier_hpp(&mut self, name: &str, public_cols: &[(String, usize)]); +} + +impl VerifierBuilder for BBFiles { + fn create_verifier_cpp( + &mut self, + name: &str, + witness: &[String], + inverses: &[String], + public_cols: &[(String, usize)], + ) { + let include_str = includes_cpp(&snake_case(name)); + + let wire_transformation = 
|n: &String| { + format!( + "commitments.{n} = transcript->template receive_from_prover(commitment_labels.{n});" + ) + }; + let wire_commitments = map_with_newline(witness, wire_transformation); + + let has_public_input_columns = !public_cols.is_empty(); + let has_inverses = !inverses.is_empty(); + + let get_inverse_challenges = if has_inverses { + " + auto [beta, gamm] = transcript->template get_challenges(\"beta\", \"gamma\"); + relation_parameters.beta = beta; + relation_parameters.gamma = gamm; + " + .to_string() + } else { + "".to_owned() + }; + + let verify_proof_function_declaration: String = if has_public_input_columns { + format!("bool {name}Verifier::verify_proof(const HonkProof& proof, const std::vector>& public_inputs)") + } else { + format!("bool {name}Verifier::verify_proof(const HonkProof& proof)") + }; + + let public_inputs_column_transformation = + |public_inputs_column_name: &String, idx: usize| { + format!( + " + FF {public_inputs_column_name}_evaluation = evaluate_public_input_column(public_inputs[{idx}], circuit_size, multivariate_challenge); + if ({public_inputs_column_name}_evaluation != claimed_evaluations.{public_inputs_column_name}) {{ + return false; + }} + " + ) + }; + + let (public_inputs_check, evaluate_public_inputs) = if has_public_input_columns { + let inputs_check = public_cols + .iter() + .map(|(col_name, idx)| public_inputs_column_transformation(col_name, *idx)) + .collect::(); + + let evaluate_public_inputs = format!( + " + + using FF = {name}Flavor::FF; + + // Evaluate the given public input column over the multivariate challenge points + [[maybe_unused]] inline FF evaluate_public_input_column(const std::vector& points, const size_t circuit_size, std::vector challenges) {{ + + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/6361): we pad the points to the circuit size in order to get the correct evaluation. + // This is not efficient, and will not be valid in production. 
+ std::vector new_points(circuit_size, 0); + std::copy(points.begin(), points.end(), new_points.data()); + + Polynomial polynomial(new_points); + return polynomial.evaluate_mle(challenges); + }} + " + ); + + (inputs_check, evaluate_public_inputs) + } else { + ("".to_owned(), "".to_owned()) + }; + + let inverse_commitments = map_with_newline(inverses, wire_transformation); + + let ver_cpp = format!(" +{include_str} + + namespace bb {{ + + + {name}Verifier::{name}Verifier(std::shared_ptr verifier_key) + : key(verifier_key) + {{}} + + {name}Verifier::{name}Verifier({name}Verifier&& other) noexcept + : key(std::move(other.key)) + , pcs_verification_key(std::move(other.pcs_verification_key)) + {{}} + + {name}Verifier& {name}Verifier::operator=({name}Verifier&& other) noexcept + {{ + key = other.key; + pcs_verification_key = (std::move(other.pcs_verification_key)); + commitments.clear(); + return *this; + }} + + {evaluate_public_inputs} + + + /** + * @brief This function verifies an {name} Honk proof for given program settings. 
+ * + */ + {verify_proof_function_declaration} + {{ + using Flavor = {name}Flavor; + using FF = Flavor::FF; + using Commitment = Flavor::Commitment; + // using PCS = Flavor::PCS; + // using ZeroMorph = ZeroMorphVerifier_; + using VerifierCommitments = Flavor::VerifierCommitments; + using CommitmentLabels = Flavor::CommitmentLabels; + + RelationParameters relation_parameters; + + transcript = std::make_shared(proof); + + VerifierCommitments commitments {{ key }}; + CommitmentLabels commitment_labels; + + const auto circuit_size = transcript->template receive_from_prover(\"circuit_size\"); + + if (circuit_size != key->circuit_size) {{ + return false; + }} + + // Get commitments to VM wires + {wire_commitments} + + {get_inverse_challenges} + + // Get commitments to inverses + {inverse_commitments} + + // Execute Sumcheck Verifier + const size_t log_circuit_size = numeric::get_msb(circuit_size); + auto sumcheck = SumcheckVerifier(log_circuit_size, transcript); + + FF alpha = transcript->template get_challenge(\"Sumcheck:alpha\"); + + auto gate_challenges = std::vector(log_circuit_size); + for (size_t idx = 0; idx < log_circuit_size; idx++) {{ + gate_challenges[idx] = transcript->template get_challenge(\"Sumcheck:gate_challenge_\" + std::to_string(idx)); + }} + + auto [multivariate_challenge, claimed_evaluations, sumcheck_verified] = + sumcheck.verify(relation_parameters, alpha, gate_challenges); + + // If Sumcheck did not verify, return false + if (sumcheck_verified.has_value() && !sumcheck_verified.value()) {{ + return false; + }} + + // Public columns evaluation checks + {public_inputs_check} + + // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the + // unrolled protocol. 
+ // NOTE: temporarily disabled - facing integration issues + // auto pairing_points = ZeroMorph::verify(commitments.get_unshifted(), + // commitments.get_to_be_shifted(), + // claimed_evaluations.get_unshifted(), + // claimed_evaluations.get_shifted(), + // multivariate_challenge, + // transcript); + + // auto verified = pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); + // return sumcheck_verified.value() && verified; + return sumcheck_verified.value(); + }} + + + }} // namespace bb + + + "); + + self.write_file( + &self.prover, + &format!("{}_verifier.cpp", snake_case(name)), + &ver_cpp, + ); + } + + fn create_verifier_hpp(&mut self, name: &str, public_cols: &[(String, usize)]) { + let include_str = include_hpp(&snake_case(name)); + + // If there are public input columns, then the generated verifier must take them in as an argument for the verify_proof + let verify_proof = if !public_cols.is_empty() { + "bool verify_proof(const HonkProof& proof, const std::vector>& public_inputs);" + .to_string() + } else { + "bool verify_proof(const HonkProof& proof);".to_owned() + }; + + let ver_hpp = format!( + " +{include_str} + + namespace bb {{ + class {name}Verifier {{ + using Flavor = {name}Flavor; + using FF = Flavor::FF; + using Commitment = Flavor::Commitment; + using VerificationKey = Flavor::VerificationKey; + using VerifierCommitmentKey = Flavor::VerifierCommitmentKey; + using Transcript = Flavor::Transcript; + + public: + explicit {name}Verifier(std::shared_ptr verifier_key = nullptr); + {name}Verifier({name}Verifier&& other) noexcept; + {name}Verifier(const {name}Verifier& other) = delete; + + {name}Verifier& operator=(const {name}Verifier& other) = delete; + {name}Verifier& operator=({name}Verifier&& other) noexcept; + + {verify_proof} + + std::shared_ptr key; + std::map commitments; + std::shared_ptr pcs_verification_key; + std::shared_ptr transcript; + }}; + + }} // namespace bb + + + " + ); + + self.write_file( + &self.prover, + 
&format!("{}_verifier.hpp", snake_case(name)), + &ver_hpp, + ); + } +} + +fn include_hpp(name: &str) -> String { + format!( + " +#pragma once +#include \"barretenberg/plonk/proof_system/types/proof.hpp\" +#include \"barretenberg/sumcheck/sumcheck.hpp\" +#include \"barretenberg/vm/generated/{name}_flavor.hpp\" +#include \"barretenberg/vm/avm_trace/constants.hpp\" +" + ) +} + +fn includes_cpp(name: &str) -> String { + format!( + " + #include \"./{name}_verifier.hpp\" + #include \"barretenberg/commitment_schemes/zeromorph/zeromorph.hpp\" + #include \"barretenberg/numeric/bitop/get_msb.hpp\" + #include \"barretenberg/polynomials/polynomial.hpp\" + #include \"barretenberg/transcript/transcript.hpp\" + " + ) +} diff --git a/bb-pilcom/bb-pil-backend/src/vm_builder.rs b/bb-pilcom/bb-pil-backend/src/vm_builder.rs new file mode 100644 index 00000000000..8707ef40a45 --- /dev/null +++ b/bb-pilcom/bb-pil-backend/src/vm_builder.rs @@ -0,0 +1,236 @@ +use powdr_ast::analyzed::Analyzed; +use powdr_number::FieldElement; + +use crate::circuit_builder::CircuitBuilder; +use crate::composer_builder::ComposerBuilder; +use crate::file_writer::BBFiles; +use crate::flavor_builder::FlavorBuilder; +use crate::lookup_builder::get_counts_from_lookups; +use crate::lookup_builder::get_inverses_from_lookups; +use crate::lookup_builder::Lookup; +use crate::lookup_builder::LookupBuilder; +use crate::permutation_builder::get_inverses_from_permutations; +use crate::permutation_builder::Permutation; +use crate::permutation_builder::PermutationBuilder; +use crate::prover_builder::ProverBuilder; +use crate::relation_builder::RelationBuilder; +use crate::relation_builder::RelationOutput; +use crate::utils::collect_col; +use crate::utils::flatten; +use crate::utils::sanitize_name; +use crate::utils::sort_cols; +use crate::utils::transform_map; +use crate::verifier_builder::VerifierBuilder; + +/// All of the combinations of columns that are used in a bberg flavor file +struct ColumnGroups { + /// fixed or 
constant columns in pil -> will be found in vk + fixed: Vec, + /// witness or commit columns in pil -> will be found in proof + witness: Vec, + /// witness or commit columns in pil, with out the inverse columns + witnesses_without_inverses: Vec, + /// fixed + witness columns without lookup inverses + all_cols_without_inverses: Vec, + /// fixed + witness columns with lookup inverses + all_cols: Vec, + /// Columns that will not be shifted + unshifted: Vec, + /// Columns that will be shifted + to_be_shifted: Vec, + /// The shifts of the columns that will be shifted + shifted: Vec, + /// fixed + witness + shifted + all_cols_with_shifts: Vec, + /// Inverses from lookups and permuations + inverses: Vec, +} + +/// Analyzed to cpp +/// +/// Converts an analyzed pil AST into a set of cpp files that can be used to generate a proof +pub fn analyzed_to_cpp( + analyzed: &Analyzed, + fixed: &[String], + witness: &[String], + public: &[String], + name: Option, +) { + // Extract public inputs information. 
+ let mut public_inputs: Vec<(String, usize)> = public + .iter() + .enumerate() + .map(|(i, name)| (sanitize_name(name), i)) + .collect(); + public_inputs.sort_by(|a, b| a.1.cmp(&b.1)); + + // Sort fixed and witness to ensure consistent ordering + let fixed = &sort_cols(fixed); + let witness = &sort_cols(witness); + + let file_name: &str = &name.unwrap_or("Example".to_owned()); + let mut bb_files = BBFiles::default(file_name.to_owned()); + + // Inlining step to remove the intermediate poly definitions + let mut analyzed_identities = analyzed.identities_with_inlined_intermediate_polynomials(); + analyzed_identities.sort_by(|a, b| a.id.cmp(&b.id)); + + // ----------------------- Handle Standard Relation Identities ----------------------- + // We collect all references to shifts as we traverse all identities and create relation files + let RelationOutput { + relations, + shifted_polys, + } = bb_files.create_relations(file_name, &analyzed_identities); + + // ----------------------- Handle Lookup / Permutation Relation Identities ----------------------- + let permutations = bb_files.create_permutation_files(file_name, analyzed); + let lookups = bb_files.create_lookup_files(file_name, analyzed); + + // TODO: hack - this can be removed with some restructuring + let shifted_polys: Vec = shifted_polys + .clone() + .iter() + .map(|s| s.replace("_shift", "")) + .collect(); + + // Collect all column names and determine if they need a shift or not + let ColumnGroups { + fixed, + witness, + witnesses_without_inverses, + all_cols, + all_cols_without_inverses, + unshifted: _unshifted, + to_be_shifted, + shifted, + all_cols_with_shifts, + inverses, + } = get_all_col_names( + fixed, + witness, + public, + &shifted_polys, + &permutations, + &lookups, + ); + + bb_files.create_declare_views(file_name, &all_cols_with_shifts); + + // ----------------------- Create the circuit builder file ----------------------- + bb_files.create_circuit_builder_hpp( + file_name, + &relations, + 
&inverses, + &all_cols_without_inverses, + &all_cols, + &to_be_shifted, + &all_cols_with_shifts, + ); + + bb_files.create_circuit_builder_cpp(file_name, &all_cols); + + // ----------------------- Create the flavor file ----------------------- + bb_files.create_flavor_hpp( + file_name, + &relations, + &inverses, + &fixed, + &witness, + &all_cols, + &to_be_shifted, + &shifted, + &all_cols_with_shifts, + ); + + // ----------------------- Create the composer files ----------------------- + bb_files.create_composer_cpp(file_name); + bb_files.create_composer_hpp(file_name); + + // ----------------------- Create the Verifier files ----------------------- + bb_files.create_verifier_cpp( + file_name, + &witnesses_without_inverses, + &inverses, + &public_inputs, + ); + bb_files.create_verifier_hpp(file_name, &public_inputs); + + // ----------------------- Create the Prover files ----------------------- + bb_files.create_prover_cpp(file_name, &witnesses_without_inverses, &inverses); + bb_files.create_prover_hpp(file_name); +} + +/// Get all col names +/// +/// In the flavor file, there are a number of different groups of columns that we need to keep track of +/// This function will return all of the columns in the following groups: +/// - fixed +/// - witness +/// - all_cols +/// - unshifted +/// - to_be_shifted +/// - all_cols_with_shifts +fn get_all_col_names( + fixed: &[String], + witness: &[String], + public: &[String], + to_be_shifted: &[String], + permutations: &[Permutation], + lookups: &[Lookup], +) -> ColumnGroups { + log::info!("Getting all column names"); + + // Transformations + let sanitize = |name: &String| sanitize_name(name).to_owned(); + let append_shift = |name: &String| format!("{}_shift", *name); + + let perm_inverses = get_inverses_from_permutations(permutations); + let lookup_inverses = get_inverses_from_lookups(lookups); + let lookup_counts = get_counts_from_lookups(lookups); + + // Gather sanitized column names + let fixed_names = collect_col(fixed, 
sanitize); + let witness_names = collect_col(witness, sanitize); + let public_names = collect_col(public, sanitize); + let inverses = flatten(&[perm_inverses, lookup_inverses]); + let witnesses_without_inverses = flatten(&[ + public_names.clone(), + witness_names.clone(), + lookup_counts.clone(), + ]); + let witnesses_with_inverses = flatten(&[ + public_names.clone(), + witness_names, + inverses.clone(), + lookup_counts, + ]); + + // Group columns by properties + let shifted = transform_map(to_be_shifted, append_shift); + let all_cols_without_inverses: Vec = + flatten(&[fixed_names.clone(), witnesses_without_inverses.clone()]); + let all_cols: Vec = flatten(&[fixed_names.clone(), witnesses_with_inverses.clone()]); + let unshifted: Vec = flatten(&[fixed_names.clone(), witnesses_with_inverses.clone()]) + .into_iter() + .filter(|name| !shifted.contains(name)) + .collect(); + + let all_cols_with_shifts: Vec = flatten(&[ + fixed_names.clone(), + witnesses_with_inverses.clone(), + shifted.clone(), + ]); + + ColumnGroups { + fixed: fixed_names, + witness: witnesses_with_inverses, + all_cols_without_inverses, + witnesses_without_inverses, + all_cols, + unshifted, + to_be_shifted: to_be_shifted.to_vec(), + shifted, + all_cols_with_shifts, + inverses, + } +} diff --git a/bb-pilcom/bootstrap.sh b/bb-pilcom/bootstrap.sh new file mode 100755 index 00000000000..ef8e4c9c117 --- /dev/null +++ b/bb-pilcom/bootstrap.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +cargo build --release \ No newline at end of file diff --git a/bb-pilcom/cli/Cargo.toml b/bb-pilcom/cli/Cargo.toml new file mode 100644 index 00000000000..6f717a88f5f --- /dev/null +++ b/bb-pilcom/cli/Cargo.toml @@ -0,0 +1,26 @@ + +[package] +name = "cli" +version = "0.1.0" +authors = ["Aztec Labs"] +edition = "2021" + +[[bin]] +name = "bb_pil" +path = "src/main.rs" + +[dependencies] +clap = { version = "^4.3", features = ["derive"] } +num-bigint = "0.4.3" +bb-pil-backend ={ path = "../bb-pil-backend" } + +powdr-pil-analyzer ={ 
path = "../powdr/pil-analyzer" } +powdr-number = { path = "../powdr/number" } +num-traits = "0.2.15" +num-integer = "0.1.45" +itertools = "^0.10" +log = "0.4.17" +rand = "0.8.5" +powdr-ast = { path = "../powdr/ast" } + + diff --git a/bb-pilcom/cli/README.md b/bb-pilcom/cli/README.md new file mode 100644 index 00000000000..9c6fd860593 --- /dev/null +++ b/bb-pilcom/cli/README.md @@ -0,0 +1,3 @@ +## BBERG PIL CLI +A small wrapper around powdr pil that only implements the parts of powdr required for direct pil -> bberg codegen + diff --git a/bb-pilcom/cli/src/main.rs b/bb-pilcom/cli/src/main.rs new file mode 100644 index 00000000000..37f6b4cfd41 --- /dev/null +++ b/bb-pilcom/cli/src/main.rs @@ -0,0 +1,52 @@ +use std::{io, path::Path}; + +use bb_pil_backend::vm_builder::analyzed_to_cpp; +use clap::Parser; +use powdr_ast::analyzed::{Analyzed, FunctionValueDefinition, Symbol}; +use powdr_number::Bn254Field; +use powdr_pil_analyzer::analyze_file; + +#[derive(Parser)] +#[command(name = "bb-pil-cli", author, version, about, long_about = None)] +struct Cli { + /// Input file + file: String, + + /// Output directory for the PIL file, json file and fixed and witness column data. 
+ #[arg(short, long)] + #[arg(default_value_t = String::from("."))] + output_directory: String, + + /// BBerg: Name of the output file for bberg + #[arg(long)] + name: Option, +} + +fn extract_col_name(cols: Vec<&(Symbol, Option)>) -> Vec { + // Note that function val def should be none + cols.iter() + .map(|(sym, _def)| sym.absolute_name.replace(".", "_")) + .collect() +} + +fn main() -> Result<(), io::Error> { + let args = Cli::parse(); + + let file_name = args.file; + let name = args.name; + + let analyzed: Analyzed = analyze_file(Path::new(&file_name)); + + let fixed = analyzed.constant_polys_in_source_order(); + let witness = analyzed.committed_polys_in_source_order(); + let public = analyzed.public_polys_in_source_order(); + + analyzed_to_cpp( + &analyzed, + &extract_col_name(fixed), + &extract_col_name(witness), + &extract_col_name(public), + name, + ); + Ok(()) +} diff --git a/bb-pilcom/powdr b/bb-pilcom/powdr new file mode 160000 index 00000000000..c3006c11819 --- /dev/null +++ b/bb-pilcom/powdr @@ -0,0 +1 @@ +Subproject commit c3006c11819d9b53fb183c9c12a10b83481bb631 diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index 35a9d71ba0c..6fed5c9c3b5 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -61,13 +61,6 @@ These changes were done because having the note hash exposed allowed us to not h + } ``` - -## 0.43.0 - -### [Aztec.nr] break `token.transfer()` into `transfer` and `transferFrom` -Earlier we had just one function - `transfer()` which used authwits to handle the case where a contract/user wants to transfer funds on behalf of another user. -To reduce circuit sizes and proof times, we are breaking up `transfer` and introducing a dedicated `transferFrom()` function like in the ERC20 standard. - ### [Aztec.nr] `note_getter` returns `BoundedVec` The `get_notes` and `view_notes` function no longer return an array of options (i.e. `[Option, N_NOTES]`) but instead a `BoundedVec`. 
This better conveys the useful property the old array had of having all notes collapsed at the beginning of the array, which allows for powerful optimizations and gate count reduction when setting the `options.limit` value. @@ -96,6 +89,12 @@ To further reduce gate count, you can iterate over `options.limit` instead of `m + for i in 0..options.limit { ``` +## 0.43.0 + +### [Aztec.nr] break `token.transfer()` into `transfer` and `transferFrom` +Earlier we had just one function - `transfer()` which used authwits to handle the case where a contract/user wants to transfer funds on behalf of another user. +To reduce circuit sizes and proof times, we are breaking up `transfer` and introducing a dedicated `transferFrom()` function like in the ERC20 standard. + ### [Aztec.nr] `options.limit` has to be constant The `limit` parameter in `NoteGetterOptions` and `NoteViewerOptions` is now required to be a compile-time constant. This allows performing loops over this value, which leads to reduced circuit gate counts when setting a `limit` value. diff --git a/iac/mainnet-fork/Dockerfile b/iac/mainnet-fork/Dockerfile deleted file mode 100644 index c0240ccadc8..00000000000 --- a/iac/mainnet-fork/Dockerfile +++ /dev/null @@ -1,18 +0,0 @@ -FROM ubuntu:focal - -# Install nginx -RUN echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections -RUN apt-get update && apt install -y git curl nginx - -# Copy nginx config -COPY . . 
-COPY nginx/gateway.conf /etc/nginx/gateway.conf -COPY nginx/nginx.conf /etc/nginx/nginx.conf - -# Install foundry -RUN ./scripts/install_foundry.sh -ENV PATH="./foundry/bin:${PATH}" - -# Run anvil and nginx -EXPOSE 80 -ENTRYPOINT ["sh", "-c", "./scripts/run_nginx_anvil.sh"] \ No newline at end of file diff --git a/iac/mainnet-fork/Earthfile b/iac/mainnet-fork/Earthfile new file mode 100644 index 00000000000..fb480d1801d --- /dev/null +++ b/iac/mainnet-fork/Earthfile @@ -0,0 +1,29 @@ +VERSION 0.8 + +build: + FROM ubuntu:focal + + # Install nginx + RUN echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections + RUN apt-get update && apt install -y git curl nginx + + # Copy nginx config + COPY . . + COPY nginx/gateway.conf /etc/nginx/gateway.conf + COPY nginx/nginx.conf /etc/nginx/nginx.conf + + # Install foundry + RUN ./scripts/install_foundry.sh + ENV PATH="./foundry/bin:${PATH}" + + # Expose port 80 + EXPOSE 80 + + # Set entrypoint + ENTRYPOINT ["sh", "-c", "./scripts/run_nginx_anvil.sh"] + +export-mainnet-fork: + FROM +build + ARG DIST_TAG="aztec-dev" + ARG ARCH + SAVE IMAGE --push spypsy/mainnet-fork:${DIST_TAG}${ARCH:+-$ARCH} diff --git a/iac/mainnet-fork/nginx/nginx.conf b/iac/mainnet-fork/nginx/nginx.conf index 0deef80724e..5e078c52cc8 100644 --- a/iac/mainnet-fork/nginx/nginx.conf +++ b/iac/mainnet-fork/nginx/nginx.conf @@ -10,6 +10,7 @@ http { # Basic Settings ## + client_max_body_size 20M; sendfile on; tcp_nopush on; tcp_nodelay on; diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index 2bdc6b0a51f..71d69f484a6 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -5,7 +5,7 @@ use crate::{ messaging::process_l1_to_l2_message, hash::{hash_args_array, ArgsHasher, compute_unencrypted_log_hash}, keys::constants::{NULLIFIER_INDEX, OUTGOING_INDEX, NUM_KEY_TYPES, 
sk_generators}, - note::{note_interface::NoteInterface, utils::compute_note_hash_for_insertion}, + note::note_interface::NoteInterface, oracle::{ key_validation_request::get_key_validation_request, arguments, returns::pack_returns, call_private_function::call_private_function_internal, header::get_header_at, diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr index 48c0f5bfd69..871f5fd7771 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr @@ -42,7 +42,7 @@ mod test { }; use crate::{ - note::{note_header::NoteHeader, note_interface::NoteInterface, utils::compute_note_hash_for_consumption}, + note::{note_header::NoteHeader, note_interface::NoteInterface}, event::event_interface::EventInterface, oracle::unsafe_rand::unsafe_rand, context::PrivateContext }; diff --git a/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr b/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr index 501decd69a9..4a7a3a95e94 100644 --- a/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr +++ b/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr @@ -2,8 +2,7 @@ use dep::protocol_types::grumpkin_point::GrumpkinPoint; use crate::context::{PrivateContext, PublicContext}; use crate::note::{ note_header::NoteHeader, note_interface::NoteInterface, - utils::{compute_note_hash_for_insertion, compute_note_hash_for_consumption}, - note_emission::NoteEmission + utils::{compute_inner_note_hash, compute_note_hash_for_consumption}, note_emission::NoteEmission }; use crate::oracle::notes::{notify_created_note, notify_nullified_note}; @@ -18,7 +17,7 @@ pub fn create_note( let header = NoteHeader { contract_address, storage_slot, nonce: 0, note_hash_counter }; // TODO: change this to note.set_header(header) once https://github.com/noir-lang/noir/issues/4095 is fixed Note::set_header(note, header); - let inner_note_hash = 
compute_note_hash_for_insertion(*note); + let inner_note_hash = compute_inner_note_hash(*note); // TODO: Strong typing required because of https://github.com/noir-lang/noir/issues/4088 let serialized_note: [Field; N] = Note::serialize_content(*note); @@ -46,9 +45,8 @@ pub fn create_note_hash_from_public( let contract_address = (*context).this_address(); // Public note hashes are transient, but have no side effect counters, so we just need note_hash_counter != 0 let header = NoteHeader { contract_address, storage_slot, nonce: 0, note_hash_counter: 1 }; - // TODO: change this to note.set_header(header) once https://github.com/noir-lang/noir/issues/4095 is fixed - Note::set_header(note, header); - let inner_note_hash = compute_note_hash_for_insertion(*note); + note.set_header(header); + let inner_note_hash = compute_inner_note_hash(*note); context.push_new_note_hash(inner_note_hash); } diff --git a/noir-projects/aztec-nr/aztec/src/note/utils.nr b/noir-projects/aztec-nr/aztec/src/note/utils.nr index 6c00104d611..59843201ffb 100644 --- a/noir-projects/aztec-nr/aztec/src/note/utils.nr +++ b/noir-projects/aztec-nr/aztec/src/note/utils.nr @@ -1,24 +1,14 @@ use crate::{context::PrivateContext, note::{note_header::NoteHeader, note_interface::NoteInterface}}; use dep::protocol_types::{ - address::AztecAddress, - constants::{ - GENERATOR_INDEX__OUTER_NULLIFIER, GENERATOR_INDEX__UNIQUE_NOTE_HASH, - GENERATOR_INDEX__SILOED_NOTE_HASH, GENERATOR_INDEX__INNER_NOTE_HASH + constants::GENERATOR_INDEX__INNER_NOTE_HASH, + hash::{ + pedersen_hash, compute_unique_note_hash, compute_siloed_note_hash as compute_siloed_note_hash, + compute_siloed_nullifier as compute_siloed_nullifier_from_preimage }, - hash::pedersen_hash, utils::arr_copy_slice + utils::arr_copy_slice }; -fn compute_siloed_hash(contract_address: AztecAddress, unique_note_hash: Field) -> Field { - let inputs = [contract_address.to_field(), unique_note_hash]; - pedersen_hash(inputs, GENERATOR_INDEX__SILOED_NOTE_HASH) -} - -fn 
compute_unique_hash(nonce: Field, inner_note_hash: Field) -> Field { - let inputs = [nonce, inner_note_hash]; - pedersen_hash(inputs, GENERATOR_INDEX__UNIQUE_NOTE_HASH) -} - fn compute_inner_note_hash(note: Note) -> Field where Note: NoteInterface { let header = note.get_header(); let note_hash = note.compute_note_content_hash(); @@ -29,29 +19,6 @@ fn compute_inner_note_hash(note: Note) -> Field where Note: NoteInte ) } -fn compute_unique_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { - let header = note_with_header.get_header(); - - let inner_note_hash = compute_inner_note_hash(note_with_header); - - compute_unique_hash(header.nonce, inner_note_hash) -} - -fn compute_siloed_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { - let header = note_with_header.get_header(); - - let unique_note_hash = if (header.nonce == 0) { - // If nonce is zero, that means we are reading a public note. - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) - // Remove this once notes added from public also include nonces. 
- compute_inner_note_hash(note_with_header) - } else { - compute_unique_note_hash(note_with_header) - }; - - compute_siloed_hash(header.contract_address, unique_note_hash) -} - pub fn compute_siloed_nullifier( note_with_header: Note, context: &mut PrivateContext @@ -59,22 +26,27 @@ pub fn compute_siloed_nullifier( let header = note_with_header.get_header(); let (_, inner_nullifier) = note_with_header.compute_note_hash_and_nullifier(context); - let input = [header.contract_address.to_field(), inner_nullifier]; - pedersen_hash(input, GENERATOR_INDEX__OUTER_NULLIFIER) + compute_siloed_nullifier_from_preimage(header.contract_address, inner_nullifier) } -pub fn compute_note_hash_for_insertion(note: Note) -> Field where Note: NoteInterface { - compute_inner_note_hash(note) +fn compute_note_hash_for_read_request_from_innter_and_nonce( + inner_note_hash: Field, + nonce: Field +) -> Field { + // TODO(#1386): This if-else can be nuked once we have nonces injected from public + if (nonce == 0) { + // If nonce is zero, that means we are reading a public note. + inner_note_hash + } else { + compute_unique_note_hash(nonce, inner_note_hash) + } } pub fn compute_note_hash_for_read_request(note: Note) -> Field where Note: NoteInterface { - let header = note.get_header(); + let inner_note_hash = compute_inner_note_hash(note); + let nonce = note.get_header().nonce; - if (header.nonce != 0) { - compute_unique_note_hash(note) - } else { - compute_inner_note_hash(note) - } + compute_note_hash_for_read_request_from_innter_and_nonce(inner_note_hash, nonce) } pub fn compute_note_hash_for_consumption(note: Note) -> Field where Note: NoteInterface { @@ -84,15 +56,18 @@ pub fn compute_note_hash_for_consumption(note: Note) -> Field where // 2. The note was inserted in a previous transaction, and was inserted in public // 3. 
The note was inserted in a previous transaction, and was inserted in private + let inner_note_hash = compute_inner_note_hash(note); + if (header.note_hash_counter != 0) { // If a note is transient, we just read the inner_note_hash (kernel will silo by contract address). - compute_inner_note_hash(note) + inner_note_hash } else { // If a note is not transient, that means we are reading a settled note (from tree) created in a // previous TX. So we need the siloed_note_hash which has already been hashed with // nonce and then contract address. This hash will match the existing leaf in the note hash // tree, so the kernel can just perform a membership check directly on this hash/leaf. - compute_siloed_note_hash(note) + let unique_note_hash = compute_note_hash_for_read_request_from_innter_and_nonce(inner_note_hash, header.nonce); + compute_siloed_note_hash(header.contract_address, unique_note_hash) // IMPORTANT NOTE ON REDUNDANT SILOING BY CONTRACT ADDRESS: The note hash computed above is // "siloed" by contract address. 
When a note hash is computed solely for the purpose of // nullification, it is not strictly necessary to silo the note hash before computing @@ -105,27 +80,17 @@ pub fn compute_note_hash_for_consumption(note: Note) -> Field where } pub fn compute_note_hash_and_optionally_a_nullifier( - // docs:start:compute_note_hash_and_optionally_a_nullifier_args deserialize_content: fn([Field; N]) -> T, note_header: NoteHeader, compute_nullifier: bool, - serialized_note: [Field; S] // docs:end:compute_note_hash_and_optionally_a_nullifier_args + serialized_note: [Field; S] ) -> [Field; 4] where T: NoteInterface { let mut note = deserialize_content(arr_copy_slice(serialized_note, [0; N], 0)); - // TODO: change this to note.set_header(header) once https://github.com/noir-lang/noir/issues/4095 is fixed - T::set_header((&mut note), note_header); + note.set_header(note_header); let inner_note_hash = compute_inner_note_hash(note); - - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) - // Should always be calling compute_unique_hash() once notes added from public also include nonces. 
- let unique_note_hash = if note_header.nonce != 0 { - compute_unique_hash(note_header.nonce, inner_note_hash) - } else { - inner_note_hash - }; - - let siloed_note_hash = compute_siloed_hash(note_header.contract_address, unique_note_hash); + let unique_note_hash = compute_note_hash_for_read_request_from_innter_and_nonce(inner_note_hash, note_header.nonce); + let siloed_note_hash = compute_siloed_note_hash(note_header.contract_address, unique_note_hash); let inner_nullifier = if compute_nullifier { let (_, nullifier) = note.compute_note_hash_and_nullifier_without_context(); diff --git a/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr b/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr index 142f6fd58f8..4f2800b19fc 100644 --- a/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr +++ b/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr @@ -14,7 +14,7 @@ use crate::hash::hash_args; use crate::note::{ note_header::NoteHeader, note_interface::NoteInterface, - utils::{compute_note_hash_for_insertion, compute_note_hash_for_consumption} + utils::{compute_inner_note_hash, compute_note_hash_for_consumption} }; use crate::oracle::notes::notify_created_note; @@ -188,7 +188,7 @@ impl TestEnvironment { let header = NoteHeader { contract_address, storage_slot, nonce: 0, note_hash_counter }; // TODO: change this to note.set_header(header) once https://github.com/noir-lang/noir/issues/4095 is fixed Note::set_header(note, header); - let inner_note_hash = compute_note_hash_for_insertion(*note); + let inner_note_hash = compute_inner_note_hash(*note); // TODO: Strong typing required because of https://github.com/noir-lang/noir/issues/4088 let serialized_note: [Field; N] = Note::serialize_content(*note); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr index 890d1ee3a60..561c3d1029b 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr @@ -45,20 +45,15 @@ pub fn compute_note_hash_nonce(first_nullifier: Field, note_hash_index: u32) -> ) } -fn compute_unique_note_hash(nonce: Field, note_hash: Field) -> Field { - pedersen_hash( - [ - nonce, - note_hash - ], - GENERATOR_INDEX__UNIQUE_NOTE_HASH - ) +pub fn compute_unique_note_hash(nonce: Field, inner_note_hash: Field) -> Field { + let inputs = [nonce, inner_note_hash]; + pedersen_hash(inputs, GENERATOR_INDEX__UNIQUE_NOTE_HASH) } -pub fn compute_siloed_note_hash(address: AztecAddress, unique_note_hash: Field) -> Field { +pub fn compute_siloed_note_hash(app: AztecAddress, unique_note_hash: Field) -> Field { pedersen_hash( [ - address.to_field(), + app.to_field(), unique_note_hash ], GENERATOR_INDEX__SILOED_NOTE_HASH @@ -75,10 +70,10 @@ pub fn silo_note_hash(note_hash: ScopedNoteHash, first_nullifier: Field, index: } } -pub fn compute_siloed_nullifier(address: AztecAddress, nullifier: Field) -> Field { +pub fn compute_siloed_nullifier(app: AztecAddress, nullifier: Field) -> Field { pedersen_hash( [ - address.to_field(), + app.to_field(), nullifier ], GENERATOR_INDEX__OUTER_NULLIFIER diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index f39379e20a1..82f52d6e6eb 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -161,6 +161,28 @@ aztec: CMD curl -fsSd '{\"jsonrpc\":\"2.0\",\"method\":\"pxe_getNodeInfo\",\"id\":1}' http://127.0.0.1:$port EXPOSE $port +aztec-faucet-build: + FROM +build + RUN yarn workspaces focus @aztec/aztec-faucet --production && yarn cache clean + RUN rm -rf \ + ../noir-projects \ + ../l1-contracts \ + ../barretenberg/ts/src \ + ../barretenberg/ts/dest/node-cjs \ + ../barretenberg/ts/dest/browser \ + aztec.js/dest/main.js \ + end-to-end \ + **/src \ + **/artifacts + SAVE ARTIFACT /usr/src /usr/src + +aztec-faucet: + FROM ubuntu:noble + RUN apt update && apt install nodejs curl -y 
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + COPY +aztec-faucet/usr/src /usr/src + ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/aztec-faucet/dest/bin/index.js"] + LET port=8080 + # We care about creating a slimmed down e2e image because we have to serialize it from earthly to docker for running. end-to-end-prod: FROM +build @@ -219,6 +241,18 @@ export-aztec: FROM +aztec SAVE IMAGE aztecprotocol/aztec:$EARTHLY_GIT_HASH +export-aztec-arch: + FROM +aztec + ARG DIST_TAG="latest" + ARG ARCH + SAVE IMAGE --push aztecprotocol/aztec:${DIST_TAG}${ARCH:+-$ARCH} + +export-aztec-faucet: + FROM +aztec-faucet + ARG DIST_TAG="latest" + ARG ARCH + SAVE IMAGE --push aztecprotocol/aztec-faucet:${DIST_TAG}${ARCH:+-$ARCH} + export-end-to-end: ARG EARTHLY_GIT_HASH FROM +end-to-end diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 5ccbf6266c7..da82aa420e7 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -122,7 +122,11 @@ export class AztecNodeService implements AztecNode { * @param config - The configuration to be used by the aztec node. * @returns - A fully synced Aztec Node for use in development/testing. 
*/ - public static async createAndSync(config: AztecNodeConfig) { + public static async createAndSync( + config: AztecNodeConfig, + log = createDebugLogger('aztec:node'), + storeLog = createDebugLogger('aztec:node:lmdb'), + ) { const ethereumChain = createEthereumChain(config.rpcUrl, config.apiKey); //validate that the actual chain id matches that specified in configuration if (config.chainId !== ethereumChain.chainInfo.id) { @@ -131,8 +135,6 @@ export class AztecNodeService implements AztecNode { ); } - const log = createDebugLogger('aztec:node'); - const storeLog = createDebugLogger('aztec:node:lmdb'); const store = await initStoreForRollup( AztecLmdbStore.open(config.dataDirectory, false, storeLog), config.l1Contracts.rollupAddress, diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index 3a205d1ff0b..d627d416f0a 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -234,31 +234,31 @@ resource "aws_ecs_task_definition" "aztec-node" { { "name": "ROLLUP_CONTRACT_ADDRESS", - "value": "${var.ROLLUP_CONTRACT_ADDRESS}" + "value": "${data.terraform_remote_state.l1_contracts.outputs.rollup_contract_address}" }, { "name": "INBOX_CONTRACT_ADDRESS", - "value": "${var.INBOX_CONTRACT_ADDRESS}" + "value": "${data.terraform_remote_state.l1_contracts.outputs.inbox_contract_address}" }, { "name": "OUTBOX_CONTRACT_ADDRESS", - "value": "${var.OUTBOX_CONTRACT_ADDRESS}" + "value": "${data.terraform_remote_state.l1_contracts.outputs.outbox_contract_address}" }, { "name": "REGISTRY_CONTRACT_ADDRESS", - "value": "${var.REGISTRY_CONTRACT_ADDRESS}" + "value": "${data.terraform_remote_state.l1_contracts.outputs.registry_contract_address}" }, { "name": "AVAILABILITY_ORACLE_CONTRACT_ADDRESS", - "value": "${var.AVAILABILITY_ORACLE_CONTRACT_ADDRESS}" + "value": "${data.terraform_remote_state.l1_contracts.outputs.availability_oracle_contract_address}" }, { "name": "GAS_TOKEN_CONTRACT_ADDRESS", - 
"value": "${var.GAS_TOKEN_CONTRACT_ADDRESS}" + "value": "${data.terraform_remote_state.l1_contracts.outputs.gas_token_contract_address}" }, { "name": "GAS_PORTAL_CONTRACT_ADDRESS", - "value": "${var.GAS_PORTAL_CONTRACT_ADDRESS}" + "value": "${data.terraform_remote_state.l1_contracts.outputs.gas_portal_contract_address}" }, { "name": "API_KEY", diff --git a/yarn-project/aztec/terraform/node/variables.tf b/yarn-project/aztec/terraform/node/variables.tf index 4463487e2c3..2febb315fa9 100644 --- a/yarn-project/aztec/terraform/node/variables.tf +++ b/yarn-project/aztec/terraform/node/variables.tf @@ -55,7 +55,7 @@ variable "SEQ_MIN_TX_PER_BLOCK" { variable "P2P_MIN_PEERS" { type = string - default = 50 + default = 5 } variable "P2P_MAX_PEERS" { @@ -85,5 +85,6 @@ variable "IMAGE_TAG" { } variable "FULL_IMAGE" { - type = string + type = string + default = "${var.DOCKERHUB_ACCOUNT}/aztec:${var.IMAGE_TAG}" } diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index 10db3c25b4a..2b3eda22113 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -46,6 +46,9 @@ E2E_TEST: # Run our docker compose, ending whenever sandbox ends, filtering out noisy eth_getLogs RUN docker run -e HARDWARE_CONCURRENCY=$hardware_concurrency --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG $test || $allow_fail +e2e-p2p: + DO +E2E_TEST --test=./src/e2e_p2p_network.test.ts + e2e-2-pxes: DO +E2E_TEST --test=./src/e2e_2_pxes.test.ts diff --git a/yarn-project/end-to-end/src/flakey_e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts similarity index 52% rename from yarn-project/end-to-end/src/flakey_e2e_p2p_network.test.ts rename to yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 84173febc0d..42a5f26cb8b 100644 --- a/yarn-project/end-to-end/src/flakey_e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -8,10 +8,13 @@ import { GrumpkinScalar, type SentTx, TxStatus, + 
createDebugLogger, + sleep, } from '@aztec/aztec.js'; import { type BootNodeConfig, BootstrapNode, createLibP2PPeerId } from '@aztec/p2p'; import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; +import fs from 'fs'; import { mnemonicToAccount } from 'viem/accounts'; import { MNEMONIC } from './fixtures/fixtures.js'; @@ -30,21 +33,36 @@ interface NodeContext { account: AztecAddress; } +const PEER_ID_PRIVATE_KEYS = [ + '0802122002f651fd8653925529e3baccb8489b3af4d7d9db440cbf5df4a63ff04ea69683', + '08021220c3bd886df5fe5b33376096ad0dab3d2dc86ed2a361d5fde70f24d979dc73da41', + '080212206b6567ac759db5434e79495ec7458e5e93fe479a5b80713446e0bce5439a5655', + '08021220366453668099bdacdf08fab476ee1fced6bf00ddc1223d6c2ee626e7236fb526', +]; + describe('e2e_p2p_network', () => { let config: AztecNodeConfig; let logger: DebugLogger; let teardown: () => Promise; + let bootstrapNode: BootstrapNode; + let bootstrapNodeEnr: string; beforeEach(async () => { - ({ teardown, config, logger } = await setup(1)); + ({ teardown, config, logger } = await setup(0)); + bootstrapNode = await createBootstrapNode(); + bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); }); afterEach(() => teardown()); + afterAll(() => { + for (let i = 0; i < NUM_NODES; i++) { + fs.rmSync(`./data-${i}`, { recursive: true, force: true }); + } + }); + it('should rollup txs from all peers', async () => { // create the bootstrap node for the network - const bootstrapNode = await createBootstrapNode(); - const bootstrapNodeEnr = bootstrapNode.getENR(); if (!bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); } @@ -53,14 +71,29 @@ describe('e2e_p2p_network', () => { // should be set so that the only way for rollups to be built // is if the txs are successfully gossiped around the nodes. 
const contexts: NodeContext[] = []; + const nodes: AztecNodeService[] = []; for (let i = 0; i < NUM_NODES; i++) { - const node = await createNode(i + 1 + BOOT_NODE_UDP_PORT, bootstrapNodeEnr?.encodeTxt(), i); + const node = await createNode(i + 1 + BOOT_NODE_UDP_PORT, bootstrapNodeEnr, i); + nodes.push(node); + } + + // wait a bit for peers to discover each other + await sleep(2000); + + for (const node of nodes) { const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); contexts.push(context); } // now ensure that all txs were successfully mined - await Promise.all(contexts.flatMap(context => context.txs.map(tx => tx.wait()))); + await Promise.all( + contexts.flatMap((context, i) => + context.txs.map(async (tx, j) => { + logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ), + ); // shutdown all nodes. for (const context of contexts) { @@ -70,6 +103,61 @@ describe('e2e_p2p_network', () => { await bootstrapNode.stop(); }); + it('should re-discover stored peers without bootstrap node', async () => { + const contexts: NodeContext[] = []; + const nodes: AztecNodeService[] = []; + for (let i = 0; i < NUM_NODES; i++) { + const node = await createNode(i + 1 + BOOT_NODE_UDP_PORT, bootstrapNodeEnr, i, `./data-${i}`); + nodes.push(node); + } + // wait a bit for peers to discover each other + await sleep(3000); + + // stop bootstrap node + await bootstrapNode.stop(); + + // create new nodes from datadir + const newNodes: AztecNodeService[] = []; + + // stop all nodes + for (let i = 0; i < NUM_NODES; i++) { + const node = nodes[i]; + await node.stop(); + logger.info(`Node ${i} stopped`); + await sleep(1200); + const newNode = await createNode(i + 1 + BOOT_NODE_UDP_PORT, undefined, i, `./data-${i}`); + logger.info(`Node ${i} restarted`); + newNodes.push(newNode); + // const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); + // contexts.push(context); + } + + // wait 
a bit for peers to discover each other + await sleep(2000); + + for (const node of newNodes) { + const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); + contexts.push(context); + } + + // now ensure that all txs were successfully mined + await Promise.all( + contexts.flatMap((context, i) => + context.txs.map(async (tx, j) => { + logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ), + ); + + // shutdown all nodes. + // for (const context of contexts) { + for (const context of contexts) { + await context.node.stop(); + await context.pxeService.stop(); + } + }); + const createBootstrapNode = async () => { const peerId = await createLibP2PPeerId(); const bootstrapNode = new BootstrapNode(); @@ -87,7 +175,12 @@ describe('e2e_p2p_network', () => { }; // creates a P2P enabled instance of Aztec Node Service - const createNode = async (tcpListenPort: number, bootstrapNode: string, publisherAddressIndex: number) => { + const createNode = async ( + tcpListenPort: number, + bootstrapNode: string | undefined, + publisherAddressIndex: number, + dataDirectory?: string, + ) => { // We use different L1 publisher accounts in order to avoid duplicate tx nonces. We start from // publisherAddressIndex + 1 because index 0 was already used during test environment setup. 
const hdAccount = mnemonicToAccount(MNEMONIC, { addressIndex: publisherAddressIndex + 1 }); @@ -96,38 +189,21 @@ describe('e2e_p2p_network', () => { const newConfig: AztecNodeConfig = { ...config, + peerIdPrivateKey: PEER_ID_PRIVATE_KEYS[publisherAddressIndex], udpListenAddress: `0.0.0.0:${tcpListenPort}`, tcpListenAddress: `0.0.0.0:${tcpListenPort}`, tcpAnnounceAddress: `127.0.0.1:${tcpListenPort}`, udpAnnounceAddress: `127.0.0.1:${tcpListenPort}`, - bootstrapNodes: [bootstrapNode], minTxsPerBlock: NUM_TXS_PER_BLOCK, maxTxsPerBlock: NUM_TXS_PER_BLOCK, p2pEnabled: true, p2pBlockCheckIntervalMS: 1000, p2pL2QueueSize: 1, transactionProtocol: '', + dataDirectory, + bootstrapNodes: bootstrapNode ? [bootstrapNode] : [], }; - return await AztecNodeService.createAndSync(newConfig); - }; - - // submits a set of transactions to the provided Private eXecution Environment (PXE) - const submitTxsTo = async (pxe: PXEService, account: AztecAddress, numTxs: number) => { - const txs: SentTx[] = []; - for (let i = 0; i < numTxs; i++) { - const tx = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()).deploy(); - logger.info(`Tx sent with hash ${await tx.getTxHash()}`); - const receipt = await tx.getReceipt(); - expect(receipt).toEqual( - expect.objectContaining({ - status: TxStatus.PENDING, - error: '', - }), - ); - logger.info(`Receipt received for ${await tx.getTxHash()}`); - txs.push(tx); - } - return txs; + return await AztecNodeService.createAndSync(newConfig, createDebugLogger(`aztec:node-${tcpListenPort}`)); }; // creates an instance of the PXE and submit a given number of transactions to it. 
@@ -142,7 +218,7 @@ describe('e2e_p2p_network', () => { const completeAddress = CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, Fr.random()); await pxeService.registerAccount(secretKey, completeAddress.partialAddress); - const txs = await submitTxsTo(pxeService, completeAddress.address, numTxs); + const txs = await submitTxsTo(pxeService, numTxs); return { txs, account: completeAddress.address, @@ -150,4 +226,36 @@ describe('e2e_p2p_network', () => { node, }; }; + + // submits a set of transactions to the provided Private eXecution Environment (PXE) + const submitTxsTo = async (pxe: PXEService, numTxs: number) => { + const txs: SentTx[] = []; + for (let i = 0; i < numTxs; i++) { + // const tx = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()).deploy(); + const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); + const deployMethod = await accountManager.getDeployMethod(); + await deployMethod.create({ + contractAddressSalt: accountManager.salt, + skipClassRegistration: true, + skipPublicDeployment: true, + universalDeploy: true, + }); + await deployMethod.prove({}); + const tx = deployMethod.send(); + + const txHash = await tx.getTxHash(); + + logger.info(`Tx sent with hash ${txHash}`); + const receipt = await tx.getReceipt(); + expect(receipt).toEqual( + expect.objectContaining({ + status: TxStatus.PENDING, + error: '', + }), + ); + logger.info(`Receipt received for ${txHash}`); + txs.push(tx); + } + return txs; + }; }); diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 7dbfed502b3..3536c88ee06 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -104,7 +104,7 @@ resource "aws_ecs_task_definition" "p2p-bootstrap" { container_definitions = < { - let discv5Service; let p2pService; if (config.p2pEnabled) { @@ -40,7 +39,7 @@ export const createP2PClient = async ( 
config.tcpAnnounceAddress = tcpAnnounceAddress; } else { throw new Error( - `Invalid announceTcpAddress provided: ${splitTcpAnnounceAddress}. Expected format: :`, + `Invalid announceTcpAddress provided: ${configTcpAnnounceAddress}. Expected format: :`, ); } } @@ -59,11 +58,10 @@ export const createP2PClient = async ( // Create peer discovery service const peerId = await createLibP2PPeerId(config.peerIdPrivateKey); - discv5Service = new DiscV5Service(peerId, config); - p2pService = await LibP2PService.new(config, discv5Service, peerId, txPool); + const discoveryService = new DiscV5Service(peerId, config); + p2pService = await LibP2PService.new(config, discoveryService, peerId, txPool, store); } else { p2pService = new DummyP2PService(); - discv5Service = new DummyPeerDiscoveryService(); } return new P2PClient(store, l2BlockSource, txPool, p2pService); }; diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 91c0a5561f0..6726df9aeaa 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -37,7 +37,6 @@ describe('In-Memory P2P Client', () => { start: jest.fn(), stop: jest.fn(), propagateTx: jest.fn(), - settledTxs: jest.fn(), }; blockSource = new MockBlockSource(); diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index fe3c58db602..96401b35685 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -194,7 +194,7 @@ export class P2PClient implements P2P { this.log.debug('Stopped block downloader'); await this.runningPromise; this.setCurrentState(P2PClientState.STOPPED); - this.log.info('P2P client stopped...'); + this.log.info('P2P client stopped.'); } /** @@ -278,7 +278,6 @@ export class P2PClient implements P2P { for (const block of blocks) { const txHashes = block.body.txEffects.map(txEffect => txEffect.txHash); await 
this.txPool.deleteTxs(txHashes); - this.p2pService.settledTxs(txHashes); } } diff --git a/yarn-project/p2p/src/service/discV5_service.ts b/yarn-project/p2p/src/service/discV5_service.ts index 8838c180b2f..557a431e19b 100644 --- a/yarn-project/p2p/src/service/discV5_service.ts +++ b/yarn-project/p2p/src/service/discV5_service.ts @@ -1,9 +1,8 @@ import { createDebugLogger } from '@aztec/foundation/log'; -import { RunningPromise } from '@aztec/foundation/running-promise'; import { sleep } from '@aztec/foundation/sleep'; import { Discv5, type Discv5EventEmitter } from '@chainsafe/discv5'; -import { type ENR, SignableENR } from '@chainsafe/enr'; +import { ENR, SignableENR } from '@chainsafe/enr'; import type { PeerId } from '@libp2p/interface'; import { multiaddr } from '@multiformats/multiaddr'; import EventEmitter from 'events'; @@ -14,6 +13,8 @@ import { type PeerDiscoveryService, PeerDiscoveryState } from './service.js'; export const AZTEC_ENR_KEY = 'aztec_network'; +const delayBeforeStart = 2000; // 2sec + export enum AztecENR { devnet = 0x01, testnet = 0x02, @@ -33,11 +34,12 @@ export class DiscV5Service extends EventEmitter implements PeerDiscoveryService /** This instance's ENR */ private enr: SignableENR; - private runningPromise: RunningPromise; - private currentState = PeerDiscoveryState.STOPPED; private bootstrapNodes: string[]; + private bootstrapNodePeerIds: PeerId[] = []; + + private startTime = 0; constructor(private peerId: PeerId, config: P2PConfig, private logger = createDebugLogger('aztec:discv5_service')) { super(); @@ -83,18 +85,17 @@ export class DiscV5Service extends EventEmitter implements PeerDiscoveryService const multiAddrUdp = await enr.getFullMultiaddr('udp'); this.logger.debug(`ENR multiaddr: ${multiAddrTcp?.toString()}, ${multiAddrUdp?.toString()}`); }); - - this.runningPromise = new RunningPromise(async () => { - await this.discv5.findRandomNode(); - }, config.p2pPeerCheckIntervalMS); } public async start(): Promise { + // Do this 
conversion once since it involves an async function call + this.bootstrapNodePeerIds = await Promise.all(this.bootstrapNodes.map(enr => ENR.decodeTxt(enr).peerId())); if (this.currentState === PeerDiscoveryState.RUNNING) { throw new Error('DiscV5Service already started'); } this.logger.info('Starting DiscV5'); await this.discv5.start(); + this.startTime = Date.now(); this.logger.info('DiscV5 started'); this.currentState = PeerDiscoveryState.RUNNING; @@ -110,12 +111,25 @@ export class DiscV5Service extends EventEmitter implements PeerDiscoveryService this.logger.error(`Error adding bootnode ENRs: ${e}`); } } + } + + public async runRandomNodesQuery(): Promise { + if (this.currentState !== PeerDiscoveryState.RUNNING) { + throw new Error('DiscV5Service not running'); + } // First, wait some time before starting the peer discovery // reference: https://github.com/ChainSafe/lodestar/issues/3423 - await sleep(2000); + const msSinceStart = Date.now() - this.startTime; + if (Date.now() - this.startTime <= delayBeforeStart) { + await sleep(delayBeforeStart - msSinceStart); + } - this.runningPromise.start(); + try { + await this.discv5.findRandomNode(); + } catch (err) { + this.logger.error(`Error running discV5 random node query: ${err}`); + } } public getAllPeers(): ENR[] { @@ -134,8 +148,11 @@ export class DiscV5Service extends EventEmitter implements PeerDiscoveryService return this.currentState; } + public isBootstrapPeer(peerId: PeerId): boolean { + return this.bootstrapNodePeerIds.some(node => node.equals(peerId)); + } + public async stop(): Promise { - await this.runningPromise.stop(); await this.discv5.stop(); this.currentState = PeerDiscoveryState.STOPPED; } diff --git a/yarn-project/p2p/src/service/discv5_service.test.ts b/yarn-project/p2p/src/service/discv5_service.test.ts index dd5a58b9aae..67442f0a87e 100644 --- a/yarn-project/p2p/src/service/discv5_service.test.ts +++ b/yarn-project/p2p/src/service/discv5_service.test.ts @@ -1,3 +1,5 @@ +import { sleep } from 
'@aztec/foundation/sleep'; + import { jest } from '@jest/globals'; import type { PeerId } from '@libp2p/interface'; import { SemVer } from 'semver'; @@ -8,7 +10,7 @@ import { createLibP2PPeerId } from './libp2p_service.js'; import { PeerDiscoveryState } from './service.js'; const waitForPeers = (node: DiscV5Service, expectedCount: number): Promise => { - const timeout = 5_000; + const timeout = 7_000; return new Promise((resolve, reject) => { const timeoutId = setTimeout(() => { reject(new Error(`Timeout: Failed to connect to ${expectedCount} peers within ${timeout} ms`)); @@ -67,7 +69,17 @@ describe('Discv5Service', () => { const node2 = await createNode(basePort); await node1.start(); await node2.start(); - await waitForPeers(node2, 2); + await Promise.all([ + waitForPeers(node2, 2), + (async () => { + await sleep(2000); // wait for peer discovery to be able to start + for (let i = 0; i < 5; i++) { + await node1.runRandomNodesQuery(); + await node2.runRandomNodesQuery(); + await sleep(100); + } + })(), + ]); const node1Peers = await Promise.all(node1.getAllPeers().map(async peer => (await peer.peerId()).toString())); const node2Peers = await Promise.all(node2.getAllPeers().map(async peer => (await peer.peerId()).toString())); diff --git a/yarn-project/p2p/src/service/dummy_service.ts b/yarn-project/p2p/src/service/dummy_service.ts index cd1ed8d0d41..aeeedb1f03d 100644 --- a/yarn-project/p2p/src/service/dummy_service.ts +++ b/yarn-project/p2p/src/service/dummy_service.ts @@ -1,5 +1,6 @@ -import { type Tx, type TxHash } from '@aztec/circuit-types'; +import type { Tx, TxHash } from '@aztec/circuit-types'; +import type { PeerId } from '@libp2p/interface'; import EventEmitter from 'events'; import { type P2PService, type PeerDiscoveryService, PeerDiscoveryState } from './service.js'; @@ -66,6 +67,14 @@ export class DummyPeerDiscoveryService extends EventEmitter implements PeerDisco return []; } + public runRandomNodesQuery(): Promise { + return Promise.resolve(); + } 
+ + public isBootstrapPeer(_: PeerId): boolean { + return false; + } + public getStatus(): PeerDiscoveryState { return this.currentState; } diff --git a/yarn-project/p2p/src/service/known_txs.test.ts b/yarn-project/p2p/src/service/known_txs.test.ts deleted file mode 100644 index 7c93b085320..00000000000 --- a/yarn-project/p2p/src/service/known_txs.test.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { randomTxHash } from '@aztec/circuit-types'; - -import { expect } from '@jest/globals'; -import type { Ed25519PeerId, PeerId } from '@libp2p/interface'; -import { mock } from 'jest-mock-extended'; - -import { KnownTxLookup } from './known_txs.js'; - -const createMockPeerId = (peerId: string): PeerId => { - return mock({ - toString: () => peerId, - }); -}; - -describe('Known Txs', () => { - it('Returns false when a peer has not seen a tx', () => { - const knownTxs = new KnownTxLookup(); - - const peer = createMockPeerId('Peer 1'); - const txHash = randomTxHash(); - - expect(knownTxs.hasPeerSeenTx(peer, txHash.toString())).toEqual(false); - }); - - it('Returns true when a peer has seen a tx', () => { - const knownTxs = new KnownTxLookup(); - - const peer = createMockPeerId('Peer 1'); - const peer2 = createMockPeerId('Peer 2'); - const txHash = randomTxHash(); - - knownTxs.addPeerForTx(peer, txHash.toString()); - - expect(knownTxs.hasPeerSeenTx(peer, txHash.toString())).toEqual(true); - expect(knownTxs.hasPeerSeenTx(peer2, txHash.toString())).toEqual(false); - - knownTxs.addPeerForTx(peer2, txHash.toString()); - - expect(knownTxs.hasPeerSeenTx(peer, txHash.toString())).toEqual(true); - expect(knownTxs.hasPeerSeenTx(peer2, txHash.toString())).toEqual(true); - }); -}); diff --git a/yarn-project/p2p/src/service/known_txs.ts b/yarn-project/p2p/src/service/known_txs.ts deleted file mode 100644 index d25c866aebe..00000000000 --- a/yarn-project/p2p/src/service/known_txs.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { type PeerId } from '@libp2p/interface'; - -/** - * Keeps a record of 
which Peers have 'seen' which transactions. - */ -export class KnownTxLookup { - private lookup: { [key: string]: { [key: string]: boolean } } = {}; - - constructor() {} - - /** - * Inform this lookup that a peer has 'seen' a transaction. - * @param peerId - The peerId of the peer that has 'seen' the transaction. - * @param txHash - The thHash of the 'seen' transaction. - */ - public addPeerForTx(peerId: PeerId, txHash: string) { - const peerIdAsString = peerId.toString(); - const existingLookup = this.lookup[txHash]; - if (existingLookup === undefined) { - const newLookup: { [key: string]: boolean } = {}; - newLookup[peerIdAsString] = true; - this.lookup[txHash] = newLookup; - return; - } - existingLookup[peerIdAsString] = true; - } - - /** - * Determine if a peer has 'seen' a transaction. - * @param peerId - The peerId of the peer. - * @param txHash - The thHash of the transaction. - * @returns A boolean indicating if the transaction has been 'seen' by the peer. - */ - public hasPeerSeenTx(peerId: PeerId, txHash: string) { - const existingLookup = this.lookup[txHash]; - if (existingLookup === undefined) { - return false; - } - const peerIdAsString = peerId.toString(); - return !!existingLookup[peerIdAsString]; - } - - /** - * Updates the lookup from the result of settled txs - * These txs will be cleared out of the lookup. 
- * It is possible that some txs could still be gossiped for a - * short period of time meaning they come back into this lookup - * but this should be infrequent and cause no undesirable effects - * @param txHashes - The hashes of the newly settled transactions - */ - public handleSettledTxs(txHashes: string[]) { - for (const txHash of txHashes) { - delete this.lookup[txHash]; - } - } -} diff --git a/yarn-project/p2p/src/service/libp2p_service.ts b/yarn-project/p2p/src/service/libp2p_service.ts index 6ae680020d0..5164ebfcd64 100644 --- a/yarn-project/p2p/src/service/libp2p_service.ts +++ b/yarn-project/p2p/src/service/libp2p_service.ts @@ -1,17 +1,16 @@ -import { type Tx, type TxHash } from '@aztec/circuit-types'; +import { type Tx } from '@aztec/circuit-types'; import { SerialQueue } from '@aztec/foundation/fifo'; import { createDebugLogger } from '@aztec/foundation/log'; -import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; +import { RunningPromise } from '@aztec/foundation/running-promise'; +import type { AztecKVStore } from '@aztec/kv-store'; -import { ENR } from '@chainsafe/enr'; import { type GossipsubEvents, gossipsub } from '@chainsafe/libp2p-gossipsub'; import { noise } from '@chainsafe/libp2p-noise'; import { yamux } from '@chainsafe/libp2p-yamux'; import { identify } from '@libp2p/identify'; -import type { PeerId, PubSub, Stream } from '@libp2p/interface'; +import type { PeerId, PubSub } from '@libp2p/interface'; import '@libp2p/kad-dht'; import { mplex } from '@libp2p/mplex'; -import { peerIdFromString } from '@libp2p/peer-id'; import { createFromJSON, createSecp256k1PeerId } from '@libp2p/peer-id-factory'; import { tcp } from '@libp2p/tcp'; import { type Libp2p, createLibp2p } from 'libp2p'; @@ -20,7 +19,6 @@ import { type P2PConfig } from '../config.js'; import { type TxPool } from '../tx_pool/index.js'; import { convertToMultiaddr } from '../util.js'; import { AztecDatastore } from './data_store.js'; -import { KnownTxLookup } from './known_txs.js'; 
import { PeerManager } from './peer_manager.js'; import type { P2PService, PeerDiscoveryService } from './service.js'; import { AztecTxMessageCreator, fromTxMessage } from './tx_messages.js'; @@ -30,7 +28,6 @@ export interface PubSubLibp2p extends Libp2p { pubsub: PubSub; }; } - /** * Create a libp2p peer ID from the private key if provided, otherwise creates a new random ID. * @param privateKey - Optional peer ID private key as hex string @@ -52,16 +49,14 @@ export async function createLibP2PPeerId(privateKey?: string): Promise { */ export class LibP2PService implements P2PService { private jobQueue: SerialQueue = new SerialQueue(); - private knownTxLookup: KnownTxLookup = new KnownTxLookup(); private messageCreator: AztecTxMessageCreator; private peerManager: PeerManager; + private discoveryRunningPromise?: RunningPromise; constructor( private config: P2PConfig, private node: PubSubLibp2p, private peerDiscoveryService: PeerDiscoveryService, - private protocolId: string, private txPool: TxPool, - private bootstrapPeerIds: PeerId[] = [], private logger = createDebugLogger('aztec:libp2p_service'), ) { this.messageCreator = new AztecTxMessageCreator(config.txGossipVersion); @@ -73,54 +68,42 @@ export class LibP2PService implements P2PService { * @returns An empty promise. 
*/ public async start() { + // Check if service is already started if (this.node.status === 'started') { throw new Error('P2P service already started'); } + + // Log listen & announce addresses const { tcpListenAddress, tcpAnnounceAddress } = this.config; this.logger.info(`Starting P2P node on ${tcpListenAddress}`); - if (!tcpAnnounceAddress) { throw new Error('Announce address not provided.'); } - const announceTcpMultiaddr = convertToMultiaddr(tcpAnnounceAddress, 'tcp'); - this.logger.info(`Announcing at ${announceTcpMultiaddr}`); - // handle discovered peers from external discovery service - this.peerDiscoveryService.on('peer:discovered', async (enr: ENR) => { - await this.addPeer(enr); - }); - - this.node.addEventListener('peer:connect', async evt => { - const peerId = evt.detail; - await this.handleNewConnection(peerId as PeerId); - }); - - this.node.addEventListener('peer:disconnect', async evt => { - const peerId = evt.detail; - if (this.isBootstrapPeer(peerId)) { - this.logger.info(`Disconnect from bootstrap peer ${peerId.toString()}`); - } else { - this.logger.info(`Disconnected from transaction peer ${peerId.toString()}`); - await this.peerManager.updateDiscoveryService(); - } - }); + // Start job queue, peer discovery service and libp2p node this.jobQueue.start(); await this.peerDiscoveryService.start(); await this.node.start(); this.logger.info(`Started P2P client with Peer ID ${this.node.peerId.toString()}`); - // Subscribe to standard topics by default + // Subscribe to standard GossipSub topics by default this.subscribeToTopic(this.messageCreator.getTopic()); - // add gossipsub listener + // add GossipSub listener this.node.services.pubsub.addEventListener('gossipsub:message', async e => { const { msg } = e.detail; this.logger.debug(`Received PUBSUB message.`); await this.jobQueue.put(() => this.handleNewGossipMessage(msg.topic, msg.data)); }); + + // Start running promise for peer discovery + this.discoveryRunningPromise = new RunningPromise(() => { 
+ this.peerManager.discover(); + }, this.config.p2pPeerCheckIntervalMS); + this.discoveryRunningPromise.start(); } /** @@ -130,8 +113,12 @@ export class LibP2PService implements P2PService { public async stop() { this.logger.debug('Stopping job queue...'); await this.jobQueue.end(); + this.logger.debug('Stopping running promise...'); + await this.discoveryRunningPromise?.stop(); + this.logger.debug('Stopping peer discovery service...'); + await this.peerDiscoveryService.stop(); this.logger.debug('Stopping LibP2P...'); - await this.node.stop(); + await this.stopLibP2P(); this.logger.info('LibP2P service stopped'); } @@ -146,11 +133,14 @@ export class LibP2PService implements P2PService { peerDiscoveryService: PeerDiscoveryService, peerId: PeerId, txPool: TxPool, + store: AztecKVStore, ) { - const { tcpListenAddress, minPeerCount, maxPeerCount, transactionProtocol: protocolId } = config; + const { tcpListenAddress, tcpAnnounceAddress, minPeerCount, maxPeerCount } = config; const bindAddrTcp = convertToMultiaddr(tcpListenAddress, 'tcp'); + // We know tcpAnnounceAddress cannot be null here because we set it or throw when setting up the service. + const announceAddrTcp = convertToMultiaddr(tcpAnnounceAddress!, 'tcp'); - const datastore = new AztecDatastore(AztecLmdbStore.open()); + const datastore = new AztecDatastore(store); // The autonat service seems quite problematic in that using it seems to cause a lot of attempts // to dial ephemeral ports. 
I suspect that it works better if you can get the uPNPnat service to @@ -171,10 +161,19 @@ export class LibP2PService implements P2PService { peerId, addresses: { listen: [bindAddrTcp], + announce: [announceAddrTcp], }, transports: [ tcp({ maxConnections: config.maxPeerCount, + // socket option: the maximum length of the queue of pending connections + // https://nodejs.org/dist/latest-v18.x/docs/api/net.html#serverlisten + // it's not safe if we increase this number + backlog: 5, + closeServerOnMaxConnections: { + closeAbove: maxPeerCount ?? Infinity, + listenBelow: maxPeerCount ?? Infinity, + }, }), ], datastore, @@ -200,15 +199,7 @@ export class LibP2PService implements P2PService { }, }); - // extract bootstrap node peer IDs - let bootstrapPeerIds: PeerId[] = []; - if (config.bootstrapNodes.length) { - bootstrapPeerIds = await Promise.all( - config.bootstrapNodes.map(bootnodeEnr => ENR.decodeTxt(bootnodeEnr).peerId()), - ); - } - - return new LibP2PService(config, node, peerDiscoveryService, protocolId, txPool, bootstrapPeerIds); + return new LibP2PService(config, node, peerDiscoveryService, txPool); } /** @@ -260,71 +251,31 @@ export class LibP2PService implements P2PService { void this.jobQueue.put(() => Promise.resolve(this.sendTxToPeers(tx))); } - /** - * Handles the settling of a new batch of transactions. - * @param txHashes - The hashes of the newly settled transactions. 
- */ - public settledTxs(txHashes: TxHash[]): void { - this.knownTxLookup.handleSettledTxs(txHashes.map(x => x.toString())); - } - - private async addPeer(enr: ENR) { - const peerMultiAddr = await enr.getFullMultiaddr('tcp'); - if (!peerMultiAddr) { - // No TCP address, can't connect - return; - } - const peerIdStr = peerMultiAddr.getPeerId(); - - if (!peerIdStr) { - this.logger.debug(`Peer ID not found in discovered node's multiaddr: ${peerMultiAddr}`); - return; - } - - // check if peer is already known - const peerId = peerIdFromString(peerIdStr); - const hasPeer = await this.node.peerStore.has(peerId); - - // add to peer store if not already known - if (!hasPeer) { - this.logger.info(`Discovered peer ${peerIdStr}. Adding to libp2p peer list`); - let stream: Stream | undefined; - try { - stream = await this.node.dialProtocol(peerMultiAddr, this.protocolId); - } catch (err) { - this.logger.debug(`Failed to dial peer ${peerIdStr}: ${err}`); - } finally { - if (stream) { - await stream.close(); - } - } - } - } - - private async handleNewConnection(peerId: PeerId) { - if (this.isBootstrapPeer(peerId)) { - this.logger.info(`Connected to bootstrap peer ${peerId.toString()}`); - } else { - this.logger.info(`Connected to transaction peer ${peerId.toString()}`); - await this.peerManager.updateDiscoveryService(); - } - } - private async processTxFromPeer(tx: Tx): Promise { const txHash = tx.getTxHash(); const txHashString = txHash.toString(); - this.logger.debug(`Received tx ${txHashString} from external peer.`); + this.logger.verbose(`Received tx ${txHashString} from external peer.`); await this.txPool.addTxs([tx]); } private async sendTxToPeers(tx: Tx) { const { data: txData } = this.messageCreator.createTxMessage(tx); - this.logger.debug(`Sending tx ${tx.getTxHash().toString()} to peers`); + this.logger.verbose(`Sending tx ${tx.getTxHash().toString()} to peers`); const recipientsNum = await this.publishToTopic(this.messageCreator.getTopic(), txData); - 
this.logger.debug(`Sent tx ${tx.getTxHash().toString()} to ${recipientsNum} peers`); + this.logger.verbose(`Sent tx ${tx.getTxHash().toString()} to ${recipientsNum} peers`); } - private isBootstrapPeer(peer: PeerId) { - return this.bootstrapPeerIds.some(bootstrapPeer => bootstrapPeer.equals(peer)); + // Libp2p seems to hang sometimes if new peers are initiating connections. + private async stopLibP2P() { + const TIMEOUT_MS = 5000; // 5 seconds timeout + const timeout = new Promise((resolve, reject) => { + setTimeout(() => reject(new Error('Timeout during libp2p.stop()')), TIMEOUT_MS); + }); + try { + await Promise.race([this.node.stop(), timeout]); + this.logger.debug('Libp2p stopped'); + } catch (error) { + this.logger.error('Error during stop or timeout:', error); + } } } diff --git a/yarn-project/p2p/src/service/peer_manager.ts b/yarn-project/p2p/src/service/peer_manager.ts index 9e2993103d9..c81dab40124 100644 --- a/yarn-project/p2p/src/service/peer_manager.ts +++ b/yarn-project/p2p/src/service/peer_manager.ts @@ -1,26 +1,201 @@ import { createDebugLogger } from '@aztec/foundation/log'; +import { type ENR } from '@chainsafe/enr'; +import { type PeerId } from '@libp2p/interface'; +import { type Multiaddr } from '@multiformats/multiaddr'; import { type Libp2p } from 'libp2p'; import { type P2PConfig } from '../config.js'; -import { type PeerDiscoveryService, PeerDiscoveryState } from './service.js'; +import { type PeerDiscoveryService } from './service.js'; + +const MAX_DIAL_ATTEMPTS = 3; +const MAX_CACHED_PEERS = 100; + +type CachedPeer = { + peerId: PeerId; + enr: ENR; + multiaddrTcp: Multiaddr; + dialAttempts: number; +}; export class PeerManager { + private cachedPeers: Map = new Map(); constructor( private libP2PNode: Libp2p, - private discV5Node: PeerDiscoveryService, + private peerDiscoveryService: PeerDiscoveryService, private config: P2PConfig, private logger = createDebugLogger('aztec:p2p:peer_manager'), - ) {} - - async updateDiscoveryService() { - 
const peerCount = this.libP2PNode.getPeers().length; - if (peerCount >= this.config.maxPeerCount && this.discV5Node.getStatus() === PeerDiscoveryState.RUNNING) { - this.logger.debug('Max peer count reached, stopping discovery service'); - await this.discV5Node.stop(); - } else if (peerCount <= this.config.minPeerCount && this.discV5Node.getStatus() === PeerDiscoveryState.STOPPED) { - this.logger.debug('Min peer count reached, starting discovery service'); - await this.discV5Node.start(); + ) { + // Handle new established connections + this.libP2PNode.addEventListener('peer:connect', evt => { + const peerId = evt.detail; + if (this.peerDiscoveryService.isBootstrapPeer(peerId)) { + this.logger.debug(`Connected to bootstrap peer ${peerId.toString()}`); + } else { + this.logger.debug(`Connected to transaction peer ${peerId.toString()}`); + } + }); + + // Handle lost connections + this.libP2PNode.addEventListener('peer:disconnect', evt => { + const peerId = evt.detail; + if (this.peerDiscoveryService.isBootstrapPeer(peerId)) { + this.logger.debug(`Disconnected from bootstrap peer ${peerId.toString()}`); + } else { + this.logger.debug(`Disconnected from transaction peer ${peerId.toString()}`); + } + }); + + // Handle Discovered peers + this.peerDiscoveryService.on('peer:discovered', async (enr: ENR) => { + await this.handleDiscoveredPeer(enr); + }); + } + + /** + * Discovers peers. 
+ */ + public discover() { + // Get current connections + const connections = this.libP2PNode.getConnections(); + + // Calculate how many connections we're looking to make + const peersToConnect = this.config.maxPeerCount - connections.length; + + this.logger.debug( + `Connections: ${connections.length}, Peers to connect: ${peersToConnect}, maxPeerCount: ${this.config.maxPeerCount}, cachedPeers: ${this.cachedPeers.size}`, + ); + + // Exit if no peers to connect + if (peersToConnect <= 0) { + return; + } + + const cachedPeersToDial: CachedPeer[] = []; + + const pendingDials = new Set( + this.libP2PNode + .getDialQueue() + .map(pendingDial => pendingDial.peerId?.toString()) + .filter(Boolean) as string[], + ); + + for (const [id, peerData] of this.cachedPeers.entries()) { + // if already dialling or connected to, remove from cache + if (pendingDials.has(id) || connections.some(conn => conn.remotePeer.equals(peerData.peerId))) { + this.cachedPeers.delete(id); + } else { + // cachedPeersToDial.set(id, enr); + cachedPeersToDial.push(peerData); + } + } + + // reverse to dial older entries first + cachedPeersToDial.reverse(); + + for (const peer of cachedPeersToDial) { + this.cachedPeers.delete(peer.peerId.toString()); + void this.dialPeer(peer); + } + + // if we need more peers, start randomNodesQuery + if (peersToConnect > 0) { + this.logger.debug('Running random nodes query'); + void this.peerDiscoveryService.runRandomNodesQuery(); + } + } + + /** + * Handles a discovered peer. + * @param enr - The discovered peer's ENR. 
+ */ + private async handleDiscoveredPeer(enr: ENR) { + // TODO: Will be handling peer scoring here + + // check if peer is already connected + const [peerId, multiaddrTcp] = await Promise.all([enr.peerId(), enr.getFullMultiaddr('tcp')]); + + this.logger.debug(`Handling discovered peer ${peerId.toString()}, ${multiaddrTcp?.toString()}`); + + // throw if no tcp addr in multiaddr + if (!multiaddrTcp) { + this.logger.debug(`No TCP address in discovered node's multiaddr: ${enr.toString()}`); + return; + } + const connections = this.libP2PNode.getConnections(); + if (connections.some(conn => conn.remotePeer.equals(peerId))) { + this.logger.debug(`Already connected to peer ${peerId.toString()}`); + return; + } + + // check if peer is already in cache + const id = peerId.toString(); + if (this.cachedPeers.has(id)) { + this.logger.debug(`Already in cache ${id}`); + return; + } + + // create cached peer object + const cachedPeer: CachedPeer = { + peerId, + enr, + multiaddrTcp, + dialAttempts: 0, + }; + + // Determine if we should dial immediately or not + if (this.shouldDialPeer()) { + this.logger.debug(`Dialing peer ${id}`); + void this.dialPeer(cachedPeer); + } else { + this.logger.debug(`Caching peer ${id}`); + this.cachedPeers.set(id, cachedPeer); + // Prune set of cached peers + this.pruneCachedPeers(); + } + } + + async dialPeer(peer: CachedPeer) { + const id = peer.peerId.toString(); + await this.libP2PNode.peerStore.merge(peer.peerId, { multiaddrs: [peer.multiaddrTcp] }); + + this.logger.debug(`Dialing peer ${id}`); + try { + await this.libP2PNode.dial(peer.multiaddrTcp); + } catch { + this.logger.debug(`Failed to dial peer ${id}`); + peer.dialAttempts++; + if (peer.dialAttempts < MAX_DIAL_ATTEMPTS) { + this.cachedPeers.set(id, peer); + } else { + this.cachedPeers.delete(id); + } + } + } + + private shouldDialPeer(): boolean { + const connections = this.libP2PNode.getConnections().length; + this.logger.debug(`Connections: ${connections}, maxPeerCount: 
${this.config.maxPeerCount}`); + if (connections >= this.config.maxPeerCount) { + this.logger.debug('Not dialing peer, maxPeerCount reached'); + return false; + } + return true; + } + + private pruneCachedPeers() { + let peersToDelete = this.cachedPeers.size - MAX_CACHED_PEERS; + if (peersToDelete <= 0) { + return; + } + + // Remove the oldest peers + for (const key of this.cachedPeers.keys()) { + this.cachedPeers.delete(key); + peersToDelete--; + if (peersToDelete <= 0) { + break; + } } } } diff --git a/yarn-project/p2p/src/service/service.ts b/yarn-project/p2p/src/service/service.ts index 5d3389af54d..f9933dd3b34 100644 --- a/yarn-project/p2p/src/service/service.ts +++ b/yarn-project/p2p/src/service/service.ts @@ -1,6 +1,7 @@ -import type { Tx, TxHash } from '@aztec/circuit-types'; +import type { Tx } from '@aztec/circuit-types'; import type { ENR } from '@chainsafe/enr'; +import type { PeerId } from '@libp2p/interface'; import type EventEmitter from 'events'; export enum PeerDiscoveryState { @@ -29,12 +30,6 @@ export interface P2PService { * @param tx - The transaction to be propagated. */ propagateTx(tx: Tx): void; - - /** - * Called upon receipt of settled transactions. - * @param txHashes - The hashes of the settled transactions. - */ - settledTxs(txHashes: TxHash[]): void; } /** @@ -57,6 +52,18 @@ export interface PeerDiscoveryService extends EventEmitter { */ getAllPeers(): ENR[]; + /** + * Runs findRandomNode query. + */ + runRandomNodesQuery(): Promise; + + /** + * Checks if the given peer is a bootstrap peer. + * @param peerId - The peer ID to check. + * @returns True if the peer is a bootstrap peer. + */ + isBootstrapPeer(peerId: PeerId): boolean; + /** * Event emitted when a new peer is discovered. 
*/ From d3b6a297680cdfc4f2bbd02bb18b091cbbe2fcaf Mon Sep 17 00:00:00 2001 From: spypsy Date: Fri, 21 Jun 2024 07:42:16 +0000 Subject: [PATCH 02/21] fix FULL_IMAGE var --- yarn-project/aztec/terraform/node/main.tf | 4 ++-- yarn-project/aztec/terraform/node/variables.tf | 9 --------- yarn-project/p2p-bootstrap/terraform/main.tf | 2 +- yarn-project/p2p-bootstrap/terraform/variables.tf | 5 ----- 4 files changed, 3 insertions(+), 17 deletions(-) diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index d627d416f0a..9a3627e9b69 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -165,7 +165,7 @@ resource "aws_ecs_task_definition" "aztec-node" { [ { "name": "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}", - "image": "${var.FULL_IMAGE}", + "image": "${var.DOCKERHUB_ACCOUNT}/aztec:${var.DEPLOY_TAG}", "command": ["start", "--node", "--archiver", "--sequencer", "--prover"], "essential": true, "memoryReservation": 3776, @@ -591,7 +591,7 @@ resource "aws_ecs_task_definition" "aztec-proving-agent" { [ { "name": "${var.DEPLOY_TAG}-aztec-proving-agent-group-${count.index + 1}", - "image": "${var.FULL_IMAGE}", + "image": "${var.DOCKERHUB_ACCOUNT}/aztec:${var.DEPLOY_TAG}", "command": ["start", "--prover"], "essential": true, "memoryReservation": 98304, diff --git a/yarn-project/aztec/terraform/node/variables.tf b/yarn-project/aztec/terraform/node/variables.tf index 2febb315fa9..fc2ee4e2f86 100644 --- a/yarn-project/aztec/terraform/node/variables.tf +++ b/yarn-project/aztec/terraform/node/variables.tf @@ -79,12 +79,3 @@ variable "PROVING_ENABLED" { type = bool default = true } - -variable "IMAGE_TAG" { - type = string -} - -variable "FULL_IMAGE" { - type = string - default = "${var.DOCKERHUB_ACCOUNT}/aztec:${var.IMAGE_TAG}" -} diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 3536c88ee06..63a7b079ac0 100644 --- 
a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -104,7 +104,7 @@ resource "aws_ecs_task_definition" "p2p-bootstrap" { container_definitions = < Date: Fri, 21 Jun 2024 12:16:45 +0000 Subject: [PATCH 03/21] fix missing vars --- .../aztec/terraform/node/variables.tf | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/yarn-project/aztec/terraform/node/variables.tf b/yarn-project/aztec/terraform/node/variables.tf index fc2ee4e2f86..4090d722f15 100644 --- a/yarn-project/aztec/terraform/node/variables.tf +++ b/yarn-project/aztec/terraform/node/variables.tf @@ -64,7 +64,18 @@ variable "P2P_MAX_PEERS" { } variable "P2P_ENABLED" { - type = bool + type = bool + default = true +} + +variable "PROVING_ENABLED" { + type = bool + default = true +} + +variable "AGENTS_PER_SEQUENCER" { + type = string + default = 4 } variable "AVAILABILITY_ORACLE_CONTRACT_ADDRESS" { type = string } @@ -74,8 +85,4 @@ variable "INBOX_CONTRACT_ADDRESS" { type = string } variable "OUTBOX_CONTRACT_ADDRESS" { type = string } variable "GAS_TOKEN_CONTRACT_ADDRESS" { type = string } variable "GAS_PORTAL_CONTRACT_ADDRESS" { type = string } -variable "AGENTS_PER_SEQUENCER" { type = string } -variable "PROVING_ENABLED" { - type = bool - default = true -} + From 1a45de71320f129a3339206c76d1c748ecc7443a Mon Sep 17 00:00:00 2001 From: spypsy Date: Fri, 21 Jun 2024 12:45:36 +0000 Subject: [PATCH 04/21] rm contract address vars --- yarn-project/aztec/terraform/node/variables.tf | 9 --------- 1 file changed, 9 deletions(-) diff --git a/yarn-project/aztec/terraform/node/variables.tf b/yarn-project/aztec/terraform/node/variables.tf index 4090d722f15..f761cfee2e7 100644 --- a/yarn-project/aztec/terraform/node/variables.tf +++ b/yarn-project/aztec/terraform/node/variables.tf @@ -77,12 +77,3 @@ variable "AGENTS_PER_SEQUENCER" { type = string default = 4 } - -variable "AVAILABILITY_ORACLE_CONTRACT_ADDRESS" { type = string } -variable 
"ROLLUP_CONTRACT_ADDRESS" { type = string } -variable "REGISTRY_CONTRACT_ADDRESS" { type = string } -variable "INBOX_CONTRACT_ADDRESS" { type = string } -variable "OUTBOX_CONTRACT_ADDRESS" { type = string } -variable "GAS_TOKEN_CONTRACT_ADDRESS" { type = string } -variable "GAS_PORTAL_CONTRACT_ADDRESS" { type = string } - From 368cb6ea1b85fd1ab4921d445322de9b3ec1b81a Mon Sep 17 00:00:00 2001 From: spypsy Date: Fri, 21 Jun 2024 14:06:47 +0000 Subject: [PATCH 05/21] fix deploy p2p --- .github/workflows/devnet-deploys.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index a2b09291c1e..47d2c6f5b6f 100644 --- a/.github/workflows/devnet-deploys.yml +++ b/.github/workflows/devnet-deploys.yml @@ -57,9 +57,9 @@ jobs: aws-region: us-west-2 - name: Deploy Bootstrap Nodes - working-directory: ./yarn-project/aztec/terraform/node + working-directory: ./yarn-project/p2p-bootstrap/terraform run: | - terraform init -input=false -backend-config="key=devnet/aztec-node" + terraform init -input=false -backend-config="key=devnet/p2p-bootstrap" terraform apply -input=false -auto-approve - name: Deploy Aztec Nodes From d38fe6ef205cab1d5816312b73550f60f7aa4477 Mon Sep 17 00:00:00 2001 From: spypsy Date: Mon, 24 Jun 2024 08:24:08 +0000 Subject: [PATCH 06/21] fix ecs name --- yarn-project/p2p-bootstrap/terraform/main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 63a7b079ac0..7dbfed502b3 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -104,7 +104,7 @@ resource "aws_ecs_task_definition" "p2p-bootstrap" { container_definitions = < Date: Mon, 24 Jun 2024 12:34:05 +0000 Subject: [PATCH 07/21] hardcode fork url --- yarn-project/ethereum/src/testnet.ts | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) 
diff --git a/yarn-project/ethereum/src/testnet.ts b/yarn-project/ethereum/src/testnet.ts index c6e28871a3e..6b28b99bc8b 100644 --- a/yarn-project/ethereum/src/testnet.ts +++ b/yarn-project/ethereum/src/testnet.ts @@ -2,7 +2,9 @@ import { type Chain } from 'viem'; import { type EthereumChain } from './ethereum_chain.js'; -const { DEPLOY_TAG = 'aztec-dev', CHAIN_ID = 31337 } = process.env; +// TODO: restore DEPLOY_TAG +// Temporarily hardcoding DEPLOY_TAG to 'aztec-dev' until mainnet fork is also deployed via devnet flow +const { /* DEPLOY_TAG = 'aztec-dev', */ CHAIN_ID = 31337 } = process.env; export const createTestnetChain = (apiKey: string) => { const chain: Chain = { @@ -16,10 +18,12 @@ export const createTestnetChain = (apiKey: string) => { }, rpcUrls: { default: { - http: [`https://${DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${apiKey}`], + // http: [`https://${DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${apiKey}`], + http: [`https://aztec-dev-mainnet-fork.aztec.network:8545/${apiKey}`], }, public: { - http: [`https://${DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${apiKey}`], + // http: [`https://${DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${apiKey}`], + http: [`https://aztec-dev-mainnet-fork.aztec.network:8545/${apiKey}`], }, }, }; From 80253eb34a0461cc6b7ac985f9d021666b8fdf22 Mon Sep 17 00:00:00 2001 From: spypsy Date: Mon, 24 Jun 2024 13:53:43 +0000 Subject: [PATCH 08/21] force new deployments --- yarn-project/aztec/terraform/node/main.tf | 1 + yarn-project/p2p-bootstrap/terraform/main.tf | 1 + 2 files changed, 2 insertions(+) diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index 9a3627e9b69..251d0d279c9 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -357,6 +357,7 @@ resource "aws_ecs_service" "aztec-node" { deployment_maximum_percent = 100 deployment_minimum_healthy_percent = 0 platform_version = "1.4.0" + force_new_deployment = true 
network_configuration { diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 7dbfed502b3..30a9520f63c 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -174,6 +174,7 @@ resource "aws_ecs_service" "p2p-bootstrap" { deployment_maximum_percent = 100 deployment_minimum_healthy_percent = 0 platform_version = "1.4.0" + force_new_deployment = true network_configuration { subnets = [ From 16fe39becacf08bc5070ff5bd9bb6045e80ab3ce Mon Sep 17 00:00:00 2001 From: spypsy Date: Mon, 24 Jun 2024 14:26:05 +0000 Subject: [PATCH 09/21] hardcode aztec-dev in tf file --- yarn-project/aztec/terraform/node/main.tf | 2 +- yarn-project/ethereum/src/testnet.ts | 10 +++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index 251d0d279c9..2af04bd4731 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -205,7 +205,7 @@ resource "aws_ecs_task_definition" "aztec-node" { }, { "name": "ETHEREUM_HOST", - "value": "https://${var.DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${var.API_KEY}" + "value": "https://aztec-dev-mainnet-fork.aztec.network:8545/${var.API_KEY}" }, { "name": "DATA_DIRECTORY", diff --git a/yarn-project/ethereum/src/testnet.ts b/yarn-project/ethereum/src/testnet.ts index 6b28b99bc8b..c6e28871a3e 100644 --- a/yarn-project/ethereum/src/testnet.ts +++ b/yarn-project/ethereum/src/testnet.ts @@ -2,9 +2,7 @@ import { type Chain } from 'viem'; import { type EthereumChain } from './ethereum_chain.js'; -// TODO: restore DEPLOY_TAG -// Temporarily hardcoding DEPLOY_TAG to 'aztec-dev' until mainnet fork is also deployed via devnet flow -const { /* DEPLOY_TAG = 'aztec-dev', */ CHAIN_ID = 31337 } = process.env; +const { DEPLOY_TAG = 'aztec-dev', CHAIN_ID = 31337 } = process.env; export const createTestnetChain = (apiKey: 
string) => { const chain: Chain = { @@ -18,12 +16,10 @@ export const createTestnetChain = (apiKey: string) => { }, rpcUrls: { default: { - // http: [`https://${DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${apiKey}`], - http: [`https://aztec-dev-mainnet-fork.aztec.network:8545/${apiKey}`], + http: [`https://${DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${apiKey}`], }, public: { - // http: [`https://${DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${apiKey}`], - http: [`https://aztec-dev-mainnet-fork.aztec.network:8545/${apiKey}`], + http: [`https://${DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${apiKey}`], }, }, }; From 4ed2e5b4b017c59aca3b8a18aeb70c169c344f60 Mon Sep 17 00:00:00 2001 From: spypsy Date: Tue, 25 Jun 2024 13:38:36 +0000 Subject: [PATCH 10/21] separate node - prover terraforms --- .github/workflows/devnet-deploys.yml | 6 + yarn-project/aztec/terraform/node/main.tf | 314 +----------------- yarn-project/aztec/terraform/prover/main.tf | 247 ++++++++++++++ .../aztec/terraform/prover/variables.tf | 17 + 4 files changed, 274 insertions(+), 310 deletions(-) create mode 100644 yarn-project/aztec/terraform/prover/main.tf create mode 100644 yarn-project/aztec/terraform/prover/variables.tf diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index 47d2c6f5b6f..6ce952dec31 100644 --- a/.github/workflows/devnet-deploys.yml +++ b/.github/workflows/devnet-deploys.yml @@ -67,3 +67,9 @@ jobs: run: | terraform init -input=false -backend-config="key=devnet/aztec-node" terraform apply -input=false -auto-approve + + - name: Deploy Provers + working-directory: ./yarn-project/aztec/terraform/prover + run: | + terraform init -input=false -backend-config="key=devnet/prover" + terraform apply -input=false -auto-approve diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index 2af04bd4731..a1a52ffeb41 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ 
b/yarn-project/aztec/terraform/node/main.tf @@ -62,6 +62,10 @@ locals { agents_per_sequencer = var.AGENTS_PER_SEQUENCER } +output "node_count" { + value = local.node_count +} + resource "aws_cloudwatch_log_group" "aztec-node-log-group" { count = local.node_count name = "/fargate/service/${var.DEPLOY_TAG}/aztec-node-${count.index + 1}" @@ -115,20 +119,6 @@ resource "aws_efs_file_system" "node_data_store" { } } -# resource "aws_efs_mount_target" "private_az1" { -# count = local.node_count -# file_system_id = aws_efs_file_system.node_data_store[count.index].id -# subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az1_private_id -# security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] -# } - -# resource "aws_efs_mount_target" "private_az2" { -# count = local.node_count -# file_system_id = aws_efs_file_system.node_data_store[count.index].id -# subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az2_private_id -# security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] -# } - resource "aws_efs_mount_target" "public_az1" { count = local.node_count file_system_id = aws_efs_file_system.node_data_store[count.index].id @@ -374,19 +364,6 @@ resource "aws_ecs_service" "aztec-node" { container_port = 80 } - - # load_balancer { - # target_group_arn = aws_lb_target_group.aztec-node-tcp[count.index].arn - # container_name = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" - # container_port = var.NODE_P2P_TCP_PORT + count.index - # } - - # load_balancer { - # target_group_arn = aws_lb_target_group.aztec-node-udp[count.index].arn - # container_name = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" - # container_port = var.NODE_P2P_UDP_PORT + count.index - # } - service_registries { registry_arn = aws_service_discovery_service.aztec-node[count.index].arn container_name = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" @@ -437,23 +414,6 @@ resource "aws_lb_listener_rule" "api" { } 
} -# resource "aws_lb_target_group" "aztec-node-tcp" { -# count = local.node_count -# name = "${var.DEPLOY_TAG}-node-${count.index + 1}-p2p-tcp-target" -# port = var.NODE_P2P_TCP_PORT + count.index -# protocol = "TCP" -# target_type = "ip" -# vpc_id = data.terraform_remote_state.setup_iac.outputs.vpc_id - -# health_check { -# protocol = "TCP" -# interval = 10 -# healthy_threshold = 2 -# unhealthy_threshold = 2 -# port = var.NODE_P2P_TCP_PORT + count.index -# } -# } - resource "aws_security_group_rule" "allow-node-tcp-in" { count = local.node_count type = "ingress" @@ -474,40 +434,6 @@ resource "aws_security_group_rule" "allow-node-tcp-out" { security_group_id = data.terraform_remote_state.aztec-network_iac.outputs.p2p_security_group_id } -# resource "aws_lb_listener" "aztec-node-tcp-listener" { -# count = local.node_count -# load_balancer_arn = data.terraform_remote_state.aztec-network_iac.outputs.nlb_arn -# port = var.NODE_P2P_TCP_PORT + count.index -# protocol = "TCP" - -# tags = { -# name = "aztec-node-${count.index}-tcp-listener" -# } - -# default_action { -# type = "forward" -# target_group_arn = aws_lb_target_group.aztec-node-tcp[count.index].arn -# } -# } - - -# resource "aws_lb_target_group" "aztec-node-udp" { -# count = local.node_count -# name = "${var.DEPLOY_TAG}-node-${count.index + 1}-p2p-udp-target" -# port = var.NODE_P2P_UDP_PORT + count.index -# protocol = "UDP" -# target_type = "ip" -# vpc_id = data.terraform_remote_state.setup_iac.outputs.vpc_id - -# health_check { -# protocol = "TCP" -# interval = 10 -# healthy_threshold = 2 -# unhealthy_threshold = 2 -# port = var.NODE_P2P_TCP_PORT + count.index -# } -# } - resource "aws_security_group_rule" "allow-node-udp-in" { type = "ingress" from_port = var.NODE_P2P_UDP_PORT @@ -525,235 +451,3 @@ resource "aws_security_group_rule" "allow-node-udp-out" { cidr_blocks = ["0.0.0.0/0"] security_group_id = data.terraform_remote_state.aztec-network_iac.outputs.p2p_security_group_id } - -# resource 
"aws_lb_listener" "aztec-node-udp-listener" { -# count = local.node_count -# load_balancer_arn = data.terraform_remote_state.aztec-network_iac.outputs.nlb_arn -# port = var.NODE_P2P_UDP_PORT + count.index -# protocol = "UDP" - -# tags = { -# name = "aztec-node-${count.index}-udp-listener" -# } - -# default_action { -# type = "forward" -# target_group_arn = aws_lb_target_group.aztec-node-udp[count.index].arn -# } -# } - - - -// Configuration for proving agents - -resource "aws_cloudwatch_log_group" "aztec-proving-agent-log-group" { - count = local.node_count - name = "/fargate/service/${var.DEPLOY_TAG}/aztec-proving-agent-group-${count.index + 1}" - retention_in_days = 14 -} - -resource "aws_service_discovery_service" "aztec-proving-agent" { - count = local.node_count - name = "${var.DEPLOY_TAG}-aztec-proving-agent-group-${count.index + 1}" - - health_check_custom_config { - failure_threshold = 1 - } - dns_config { - namespace_id = data.terraform_remote_state.setup_iac.outputs.local_service_discovery_id - dns_records { - ttl = 60 - type = "A" - } - dns_records { - ttl = 60 - type = "SRV" - } - routing_policy = "MULTIVALUE" - } - # Terraform just fails if this resource changes and you have registered instances. - provisioner "local-exec" { - when = destroy - command = "${path.module}/servicediscovery-drain.sh ${self.id}" - } -} - -# Define task definitions for each node. 
-resource "aws_ecs_task_definition" "aztec-proving-agent" { - count = local.node_count - family = "${var.DEPLOY_TAG}-aztec-proving-agent-group-${count.index + 1}" - requires_compatibilities = ["FARGATE"] - network_mode = "awsvpc" - cpu = "16384" - memory = "98304" - execution_role_arn = data.terraform_remote_state.setup_iac.outputs.ecs_task_execution_role_arn - task_role_arn = data.terraform_remote_state.aztec2_iac.outputs.cloudwatch_logging_ecs_role_arn - container_definitions = < Date: Tue, 25 Jun 2024 13:51:38 +0000 Subject: [PATCH 11/21] add data resources --- yarn-project/aztec/terraform/prover/main.tf | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/yarn-project/aztec/terraform/prover/main.tf b/yarn-project/aztec/terraform/prover/main.tf index bcd684aa793..d7745fc09d1 100644 --- a/yarn-project/aztec/terraform/prover/main.tf +++ b/yarn-project/aztec/terraform/prover/main.tf @@ -11,6 +11,24 @@ terraform { } } +data "terraform_remote_state" "setup_iac" { + backend = "s3" + config = { + bucket = "aztec-terraform" + key = "setup/setup-iac" + region = "eu-west-2" + } +} + +data "terraform_remote_state" "aztec2_iac" { + backend = "s3" + config = { + bucket = "aztec-terraform" + key = "aztec2/iac" + region = "eu-west-2" + } +} + data "terraform_remote_state" "aztec-network_iac" { backend = "s3" config = { From 7e20a58ec52d65f4f7d54875cc48d472cf6e17d3 Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 26 Jun 2024 08:22:51 +0000 Subject: [PATCH 12/21] add provider block --- yarn-project/aztec/terraform/prover/main.tf | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/yarn-project/aztec/terraform/prover/main.tf b/yarn-project/aztec/terraform/prover/main.tf index d7745fc09d1..a9034ad7bc7 100644 --- a/yarn-project/aztec/terraform/prover/main.tf +++ b/yarn-project/aztec/terraform/prover/main.tf @@ -11,6 +11,11 @@ terraform { } } +# Define provider and region +provider "aws" { + region = "eu-west-2" +} + data "terraform_remote_state" "setup_iac" { 
backend = "s3" config = { From b88a3eaf316ca2d1487bb92a59a52b45ec6ea42d Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 26 Jun 2024 14:04:13 +0000 Subject: [PATCH 13/21] merge with master --- .circleci/config.yml | 10 - .devcontainer/scripts/onCreateCommand.sh | 4 +- .devcontainer/scripts/postAttachCommand.sh | 2 +- .github/workflows/devnet-deploys.yml | 2 +- .github/workflows/publish-docs.yml | 39 + .github/workflows/vm_full_tests.yml | 7 +- CODEOWNERS | 36 +- barretenberg/.gitrepo | 4 +- .../gen_inner_proof_inputs_ultra_honk.sh | 2 +- barretenberg/cpp/pil/avm/binary.pil | 4 +- .../cpp/pil/avm/{ => fixed}/byte_lookup.pil | 1 - barretenberg/cpp/pil/avm/{ => fixed}/gas.pil | 7 +- barretenberg/cpp/pil/avm/fixed/powers.pil | 9 + barretenberg/cpp/pil/avm/main.pil | 15 +- barretenberg/cpp/pil/avm/mem.pil | 8 +- .../benchmark/ipa_bench/ipa.bench.cpp | 2 +- .../relations_bench/relations.bench.cpp | 6 +- .../ultra_bench/ultra_honk_rounds.bench.cpp | 2 +- .../ultra_circuit_builder.test.cpp | 17 + .../commitment_schemes/ipa/ipa.fuzzer.cpp | 7 +- .../commitment_schemes/ipa/ipa.hpp | 12 +- .../commitment_schemes/ipa/ipa.test.cpp | 28 +- .../commitment_schemes/kzg/kzg.hpp | 14 +- .../commitment_schemes/kzg/kzg.test.cpp | 18 +- .../commitment_schemes/shplonk/shplonk.hpp | 104 +- .../shplonk/shplonk.test.cpp | 29 +- .../commitment_schemes/verification_key.hpp | 4 +- .../zeromorph/zeromorph.hpp | 172 +- .../zeromorph/zeromorph.test.cpp | 336 +-- .../eccvm/eccvm_composer.test.cpp | 6 +- .../src/barretenberg/eccvm/eccvm_flavor.hpp | 60 +- .../src/barretenberg/eccvm/eccvm_prover.cpp | 90 +- .../src/barretenberg/eccvm/eccvm_prover.hpp | 2 +- .../eccvm/eccvm_transcript.test.cpp | 22 +- .../src/barretenberg/eccvm/eccvm_verifier.cpp | 103 +- .../eccvm_recursive_verifier.cpp | 107 +- .../eccvm_recursive_verifier.test.cpp | 1 - .../verifier_commitment_key.hpp | 6 +- .../verifier_commitment_key.test.cpp | 2 +- .../plonk/composer/composer_lib.hpp | 75 + .../plonk_honk_shared/CMakeLists.txt 
| 2 +- .../composer/composer_lib.hpp | 90 +- .../composer/composer_lib.test.cpp | 75 +- .../protogalaxy/decider_verifier.cpp | 17 +- .../protogalaxy/protogalaxy.test.cpp | 114 +- .../protogalaxy/protogalaxy_prover.hpp | 1 - .../protogalaxy/protogalaxy_prover_impl.hpp | 7 +- .../relations/databus_lookup_relation.hpp | 16 +- .../relations/generated/avm/declare_views.hpp | 2 +- .../relations/generated/avm/gas.hpp | 69 + .../generated/avm/lookup_pow_2_0.hpp | 4 +- .../generated/avm/lookup_pow_2_1.hpp | 4 +- .../relations/generated/avm/mem.hpp | 6 +- .../relations/generated/avm/powers.hpp | 49 + .../relations/logderiv_lookup_relation.hpp | 207 ++ .../relations/lookup_relation.hpp | 224 -- .../ultra_relation_consistency.test.cpp | 79 - .../srs/factories/crs_factory.hpp | 4 +- .../srs/factories/file_crs_factory.cpp | 4 +- .../srs/factories/file_crs_factory.hpp | 8 +- .../srs/factories/mem_bn254_crs_factory.cpp | 4 +- .../srs/factories/mem_crs_factory.test.cpp | 2 +- .../factories/mem_grumpkin_crs_factory.cpp | 2 +- .../client_ivc_recursive_verifier.cpp | 6 + .../client_ivc_recursive_verifier.test.cpp | 4 + .../verifier/decider_recursive_verifier.cpp | 17 +- .../verifier/goblin_recursive_verifier.cpp | 12 +- .../protogalaxy_recursive_verifier.cpp | 17 +- .../verifier/ultra_recursive_verifier.cpp | 39 +- .../honk_recursion/verifier/verifier.test.cpp | 2 +- .../grand_product_library.test.cpp | 133 - .../stdlib_circuit_builders/mega_flavor.hpp | 143 +- .../stdlib_circuit_builders/mock_circuits.hpp | 29 + .../plookup_tables/plookup_tables.cpp | 18 +- .../plookup_tables/types.hpp | 76 +- .../stdlib_circuit_builders/ultra_flavor.hpp | 164 +- .../ultra_recursive_flavor.hpp | 12 +- .../sumcheck/instance/prover_instance.hpp | 5 +- .../instance/prover_instance.test.cpp | 89 - .../barretenberg/sumcheck/sumcheck.test.cpp | 1 - .../translator_vm/translator_prover.cpp | 32 +- .../translator_vm/translator_prover.hpp | 2 +- .../translator_vm/translator_verifier.cpp | 24 +- 
.../translator_recursive_verifier.cpp | 26 +- .../ultra_honk/decider_prover.cpp | 23 +- .../ultra_honk/decider_prover.hpp | 5 +- .../ultra_honk/mega_transcript.test.cpp | 9 +- .../barretenberg/ultra_honk/oink_prover.cpp | 31 +- .../barretenberg/ultra_honk/oink_verifier.cpp | 17 +- .../ultra_honk/relation_correctness.test.cpp | 56 +- .../barretenberg/ultra_honk/sumcheck.test.cpp | 8 +- ..._composer.test.cpp => ultra_honk.test.cpp} | 132 +- .../ultra_honk/ultra_transcript.test.cpp | 9 +- .../ultra_honk/ultra_verifier.cpp | 20 +- .../vm/avm_trace/avm_gas_trace.cpp | 15 +- .../vm/avm_trace/avm_gas_trace.hpp | 113 +- .../vm/avm_trace/avm_mem_trace.cpp | 2 +- .../barretenberg/vm/avm_trace/avm_trace.cpp | 2168 +++++++---------- .../barretenberg/vm/avm_trace/avm_trace.hpp | 62 +- .../barretenberg/vm/avm_trace/fixed_gas.cpp | 23 + .../barretenberg/vm/avm_trace/fixed_gas.hpp | 36 + .../vm/avm_trace/fixed_powers.cpp | 25 + .../vm/avm_trace/fixed_powers.hpp | 32 + .../vm/generated/avm_circuit_builder.cpp | 54 +- .../vm/generated/avm_circuit_builder.hpp | 23 +- .../barretenberg/vm/generated/avm_flavor.hpp | 22 +- .../barretenberg/vm/generated/avm_prover.cpp | 24 +- .../barretenberg/vm/generated/avm_prover.hpp | 6 +- .../vm/generated/avm_verifier.cpp | 11 +- .../vm/tests/avm_arithmetic.test.cpp | 39 +- .../vm/tests/avm_bitwise.test.cpp | 19 +- .../barretenberg/vm/tests/avm_cast.test.cpp | 23 +- .../vm/tests/avm_comparison.test.cpp | 21 +- .../vm/tests/avm_control_flow.test.cpp | 19 +- .../vm/tests/avm_execution.test.cpp | 347 +-- .../barretenberg/vm/tests/avm_gas.test.cpp | 3 +- .../vm/tests/avm_indirect_mem.test.cpp | 19 +- .../vm/tests/avm_inter_table.test.cpp | 19 +- .../barretenberg/vm/tests/avm_kernel.test.cpp | 5 +- .../vm/tests/avm_mem_opcodes.test.cpp | 29 +- .../barretenberg/vm/tests/avm_memory.test.cpp | 20 +- .../barretenberg/vm/tests/helpers.test.cpp | 12 +- .../barretenberg/vm/tests/helpers.test.hpp | 2 + barretenberg/ts/src/types/fields.ts | 2 +- 
boxes/boxes/react/package.json | 1 - boxes/boxes/react/src/contracts/src/main.nr | 6 +- boxes/boxes/react/webpack.config.js | 1 - boxes/boxes/vanilla/package.json | 1 - boxes/boxes/vanilla/src/contracts/src/main.nr | 6 +- boxes/boxes/vanilla/webpack.config.js | 1 - boxes/contract-only/package.json | 1 - boxes/yarn.lock | 9 - build-images/Earthfile | 14 +- cspell.json | 12 +- docker-compose.yml | 139 +- docs/.gitignore | 1 + docs/docs/aztec/_category_.json | 2 +- docs/docs/getting_started.md | 42 +- docs/docs/getting_started/codespaces.md | 25 + docs/docs/getting_started/manual_install.md | 77 + .../how_to_compile_contract.md | 2 +- .../writing_contracts/authwit.md | 8 +- .../common_patterns/index.md | 2 +- .../writing_contracts/initializers.md | 2 +- .../docs/reference/sandbox_reference/index.md | 20 - .../sandbox_reference/sandbox-reference.md | 61 - .../aztecjs-getting-started.md | 2 +- .../advanced/_category_.json | 2 +- .../contract_tutorials/counter_contract.md} | 36 +- .../crowdfunding_contract.md | 2 +- .../private_voting_contract.md | 5 +- .../contract_tutorials/token_contract.md | 2 +- docs/docs/vision.mdx | 2 +- docs/sidebars.js | 9 - .../aztec/aztec-node-dashboard.json | 576 +++++ .../aztec/protocol-circuits-dashboard.json | 747 ++++++ grafana_dashboards/default.yml | 11 + noir-projects/Dockerfile.test | 4 +- noir-projects/Earthfile | 5 +- noir-projects/aztec-nr/.gitrepo | 4 +- noir-projects/aztec-nr/authwit/src/account.nr | 7 +- noir-projects/aztec-nr/authwit/src/auth.nr | 35 +- .../aztec-nr/authwit/src/cheatcodes.nr | 44 + noir-projects/aztec-nr/authwit/src/lib.nr | 1 + .../aztec/src/context/call_interfaces.nr | 59 +- .../aztec/src/context/private_context.nr | 40 +- .../src/context/unconstrained_context.nr | 22 +- .../aztec-nr/aztec/src/encrypted_logs.nr | 1 + .../encrypted_event_emission.nr | 45 + .../encrypted_logs/encrypted_note_emission.nr | 4 +- .../aztec/src/encrypted_logs/header.nr | 2 +- .../aztec/src/encrypted_logs/incoming_body.nr | 50 +- 
.../aztec/src/encrypted_logs/outgoing_body.nr | 2 +- .../aztec/src/encrypted_logs/payload.nr | 60 +- .../aztec/src/event/event_interface.nr | 11 +- .../aztec/src/keys/point_to_symmetric_key.nr | 2 +- .../aztec-nr/aztec/src/keys/public_keys.nr | 3 +- .../aztec-nr/aztec/src/note/lifecycle.nr | 6 +- .../oracle/enqueue_public_function_call.nr | 1 + .../aztec-nr/aztec/src/oracle/logs_traits.nr | 25 +- .../aztec-nr/aztec/src/oracle/notes.nr | 3 +- .../aztec-nr/aztec/src/test/helpers.nr | 2 +- .../aztec/src/test/helpers/cheatcodes.nr | 94 +- .../src/test/helpers/test_environment.nr | 87 +- .../src/test/helpers/{types.nr => utils.nr} | 51 +- .../src/easy_private_uint.nr | 6 +- .../aztec-nr/value-note/src/utils.nr | 4 +- noir-projects/noir-contracts/Nargo.toml | 1 + .../app_subscription_contract/src/main.nr | 6 +- .../auth_wit_test_contract/Nargo.toml | 9 + .../auth_wit_test_contract/src/main.nr | 14 + .../contracts/avm_test_contract/src/main.nr | 6 +- .../contracts/card_game_contract/src/cards.nr | 4 +- .../contracts/child_contract/src/main.nr | 4 +- .../contracts/counter_contract/src/main.nr | 24 +- .../crowdfunding_contract/src/main.nr | 16 +- .../delegated_on_contract/src/main.nr | 4 +- .../docs_example_contract/src/main.nr | 17 +- .../ecdsa_account_contract/src/main.nr | 15 +- .../contracts/escrow_contract/src/main.nr | 4 +- .../inclusion_proofs_contract/src/main.nr | 4 +- .../contracts/parent_contract/src/main.nr | 5 +- .../pending_note_hashes_contract/src/main.nr | 20 +- .../schnorr_account_contract/src/main.nr | 31 +- .../src/main.nr | 11 +- .../src/main.nr | 11 +- .../static_child_contract/src/main.nr | 6 +- .../contracts/test_contract/src/main.nr | 48 +- .../contracts/test_log_contract/src/main.nr | 99 +- .../token_blacklist_contract/src/main.nr | 12 +- .../contracts/token_contract/src/main.nr | 102 +- .../contracts/token_contract/src/test.nr | 9 + .../token_contract/src/test/access_control.nr | 52 + .../contracts/token_contract/src/test/burn.nr | 179 ++ 
.../token_contract/src/test/minting.nr | 239 ++ .../src/test/reading_constants.nr | 29 + .../token_contract/src/test/shielding.nr | 156 ++ .../src/test/transfer_private.nr | 131 + .../src/test/transfer_public.nr | 122 + .../token_contract/src/test/unshielding.nr | 89 + .../token_contract/src/test/utils.nr | 89 + .../crates/types/src/abis.nr | 1 + .../crates/types/src/abis/event_selector.nr | 70 + .../types/src/abis/public_call_stack_item.nr | 1 + .../crates/types/src/constants.nr | 19 +- .../crates/types/src/utils.nr | 3 +- noir/noir-repo/Cargo.lock | 1 + noir/noir-repo/aztec_macros/Cargo.toml | 2 +- noir/noir-repo/aztec_macros/src/lib.rs | 12 +- .../src/transforms/contract_interface.rs | 24 +- .../aztec_macros/src/transforms/events.rs | 488 ++-- .../src/transforms/note_interface.rs | 94 +- .../aztec_macros/src/utils/constants.rs | 1 - .../aztec_macros/src/utils/errors.rs | 6 + .../compiler/noirc_driver/src/abi_gen.rs | 5 +- .../src/hir/resolution/import.rs | 43 +- .../noirc_frontend/src/node_interner.rs | 9 + .../verify_honk_proof/Prover.toml | 2 +- .../verify_honk_proof/src/main.nr | 2 +- yarn-project/Earthfile | 9 +- yarn-project/accounts/package.json | 10 +- yarn-project/archiver/package.json | 11 +- .../archiver/src/archiver/archiver.test.ts | 3 + .../archiver/src/archiver/archiver.ts | 13 +- .../archiver/src/archiver/instrumentation.ts | 30 + .../archiver/kv_archiver_store/block_store.ts | 1 - yarn-project/archiver/src/index.ts | 3 + yarn-project/archiver/tsconfig.json | 3 + yarn-project/aztec-faucet/package.json | 10 +- yarn-project/aztec-node/package.json | 11 +- .../aztec-node/src/aztec-node/server.test.ts | 5 +- .../aztec-node/src/aztec-node/server.ts | 15 +- yarn-project/aztec-node/src/bin/index.ts | 3 +- yarn-project/aztec-node/tsconfig.json | 3 + yarn-project/aztec.js/package.json | 11 +- .../aztec.js/src/account/interface.ts | 29 +- yarn-project/aztec.js/src/account/wallet.ts | 9 +- .../src/fee/private_fee_payment_method.ts | 12 +- 
.../src/fee/public_fee_payment_method.ts | 33 +- yarn-project/aztec.js/src/index.ts | 3 +- .../aztec.js/src/rpc_clients/pxe_client.ts | 2 + yarn-project/aztec.js/src/utils/authwit.ts | 87 +- .../aztec.js/src/wallet/account_wallet.ts | 230 +- .../aztec.js/src/wallet/base_wallet.ts | 19 +- .../aztec.js/src/wallet/signerless_wallet.ts | 4 +- yarn-project/aztec.js/webpack.config.js | 1 - yarn-project/aztec/package.json | 11 +- .../aztec/src/cli/cmds/start_archiver.ts | 7 +- yarn-project/aztec/src/cli/cmds/start_node.ts | 7 +- .../aztec/src/cli/cmds/start_prover.ts | 22 +- yarn-project/aztec/src/sandbox.ts | 8 +- yarn-project/aztec/tsconfig.json | 3 + yarn-project/bb-prover/package.json | 11 +- .../bb-prover/src/avm_proving.test.ts | 37 +- yarn-project/bb-prover/src/bb/execute.ts | 20 +- yarn-project/bb-prover/src/instrumentation.ts | 149 ++ .../src/prover/bb_native_proof_creator.ts | 6 +- .../bb-prover/src/prover/bb_prover.ts | 74 +- yarn-project/bb-prover/src/stats.ts | 16 +- .../bb-prover/src/test/test_circuit_prover.ts | 30 +- yarn-project/bb-prover/tsconfig.json | 3 + yarn-project/builder/package.json | 10 +- .../src/contract-interface-gen/typescript.ts | 12 +- yarn-project/circuit-types/package.json | 10 +- .../src/logs/encrypted_l2_note_log.ts | 4 + .../src/logs/function_l2_logs.ts | 3 +- .../encrypted_note_log_incoming_body.test.ts | 7 +- .../encrypted_note_log_incoming_body.ts | 8 +- .../logs/l1_payload/l1_event_payload.test.ts | 3 +- .../src/logs/l1_payload/l1_event_payload.ts | 23 +- .../src/logs/l1_payload/l1_note_payload.ts | 7 +- .../src/logs/l1_payload/tagged_log.test.ts | 3 +- .../src/logs/l1_payload/tagged_log.ts | 28 +- .../circuit-types/src/logs/tx_l2_logs.ts | 53 +- yarn-project/circuit-types/src/mocks.ts | 70 +- .../circuit-types/src/notes/extended_note.ts | 6 +- .../circuit-types/src/tx/processed_tx.ts | 23 +- yarn-project/circuit-types/src/tx/tx.ts | 38 +- yarn-project/circuits.js/package.json | 10 +- yarn-project/circuits.js/src/constants.gen.ts 
| 19 +- .../contract_address.test.ts.snap | 2 +- .../circuits.js/src/keys/derivation.test.ts | 2 +- .../circuits.js/src/structs/avm/avm.ts | 9 + .../src/structs/complete_address.test.ts | 4 +- .../src/structs/contract_storage_read.ts | 33 +- .../contract_storage_update_request.ts | 15 +- .../src/structs/public_call_stack_item.ts | 17 +- yarn-project/cli/package.json | 10 +- yarn-project/cli/src/cmds/add_note.ts | 4 +- yarn-project/cli/src/inspect.ts | 2 +- yarn-project/end-to-end/package.json | 12 +- .../src/composed/e2e_aztec_js_browser.test.ts | 6 +- .../composed/integration_l1_publisher.test.ts | 3 +- .../end-to-end/src/e2e_authwit.test.ts | 171 +- .../end-to-end/src/e2e_avm_simulator.test.ts | 2 +- .../e2e_blacklist_token_contract/burn.test.ts | 12 +- .../transfer_private.test.ts | 12 +- .../unshielding.test.ts | 6 +- .../src/e2e_cross_chain_messaging.test.ts | 9 +- .../end-to-end/src/e2e_event_logs.test.ts | 29 +- .../end-to-end/src/e2e_fees/failures.test.ts | 131 +- .../src/e2e_lending_contract.test.ts | 19 +- .../end-to-end/src/e2e_p2p_network.test.ts | 7 +- .../end-to-end/src/e2e_prover/full.test.ts | 4 +- .../deposits.test.ts | 22 +- .../src/e2e_token_contract/burn.test.ts | 12 +- .../transfer_private.test.ts | 67 +- .../transfer_public.test.ts | 39 +- .../e2e_token_contract/unshielding.test.ts | 6 +- .../src/fixtures/snapshot_manager.ts | 7 +- yarn-project/end-to-end/src/fixtures/utils.ts | 10 +- yarn-project/end-to-end/src/shared/browser.ts | 4 +- .../end-to-end/src/shared/uniswap_l1_l2.ts | 136 +- yarn-project/end-to-end/tsconfig.json | 3 + yarn-project/end-to-end/webpack.config.js | 1 - yarn-project/entrypoints/package.json | 10 +- .../entrypoints/src/dapp_entrypoint.ts | 10 +- yarn-project/ethereum/package.json | 10 +- yarn-project/foundation/.prettierrc.json | 2 +- yarn-project/foundation/package.json | 10 +- yarn-project/foundation/src/abi/abi.ts | 3 +- yarn-project/foundation/src/abi/index.ts | 3 +- .../foundation/src/abi/note_selector.ts | 73 + 
.../src/crypto/random/randomness_singleton.ts | 4 +- .../src/json-rpc/server/json_rpc_server.ts | 4 +- yarn-project/foundation/src/log/logger.ts | 43 +- .../src/serialize/buffer_reader.test.ts | 68 + .../foundation/src/serialize/buffer_reader.ts | 16 + yarn-project/key-store/package.json | 10 +- yarn-project/key-store/src/key_store.test.ts | 30 +- yarn-project/kv-store/package.json | 10 +- yarn-project/merkle-tree/package.json | 10 +- .../snapshots/indexed_tree_snapshot.test.ts | 8 +- yarn-project/noir-contracts.js/package.json | 10 +- .../noir-protocol-circuits-types/package.json | 10 +- .../noir-protocol-circuits-types/src/index.ts | 2 +- .../src/type_conversion.ts | 4 +- yarn-project/p2p-bootstrap/package.json | 10 +- yarn-project/p2p-bootstrap/terraform/main.tf | 2 +- yarn-project/p2p/package.json | 11 +- .../p2p/src/tx_pool/aztec_kv_tx_pool.test.ts | 3 +- .../p2p/src/tx_pool/aztec_kv_tx_pool.ts | 15 +- .../p2p/src/tx_pool/instrumentation.ts | 58 + .../p2p/src/tx_pool/memory_tx_pool.test.ts | 4 +- .../p2p/src/tx_pool/memory_tx_pool.ts | 9 +- yarn-project/p2p/tsconfig.json | 3 + yarn-project/package.common.json | 14 +- yarn-project/package.json | 3 +- yarn-project/protocol-contracts/package.json | 10 +- yarn-project/prover-client/package.json | 11 +- .../prover-client/src/mocks/test_context.ts | 9 +- .../src/orchestrator/orchestrator.ts | 201 +- .../orchestrator_failures.test.ts | 5 +- .../orchestrator_lifecycle.test.ts | 5 +- .../orchestrator_workflow.test.ts | 3 +- .../src/test/bb_prover_base_rollup.test.ts | 3 +- .../src/test/bb_prover_full_rollup.test.ts | 3 +- .../src/test/bb_prover_parity.test.ts | 3 +- .../prover-client/src/tx-prover/tx-prover.ts | 25 +- yarn-project/prover-client/tsconfig.json | 3 + yarn-project/pxe/package.json | 10 +- .../src/database/deferred_note_dao.test.ts | 3 +- .../pxe/src/database/deferred_note_dao.ts | 5 +- .../src/database/incoming_note_dao.test.ts | 3 +- .../pxe/src/database/incoming_note_dao.ts | 7 +- 
.../src/database/outgoing_note_dao.test.ts | 3 +- .../pxe/src/database/outgoing_note_dao.ts | 5 +- yarn-project/pxe/src/index.ts | 1 + .../src/kernel_prover/kernel_prover.test.ts | 3 +- .../pxe/src/note_processor/note_processor.ts | 23 +- .../pxe/src/pxe_http/pxe_http_server.ts | 2 + .../pxe/src/pxe_service/pxe_service.ts | 10 +- yarn-project/scripts/package.json | 10 +- yarn-project/sequencer-client/package.json | 11 +- .../src/client/sequencer-client.ts | 10 +- .../src/sequencer/sequencer.test.ts | 2 + .../src/sequencer/sequencer.ts | 178 +- yarn-project/sequencer-client/tsconfig.json | 3 + yarn-project/simulator/package.json | 11 +- .../simulator/src/acvm/oracle/oracle.ts | 14 +- .../simulator/src/acvm/oracle/typed_oracle.ts | 20 +- .../simulator/src/avm/avm_context.test.ts | 2 + .../src/avm/avm_execution_environment.test.ts | 3 + .../src/avm/avm_execution_environment.ts | 13 +- .../simulator/src/avm/avm_simulator.test.ts | 1044 ++++---- .../simulator/src/avm/avm_simulator.ts | 5 +- .../simulator/src/avm/fixtures/index.ts | 35 +- .../simulator/src/avm/journal/journal.test.ts | 774 +++--- .../simulator/src/avm/journal/journal.ts | 337 +-- .../src/avm/journal/nullifiers.test.ts | 22 +- .../simulator/src/avm/journal/nullifiers.ts | 43 +- .../src/avm/journal/public_storage.test.ts | 4 +- .../src/avm/journal/public_storage.ts | 10 + .../simulator/src/avm/journal/trace.test.ts | 294 --- .../simulator/src/avm/journal/trace.ts | 181 -- .../simulator/src/avm/journal/trace_types.ts | 91 - .../src/avm/opcodes/accrued_substate.test.ts | 457 ++-- .../src/avm/opcodes/accrued_substate.ts | 10 +- .../src/avm/opcodes/contract.test.ts | 51 +- .../src/avm/opcodes/external_calls.test.ts | 58 +- .../src/avm/opcodes/external_calls.ts | 52 +- .../simulator/src/avm/opcodes/storage.test.ts | 14 +- yarn-project/simulator/src/avm/test_utils.ts | 53 + .../src/client/client_execution_context.ts | 14 +- .../simulator/src/client/execution_result.ts | 3 +- 
.../src/client/private_execution.test.ts | 11 +- .../simulator/src/client/simulator.ts | 10 +- .../client/unconstrained_execution.test.ts | 2 + .../simulator/src/client/view_data_oracle.ts | 8 + yarn-project/simulator/src/mocks/fixtures.ts | 2 +- .../src/public/abstract_phase_manager.ts | 6 +- .../src/public/app_logic_phase_manager.ts | 1 + .../simulator/src/public/execution.ts | 58 +- yarn-project/simulator/src/public/executor.ts | 76 +- .../src/public/public_processor.test.ts | 31 +- .../simulator/src/public/public_processor.ts | 13 +- .../src/public/side_effect_trace.test.ts | 284 +++ .../simulator/src/public/side_effect_trace.ts | 323 +++ .../src/public/side_effect_trace_interface.ts | 41 + .../src/public/teardown_phase_manager.ts | 1 + .../src/public/transitional_adaptors.ts | 104 +- yarn-project/simulator/tsconfig.json | 3 + yarn-project/telemetry-client/.eslintrc.cjs | 1 + yarn-project/telemetry-client/package.json | 77 + .../telemetry-client/src/attributes.ts | 49 + yarn-project/telemetry-client/src/index.ts | 1 + yarn-project/telemetry-client/src/metrics.ts | 30 + yarn-project/telemetry-client/src/noop.ts | 83 + yarn-project/telemetry-client/src/otel.ts | 71 + yarn-project/telemetry-client/src/start.ts | 27 + .../telemetry-client/src/telemetry.ts | 180 ++ yarn-project/telemetry-client/tsconfig.json | 14 + yarn-project/txe/package.json | 13 +- yarn-project/txe/src/bin/index.ts | 15 +- yarn-project/txe/src/oracle/txe_oracle.ts | 453 +++- .../txe/src/txe_service/txe_service.ts | 184 +- .../txe/src/util/expected_failure_error.ts | 5 + .../util/txe_public_contract_data_source.ts | 63 + .../txe/src/util/txe_public_state_db.ts | 57 + yarn-project/types/package.json | 10 +- .../types/src/abi/contract_artifact.ts | 7 +- yarn-project/world-state/package.json | 10 +- yarn-project/yarn.lock | 370 ++- 463 files changed, 13089 insertions(+), 7990 deletions(-) rename barretenberg/cpp/pil/avm/{ => fixed}/byte_lookup.pil (99%) rename barretenberg/cpp/pil/avm/{ => 
fixed}/gas.pil (63%) create mode 100644 barretenberg/cpp/pil/avm/fixed/powers.pil create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/gas.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/powers.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/logderiv_lookup_relation.hpp delete mode 100644 barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp delete mode 100644 barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.test.cpp rename barretenberg/cpp/src/barretenberg/ultra_honk/{ultra_composer.test.cpp => ultra_honk.test.cpp} (88%) create mode 100644 barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_gas.cpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_gas.hpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_powers.cpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_powers.hpp create mode 100644 docs/docs/getting_started/codespaces.md create mode 100644 docs/docs/getting_started/manual_install.md rename docs/docs/{getting_started => tutorials}/aztecjs-getting-started.md (99%) rename docs/docs/{getting_started/aztecnr-getting-started.md => tutorials/contract_tutorials/counter_contract.md} (78%) create mode 100644 grafana_dashboards/aztec/aztec-node-dashboard.json create mode 100644 grafana_dashboards/aztec/protocol-circuits-dashboard.json create mode 100644 grafana_dashboards/default.yml create mode 100644 noir-projects/aztec-nr/authwit/src/cheatcodes.nr create mode 100644 noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr rename noir-projects/aztec-nr/aztec/src/test/helpers/{types.nr => utils.nr} (67%) create mode 100644 noir-projects/noir-contracts/contracts/auth_wit_test_contract/Nargo.toml create mode 100644 noir-projects/noir-contracts/contracts/auth_wit_test_contract/src/main.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test.nr 
create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/burn.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/minting.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/reading_constants.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/shielding.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_private.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_public.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/unshielding.nr create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr create mode 100644 noir-projects/noir-protocol-circuits/crates/types/src/abis/event_selector.nr create mode 100644 yarn-project/archiver/src/archiver/instrumentation.ts create mode 100644 yarn-project/bb-prover/src/instrumentation.ts create mode 100644 yarn-project/foundation/src/abi/note_selector.ts create mode 100644 yarn-project/p2p/src/tx_pool/instrumentation.ts delete mode 100644 yarn-project/simulator/src/avm/journal/trace.test.ts delete mode 100644 yarn-project/simulator/src/avm/journal/trace.ts delete mode 100644 yarn-project/simulator/src/avm/journal/trace_types.ts create mode 100644 yarn-project/simulator/src/avm/test_utils.ts create mode 100644 yarn-project/simulator/src/public/side_effect_trace.test.ts create mode 100644 yarn-project/simulator/src/public/side_effect_trace.ts create mode 100644 yarn-project/simulator/src/public/side_effect_trace_interface.ts create mode 100644 yarn-project/telemetry-client/.eslintrc.cjs create mode 100644 yarn-project/telemetry-client/package.json create mode 100644 yarn-project/telemetry-client/src/attributes.ts create mode 100644 
yarn-project/telemetry-client/src/index.ts create mode 100644 yarn-project/telemetry-client/src/metrics.ts create mode 100644 yarn-project/telemetry-client/src/noop.ts create mode 100644 yarn-project/telemetry-client/src/otel.ts create mode 100644 yarn-project/telemetry-client/src/start.ts create mode 100644 yarn-project/telemetry-client/src/telemetry.ts create mode 100644 yarn-project/telemetry-client/tsconfig.json create mode 100644 yarn-project/txe/src/util/expected_failure_error.ts create mode 100644 yarn-project/txe/src/util/txe_public_contract_data_source.ts create mode 100644 yarn-project/txe/src/util/txe_public_state_db.ts diff --git a/.circleci/config.yml b/.circleci/config.yml index 8c3765900aa..2322e6159b3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -440,16 +440,6 @@ jobs: command: | should_release || exit 0 yarn-project/deploy_npm.sh latest - - run: - name: "Release canary to NPM: l1-contracts" - command: | - should_release || exit 0 - deploy_npm l1-contracts canary - - run: - name: "Release latest to NPM: l1-contracts" - command: | - should_release || exit 0 - deploy_npm l1-contracts latest - run: name: "Update aztec-up" command: | diff --git a/.devcontainer/scripts/onCreateCommand.sh b/.devcontainer/scripts/onCreateCommand.sh index 0f2f25affee..c0970999305 100755 --- a/.devcontainer/scripts/onCreateCommand.sh +++ b/.devcontainer/scripts/onCreateCommand.sh @@ -11,11 +11,11 @@ if ! grep -q "PXE_URL" ~/.bashrc; then fi if ! grep -q "alias sandbox" ~/.bashrc; then - echo "alias sandbox=\"npx create-aztec-app sandbox\"" >> ~/.bashrc + echo "alias sandbox=\"npx aztec-app sandbox\"" >> ~/.bashrc fi source ~/.bashrc -yes | npx create-aztec-app -t $TYPE -n $NAME -s +yes | npx aztec-app -t $TYPE -n $NAME -s mv $NAME/* $NAME/.* . 
rm -rf $NAME diff --git a/.devcontainer/scripts/postAttachCommand.sh b/.devcontainer/scripts/postAttachCommand.sh index 2ff4a39973b..9eeff69f350 100755 --- a/.devcontainer/scripts/postAttachCommand.sh +++ b/.devcontainer/scripts/postAttachCommand.sh @@ -5,7 +5,7 @@ NAME=$2 apt install gh gh codespace ports visibility 8080:public -c $CODESPACE_NAME -npx create-aztec-app sandbox start +npx aztec-app sandbox start r=$(tput sgr0) # Reset color bold=$(tput bold) # Bold text diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index 6ce952dec31..80fa8aae0c2 100644 --- a/.github/workflows/devnet-deploys.yml +++ b/.github/workflows/devnet-deploys.yml @@ -56,7 +56,7 @@ jobs: aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: us-west-2 - - name: Deploy Bootstrap Nodes + - name: Deploy P2P Bootstrap Nodes working-directory: ./yarn-project/p2p-bootstrap/terraform run: | terraform init -input=false -backend-config="key=devnet/p2p-bootstrap" diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 11a065c8938..e1abe1531d9 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -29,3 +29,42 @@ jobs: - timeout-minutes: 25 run: earthly-ci --no-output ./docs/+deploy-prod --NETLIFY_AUTH_TOKEN=${{ secrets.NETLIFY_AUTH_TOKEN }} --NETLIFY_SITE_ID=${{ secrets.NETLIFY_SITE_ID }} --COMMIT_TAG=${{ inputs.tag }} + + pdf: + needs: setup + runs-on: master-x86 + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + token: ${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} + path: aztec-packages + - name: Install Prince + run: | + curl https://www.princexml.com/download/prince-14.2-linux-generic-x86_64.tar.gz -O + tar zxf prince-14.2-linux-generic-x86_64.tar.gz + cd prince-14.2-linux-generic-x86_64 + yes "" | sudo ./install.sh + - name: Serve docs + run: | + cd aztec-packages/docs + yarn build + yarn serve & + - name: Checkout PDF repo + uses: actions/checkout@v3 + with: + 
token: ${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} + repository: AztecProtocol/protocol-specs-pdf + path: protocol-specs-pdf + - name: Generate PDF + run: | + npx docusaurus-prince-pdf -u http://localhost:3000/protocol-specs/intro --output protocol-specs-pdf/protocol-specs.pdf + timeout-minutes: 4 + - name: Push to PDF repo + run: | + git config --global user.name AztecBot + git config --global user.email tech@aztecprotocol.com + cd protocol-specs-pdf + git add protocol-specs.pdf + git commit -m "chore: update protocol-specs.pdf" + git push origin main diff --git a/.github/workflows/vm_full_tests.yml b/.github/workflows/vm_full_tests.yml index 1e912a05438..e1e135b0201 100644 --- a/.github/workflows/vm_full_tests.yml +++ b/.github/workflows/vm_full_tests.yml @@ -55,6 +55,7 @@ jobs: concurrency_key: avm-full-tests-x86 - name: "AVM Full Tests" working-directory: ./barretenberg/cpp/ - timeout-minutes: 90 - # limit our parallelism to half our cores - run: earthly-ci --no-output +vm-full-test --hardware_concurrency=64 + timeout-minutes: 70 + run: | + sudo shutdown -P 70 # hack until core part of the scripts + earthly-ci --no-output +vm-full-test --hardware_concurrency=64 # limit our parallelism to half our cores diff --git a/CODEOWNERS b/CODEOWNERS index 1b8a6f2f1d2..9d8055da4cd 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,17 +1,31 @@ /build-images/ @charlielye -# Notify the AVM team of any changes to public oracle. -/yarn-project/simulator/src/public/public_execution_context.ts @Maddiaa0 @fcarreiro @dbanks12 - -# Notify the AVM team of changes to generated PIL code -barretenberg/cpp/src/barretenberg/**/generated/* @Maddiaa0 @jeanmon @IlyasRidhuan +# Notify the Noir team of any changes to ACIR serialization +/noir/noir-repo/acvm-repo/acir/codegen/* @TomAFrench @vezenovm @guipublic -# Notify the AVM team of any changes to public context or avm context. 
+##################################################### +# Notify the AVM team +# +# on changes to PIL code-generator +/bb-pilcom @Maddiaa0 @jeanmon @IlyasRidhuan @fcarreiro +# on changes to PIL code (AVM circuit) +/barretenberg/cpp/pil @Maddiaa0 @jeanmon @IlyasRidhuan @fcarreiro +# on changes to PIL-generated C++ +/barretenberg/cpp/src/barretenberg/**/generated @jeanmon @IlyasRidhuan @fcarreiro +# on changes to AVM trace (C++ witness generator) +/barretenberg/cpp/src/barretenberg/vm/avm_trace @jeanmon @IlyasRidhuan @fcarreiro +# on changes to public context in aztec-nr /noir-projects/aztec-nr/aztec/src/context/inputs/public_context_inputs.nr @fcarreiro @dbanks12 -/noir-projects/aztec-nr/aztec/src/context/inputs/avm_context_inputs.nr @fcarreiro @dbanks12 /noir-projects/aztec-nr/aztec/src/context/public_context.nr @fcarreiro @dbanks12 -/noir-projects/aztec-nr/aztec/src/context/avm_context.nr @fcarreiro @dbanks12 -/noir-projects/aztec-nr/aztec/src/context/interface.nr @fcarreiro @dbanks12 +# on changes to the AVM simulator and supporting modules +/yarn-project/simulator/src/avm @fcarreiro @dbanks12 +/yarn-project/simulator/src/public/execution.ts @fcarreiro @dbanks12 +/yarn-project/simulator/src/public/executor.ts @fcarreiro @dbanks12 +/yarn-project/simulator/src/public/side_effect_trace.test.ts @fcarreiro @dbanks12 +/yarn-project/simulator/src/public/side_effect_trace.ts @fcarreiro @dbanks12 +/yarn-project/simulator/src/public/side_effect_trace_interface.ts @fcarreiro @dbanks12 +/yarn-project/simulator/src/public/transitional_adaptors.ts @fcarreiro @dbanks12 +# on changes to the AVM transpiler +/avm-transpiler/src @fcarreiro @dbanks12 +##################################################### -# Notify the Noir team of any changes to ACIR serialization -/noir/noir-repo/acvm-repo/acir/codegen/* @TomAFrench @vezenovm @guipublic diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index d2508be6360..b994cf1a68c 100644 --- a/barretenberg/.gitrepo +++ 
b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 2d7b8b24571369e693e7e19470d7c85f0560368c - parent = 77761c670f2d516ab486de0f7bde036ff00ebd99 + commit = 947c5552eeb784dad1abb5ecebdb6a80880fec08 + parent = 94954131ea61bb6b58efe4e9f8b4e1f489f53fa9 method = merge cmdver = 0.4.6 diff --git a/barretenberg/acir_tests/gen_inner_proof_inputs_ultra_honk.sh b/barretenberg/acir_tests/gen_inner_proof_inputs_ultra_honk.sh index a013a7129b3..30548202ad3 100755 --- a/barretenberg/acir_tests/gen_inner_proof_inputs_ultra_honk.sh +++ b/barretenberg/acir_tests/gen_inner_proof_inputs_ultra_honk.sh @@ -3,7 +3,7 @@ # BIN: to specify a different binary to test with (e.g. bb.js or bb.js-dev). set -eu -BIN=${BIN:-../cpp/build-debug/bin/bb} +BIN=${BIN:-../cpp/build/bin/bb} CRS_PATH=~/.bb-crs BRANCH=master VERBOSE=${VERBOSE:-} diff --git a/barretenberg/cpp/pil/avm/binary.pil b/barretenberg/cpp/pil/avm/binary.pil index 441f4c56bd1..c951481caf6 100644 --- a/barretenberg/cpp/pil/avm/binary.pil +++ b/barretenberg/cpp/pil/avm/binary.pil @@ -1,6 +1,4 @@ - -include "byte_lookup.pil"; -include "main.pil"; +include "fixed/byte_lookup.pil"; namespace binary(256); diff --git a/barretenberg/cpp/pil/avm/byte_lookup.pil b/barretenberg/cpp/pil/avm/fixed/byte_lookup.pil similarity index 99% rename from barretenberg/cpp/pil/avm/byte_lookup.pil rename to barretenberg/cpp/pil/avm/fixed/byte_lookup.pil index a91272a86e6..1759bf83899 100644 --- a/barretenberg/cpp/pil/avm/byte_lookup.pil +++ b/barretenberg/cpp/pil/avm/fixed/byte_lookup.pil @@ -1,4 +1,3 @@ - namespace byte_lookup(256); // These columns are commited for now, but will be migrated to constant/fixed when // we support more *exotic* code generation options diff --git a/barretenberg/cpp/pil/avm/gas.pil b/barretenberg/cpp/pil/avm/fixed/gas.pil similarity index 63% rename from barretenberg/cpp/pil/avm/gas.pil rename to barretenberg/cpp/pil/avm/fixed/gas.pil index 
0a1ed20bdb2..e366c85c67f 100644 --- a/barretenberg/cpp/pil/avm/gas.pil +++ b/barretenberg/cpp/pil/avm/fixed/gas.pil @@ -5,4 +5,9 @@ namespace gas(256); // TODO(ISSUE_NUMBER): Constrain variable gas costs pol commit l2_gas_fixed_table; - pol commit da_gas_fixed_table; \ No newline at end of file + pol commit da_gas_fixed_table; + + // DUMMY RELATIONS to force creation of hpp. + sel_gas_cost - sel_gas_cost = 0; + l2_gas_fixed_table - l2_gas_fixed_table = 0; + da_gas_fixed_table - da_gas_fixed_table = 0; \ No newline at end of file diff --git a/barretenberg/cpp/pil/avm/fixed/powers.pil b/barretenberg/cpp/pil/avm/fixed/powers.pil new file mode 100644 index 00000000000..bc497eca04c --- /dev/null +++ b/barretenberg/cpp/pil/avm/fixed/powers.pil @@ -0,0 +1,9 @@ +// This table should eventually be fixed. +// Contains 256 rows with the powers of 2 for 8-bit numbers. +// power_of_2 = 1 << clk; +namespace powers(256); + // clk will be the implicit power. + pol commit power_of_2; + + // DUMMY RELATION to force creation of hpp. + power_of_2 - power_of_2 = 0; \ No newline at end of file diff --git a/barretenberg/cpp/pil/avm/main.pil b/barretenberg/cpp/pil/avm/main.pil index 36b2b6e2b97..19e445d6290 100644 --- a/barretenberg/cpp/pil/avm/main.pil +++ b/barretenberg/cpp/pil/avm/main.pil @@ -3,7 +3,8 @@ include "alu.pil"; include "binary.pil"; include "constants.pil"; include "kernel.pil"; -include "gas.pil"; +include "fixed/gas.pil"; +include "fixed/powers.pil"; include "gadgets/conversion.pil"; include "gadgets/sha256.pil"; include "gadgets/poseidon2.pil"; @@ -139,9 +140,6 @@ namespace main(256); pol commit sel_rng_8; // Boolean selector for the 8-bit range check lookup pol commit sel_rng_16; // Boolean selector for the 16-bit range check lookup - //===== Lookup table powers of 2 ============================================= - pol commit table_pow_2; // Table of powers of 2 for 8-bit numbers. 
- //===== CONTROL FLOW ========================================================== // Program counter pol commit pc; @@ -487,6 +485,11 @@ namespace main(256); sel_gas_accounting_active - OPCODE_SELECTORS - SEL_ALL_CTRL_FLOW - sel_op_sload - sel_op_sstore - sel_mem_op_activate_gas = 0; // Program counter must increment if not jumping or returning + // TODO: support for muli-rows opcode in execution trace such as + // radix, hash gadgets operations. At the moment, we have to increment + // the pc in witness generation for all rows pertaining to the original + // opcode. This is misleading. Ultimately, we want the pc to be incremented + // just after the last row of a given opcode. #[PC_INCREMENT] (1 - sel_first) * (1 - sel_op_halt) * OPCODE_SELECTORS * (pc' - (pc + 1)) = 0; @@ -768,11 +771,11 @@ namespace main(256); // Lookup for 2**(ib) #[LOOKUP_POW_2_0] - alu.sel_shift_which {alu.ib, alu.two_pow_s} in sel_rng_8 {clk, table_pow_2}; + alu.sel_shift_which {alu.ib, alu.two_pow_s} in sel_rng_8 {clk, powers.power_of_2}; // Lookup for 2**(t-ib) #[LOOKUP_POW_2_1] - alu.sel_shift_which {alu.t_sub_s_bits , alu.two_pow_t_sub_s} in sel_rng_8 {clk, table_pow_2}; + alu.sel_shift_which {alu.t_sub_s_bits , alu.two_pow_t_sub_s} in sel_rng_8 {clk, powers.power_of_2}; //====== Inter-table Constraints (Range Checks) ============================================ // TODO: Investigate optimising these range checks. Handling non-FF elements should require less range checks. diff --git a/barretenberg/cpp/pil/avm/mem.pil b/barretenberg/cpp/pil/avm/mem.pil index a5aa3080f03..0cb9d7c491a 100644 --- a/barretenberg/cpp/pil/avm/mem.pil +++ b/barretenberg/cpp/pil/avm/mem.pil @@ -171,10 +171,14 @@ namespace mem(256); // instead of (r_in_tag - tag)^(-1) as this allows to store zero by default (i.e., when tag_err == 0). 
// The new column one_min_inv is set to 1 - (r_in_tag - tag)^(-1) when tag_err == 1 // but must be set to 0 when tags are matching and tag_err = 0 + // Relaxation: This relation is relaxed when skip_check_tag is enabled or for + // uninitialized memory, i.e. tag == 0. #[MEM_IN_TAG_CONSISTENCY_1] - (1 - skip_check_tag) * (1 - rw) * ((r_in_tag - tag) * (1 - one_min_inv) - tag_err) = 0; + tag * (1 - skip_check_tag) * (1 - rw) * ((r_in_tag - tag) * (1 - one_min_inv) - tag_err) = 0; + // TODO: Try to decrease the degree of the above relation, e.g., skip_check_tag might be consolidated + // with tag == 0 and rw == 1. #[MEM_IN_TAG_CONSISTENCY_2] - (1 - tag_err) * one_min_inv = 0; + tag * (1 - tag_err) * one_min_inv = 0; #[NO_TAG_ERR_WRITE_OR_SKIP] (skip_check_tag + rw) * tag_err = 0; diff --git a/barretenberg/cpp/src/barretenberg/benchmark/ipa_bench/ipa.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/ipa_bench/ipa.bench.cpp index 2f4b2cd88f4..fac0f30f3b5 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/ipa_bench/ipa.bench.cpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/ipa_bench/ipa.bench.cpp @@ -43,7 +43,7 @@ void ipa_open(State& state) noexcept auto prover_transcript = std::make_shared(); state.ResumeTiming(); // Compute proof - IPA::compute_opening_proof(ck, opening_pair, poly, prover_transcript); + IPA::compute_opening_proof(ck, { poly, opening_pair }, prover_transcript); // Store info for verifier prover_transcripts[static_cast(state.range(0)) - MIN_POLYNOMIAL_DEGREE_LOG2] = prover_transcript; opening_claims[static_cast(state.range(0)) - MIN_POLYNOMIAL_DEGREE_LOG2] = opening_claim; diff --git a/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp index 6e7069f08ae..f735d2cfb19 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp +++ 
b/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp @@ -65,7 +65,7 @@ BENCHMARK(execute_relation_for_pg_univariates>); BENCHMARK(execute_relation_for_pg_univariates>); BENCHMARK(execute_relation_for_pg_univariates>); -BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); BENCHMARK(execute_relation_for_pg_univariates>); // Goblin-Ultra only relations (PG prover combiner work) @@ -79,7 +79,7 @@ BENCHMARK(execute_relation_for_univariates>); BENCHMARK(execute_relation_for_univariates>); BENCHMARK(execute_relation_for_univariates>); -BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); BENCHMARK(execute_relation_for_univariates>); // Goblin-Ultra only relations (Sumcheck prover work) @@ -93,7 +93,7 @@ BENCHMARK(execute_relation_for_values>) BENCHMARK(execute_relation_for_values>); BENCHMARK(execute_relation_for_values>); BENCHMARK(execute_relation_for_values>); -BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); BENCHMARK(execute_relation_for_values>); // Goblin-Ultra only relations (verifier work) diff --git a/barretenberg/cpp/src/barretenberg/benchmark/ultra_bench/ultra_honk_rounds.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/ultra_bench/ultra_honk_rounds.bench.cpp index 2d8cbe748e3..5625d4fddac 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/ultra_bench/ultra_honk_rounds.bench.cpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/ultra_bench/ultra_honk_rounds.bench.cpp @@ -60,7 +60,7 @@ BB_PROFILE static void test_round_inner(State& state, MegaProver& prover, size_t DeciderProver_ decider_prover(prover.instance, prover.transcript); time_if_index(RELATION_CHECK, [&] { decider_prover.execute_relation_check_rounds(); }); - time_if_index(ZEROMORPH, [&] { decider_prover.execute_zeromorph_rounds(); }); + time_if_index(ZEROMORPH, [&] { decider_prover.execute_pcs_rounds(); }); } BB_PROFILE static void 
test_round(State& state, size_t index) noexcept { diff --git a/barretenberg/cpp/src/barretenberg/circuit_checker/ultra_circuit_builder.test.cpp b/barretenberg/cpp/src/barretenberg/circuit_checker/ultra_circuit_builder.test.cpp index 005a50ea156..5d1eec57741 100644 --- a/barretenberg/cpp/src/barretenberg/circuit_checker/ultra_circuit_builder.test.cpp +++ b/barretenberg/cpp/src/barretenberg/circuit_checker/ultra_circuit_builder.test.cpp @@ -1,6 +1,7 @@ #include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" #include "barretenberg/circuit_checker/circuit_checker.hpp" #include "barretenberg/crypto/pedersen_commitment/pedersen.hpp" +#include "barretenberg/stdlib_circuit_builders/mock_circuits.hpp" #include "barretenberg/stdlib_circuit_builders/plookup_tables/fixed_base/fixed_base.hpp" #include @@ -104,6 +105,22 @@ TEST(ultra_circuit_constructor, create_gates_from_plookup_accumulators) EXPECT_EQ(result, true); } +TEST(ultra_circuit_constructor, bad_lookup_failure) +{ + UltraCircuitBuilder builder; + MockCircuits::add_lookup_gates(builder); + + // Erroneously set a non-zero wire value to zero in one of the lookup gates + for (auto& wire_3_witness_idx : builder.blocks.lookup.w_o()) { + if (wire_3_witness_idx != builder.zero_idx) { + wire_3_witness_idx = builder.zero_idx; + break; + } + } + + EXPECT_FALSE(CircuitChecker::check(builder)); +} + TEST(ultra_circuit_constructor, base_case) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.fuzzer.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.fuzzer.cpp index cebb8c59c7a..df6c3ec3115 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.fuzzer.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.fuzzer.cpp @@ -21,11 +21,10 @@ class ProxyCaller { public: template static void compute_opening_proof_internal(const std::shared_ptr>& ck, - const OpeningPair& opening_pair, 
- const Polynomial& polynomial, + const ProverOpeningClaim& opening_claim, const std::shared_ptr& transcript) { - IPA::compute_opening_proof_internal(ck, opening_pair, polynomial, transcript); + IPA::compute_opening_proof_internal(ck, opening_claim, transcript); } template static bool verify_internal(const std::shared_ptr>& vk, @@ -145,7 +144,7 @@ extern "C" int LLVMFuzzerTestOneInput(const unsigned char* data, size_t size) } auto const opening_pair = OpeningPair{ x, poly.evaluate(x) }; auto const opening_claim = OpeningClaim{ opening_pair, ck->commit(poly) }; - ProxyCaller::compute_opening_proof_internal(ck, opening_pair, poly, transcript); + ProxyCaller::compute_opening_proof_internal(ck, { poly, opening_pair }, transcript); // Reset challenge indices transcript->reset_indices(); diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp index 288ad34b3ab..0fcc7c65841 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp @@ -130,10 +130,12 @@ template class IPA { */ template static void compute_opening_proof_internal(const std::shared_ptr& ck, - const OpeningPair& opening_pair, - const Polynomial& polynomial, + const ProverOpeningClaim& opening_claim, const std::shared_ptr& transcript) { + + Polynomial polynomial = opening_claim.polynomial; + // clang-format on auto poly_length = static_cast(polynomial.size()); @@ -184,6 +186,7 @@ template class IPA { // Step 5. // Compute vector b (vector of the powers of the challenge) + OpeningPair opening_pair = opening_claim.opening_pair; std::vector b_vec(poly_length); run_loop_in_parallel_if_effective( poly_length, @@ -603,11 +606,10 @@ template class IPA { * compute_opening_proof_internal \endlink. 
*/ static void compute_opening_proof(const std::shared_ptr& ck, - const OpeningPair& opening_pair, - const Polynomial& polynomial, + const ProverOpeningClaim& opening_claim, const std::shared_ptr& transcript) { - compute_opening_proof_internal(ck, opening_pair, polynomial, transcript); + compute_opening_proof_internal(ck, opening_claim, transcript); } /** diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.test.cpp index 4defedb4500..db8a2597a5b 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.test.cpp @@ -67,7 +67,7 @@ TEST_F(IPATest, OpenZeroPolynomial) // initialize empty prover transcript auto prover_transcript = std::make_shared(); - IPA::compute_opening_proof(this->ck(), opening_pair, poly, prover_transcript); + IPA::compute_opening_proof(this->ck(), { poly, opening_pair }, prover_transcript); // initialize verifier transcript from proof data auto verifier_transcript = std::make_shared(prover_transcript->proof_data); @@ -92,7 +92,7 @@ TEST_F(IPATest, OpenAtZero) // initialize empty prover transcript auto prover_transcript = std::make_shared(); - IPA::compute_opening_proof(this->ck(), opening_pair, poly, prover_transcript); + IPA::compute_opening_proof(this->ck(), { poly, opening_pair }, prover_transcript); // initialize verifier transcript from proof data auto verifier_transcript = std::make_shared(prover_transcript->proof_data); @@ -131,7 +131,7 @@ TEST_F(IPATest, ChallengesAreZero) auto new_random_vector = random_vector; new_random_vector[i] = Fr::zero(); transcript->initialize(new_random_vector); - EXPECT_ANY_THROW(IPA::compute_opening_proof_internal(this->ck(), opening_pair, poly, transcript)); + EXPECT_ANY_THROW(IPA::compute_opening_proof_internal(this->ck(), { poly, opening_pair }, transcript)); } // Fill out a vector of affine elements that the verifier 
receives from the prover with generators (we don't care // about them right now) @@ -181,7 +181,7 @@ TEST_F(IPATest, AIsZeroAfterOneRound) transcript->initialize(random_vector); // Compute opening proof - IPA::compute_opening_proof_internal(this->ck(), opening_pair, poly, transcript); + IPA::compute_opening_proof_internal(this->ck(), { poly, opening_pair }, transcript); // Reset indices transcript->reset_indices(); @@ -221,7 +221,7 @@ TEST_F(IPATest, Open) // initialize empty prover transcript auto prover_transcript = std::make_shared(); - IPA::compute_opening_proof(this->ck(), opening_pair, poly, prover_transcript); + IPA::compute_opening_proof(this->ck(), { poly, opening_pair }, prover_transcript); // initialize verifier transcript from proof data auto verifier_transcript = std::make_shared(prover_transcript->proof_data); @@ -295,22 +295,18 @@ TEST_F(IPATest, GeminiShplonkIPAWithShift) const auto [gemini_opening_pairs, gemini_witnesses] = GeminiProver::compute_fold_polynomial_evaluations( mle_opening_point, std::move(gemini_polynomials), r_challenge); + std::vector> opening_claims; + for (size_t l = 0; l < log_n; ++l) { std::string label = "Gemini:a_" + std::to_string(l); const auto& evaluation = gemini_opening_pairs[l + 1].evaluation; prover_transcript->send_to_verifier(label, evaluation); + opening_claims.emplace_back(gemini_witnesses[l], gemini_opening_pairs[l]); } + opening_claims.emplace_back(gemini_witnesses[log_n], gemini_opening_pairs[log_n]); - const Fr nu_challenge = prover_transcript->template get_challenge("Shplonk:nu"); - auto batched_quotient_Q = - ShplonkProver::compute_batched_quotient(gemini_opening_pairs, gemini_witnesses, nu_challenge); - prover_transcript->send_to_verifier("Shplonk:Q", this->ck()->commit(batched_quotient_Q)); - - const Fr z_challenge = prover_transcript->template get_challenge("Shplonk:z"); - const auto [shplonk_opening_pair, shplonk_witness] = ShplonkProver::compute_partially_evaluated_batched_quotient( - 
gemini_opening_pairs, gemini_witnesses, std::move(batched_quotient_Q), nu_challenge, z_challenge); - - IPA::compute_opening_proof(this->ck(), shplonk_opening_pair, shplonk_witness, prover_transcript); + const auto opening_claim = ShplonkProver::prove(this->ck(), opening_claims, prover_transcript); + IPA::compute_opening_proof(this->ck(), opening_claim, prover_transcript); auto verifier_transcript = NativeTranscript::verifier_init_empty(prover_transcript); @@ -321,7 +317,7 @@ TEST_F(IPATest, GeminiShplonkIPAWithShift) verifier_transcript); const auto shplonk_verifier_claim = - ShplonkVerifier::reduce_verification(this->vk(), gemini_verifier_claim, verifier_transcript); + ShplonkVerifier::reduce_verification(this->vk()->get_g1_identity(), gemini_verifier_claim, verifier_transcript); auto result = IPA::reduce_verify(this->vk(), shplonk_verifier_claim, verifier_transcript); EXPECT_EQ(result, true); diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.hpp index c763f3a2ecf..a067b224fc6 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.hpp @@ -26,19 +26,19 @@ template class KZG { * @brief Computes the KZG commitment to an opening proof polynomial at a single evaluation point * * @param ck The commitment key which has a commit function, the srs and pippenger_runtime_state - * @param opening_pair OpeningPair = {r, v = p(r)} - * @param polynomial The witness whose opening proof needs to be computed + * @param opening_claim {p, (r, v = p(r))} where p is the witness polynomial whose opening proof needs to be + * computed * @param prover_transcript Prover transcript */ static void compute_opening_proof(std::shared_ptr ck, - const OpeningPair& opening_pair, - const Polynomial& polynomial, + const ProverOpeningClaim& opening_claim, const std::shared_ptr& prover_trancript) { - Polynomial quotient = 
polynomial; - quotient[0] -= opening_pair.evaluation; + Polynomial quotient = opening_claim.polynomial; + OpeningPair pair = opening_claim.opening_pair; + quotient[0] -= pair.evaluation; // Computes the coefficients for the quotient polynomial q(X) = (p(X) - v) / (X - r) through an FFT - quotient.factor_roots(opening_pair.challenge); + quotient.factor_roots(pair.challenge); auto quotient_commitment = ck->commit(quotient); // TODO(#479): for now we compute the KZG commitment directly to unify the KZG and IPA interfaces but in the // future we might need to adjust this to use the incoming alternative to work queue (i.e. variation of diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.test.cpp index 5271e92b890..5dd1fa892c4 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.test.cpp @@ -41,7 +41,7 @@ TYPED_TEST(KZGTest, single) auto prover_transcript = NativeTranscript::prover_init_empty(); - KZG::compute_opening_proof(this->ck(), opening_pair, witness, prover_transcript); + KZG::compute_opening_proof(this->ck(), { witness, opening_pair }, prover_transcript); auto verifier_transcript = NativeTranscript::verifier_init_empty(prover_transcript); auto pairing_points = KZG::reduce_verify(opening_claim, verifier_transcript); @@ -130,27 +130,23 @@ TYPED_TEST(KZGTest, GeminiShplonkKzgWithShift) const auto [gemini_opening_pairs, gemini_witnesses] = GeminiProver::compute_fold_polynomial_evaluations( mle_opening_point, std::move(gemini_polynomials), r_challenge); + std::vector> opening_claims; for (size_t l = 0; l < log_n; ++l) { std::string label = "Gemini:a_" + std::to_string(l); const auto& evaluation = gemini_opening_pairs[l + 1].evaluation; prover_transcript->send_to_verifier(label, evaluation); + opening_claims.emplace_back(gemini_witnesses[l], gemini_opening_pairs[l]); } + 
opening_claims.emplace_back(gemini_witnesses[log_n], gemini_opening_pairs[log_n]); // Shplonk prover output: // - opening pair: (z_challenge, 0) // - witness: polynomial Q - Q_z - const Fr nu_challenge = prover_transcript->template get_challenge("Shplonk:nu"); - auto batched_quotient_Q = - ShplonkProver::compute_batched_quotient(gemini_opening_pairs, gemini_witnesses, nu_challenge); - prover_transcript->send_to_verifier("Shplonk:Q", this->ck()->commit(batched_quotient_Q)); - - const Fr z_challenge = prover_transcript->template get_challenge("Shplonk:z"); - const auto [shplonk_opening_pair, shplonk_witness] = ShplonkProver::compute_partially_evaluated_batched_quotient( - gemini_opening_pairs, gemini_witnesses, std::move(batched_quotient_Q), nu_challenge, z_challenge); + const auto opening_claim = ShplonkProver::prove(this->ck(), opening_claims, prover_transcript); // KZG prover: // - Adds commitment [W] to transcript - KZG::compute_opening_proof(this->ck(), shplonk_opening_pair, shplonk_witness, prover_transcript); + KZG::compute_opening_proof(this->ck(), opening_claim, prover_transcript); // Run the full verifier PCS protocol with genuine opening claims (genuine commitment, genuine evaluation) @@ -166,7 +162,7 @@ TYPED_TEST(KZGTest, GeminiShplonkKzgWithShift) // Shplonk verifier claim: commitment [Q] - [Q_z], opening point (z_challenge, 0) const auto shplonk_verifier_claim = - ShplonkVerifier::reduce_verification(this->vk(), gemini_verifier_claim, verifier_transcript); + ShplonkVerifier::reduce_verification(this->vk()->get_g1_identity(), gemini_verifier_claim, verifier_transcript); // KZG verifier: // aggregates inputs [Q] - [Q_z] and [W] into an 'accumulator' (can perform pairing check on result) diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.hpp index 74b3b500e79..9eac7b4a48c 100644 --- 
a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.hpp @@ -20,24 +20,6 @@ */ namespace bb { -/** - * @brief Polynomial G(X) = Q(X) - ∑ₖ ẑₖ(r)⋅( Bₖ(X) − Tₖ(z) ), where Q(X) = ∑ₖ ( Bₖ(X) − Tₖ(X) ) / zₖ(X) - * - * @tparam Curve EC parameters - */ -template using OutputWitness = bb::Polynomial; - -/** - * @brief Prover output (claim=([G], r, 0), witness = G(X), proof = [Q]) - * that can be passed on to a univariate opening protocol. - * - * @tparam Curve EC parameters - */ -template struct ShplonkProverOutput { - OpeningPair opening_pair; // single opening pair (challenge, evaluation) - OutputWitness witness; // single polynomial G(X) -}; - /** * @brief Shplonk Prover * @@ -51,34 +33,31 @@ template class ShplonkProver_ { /** * @brief Compute batched quotient polynomial Q(X) = ∑ⱼ ρʲ ⋅ ( fⱼ(X) − vⱼ) / ( X − xⱼ ) * - * @param opening_pairs list of opening pairs (xⱼ, vⱼ) for a witness polynomial fⱼ(X), s.t. fⱼ(xⱼ) = vⱼ. - * @param witness_polynomials list of polynomials fⱼ(X). - * @param nu + * @param opening_claims list of prover opening claims {fⱼ(X), (xⱼ, vⱼ)} for a witness polynomial fⱼ(X), s.t. fⱼ(xⱼ) + * = vⱼ. 
+ * @param nu batching challenge * @return Polynomial Q(X) */ - static Polynomial compute_batched_quotient(std::span> opening_pairs, - std::span witness_polynomials, - const Fr& nu) + static Polynomial compute_batched_quotient(std::span> opening_claims, const Fr& nu) { // Find n, the maximum size of all polynomials fⱼ(X) size_t max_poly_size{ 0 }; - for (const auto& poly : witness_polynomials) { - max_poly_size = std::max(max_poly_size, poly.size()); + for (const auto& claim : opening_claims) { + max_poly_size = std::max(max_poly_size, claim.polynomial.size()); } // Q(X) = ∑ⱼ ρʲ ⋅ ( fⱼ(X) − vⱼ) / ( X − xⱼ ) Polynomial Q(max_poly_size); Polynomial tmp(max_poly_size); Fr current_nu = Fr::one(); - for (size_t j = 0; j < opening_pairs.size(); ++j) { - // (Cⱼ, xⱼ, vⱼ) - const auto& [challenge, evaluation] = opening_pairs[j]; + for (const auto& claim : opening_claims) { - // tmp = ρʲ ⋅ ( fⱼ(X) − vⱼ) / ( X − xⱼ ) - tmp = witness_polynomials[j]; - tmp[0] -= evaluation; - tmp.factor_roots(challenge); + // Compute individual claim quotient tmp = ( fⱼ(X) − vⱼ) / ( X − xⱼ ) + tmp = claim.polynomial; + tmp[0] -= claim.opening_pair.evaluation; + tmp.factor_roots(claim.opening_pair.challenge); + // Add the claim quotient to the batched quotient polynomial Q.add_scaled(tmp, current_nu); current_nu *= nu; } @@ -97,20 +76,19 @@ template class ShplonkProver_ { * @param z_challenge * @return Output{OpeningPair, Polynomial} */ - static ShplonkProverOutput compute_partially_evaluated_batched_quotient( - std::span> opening_pairs, - std::span witness_polynomials, - Polynomial&& batched_quotient_Q, + static ProverOpeningClaim compute_partially_evaluated_batched_quotient( + std::span> opening_claims, + Polynomial& batched_quotient_Q, const Fr& nu_challenge, const Fr& z_challenge) { - const size_t num_opening_pairs = opening_pairs.size(); + const size_t num_opening_claims = opening_claims.size(); // {ẑⱼ(r)}ⱼ , where ẑⱼ(r) = 1/zⱼ(r) = 1/(r - xⱼ) std::vector inverse_vanishing_evals; - 
inverse_vanishing_evals.reserve(num_opening_pairs); - for (const auto& pair : opening_pairs) { - inverse_vanishing_evals.emplace_back(z_challenge - pair.challenge); + inverse_vanishing_evals.reserve(num_opening_claims); + for (const auto& claim : opening_claims) { + inverse_vanishing_evals.emplace_back(z_challenge - claim.opening_pair.challenge); } Fr::batch_invert(inverse_vanishing_evals); @@ -121,24 +99,44 @@ template class ShplonkProver_ { // G₀ = ∑ⱼ ρʲ ⋅ vⱼ / ( r − xⱼ ) Fr current_nu = Fr::one(); Polynomial tmp(G.size()); - for (size_t j = 0; j < num_opening_pairs; ++j) { - // (Cⱼ, xⱼ, vⱼ) - const auto& [challenge, evaluation] = opening_pairs[j]; - + size_t idx = 0; + for (const auto& claim : opening_claims) { // tmp = ρʲ ⋅ ( fⱼ(X) − vⱼ) / ( r − xⱼ ) - tmp = witness_polynomials[j]; - tmp[0] -= evaluation; - Fr scaling_factor = current_nu * inverse_vanishing_evals[j]; // = ρʲ / ( r − xⱼ ) + tmp = claim.polynomial; + tmp[0] -= claim.opening_pair.evaluation; + Fr scaling_factor = current_nu * inverse_vanishing_evals[idx]; // = ρʲ / ( r − xⱼ ) // G -= ρʲ ⋅ ( fⱼ(X) − vⱼ) / ( r − xⱼ ) G.add_scaled(tmp, -scaling_factor); current_nu *= nu_challenge; + idx++; } // Return opening pair (z, 0) and polynomial G(X) = Q(X) - Q_z(X) - return { .opening_pair = { .challenge = z_challenge, .evaluation = Fr::zero() }, .witness = std::move(G) }; + return { .polynomial = G, .opening_pair = { .challenge = z_challenge, .evaluation = Fr::zero() } }; }; + + /** + * @brief Returns a batched opening claim equivalent to a set of opening claims consisting of polynomials, each + * opened at a single point. 
+ * + * @param commitment_key + * @param opening_claims + * @param transcript + * @return ProverOpeningClaim + */ + static ProverOpeningClaim prove(const std::shared_ptr>& commitment_key, + std::span> opening_claims, + auto& transcript) + { + const Fr nu = transcript->template get_challenge("Shplonk:nu"); + auto batched_quotient = compute_batched_quotient(opening_claims, nu); + auto batched_quotient_commitment = commitment_key->commit(batched_quotient); + transcript->send_to_verifier("Shplonk:Q", batched_quotient_commitment); + const Fr z = transcript->template get_challenge("Shplonk:z"); + return compute_partially_evaluated_batched_quotient(opening_claims, batched_quotient, nu, z); + } }; /** @@ -156,12 +154,12 @@ template class ShplonkVerifier_ { * @brief Recomputes the new claim commitment [G] given the proof and * the challenge r. No verification happens so this function always succeeds. * + * @param g1_identity the identity element for the Curve * @param claims list of opening claims (Cⱼ, xⱼ, vⱼ) for a witness polynomial fⱼ(X), s.t. fⱼ(xⱼ) = vⱼ. 
- * @param proof [Q(X)] = [ ∑ⱼ ρʲ ⋅ ( fⱼ(X) − vⱼ) / ( X − xⱼ ) ] * @param transcript * @return OpeningClaim */ - static OpeningClaim reduce_verification(std::shared_ptr vk, + static OpeningClaim reduce_verification(Commitment g1_identity, std::span> claims, auto& transcript) { @@ -227,7 +225,7 @@ template class ShplonkVerifier_ { scalars.emplace_back(-scaling_factor); } - commitments.emplace_back(GroupElement::one(builder)); + commitments.emplace_back(g1_identity); scalars.emplace_back(G_commitment_constant); // [G] += G₀⋅[1] = [G] + (∑ⱼ ρʲ ⋅ vⱼ / ( r − xⱼ ))⋅[1] @@ -264,7 +262,7 @@ template class ShplonkVerifier_ { } // [G] += G₀⋅[1] = [G] + (∑ⱼ ρʲ ⋅ vⱼ / ( r − xⱼ ))⋅[1] - G_commitment += vk->get_first_g1() * G_commitment_constant; + G_commitment += g1_identity * G_commitment_constant; } // Return opening pair (z, 0) and commitment [G] diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.test.cpp index 35976616526..299ee846a2a 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.test.cpp @@ -22,8 +22,8 @@ TYPED_TEST(ShplonkTest, ShplonkSimple) using ShplonkProver = ShplonkProver_; using ShplonkVerifier = ShplonkVerifier_; using Fr = typename TypeParam::ScalarField; - using Polynomial = typename bb::Polynomial; - using OpeningPair = bb::OpeningPair; + using ProverOpeningClaim = ProverOpeningClaim; + using OpeningClaim = OpeningClaim; const size_t n = 16; @@ -43,32 +43,23 @@ TYPED_TEST(ShplonkTest, ShplonkSimple) const auto commitment2 = this->commit(poly2); // Aggregate polynomials and their opening pairs - std::vector opening_pairs = { { r1, eval1 }, { r2, eval2 } }; - std::vector polynomials = { poly1.share(), poly2.share() }; + std::vector prover_opening_claims = { { poly1, { r1, eval1 } }, { poly2, { r2, eval2 } } }; // Execute the shplonk 
prover functionality - const Fr nu_challenge = prover_transcript->template get_challenge("Shplonk:nu"); - auto batched_quotient_Q = ShplonkProver::compute_batched_quotient(opening_pairs, polynomials, nu_challenge); - prover_transcript->send_to_verifier("Shplonk:Q", this->ck()->commit(batched_quotient_Q)); - - const Fr z_challenge = prover_transcript->template get_challenge("Shplonk:z"); - const auto [prover_opening_pair, shplonk_prover_witness] = - ShplonkProver::compute_partially_evaluated_batched_quotient( - opening_pairs, polynomials, std::move(batched_quotient_Q), nu_challenge, z_challenge); - + const auto batched_opening_claim = ShplonkProver::prove(this->ck(), prover_opening_claims, prover_transcript); // An intermediate check to confirm the opening of the shplonk prover witness Q - this->verify_opening_pair(prover_opening_pair, shplonk_prover_witness); + this->verify_opening_pair(batched_opening_claim.opening_pair, batched_opening_claim.polynomial); // Aggregate polynomial commitments and their opening pairs - std::vector opening_claims; - opening_claims.emplace_back(OpeningClaim{ opening_pairs[0], commitment1 }); - opening_claims.emplace_back(OpeningClaim{ opening_pairs[1], commitment2 }); + std::vector verifier_opening_claims = { { { r1, eval1 }, commitment1 }, + { { r2, eval2 }, commitment2 } }; auto verifier_transcript = NativeTranscript::verifier_init_empty(prover_transcript); // Execute the shplonk verifier functionality - const auto verifier_claim = ShplonkVerifier::reduce_verification(this->vk(), opening_claims, verifier_transcript); + const auto batched_verifier_claim = ShplonkVerifier::reduce_verification( + this->vk()->get_g1_identity(), verifier_opening_claims, verifier_transcript); - this->verify_opening_claim(verifier_claim, shplonk_prover_witness); + this->verify_opening_claim(batched_verifier_claim, batched_opening_claim.polynomial); } } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp 
b/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp index 42fac7a1fab..23fb76a9502 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp @@ -40,7 +40,7 @@ template <> class VerifierCommitmentKey { srs = srs::get_crs_factory()->get_verifier_crs(); }; - Commitment get_first_g1() { return srs->get_first_g1(); } + Commitment get_g1_identity() { return srs->get_g1_identity(); } /** * @brief verifies a pairing equation over 2 points using the verifier SRS @@ -93,7 +93,7 @@ template <> class VerifierCommitmentKey { srs = srs::get_crs_factory()->get_verifier_crs(num_points); } - Commitment get_first_g1() { return srs->get_first_g1(); } + Commitment get_g1_identity() { return srs->get_g1_identity(); } Commitment* get_monomial_points() { return srs->get_monomial_points(); } diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp index fe8947cbf01..f6a77ba302c 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp @@ -31,13 +31,13 @@ template inline std::vector powers_of_challenge(const FF challeng /** * @brief Prover for ZeroMorph multilinear PCS * - * @tparam PCS - The univariate PCS used inside ZeroMorph as a building block + * @tparam Curve - The curve used for arithmetising ZeroMorph */ -template class ZeroMorphProver_ { - using Curve = typename PCS::Curve; +template class ZeroMorphProver_ { using FF = typename Curve::ScalarField; using Commitment = typename Curve::AffineElement; using Polynomial = bb::Polynomial; + using OpeningClaim = ProverOpeningClaim; // TODO(#742): Set this N_max to be the number of G1 elements in the mocked zeromorph SRS once it's in place. 
// (Then, eventually, set it based on the real SRS). For now we set it to be larger then the Client IVC recursive @@ -65,7 +65,8 @@ template class ZeroMorphProver_ { * @param u_challenge Multivariate challenge u = (u_0, ..., u_{d-1}) * @return std::vector The quotients q_k */ - static std::vector compute_multilinear_quotients(Polynomial polynomial, std::span u_challenge) + static std::vector compute_multilinear_quotients(Polynomial& polynomial, + std::span u_challenge) { size_t log_N = numeric::get_msb(polynomial.size()); // The size of the multilinear challenge must equal the log of the polynomial size @@ -310,8 +311,8 @@ template class ZeroMorphProver_ { } /** - * @brief Prove a set of multilinear evaluation claims for unshifted polynomials f_i and to-be-shifted - * polynomials g_i + * @brief * @brief Returns a univariate opening claim equivalent to a set of multilinear evaluation claims for + * unshifted polynomials f_i and to-be-shifted polynomials g_i to be subsequently proved with a univariate PCS * * @param f_polynomials Unshifted polynomials * @param g_polynomials To-be-shifted polynomials (of which the shifts h_i were evaluated by sumcheck) @@ -319,17 +320,19 @@ template class ZeroMorphProver_ { * @param multilinear_challenge Multilinear challenge point u * @param commitment_key * @param transcript + * + * @todo https://github.com/AztecProtocol/barretenberg/issues/1030: document concatenation trick */ - static void prove(RefSpan f_polynomials, - RefSpan g_polynomials, - RefSpan f_evaluations, - RefSpan g_shift_evaluations, - std::span multilinear_challenge, - const std::shared_ptr>& commitment_key, - const std::shared_ptr& transcript, - RefSpan concatenated_polynomials = {}, - RefSpan concatenated_evaluations = {}, - const std::vector>& concatenation_groups = {}) + static OpeningClaim prove(RefSpan f_polynomials, + RefSpan g_polynomials, + RefSpan f_evaluations, + RefSpan g_shift_evaluations, + std::span multilinear_challenge, + const std::shared_ptr>& 
commitment_key, + const std::shared_ptr& transcript, + RefSpan concatenated_polynomials = {}, + RefSpan concatenated_evaluations = {}, + const std::vector>& concatenation_groups = {}) { // Generate batching challenge \rho and powers 1,...,\rho^{m-1} const FF rho = transcript->template get_challenge("rho"); @@ -428,22 +431,20 @@ template class ZeroMorphProver_ { // Compute batched degree-check and ZM-identity quotient polynomial pi auto pi_polynomial = compute_batched_evaluation_and_degree_check_polynomial(zeta_x, Z_x, z_challenge); - // Compute opening proof for x_challenge using the underlying univariate PCS - PCS::compute_opening_proof( - commitment_key, { .challenge = x_challenge, .evaluation = FF(0) }, pi_polynomial, transcript); + + // Returns the claim used to generate an opening proof for the univariate polynomial at x_challenge + return { pi_polynomial, { .challenge = x_challenge, .evaluation = FF(0) } }; } }; /** * @brief Verifier for ZeroMorph multilinear PCS * - * @tparam Curve + * @tparam Curve - The Curve used to arithmetise ZeroMorph */ -template class ZeroMorphVerifier_ { - using Curve = typename PCS::Curve; +template class ZeroMorphVerifier_ { using FF = typename Curve::ScalarField; using Commitment = typename Curve::AffineElement; - using VerifierAccumulator = typename PCS::VerifierAccumulator; public: /** @@ -458,7 +459,10 @@ template class ZeroMorphVerifier_ { * @param x_challenge * @return Commitment */ - static Commitment compute_C_zeta_x(Commitment C_q, std::vector& C_q_k, FF y_challenge, FF x_challenge) + static Commitment compute_C_zeta_x(const Commitment& C_q, + std::vector& C_q_k, + FF y_challenge, + FF x_challenge) { size_t log_N = C_q_k.size(); size_t N = 1 << log_N; @@ -510,7 +514,7 @@ template class ZeroMorphVerifier_ { * * @note The concatenation term arises from an implementation detail in the Translator and is not part of the * conventional ZM protocol - * @param first_g1 first element in the SRS + * @param g1_identity first element 
in the SRS * @param f_commitments Commitments to unshifted polynomials [f_i] * @param g_commitments Commitments to to-be-shifted polynomials [g_i] * @param C_q_k Commitments to q_k @@ -521,7 +525,7 @@ template class ZeroMorphVerifier_ { * @param concatenation_groups_commitments * @return Commitment */ - static Commitment compute_C_Z_x(Commitment first_g1, + static Commitment compute_C_Z_x(const Commitment& g1_identity, RefSpan f_commitments, RefSpan g_commitments, std::span C_q_k, @@ -544,7 +548,7 @@ template class ZeroMorphVerifier_ { // Add contribution: -v * x * \Phi_n(x) * [1]_1 scalars.emplace_back(FF(-1) * batched_evaluation * x_challenge * phi_n_x); - commitments.emplace_back(first_g1); + commitments.emplace_back(g1_identity); // Add contribution: x * \sum_{i=0}^{m-1} \rho^i*[f_i] auto rho_pow = FF(1); @@ -625,30 +629,24 @@ template class ZeroMorphVerifier_ { } /** - * @brief Compute the univariate opening claim used in the last step of Zeromorph to verify the univariate PCS - * evaluation. 
+ * @brief Return the univariate opening claim used to verify, in a subsequent PCS, a set of multilinear evaluation + * claims for unshifted polynomials f_i and to-be-shifted polynomials g_i * - * @param unshifted_commitments - * @param to_be_shifted_commitments - * @param unshifted_evaluations - * @param shifted_evaluations - * @param multivariate_challenge - * @param first_g1 + * @param commitments Commitments to polynomials f_i and g_i (unshifted and to-be-shifted) + * @param claimed_evaluations Claimed evaluations v_i = f_i(u) and w_i = h_i(u) = g_i_shifted(u) + * @param multivariate_challenge Challenge point u * @param transcript - * @param concatenation_group_commitments - * @param concatenated_evaluations - * @return OpeningClaim + * @return VerifierAccumulator Inputs to the final PCS verification check that will be accumulated */ - static OpeningClaim compute_univariate_evaluation_opening_claim( - RefSpan unshifted_commitments, - RefSpan to_be_shifted_commitments, - RefSpan unshifted_evaluations, - RefSpan shifted_evaluations, - std::span multivariate_challenge, - Commitment first_g1, - auto& transcript, - const std::vector>& concatenation_group_commitments = {}, - RefSpan concatenated_evaluations = {}) + static OpeningClaim verify(RefSpan unshifted_commitments, + RefSpan to_be_shifted_commitments, + RefSpan unshifted_evaluations, + RefSpan shifted_evaluations, + std::span multivariate_challenge, + const Commitment& g1_identity, + auto& transcript, + const std::vector>& concatenation_group_commitments = {}, + RefSpan concatenated_evaluations = {}) { size_t log_N = multivariate_challenge.size(); FF rho = transcript->template get_challenge("rho"); @@ -689,7 +687,7 @@ template class ZeroMorphVerifier_ { auto C_zeta_x = compute_C_zeta_x(C_q, C_q_k, y_challenge, x_challenge); // Compute commitment C_{Z_x} - Commitment C_Z_x = compute_C_Z_x(first_g1, + Commitment C_Z_x = compute_C_Z_x(g1_identity, unshifted_commitments, to_be_shifted_commitments, C_q_k, @@ 
-714,82 +712,6 @@ template class ZeroMorphVerifier_ { return { .opening_pair = { .challenge = x_challenge, .evaluation = FF(0) }, .commitment = C_zeta_Z }; } - - /** - * @brief Verify a set of multilinear evaluation claims for unshifted polynomials f_i and to-be-shifted - * polynomials g_i - * - * @param commitments Commitments to polynomials f_i and g_i (unshifted and to-be-shifted) - * @param claimed_evaluations Claimed evaluations v_i = f_i(u) and w_i = h_i(u) = g_i_shifted(u) - * @param multivariate_challenge Challenge point u - * @param transcript - * @return VerifierAccumulator Inputs to the final PCS verification check that will be accumulated - */ - static VerifierAccumulator verify(RefSpan unshifted_commitments, - RefSpan to_be_shifted_commitments, - RefSpan unshifted_evaluations, - RefSpan shifted_evaluations, - std::span multivariate_challenge, - auto& transcript, - const std::vector>& concatenation_group_commitments = {}, - RefSpan concatenated_evaluations = {}) - { - Commitment first_g1; - - if constexpr (Curve::is_stdlib_type) { - auto builder = multivariate_challenge[0].get_context(); - first_g1 = Commitment::one(builder); - } else { - first_g1 = Commitment::one(); - } - auto opening_claim = compute_univariate_evaluation_opening_claim(unshifted_commitments, - to_be_shifted_commitments, - unshifted_evaluations, - shifted_evaluations, - multivariate_challenge, - first_g1, - transcript, - concatenation_group_commitments, - concatenated_evaluations); - return PCS::reduce_verify(opening_claim, transcript); - } - - /** - * @brief Verify a set of multilinear evaluation claims for unshifted polynomials f_i and to-be-shifted - * polynomials g_i. - * - * @details Identical purpose as the function above but used when the verification of the PCS evaluation protocol - * requires the verification key prior to the last step that is accumulated. 
- * - * @param commitments Commitments to polynomials f_i and g_i (unshifted and to-be-shifted) - * @param claimed_evaluations Claimed evaluations v_i = f_i(u) and w_i = h_i(u) = g_i_shifted(u) - * @param multivariate_challenge Challenge point u - * @param transcript - * @return VerifierAccumulator Inputs to the final PCS verification check that will be accumulated - */ - static VerifierAccumulator verify(RefSpan unshifted_commitments, - RefSpan to_be_shifted_commitments, - RefSpan unshifted_evaluations, - RefSpan shifted_evaluations, - std::span multivariate_challenge, - const std::shared_ptr>& vk, - auto& transcript, - const std::vector>& concatenation_group_commitments = {}, - RefSpan concatenated_evaluations = {}) - { - Commitment first_g1 = vk->get_first_g1(); - - auto opening_claim = compute_univariate_evaluation_opening_claim(unshifted_commitments, - to_be_shifted_commitments, - unshifted_evaluations, - shifted_evaluations, - multivariate_challenge, - first_g1, - transcript, - concatenation_group_commitments, - concatenated_evaluations); - return PCS::reduce_verify(vk, opening_claim, transcript); - } }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.test.cpp index 3fcb56aa3af..122fcb1187f 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.test.cpp @@ -16,10 +16,42 @@ template class ZeroMorphTest : public CommitmentTest; - using ZeroMorphVerifier = ZeroMorphVerifier_; + using ZeroMorphProver = ZeroMorphProver_; + using ZeroMorphVerifier = ZeroMorphVerifier_; - // Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficent formula + using TupleOfConcatenationInputs = std::tuple>, + std::vector, + std::vector, + std::vector>>; + + /** + * @brief Data structure for encapsulating a set of 
multilinear polynomials used to test the protocol, their + * evaluations at the point that we want to create an evaluation proof for and + * their commitments. Alternatively, the polynomials and commitments can be the ones to-be-shifted, while the + * evaluations are for their shifted version. + * + */ + struct PolynomialsEvaluationsCommitments { + std::vector polynomials; + std::vector evaluations; + std::vector commitments; + }; + + /** + * @brief Data structure used to test the protocol's alternative for Goblin Translator. + * + */ + struct ConcatenationInputs { + std::vector> concatenation_groups; + std::vector concatenated_polynomials; + std::vector c_evaluations; + std::vector> concatenation_groups_commitments; + }; + + /** + * @brief Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficent formula + * + */ Fr Phi(Fr challenge, size_t subscript) { size_t length = 1 << subscript; @@ -37,152 +69,91 @@ template class ZeroMorphTest : public CommitmentTest u_challenge = this->random_evaluation_point(log_N); - // Construct some random multilinear polynomials f_i and their evaluations v_i = f_i(u) - std::vector f_polynomials; // unshifted polynomials - std::vector v_evaluations; - for (size_t i = 0; i < NUM_UNSHIFTED; ++i) { - f_polynomials.emplace_back(this->random_polynomial(N)); - f_polynomials[i][0] = Fr(0); // ensure f is "shiftable" - v_evaluations.emplace_back(f_polynomials[i].evaluate_mle(u_challenge)); - } - - // Construct some "shifted" multilinear polynomials h_i as the left-shift-by-1 of f_i - std::vector g_polynomials; // to-be-shifted polynomials - std::vector h_polynomials; // shifts of the to-be-shifted polynomials - std::vector w_evaluations; - for (size_t i = 0; i < NUM_SHIFTED; ++i) { - g_polynomials.emplace_back(f_polynomials[i]); - h_polynomials.emplace_back(g_polynomials[i].shifted()); - w_evaluations.emplace_back(h_polynomials[i].evaluate_mle(u_challenge)); - // ASSERT_EQ(w_evaluations[i], g_polynomials[i].evaluate_mle(u_challenge, /* 
shift = */ true)); - } - - // Compute commitments [f_i] - std::vector f_commitments; - for (size_t i = 0; i < NUM_UNSHIFTED; ++i) { - f_commitments.emplace_back(this->commit(f_polynomials[i])); - } - - // Construct container of commitments of the "to-be-shifted" polynomials [g_i] (= [f_i]) - std::vector g_commitments; - for (size_t i = 0; i < NUM_SHIFTED; ++i) { - g_commitments.emplace_back(f_commitments[i]); - } - - // Initialize an empty NativeTranscript - auto prover_transcript = NativeTranscript::prover_init_empty(); - - // Execute Prover protocol - ZeroMorphProver::prove(RefVector(f_polynomials), - RefVector(g_polynomials), - RefVector(v_evaluations), - RefVector(w_evaluations), - u_challenge, - this->commitment_key, - prover_transcript); + // Construct some random multilinear polynomials f_i, their commitments and their evaluations v_i = f_i(u) + PolynomialsEvaluationsCommitments unshifted_input = + polynomials_comms_and_evaluations(u_challenge, NUM_UNSHIFTED); - auto verifier_transcript = NativeTranscript::verifier_init_empty(prover_transcript); + // Construct polynomials and commitments from f_i that are to be shifted and compute their shifted evaluations + PolynomialsEvaluationsCommitments shifted_input = + to_be_shifted_polynomials_and_comms_and_shifted_evaluations(unshifted_input, u_challenge, NUM_SHIFTED); - VerifierAccumulator result; bool verified = false; - if constexpr (std::same_as>) { - // Execute Verifier protocol without the need for vk prior the final check - result = ZeroMorphVerifier::verify(RefVector(f_commitments), // unshifted - RefVector(g_commitments), // to-be-shifted - RefVector(v_evaluations), // unshifted - RefVector(w_evaluations), // shifted - u_challenge, - verifier_transcript); - verified = this->vk()->pairing_check(result[0], result[1]); + if (NUM_CONCATENATED == 0) { + verified = prove_and_verify(unshifted_input, shifted_input, u_challenge); } else { - // Execute Verifier protocol with vk - result = 
ZeroMorphVerifier::verify(RefVector(f_commitments), // unshifted - RefVector(g_commitments), // to-be-shifted - RefVector(v_evaluations), // unshifted - RefVector(w_evaluations), // shifted - u_challenge, - this->vk(), - verifier_transcript); - verified = result; + verified = + prove_and_verify_with_concatenation(unshifted_input, shifted_input, u_challenge, NUM_CONCATENATED); } - // The prover and verifier manifests should agree - EXPECT_EQ(prover_transcript->get_manifest(), verifier_transcript->get_manifest()); - return verified; } -}; - -template class ZeroMorphWithConcatenationTest : public CommitmentTest { - public: - using Curve = typename PCS::Curve; - using Fr = typename Curve::ScalarField; - using Polynomial = bb::Polynomial; - using Commitment = typename Curve::AffineElement; - using GroupElement = typename Curve::Element; - using VerifierAccumulator = typename PCS::VerifierAccumulator; - using ZeroMorphProver = ZeroMorphProver_; - using ZeroMorphVerifier = ZeroMorphVerifier_; - - // Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficent formula - Fr Phi(Fr challenge, size_t subscript) - { - size_t length = 1 << subscript; - auto result = Fr(0); - for (size_t idx = 0; idx < length; ++idx) { - result += challenge.pow(idx); - } - return result; - } /** - * @brief Construct and verify ZeroMorph proof of batched multilinear evaluation with shifts and concatenation - * @details The goal is to construct and verify a single batched multilinear evaluation proof for m polynomials f_i, - * l polynomials h_i and o groups of polynomials where each polynomial is concatenated from several shorter - * polynomials. It is assumed that the h_i are shifts of polynomials g_i (the "to-be-shifted" polynomials), which - * are a subset of the f_i. This is what is encountered in practice. 
We accomplish this using evaluations of h_i but - * commitments to only their unshifted counterparts g_i (which we get for "free" since commitments [g_i] are - * contained in the set of commitments [f_i]). - * + * @brief Generate some random multilinear polynomials and compute their evaluation at the set challenge as well as + * their commitments, returned as a tuple to be used in the subsequent protocol. */ - bool execute_zeromorph_protocol(size_t NUM_UNSHIFTED, size_t NUM_SHIFTED, size_t NUM_CONCATENATED) + PolynomialsEvaluationsCommitments polynomials_comms_and_evaluations(std::vector u_challenge, + size_t NUM_UNSHIFTED) { - bool verified = false; - size_t concatenation_index = 2; - size_t N = 64; - size_t MINI_CIRCUIT_N = N / concatenation_index; - size_t log_N = numeric::get_msb(N); - - auto u_challenge = this->random_evaluation_point(log_N); - // Construct some random multilinear polynomials f_i and their evaluations v_i = f_i(u) std::vector f_polynomials; // unshifted polynomials std::vector v_evaluations; + std::vector f_commitments; + size_t poly_length = 1 << u_challenge.size(); for (size_t i = 0; i < NUM_UNSHIFTED; ++i) { - f_polynomials.emplace_back(this->random_polynomial(N)); + f_polynomials.emplace_back(this->random_polynomial(poly_length)); f_polynomials[i][0] = Fr(0); // ensure f is "shiftable" v_evaluations.emplace_back(f_polynomials[i].evaluate_mle(u_challenge)); + f_commitments.emplace_back(this->commit(f_polynomials[i])); } + return { f_polynomials, v_evaluations, f_commitments }; + } + + /** + * @brief Generate shifts of polynomials and compute their evaluation at the + * set challenge as well as their commitments, returned as a tuple to be used in the subsequent protocol. 
+ */ + PolynomialsEvaluationsCommitments to_be_shifted_polynomials_and_comms_and_shifted_evaluations( + PolynomialsEvaluationsCommitments unshifted_inputs, std::vector u_challenge, size_t NUM_SHIFTED) + { + std::vector f_polynomials = unshifted_inputs.polynomials; + std::vector f_commitments = unshifted_inputs.commitments; - // Construct some "shifted" multilinear polynomials h_i as the left-shift-by-1 of f_i std::vector g_polynomials; // to-be-shifted polynomials std::vector h_polynomials; // shifts of the to-be-shifted polynomials - std::vector w_evaluations; + std::vector w_evaluations; // shifted evaluations + std::vector g_commitments; + + // For testing purposes, pick the first NUM_SHIFTED polynomials to be shifted for (size_t i = 0; i < NUM_SHIFTED; ++i) { g_polynomials.emplace_back(f_polynomials[i]); h_polynomials.emplace_back(g_polynomials[i].shifted()); w_evaluations.emplace_back(h_polynomials[i].evaluate_mle(u_challenge)); - // ASSERT_EQ(w_evaluations[i], g_polynomials[i].evaluate_mle(u_challenge, /* shift = */ true)); + g_commitments.emplace_back(f_commitments[i]); } + return { g_polynomials, w_evaluations, g_commitments }; + } + + /** + * @brief Generate the tuple of concatenation inputs used to test Zeromorph special functionality that avoids high + * degrees in the Goblin Translator. 
+ */ + ConcatenationInputs concatenation_inputs(std::vector u_challenge, size_t NUM_CONCATENATED) + { + + size_t concatenation_index = 2; + size_t N = 1 << u_challenge.size(); + size_t MINI_CIRCUIT_N = N / concatenation_index; // Polynomials "chunks" that are concatenated in the PCS std::vector> concatenation_groups; @@ -221,18 +192,6 @@ template class ZeroMorphWithConcatenationTest : public CommitmentTes c_evaluations.emplace_back(concatenated_polynomial.evaluate_mle(u_challenge)); } - // Compute commitments [f_i] - std::vector f_commitments; - for (size_t i = 0; i < NUM_UNSHIFTED; ++i) { - f_commitments.emplace_back(this->commit(f_polynomials[i])); - } - - // Construct container of commitments of the "to-be-shifted" polynomials [g_i] (= [f_i]) - std::vector g_commitments; - for (size_t i = 0; i < NUM_SHIFTED; ++i) { - g_commitments.emplace_back(f_commitments[i]); - } - // Compute commitments of all polynomial chunks std::vector> concatenation_groups_commitments; for (size_t i = 0; i < NUM_CONCATENATED; ++i) { @@ -243,46 +202,100 @@ template class ZeroMorphWithConcatenationTest : public CommitmentTes concatenation_groups_commitments.emplace_back(concatenation_group_commitment); } - // Initialize an empty NativeTranscript + return { concatenation_groups, concatenated_polynomials, c_evaluations, concatenation_groups_commitments }; + }; + + bool prove_and_verify(PolynomialsEvaluationsCommitments& unshifted, + PolynomialsEvaluationsCommitments& shifted, + std::vector u_challenge) + { auto prover_transcript = NativeTranscript::prover_init_empty(); // Execute Prover protocol - ZeroMorphProver::prove(RefVector(f_polynomials), // unshifted - RefVector(g_polynomials), // to-be-shifted - RefVector(v_evaluations), // unshifted - RefVector(w_evaluations), // shifted - u_challenge, - this->commitment_key, - prover_transcript, - RefVector(concatenated_polynomials), - RefVector(c_evaluations), - to_vector_of_ref_vectors(concatenation_groups)); + auto prover_opening_claim = 
ZeroMorphProver::prove(RefVector(unshifted.polynomials), // unshifted + RefVector(shifted.polynomials), // to-be shifted + RefVector(unshifted.evaluations), // unshifted + RefVector(shifted.evaluations), // shifted + u_challenge, + this->commitment_key, + prover_transcript); + + PCS::compute_opening_proof(this->commitment_key, prover_opening_claim, prover_transcript); auto verifier_transcript = NativeTranscript::verifier_init_empty(prover_transcript); + + auto verifier_opening_claim = ZeroMorphVerifier::verify(RefVector(unshifted.commitments), // unshifted + RefVector(shifted.commitments), // to-be-shifted + RefVector(unshifted.evaluations), // unshifted + RefVector(shifted.evaluations), // shifted + u_challenge, + this->vk()->get_g1_identity(), + verifier_transcript); VerifierAccumulator result; + + bool verified = false; if constexpr (std::same_as>) { - // Execute Verifier protocol without the need for vk prior the final check - result = ZeroMorphVerifier::verify(RefVector(f_commitments), // unshifted - RefVector(g_commitments), // to-be-shifted - RefVector(v_evaluations), // unshifted - RefVector(w_evaluations), // shifted - u_challenge, - verifier_transcript, - to_vector_of_ref_vectors(concatenation_groups_commitments), - RefVector(c_evaluations)); + + result = PCS::reduce_verify(verifier_opening_claim, verifier_transcript); verified = this->vk()->pairing_check(result[0], result[1]); + } else { + // Execute Verifier protocol with vk + result = PCS::reduce_verify(this->vk(), verifier_opening_claim, verifier_transcript); + verified = result; + } + + // The prover and verifier manifests should agree + EXPECT_EQ(prover_transcript->get_manifest(), verifier_transcript->get_manifest()); + return verified; + }; + + bool prove_and_verify_with_concatenation(PolynomialsEvaluationsCommitments& unshifted, + PolynomialsEvaluationsCommitments& shifted, + std::vector u_challenge, + size_t NUM_CONCATENATED) + { + ConcatenationInputs concatenation = 
concatenation_inputs(u_challenge, NUM_CONCATENATED); + + auto prover_transcript = NativeTranscript::prover_init_empty(); + + // Execute Prover protocol + auto prover_opening_claim = + ZeroMorphProver::prove(RefVector(unshifted.polynomials), // unshifted + RefVector(shifted.polynomials), // to-be-shifted + RefVector(unshifted.evaluations), // unshifted + RefVector(shifted.evaluations), // shifted + u_challenge, + this->commitment_key, + prover_transcript, + RefVector(concatenation.concatenated_polynomials), + RefVector(concatenation.c_evaluations), + to_vector_of_ref_vectors(concatenation.concatenation_groups)); + PCS::compute_opening_proof(this->commitment_key, prover_opening_claim, prover_transcript); + + auto verifier_transcript = NativeTranscript::verifier_init_empty(prover_transcript); + + auto verifier_opening_claim = + ZeroMorphVerifier::verify(RefVector(unshifted.commitments), // unshifted + RefVector(shifted.commitments), // to-be-shifted + RefVector(unshifted.evaluations), // unshifted + RefVector(shifted.evaluations), // shifted + u_challenge, + this->vk()->get_g1_identity(), + verifier_transcript, + to_vector_of_ref_vectors(concatenation.concatenation_groups_commitments), + RefVector(concatenation.c_evaluations)); + VerifierAccumulator result; + + bool verified = false; + if constexpr (std::same_as>) { + + result = PCS::reduce_verify(verifier_opening_claim, verifier_transcript); + verified = this->vk()->pairing_check(result[0], result[1]); } else { // Execute Verifier protocol with vk - result = ZeroMorphVerifier::verify(RefVector(f_commitments), // unshifted - RefVector(g_commitments), // to-be-shifted - RefVector(v_evaluations), // unshifted - RefVector(w_evaluations), // shifted - u_challenge, - this->vk(), - verifier_transcript, - to_vector_of_ref_vectors(concatenation_groups_commitments), - RefVector(c_evaluations)); + result = PCS::reduce_verify(this->vk(), verifier_opening_claim, verifier_transcript); + verified = result; } @@ -294,7 +307,6 @@ 
template class ZeroMorphWithConcatenationTest : public CommitmentTes using PCSTypes = ::testing::Types, IPA>; TYPED_TEST_SUITE(ZeroMorphTest, PCSTypes); -TYPED_TEST_SUITE(ZeroMorphWithConcatenationTest, PCSTypes); /** * @brief Test method for computing q_k given multilinear f @@ -307,8 +319,8 @@ TYPED_TEST_SUITE(ZeroMorphWithConcatenationTest, PCSTypes); TYPED_TEST(ZeroMorphTest, QuotientConstruction) { // Define some useful type aliases - using ZeroMorphProver = ZeroMorphProver_; using Curve = typename TypeParam::Curve; + using ZeroMorphProver = ZeroMorphProver_; using Fr = typename Curve::ScalarField; using Polynomial = bb::Polynomial; @@ -355,8 +367,8 @@ TYPED_TEST(ZeroMorphTest, QuotientConstruction) TYPED_TEST(ZeroMorphTest, BatchedLiftedDegreeQuotient) { // Define some useful type aliases - using ZeroMorphProver = ZeroMorphProver_; using Curve = typename TypeParam::Curve; + using ZeroMorphProver = ZeroMorphProver_; using Fr = typename Curve::ScalarField; using Polynomial = bb::Polynomial; @@ -400,8 +412,8 @@ TYPED_TEST(ZeroMorphTest, BatchedLiftedDegreeQuotient) TYPED_TEST(ZeroMorphTest, PartiallyEvaluatedQuotientZeta) { // Define some useful type aliases - using ZeroMorphProver = ZeroMorphProver_; using Curve = typename TypeParam::Curve; + using ZeroMorphProver = ZeroMorphProver_; using Fr = typename Curve::ScalarField; using Polynomial = bb::Polynomial; @@ -484,8 +496,8 @@ TYPED_TEST(ZeroMorphTest, PhiEvaluation) TYPED_TEST(ZeroMorphTest, PartiallyEvaluatedQuotientZ) { // Define some useful type aliases - using ZeroMorphProver = ZeroMorphProver_; using Curve = typename TypeParam::Curve; + using ZeroMorphProver = ZeroMorphProver_; using Fr = typename Curve::ScalarField; using Polynomial = bb::Polynomial; @@ -565,7 +577,7 @@ TYPED_TEST(ZeroMorphTest, ProveAndVerifyBatchedWithShifts) * @brief Test full Prover/Verifier protocol for proving single multilinear evaluation * */ -TYPED_TEST(ZeroMorphWithConcatenationTest, ProveAndVerify) +TYPED_TEST(ZeroMorphTest, 
ProveAndVerifyWithConcatenation) { size_t num_unshifted = 1; size_t num_shifted = 0; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_composer.test.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_composer.test.cpp index cd2db6b7124..c3ae908ff71 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_composer.test.cpp @@ -15,7 +15,7 @@ using namespace bb; -class ECCVMComposerTests : public ::testing::Test { +class ECCVMTests : public ::testing::Test { protected: void SetUp() override { srs::init_grumpkin_crs_factory("../srs_db/grumpkin"); }; }; @@ -60,7 +60,7 @@ ECCVMCircuitBuilder generate_circuit(numeric::RNG* engine = nullptr) return builder; } -TEST_F(ECCVMComposerTests, BaseCase) +TEST_F(ECCVMTests, BaseCase) { ECCVMCircuitBuilder builder = generate_circuit(&engine); ECCVMProver prover(builder); @@ -71,7 +71,7 @@ TEST_F(ECCVMComposerTests, BaseCase) ASSERT_TRUE(verified); } -TEST_F(ECCVMComposerTests, EqFails) +TEST_F(ECCVMTests, EqFails) { auto builder = generate_circuit(&engine); // Tamper with the eq op such that the expected value is incorect diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index 821de070780..4a87e300d09 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -911,10 +911,6 @@ class ECCVMFlavor { std::array sumcheck_evaluations; std::vector zm_cq_comms; Commitment zm_cq_comm; - uint32_t ipa_poly_degree; - std::vector ipa_l_comms; - std::vector ipa_r_comms; - FF ipa_a_0_eval; Commitment translation_hack_comm; FF translation_eval_op; FF translation_eval_px; @@ -922,10 +918,11 @@ class ECCVMFlavor { FF translation_eval_z1; FF translation_eval_z2; FF hack_eval; - uint32_t translation_ipa_poly_degree; - std::vector translation_ipa_l_comms; - std::vector translation_ipa_r_comms; - FF 
translation_ipa_a_0_eval; + Commitment shplonk_q_comm; + uint32_t ipa_poly_degree; + std::vector ipa_l_comms; + std::vector ipa_r_comms; + FF ipa_a_0_eval; Transcript() = default; @@ -1129,17 +1126,6 @@ class ECCVMFlavor { } zm_cq_comm = NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); - ipa_poly_degree = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, - num_frs_read); - auto log_poly_degree = static_cast(numeric::get_msb(ipa_poly_degree)); - for (size_t i = 0; i < log_poly_degree; ++i) { - ipa_l_comms.emplace_back(NativeTranscript::template deserialize_from_buffer( - NativeTranscript::proof_data, num_frs_read)); - ipa_r_comms.emplace_back(NativeTranscript::template deserialize_from_buffer( - NativeTranscript::proof_data, num_frs_read)); - } - ipa_a_0_eval = - NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); translation_hack_comm = NativeTranscript::template deserialize_from_buffer( NativeTranscript::proof_data, num_frs_read); translation_eval_op = @@ -1155,17 +1141,20 @@ class ECCVMFlavor { hack_eval = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); - translation_ipa_poly_degree = NativeTranscript::template deserialize_from_buffer( - NativeTranscript::proof_data, num_frs_read); + shplonk_q_comm = NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); + ipa_poly_degree = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, + num_frs_read); + + auto log_poly_degree = static_cast(numeric::get_msb(ipa_poly_degree)); for (size_t i = 0; i < log_poly_degree; ++i) { - translation_ipa_l_comms.emplace_back(NativeTranscript::template deserialize_from_buffer( + ipa_l_comms.emplace_back(NativeTranscript::template deserialize_from_buffer( NativeTranscript::proof_data, num_frs_read)); - translation_ipa_r_comms.emplace_back(NativeTranscript::template deserialize_from_buffer( 
+ ipa_r_comms.emplace_back(NativeTranscript::template deserialize_from_buffer( NativeTranscript::proof_data, num_frs_read)); } - translation_ipa_a_0_eval = + ipa_a_0_eval = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); } @@ -1284,15 +1273,6 @@ class ECCVMFlavor { } NativeTranscript::template serialize_to_buffer(zm_cq_comm, NativeTranscript::proof_data); - NativeTranscript::template serialize_to_buffer(ipa_poly_degree, NativeTranscript::proof_data); - - auto log_poly_degree = static_cast(numeric::get_msb(ipa_poly_degree)); - for (size_t i = 0; i < log_poly_degree; ++i) { - NativeTranscript::template serialize_to_buffer(ipa_l_comms[i], NativeTranscript::proof_data); - NativeTranscript::template serialize_to_buffer(ipa_r_comms[i], NativeTranscript::proof_data); - } - - NativeTranscript::template serialize_to_buffer(ipa_a_0_eval, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(translation_hack_comm, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(translation_eval_op, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(translation_eval_px, NativeTranscript::proof_data); @@ -1301,16 +1281,16 @@ class ECCVMFlavor { NativeTranscript::template serialize_to_buffer(translation_eval_z2, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(hack_eval, NativeTranscript::proof_data); - NativeTranscript::template serialize_to_buffer(translation_ipa_poly_degree, NativeTranscript::proof_data); - log_poly_degree = static_cast(numeric::get_msb(translation_ipa_poly_degree)); + NativeTranscript::template serialize_to_buffer(shplonk_q_comm, NativeTranscript::proof_data); + + NativeTranscript::template serialize_to_buffer(ipa_poly_degree, NativeTranscript::proof_data); + auto log_poly_degree = static_cast(numeric::get_msb(ipa_poly_degree)); for (size_t i = 0; i < log_poly_degree; ++i) { - NativeTranscript::template 
serialize_to_buffer(translation_ipa_l_comms[i], - NativeTranscript::proof_data); - NativeTranscript::template serialize_to_buffer(translation_ipa_r_comms[i], - NativeTranscript::proof_data); + NativeTranscript::template serialize_to_buffer(ipa_l_comms[i], NativeTranscript::proof_data); + NativeTranscript::template serialize_to_buffer(ipa_r_comms[i], NativeTranscript::proof_data); } - serialize_to_buffer(translation_ipa_a_0_eval, proof_data); + serialize_to_buffer(ipa_a_0_eval, proof_data); ASSERT(NativeTranscript::proof_data.size() == old_proof_length); } diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp index 43cd7248f11..7d049b16970 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp @@ -1,12 +1,12 @@ #include "eccvm_prover.hpp" #include "barretenberg/commitment_schemes/claim.hpp" #include "barretenberg/commitment_schemes/commitment_key.hpp" +#include "barretenberg/commitment_schemes/shplonk/shplonk.hpp" #include "barretenberg/common/ref_array.hpp" #include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/plonk_honk_shared/library/grand_product_library.hpp" #include "barretenberg/polynomials/polynomial.hpp" -#include "barretenberg/relations/lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" @@ -104,67 +104,74 @@ void ECCVMProver::execute_relation_check_rounds() } /** - * @brief Execute the ZeroMorph protocol to prove the multilinear evaluations produced by Sumcheck - * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. 
+ * @brief Produce a univariate opening claim for the sumcheck multivariate evalutions and a batched univariate claim + * for the transcript polynomials (for the Translator consistency check). Reduce the two opening claims to a single one + * via Shplonk and produce an opening proof with the univariate PCS of choice (IPA when operating on Grumpkin). + * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled ZeroMorph + * protocol. * - * */ -void ECCVMProver::execute_zeromorph_rounds() -{ - ZeroMorph::prove(key->polynomials.get_unshifted(), - key->polynomials.get_to_be_shifted(), - sumcheck_output.claimed_evaluations.get_unshifted(), - sumcheck_output.claimed_evaluations.get_shifted(), - sumcheck_output.challenge, - commitment_key, - transcript); -} - -/** - * @brief Batch open the transcript polynomials as univariates for Translator consistency check - * TODO(#768): Find a better way to do this. See issue for details. - * - * @tparam Flavor */ -void ECCVMProver::execute_transcript_consistency_univariate_opening_round() +void ECCVMProver::execute_pcs_rounds() { - // Since IPA cannot currently handle polynomials for which the latter half of the coefficients are 0, we hackily - // batch the constant polynomial 1 in with the 5 transcript polynomials. See issue #768 for more details. 
+ using Curve = typename Flavor::Curve; + using ZeroMorph = ZeroMorphProver_; + using Shplonk = ShplonkProver_; + using OpeningClaim = ProverOpeningClaim; + + // Execute the ZeroMorph protocol to produce a univariate opening claim for the multilinear evaluations produced by + // Sumcheck + auto multivariate_to_univariate_opening_claim = + ZeroMorph::prove(key->polynomials.get_unshifted(), + key->polynomials.get_to_be_shifted(), + sumcheck_output.claimed_evaluations.get_unshifted(), + sumcheck_output.claimed_evaluations.get_shifted(), + sumcheck_output.challenge, + commitment_key, + transcript); + + // Batch open the transcript polynomials as univariates for Translator consistency check. Since IPA cannot + // currently handle polynomials for which the latter half of the coefficients are 0, we hackily + // batch the constant polynomial 1 in with the 5 transcript polynomials. + // TODO(https://github.com/AztecProtocol/barretenberg/issues/768): fix IPA to avoid the need for the hack polynomial Polynomial hack(key->circuit_size); for (size_t idx = 0; idx < key->circuit_size; idx++) { hack[idx] = 1; } transcript->send_to_verifier("Translation:hack_commitment", commitment_key->commit(hack)); - // Get the challenge at which we evaluate the polynomials as univariates + // Get the challenge at which we evaluate all transcript polynomials as univariates evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); + // Evaluate the transcript polynomials at the challenge translation_evaluations.op = key->polynomials.transcript_op.evaluate(evaluation_challenge_x); translation_evaluations.Px = key->polynomials.transcript_Px.evaluate(evaluation_challenge_x); translation_evaluations.Py = key->polynomials.transcript_Py.evaluate(evaluation_challenge_x); translation_evaluations.z1 = key->polynomials.transcript_z1.evaluate(evaluation_challenge_x); translation_evaluations.z2 = key->polynomials.transcript_z2.evaluate(evaluation_challenge_x); - // 
Add the univariate evaluations to the transcript + // Add the univariate evaluations to the transcript so the verifier can reconstruct the batched evaluation transcript->send_to_verifier("Translation:op", translation_evaluations.op); transcript->send_to_verifier("Translation:Px", translation_evaluations.Px); transcript->send_to_verifier("Translation:Py", translation_evaluations.Py); transcript->send_to_verifier("Translation:z1", translation_evaluations.z1); transcript->send_to_verifier("Translation:z2", translation_evaluations.z2); - transcript->send_to_verifier("Translation:hack_evaluation", hack.evaluate(evaluation_challenge_x)); - // Get another challenge for batching the univariate claims + FF hack_evaluation = hack.evaluate(evaluation_challenge_x); + transcript->send_to_verifier("Translation:hack_evaluation", hack_evaluation); + + // Get another challenge for batching the univariates and evaluations FF ipa_batching_challenge = transcript->template get_challenge("Translation:ipa_batching_challenge"); // Collect the polynomials and evaluations to be batched RefArray univariate_polynomials{ key->polynomials.transcript_op, key->polynomials.transcript_Px, key->polynomials.transcript_Py, key->polynomials.transcript_z1, key->polynomials.transcript_z2, hack }; - std::array univariate_evaluations; - for (auto [eval, polynomial] : zip_view(univariate_evaluations, univariate_polynomials)) { - eval = polynomial.evaluate(evaluation_challenge_x); - } + std::array univariate_evaluations{ + translation_evaluations.op, translation_evaluations.Px, translation_evaluations.Py, + translation_evaluations.z1, translation_evaluations.z2, hack_evaluation + }; - // Construct the batched polynomial and batched evaluation + // Construct the batched polynomial and batched evaluation to produce the batched opening claim Polynomial batched_univariate{ key->circuit_size }; FF batched_evaluation{ 0 }; auto batching_scalar = FF(1); @@ -174,12 +181,17 @@ void 
ECCVMProver::execute_transcript_consistency_univariate_opening_round() batching_scalar *= ipa_batching_challenge; } - // TODO(https://github.com/AztecProtocol/barretenberg/issues/922): We are doing another round of IPA here with - // exactly the same labels and no domain separation so if/when labels are going to matter we are clashing. - PCS::compute_opening_proof( - commitment_key, { evaluation_challenge_x, batched_evaluation }, batched_univariate, transcript); + std::array opening_claims = { multivariate_to_univariate_opening_claim, + { .polynomial = batched_univariate, + .opening_pair = { evaluation_challenge_x, batched_evaluation } } }; + + // Reduce the opening claims to a single opening claim via Shplonk + const OpeningClaim batched_opening_claim = Shplonk::prove(commitment_key, opening_claims, transcript); - // Get another challenge for batching the univariate claims + // Compute the opening proof for the batched opening claim with the univariate PCS + PCS::compute_opening_proof(commitment_key, batched_opening_claim, transcript); + + // Produce another challenge passed as input to the translator verifier translation_batching_challenge_v = transcript->template get_challenge("Translation:batching_challenge"); } @@ -203,9 +215,7 @@ HonkProof ECCVMProver::construct_proof() execute_relation_check_rounds(); - execute_zeromorph_rounds(); - - execute_transcript_consistency_univariate_opening_round(); + execute_pcs_rounds(); return export_proof(); } diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp index c6661069473..52d243ca06c 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp @@ -35,7 +35,7 @@ class ECCVMProver { BB_PROFILE void execute_log_derivative_commitments_round(); BB_PROFILE void execute_grand_product_computation_round(); BB_PROFILE void execute_relation_check_rounds(); - BB_PROFILE void 
execute_zeromorph_rounds(); + BB_PROFILE void execute_pcs_rounds(); BB_PROFILE void execute_transcript_consistency_univariate_opening_round(); HonkProof export_proof(); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp index 0b2e13a7850..9eaedc9df93 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp @@ -164,20 +164,6 @@ class ECCVMTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, "ZM:x", "ZM:z"); round++; - manifest_expected.add_entry(round, "IPA:poly_degree_plus_1", frs_per_uint32); - manifest_expected.add_challenge(round, "IPA:generator_challenge"); - - for (size_t i = 0; i < log_n; ++i) { - round++; - std::string idx = std::to_string(log_n - i - 1); - manifest_expected.add_entry(round, "IPA:L_" + idx, frs_per_G); - manifest_expected.add_entry(round, "IPA:R_" + idx, frs_per_G); - std::string label = "IPA:round_challenge_" + idx; - manifest_expected.add_challenge(round, label); - } - - round++; - manifest_expected.add_entry(round, "IPA:a_0", frs_per_Fr); manifest_expected.add_entry(round, "Translation:hack_commitment", frs_per_G); manifest_expected.add_challenge(round, "Translation:evaluation_challenge_x"); @@ -190,6 +176,13 @@ class ECCVMTranscriptTests : public ::testing::Test { manifest_expected.add_entry(round, "Translation:hack_evaluation", frs_per_Fr); manifest_expected.add_challenge(round, "Translation:ipa_batching_challenge"); + round++; + manifest_expected.add_challenge(round, "Shplonk:nu"); + + round++; + manifest_expected.add_entry(round, "Shplonk:Q", frs_per_G); + manifest_expected.add_challenge(round, "Shplonk:z"); + round++; manifest_expected.add_entry(round, "IPA:poly_degree_plus_1", frs_per_uint32); manifest_expected.add_challenge(round, "IPA:generator_challenge"); @@ -209,6 +202,7 @@ class ECCVMTranscriptTests : public 
::testing::Test { return manifest_expected; } + ECCVMCircuitBuilder generate_trace(numeric::RNG* engine = nullptr) { std::shared_ptr op_queue = std::make_shared(); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp index 8cc715a97c5..2c1e3d6dc57 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp @@ -1,4 +1,5 @@ #include "./eccvm_verifier.hpp" +#include "barretenberg/commitment_schemes/shplonk/shplonk.hpp" #include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" @@ -9,7 +10,9 @@ namespace bb { */ bool ECCVMVerifier::verify_proof(const HonkProof& proof) { - using ZeroMorph = ZeroMorphVerifier_; + using Curve = typename Flavor::Curve; + using ZeroMorph = ZeroMorphVerifier_; + using Shplonk = ShplonkVerifier_; RelationParameters relation_parameters; transcript = std::make_shared(proof); @@ -57,56 +60,58 @@ bool ECCVMVerifier::verify_proof(const HonkProof& proof) return false; } - bool multivariate_opening_verified = ZeroMorph::verify(commitments.get_unshifted(), - commitments.get_to_be_shifted(), - claimed_evaluations.get_unshifted(), - claimed_evaluations.get_shifted(), - multivariate_challenge, - key->pcs_verification_key, - transcript); + // Reduce the multivariate evaluation claims produced by sumcheck to a single univariate opening claim + auto multivariate_to_univariate_opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + commitments.get_to_be_shifted(), + claimed_evaluations.get_unshifted(), + claimed_evaluations.get_shifted(), + multivariate_challenge, + key->pcs_verification_key->get_g1_identity(), + transcript); + // Execute transcript consistency univariate opening round - // TODO(#768): Find a better way to do this. See issue for details. 
- bool univariate_opening_verified = false; - { - auto hack_commitment = transcript->template receive_from_prover("Translation:hack_commitment"); - - FF evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); - - // Construct arrays of commitments and evaluations to be batched - const size_t NUM_UNIVARIATES = 6; - std::array transcript_commitments = { - commitments.transcript_op, commitments.transcript_Px, commitments.transcript_Py, - commitments.transcript_z1, commitments.transcript_z2, hack_commitment - }; - std::array transcript_evaluations = { - transcript->template receive_from_prover("Translation:op"), - transcript->template receive_from_prover("Translation:Px"), - transcript->template receive_from_prover("Translation:Py"), - transcript->template receive_from_prover("Translation:z1"), - transcript->template receive_from_prover("Translation:z2"), - transcript->template receive_from_prover("Translation:hack_evaluation") - }; - - // Get another challenge for batching the univariate claims - FF ipa_batching_challenge = transcript->template get_challenge("Translation:ipa_batching_challenge"); - - // Construct batched commitment and batched evaluation - auto batched_commitment = transcript_commitments[0]; - auto batched_transcript_eval = transcript_evaluations[0]; - auto batching_scalar = ipa_batching_challenge; - for (size_t idx = 1; idx < transcript_commitments.size(); ++idx) { - batched_commitment = batched_commitment + transcript_commitments[idx] * batching_scalar; - batched_transcript_eval += batching_scalar * transcript_evaluations[idx]; - batching_scalar *= ipa_batching_challenge; - } - - // Construct and verify batched opening claim - OpeningClaim batched_univariate_claim = { { evaluation_challenge_x, batched_transcript_eval }, - batched_commitment }; - univariate_opening_verified = - PCS::reduce_verify(key->pcs_verification_key, batched_univariate_claim, transcript); + auto hack_commitment = transcript->template 
receive_from_prover("Translation:hack_commitment"); + + FF evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); + + // Construct arrays of commitments and evaluations to be batched, the evaluations being received from the prover + const size_t NUM_UNIVARIATES = 6; + std::array transcript_commitments = { + commitments.transcript_op, commitments.transcript_Px, commitments.transcript_Py, + commitments.transcript_z1, commitments.transcript_z2, hack_commitment + }; + std::array transcript_evaluations = { + transcript->template receive_from_prover("Translation:op"), + transcript->template receive_from_prover("Translation:Px"), + transcript->template receive_from_prover("Translation:Py"), + transcript->template receive_from_prover("Translation:z1"), + transcript->template receive_from_prover("Translation:z2"), + transcript->template receive_from_prover("Translation:hack_evaluation") + }; + + // Get the batching challenge for commitments and evaluations + FF ipa_batching_challenge = transcript->template get_challenge("Translation:ipa_batching_challenge"); + + // Compute the batched commitment and batched evaluation for the univariate opening claim + auto batched_commitment = transcript_commitments[0]; + auto batched_transcript_eval = transcript_evaluations[0]; + auto batching_scalar = ipa_batching_challenge; + for (size_t idx = 1; idx < transcript_commitments.size(); ++idx) { + batched_commitment = batched_commitment + transcript_commitments[idx] * batching_scalar; + batched_transcript_eval += batching_scalar * transcript_evaluations[idx]; + batching_scalar *= ipa_batching_challenge; } - return sumcheck_verified.value() && multivariate_opening_verified && univariate_opening_verified; + std::array, 2> opening_claims = { multivariate_to_univariate_opening_claim, + { { evaluation_challenge_x, batched_transcript_eval }, + batched_commitment } }; + + // Construct and verify the combined opening claim + auto batched_opening_claim = + 
Shplonk::reduce_verification(key->pcs_verification_key->get_g1_identity(), opening_claims, transcript); + + bool batched_opening_verified = PCS::reduce_verify(key->pcs_verification_key, batched_opening_claim, transcript); + + return sumcheck_verified.value() && batched_opening_verified; } } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.cpp index 7bef58336b1..4ceb6478179 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.cpp @@ -1,4 +1,5 @@ #include "./eccvm_recursive_verifier.hpp" +#include "barretenberg/commitment_schemes/shplonk/shplonk.hpp" #include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" #include "barretenberg/transcript/transcript.hpp" @@ -18,7 +19,10 @@ ECCVMRecursiveVerifier_::ECCVMRecursiveVerifier_( // TODO(https://github.com/AztecProtocol/barretenberg/issues/1007): Finish this template void ECCVMRecursiveVerifier_::verify_proof(const HonkProof& proof) { - using ZeroMorph = ZeroMorphVerifier_; + using Curve = typename Flavor::Curve; + using ZeroMorph = ZeroMorphVerifier_; + using Shplonk = ShplonkVerifier_; + RelationParameters relation_parameters; StdlibProof stdlib_proof = bb::convert_proof_to_witness(builder, proof); @@ -71,57 +75,58 @@ template void ECCVMRecursiveVerifier_::verify_proof(co auto [multivariate_challenge, claimed_evaluations, sumcheck_verified] = sumcheck.verify(relation_parameters, alpha, gate_challenges); - // removed return bool - bool multivariate_opening_verified = ZeroMorph::verify(commitments.get_unshifted(), - commitments.get_to_be_shifted(), - claimed_evaluations.get_unshifted(), - claimed_evaluations.get_shifted(), - multivariate_challenge, - key->pcs_verification_key, - transcript); - // Execute transcript 
consistency univariate opening round - // TODO(#768): Find a better way to do this. See issue for details. - bool univariate_opening_verified = false; - { - auto hack_commitment = transcript->template receive_from_prover("Translation:hack_commitment"); - - FF evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); - - // Construct arrays of commitments and evaluations to be batched - const size_t NUM_UNIVARIATES = 6; - std::array transcript_commitments = { - commitments.transcript_op, commitments.transcript_Px, commitments.transcript_Py, - commitments.transcript_z1, commitments.transcript_z2, hack_commitment - }; - std::array transcript_evaluations = { - transcript->template receive_from_prover("Translation:op"), - transcript->template receive_from_prover("Translation:Px"), - transcript->template receive_from_prover("Translation:Py"), - transcript->template receive_from_prover("Translation:z1"), - transcript->template receive_from_prover("Translation:z2"), - transcript->template receive_from_prover("Translation:hack_evaluation") - }; - - // Get another challenge for batching the univariate claims - FF ipa_batching_challenge = transcript->template get_challenge("Translation:ipa_batching_challenge"); - - // Construct batched commitment and batched evaluation - auto batched_commitment = transcript_commitments[0]; - auto batched_transcript_eval = transcript_evaluations[0]; - auto batching_scalar = ipa_batching_challenge; - for (size_t idx = 1; idx < transcript_commitments.size(); ++idx) { - batched_commitment = batched_commitment + transcript_commitments[idx] * batching_scalar; - batched_transcript_eval += batching_scalar * transcript_evaluations[idx]; - batching_scalar *= ipa_batching_challenge; - } - - // Construct and verify batched opening claim - OpeningClaim batched_univariate_claim = { { evaluation_challenge_x, batched_transcript_eval }, - batched_commitment }; - univariate_opening_verified = - 
PCS::reduce_verify(key->pcs_verification_key, batched_univariate_claim, transcript); + auto multivariate_to_univariate_opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + commitments.get_to_be_shifted(), + claimed_evaluations.get_unshifted(), + claimed_evaluations.get_shifted(), + multivariate_challenge, + key->pcs_verification_key->get_g1_identity(), + transcript); + auto hack_commitment = transcript->template receive_from_prover("Translation:hack_commitment"); + + FF evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); + + // Construct the vector of commitments (needs to be vector for the batch_mul) and array of evaluations to be batched + std::vector transcript_commitments = { commitments.transcript_op, commitments.transcript_Px, + commitments.transcript_Py, commitments.transcript_z1, + commitments.transcript_z2, hack_commitment }; + + std::vector transcript_evaluations = { transcript->template receive_from_prover("Translation:op"), + transcript->template receive_from_prover("Translation:Px"), + transcript->template receive_from_prover("Translation:Py"), + transcript->template receive_from_prover("Translation:z1"), + transcript->template receive_from_prover("Translation:z2"), + transcript->template receive_from_prover( + "Translation:hack_evaluation") }; + + // Get the batching challenge for commitments and evaluations + FF ipa_batching_challenge = transcript->template get_challenge("Translation:ipa_batching_challenge"); + + // Compute the batched commitment and batched evaluation for the univariate opening claim + auto batched_transcript_eval = transcript_evaluations[0]; + auto batching_scalar = ipa_batching_challenge; + + std::vector batching_challenges = { FF::one() }; + for (size_t idx = 1; idx < transcript_commitments.size(); ++idx) { + batched_transcript_eval += batching_scalar * transcript_evaluations[idx]; + batching_challenges.emplace_back(batching_scalar); + batching_scalar *= 
ipa_batching_challenge; } - ASSERT(sumcheck_verified && multivariate_opening_verified && univariate_opening_verified); + auto batched_commitment = Commitment::batch_mul(transcript_commitments, batching_challenges); + + // Construct and verify the combined opening claim + OpeningClaim batched_univariate_claim = { { evaluation_challenge_x, batched_transcript_eval }, + batched_commitment }; + + std::array, 2> opening_claims = { multivariate_to_univariate_opening_claim, + batched_univariate_claim }; + + auto batched_opening_claim = + Shplonk::reduce_verification(key->pcs_verification_key->get_g1_identity(), opening_claims, transcript); + + auto batched_opening_verified = PCS::reduce_verify(key->pcs_verification_key, batched_opening_claim, transcript); + + ASSERT(sumcheck_verified && batched_opening_verified); } template class ECCVMRecursiveVerifier_>; diff --git a/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.test.cpp b/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.test.cpp index 8be139c096a..2d2c1fe93bf 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.test.cpp @@ -76,7 +76,6 @@ template class ECCVMRecursiveTests : public ::testing { InnerBuilder builder = generate_circuit(&engine); InnerProver prover(builder); - info(builder.get_num_gates()); auto proof = prover.construct_proof(); auto verification_key = std::make_shared(prover.key); diff --git a/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.hpp b/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.hpp index 8b2011d792f..5dcb13ffacb 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.hpp @@ -25,7 +25,7 @@ template class VerifierCommitmentKey { 
VerifierCommitmentKey([[maybe_unused]] Builder* builder, size_t num_points, std::shared_ptr>& native_pcs_verification_key) - : first_g1(Commitment(native_pcs_verification_key->get_first_g1())) + : g1_identity(Commitment(native_pcs_verification_key->get_g1_identity())) { auto* native_points = native_pcs_verification_key->get_monomial_points(); @@ -34,11 +34,11 @@ template class VerifierCommitmentKey { } } - Commitment get_first_g1() { return first_g1; } + Commitment get_g1_identity() { return g1_identity; } std::vector get_monomial_points() { return monomial_points; } private: - Commitment first_g1; + Commitment g1_identity; std::vector monomial_points; }; } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.test.cpp b/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.test.cpp index 66dc19302ea..b9496e39ca4 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.test.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.test.cpp @@ -25,7 +25,7 @@ template class RecursiveVeriferCommitmentKeyTest : public testi Builder builder; auto native_vk = std::make_shared(num_points); auto recursive_vk = std::make_shared(&builder, num_points, native_vk); - EXPECT_EQ(native_vk->get_first_g1(), recursive_vk->get_first_g1().get_value()); + EXPECT_EQ(native_vk->get_g1_identity(), recursive_vk->get_g1_identity().get_value()); auto* native_monomial_points = native_vk->get_monomial_points(); auto recursive_monomial_points = recursive_vk->get_monomial_points(); diff --git a/barretenberg/cpp/src/barretenberg/plonk/composer/composer_lib.hpp b/barretenberg/cpp/src/barretenberg/plonk/composer/composer_lib.hpp index 0ca1c00e747..88c518cb4b3 100644 --- a/barretenberg/cpp/src/barretenberg/plonk/composer/composer_lib.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk/composer/composer_lib.hpp @@ -48,4 +48,79 @@ std::shared_ptr 
compute_verification_key_common( // silencing for now but need to figure out where to extract type of VerifierCrs from :-/ std::shared_ptr> const& vrs); +/** + * @brief Construct polynomials containing the sorted concatenation of the lookups and the lookup tables + * + * @tparam Flavor + * @param circuit + * @param dyadic_circuit_size + * @param additional_offset Additional space needed in polynomials to add randomness for zk (Plonk only) + * @return std::array + */ +template +std::array construct_sorted_list_polynomials(typename Flavor::CircuitBuilder& circuit, + const size_t dyadic_circuit_size, + size_t additional_offset = 0) +{ + using Polynomial = typename Flavor::Polynomial; + std::array sorted_polynomials; + // Initialise the sorted concatenated list polynomials for the lookup argument + for (auto& s_i : sorted_polynomials) { + s_i = Polynomial(dyadic_circuit_size); + } + + // The sorted list polynomials have (tables_size + lookups_size) populated entries. We define the index below so + // that these entries are written into the last indices of the polynomials. The values on the first + // dyadic_circuit_size - (tables_size + lookups_size) indices are automatically initialized to zero via the + // polynomial constructor. 
+ size_t s_index = dyadic_circuit_size - (circuit.get_tables_size() + circuit.get_lookups_size()) - additional_offset; + ASSERT(s_index > 0); // We need at least 1 row of zeroes for the permutation argument + + for (auto& table : circuit.lookup_tables) { + const fr table_index(table.table_index); + auto& lookup_gates = table.lookup_gates; + for (size_t i = 0; i < table.size(); ++i) { + if (table.use_twin_keys) { + lookup_gates.push_back({ + { + table.column_1[i].from_montgomery_form().data[0], + table.column_2[i].from_montgomery_form().data[0], + }, + { + table.column_3[i], + 0, + }, + }); + } else { + lookup_gates.push_back({ + { + table.column_1[i].from_montgomery_form().data[0], + 0, + }, + { + table.column_2[i], + table.column_3[i], + }, + }); + } + } + +#ifdef NO_TBB + std::sort(lookup_gates.begin(), lookup_gates.end()); +#else + std::sort(std::execution::par_unseq, lookup_gates.begin(), lookup_gates.end()); +#endif + + for (const auto& entry : lookup_gates) { + const auto components = entry.to_table_components(table.use_twin_keys); + sorted_polynomials[0][s_index] = components[0]; + sorted_polynomials[1][s_index] = components[1]; + sorted_polynomials[2][s_index] = components[2]; + sorted_polynomials[3][s_index] = table_index; + ++s_index; + } + } + return sorted_polynomials; +} + } // namespace bb::plonk diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/CMakeLists.txt index d3024bcdbb1..7603c0f6775 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(plonk_honk_shared polynomials) \ No newline at end of file +barretenberg_module(plonk_honk_shared polynomials ultra_honk) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/composer/composer_lib.hpp 
b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/composer/composer_lib.hpp index 7745a853c58..d4e75dc9e14 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/composer/composer_lib.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/composer/composer_lib.hpp @@ -2,6 +2,7 @@ #include "barretenberg/common/ref_array.hpp" #include "barretenberg/flavor/flavor.hpp" #include "barretenberg/polynomials/polynomial_store.hpp" +#include "barretenberg/stdlib_circuit_builders/plookup_tables/types.hpp" #include @@ -20,6 +21,7 @@ void construct_lookup_table_polynomials(RefArray // ^^^^^^^^^ ^^^^^^^^ ^^^^^^^ ^nonzero to ensure uniqueness and to avoid infinity commitments // | table randomness // ignored, as used for regular constraints and padding to the next power of 2. + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1033): construct tables and counts at top of trace ASSERT(dyadic_circuit_size > circuit.get_tables_size() + additional_offset); size_t offset = dyadic_circuit_size - circuit.get_tables_size() - additional_offset; @@ -37,78 +39,40 @@ void construct_lookup_table_polynomials(RefArray } /** - * @brief Construct polynomials containing the sorted concatenation of the lookups and the lookup tables - * - * @tparam Flavor - * @param circuit - * @param dyadic_circuit_size - * @param additional_offset Additional space needed in polynomials to add randomness for zk (Plonk only) - * @return std::array + * @brief Construct polynomial whose value at index i is the number of times the table entry at that index has been + * read. + * @details Read counts are needed for the log derivative lookup argument. The table polynomials are constructed as a + * concatenation of basic 3-column tables. Similarly, the read counts polynomial is constructed as the concatenation of + * read counts for the individual tables. 
*/ template -std::array construct_sorted_list_polynomials(typename Flavor::CircuitBuilder& circuit, - const size_t dyadic_circuit_size, - size_t additional_offset = 0) +void construct_lookup_read_counts(typename Flavor::Polynomial& read_counts, + typename Flavor::Polynomial& read_tags, + typename Flavor::CircuitBuilder& circuit, + size_t dyadic_circuit_size) { - using Polynomial = typename Flavor::Polynomial; - std::array sorted_polynomials; - // Initialise the sorted concatenated list polynomials for the lookup argument - for (auto& s_i : sorted_polynomials) { - s_i = Polynomial(dyadic_circuit_size); - } - - // The sorted list polynomials have (tables_size + lookups_size) populated entries. We define the index below so - // that these entries are written into the last indices of the polynomials. The values on the first - // dyadic_circuit_size - (tables_size + lookups_size) indices are automatically initialized to zero via the - // polynomial constructor. - size_t s_index = dyadic_circuit_size - (circuit.get_tables_size() + circuit.get_lookups_size()) - additional_offset; - ASSERT(s_index > 0); // We need at least 1 row of zeroes for the permutation argument + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1033): construct tables and counts at top of trace + size_t offset = dyadic_circuit_size - circuit.get_tables_size(); + size_t table_offset = offset; // offset of the present table in the table polynomials + // loop over all tables used in the circuit; each table contains data about the lookups made on it for (auto& table : circuit.lookup_tables) { - const fr table_index(table.table_index); - auto& lookup_gates = table.lookup_gates; - for (size_t i = 0; i < table.size(); ++i) { - if (table.use_twin_keys) { - lookup_gates.push_back({ - { - table.column_1[i].from_montgomery_form().data[0], - table.column_2[i].from_montgomery_form().data[0], - }, - { - table.column_3[i], - 0, - }, - }); - } else { - lookup_gates.push_back({ - { - 
table.column_1[i].from_montgomery_form().data[0], - 0, - }, - { - table.column_2[i], - table.column_3[i], - }, - }); - } - } + table.initialize_index_map(); + + for (auto& gate_data : table.lookup_gates) { + // convert lookup gate data to an array of three field elements, one for each of the 3 columns + auto table_entry = gate_data.to_table_components(table.use_twin_keys); -#ifdef NO_TBB - std::sort(lookup_gates.begin(), lookup_gates.end()); -#else - std::sort(std::execution::par_unseq, lookup_gates.begin(), lookup_gates.end()); -#endif + // find the index of the entry in the table + auto index_in_table = table.index_map[table_entry]; - for (const auto& entry : lookup_gates) { - const auto components = entry.to_sorted_list_components(table.use_twin_keys); - sorted_polynomials[0][s_index] = components[0]; - sorted_polynomials[1][s_index] = components[1]; - sorted_polynomials[2][s_index] = components[2]; - sorted_polynomials[3][s_index] = table_index; - ++s_index; + // increment the read count at the corresponding index in the full polynomial + size_t index_in_poly = table_offset + index_in_table; + read_counts[index_in_poly]++; + read_tags[index_in_poly] = 1; // tag is 1 if entry has been read 1 or more times } + table_offset += table.size(); // set the offset of the next table within the polynomials } - return sorted_polynomials; } } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/composer/composer_lib.test.cpp b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/composer/composer_lib.test.cpp index 94219c72a48..33534bc958d 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/composer/composer_lib.test.cpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/composer/composer_lib.test.cpp @@ -1,21 +1,78 @@ #include "barretenberg/plonk_honk_shared/composer/composer_lib.hpp" -#include "barretenberg/common/slab_allocator.hpp" -#include "barretenberg/plonk_honk_shared/types/circuit_type.hpp" #include 
"barretenberg/srs/factories/crs_factory.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_flavor.hpp" + #include #include using namespace bb; class ComposerLibTests : public ::testing::Test { + public: + using Flavor = UltraFlavor; + using FF = typename Flavor::FF; + protected: + static void SetUpTestSuite() { bb::srs::init_crs_factory("../srs_db/ignition"); } +}; + +/** + * @brief A test to demonstrate that lookup read counts/tags are computed correctly for a simple 'hand-computable' case + * using the uint32 XOR table + * + */ +TEST_F(ComposerLibTests, LookupReadCounts) +{ + using Builder = UltraCircuitBuilder; using Flavor = UltraFlavor; using FF = typename Flavor::FF; - Flavor::CircuitBuilder circuit_constructor; - Flavor::ProvingKey proving_key = []() { - auto crs_factory = srs::factories::CrsFactory(); - auto crs = crs_factory.get_prover_crs(4); - return Flavor::ProvingKey(/*circuit_size=*/8, /*num_public_inputs=*/0); - }(); -}; \ No newline at end of file + using Polynomial = typename Flavor::Polynomial; + auto UINT32_XOR = plookup::MultiTableId::UINT32_XOR; + + Builder builder; + + // define some very simply inputs to XOR + FF left{ 1 }; + FF right{ 5 }; + + auto left_idx = builder.add_variable(left); + auto right_idx = builder.add_variable(right); + + // create a single lookup from the uint32 XOR table + auto accumulators = plookup::get_lookup_accumulators(UINT32_XOR, left, right, /*is_2_to_1_lookup*/ true); + builder.create_gates_from_plookup_accumulators(UINT32_XOR, accumulators, left_idx, right_idx); + + EXPECT_EQ(builder.lookup_tables.size(), 1); // we only used a single table + EXPECT_EQ(builder.lookup_tables[0].size(), 4096); // table has size 64*64 (6 bit operands) + + size_t circuit_size = 8192; + + Polynomial read_counts{ circuit_size }; + Polynomial read_tags{ circuit_size }; + + construct_lookup_read_counts(read_counts, read_tags, builder, circuit_size); + + // 
The table polys are constructed at the bottom of the trace, thus so to are the counts/tags + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1033): construct tables and counts at top of trace + size_t offset = circuit_size - builder.get_tables_size(); + + // The uint32 XOR lookup table is constructed for 6 bit operands via double for loop that iterates through the left + // operand externally (0 to 63) then the right operand internally (0 to 63). Computing (1 XOR 5) will thus result in + // 1 lookup from the (1*64 + 5)th index in the table and 5 lookups from the (0*64 + 0)th index (for the remaining 5 + // limbs that are all 0). The counts and tags at all other indices should be zero. + size_t idx = 0; + for (auto [count, tag] : zip_view(read_counts, read_tags)) { + if (idx == (0 + offset)) { + EXPECT_EQ(count, 5); + EXPECT_EQ(tag, 1); + } else if (idx == (69 + offset)) { + EXPECT_EQ(count, 1); + EXPECT_EQ(tag, 1); + } else { + EXPECT_EQ(count, 0); + EXPECT_EQ(tag, 0); + } + idx++; + } +} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/decider_verifier.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/decider_verifier.cpp index 43441174ecf..5ae609a0e9a 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/decider_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/decider_verifier.cpp @@ -28,7 +28,8 @@ DeciderVerifier_::DeciderVerifier_() template bool DeciderVerifier_::verify_proof(const HonkProof& proof) { using PCS = typename Flavor::PCS; - using ZeroMorph = ZeroMorphVerifier_; + using Curve = typename Flavor::Curve; + using ZeroMorph = ZeroMorphVerifier_; using VerifierCommitments = typename Flavor::VerifierCommitments; transcript = std::make_shared(proof); @@ -48,12 +49,14 @@ template bool DeciderVerifier_::verify_proof(const Hon // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the // unrolled protocol. 
- auto pairing_points = ZeroMorph::verify(commitments.get_unshifted(), - commitments.get_to_be_shifted(), - claimed_evaluations.get_unshifted(), - claimed_evaluations.get_shifted(), - multivariate_challenge, - transcript); + auto opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + commitments.get_to_be_shifted(), + claimed_evaluations.get_unshifted(), + claimed_evaluations.get_shifted(), + multivariate_challenge, + Commitment::one(), + transcript); + auto pairing_points = PCS::reduce_verify(opening_claim, transcript); auto verified = pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy.test.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy.test.cpp index 6c717c1d126..da09210655f 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy.test.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy.test.cpp @@ -136,12 +136,10 @@ template class ProtoGalaxyTests : public testing::Test { instance->relation_parameters.beta = FF::random_element(); instance->relation_parameters.gamma = FF::random_element(); - instance->proving_key.compute_sorted_accumulator_polynomials(instance->relation_parameters.eta, - instance->relation_parameters.eta_two, - instance->relation_parameters.eta_three); - if constexpr (IsGoblinFlavor) { - instance->proving_key.compute_logderivative_inverse(instance->relation_parameters); - } + instance->proving_key.add_ram_rom_memory_records_to_wire_4(instance->relation_parameters.eta, + instance->relation_parameters.eta_two, + instance->relation_parameters.eta_three); + instance->proving_key.compute_logderivative_inverses(instance->relation_parameters); instance->proving_key.compute_grand_product_polynomials(instance->relation_parameters); for (auto& alpha : instance->alphas) { @@ -311,21 +309,94 @@ template class ProtoGalaxyTests : public testing::Test { } /** - * @brief Testing one valid round of folding 
followed by the decider. - * @brief For additional robustness we give one of the circuits more public inputs than the other + * @brief Testing one valid round of folding (plus decider) for two inhomogeneous circuits + * @details For robustness we fold circuits with different numbers/types of gates (but the same dyadic size) * */ - static void test_full_protogalaxy_simple() + static void test_protogalaxy_inhomogeneous() { - // Construct a first circuit with some public inputs - Builder builder1; - construct_circuit(builder1); - bb::MockCircuits::add_arithmetic_gates_with_public_inputs(builder1, /*num_gates=*/4); + auto check_fold_and_decide = [](Builder& circuit_1, Builder& circuit_2) { + // Construct the prover/verifier instances for each + TupleOfInstances instances; + construct_prover_and_verifier_instance(instances, circuit_1); + construct_prover_and_verifier_instance(instances, circuit_2); + + // Perform prover and verifier folding + auto [prover_accumulator, verifier_accumulator] = fold_and_verify(get<0>(instances), get<1>(instances)); + check_accumulator_target_sum_manual(prover_accumulator, true); + + // Run decider + decide_and_verify(prover_accumulator, verifier_accumulator, true); + }; + + // One circuit has more arithmetic gates + { + // Construct two equivalent circuits + Builder builder1; + Builder builder2; + construct_circuit(builder1); + construct_circuit(builder2); + + // Add some arithmetic gates + bb::MockCircuits::add_arithmetic_gates(builder1, /*num_gates=*/4); + + check_fold_and_decide(builder1, builder2); + } + + // One circuit has more arithmetic gates with public inputs + { + // Construct two equivalent circuits + Builder builder1; + Builder builder2; + construct_circuit(builder1); + construct_circuit(builder2); + + // Add some arithmetic gates with public inputs to the first circuit + bb::MockCircuits::add_arithmetic_gates_with_public_inputs(builder1, /*num_gates=*/4); + + check_fold_and_decide(builder1, builder2); + } + + // One circuit 
has more lookup gates + { + // Construct two equivalent circuits + Builder builder1; + Builder builder2; + construct_circuit(builder1); + construct_circuit(builder2); + + // Add a different number of lookup gates to each circuit + bb::MockCircuits::add_lookup_gates(builder1, /*num_iterations=*/2); // 12 gates plus 4096 table + bb::MockCircuits::add_lookup_gates(builder2, /*num_iterations=*/1); // 6 gates plus 4096 table + + check_fold_and_decide(builder1, builder2); + } + } - // Construct a second circuit with no public inputs + /** + * @brief Ensure failure for a bad lookup gate in one of the circuits being folded + * + */ + static void test_protogalaxy_bad_lookup_failure() + { + // Construct two equivalent circuits + Builder builder1; Builder builder2; + construct_circuit(builder1); construct_circuit(builder2); + // Add a different number of lookup gates to each circuit + bb::MockCircuits::add_lookup_gates(builder1, /*num_iterations=*/2); // 12 gates plus 4096 table + bb::MockCircuits::add_lookup_gates(builder2, /*num_iterations=*/1); // 6 gates plus 4096 table + + // Erroneously set a non-zero wire value to zero in one of the lookup gates + for (auto& wire_3_witness_idx : builder1.blocks.lookup.w_o()) { + if (wire_3_witness_idx != builder1.zero_idx) { + wire_3_witness_idx = builder1.zero_idx; + break; + } + } + // Construct the prover/verifier instances for each TupleOfInstances instances; construct_prover_and_verifier_instance(instances, builder1); @@ -333,9 +404,11 @@ template class ProtoGalaxyTests : public testing::Test { // Perform prover and verifier folding auto [prover_accumulator, verifier_accumulator] = fold_and_verify(get<0>(instances), get<1>(instances)); - check_accumulator_target_sum_manual(prover_accumulator, true); - decide_and_verify(prover_accumulator, verifier_accumulator, true); + // Expect failure in manual target sum check and decider + bool expected_result = false; + check_accumulator_target_sum_manual(prover_accumulator, expected_result); 
+ decide_and_verify(prover_accumulator, verifier_accumulator, expected_result); } /** @@ -517,9 +590,9 @@ TYPED_TEST(ProtoGalaxyTests, CombineAlpha) TestFixture::test_combine_alpha(); } -TYPED_TEST(ProtoGalaxyTests, FullProtogalaxySimple) +TYPED_TEST(ProtoGalaxyTests, ProtogalaxyInhomogeneous) { - TestFixture::test_full_protogalaxy_simple(); + TestFixture::test_protogalaxy_inhomogeneous(); } TYPED_TEST(ProtoGalaxyTests, FullProtogalaxyTest) @@ -546,6 +619,11 @@ TYPED_TEST(ProtoGalaxyTests, TamperedAccumulatorPolynomial) TestFixture::test_tampered_accumulator_polynomial(); } +TYPED_TEST(ProtoGalaxyTests, BadLookupFailure) +{ + TestFixture::test_protogalaxy_bad_lookup_failure(); +} + // We only fold one instance currently due to significant compile time added by multiple instances TYPED_TEST(ProtoGalaxyTests, Fold1Instance) { diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp index 3091c0259e2..cbfbe35cbda 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp @@ -364,7 +364,6 @@ template class ProtoGalaxyProver_ { const FF& scaling_factor) { using Relation = std::tuple_element_t; - // WORKTODO: disable skipping for the combiner for now.. 
// Check if the relation is skippable to speed up accumulation if constexpr (!isSkippable) { // If not, accumulate normally diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover_impl.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover_impl.hpp index f38ff10b3e1..36774b7c25f 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover_impl.hpp @@ -187,7 +187,12 @@ FoldingResult ProtoGalaxyProver_proving_key.circuit_size == instances[idx + 1]->proving_key.circuit_size); + if (instances[idx]->proving_key.circuit_size != instances[idx + 1]->proving_key.circuit_size) { + info("ProtogalaxyProver: circuit size mismatch!"); + info("Instance ", idx, " size = ", instances[idx]->proving_key.circuit_size); + info("Instance ", idx + 1, " size = ", instances[idx + 1]->proving_key.circuit_size); + ASSERT(false); + } } preparation_round(); perturbator_round(); diff --git a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp index 3c897ce3909..c0ec529cdd7 100644 --- a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp @@ -243,24 +243,24 @@ template class DatabusLookupRelationImpl { const auto inverses = View(BusData::inverses(in)); // Degree 1 const auto read_counts = View(BusData::read_counts(in)); // Degree 1 - const auto read_term = compute_read_term(in, params); // Degree 1 - const auto write_term = compute_write_term(in, params); // Degree 1 - const auto inverse_exists = compute_inverse_exists(in); // Degree 1 + const auto read_term = compute_read_term(in, params); // Degree 1 (2) + const auto write_term = compute_write_term(in, params); // Degree 1 (2) + const auto inverse_exists = compute_inverse_exists(in); // Degree 2 const auto read_selector = 
get_read_selector(in); // Degree 2 - const auto write_inverse = inverses * read_term; // Degree 2 - const auto read_inverse = inverses * write_term; // Degree 2 + const auto write_inverse = inverses * read_term; // Degree 2 (3) + const auto read_inverse = inverses * write_term; // Degree 2 (3) // Determine which pair of subrelations to update based on which bus column is being read constexpr size_t subrel_idx_1 = 2 * bus_idx; constexpr size_t subrel_idx_2 = 2 * bus_idx + 1; // Establish the correctness of the polynomial of inverses I. Note: inverses is computed so that the value is 0 - // if !inverse_exists. Degree 3 + // if !inverse_exists. Degree 3 (5) std::get(accumulator) += (read_term * write_term * inverses - inverse_exists) * scaling_factor; // Establish validity of the read. Note: no scaling factor here since this constraint is enforced across the - // entire trace, not on a per-row basis - std::get(accumulator) += read_selector * read_inverse - read_counts * write_inverse; // Degree 4 + // entire trace, not on a per-row basis. 
+ std::get(accumulator) += read_selector * read_inverse - read_counts * write_inverse; // Deg 4 (5) } /** diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp index 9dd3eb86948..2cdb82e6d1e 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp @@ -251,7 +251,6 @@ [[maybe_unused]] auto main_sel_rng_16 = View(new_term.main_sel_rng_16); \ [[maybe_unused]] auto main_sel_rng_8 = View(new_term.main_sel_rng_8); \ [[maybe_unused]] auto main_space_id = View(new_term.main_space_id); \ - [[maybe_unused]] auto main_table_pow_2 = View(new_term.main_table_pow_2); \ [[maybe_unused]] auto main_tag_err = View(new_term.main_tag_err); \ [[maybe_unused]] auto main_w_in_tag = View(new_term.main_w_in_tag); \ [[maybe_unused]] auto mem_addr = View(new_term.mem_addr); \ @@ -293,6 +292,7 @@ [[maybe_unused]] auto poseidon2_input = View(new_term.poseidon2_input); \ [[maybe_unused]] auto poseidon2_output = View(new_term.poseidon2_output); \ [[maybe_unused]] auto poseidon2_sel_poseidon_perm = View(new_term.poseidon2_sel_poseidon_perm); \ + [[maybe_unused]] auto powers_power_of_2 = View(new_term.powers_power_of_2); \ [[maybe_unused]] auto sha256_clk = View(new_term.sha256_clk); \ [[maybe_unused]] auto sha256_input = View(new_term.sha256_input); \ [[maybe_unused]] auto sha256_output = View(new_term.sha256_output); \ diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/gas.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/gas.hpp new file mode 100644 index 00000000000..7c69045c001 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/gas.hpp @@ -0,0 +1,69 @@ + +#pragma once +#include "../../relation_parameters.hpp" +#include "../../relation_types.hpp" +#include "./declare_views.hpp" + +namespace bb::Avm_vm { + 
+template struct GasRow { + FF gas_da_gas_fixed_table{}; + FF gas_l2_gas_fixed_table{}; + FF gas_sel_gas_cost{}; + + [[maybe_unused]] static std::vector names(); +}; + +inline std::string get_relation_label_gas(int index) +{ + switch (index) {} + return std::to_string(index); +} + +template class gasImpl { + public: + using FF = FF_; + + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + 2, + 2, + 2, + }; + + template + void static accumulate(ContainerOverSubrelations& evals, + const AllEntities& new_term, + [[maybe_unused]] const RelationParameters&, + [[maybe_unused]] const FF& scaling_factor) + { + + // Contribution 0 + { + Avm_DECLARE_VIEWS(0); + + auto tmp = ((gas_sel_gas_cost - gas_sel_gas_cost) - FF(0)); + tmp *= scaling_factor; + std::get<0>(evals) += tmp; + } + // Contribution 1 + { + Avm_DECLARE_VIEWS(1); + + auto tmp = ((gas_l2_gas_fixed_table - gas_l2_gas_fixed_table) - FF(0)); + tmp *= scaling_factor; + std::get<1>(evals) += tmp; + } + // Contribution 2 + { + Avm_DECLARE_VIEWS(2); + + auto tmp = ((gas_da_gas_fixed_table - gas_da_gas_fixed_table) - FF(0)); + tmp *= scaling_factor; + std::get<2>(evals) += tmp; + } + } +}; + +template using gas = Relation>; + +} // namespace bb::Avm_vm \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_pow_2_0.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_pow_2_0.hpp index b042e72cf58..7ec3d3283b1 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_pow_2_0.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_pow_2_0.hpp @@ -140,7 +140,7 @@ class lookup_pow_2_0_lookup_settings { in.alu_ib, in.alu_two_pow_s, in.main_clk, - in.main_table_pow_2); + in.powers_power_of_2); } /** @@ -160,7 +160,7 @@ class lookup_pow_2_0_lookup_settings { in.alu_ib, in.alu_two_pow_s, in.main_clk, - in.main_table_pow_2); + in.powers_power_of_2); } }; diff --git 
a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_pow_2_1.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_pow_2_1.hpp index 0e3a413289c..4101469c97f 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_pow_2_1.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_pow_2_1.hpp @@ -140,7 +140,7 @@ class lookup_pow_2_1_lookup_settings { in.alu_t_sub_s_bits, in.alu_two_pow_t_sub_s, in.main_clk, - in.main_table_pow_2); + in.powers_power_of_2); } /** @@ -160,7 +160,7 @@ class lookup_pow_2_1_lookup_settings { in.alu_t_sub_s_bits, in.alu_two_pow_t_sub_s, in.main_clk, - in.main_table_pow_2); + in.powers_power_of_2); } }; diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/mem.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/mem.hpp index ea5a125887f..6e4c4fdd982 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/mem.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/mem.hpp @@ -112,7 +112,7 @@ template class memImpl { static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 4, 3, 4, 3, 4, 3, 3, - 3, 4, 4, 4, 4, 4, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 4, 4, 4, 4, 4, 6, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, }; template @@ -365,7 +365,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(27); - auto tmp = ((((-mem_skip_check_tag + FF(1)) * (-mem_rw + FF(1))) * + auto tmp = ((((mem_tag * (-mem_skip_check_tag + FF(1))) * (-mem_rw + FF(1))) * (((mem_r_in_tag - mem_tag) * (-mem_one_min_inv + FF(1))) - mem_tag_err)) - FF(0)); tmp *= scaling_factor; @@ -375,7 +375,7 @@ template class memImpl { { Avm_DECLARE_VIEWS(28); - auto tmp = (((-mem_tag_err + FF(1)) * mem_one_min_inv) - FF(0)); + auto tmp = (((mem_tag * (-mem_tag_err + FF(1))) * mem_one_min_inv) - FF(0)); tmp *= scaling_factor; std::get<28>(evals) += tmp; } diff --git 
a/barretenberg/cpp/src/barretenberg/relations/generated/avm/powers.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/powers.hpp new file mode 100644 index 00000000000..7c43cb2db78 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/powers.hpp @@ -0,0 +1,49 @@ + +#pragma once +#include "../../relation_parameters.hpp" +#include "../../relation_types.hpp" +#include "./declare_views.hpp" + +namespace bb::Avm_vm { + +template struct PowersRow { + FF powers_power_of_2{}; + + [[maybe_unused]] static std::vector names(); +}; + +inline std::string get_relation_label_powers(int index) +{ + switch (index) {} + return std::to_string(index); +} + +template class powersImpl { + public: + using FF = FF_; + + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + 2, + }; + + template + void static accumulate(ContainerOverSubrelations& evals, + const AllEntities& new_term, + [[maybe_unused]] const RelationParameters&, + [[maybe_unused]] const FF& scaling_factor) + { + + // Contribution 0 + { + Avm_DECLARE_VIEWS(0); + + auto tmp = ((powers_power_of_2 - powers_power_of_2) - FF(0)); + tmp *= scaling_factor; + std::get<0>(evals) += tmp; + } + } +}; + +template using powers = Relation>; + +} // namespace bb::Avm_vm \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/relations/logderiv_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/logderiv_lookup_relation.hpp new file mode 100644 index 00000000000..92078db8e85 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/logderiv_lookup_relation.hpp @@ -0,0 +1,207 @@ +#pragma once +#include +#include + +#include "barretenberg/common/constexpr_utils.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" +#include "barretenberg/polynomials/polynomial.hpp" +#include "barretenberg/polynomials/univariate.hpp" +#include "barretenberg/relations/relation_types.hpp" + +namespace bb { + +template class LogDerivLookupRelationImpl { + public: 
+ using FF = FF_; + static constexpr size_t READ_TERMS = 1; + static constexpr size_t WRITE_TERMS = 1; + // 1 + polynomial degree of this relation + static constexpr size_t LENGTH = 5; // both subrelations are degree 4 + + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + LENGTH, // inverse construction sub-relation + LENGTH // log derivative lookup argument sub-relation + }; + + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1036): Scrutinize these adjustment factors. Counting + // degrees suggests the first subrelation should require an adjustment of 2. + static constexpr std::array TOTAL_LENGTH_ADJUSTMENTS{ + 1, // inverse construction sub-relation + 1 // log derivative lookup argument sub-relation + }; + + static constexpr std::array SUBRELATION_LINEARLY_INDEPENDENT = { true, false }; + + template inline static bool skip(const AllEntities& in) + { + // Ensure the input does not contain a lookup gate or data that is being read + return in.q_lookup.is_zero() && in.lookup_read_counts.is_zero(); + } + + /** + * @brief Does the provided row contain data relevant to table lookups; Used to determine whether the polynomial of + * inverses must be computed at a given row + * @details In order to avoid unnecessary computation, the polynomial of inverses I is only computed for rows at + * which the lookup relation is "active". It is active if either (1) the present row contains a lookup gate (i.e. + * q_lookup == 1), or (2) the present row contains table data that has been looked up in this circuit + * (lookup_read_tags == 1, or equivalently, if the row in consideration has index i, the data in polynomials table_i + * has been utlized in the circuit). 
+ * + */ + template static bool operation_exists_at_row(const AllValues& row) + { + // is the row a lookup gate or does it contain table data that has been read at some point in this circuit + return (row.q_lookup == 1) || (row.lookup_read_tags == 1); + } + + // Get the inverse polynomial for this relation + template static auto& get_inverse_polynomial(AllEntities& in) { return in.lookup_inverses; } + + // Used in the inverse correctness subrelation; facilitates only computing inverses where necessary + template + static Accumulator compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + + const auto row_has_write = View(in.lookup_read_tags); + const auto row_has_read = View(in.q_lookup); + return row_has_write + row_has_read - (row_has_write * row_has_read); + } + + template + static Accumulator lookup_read_counts(const AllEntities& in) + { + using View = typename Accumulator::View; + return Accumulator(View(in.lookup_read_counts)); + } + + // Compute table_1 + gamma + table_2 * eta + table_3 * eta_2 + table_4 * eta_3 + template + static Accumulator compute_write_term(const AllEntities& in, const Parameters& params) + { + using View = typename Accumulator::View; + using ParameterView = GetParameterView; + + static_assert(write_index < WRITE_TERMS); + + const auto& gamma = ParameterView(params.gamma); + const auto& eta = ParameterView(params.eta); + const auto& eta_two = ParameterView(params.eta_two); + const auto& eta_three = ParameterView(params.eta_three); + + auto table_1 = View(in.table_1); + auto table_2 = View(in.table_2); + auto table_3 = View(in.table_3); + auto table_4 = View(in.table_4); + + return table_1 + gamma + table_2 * eta + table_3 * eta_two + table_4 * eta_three; + } + + template + static Accumulator compute_read_term(const AllEntities& in, const Parameters& params) + { + using View = typename Accumulator::View; + using ParameterView = GetParameterView; + + const auto& gamma = ParameterView(params.gamma); + 
const auto& eta = ParameterView(params.eta); + const auto& eta_two = ParameterView(params.eta_two); + const auto& eta_three = ParameterView(params.eta_three); + + auto w_1 = View(in.w_l); + auto w_2 = View(in.w_r); + auto w_3 = View(in.w_o); + + auto w_1_shift = View(in.w_l_shift); + auto w_2_shift = View(in.w_r_shift); + auto w_3_shift = View(in.w_o_shift); + + auto table_index = View(in.q_o); + auto negative_column_1_step_size = View(in.q_r); + auto negative_column_2_step_size = View(in.q_m); + auto negative_column_3_step_size = View(in.q_c); + + // The wire values for lookup gates are accumulators structured in such a way that the differences w_i - + // step_size*w_i_shift result in values present in column i of a corresponding table. See the documentation in + // method get_lookup_accumulators() in for a detailed explanation. + auto derived_table_entry_1 = w_1 + gamma + negative_column_1_step_size * w_1_shift; + auto derived_table_entry_2 = w_2 + negative_column_2_step_size * w_2_shift; + auto derived_table_entry_3 = w_3 + negative_column_3_step_size * w_3_shift; + + // (w_1 + q_2*w_1_shift) + η(w_2 + q_m*w_2_shift) + η₂(w_3 + q_c*w_3_shift) + η₃q_index. + // deg 2 or 3 + return derived_table_entry_1 + derived_table_entry_2 * eta + derived_table_entry_3 * eta_two + + table_index * eta_three; + } + + /** + * @brief Log-derivative style lookup argument for conventional lookups form tables with 3 or fewer columns + * @details The identity to be checked is of the form + * + * \sum{i=0}^{n-1} \frac{read_counts_i}{write_term_i} - \frac{q_lookup}{read_term_i} = 0 + * + * where write_term = table_col_1 + \gamma + table_col_2 * \eta_1 + table_col_3 * \eta_2 + table_index * \eta_3 + * and read_term = derived_table_entry_1 + \gamma + derived_table_entry_2 * \eta_1 + derived_table_entry_3 * \eta_2 + * + table_index * \eta_3, with derived_table_entry_i = w_i - col_step_size_i\cdot w_i_shift. 
(The table entries + * must be 'derived' from wire values in this way since the stored witnesses are actually successive accumulators, + * the differences of which are equal to entries in a table. This is an efficiency trick to avoid using additional + * gates to reconstruct full size values from the limbs contained in tables). + * + * In practice this identity is expressed in terms of polynomials by defining a polynomial of inverses I_i = + * \frac{1}{read_term_i\cdot write_term_i} then rewriting the above identity as + * + * (1) \sum{i=0}^{n-1} (read_counts_i\cdot I_i\cdot read_term_i) - (q_lookup\cdot I_i\cdot write_term_i) = 0 + * + * This requires a second subrelation to check that polynomial I was computed correctly. For all i, it must hold + * that + * + * (2) I_i\cdot read_term_i\cdot write_term_i - 1 = 0 + * + * Note that (1) is 'linearly dependent' in the sense that it holds only as a sum across the entire execution trace. + * (2) on the other hand holds independently at every row. Finally, note that to avoid unnecessary computation, we + * only compute I_i at indices where the relation is 'active', i.e. on rows which either contain a lookup gate or + * table data that has been read. For inactive rows i, we set I_i = 0. We can thus rewrite (2) as + * + * (2) I_i\cdot read_term_i\cdot write_term_i - is_active_i + * + * where is_active = q_lookup + read_tags - q_lookup\cdot read_tags + * + * and read_tags is a polynomial taking boolean values indicating whether the table entry at the corresponding row + * has been read or not. 
+ * @note This relation utilizes functionality in the log-derivative library to compute the polynomial of inverses + * + */ + template + static void accumulate(ContainerOverSubrelations& accumulator, + const AllEntities& in, + const Parameters& params, + const FF& scaling_factor) + { + BB_OP_COUNT_TIME_NAME("Lookup::accumulate"); + using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; + using View = typename Accumulator::View; + + const auto inverses = View(in.lookup_inverses); // Degree 1 + const auto read_counts = View(in.lookup_read_counts); // Degree 1 + const auto read_selector = View(in.q_lookup); // Degree 1 + const auto inverse_exists = compute_inverse_exists(in); // Degree 2 + const auto read_term = compute_read_term(in, params); // Degree 2 (3) + const auto write_term = compute_write_term(in, params); // Degree 1 (2) + const auto write_inverse = inverses * read_term; // Degree 3 (4) + const auto read_inverse = inverses * write_term; // Degree 2 (3) + + // Establish the correctness of the polynomial of inverses I. Note: inverses is computed so that the value is 0 + // if !inverse_exists. + // Degrees: 2 (3) 1 (2) 1 1 + std::get<0>(accumulator) += (read_term * write_term * inverses - inverse_exists) * scaling_factor; // Deg 4 (6) + + // Establish validity of the read. Note: no scaling factor here since this constraint is 'linearly dependent, + // i.e. enforced across the entire trace, not on a per-row basis. 
+ // Degrees: 1 2 (3) 1 3 (4) + std::get<1>(accumulator) += read_selector * read_inverse - read_counts * write_inverse; // Deg 4 (5) + } +}; + +template using LogDerivLookupRelation = Relation>; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp deleted file mode 100644 index 46b70df7cab..00000000000 --- a/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp +++ /dev/null @@ -1,224 +0,0 @@ -#pragma once -#include "barretenberg/relations/relation_types.hpp" - -namespace bb { - -/** - * @brief LookupRelationImpl defines the algebra for the lookup polynomial: - * - * ∏ (1 + β) ⋅ (q_lookup*f_k + γ) ⋅ (t_k + βt_{k+1} + γ(1 + β)) - * Z_lookup(g^j) = -------------------------------------------------------------------------- - * ∏ (s_k + βs_{k+1} + γ(1 + β)) - * - * - * The method `compute_numerator_term` computes polynomials f, t and incorporate them into terms that are ultimately - * needed to construct the grand product polynomial Z_lookup(X): Note 1: In the above, 't' is associated with table - * values (and is not to be confused with the quotient polynomial, also refered to as 't' elsewhere). Polynomial 's' is - * the sorted concatenation of the witnesses and the table values. 
- * - * @tparam FF parametrises the prime field class being used - */ -template class LookupRelationImpl { - public: - using FF = FF_; - - static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ - 6, // grand product construction sub-relation - 3 // left-shiftable polynomial sub-relation - }; - - static constexpr std::array TOTAL_LENGTH_ADJUSTMENTS{ - 4, // grand product construction sub-relation - 0 // left-shiftable polynomial sub-relation - }; - - /** - * @brief Returns true if the contribution from all subrelations for the provided inputs is identically zero - * - */ - template inline static bool skip([[maybe_unused]] const AllEntities& in) - { - // TODO(https://github.com/AztecProtocol/barretenberg/issues/952): figure out why skip condition described in - // issue causes failures in acir tests. - return false; - } - - /** - * @brief Get the grand product polynomial object (either from the proving key or AllEntities depending on context) - * - * @param input - * @return auto& either std::span or Flavor::Polynomial depending on context - */ - inline static auto& get_grand_product_polynomial(auto& input) { return input.z_lookup; } - - /** - * @brief Get the shifted grand product polynomial object (either from the proving key or AllEntities depending on - * context) - * - * @param input - * @return auto& either std::span or Flavor::Polynomial depending on context - */ - inline static auto& get_shifted_grand_product_polynomial(auto& input) { return input.z_lookup_shift; } - - /** - * @brief Compute numerator term of the lookup relation: - * - * N_{index} = (1 + β) ⋅ ∏ (q_lookup*f_k + γ) ⋅ (t_k + βt_{k+1} + γ(1 + β)) - * - * @tparam AccumulatorTypes - * @param in - * @param relation_parameters - * @param index If calling this method over vector inputs, index >= 0 - */ - template - inline static Accumulator compute_grand_product_numerator(const AllEntities& in, const Parameters& params) - { - using View = typename Accumulator::View; - using ParameterView = 
GetParameterView; - - const auto& beta = ParameterView(params.beta); - const auto& gamma = ParameterView(params.gamma); - const auto& eta = ParameterView(params.eta); - const auto& eta_two = ParameterView(params.eta_two); - const auto& eta_three = ParameterView(params.eta_three); - - const auto one_plus_beta = beta + FF(1); - const auto gamma_by_one_plus_beta = gamma * one_plus_beta; - - auto w_1 = View(in.w_l); - auto w_2 = View(in.w_r); - auto w_3 = View(in.w_o); - - auto w_1_shift = View(in.w_l_shift); - auto w_2_shift = View(in.w_r_shift); - auto w_3_shift = View(in.w_o_shift); - - auto table_1 = View(in.table_1); - auto table_2 = View(in.table_2); - auto table_3 = View(in.table_3); - auto table_4 = View(in.table_4); - - auto table_1_shift = View(in.table_1_shift); - auto table_2_shift = View(in.table_2_shift); - auto table_3_shift = View(in.table_3_shift); - auto table_4_shift = View(in.table_4_shift); - - auto table_index = View(in.q_o); - auto column_1_step_size = View(in.q_r); - auto column_2_step_size = View(in.q_m); - auto column_3_step_size = View(in.q_c); - auto q_lookup = View(in.q_lookup); - - // (w_1 + q_2*w_1_shift) + η(w_2 + q_m*w_2_shift) + η₂(w_3 + q_c*w_3_shift) + η₃q_index. 
- // deg 2 or 3 - auto wire_accum = (w_1 + column_1_step_size * w_1_shift) + (w_2 + column_2_step_size * w_2_shift) * eta + - (w_3 + column_3_step_size * w_3_shift) * eta_two + table_index * eta_three; - - // t_1 + ηt_2 + η₂t_3 + η₃t_4 - // deg 1 or 2 - auto table_accum = table_1 + table_2 * eta + table_3 * eta_two + table_4 * eta_three; - - // t_1_shift + ηt_2_shift + η₂t_3_shift + η₃t_4_shift - // deg 1 or 2 - auto table_accum_shift = - table_1_shift + table_2_shift * eta + table_3_shift * eta_two + table_4_shift * eta_three; - - auto tmp = (q_lookup * wire_accum + gamma); // deg 3 or 4 - tmp *= (table_accum + table_accum_shift * beta + gamma_by_one_plus_beta); // 1 or 3 - tmp *= one_plus_beta; // deg 0 or 1 - return tmp; // deg 4 or 8 - } - - /** - * @brief Compute denominator term of the lookup relation: - * - * (s_k + βs_{k+1} + γ(1 + β)) - * - * @tparam AccumulatorTypes - * @param in - * @param relation_parameters - * @param index - */ - template - inline static Accumulator compute_grand_product_denominator(const AllEntities& in, const Parameters& params) - { - - using View = typename Accumulator::View; - using ParameterView = GetParameterView; - - const auto& beta = ParameterView(params.beta); - const auto& gamma = ParameterView(params.gamma); - - const auto one_plus_beta = beta + FF(1); - const auto gamma_by_one_plus_beta = gamma * one_plus_beta; // deg 0 or 2 - - // Contribution (1) - auto s_accum = View(in.sorted_accum); - auto s_accum_shift = View(in.sorted_accum_shift); - - auto tmp = (s_accum + s_accum_shift * beta + gamma_by_one_plus_beta); // 1 or 2 - return tmp; - } - - /** - * @brief Compute contribution of the lookup grand prod relation for a given edge (internal function) - * - * @details This the relation confirms faithful calculation of the lookup grand - * product polynomial Z_lookup. 
The contribution is - * z_lookup * (1 + β) * [q_lookup * f + γ] * (t_accum_k + βt_accum_{k+1} + γ(1 + β)) - - * z_lookup_shift * (s_accum_k + βs_accum_{k+1} + γ(1 + β)) - * where - * f = (w_1 + q_2*w_1_shift) + η(w_2 + q_m*w_2_shift) + η²(w_3 + q_c*w_3_shift) + η³q_index, - * t_accum = table_1 + ηtable_2 + η²table_3 + η³table_4, and - * s_accum = s_1 + ηs_2 + η²s_3 + η³s_4. - * Note: Selectors q_2, q_m and q_c are repurposed as 'column step size' for lookup gates. - * - * @param evals transformed to `evals + C(in(X)...)*scaling_factor` - * @param in an std::array containing the fully extended Univariate edges. - * @param parameters contains beta, gamma, and public_input_delta, .... - * @param scaling_factor optional term to scale the evaluation before adding to evals. - */ - template - inline static void accumulate(ContainerOverSubrelations& accumulators, - const AllEntities& in, - const Parameters& params, - const FF& scaling_factor) - { - BB_OP_COUNT_TIME_NAME("Lookup::accumulate"); - { - using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; - using View = typename Accumulator::View; - using ParameterView = GetParameterView; - - const auto& grand_product_delta = ParameterView(params.lookup_grand_product_delta); - - auto z_lookup = View(in.z_lookup); - auto z_lookup_shift = View(in.z_lookup_shift); - - auto lagrange_first = View(in.lagrange_first); - auto lagrange_last = View(in.lagrange_last); - - const auto lhs = compute_grand_product_numerator(in, params); // deg 4 or 8 - const auto rhs = compute_grand_product_denominator(in, params); // deg 1 or 2 - - // (deg 5 or 9) - (deg 3 or 5) - const auto tmp = - lhs * (z_lookup + lagrange_first) - rhs * (z_lookup_shift + lagrange_last * grand_product_delta); - std::get<0>(accumulators) += tmp * scaling_factor; - }; - - { - using Accumulator = std::tuple_element_t<1, ContainerOverSubrelations>; - using View = typename Accumulator::View; - auto z_lookup_shift = View(in.z_lookup_shift); - auto 
lagrange_last = View(in.lagrange_last); - - // Contribution (2) - std::get<1>(accumulators) += (lagrange_last * z_lookup_shift) * scaling_factor; - }; - }; -}; - -template using LookupRelation = Relation>; - -} // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/relations/ultra_relation_consistency.test.cpp b/barretenberg/cpp/src/barretenberg/relations/ultra_relation_consistency.test.cpp index 0406f82bcef..b40b45da3c6 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ultra_relation_consistency.test.cpp +++ b/barretenberg/cpp/src/barretenberg/relations/ultra_relation_consistency.test.cpp @@ -16,7 +16,6 @@ #include "barretenberg/relations/auxiliary_relation.hpp" #include "barretenberg/relations/delta_range_constraint_relation.hpp" #include "barretenberg/relations/elliptic_relation.hpp" -#include "barretenberg/relations/lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/relations/poseidon2_external_relation.hpp" #include "barretenberg/relations/poseidon2_internal_relation.hpp" @@ -204,84 +203,6 @@ TEST_F(UltraRelationConsistency, UltraPermutationRelation) run_test(/*random_inputs=*/true); }; -TEST_F(UltraRelationConsistency, LookupRelation) -{ - const auto run_test = [](bool random_inputs) { - using Relation = LookupRelation; - using SumcheckArrayOfValuesOverSubrelations = typename Relation::SumcheckArrayOfValuesOverSubrelations; - - const InputElements input_elements = random_inputs ? 
InputElements::get_random() : InputElements::get_special(); - const auto& w_1 = input_elements.w_l; - const auto& w_2 = input_elements.w_r; - const auto& w_3 = input_elements.w_o; - - const auto& w_1_shift = input_elements.w_l_shift; - const auto& w_2_shift = input_elements.w_r_shift; - const auto& w_3_shift = input_elements.w_o_shift; - - const auto& table_1 = input_elements.table_1; - const auto& table_2 = input_elements.table_2; - const auto& table_3 = input_elements.table_3; - const auto& table_4 = input_elements.table_4; - - const auto& table_1_shift = input_elements.table_1_shift; - const auto& table_2_shift = input_elements.table_2_shift; - const auto& table_3_shift = input_elements.table_3_shift; - const auto& table_4_shift = input_elements.table_4_shift; - - const auto& s_accum = input_elements.sorted_accum; - const auto& s_accum_shift = input_elements.sorted_accum_shift; - const auto& z_lookup = input_elements.z_lookup; - const auto& z_lookup_shift = input_elements.z_lookup_shift; - - const auto& table_index = input_elements.q_o; - const auto& column_1_step_size = input_elements.q_r; - const auto& column_2_step_size = input_elements.q_m; - const auto& column_3_step_size = input_elements.q_c; - const auto& q_lookup = input_elements.q_lookup; - - const auto& lagrange_first = input_elements.lagrange_first; - const auto& lagrange_last = input_elements.lagrange_last; - - SumcheckArrayOfValuesOverSubrelations expected_values; - - const auto parameters = RelationParameters::get_random(); - - const auto eta = parameters.eta; - const auto eta_two = parameters.eta_two; - const auto eta_three = parameters.eta_three; - const auto beta = parameters.beta; - const auto gamma = parameters.gamma; - auto grand_product_delta = parameters.lookup_grand_product_delta; - - // Extract the extended edges for manual computation of relation contribution - auto one_plus_beta = FF::one() + beta; - auto gamma_by_one_plus_beta = gamma * one_plus_beta; - - auto wire_accum = (w_1 + 
column_1_step_size * w_1_shift) + (w_2 + column_2_step_size * w_2_shift) * eta + - (w_3 + column_3_step_size * w_3_shift) * eta_two + table_index * eta_three; - - auto table_accum = table_1 + table_2 * eta + table_3 * eta_two + table_4 * eta_three; - auto table_accum_shift = - table_1_shift + table_2_shift * eta + table_3_shift * eta_two + table_4_shift * eta_three; - - // Contribution 1 - auto contribution_1 = (z_lookup + lagrange_first) * (q_lookup * wire_accum + gamma) * - (table_accum + table_accum_shift * beta + gamma_by_one_plus_beta) * one_plus_beta; - contribution_1 -= (z_lookup_shift + lagrange_last * grand_product_delta) * - (s_accum + s_accum_shift * beta + gamma_by_one_plus_beta); - expected_values[0] = contribution_1; - - // Contribution 2 - auto contribution_2 = z_lookup_shift * lagrange_last; - expected_values[1] = contribution_2; - - validate_relation_execution(expected_values, input_elements, parameters); - }; - run_test(/*random_inputs=*/false); - run_test(/*random_inputs=*/true); -}; - TEST_F(UltraRelationConsistency, DeltaRangeConstraintRelation) { const auto run_test = [](bool random_inputs) { diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/crs_factory.hpp b/barretenberg/cpp/src/barretenberg/srs/factories/crs_factory.hpp index 66e5e55f27d..635bc3c0f5f 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/crs_factory.hpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/crs_factory.hpp @@ -45,7 +45,7 @@ template <> class VerifierCrs { * @brief Returns the first G_1 element from the CRS, used by the Shplonk verifier to compute the final * commtiment. */ - virtual Curve::AffineElement get_first_g1() const = 0; + virtual Curve::AffineElement get_g1_identity() const = 0; }; template <> class VerifierCrs { @@ -62,7 +62,7 @@ template <> class VerifierCrs { * @brief Returns the first G_1 element from the CRS, used by the Shplonk verifier to compute the final * commtiment. 
*/ - virtual Curve::AffineElement get_first_g1() const = 0; + virtual Curve::AffineElement get_g1_identity() const = 0; }; /** diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.cpp b/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.cpp index f082700e32f..967e4e3612a 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.cpp @@ -18,7 +18,7 @@ FileVerifierCrs::FileVerifierCrs(std::string const& path, const si srs::IO::read_transcript_g2(g2_x, path); bb::pairing::precompute_miller_lines(bb::g2::one, precomputed_g2_lines[0]); bb::pairing::precompute_miller_lines(g2_x, precomputed_g2_lines[1]); - first_g1 = point_buf[0]; + g1_identity = point_buf[0]; } FileVerifierCrs::~FileVerifierCrs() @@ -33,7 +33,7 @@ FileVerifierCrs::FileVerifierCrs(std::string const& path, const monomials_ = scalar_multiplication::point_table_alloc(num_points); srs::IO::read_transcript_g1(monomials_.get(), num_points, path); scalar_multiplication::generate_pippenger_point_table(monomials_.get(), monomials_.get(), num_points); - first_g1 = monomials_[0]; + g1_identity = monomials_[0]; }; curve::Grumpkin::AffineElement* FileVerifierCrs::get_monomial_points() const diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.hpp b/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.hpp index 09dad29a1cb..149d9794098 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.hpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.hpp @@ -62,10 +62,10 @@ template <> class FileVerifierCrs : public VerifierCrs class FileVerifierCrs : public VerifierCrs monomials_; }; diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/mem_bn254_crs_factory.cpp b/barretenberg/cpp/src/barretenberg/srs/factories/mem_bn254_crs_factory.cpp index 0c9767328a0..42ce7e2c30e 100644 --- 
a/barretenberg/cpp/src/barretenberg/srs/factories/mem_bn254_crs_factory.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/mem_bn254_crs_factory.cpp @@ -28,10 +28,10 @@ class MemVerifierCrs : public VerifierCrs { g2::affine_element get_g2x() const { return g2_x; } pairing::miller_lines const* get_precomputed_g2_lines() const { return precomputed_g2_lines; } - g1::affine_element get_first_g1() const { return first_g1x; }; + g1::affine_element get_g1_identity() const { return g1_identityx; }; private: - g1::affine_element first_g1x; + g1::affine_element g1_identityx; g2::affine_element g2_x; pairing::miller_lines* precomputed_g2_lines; }; diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp b/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp index 190fa75cef1..df243d73785 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp @@ -68,7 +68,7 @@ TEST(reference_string, DISABLED_mem_grumpkin_file_consistency) auto file_verifier_crs = file_crs.get_verifier_crs(); auto mem_verifier_crs = file_crs.get_verifier_crs(); - EXPECT_EQ(mem_verifier_crs->get_first_g1(), file_verifier_crs->get_first_g1()); + EXPECT_EQ(mem_verifier_crs->get_g1_identity(), file_verifier_crs->get_g1_identity()); EXPECT_EQ(memcmp(file_verifier_crs->get_monomial_points(), mem_verifier_crs->get_monomial_points(), sizeof(Grumpkin::AffineElement) * 1024 * 2), diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/mem_grumpkin_crs_factory.cpp b/barretenberg/cpp/src/barretenberg/srs/factories/mem_grumpkin_crs_factory.cpp index 1001c9519a4..bbec0dccf0d 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/mem_grumpkin_crs_factory.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/mem_grumpkin_crs_factory.cpp @@ -26,7 +26,7 @@ class MemVerifierCrs : public VerifierCrs { virtual ~MemVerifierCrs() = default; 
Grumpkin::AffineElement* get_monomial_points() const override { return monomials_.get(); } size_t get_monomial_size() const override { return num_points; } - Grumpkin::AffineElement get_first_g1() const override { return monomials_[0]; }; + Grumpkin::AffineElement get_g1_identity() const override { return monomials_[0]; }; private: size_t num_points; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.cpp index 0286a7dbc94..055fa5ca299 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.cpp @@ -2,6 +2,12 @@ namespace bb::stdlib::recursion::honk { +/** + * @brief Performs recursive verification of the Client IVC proof. + * + * @todo (https://github.com/AztecProtocol/barretenberg/issues/934): Add logic for accumulating the pairing points + * produced by the verifiers (and potentially IPA accumulators for ECCVM verifier) + */ void ClientIVCRecursiveVerifier::verify(const ClientIVC::Proof& proof) { // Perform recursive folding verification diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.test.cpp index b7c5f01e502..4be2ca25960 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.test.cpp @@ -88,6 +88,10 @@ TEST_F(ClientIVCRecursionTests, Basic) // Generate the recursive verification circuit verifier.verify(proof); + info("Recursive Verifier: num gates = ", builder->num_gates); + + EXPECT_EQ(builder->failed(), false) << 
builder->err(); + EXPECT_TRUE(CircuitChecker::check(*builder)); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/decider_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/decider_recursive_verifier.cpp index 55d31e12096..44c083a544c 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/decider_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/decider_recursive_verifier.cpp @@ -15,7 +15,8 @@ std::array DeciderRecursiveVerifier_:: { using Sumcheck = ::bb::SumcheckVerifier; using PCS = typename Flavor::PCS; - using ZeroMorph = ::bb::ZeroMorphVerifier_; + using Curve = typename Flavor::Curve; + using ZeroMorph = ::bb::ZeroMorphVerifier_; using VerifierCommitments = typename Flavor::VerifierCommitments; using Transcript = typename Flavor::Transcript; @@ -32,12 +33,14 @@ std::array DeciderRecursiveVerifier_:: // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the // unrolled protocol. 
- auto pairing_points = ZeroMorph::verify(commitments.get_unshifted(), - commitments.get_to_be_shifted(), - claimed_evaluations.get_unshifted(), - claimed_evaluations.get_shifted(), - multivariate_challenge, - transcript); + auto opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + commitments.get_to_be_shifted(), + claimed_evaluations.get_unshifted(), + claimed_evaluations.get_shifted(), + multivariate_challenge, + Commitment::one(builder), + transcript); + auto pairing_points = PCS::reduce_verify(opening_claim, transcript); return pairing_points; } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/goblin_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/goblin_recursive_verifier.cpp index 692382da3a0..5ad5a190621 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/goblin_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/goblin_recursive_verifier.cpp @@ -2,6 +2,12 @@ namespace bb::stdlib::recursion::honk { +/** + * @brief Runs the Goblin recursive verifier consisting of ECCVM, Translator and Merge verifiers. 
+ * + * @todo https://github.com/AztecProtocol/barretenberg/issues/934: Add logic for accumulating the pairing points + * produced by the translator and merge verifier (and potentially IPA accumulators for ECCVM verifier) + */ void GoblinRecursiveVerifier::verify(const GoblinProof& proof) { // Run the ECCVM recursive verifier @@ -28,9 +34,7 @@ void GoblinRecursiveVerifier::verify(const GoblinProof& proof) }; translator_verifier.verify_translation(translation_evaluations); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1024): Perform recursive merge verification once it - // works with Ultra arithmetization - // MergeVerifier merge_verified{ builder }; - // [[maybe_unused]] auto merge_pairing_points = merge_verifier.verify_proof(proof.merge_proof); + MergeVerifier merge_verifier{ builder }; + merge_verifier.verify_proof(proof.merge_proof); } } // namespace bb::stdlib::recursion::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/protogalaxy_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/protogalaxy_recursive_verifier.cpp index daedb38fd71..0a1b48068bd 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/protogalaxy_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/protogalaxy_recursive_verifier.cpp @@ -53,17 +53,26 @@ void ProtoGalaxyRecursiveVerifier_::receive_and_finalise_inst domain_separator + "_" + labels.return_data_read_counts); } - // Get challenge for sorted list batching and wire four memory records commitment + // Get eta challenges auto [eta, eta_two, eta_three] = transcript->template get_challenges( domain_separator + "_eta", domain_separator + "_eta_two", domain_separator + "_eta_three"); - witness_commitments.sorted_accum = - transcript->template receive_from_prover(domain_separator + "_" + labels.sorted_accum); + + // Receive commitments to lookup argument 
polynomials + witness_commitments.lookup_read_counts = + transcript->template receive_from_prover(domain_separator + "_" + labels.lookup_read_counts); + witness_commitments.lookup_read_tags = + transcript->template receive_from_prover(domain_separator + "_" + labels.lookup_read_tags); + + // Receive commitments to wire 4 witness_commitments.w_4 = transcript->template receive_from_prover(domain_separator + "_" + labels.w_4); // Get permutation challenges and commitment to permutation and lookup grand products auto [beta, gamma] = transcript->template get_challenges(domain_separator + "_beta", domain_separator + "_gamma"); + witness_commitments.lookup_inverses = transcript->template receive_from_prover( + domain_separator + "_" + commitment_labels.lookup_inverses); + // If Goblin (i.e. using DataBus) receive commitments to log-deriv inverses polynomial if constexpr (IsGoblinFlavor) { witness_commitments.calldata_inverses = transcript->template receive_from_prover( @@ -74,8 +83,6 @@ void ProtoGalaxyRecursiveVerifier_::receive_and_finalise_inst witness_commitments.z_perm = transcript->template receive_from_prover(domain_separator + "_" + labels.z_perm); - witness_commitments.z_lookup = - transcript->template receive_from_prover(domain_separator + "_" + labels.z_lookup); // Compute correction terms for grand products const FF public_input_delta = diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/ultra_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/ultra_recursive_verifier.cpp index abb38ea241a..f609464efec 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/ultra_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/ultra_recursive_verifier.cpp @@ -40,7 +40,8 @@ std::array UltraRecursiveVerifier_::ve { using Sumcheck = ::bb::SumcheckVerifier; using PCS = typename Flavor::PCS; - using ZeroMorph = ::bb::ZeroMorphVerifier_; + using Curve 
= typename Flavor::Curve; + using ZeroMorph = ::bb::ZeroMorphVerifier_; using VerifierCommitments = typename Flavor::VerifierCommitments; using CommitmentLabels = typename Flavor::CommitmentLabels; using RelationParams = ::bb::RelationParameters; @@ -56,7 +57,8 @@ std::array UltraRecursiveVerifier_::ve transcript->template receive_from_prover("public_input_size"); transcript->template receive_from_prover("pub_inputs_offset"); - // For debugging purposes only + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1032): Uncomment these once it doesn't cause issues + // with the flows // ASSERT(static_cast(circuit_size.get_value()) == key->circuit_size); // ASSERT(static_cast(public_input_size.get_value()) == key->num_public_inputs); // ASSERT(static_cast(pub_inputs_offset.get_value()) == key->pub_inputs_offset); @@ -89,19 +91,25 @@ std::array UltraRecursiveVerifier_::ve transcript->template receive_from_prover(commitment_labels.return_data_read_counts); } - // Get challenge for sorted list batching and wire four memory records + // Get eta challenges; used in RAM/ROM memory records and log derivative lookup argument auto [eta, eta_two, eta_three] = transcript->template get_challenges("eta", "eta_two", "eta_three"); relation_parameters.eta = eta; relation_parameters.eta_two = eta_two; relation_parameters.eta_three = eta_three; - // Get commitments to sorted list accumulator and fourth wire - commitments.sorted_accum = transcript->template receive_from_prover(commitment_labels.sorted_accum); + // Get commitments to lookup argument polynomials and fourth wire + commitments.lookup_read_counts = + transcript->template receive_from_prover(commitment_labels.lookup_read_counts); + commitments.lookup_read_tags = + transcript->template receive_from_prover(commitment_labels.lookup_read_tags); commitments.w_4 = transcript->template receive_from_prover(commitment_labels.w_4); // Get permutation challenges auto [beta, gamma] = transcript->template get_challenges("beta", 
"gamma"); + commitments.lookup_inverses = + transcript->template receive_from_prover(commitment_labels.lookup_inverses); + // If Goblin (i.e. using DataBus) receive commitments to log-deriv inverses polynomial if constexpr (IsGoblinFlavor) { commitments.calldata_inverses = @@ -121,7 +129,6 @@ std::array UltraRecursiveVerifier_::ve // Get commitment to permutation and lookup grand products commitments.z_perm = transcript->template receive_from_prover(commitment_labels.z_perm); - commitments.z_lookup = transcript->template receive_from_prover(commitment_labels.z_lookup); // Execute Sumcheck Verifier and extract multivariate opening point u = (u_0, ..., u_{d-1}) and purported // multivariate evaluations at u @@ -138,14 +145,18 @@ std::array UltraRecursiveVerifier_::ve } auto [multivariate_challenge, claimed_evaluations, sumcheck_verified] = sumcheck.verify(relation_parameters, alpha, gate_challenges); - // Execute ZeroMorph multilinear PCS evaluation verifier - auto verifier_accumulator = ZeroMorph::verify(commitments.get_unshifted(), - commitments.get_to_be_shifted(), - claimed_evaluations.get_unshifted(), - claimed_evaluations.get_shifted(), - multivariate_challenge, - transcript); - return verifier_accumulator; + + // Execute ZeroMorph to produce an opening claim subsequently verified by a univariate PCS + auto opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + commitments.get_to_be_shifted(), + claimed_evaluations.get_unshifted(), + claimed_evaluations.get_shifted(), + multivariate_challenge, + Commitment::one(builder), + transcript); + auto pairing_points = PCS::reduce_verify(opening_claim, transcript); + + return pairing_points; } template class UltraRecursiveVerifier_>; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp index f8658a39fe3..605b44b702f 100644 --- 
a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp @@ -227,7 +227,7 @@ template class RecursiveVerifierTest : public testing // Arbitrarily tamper with the proof to be verified inner_prover.transcript->deserialize_full_transcript(); - inner_prover.transcript->sorted_accum_comm = InnerCommitment::one() * InnerFF::random_element(); + inner_prover.transcript->z_perm_comm = InnerCommitment::one() * InnerFF::random_element(); inner_prover.transcript->serialize_full_transcript(); inner_proof = inner_prover.export_proof(); diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/grand_product_library.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/grand_product_library.test.cpp index 9ab7c789f67..0bf80a8db8c 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/grand_product_library.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/grand_product_library.test.cpp @@ -146,134 +146,6 @@ template class GrandProductTests : public testing::Test { // Check consistency between locally computed z_perm and the one computed by the prover library EXPECT_EQ(prover_polynomials.z_perm, z_permutation_expected); }; - - /** - * @brief Check consistency of the computation of the lookup grand product polynomial z_lookup. - * @details This test compares a simple, unoptimized, easily readable calculation of the grand product z_lookup - * to the optimized implementation used by the prover. It's purpose is to provide confidence that some optimization - * introduced into the calculation has not changed the result. - * @note This test does confirm the correctness of z_lookup, only that the two implementations yield an - * identical result. 
- */ - static void test_lookup_grand_product_construction() - { - using Flavor = UltraFlavor; - using ProverPolynomials = typename Flavor::ProverPolynomials; - - // Set a mock circuit size - static const size_t circuit_size = 8; - - // Construct a ProverPolynomials object with completely random polynomials - ProverPolynomials prover_polynomials; - for (auto& poly : prover_polynomials.get_unshifted()) { - poly = get_random_polynomial(circuit_size); - poly[0] = 0; // for shiftability - } - prover_polynomials.set_shifted(); - - // Get random challenges - auto beta = FF::random_element(); - auto gamma = FF::random_element(); - auto eta = FF::random_element(); - auto eta_two = FF::random_element(); - auto eta_three = FF::random_element(); - - RelationParameters params{ - .eta = eta, - .eta_two = eta_two, - .eta_three = eta_three, - .beta = beta, - .gamma = gamma, - .public_input_delta = 1, - .lookup_grand_product_delta = 1, - }; - - // Method 1: Compute z_lookup using the prover library method - constexpr size_t LOOKUP_RELATION_INDEX = 1; - using LHS = typename std::tuple_element::type; - using RHS = LookupRelation; - static_assert(std::same_as); - compute_grand_product(prover_polynomials, params); - - // Method 2: Compute the lookup grand product polynomial Z_lookup: - // - // ∏(1 + β) ⋅ ∏(q_lookup*f_k + γ) ⋅ ∏(t_k + βt_{k+1} + γ(1 + β)) - // Z_lookup(X_j) = ----------------------------------------------------------------- - // ∏(s_k + βs_{k+1} + γ(1 + β)) - // - // in a way that is simple to read (but inefficient). See prover library method for more details. 
- - std::array accumulators; - for (size_t i = 0; i < 4; ++i) { - accumulators[i] = Polynomial{ circuit_size }; - } - - // Step (1) - - auto wires = prover_polynomials.get_wires(); - auto tables = prover_polynomials.get_tables(); - auto sorted_batched = prover_polynomials.sorted_accum; - auto column_1_step_size = prover_polynomials.q_r; - auto column_2_step_size = prover_polynomials.q_m; - auto column_3_step_size = prover_polynomials.q_c; - auto lookup_index_selector = prover_polynomials.q_o; - auto lookup_selector = prover_polynomials.q_lookup; - - // Note: block_mask is used for efficient modulus, i.e. i % N := i & (N-1), for N = 2^k - const size_t block_mask = circuit_size - 1; - // Initialize 't(X)' to be used in an expression of the form t(X) + β*t(Xω) - FF table_i = tables[0][0] + tables[1][0] * eta + tables[2][0] * eta_two + tables[3][0] * eta_three; - for (size_t i = 0; i < circuit_size; ++i) { - size_t shift_idx = (i + 1) & block_mask; - - // f = (w_1 + q_2*w_1(Xω)) + η(w_2 + q_m*w_2(Xω)) + η²(w_3 + q_c*w_3(Xω)) + η³q_index. 
- FF f_i = (wires[0][i] + wires[0][shift_idx] * column_1_step_size[i]) + - (wires[1][i] + wires[1][shift_idx] * column_2_step_size[i]) * eta + - (wires[2][i] + wires[2][shift_idx] * column_3_step_size[i]) * eta_two + - eta_three * lookup_index_selector[i]; - - // q_lookup * f + γ - accumulators[0][i] = lookup_selector[i] * f_i + gamma; - - // t = t_1 + ηt_2 + η²t_3 + η³t_4 - FF table_i_plus_1 = tables[0][shift_idx] + eta * tables[1][shift_idx] + eta_two * tables[2][shift_idx] + - eta_three * tables[3][shift_idx]; - - // t + βt(Xω) + γ(1 + β) - accumulators[1][i] = table_i + table_i_plus_1 * beta + gamma * (FF::one() + beta); - - // (1 + β) - accumulators[2][i] = FF::one() + beta; - - // s + βs(Xω) + γ(1 + β) - accumulators[3][i] = sorted_batched[i] + beta * sorted_batched[shift_idx] + gamma * (FF::one() + beta); - - // Set t(X_i) for next iteration - table_i = table_i_plus_1; - } - - // Step (2) - for (auto& accum : accumulators) { - for (size_t i = 0; i < circuit_size - 1; ++i) { - accum[i + 1] *= accum[i]; - } - } - - // Step (3) - Polynomial z_lookup_expected(circuit_size); - z_lookup_expected[0] = FF::zero(); // Z_lookup_0 = 0 - - // Compute the numerator in accumulators[0]; The denominator is in accumulators[3] - for (size_t i = 0; i < circuit_size - 1; ++i) { - accumulators[0][i] *= accumulators[1][i] * accumulators[2][i]; - } - // Compute Z_lookup_i, i = [1, n-1] - for (size_t i = 0; i < circuit_size - 1; ++i) { - z_lookup_expected[i + 1] = accumulators[0][i] / accumulators[3][i]; - } - - EXPECT_EQ(prover_polynomials.z_lookup, z_lookup_expected); - }; }; using FieldTypes = testing::Types; @@ -283,8 +155,3 @@ TYPED_TEST(GrandProductTests, GrandProductPermutation) { TestFixture::template test_permutation_grand_product_construction(); } - -TYPED_TEST(GrandProductTests, GrandProductLookup) -{ - TestFixture::test_lookup_grand_product_construction(); -} diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp 
b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp index 267c8643f4f..45467f78325 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp @@ -13,7 +13,7 @@ #include "barretenberg/relations/delta_range_constraint_relation.hpp" #include "barretenberg/relations/ecc_op_queue_relation.hpp" #include "barretenberg/relations/elliptic_relation.hpp" -#include "barretenberg/relations/lookup_relation.hpp" +#include "barretenberg/relations/logderiv_lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/relations/poseidon2_external_relation.hpp" #include "barretenberg/relations/poseidon2_internal_relation.hpp" @@ -39,24 +39,23 @@ class MegaFlavor { static constexpr size_t NUM_WIRES = CircuitBuilder::NUM_WIRES; // The number of multivariate polynomials on which a sumcheck prover sumcheck operates (including shifts). We often // need containers of this size to hold related data, so we choose a name more agnostic than `NUM_POLYNOMIALS`. - // Note: this number does not include the individual sorted list polynomials. - static constexpr size_t NUM_ALL_ENTITIES = 58; + static constexpr size_t NUM_ALL_ENTITIES = 57; // The number of polynomials precomputed to describe a circuit and to aid a prover in constructing a satisfying // assignment of witnesses. We again choose a neutral name. static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 30; // The total number of witness entities not including shifts. 
- static constexpr size_t NUM_WITNESS_ENTITIES = 17; + static constexpr size_t NUM_WITNESS_ENTITIES = 18; // Total number of folded polynomials, which is just all polynomials except the shifts static constexpr size_t NUM_FOLDED_ENTITIES = NUM_PRECOMPUTED_ENTITIES + NUM_WITNESS_ENTITIES; - using GrandProductRelations = std::tuple, bb::LookupRelation>; + using GrandProductRelations = std::tuple>; // define the tuple of Relations that comprise the Sumcheck relation // Note: made generic for use in MegaRecursive. template using Relations_ = std::tuple, bb::UltraPermutationRelation, - bb::LookupRelation, + bb::LogDerivLookupRelation, bb::DeltaRangeConstraintRelation, bb::EllipticRelation, bb::AuxiliaryRelation, @@ -66,8 +65,6 @@ class MegaFlavor { bb::Poseidon2InternalRelation>; using Relations = Relations_; - using LogDerivLookupRelation = bb::DatabusLookupRelation; - static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); static constexpr size_t MAX_TOTAL_RELATION_LENGTH = compute_max_total_relation_length(); @@ -179,19 +176,20 @@ class MegaFlavor { template class DerivedEntities { public: DEFINE_FLAVOR_MEMBERS(DataType, - sorted_accum, // column 4 - z_perm, // column 5 - z_lookup, // column 6 - ecc_op_wire_1, // column 7 - ecc_op_wire_2, // column 8 - ecc_op_wire_3, // column 9 - ecc_op_wire_4, // column 10 - calldata, // column 11 - calldata_read_counts, // column 12 - calldata_inverses, // column 13 - return_data, // column 14 - return_data_read_counts, // column 15 - return_data_inverses); // column 16 + z_perm, // column 4 + lookup_inverses, // column 5 + lookup_read_counts, // column 6 + lookup_read_tags, // column 7 + ecc_op_wire_1, // column 8 + ecc_op_wire_2, // column 9 + ecc_op_wire_3, // column 10 + ecc_op_wire_4, // column 11 + calldata, // column 12 + calldata_read_counts, // column 13 + calldata_inverses, // column 14 + return_data, // column 15 + return_data_read_counts, // column 16 + return_data_inverses); // 
column 17 }; /** @@ -214,9 +212,10 @@ class MegaFlavor { this->w_r, this->w_o, this->w_4, - this->sorted_accum, this->z_perm, - this->z_lookup, + this->lookup_inverses, + this->lookup_read_counts, + this->lookup_read_tags, this->ecc_op_wire_1, this->ecc_op_wire_2, this->ecc_op_wire_3, @@ -232,18 +231,15 @@ class MegaFlavor { template class ShiftedEntities { public: DEFINE_FLAVOR_MEMBERS(DataType, - table_1_shift, // column 0 - table_2_shift, // column 1 - table_3_shift, // column 2 - table_4_shift, // column 3 - w_l_shift, // column 4 - w_r_shift, // column 5 - w_o_shift, // column 6 - w_4_shift, // column 7 - sorted_accum_shift, // column 8 - z_perm_shift, // column 9 - z_lookup_shift // column 10 - ) + table_1_shift, // column 0 + table_2_shift, // column 1 + table_3_shift, // column 2 + table_4_shift, // column 3 + w_l_shift, // column 4 + w_r_shift, // column 5 + w_o_shift, // column 6 + w_4_shift, // column 7 + z_perm_shift) // column 8 }; public: @@ -281,8 +277,8 @@ class MegaFlavor { auto get_witness() { return WitnessEntities::get_all(); }; auto get_to_be_shifted() { - return RefArray{ this->table_1, this->table_2, this->table_3, this->table_4, this->w_l, this->w_r, - this->w_o, this->w_4, this->sorted_accum, this->z_perm, this->z_lookup }; + return RefArray{ this->table_1, this->table_2, this->table_3, this->table_4, this->w_l, + this->w_r, this->w_o, this->w_4, this->z_perm }; }; auto get_precomputed() { return PrecomputedEntities::get_all(); } auto get_shifted() { return ShiftedEntities::get_all(); }; @@ -351,44 +347,8 @@ class MegaFlavor { std::vector memory_read_records; std::vector memory_write_records; - std::array sorted_polynomials; ProverPolynomials polynomials; // storage for all polynomials evaluated by the prover - void compute_sorted_accumulator_polynomials(const FF& eta, const FF& eta_two, const FF& eta_three) - { - // Compute sorted witness-table accumulator - compute_sorted_list_accumulator(eta, eta_two, eta_three); - - // Finalize fourth 
wire polynomial by adding lookup memory records - add_plookup_memory_records_to_wire_4(eta, eta_two, eta_three); - } - - /** - * @brief Construct sorted list accumulator polynomial 's'. - * - * @details Compute s = s_1 + η*s_2 + η²*s_3 + η³*s_4 (via Horner) where s_i are the - * sorted concatenated witness/table polynomials - * - * @param key proving key - * @param sorted_list_polynomials sorted concatenated witness/table polynomials - * @param eta random challenge - * @return Polynomial - */ - void compute_sorted_list_accumulator(const FF& eta, const FF& eta_two, const FF& eta_three) - { - - auto& sorted_list_accumulator = polynomials.sorted_accum; - - // Construct s via Horner, i.e. s = s_1 + η(s_2 + η(s_3 + η*s_4)) - for (size_t i = 0; i < this->circuit_size; ++i) { - FF T0 = sorted_polynomials[3][i] * eta_three; - T0 += sorted_polynomials[2][i] * eta_two; - T0 += sorted_polynomials[1][i] * eta; - T0 += sorted_polynomials[0][i]; - sorted_list_accumulator[i] = T0; - } - } - /** * @brief Add plookup memory records to the fourth wire polynomial * @@ -398,7 +358,7 @@ class MegaFlavor { * @tparam Flavor * @param eta challenge produced after commitment to first three wire polynomials */ - void add_plookup_memory_records_to_wire_4(const FF& eta, const FF& eta_two, const FF& eta_three) + void add_ram_rom_memory_records_to_wire_4(const FF& eta, const FF& eta_two, const FF& eta_three) { // The plookup memory record values are computed at the indicated indices as // w4 = w3 * eta^3 + w2 * eta^2 + w1 * eta + read_write_flag; @@ -422,14 +382,18 @@ class MegaFlavor { } /** - * @brief Compute the inverse polynomial used in the databus log derivative lookup argument + * @brief Compute the inverse polynomials used in the log derivative lookup relations * * @tparam Flavor * @param beta * @param gamma */ - void compute_logderivative_inverse(const RelationParameters& relation_parameters) + void compute_logderivative_inverses(const RelationParameters& relation_parameters) { + // 
Compute inverses for conventional lookups + compute_logderivative_inverse>( + this->polynomials, relation_parameters, this->circuit_size); + // Compute inverses for calldata reads DatabusLookupRelation::compute_logderivative_inverse( this->polynomials, relation_parameters, this->circuit_size); @@ -440,7 +404,7 @@ class MegaFlavor { } /** - * @brief Computes public_input_delta, lookup_grand_product_delta, the z_perm and z_lookup polynomials + * @brief Computes public_input_delta and the permutation grand product polynomial * * @param relation_parameters */ @@ -677,8 +641,9 @@ class MegaFlavor { w_o = "W_O"; w_4 = "W_4"; z_perm = "Z_PERM"; - z_lookup = "Z_LOOKUP"; - sorted_accum = "SORTED_ACCUM"; + lookup_inverses = "LOOKUP_INVERSES"; + lookup_read_counts = "LOOKUP_READ_COUNTS"; + lookup_read_tags = "LOOKUP_READ_TAGS"; ecc_op_wire_1 = "ECC_OP_WIRE_1"; ecc_op_wire_2 = "ECC_OP_WIRE_2"; ecc_op_wire_3 = "ECC_OP_WIRE_3"; @@ -768,9 +733,10 @@ class MegaFlavor { this->w_r = commitments.w_r; this->w_o = commitments.w_o; this->w_4 = commitments.w_4; - this->sorted_accum = commitments.sorted_accum; this->z_perm = commitments.z_perm; - this->z_lookup = commitments.z_lookup; + this->lookup_inverses = commitments.lookup_inverses; + this->lookup_read_counts = commitments.lookup_read_counts; + this->lookup_read_tags = commitments.lookup_read_tags; this->ecc_op_wire_1 = commitments.ecc_op_wire_1; this->ecc_op_wire_2 = commitments.ecc_op_wire_2; this->ecc_op_wire_3 = commitments.ecc_op_wire_3; @@ -811,10 +777,11 @@ class MegaFlavor { Commitment return_data_comm; Commitment return_data_read_counts_comm; Commitment return_data_inverses_comm; - Commitment sorted_accum_comm; Commitment w_4_comm; Commitment z_perm_comm; - Commitment z_lookup_comm; + Commitment lookup_inverses_comm; + Commitment lookup_read_counts_comm; + Commitment lookup_read_tags_comm; std::vector> sumcheck_univariates; std::array sumcheck_evaluations; std::vector zm_cq_comms; @@ -867,10 +834,11 @@ class MegaFlavor { 
return_data_comm = deserialize_from_buffer(proof_data, num_frs_read); return_data_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); return_data_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); - sorted_accum_comm = deserialize_from_buffer(proof_data, num_frs_read); + lookup_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); + lookup_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); w_4_comm = deserialize_from_buffer(proof_data, num_frs_read); + lookup_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); z_perm_comm = deserialize_from_buffer(proof_data, num_frs_read); - z_lookup_comm = deserialize_from_buffer(proof_data, num_frs_read); for (size_t i = 0; i < log_n; ++i) { sumcheck_univariates.push_back( deserialize_from_buffer>(proof_data, @@ -908,10 +876,11 @@ class MegaFlavor { serialize_to_buffer(return_data_comm, proof_data); serialize_to_buffer(return_data_read_counts_comm, proof_data); serialize_to_buffer(return_data_inverses_comm, proof_data); - serialize_to_buffer(sorted_accum_comm, proof_data); + serialize_to_buffer(lookup_read_counts_comm, proof_data); + serialize_to_buffer(lookup_read_tags_comm, proof_data); serialize_to_buffer(w_4_comm, proof_data); + serialize_to_buffer(lookup_inverses_comm, proof_data); serialize_to_buffer(z_perm_comm, proof_data); - serialize_to_buffer(z_lookup_comm, proof_data); for (size_t i = 0; i < log_n; ++i) { serialize_to_buffer(sumcheck_univariates[i], proof_data); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp index 5d5b3c03953..0682a8a44ab 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp @@ -58,6 +58,35 @@ class MockCircuits { } } + /** + * @brief Add lookup gates using the uint32 XOR lookup table 
(table size 4096) + * @brief Each iteration adds 6 lookup gates and results in a minimum circuit size of 4096 + * + * @param builder + * @param num_gates + */ + template static void add_lookup_gates(Builder& builder, size_t num_iterations = 1) + { + auto UINT32_XOR = plookup::MultiTableId::UINT32_XOR; + + // Each iteration adds 6 lookup gates (due to six 6-bit limbs); the first adds a table of size 4096 + for (size_t i = 0; i < num_iterations; ++i) { + // define some arbitrary inputs to uint32 XOR + uint32_t left_value = engine.get_random_uint32(); + uint32_t right_value = engine.get_random_uint32(); + + fr left = fr{ left_value, 0, 0, 0 }.to_montgomery_form(); + fr right = fr{ right_value, 0, 0, 0 }.to_montgomery_form(); + + auto left_idx = builder.add_variable(left); + auto right_idx = builder.add_variable(right); + + // perform lookups from the uint32 XOR table + auto accumulators = plookup::get_lookup_accumulators(UINT32_XOR, left, right, /*is_2_to_1_lookup*/ true); + builder.create_gates_from_plookup_accumulators(UINT32_XOR, accumulators, left_idx, right_idx); + } + } + /** * @brief Populate a builder with a specified number of arithmetic gates; includes a PI * diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/plookup_tables.cpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/plookup_tables.cpp index b897fc8c309..0440eb17b9c 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/plookup_tables.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/plookup_tables.cpp @@ -143,14 +143,14 @@ const MultiTable& get_multitable(const MultiTableId id) /** * @brief Given a table ID and the key(s) for a key-value lookup, return the lookup accumulators - * @details In general the number of bits in key/value is greater than what can be efficiently supported in lookup - * tables. For this reason we actually perform lookups on the corresponding limbs. 
However, since we're interested in - * the full values and not the limbs, its convenient to structure the witnesses of lookup gates to store the former. - * This way we don't have to waste gates reaccumulating the limbs to compute the actual value of interest. The way to do - * this is to populate the wires with 'accumulator' values such that the first gate in the series contains the full - * accumulated values, and successive gates contain prior stages of the accumulator such that wire_i - r*wire_{i-1} = - * v_i, where r = num limb bits and v_i is a limb that explicitly appears in one of the lookup tables. See the detailed - * comment block below for more explanation. + * @details In general the number of bits in original key/value is greater than what can be efficiently supported in + * lookup tables. For this reason we actually perform lookups on the corresponding limbs. However, since we're + * interested in the original values and not the limbs, its convenient to structure the witnesses of lookup gates to + * store the former. This way we don't have to waste gates reaccumulating the limbs to compute the actual value of + * interest. The way to do this is to populate the wires with 'accumulator' values such that the first gate in the + * series contains the full accumulated values, and successive gates contain prior stages of the accumulator such that + * wire_i - r*wire_{i-1} = v_i, where r = num limb bits and v_i is a limb that explicitly appears in one of the lookup + * tables. See the detailed comment block below for more explanation. 
* * @param id * @param key_a @@ -176,7 +176,7 @@ ReadData get_lookup_accumulators(const MultiTableId id, std::vector column_3_raw_values; for (size_t i = 0; i < num_lookups; ++i) { - // compute the value(s) corresponding to the key(s) using on the i-th basic table query function + // compute the value(s) corresponding to the key(s) using the i-th basic table query function const auto values = multi_table.get_table_values[i]({ key_a_slices[i], key_b_slices[i] }); // store all query data in raw columns and key entry column_1_raw_values.emplace_back(key_a_slices[i]); diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/types.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/types.hpp index c41d4e94670..259082820eb 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/types.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/plookup_tables/types.hpp @@ -122,8 +122,8 @@ enum MultiTableId { }; /** - * @brief Container for managing multiple BasicTables plus the data needed to combine basic table outputs (limbs) into - * accumulators. Does not store actual raw table data. + * @brief Container for managing multiple BasicTables plus the data needed to combine basic table outputs (e.g. limbs) + * into accumulators. Does not store actual raw table data. * @details As a simple example, consider using lookups to compute XOR on uint32_t inputs. To do this we decompose the * inputs into 6 limbs and use a BasicTable for 6-bit XOR lookups. In this case the MultiTable simply manages 6 basic * tables, all of which are the XOR BasicTable. 
(In many cases all of the BasicTables managed by a MultiTable are @@ -213,7 +213,7 @@ struct MultiTable { // std::array value{ bb::fr(0), bb::fr(0) }; // bool operator<(const KeyEntry& other) const { return key < other.key; } -// std::array to_sorted_list_components(const bool use_two_keys) const +// std::array to_table_components(const bool use_two_keys) const // { // return { // key[0], @@ -248,7 +248,7 @@ struct MultiTable { // return (key.from_montgomery_form() < other.key.from_montgomery_form()); // } -// std::array to_sorted_list_components() const { return { key, values[0], values[0] }; } +// std::array to_table_components() const { return { key, values[0], values[0] }; } // } // BasicTableId id; @@ -268,6 +268,66 @@ struct MultiTable { // } +/** + * @brief A map from 'entry' to 'index' where entry is a row in a BasicTable and index is the row at which that entry + * exists in the table + * @details Such a map is needed to in order to construct read_counts (the polynomial containing the number of reads + * from each entry in a table) for the log-derivative lookup argument. A BasicTable essentially consists of 3 columns, + * and 'lookups' are recorded as rows in this table. The index at which this data exists in the table is not explicitly + * known at the time of lookup gate creation. 
This map can be used to construct read counts from the set of lookups that + * have been performed via an operation like read_counts[index_map[lookup_data]]++ + * + */ +struct LookupHashTable { + using FF = bb::fr; + using Key = std::array; // an entry in a lookup table + using Value = size_t; // the index of an entry in a lookup table + + // Define a simple hash on three field elements + struct HashFunction { + FF mult_const; + FF const_sqr; + + HashFunction() + : mult_const(FF(uint256_t(0x1337, 0x1336, 0x1335, 0x1334))) + , const_sqr(mult_const.sqr()) + {} + + size_t operator()(const Key& entry) const + { + FF result = entry[0] + mult_const * entry[1] + const_sqr * entry[2]; + return static_cast(result.reduce_once().data[0]); + } + }; + + std::unordered_map index_map; + + LookupHashTable() = default; + + // Initialize the entry-index map with the columns of a table + void initialize(std::vector& column_1, std::vector& column_2, std::vector& column_3) + { + for (size_t i = 0; i < column_1.size(); ++i) { + index_map[{ column_1[i], column_2[i], column_3[i] }] = i; + } + } + + // Given an entry in the table, return its index in the table + Value operator[](const Key& key) const + { + auto it = index_map.find(key); + if (it != index_map.end()) { + return it->second; + } else { + info("LookupHashTable: Key not found!"); + ASSERT(false); + return 0; + } + } + + bool operator==(const LookupHashTable& other) const = default; +}; + /** * @brief A basic table from which we can perform lookups (for example, an xor table) * @details Also stores the lookup gate data for all lookups performed on this table @@ -289,7 +349,8 @@ struct BasicTable { return key[0] < other.key[0] || ((key[0] == other.key[0]) && key[1] < other.key[1]); } - std::array to_sorted_list_components(const bool use_two_keys) const + // Express the key-value pair as the entries of a 3-column row in a table + std::array to_table_components(const bool use_two_keys) const { return { bb::fr(key[0]), @@ -313,6 
+374,11 @@ struct BasicTable { std::vector column_3; std::vector lookup_gates; // wire data for all lookup gates created for lookups on this table + // Map from a table entry to its index in the table; used for constructing read counts + LookupHashTable index_map; + + void initialize_index_map() { index_map.initialize(column_1, column_2, column_3); } + std::array (*get_values_from_key)(const std::array); bool operator==(const BasicTable& other) const = default; diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp index 909aa29d0d2..6ca68d7037e 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp @@ -12,7 +12,7 @@ #include "barretenberg/relations/auxiliary_relation.hpp" #include "barretenberg/relations/delta_range_constraint_relation.hpp" #include "barretenberg/relations/elliptic_relation.hpp" -#include "barretenberg/relations/lookup_relation.hpp" +#include "barretenberg/relations/logderiv_lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/relations/relation_parameters.hpp" #include "barretenberg/relations/ultra_arithmetic_relation.hpp" @@ -36,23 +36,22 @@ class UltraFlavor { static constexpr size_t NUM_WIRES = CircuitBuilder::NUM_WIRES; // The number of multivariate polynomials on which a sumcheck prover sumcheck operates (including shifts). We often // need containers of this size to hold related data, so we choose a name more agnostic than `NUM_POLYNOMIALS`. - // Note: this number does not include the individual sorted list polynomials. - static constexpr size_t NUM_ALL_ENTITIES = 43; + static constexpr size_t NUM_ALL_ENTITIES = 42; // The number of polynomials precomputed to describe a circuit and to aid a prover in constructing a satisfying // assignment of witnesses. 
We again choose a neutral name. static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 25; // The total number of witness entities not including shifts. - static constexpr size_t NUM_WITNESS_ENTITIES = 7; + static constexpr size_t NUM_WITNESS_ENTITIES = 8; // Total number of folded polynomials, which is just all polynomials except the shifts static constexpr size_t NUM_FOLDED_ENTITIES = NUM_PRECOMPUTED_ENTITIES + NUM_WITNESS_ENTITIES; - using GrandProductRelations = std::tuple, bb::LookupRelation>; + using GrandProductRelations = std::tuple>; // define the tuple of Relations that comprise the Sumcheck relation // Note: made generic for use in MegaRecursive. template using Relations_ = std::tuple, bb::UltraPermutationRelation, - bb::LookupRelation, + bb::LogDerivLookupRelation, bb::DeltaRangeConstraintRelation, bb::EllipticRelation, bb::AuxiliaryRelation>; @@ -144,17 +143,18 @@ class UltraFlavor { template class WitnessEntities { public: DEFINE_FLAVOR_MEMBERS(DataType, - w_l, // column 0 - w_r, // column 1 - w_o, // column 2 - w_4, // column 3 - sorted_accum, // column 4 - z_perm, // column 5 - z_lookup) // column 6 + w_l, // column 0 + w_r, // column 1 + w_o, // column 2 + w_4, // column 3 + z_perm, // column 4 + lookup_inverses, // column 5 + lookup_read_counts, // column 6 + lookup_read_tags) // column 7 auto get_wires() { return RefArray{ w_l, w_r, w_o, w_4 }; }; - MSGPACK_FIELDS(w_l, w_r, w_o, w_4, sorted_accum, z_perm, z_lookup); + MSGPACK_FIELDS(w_l, w_r, w_o, w_4, z_perm, lookup_inverses, lookup_read_counts, lookup_read_tags); }; /** @@ -163,22 +163,20 @@ class UltraFlavor { template class ShiftedEntities { public: DEFINE_FLAVOR_MEMBERS(DataType, - table_1_shift, // column 0 - table_2_shift, // column 1 - table_3_shift, // column 2 - table_4_shift, // column 3 - w_l_shift, // column 4 - w_r_shift, // column 5 - w_o_shift, // column 6 - w_4_shift, // column 7 - sorted_accum_shift, // column 8 - z_perm_shift, // column 9 - z_lookup_shift) // column 10 + 
table_1_shift, // column 0 + table_2_shift, // column 1 + table_3_shift, // column 2 + table_4_shift, // column 3 + w_l_shift, // column 4 + w_r_shift, // column 5 + w_o_shift, // column 6 + w_4_shift, // column 7 + z_perm_shift) // column 10 auto get_shifted() { - return RefArray{ table_1_shift, table_2_shift, table_3_shift, table_4_shift, w_l_shift, w_r_shift, - w_o_shift, w_4_shift, sorted_accum_shift, z_perm_shift, z_lookup_shift }; + return RefArray{ table_1_shift, table_2_shift, table_3_shift, table_4_shift, w_l_shift, + w_r_shift, w_o_shift, w_4_shift, z_perm_shift }; }; }; @@ -203,7 +201,6 @@ class UltraFlavor { auto get_sigmas() { return RefArray{ this->sigma_1, this->sigma_2, this->sigma_3, this->sigma_4 }; }; auto get_ids() { return RefArray{ this->id_1, this->id_2, this->id_3, this->id_4 }; }; auto get_tables() { return RefArray{ this->table_1, this->table_2, this->table_3, this->table_4 }; }; - // Gemini-specific getters. auto get_unshifted() { return concatenate(PrecomputedEntities::get_all(), WitnessEntities::get_all()); @@ -214,8 +211,8 @@ class UltraFlavor { auto get_witness() { return WitnessEntities::get_all(); }; auto get_to_be_shifted() { - return RefArray{ this->table_1, this->table_2, this->table_3, this->table_4, this->w_l, this->w_r, - this->w_o, this->w_4, this->sorted_accum, this->z_perm, this->z_lookup }; + return RefArray{ this->table_1, this->table_2, this->table_3, this->table_4, this->w_l, + this->w_r, this->w_o, this->w_4, this->z_perm }; }; auto get_shifted() { return ShiftedEntities::get_all(); }; }; @@ -240,7 +237,8 @@ class UltraFlavor { // Define all operations as default, except copy construction/assignment ProverPolynomials() = default; ProverPolynomials(size_t circuit_size) - { // Initialize all unshifted polynomials to the zero polynomial and initialize the shifted polys + { // Initialize all unshifted polynomials to the zero polynomial and initialize the + // shifted polys for (auto& poly : get_unshifted()) { poly = 
Polynomial{ circuit_size }; } @@ -285,57 +283,22 @@ class UltraFlavor { std::vector memory_read_records; std::vector memory_write_records; - std::array sorted_polynomials; ProverPolynomials polynomials; // storage for all polynomials evaluated by the prover - void compute_sorted_accumulator_polynomials(const FF& eta, const FF& eta_two, const FF& eta_three) - { - // Compute sorted witness-table accumulator - compute_sorted_list_accumulator(eta, eta_two, eta_three); - - // Finalize fourth wire polynomial by adding lookup memory records - add_plookup_memory_records_to_wire_4(eta, eta_two, eta_three); - } - /** - * @brief Construct sorted list accumulator polynomial 's'. + * @brief Add RAM/ROM memory records to the fourth wire polynomial * - * @details Compute s = s_1 + η*s_2 + η²*s_3 + η³*s_4 (via Horner) where s_i are the - * sorted concatenated witness/table polynomials - * - * @param key proving key - * @param sorted_list_polynomials sorted concatenated witness/table polynomials - * @param eta random challenge - * @return Polynomial - */ - void compute_sorted_list_accumulator(const FF& eta, const FF& eta_two, const FF& eta_three) - { - auto& sorted_list_accumulator = polynomials.sorted_accum; - - // Construct s via Horner, i.e. s = s_1 + η(s_2 + η(s_3 + η*s_4)) - for (size_t i = 0; i < this->circuit_size; ++i) { - FF T0 = sorted_polynomials[3][i] * eta_three; - T0 += sorted_polynomials[2][i] * eta_two; - T0 += sorted_polynomials[1][i] * eta; - T0 += sorted_polynomials[0][i]; - sorted_list_accumulator[i] = T0; - } - } - - /** - * @brief Add plookup memory records to the fourth wire polynomial - * - * @details This operation must be performed after the first three wires have been committed to, hence the - * dependence on the `eta` challenge. + * @details This operation must be performed after the first three wires have been + * committed to, hence the dependence on the `eta` challenge. 
* * @tparam Flavor * @param eta challenge produced after commitment to first three wire polynomials */ - void add_plookup_memory_records_to_wire_4(const FF& eta, const FF& eta_two, const FF& eta_three) + void add_ram_rom_memory_records_to_wire_4(const FF& eta, const FF& eta_two, const FF& eta_three) { - // The plookup memory record values are computed at the indicated indices as + // The memory record values are computed at the indicated indices as // w4 = w3 * eta^3 + w2 * eta^2 + w1 * eta + read_write_flag; - // (See plookup_auxiliary_widget.hpp for details) + // (See the Auxiliary relation for details) auto wires = polynomials.get_wires(); // Compute read record values @@ -355,7 +318,21 @@ class UltraFlavor { } /** - * @brief Computes public_input_delta, lookup_grand_product_delta, the z_perm and z_lookup polynomials + * @brief Compute the inverse polynomial used in the log derivative lookup argument + * + * @tparam Flavor + * @param beta + * @param gamma + */ + void compute_logderivative_inverses(const RelationParameters& relation_parameters) + { + // Compute inverses for conventional lookups + compute_logderivative_inverse>( + this->polynomials, relation_parameters, this->circuit_size); + } + + /** + * @brief Computes public_input_delta and the permutation grand product polynomial * * @param relation_parameters */ @@ -403,7 +380,8 @@ class UltraFlavor { commitment = proving_key.commitment_key->commit(polynomial); } } - // TODO(https://github.com/AztecProtocol/barretenberg/issues/964): Clean the boilerplate up. + // TODO(https://github.com/AztecProtocol/barretenberg/issues/964): Clean the boilerplate + // up. 
VerificationKey(const uint64_t circuit_size, const uint64_t num_public_inputs, const uint64_t pub_inputs_offset, @@ -530,7 +508,8 @@ class UltraFlavor { PartiallyEvaluatedMultivariates() = default; PartiallyEvaluatedMultivariates(const size_t circuit_size) { - // Storage is only needed after the first partial evaluation, hence polynomials of size (n / 2) + // Storage is only needed after the first partial evaluation, hence polynomials of + // size (n / 2) for (auto& poly : this->get_all()) { poly = Polynomial(circuit_size / 2); } @@ -574,8 +553,9 @@ class UltraFlavor { w_o = "W_O"; w_4 = "W_4"; z_perm = "Z_PERM"; - z_lookup = "Z_LOOKUP"; - sorted_accum = "SORTED_ACCUM"; + lookup_inverses = "LOOKUP_INVERSES"; + lookup_read_counts = "LOOKUP_READ_COUNTS"; + lookup_read_tags = "LOOKUP_READ_TAGS"; q_c = "Q_C"; q_l = "Q_L"; @@ -647,10 +627,11 @@ class UltraFlavor { this->w_l = commitments.w_l; this->w_r = commitments.w_r; this->w_o = commitments.w_o; - this->sorted_accum = commitments.sorted_accum; + this->lookup_inverses = commitments.lookup_inverses; + this->lookup_read_counts = commitments.lookup_read_counts; + this->lookup_read_tags = commitments.lookup_read_tags; this->w_4 = commitments.w_4; this->z_perm = commitments.z_perm; - this->z_lookup = commitments.z_lookup; } } }; @@ -671,10 +652,11 @@ class UltraFlavor { Commitment w_l_comm; Commitment w_r_comm; Commitment w_o_comm; - Commitment sorted_accum_comm; + Commitment lookup_read_counts_comm; + Commitment lookup_read_tags_comm; Commitment w_4_comm; Commitment z_perm_comm; - Commitment z_lookup_comm; + Commitment lookup_inverses_comm; std::vector> sumcheck_univariates; std::array sumcheck_evaluations; std::vector zm_cq_comms; @@ -704,8 +686,9 @@ class UltraFlavor { }; /** - * @brief Takes a FULL Ultra proof and deserializes it into the public member variables that compose the - * structure. Must be called in order to access the structure of the proof. 
+ * @brief Takes a FULL Ultra proof and deserializes it into the public member variables + * that compose the structure. Must be called in order to access the structure of the + * proof. * */ void deserialize_full_transcript() @@ -723,10 +706,11 @@ class UltraFlavor { w_l_comm = deserialize_from_buffer(proof_data, num_frs_read); w_r_comm = deserialize_from_buffer(proof_data, num_frs_read); w_o_comm = deserialize_from_buffer(proof_data, num_frs_read); - sorted_accum_comm = deserialize_from_buffer(proof_data, num_frs_read); + lookup_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); + lookup_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); w_4_comm = deserialize_from_buffer(proof_data, num_frs_read); + lookup_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); z_perm_comm = deserialize_from_buffer(proof_data, num_frs_read); - z_lookup_comm = deserialize_from_buffer(proof_data, num_frs_read); for (size_t i = 0; i < log_n; ++i) { sumcheck_univariates.push_back( deserialize_from_buffer>(proof_data, @@ -740,8 +724,9 @@ class UltraFlavor { kzg_w_comm = deserialize_from_buffer(proof_data, num_frs_read); } /** - * @brief Serializes the structure variables into a FULL Ultra proof. Should be called only if - * deserialize_full_transcript() was called and some transcript variable was modified. + * @brief Serializes the structure variables into a FULL Ultra proof. Should be called + * only if deserialize_full_transcript() was called and some transcript variable was + * modified. 
* */ void serialize_full_transcript() @@ -758,10 +743,11 @@ class UltraFlavor { serialize_to_buffer(w_l_comm, proof_data); serialize_to_buffer(w_r_comm, proof_data); serialize_to_buffer(w_o_comm, proof_data); - serialize_to_buffer(sorted_accum_comm, proof_data); + serialize_to_buffer(lookup_read_counts_comm, proof_data); + serialize_to_buffer(lookup_read_tags_comm, proof_data); serialize_to_buffer(w_4_comm, proof_data); + serialize_to_buffer(lookup_inverses_comm, proof_data); serialize_to_buffer(z_perm_comm, proof_data); - serialize_to_buffer(z_lookup_comm, proof_data); for (size_t i = 0; i < log_n; ++i) { serialize_to_buffer(sumcheck_univariates[i], proof_data); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp index a73509fe018..195097c2c7f 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp @@ -11,7 +11,6 @@ #include "barretenberg/relations/auxiliary_relation.hpp" #include "barretenberg/relations/delta_range_constraint_relation.hpp" #include "barretenberg/relations/elliptic_relation.hpp" -#include "barretenberg/relations/lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/relations/ultra_arithmetic_relation.hpp" #include "barretenberg/srs/factories/crs_factory.hpp" @@ -63,12 +62,12 @@ template class UltraRecursiveFlavor_ { // The number of multivariate polynomials on which a sumcheck prover sumcheck operates (including shifts). We often // need containers of this size to hold related data, so we choose a name more agnostic than `NUM_POLYNOMIALS`. // Note: this number does not include the individual sorted list polynomials. 
- static constexpr size_t NUM_ALL_ENTITIES = 43; + static constexpr size_t NUM_ALL_ENTITIES = UltraFlavor::NUM_ALL_ENTITIES; // The number of polynomials precomputed to describe a circuit and to aid a prover in constructing a satisfying // assignment of witnesses. We again choose a neutral name. - static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 25; + static constexpr size_t NUM_PRECOMPUTED_ENTITIES = UltraFlavor::NUM_PRECOMPUTED_ENTITIES; // The total number of witness entities not including shifts. - static constexpr size_t NUM_WITNESS_ENTITIES = 7; + static constexpr size_t NUM_WITNESS_ENTITIES = UltraFlavor::NUM_WITNESS_ENTITIES; // define the tuple of Relations that comprise the Sumcheck relation using Relations = UltraFlavor::Relations_; @@ -238,10 +237,11 @@ template class UltraRecursiveFlavor_ { this->w_l = commitments.w_l; this->w_r = commitments.w_r; this->w_o = commitments.w_o; - this->sorted_accum = commitments.sorted_accum; + this->lookup_inverses = commitments.lookup_inverses; + this->lookup_read_counts = commitments.lookup_read_counts; + this->lookup_read_tags = commitments.lookup_read_tags; this->w_4 = commitments.w_4; this->z_perm = commitments.z_perm; - this->z_lookup = commitments.z_lookup; } } }; diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp index 3a5de3c3c5e..d3a9d0bf8ee 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp @@ -81,7 +81,10 @@ template class ProverInstance_ { construct_lookup_table_polynomials(proving_key.polynomials.get_tables(), circuit, dyadic_circuit_size); - proving_key.sorted_polynomials = construct_sorted_list_polynomials(circuit, dyadic_circuit_size); + construct_lookup_read_counts(proving_key.polynomials.lookup_read_counts, + proving_key.polynomials.lookup_read_tags, + circuit, + 
dyadic_circuit_size); std::span public_wires_source = proving_key.polynomials.w_r; diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.test.cpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.test.cpp deleted file mode 100644 index 4a8b5d36734..00000000000 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.test.cpp +++ /dev/null @@ -1,89 +0,0 @@ - -#include "prover_instance.hpp" -#include "barretenberg/ecc/curves/bn254/bn254.hpp" -#include "barretenberg/plonk_honk_shared/library/grand_product_library.hpp" -#include "barretenberg/polynomials/polynomial.hpp" -#include "barretenberg/srs/factories/file_crs_factory.hpp" -#include -using namespace bb; - -template class InstanceTests : public testing::Test { - using FF = typename Flavor::FF; - using Polynomial = bb::Polynomial; - using Builder = typename Flavor::CircuitBuilder; - - public: - /** - * @brief Get a random polynomial - * - * @param size - * @return Polynomial - */ - static constexpr Polynomial get_random_polynomial(size_t size) - { - Polynomial random_polynomial{ size }; - for (auto& coeff : random_polynomial) { - coeff = FF::random_element(); - } - return random_polynomial; - } - - static void populate_span(auto& polynomial_view, const auto& polynomial) - { - ASSERT(polynomial_view.size() <= polynomial.size()); - for (size_t idx = 0; idx < polynomial.size(); idx++) { - polynomial_view[idx] = polynomial[idx]; - } - }; - /** - * @brief Check consistency of the computation of the sorted list accumulator - * @details This test compares a simple, unoptimized, easily readable calculation of the sorted list accumulator - * to the optimized implementation used by the prover. It's purpose is to provide confidence that some optimization - * introduced into the calculation has not changed the result. - * @note This test does confirm the correctness of the sorted list accumulator, only that the two implementations - * yield an identical result. 
- */ - static void test_sorted_list_accumulator_construction() - { - srs::init_crs_factory("../srs_db/ignition"); - - // Construct a simple circuit of size n = 8 (i.e. the minimum circuit size) - Builder builder; - - auto a = 2; - builder.add_variable(a); - - builder.add_gates_to_ensure_all_polys_are_non_zero(); - builder.finalize_circuit(); - auto instance = ProverInstance_(builder); - - // Get random challenge eta - auto eta = FF::random_element(); - auto eta_two = FF::random_element(); - auto eta_three = FF::random_element(); - - auto sorted_list_polynomials = instance.proving_key.sorted_polynomials; - - // Method 1: computed sorted list accumulator polynomial using prover library method - instance.proving_key.compute_sorted_list_accumulator(eta, eta_two, eta_three); - auto sorted_list_accumulator = instance.proving_key.polynomials.sorted_accum; - - // Compute s = s_1 + η*s_2 + η²*s_3 + η³*s_4 - Polynomial sorted_list_accumulator_expected{ sorted_list_polynomials[0] }; - for (size_t i = 0; i < instance.proving_key.circuit_size; ++i) { - sorted_list_accumulator_expected[i] += sorted_list_polynomials[1][i] * eta + - sorted_list_polynomials[2][i] * eta_two + - sorted_list_polynomials[3][i] * eta_three; - } - - EXPECT_EQ(sorted_list_accumulator, sorted_list_accumulator_expected); - }; -}; - -using FlavorTypes = testing::Types; -TYPED_TEST_SUITE(InstanceTests, FlavorTypes); - -TYPED_TEST(InstanceTests, SortedListAccumulator) -{ - TestFixture::test_sorted_list_accumulator_construction(); -} diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.test.cpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.test.cpp index 05c0938fa10..bfc9b0facac 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.test.cpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.test.cpp @@ -3,7 +3,6 @@ #include "barretenberg/relations/auxiliary_relation.hpp" #include "barretenberg/relations/delta_range_constraint_relation.hpp" #include 
"barretenberg/relations/elliptic_relation.hpp" -#include "barretenberg/relations/lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/relations/ultra_arithmetic_relation.hpp" #include "barretenberg/stdlib_circuit_builders/plookup_tables/fixed_base/fixed_base.hpp" diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp index f0b7101086b..0d103c30291 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp @@ -163,23 +163,27 @@ void TranslatorProver::execute_relation_check_rounds() } /** - * @brief Execute the ZeroMorph protocol to prove the multilinear evaluations produced by Sumcheck + * @brief Execute the ZeroMorph protocol to produce an opening claim for the multilinear evaluations produced by + * Sumcheck and then produce an opening proof with a univariate PCS * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. 
* * */ -void TranslatorProver::execute_zeromorph_rounds() +void TranslatorProver::execute_pcs_rounds() { - using ZeroMorph = ZeroMorphProver_; - ZeroMorph::prove(key->polynomials.get_unshifted_without_concatenated(), - key->polynomials.get_to_be_shifted(), - sumcheck_output.claimed_evaluations.get_unshifted_without_concatenated(), - sumcheck_output.claimed_evaluations.get_shifted(), - sumcheck_output.challenge, - commitment_key, - transcript, - key->polynomials.get_concatenated_constraints(), - sumcheck_output.claimed_evaluations.get_concatenated_constraints(), - key->polynomials.get_concatenation_groups()); + using Curve = typename Flavor::Curve; + using ZeroMorph = ZeroMorphProver_; + auto prover_opening_claim = + ZeroMorph::prove(key->polynomials.get_unshifted_without_concatenated(), + key->polynomials.get_to_be_shifted(), + sumcheck_output.claimed_evaluations.get_unshifted_without_concatenated(), + sumcheck_output.claimed_evaluations.get_shifted(), + sumcheck_output.challenge, + commitment_key, + transcript, + key->polynomials.get_concatenated_constraints(), + sumcheck_output.claimed_evaluations.get_concatenated_constraints(), + key->polynomials.get_concatenation_groups()); + PCS::compute_opening_proof(commitment_key, prover_opening_claim, transcript); } HonkProof TranslatorProver::export_proof() @@ -208,7 +212,7 @@ HonkProof TranslatorProver::construct_proof() // Fiat-Shamir: rho, y, x, z // Execute Zeromorph multilinear PCS - execute_zeromorph_rounds(); + execute_pcs_rounds(); return export_proof(); } diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp index 62409ffbe8b..d61e9dc23cd 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp @@ -36,7 +36,7 @@ class TranslatorProver { BB_PROFILE void execute_wire_and_sorted_constraints_commitments_round(); 
BB_PROFILE void execute_grand_product_computation_round(); BB_PROFILE void execute_relation_check_rounds(); - BB_PROFILE void execute_zeromorph_rounds(); + BB_PROFILE void execute_pcs_rounds(); HonkProof export_proof(); HonkProof construct_proof(); diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp index 53880bc0cf0..cfae12f3a5c 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp @@ -53,6 +53,10 @@ void TranslatorVerifier::put_translation_data_in_relation_parameters(const uint2 */ bool TranslatorVerifier::verify_proof(const HonkProof& proof) { + using Curve = typename Flavor::Curve; + using PCS = typename Flavor::PCS; + using ZeroMorph = ::bb::ZeroMorphVerifier_; + batching_challenge_v = transcript->template get_challenge("Translation:batching_challenge"); // Load the proof produced by the translator prover @@ -108,15 +112,17 @@ bool TranslatorVerifier::verify_proof(const HonkProof& proof) // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description ofthe // unrolled protocol. 
- auto pairing_points = - ZeroMorphVerifier_::verify(commitments.get_unshifted_without_concatenated(), - commitments.get_to_be_shifted(), - claimed_evaluations.get_unshifted_without_concatenated(), - claimed_evaluations.get_shifted(), - multivariate_challenge, - transcript, - commitments.get_concatenation_groups(), - claimed_evaluations.get_concatenated_constraints()); + + auto opening_claim = ZeroMorph::verify(commitments.get_unshifted_without_concatenated(), + commitments.get_to_be_shifted(), + claimed_evaluations.get_unshifted_without_concatenated(), + claimed_evaluations.get_shifted(), + multivariate_challenge, + Commitment::one(), + transcript, + commitments.get_concatenation_groups(), + claimed_evaluations.get_concatenated_constraints()); + auto pairing_points = PCS::reduce_verify(opening_claim, transcript); auto verified = key->pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); diff --git a/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_verifier.cpp index e22a831aa26..bf171d2c4a1 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_verifier.cpp @@ -60,7 +60,8 @@ std::array TranslatorRecursiveVerifier_; using PCS = typename Flavor::PCS; - using ZeroMorph = ::bb::ZeroMorphVerifier_; + using Curve = typename Flavor::Curve; + using ZeroMorph = ::bb::ZeroMorphVerifier_; using VerifierCommitments = typename Flavor::VerifierCommitments; using CommitmentLabels = typename Flavor::CommitmentLabels; @@ -109,16 +110,19 @@ std::array TranslatorRecursiveVerifier_ void DeciderProver_::execute_relation_ch } /** - * @brief Execute the ZeroMorph protocol to prove the multilinear evaluations produced by Sumcheck + * @brief Execute the ZeroMorph protocol to produce an opening claim for the 
multilinear evaluations produced by + * Sumcheck and then produce an opening proof with a univariate PCS. * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. * * */ -template void DeciderProver_::execute_zeromorph_rounds() +template void DeciderProver_::execute_pcs_rounds() { - ZeroMorph::prove(accumulator->proving_key.polynomials.get_unshifted(), - accumulator->proving_key.polynomials.get_to_be_shifted(), - sumcheck_output.claimed_evaluations.get_unshifted(), - sumcheck_output.claimed_evaluations.get_shifted(), - sumcheck_output.challenge, - commitment_key, - transcript); + using ZeroMorph = ZeroMorphProver_; + auto prover_opening_claim = ZeroMorph::prove(accumulator->proving_key.polynomials.get_unshifted(), + accumulator->proving_key.polynomials.get_to_be_shifted(), + sumcheck_output.claimed_evaluations.get_unshifted(), + sumcheck_output.claimed_evaluations.get_shifted(), + sumcheck_output.challenge, + commitment_key, + transcript); + PCS::compute_opening_proof(commitment_key, prover_opening_claim, transcript); } template HonkProof DeciderProver_::export_proof() @@ -64,7 +67,7 @@ template HonkProof DeciderProver_::construct_proo // Fiat-Shamir: rho, y, x, z // Execute Zeromorph multilinear PCS - execute_zeromorph_rounds(); + execute_pcs_rounds(); return export_proof(); } diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.hpp index 910bcd898e0..2a3902d9ad1 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.hpp @@ -12,6 +12,7 @@ namespace bb { template class DeciderProver_ { using FF = typename Flavor::FF; + using Curve = typename Flavor::Curve; using Commitment = typename Flavor::Commitment; using CommitmentKey = typename Flavor::CommitmentKey; using ProvingKey = typename Flavor::ProvingKey; @@ -28,7 +29,7 @@ template class 
DeciderProver_ { const std::shared_ptr& transcript = std::make_shared()); BB_PROFILE void execute_relation_check_rounds(); - BB_PROFILE void execute_zeromorph_rounds(); + BB_PROFILE void execute_pcs_rounds(); HonkProof export_proof(); HonkProof construct_proof(); @@ -47,8 +48,6 @@ template class DeciderProver_ { std::shared_ptr commitment_key; - using ZeroMorph = ZeroMorphProver_; - private: HonkProof proof; }; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp index 4b9f122c966..8dfc816e01a 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp @@ -65,15 +65,16 @@ class MegaTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, "eta", "eta_two", "eta_three"); round++; - manifest_expected.add_entry(round, "SORTED_ACCUM", frs_per_G); + manifest_expected.add_entry(round, "LOOKUP_READ_COUNTS", frs_per_G); + manifest_expected.add_entry(round, "LOOKUP_READ_TAGS", frs_per_G); manifest_expected.add_entry(round, "W_4", frs_per_G); manifest_expected.add_challenge(round, "beta", "gamma"); round++; + manifest_expected.add_entry(round, "LOOKUP_INVERSES", frs_per_G); manifest_expected.add_entry(round, "CALLDATA_INVERSES", frs_per_G); manifest_expected.add_entry(round, "RETURN_DATA_INVERSES", frs_per_G); manifest_expected.add_entry(round, "Z_PERM", frs_per_G); - manifest_expected.add_entry(round, "Z_LOOKUP", frs_per_G); for (size_t i = 0; i < NUM_SUBRELATIONS - 1; i++) { std::string label = "alpha_" + std::to_string(i); @@ -242,7 +243,7 @@ TEST_F(MegaTranscriptTests, StructureTest) Flavor::Commitment one_group_val = Flavor::Commitment::one(); FF rand_val = FF::random_element(); - prover.transcript->sorted_accum_comm = one_group_val * rand_val; // choose random object to modify + prover.transcript->z_perm_comm = one_group_val * rand_val; // choose 
random object to modify EXPECT_TRUE(verifier.verify_proof( prover.export_proof())); // we have not serialized it back to the proof so it should still be fine @@ -250,5 +251,5 @@ TEST_F(MegaTranscriptTests, StructureTest) EXPECT_FALSE(verifier.verify_proof(prover.export_proof())); // the proof is now wrong after serializing it prover.transcript->deserialize_full_transcript(); - EXPECT_EQ(static_cast(prover.transcript->sorted_accum_comm), one_group_val * rand_val); + EXPECT_EQ(static_cast(prover.transcript->z_perm_comm), one_group_val * rand_val); } diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp index f0c15496c95..6626b48623f 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp @@ -1,4 +1,5 @@ #include "barretenberg/ultra_honk/oink_prover.hpp" +#include "barretenberg/relations/logderiv_lookup_relation.hpp" namespace bb { @@ -109,21 +110,25 @@ template void OinkProver::execute_wire_commitment */ template void OinkProver::execute_sorted_list_accumulator_round() { - + // Get eta challenges auto [eta, eta_two, eta_three] = transcript->template get_challenges( domain_separator + "eta", domain_separator + "eta_two", domain_separator + "eta_three"); relation_parameters.eta = eta; relation_parameters.eta_two = eta_two; relation_parameters.eta_three = eta_three; - proving_key.compute_sorted_accumulator_polynomials( + proving_key.add_ram_rom_memory_records_to_wire_4( relation_parameters.eta, relation_parameters.eta_two, relation_parameters.eta_three); - // Commit to the sorted witness-table accumulator and the finalized (i.e. with memory records) fourth wire - // polynomial - witness_commitments.sorted_accum = commitment_key->commit(proving_key.polynomials.sorted_accum); + + // Commit to lookup argument polynomials and the finalized (i.e. 
with memory records) fourth wire polynomial + witness_commitments.lookup_read_counts = commitment_key->commit(proving_key.polynomials.lookup_read_counts); + witness_commitments.lookup_read_tags = commitment_key->commit(proving_key.polynomials.lookup_read_tags); witness_commitments.w_4 = commitment_key->commit(proving_key.polynomials.w_4); - transcript->send_to_verifier(domain_separator + commitment_labels.sorted_accum, witness_commitments.sorted_accum); + transcript->send_to_verifier(domain_separator + commitment_labels.lookup_read_counts, + witness_commitments.lookup_read_counts); + transcript->send_to_verifier(domain_separator + commitment_labels.lookup_read_tags, + witness_commitments.lookup_read_tags); transcript->send_to_verifier(domain_separator + commitment_labels.w_4, witness_commitments.w_4); } @@ -136,10 +141,16 @@ template void OinkProver::execute_log_derivative_ auto [beta, gamma] = transcript->template get_challenges(domain_separator + "beta", domain_separator + "gamma"); relation_parameters.beta = beta; relation_parameters.gamma = gamma; - if constexpr (IsGoblinFlavor) { - // Compute and commit to the logderivative inverse used in DataBus - proving_key.compute_logderivative_inverse(relation_parameters); + // Compute the inverses used in log-derivative lookup relations + proving_key.compute_logderivative_inverses(relation_parameters); + + witness_commitments.lookup_inverses = commitment_key->commit(proving_key.polynomials.lookup_inverses); + transcript->send_to_verifier(domain_separator + commitment_labels.lookup_inverses, + witness_commitments.lookup_inverses); + + // If Mega, commit to the databus inverse polynomials and send + if constexpr (IsGoblinFlavor) { witness_commitments.calldata_inverses = commitment_key->commit(proving_key.polynomials.calldata_inverses); witness_commitments.return_data_inverses = commitment_key->commit(proving_key.polynomials.return_data_inverses); transcript->send_to_verifier(domain_separator + 
commitment_labels.calldata_inverses, @@ -158,10 +169,8 @@ template void OinkProver::execute_grand_product_c proving_key.compute_grand_product_polynomials(relation_parameters); witness_commitments.z_perm = commitment_key->commit(proving_key.polynomials.z_perm); - witness_commitments.z_lookup = commitment_key->commit(proving_key.polynomials.z_lookup); transcript->send_to_verifier(domain_separator + commitment_labels.z_perm, witness_commitments.z_perm); - transcript->send_to_verifier(domain_separator + commitment_labels.z_lookup, witness_commitments.z_lookup); } template typename Flavor::RelationSeparator OinkProver::generate_alphas_round() diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp index 69a2b20a57b..0a5a1810e4c 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp @@ -87,15 +87,18 @@ template void OinkVerifier::execute_wire_commitme */ template void OinkVerifier::execute_sorted_list_accumulator_round() { - // Get challenge for sorted list batching and wire four memory records + // Get eta challenges auto [eta, eta_two, eta_three] = transcript->template get_challenges( domain_separator + "eta", domain_separator + "eta_two", domain_separator + "eta_three"); relation_parameters.eta = eta; relation_parameters.eta_two = eta_two; relation_parameters.eta_three = eta_three; - // Get commitments to sorted list accumulator and fourth wire - witness_comms.sorted_accum = - transcript->template receive_from_prover(domain_separator + comm_labels.sorted_accum); + + // Get commitments to lookup argument polynomials and fourth wire + witness_comms.lookup_read_counts = + transcript->template receive_from_prover(domain_separator + comm_labels.lookup_read_counts); + witness_comms.lookup_read_tags = + transcript->template receive_from_prover(domain_separator + comm_labels.lookup_read_tags); 
witness_comms.w_4 = transcript->template receive_from_prover(domain_separator + comm_labels.w_4); } @@ -109,6 +112,10 @@ template void OinkVerifier::execute_log_derivativ auto [beta, gamma] = transcript->template get_challenges(domain_separator + "beta", domain_separator + "gamma"); relation_parameters.beta = beta; relation_parameters.gamma = gamma; + + witness_comms.lookup_inverses = + transcript->template receive_from_prover(domain_separator + comm_labels.lookup_inverses); + // If Goblin (i.e. using DataBus) receive commitments to log-deriv inverses polynomials if constexpr (IsGoblinFlavor) { witness_comms.calldata_inverses = @@ -137,8 +144,6 @@ template void OinkVerifier::execute_grand_product // Get commitment to permutation and lookup grand products witness_comms.z_perm = transcript->template receive_from_prover(domain_separator + comm_labels.z_perm); - witness_comms.z_lookup = - transcript->template receive_from_prover(domain_separator + comm_labels.z_lookup); } template typename Flavor::RelationSeparator OinkVerifier::generate_alphas_round() diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/relation_correctness.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/relation_correctness.test.cpp index b928a0bcd04..cedd1fd4bb2 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/relation_correctness.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/relation_correctness.test.cpp @@ -4,7 +4,7 @@ #include "barretenberg/relations/delta_range_constraint_relation.hpp" #include "barretenberg/relations/ecc_op_queue_relation.hpp" #include "barretenberg/relations/elliptic_relation.hpp" -#include "barretenberg/relations/lookup_relation.hpp" +#include "barretenberg/relations/logderiv_lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/relations/relation_parameters.hpp" #include "barretenberg/relations/ultra_arithmetic_relation.hpp" @@ -29,9 +29,8 @@ void ensure_non_zero(auto& polynomial) * @brief 
Check that a given relation is satified for a set of polynomials * * @tparam relation_idx Index into a tuple of provided relations - * @tparam Flavor */ -template void check_relation(auto circuit_size, auto& polynomials, auto params) +template void check_relation(auto circuit_size, auto& polynomials, auto params) { for (size_t i = 0; i < circuit_size; i++) { // Define the appropriate SumcheckArrayOfValuesOverSubrelations type for this relation and initialize to zero @@ -273,9 +272,10 @@ TEST_F(UltraRelationCorrectnessTests, Ultra) instance->relation_parameters.beta = FF::random_element(); instance->relation_parameters.gamma = FF::random_element(); - instance->proving_key.compute_sorted_accumulator_polynomials(instance->relation_parameters.eta, - instance->relation_parameters.eta_two, - instance->relation_parameters.eta_three); + instance->proving_key.add_ram_rom_memory_records_to_wire_4(instance->relation_parameters.eta, + instance->relation_parameters.eta_two, + instance->relation_parameters.eta_three); + instance->proving_key.compute_logderivative_inverses(instance->relation_parameters); instance->proving_key.compute_grand_product_polynomials(instance->relation_parameters); // Check that selectors are nonzero to ensure corresponding relation has nontrivial contribution @@ -285,18 +285,15 @@ TEST_F(UltraRelationCorrectnessTests, Ultra) ensure_non_zero(proving_key.polynomials.q_elliptic); ensure_non_zero(proving_key.polynomials.q_aux); - // Construct the round for applying sumcheck relations and results for storing computed results - using Relations = typename Flavor::Relations; - auto& prover_polynomials = instance->proving_key.polynomials; auto params = instance->relation_parameters; // Check that each relation is satisfied across each row of the prover polynomials - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - 
check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_linearly_dependent_relation>(circuit_size, prover_polynomials, params); } TEST_F(UltraRelationCorrectnessTests, Mega) @@ -328,10 +325,10 @@ TEST_F(UltraRelationCorrectnessTests, Mega) instance->relation_parameters.beta = FF::random_element(); instance->relation_parameters.gamma = FF::random_element(); - instance->proving_key.compute_sorted_accumulator_polynomials(instance->relation_parameters.eta, - instance->relation_parameters.eta_two, - instance->relation_parameters.eta_three); - instance->proving_key.compute_logderivative_inverse(instance->relation_parameters); + instance->proving_key.add_ram_rom_memory_records_to_wire_4(instance->relation_parameters.eta, + instance->relation_parameters.eta_two, + instance->relation_parameters.eta_three); + instance->proving_key.compute_logderivative_inverses(instance->relation_parameters); instance->proving_key.compute_grand_product_polynomials(instance->relation_parameters); // Check that selectors are nonzero to ensure corresponding relation has nontrivial contribution @@ -351,19 +348,18 @@ TEST_F(UltraRelationCorrectnessTests, Mega) ensure_non_zero(proving_key.polynomials.return_data_read_counts); ensure_non_zero(proving_key.polynomials.return_data_inverses); - // Construct the round for applying sumcheck relations and results for storing computed results - using Relations = typename Flavor::Relations; auto& prover_polynomials = instance->proving_key.polynomials; auto params = instance->relation_parameters; // Check that each relation is 
satisfied across each row of the prover polynomials - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - check_relation>(circuit_size, prover_polynomials, params); - check_linearly_dependent_relation>( - circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_relation>(circuit_size, prover_polynomials, params); + check_linearly_dependent_relation>(circuit_size, prover_polynomials, params); + check_linearly_dependent_relation>(circuit_size, prover_polynomials, params); } \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/sumcheck.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/sumcheck.test.cpp index 5962b8ba212..665538278c1 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/sumcheck.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/sumcheck.test.cpp @@ -5,7 +5,6 @@ #include "barretenberg/relations/auxiliary_relation.hpp" #include "barretenberg/relations/delta_range_constraint_relation.hpp" #include "barretenberg/relations/elliptic_relation.hpp" -#include "barretenberg/relations/lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/relations/ultra_arithmetic_relation.hpp" #include 
"barretenberg/stdlib_circuit_builders/plookup_tables/fixed_base/fixed_base.hpp" @@ -157,9 +156,10 @@ TEST_F(SumcheckTestsRealCircuit, Ultra) instance->relation_parameters.beta = FF::random_element(); instance->relation_parameters.gamma = FF::random_element(); - instance->proving_key.compute_sorted_accumulator_polynomials(instance->relation_parameters.eta, - instance->relation_parameters.eta_two, - instance->relation_parameters.eta_three); + instance->proving_key.add_ram_rom_memory_records_to_wire_4(instance->relation_parameters.eta, + instance->relation_parameters.eta_two, + instance->relation_parameters.eta_three); + instance->proving_key.compute_logderivative_inverses(instance->relation_parameters); instance->proving_key.compute_grand_product_polynomials(instance->relation_parameters); auto prover_transcript = Transcript::prover_init_empty(); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp similarity index 88% rename from barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.test.cpp rename to barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp index b04bd0b7fd1..31d0423fd99 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp @@ -48,7 +48,7 @@ void ensure_non_zero(auto& polynomial) ASSERT_TRUE(has_non_zero_coefficient); } -class UltraHonkComposerTests : public ::testing::Test { +class UltraHonkTests : public ::testing::Test { protected: static void SetUpTestSuite() { bb::srs::init_crs_factory("../srs_db/ignition"); } }; @@ -60,7 +60,7 @@ class UltraHonkComposerTests : public ::testing::Test { * to achieve non-zero polynomials * */ -TEST_F(UltraHonkComposerTests, ANonZeroPolynomialIsAGoodPolynomial) +TEST_F(UltraHonkTests, ANonZeroPolynomialIsAGoodPolynomial) { auto circuit_builder = UltraCircuitBuilder(); @@ -86,7 +86,7 @@ 
TEST_F(UltraHonkComposerTests, ANonZeroPolynomialIsAGoodPolynomial) * @brief Test proof construction/verification for a structured execution trace * */ -TEST_F(UltraHonkComposerTests, StructuredTrace) +TEST_F(UltraHonkTests, StructuredTrace) { auto builder = UltraCircuitBuilder(); size_t num_gates = 3; @@ -109,7 +109,7 @@ TEST_F(UltraHonkComposerTests, StructuredTrace) * @brief Test simple circuit with public inputs * */ -TEST_F(UltraHonkComposerTests, PublicInputs) +TEST_F(UltraHonkTests, PublicInputs) { auto builder = UltraCircuitBuilder(); size_t num_gates = 10; @@ -120,7 +120,7 @@ TEST_F(UltraHonkComposerTests, PublicInputs) prove_and_verify(builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, XorConstraint) +TEST_F(UltraHonkTests, XorConstraint) { auto circuit_builder = UltraCircuitBuilder(); @@ -147,7 +147,7 @@ TEST_F(UltraHonkComposerTests, XorConstraint) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, create_gates_from_plookup_accumulators) +TEST_F(UltraHonkTests, create_gates_from_plookup_accumulators) { auto circuit_builder = UltraCircuitBuilder(); @@ -207,7 +207,91 @@ TEST_F(UltraHonkComposerTests, create_gates_from_plookup_accumulators) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, test_no_lookup_proof) +/** + * @brief Test various failure modes for the lookup relation via bad input polynomials + * + */ +TEST_F(UltraHonkTests, LookupFailure) +{ + // Construct a circuit with lookup and arithmetic gates + auto construct_circuit_with_lookups = []() { + UltraCircuitBuilder builder; + + MockCircuits::add_lookup_gates(builder); + MockCircuits::add_arithmetic_gates(builder); + + return builder; + }; + + auto prove_and_verify = [](auto& instance) { + UltraProver prover(instance); + auto verification_key = std::make_shared(instance->proving_key); + UltraVerifier verifier(verification_key); + auto proof = prover.construct_proof(); + return 
verifier.verify_proof(proof); + }; + + // Ensure the unaltered test circuit is valid + { + auto builder = construct_circuit_with_lookups(); + + auto instance = std::make_shared(builder); + + EXPECT_TRUE(prove_and_verify(instance)); + } + + // Failure mode 1: bad read counts/tags + { + auto builder = construct_circuit_with_lookups(); + + auto instance = std::make_shared(builder); + auto& polynomials = instance->proving_key.polynomials; + + // Erroneously update the read counts/tags at an arbitrary index + // Note: updating only one or the other may not cause failure due to the design of the relation algebra. For + // example, the inverse is only computed if read tags is non-zero, otherwise the inverse at the row in question + // will be zero. So if read counts is incremented at some arbitrary index but read tags is not, the inverse will + // be 0 and the erroneous read_counts value will get multiplied by 0 in the relation. This is expected behavior. + polynomials.lookup_read_counts[25] = 1; + polynomials.lookup_read_tags[25] = 1; + + EXPECT_FALSE(prove_and_verify(instance)); + } + + // Failure mode 2: bad lookup gate wire value + { + auto builder = construct_circuit_with_lookups(); + + auto instance = std::make_shared(builder); + auto& polynomials = instance->proving_key.polynomials; + + // Find a lookup gate and alter one of the wire values + for (auto [q_lookup, wire_3] : zip_view(polynomials.q_lookup, polynomials.w_o)) { + if (!q_lookup.is_zero()) { + wire_3 += 1; + break; + } + } + + EXPECT_FALSE(prove_and_verify(instance)); + } + + // Failure mode 3: erroneous lookup gate + { + auto builder = construct_circuit_with_lookups(); + + auto instance = std::make_shared(builder); + auto& polynomials = instance->proving_key.polynomials; + + // Turn the lookup selector on for an arbitrary row where it is not already active + EXPECT_TRUE(polynomials.q_lookup[25] != 1); + polynomials.q_lookup[25] = 1; + + EXPECT_FALSE(prove_and_verify(instance)); + } +} + 
+TEST_F(UltraHonkTests, test_no_lookup_proof) { auto circuit_builder = UltraCircuitBuilder(); @@ -229,7 +313,7 @@ TEST_F(UltraHonkComposerTests, test_no_lookup_proof) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, test_elliptic_gate) +TEST_F(UltraHonkTests, test_elliptic_gate) { typedef grumpkin::g1::affine_element affine_element; typedef grumpkin::g1::element element; @@ -262,7 +346,7 @@ TEST_F(UltraHonkComposerTests, test_elliptic_gate) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, non_trivial_tag_permutation) +TEST_F(UltraHonkTests, non_trivial_tag_permutation) { auto circuit_builder = UltraCircuitBuilder(); fr a = fr::random_element(); @@ -289,7 +373,7 @@ TEST_F(UltraHonkComposerTests, non_trivial_tag_permutation) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, non_trivial_tag_permutation_and_cycles) +TEST_F(UltraHonkTests, non_trivial_tag_permutation_and_cycles) { auto circuit_builder = UltraCircuitBuilder(); fr a = fr::random_element(); @@ -326,7 +410,7 @@ TEST_F(UltraHonkComposerTests, non_trivial_tag_permutation_and_cycles) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, bad_tag_permutation) +TEST_F(UltraHonkTests, bad_tag_permutation) { { auto circuit_builder = UltraCircuitBuilder(); @@ -369,7 +453,7 @@ TEST_F(UltraHonkComposerTests, bad_tag_permutation) } } -TEST_F(UltraHonkComposerTests, sort_widget) +TEST_F(UltraHonkTests, sort_widget) { auto circuit_builder = UltraCircuitBuilder(); fr a = fr::one(); @@ -386,7 +470,7 @@ TEST_F(UltraHonkComposerTests, sort_widget) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, sort_with_edges_gate) +TEST_F(UltraHonkTests, sort_with_edges_gate) { fr a = fr::one(); fr b = fr(2); @@ -476,7 +560,7 @@ TEST_F(UltraHonkComposerTests, sort_with_edges_gate) } } -TEST_F(UltraHonkComposerTests, 
range_constraint) +TEST_F(UltraHonkTests, range_constraint) { { auto circuit_builder = UltraCircuitBuilder(); @@ -545,7 +629,7 @@ TEST_F(UltraHonkComposerTests, range_constraint) } } -TEST_F(UltraHonkComposerTests, range_with_gates) +TEST_F(UltraHonkTests, range_with_gates) { auto circuit_builder = UltraCircuitBuilder(); auto idx = add_variables(circuit_builder, { 1, 2, 3, 4, 5, 6, 7, 8 }); @@ -563,7 +647,7 @@ TEST_F(UltraHonkComposerTests, range_with_gates) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, range_with_gates_where_range_is_not_a_power_of_two) +TEST_F(UltraHonkTests, range_with_gates_where_range_is_not_a_power_of_two) { auto circuit_builder = UltraCircuitBuilder(); auto idx = add_variables(circuit_builder, { 1, 2, 3, 4, 5, 6, 7, 8 }); @@ -581,7 +665,7 @@ TEST_F(UltraHonkComposerTests, range_with_gates_where_range_is_not_a_power_of_tw prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, sort_widget_complex) +TEST_F(UltraHonkTests, sort_widget_complex) { { @@ -607,7 +691,7 @@ TEST_F(UltraHonkComposerTests, sort_widget_complex) } } -TEST_F(UltraHonkComposerTests, sort_widget_neg) +TEST_F(UltraHonkTests, sort_widget_neg) { auto circuit_builder = UltraCircuitBuilder(); fr a = fr::one(); @@ -624,7 +708,7 @@ TEST_F(UltraHonkComposerTests, sort_widget_neg) prove_and_verify(circuit_builder, /*expected_result=*/false); } -TEST_F(UltraHonkComposerTests, composed_range_constraint) +TEST_F(UltraHonkTests, composed_range_constraint) { auto circuit_builder = UltraCircuitBuilder(); auto c = fr::random_element(); @@ -637,7 +721,7 @@ TEST_F(UltraHonkComposerTests, composed_range_constraint) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, non_native_field_multiplication) +TEST_F(UltraHonkTests, non_native_field_multiplication) { using fq = fq; auto circuit_builder = UltraCircuitBuilder(); @@ -693,7 +777,7 @@ 
TEST_F(UltraHonkComposerTests, non_native_field_multiplication) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, rom) +TEST_F(UltraHonkTests, rom) { auto circuit_builder = UltraCircuitBuilder(); @@ -734,7 +818,7 @@ TEST_F(UltraHonkComposerTests, rom) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, ram) +TEST_F(UltraHonkTests, ram) { auto circuit_builder = UltraCircuitBuilder(); @@ -797,7 +881,7 @@ TEST_F(UltraHonkComposerTests, ram) prove_and_verify(circuit_builder, /*expected_result=*/true); } -TEST_F(UltraHonkComposerTests, range_checks_on_duplicates) +TEST_F(UltraHonkTests, range_checks_on_duplicates) { auto circuit_builder = UltraCircuitBuilder(); @@ -836,7 +920,7 @@ TEST_F(UltraHonkComposerTests, range_checks_on_duplicates) // range constrained, do not break the set equivalence checks because of indices mismatch. // 2^14 is DEFAULT_PLOOKUP_RANGE_BITNUM i.e. the maximum size before a variable gets sliced // before range constraints are applied to it. 
-TEST_F(UltraHonkComposerTests, range_constraint_small_variable) +TEST_F(UltraHonkTests, range_constraint_small_variable) { auto circuit_builder = UltraCircuitBuilder(); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp index 952894e4a36..df541aeb2df 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp @@ -58,13 +58,14 @@ class UltraTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, "eta", "eta_two", "eta_three"); round++; - manifest_expected.add_entry(round, "SORTED_ACCUM", frs_per_G); + manifest_expected.add_entry(round, "LOOKUP_READ_COUNTS", frs_per_G); + manifest_expected.add_entry(round, "LOOKUP_READ_TAGS", frs_per_G); manifest_expected.add_entry(round, "W_4", frs_per_G); manifest_expected.add_challenge(round, "beta", "gamma"); round++; + manifest_expected.add_entry(round, "LOOKUP_INVERSES", frs_per_G); manifest_expected.add_entry(round, "Z_PERM", frs_per_G); - manifest_expected.add_entry(round, "Z_LOOKUP", frs_per_G); for (size_t i = 0; i < NUM_SUBRELATIONS - 1; i++) { std::string label = "alpha_" + std::to_string(i); @@ -226,7 +227,7 @@ TEST_F(UltraTranscriptTests, StructureTest) Flavor::Commitment one_group_val = Flavor::Commitment::one(); FF rand_val = FF::random_element(); - prover.transcript->sorted_accum_comm = one_group_val * rand_val; // choose random object to modify + prover.transcript->z_perm_comm = one_group_val * rand_val; // choose random object to modify EXPECT_TRUE(verifier.verify_proof( prover.export_proof())); // we have not serialized it back to the proof so it should still be fine @@ -234,5 +235,5 @@ TEST_F(UltraTranscriptTests, StructureTest) EXPECT_FALSE(verifier.verify_proof(prover.export_proof())); // the proof is now wrong after serializing it prover.transcript->deserialize_full_transcript(); 
- EXPECT_EQ(static_cast(prover.transcript->sorted_accum_comm), one_group_val * rand_val); + EXPECT_EQ(static_cast(prover.transcript->z_perm_comm), one_group_val * rand_val); } diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp index 039591d2ba4..942af05365b 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp @@ -43,7 +43,8 @@ template bool UltraVerifier_::verify_proof(const HonkP { using FF = typename Flavor::FF; using PCS = typename Flavor::PCS; - using ZeroMorph = ZeroMorphVerifier_; + using Curve = typename Flavor::Curve; + using ZeroMorph = ZeroMorphVerifier_; using VerifierCommitments = typename Flavor::VerifierCommitments; transcript = std::make_shared(proof); @@ -72,14 +73,17 @@ template bool UltraVerifier_::verify_proof(const HonkP return false; } - // Execute ZeroMorph rounds and check the pcs verifier accumulator returned. See + // Execute ZeroMorph rounds to produce an opening claim and verify it with a univariate PCS. See // https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. 
- auto pairing_points = ZeroMorph::verify(commitments.get_unshifted(), - commitments.get_to_be_shifted(), - claimed_evaluations.get_unshifted(), - claimed_evaluations.get_shifted(), - multivariate_challenge, - transcript); + auto opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + commitments.get_to_be_shifted(), + claimed_evaluations.get_unshifted(), + claimed_evaluations.get_shifted(), + multivariate_challenge, + Commitment::one(), + transcript); + auto pairing_points = PCS::reduce_verify(opening_claim, transcript); + auto pcs_verified = key->pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); return sumcheck_verified.value() && pcs_verified; } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_gas_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_gas_trace.cpp index 4c8bb0f9872..59b0c41d9ba 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_gas_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_gas_trace.cpp @@ -1,5 +1,10 @@ #include "barretenberg/vm/avm_trace/avm_gas_trace.hpp" + +#include +#include + #include "barretenberg/vm/avm_trace/avm_opcode.hpp" +#include "barretenberg/vm/avm_trace/fixed_gas.hpp" namespace bb::avm_trace { @@ -39,8 +44,9 @@ void AvmGasTraceBuilder::constrain_gas_lookup(uint32_t clk, OpCode opcode) gas_opcode_lookup_counter[opcode]++; // Get the gas prices for this opcode - uint32_t l2_gas_cost = GAS_COST_TABLE.at(opcode).l2_fixed_gas_cost; - uint32_t da_gas_cost = GAS_COST_TABLE.at(opcode).da_fixed_gas_cost; + const auto& GAS_COST_TABLE = FixedGasTable::get(); + auto l2_gas_cost = static_cast(GAS_COST_TABLE.at(opcode).gas_l2_gas_fixed_table); + auto da_gas_cost = static_cast(GAS_COST_TABLE.at(opcode).gas_da_gas_fixed_table); remaining_l2_gas -= l2_gas_cost; remaining_da_gas -= da_gas_cost; @@ -69,8 +75,9 @@ void AvmGasTraceBuilder::constrain_gas_for_external_call(uint32_t clk, // gas_opcode_lookup_counter[opcode]++; // Get the gas prices for this 
opcode - uint32_t opcode_l2_gas_cost = GAS_COST_TABLE.at(opcode).l2_fixed_gas_cost; - uint32_t opcode_da_gas_cost = GAS_COST_TABLE.at(opcode).da_fixed_gas_cost; + const auto& GAS_COST_TABLE = FixedGasTable::get(); + auto opcode_l2_gas_cost = static_cast(GAS_COST_TABLE.at(opcode).gas_l2_gas_fixed_table); + auto opcode_da_gas_cost = static_cast(GAS_COST_TABLE.at(opcode).gas_da_gas_fixed_table); remaining_l2_gas -= opcode_l2_gas_cost + nested_l2_gas_cost; remaining_da_gas -= opcode_da_gas_cost + nested_da_gas_cost; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_gas_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_gas_trace.hpp index 8085321586b..1e5f226b55a 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_gas_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_gas_trace.hpp @@ -1,120 +1,17 @@ +#pragma once + +#include + #include "barretenberg/vm/avm_trace/avm_common.hpp" #include "barretenberg/vm/avm_trace/avm_opcode.hpp" -#include namespace bb::avm_trace { -struct GasTableEntry { - uint32_t l2_fixed_gas_cost = 0; - uint32_t da_fixed_gas_cost = 0; -}; - -// Temporary values until the definitive gas cost values are settled. 
-// See TS counterpart constant TemporaryDefaultGasCost in avm_gas.ts -static const inline GasTableEntry temp_default_gas_entry{ .l2_fixed_gas_cost = 10, .da_fixed_gas_cost = 2 }; - -static const inline std::unordered_map GAS_COST_TABLE = { - // Compute - // Compute - Arithmetic - { OpCode::ADD, temp_default_gas_entry }, - { OpCode::SUB, temp_default_gas_entry }, - { OpCode::MUL, temp_default_gas_entry }, - { OpCode::DIV, temp_default_gas_entry }, - { OpCode::FDIV, temp_default_gas_entry }, - // Compute - Comparators - { OpCode::EQ, temp_default_gas_entry }, - { OpCode::LT, temp_default_gas_entry }, - { OpCode::LTE, temp_default_gas_entry }, - // Compute - Bitwise - { OpCode::AND, temp_default_gas_entry }, - { OpCode::OR, temp_default_gas_entry }, - { OpCode::XOR, temp_default_gas_entry }, - { OpCode::NOT, temp_default_gas_entry }, - { OpCode::SHL, temp_default_gas_entry }, - { OpCode::SHR, temp_default_gas_entry }, - // Compute - Type Conversions - { OpCode::CAST, temp_default_gas_entry }, - - // Execution Environment - { OpCode::ADDRESS, temp_default_gas_entry }, - { OpCode::STORAGEADDRESS, temp_default_gas_entry }, - { OpCode::SENDER, temp_default_gas_entry }, - { OpCode::FEEPERL2GAS, temp_default_gas_entry }, - { OpCode::FEEPERDAGAS, temp_default_gas_entry }, - { OpCode::TRANSACTIONFEE, temp_default_gas_entry }, - { OpCode::CONTRACTCALLDEPTH, temp_default_gas_entry }, - // Execution Environment - Globals - { OpCode::CHAINID, temp_default_gas_entry }, - { OpCode::VERSION, temp_default_gas_entry }, - { OpCode::BLOCKNUMBER, temp_default_gas_entry }, - { OpCode::TIMESTAMP, temp_default_gas_entry }, - { OpCode::COINBASE, temp_default_gas_entry }, - { OpCode::BLOCKL2GASLIMIT, temp_default_gas_entry }, - { OpCode::BLOCKDAGASLIMIT, temp_default_gas_entry }, - // Execution Environment - Calldata - { OpCode::CALLDATACOPY, temp_default_gas_entry }, - - // Machine State - // Machine State - Gas - { OpCode::L2GASLEFT, temp_default_gas_entry }, - { OpCode::DAGASLEFT, 
temp_default_gas_entry }, - // Machine State - Internal Control Flow - { OpCode::JUMP, temp_default_gas_entry }, - { OpCode::JUMPI, temp_default_gas_entry }, - { OpCode::INTERNALCALL, temp_default_gas_entry }, - { OpCode::INTERNALRETURN, temp_default_gas_entry }, - // Machine State - Memory - { OpCode::SET, temp_default_gas_entry }, - { OpCode::MOV, temp_default_gas_entry }, - { OpCode::CMOV, temp_default_gas_entry }, - - // World State - { OpCode::SLOAD, temp_default_gas_entry }, - { OpCode::SSTORE, temp_default_gas_entry }, - { OpCode::NOTEHASHEXISTS, temp_default_gas_entry }, - { OpCode::EMITNOTEHASH, temp_default_gas_entry }, - { OpCode::NULLIFIEREXISTS, temp_default_gas_entry }, - { OpCode::EMITNULLIFIER, temp_default_gas_entry }, - { OpCode::L1TOL2MSGEXISTS, temp_default_gas_entry }, - { OpCode::HEADERMEMBER, temp_default_gas_entry }, - { OpCode::GETCONTRACTINSTANCE, temp_default_gas_entry }, - - // Accrued Substate - { OpCode::EMITUNENCRYPTEDLOG, temp_default_gas_entry }, - { OpCode::SENDL2TOL1MSG, temp_default_gas_entry }, - - // Control Flow - Contract Calls - { OpCode::CALL, temp_default_gas_entry }, - { OpCode::STATICCALL, temp_default_gas_entry }, - { OpCode::DELEGATECALL, temp_default_gas_entry }, - { OpCode::RETURN, temp_default_gas_entry }, - { OpCode::REVERT, temp_default_gas_entry }, - - // Misc - { OpCode::DEBUGLOG, temp_default_gas_entry }, - - // Gadgets - { OpCode::KECCAK, temp_default_gas_entry }, - { OpCode::POSEIDON2, temp_default_gas_entry }, - { OpCode::SHA256, temp_default_gas_entry }, - { OpCode::PEDERSEN, temp_default_gas_entry }, - { OpCode::ECADD, temp_default_gas_entry }, - - // Conversions - { OpCode::TORADIXLE, temp_default_gas_entry }, - - // Future Gadgets -- pending changes in noir - { OpCode::SHA256COMPRESSION, temp_default_gas_entry }, - { OpCode::KECCAKF1600, temp_default_gas_entry }, // Here for when we eventually support this - // Sentinel - // LAST_OPCODE_SENTINEL, -}; - class AvmGasTraceBuilder { public: struct 
GasTraceEntry { uint32_t clk = 0; - OpCode opcode = OpCode::ADD; // 0 + OpCode opcode; uint32_t l2_gas_cost = 0; uint32_t da_gas_cost = 0; uint32_t remaining_l2_gas = 0; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_mem_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_mem_trace.cpp index 8ee4f02595f..e46fb93d670 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_mem_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_mem_trace.cpp @@ -150,7 +150,7 @@ bool AvmMemTraceBuilder::load_from_mem_trace(uint8_t space_id, AvmMemoryTag m_tag = mem_space.contains(addr) ? mem_space.at(addr).tag : AvmMemoryTag::U0; if (m_tag == AvmMemoryTag::U0 || m_tag == r_in_tag) { - insert_in_mem_trace(space_id, clk, sub_clk, addr, val, r_in_tag, r_in_tag, w_in_tag, false); + insert_in_mem_trace(space_id, clk, sub_clk, addr, val, m_tag, r_in_tag, w_in_tag, false); return true; } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp index c97c71eccb2..ae472b6643d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp @@ -21,6 +21,8 @@ #include "barretenberg/vm/avm_trace/avm_helper.hpp" #include "barretenberg/vm/avm_trace/avm_opcode.hpp" #include "barretenberg/vm/avm_trace/avm_trace.hpp" +#include "barretenberg/vm/avm_trace/fixed_gas.hpp" +#include "barretenberg/vm/avm_trace/fixed_powers.hpp" namespace bb::avm_trace { @@ -67,49 +69,103 @@ void AvmTraceBuilder::reset() external_call_counter = 0; } -AvmTraceBuilder::IndirectThreeResolution AvmTraceBuilder::resolve_ind_three( - uint8_t space_id, uint32_t clk, uint8_t indirect, uint32_t a_offset, uint32_t b_offset, uint32_t c_offset) +/** + * @brief Returns an array of mem_offsets and tags them with their given Addressing Mode (direct/indirect) based on the + * given indirect byte. 
+ * @tparam N The number of memory offsets to resolve. + */ +template +std::array unpack_indirects(uint8_t indirect, std::array mem_offsets) { - bool indirect_flag_a = is_operand_indirect(indirect, 0); - bool indirect_flag_b = is_operand_indirect(indirect, 1); - bool indirect_flag_c = is_operand_indirect(indirect, 2); - - uint32_t direct_a_offset = a_offset; - uint32_t direct_b_offset = b_offset; - uint32_t direct_c_offset = c_offset; - - bool tag_match = true; - - if (indirect_flag_a) { - auto read_ind_a = - mem_trace_builder.indirect_read_and_load_from_memory(space_id, clk, IndirectRegister::IND_A, a_offset); - direct_a_offset = uint32_t(read_ind_a.val); - tag_match = tag_match && read_ind_a.tag_match; + std::array addr_mode_arr; + + for (size_t i = 0; i < N; i++) { + // No need to type this as a bool as is implied by the (& 1). + uint8_t indirect_bit = (indirect >> i) & 1; + // Cast straight to AddressingMode, saves having to have a branching statement here. + auto addr_mode = static_cast(indirect_bit); + addr_mode_arr[i] = { addr_mode, mem_offsets[i] }; } + return addr_mode_arr; +} - if (indirect_flag_b) { - auto read_ind_b = - mem_trace_builder.indirect_read_and_load_from_memory(space_id, clk, IndirectRegister::IND_B, b_offset); - direct_b_offset = uint32_t(read_ind_b.val); - tag_match = tag_match && read_ind_b.tag_match; +/** + * @brief Loads a value from memory into a given intermediate register at a specified clock cycle. + * Handles both direct and indirect memory access. + * @tparam reg The intermediate register to load the value into. + */ +AvmTraceBuilder::MemOp AvmTraceBuilder::constrained_read_from_memory(uint8_t space_id, + uint32_t clk, + AddressWithMode addr, + AvmMemoryTag read_tag, + AvmMemoryTag write_tag, + IntermRegister reg) +{ + // Get the same matching indirect register for the given intermediate register. + // This is a hack that we can replace with a mapping of IntermediateRegister to IndirectRegister. 
+ auto indirect_reg = static_cast(reg); + // Set up direct and indirect offsets that may be overwritten + uint32_t direct_offset = addr.offset; + uint32_t indirect_offset = 0; + bool tag_match = true; + bool is_indirect = false; + if (addr.mode == AddressingMode::INDIRECT) { + is_indirect = true; + indirect_offset = direct_offset; + auto read_ind = + mem_trace_builder.indirect_read_and_load_from_memory(space_id, clk, indirect_reg, indirect_offset); + if (!read_ind.tag_match) { + tag_match = false; + } + direct_offset = uint32_t(read_ind.val); } + auto read_dir = mem_trace_builder.read_and_load_from_memory(space_id, clk, reg, direct_offset, read_tag, write_tag); + + return MemOp{ + .is_indirect = is_indirect, + .indirect_address = indirect_offset, + .direct_address = direct_offset, + .tag = read_tag, + .tag_match = tag_match && read_dir.tag_match, + .val = read_dir.val, + }; +} - if (indirect_flag_c) { - auto read_ind_c = - mem_trace_builder.indirect_read_and_load_from_memory(space_id, clk, IndirectRegister::IND_C, c_offset); - direct_c_offset = uint32_t(read_ind_c.val); - tag_match = tag_match && read_ind_c.tag_match; +/** + * @brief Writes a value to memory from a given intermediate register at a specified clock cycle. + * Handles both direct and indirect memory access. + * @tparam reg The intermediate register to write the value from. 
+ */ +AvmTraceBuilder::MemOp AvmTraceBuilder::constrained_write_to_memory(uint8_t space_id, + uint32_t clk, + AddressWithMode addr, + FF const& value, + AvmMemoryTag read_tag, + AvmMemoryTag write_tag, + IntermRegister reg) +{ + auto indirect_reg = static_cast(reg); + uint32_t direct_offset = addr.offset; + uint32_t indirect_offset = 0; + bool tag_match = true; + bool is_indirect = false; + if (addr.mode == AddressingMode::INDIRECT) { + is_indirect = true; + indirect_offset = direct_offset; + auto read_ind = + mem_trace_builder.indirect_read_and_load_from_memory(space_id, clk, indirect_reg, indirect_offset); + if (!read_ind.tag_match) { + tag_match = false; + } + direct_offset = uint32_t(read_ind.val); } - - return IndirectThreeResolution{ - .tag_match = tag_match, - .direct_a_offset = direct_a_offset, - .direct_b_offset = direct_b_offset, - .direct_c_offset = direct_c_offset, - .indirect_flag_a = indirect_flag_a, - .indirect_flag_b = indirect_flag_b, - .indirect_flag_c = indirect_flag_c, - }; + mem_trace_builder.write_into_memory(space_id, clk, reg, direct_offset, value, read_tag, write_tag); + return MemOp{ .is_indirect = is_indirect, + .indirect_address = indirect_offset, + .direct_address = direct_offset, + .tag = write_tag, + .tag_match = tag_match, + .val = value }; } /** @@ -126,15 +182,14 @@ void AvmTraceBuilder::op_add( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + // Resolve any potential indirects in the order they are encoded in the indirect byte. + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + + bool tag_match = read_a.tag_match && read_b.tag_match; // a + b = c FF a = read_a.val; @@ -146,7 +201,7 @@ void AvmTraceBuilder::op_add( FF c = tag_match ? alu_trace_builder.op_add(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::ADD); @@ -155,16 +210,16 @@ void AvmTraceBuilder::op_add( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -172,9 +227,9 @@ void AvmTraceBuilder::op_add( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_add = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -194,15 +249,14 @@ void AvmTraceBuilder::op_sub( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + // Resolve any potential indirects in the order they are encoded in the indirect byte. + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + + bool tag_match = read_a.tag_match && read_b.tag_match; // a - b = c FF a = read_a.val; @@ -214,7 +268,7 @@ void AvmTraceBuilder::op_sub( FF c = tag_match ? alu_trace_builder.op_sub(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::SUB); @@ -223,16 +277,16 @@ void AvmTraceBuilder::op_sub( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -240,9 +294,9 @@ void AvmTraceBuilder::op_sub( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_sub = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -262,15 +316,14 @@ void AvmTraceBuilder::op_mul( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + // Resolve any potential indirects in the order they are encoded in the indirect byte. + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + + bool tag_match = read_a.tag_match && read_b.tag_match; // a * b = c FF a = read_a.val; @@ -282,7 +335,7 @@ void AvmTraceBuilder::op_mul( FF c = tag_match ? alu_trace_builder.op_mul(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::MUL); @@ -291,16 +344,16 @@ void AvmTraceBuilder::op_mul( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -308,9 +361,9 @@ void AvmTraceBuilder::op_mul( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_mul = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -329,15 +382,16 @@ void AvmTraceBuilder::op_fdiv(uint8_t indirect, uint32_t a_offset, uint32_t b_of { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + // Resolve any potential indirects in the order they are encoded in the indirect byte. + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = + constrained_read_from_memory(call_ptr, clk, resolved_a, AvmMemoryTag::FF, AvmMemoryTag::FF, IntermRegister::IA); + auto read_b = + constrained_read_from_memory(call_ptr, clk, resolved_b, AvmMemoryTag::FF, AvmMemoryTag::FF, IntermRegister::IB); + + bool tag_match = read_a.tag_match && read_b.tag_match; // a * b^(-1) = c FF a = read_a.val; @@ -358,8 +412,8 @@ void AvmTraceBuilder::op_fdiv(uint8_t indirect, uint32_t a_offset, uint32_t b_of } // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, AvmMemoryTag::FF, AvmMemoryTag::FF); + auto write_c = constrained_write_to_memory( + call_ptr, clk, resolved_c, c, AvmMemoryTag::FF, AvmMemoryTag::FF, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::FDIV); @@ -367,17 +421,17 @@ void AvmTraceBuilder::op_fdiv(uint8_t indirect, uint32_t a_offset, uint32_t b_of main_trace.push_back(Row{ .main_clk = clk, .main_call_ptr = call_ptr, - .main_ia = tag_match ? a : FF(0), - .main_ib = tag_match ? b : FF(0), - .main_ic = tag_match ? c : FF(0), - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? FF(dst_offset) : FF(0), + .main_ia = tag_match ? read_a.val : FF(0), + .main_ib = tag_match ? read_b.val : FF(0), + .main_ic = tag_match ? 
write_c.val : FF(0), + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), .main_inv = tag_match ? inv : FF(1), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_op_err = tag_match ? error : FF(1), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), @@ -386,9 +440,9 @@ void AvmTraceBuilder::op_fdiv(uint8_t indirect, uint32_t a_offset, uint32_t b_of .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_fdiv = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), }); @@ -405,31 +459,14 @@ void AvmTraceBuilder::op_fdiv(uint8_t indirect, uint32_t a_offset, uint32_t b_of void AvmTraceBuilder::op_not(uint8_t indirect, uint32_t a_offset, uint32_t dst_offset, AvmMemoryTag in_tag) { auto clk = static_cast(main_trace.size()) + 1; - bool tag_match = true; - uint32_t direct_a_offset = a_offset; - uint32_t direct_dst_offset = dst_offset; - bool indirect_a_flag = is_operand_indirect(indirect, 0); - bool indirect_c_flag = is_operand_indirect(indirect, 1); + // Resolve any potential indirects in the order they are encoded in the indirect byte. 
+ auto [resolved_a, resolved_c] = unpack_indirects<2>(indirect, { a_offset, dst_offset }); - if (indirect_a_flag) { - auto read_ind_a = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, a_offset); - tag_match = read_ind_a.tag_match; - direct_a_offset = uint32_t(read_ind_a.val); - } - - if (indirect_c_flag) { - auto read_ind_c = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_C, dst_offset); - tag_match = tag_match && read_ind_c.tag_match; - direct_dst_offset = uint32_t(read_ind_c.val); - } + // Reading from memory and loading into ia + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); - // Reading from memory and loading into ia. - auto read_a = - mem_trace_builder.read_and_load_from_memory(call_ptr, clk, IntermRegister::IA, direct_a_offset, in_tag, in_tag); - tag_match = read_a.tag_match && tag_match; + bool tag_match = read_a.tag_match; // ~a = c FF a = read_a.val; @@ -439,7 +476,7 @@ void AvmTraceBuilder::op_not(uint8_t indirect, uint32_t a_offset, uint32_t dst_o FF c = tag_match ? alu_trace_builder.op_not(a, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, direct_dst_offset, c, in_tag, in_tag); + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::NOT); @@ -448,21 +485,21 @@ void AvmTraceBuilder::op_not(uint8_t indirect, uint32_t a_offset, uint32_t dst_o .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ic = c, - .main_ind_addr_a = indirect_a_flag ? FF(a_offset) : FF(0), - .main_ind_addr_c = indirect_c_flag ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_a_offset), - .main_mem_addr_c = FF(direct_dst_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_not = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_a_flag)), - .main_sel_resolve_ind_addr_c = FF(static_cast(indirect_c_flag)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!read_a.tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -482,15 +519,12 @@ void AvmTraceBuilder::op_eq( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, AvmMemoryTag::U8); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, AvmMemoryTag::U8); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, AvmMemoryTag::U8, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, AvmMemoryTag::U8, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; FF a = read_a.val; FF b = read_b.val; @@ -501,8 +535,8 @@ void AvmTraceBuilder::op_eq( FF c = tag_match ? alu_trace_builder.op_eq(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, AvmMemoryTag::U8); + auto write_c = + constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, AvmMemoryTag::U8, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::EQ); @@ -511,16 +545,16 @@ void AvmTraceBuilder::op_eq( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -528,9 +562,9 @@ void AvmTraceBuilder::op_eq( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_eq = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), }); @@ -541,15 +575,12 @@ void AvmTraceBuilder::op_and( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; FF a = tag_match ? read_a.val : FF(0); FF b = tag_match ? read_b.val : FF(0); @@ -557,7 +588,7 @@ void AvmTraceBuilder::op_and( FF c = tag_match ? bin_trace_builder.op_and(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::AND); @@ -566,16 +597,16 @@ void AvmTraceBuilder::op_and( .main_clk = clk, .main_bin_op_id = FF(0), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -584,9 +615,9 @@ void AvmTraceBuilder::op_and( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_and = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -596,16 +627,12 @@ void AvmTraceBuilder::op_or( uint8_t indirect, uint32_t a_offset, uint32_t b_offset, uint32_t dst_offset, AvmMemoryTag in_tag) { auto clk = static_cast(main_trace.size()) + 1; - - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; FF a = tag_match ? read_a.val : FF(0); FF b = tag_match ? read_b.val : FF(0); @@ -613,7 +640,7 @@ void AvmTraceBuilder::op_or( FF c = tag_match ? bin_trace_builder.op_or(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::OR); @@ -622,16 +649,16 @@ void AvmTraceBuilder::op_or( .main_clk = clk, .main_bin_op_id = FF(1), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -640,9 +667,9 @@ void AvmTraceBuilder::op_or( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_or = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -653,15 +680,12 @@ void AvmTraceBuilder::op_xor( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; FF a = tag_match ? read_a.val : FF(0); FF b = tag_match ? read_b.val : FF(0); @@ -669,7 +693,7 @@ void AvmTraceBuilder::op_xor( FF c = tag_match ? bin_trace_builder.op_xor(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::XOR); @@ -678,16 +702,16 @@ void AvmTraceBuilder::op_xor( .main_clk = clk, .main_bin_op_id = FF(2), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -696,9 +720,9 @@ void AvmTraceBuilder::op_xor( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_xor = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -709,15 +733,11 @@ void AvmTraceBuilder::op_lt( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); - // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, AvmMemoryTag::U8); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, AvmMemoryTag::U8); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, AvmMemoryTag::U8, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, AvmMemoryTag::U8, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; FF a = tag_match ? read_a.val : FF(0); FF b = tag_match ? read_b.val : FF(0); @@ -725,8 +745,8 @@ void AvmTraceBuilder::op_lt( FF c = tag_match ? alu_trace_builder.op_lt(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, AvmMemoryTag::U8); + auto write_c = + constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, AvmMemoryTag::U8, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::LT); @@ -735,16 +755,16 @@ void AvmTraceBuilder::op_lt( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -752,9 +772,9 @@ void AvmTraceBuilder::op_lt( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_lt = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), }); @@ -765,15 +785,12 @@ void AvmTraceBuilder::op_lte( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, AvmMemoryTag::U8); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, AvmMemoryTag::U8); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, AvmMemoryTag::U8, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, AvmMemoryTag::U8, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; FF a = tag_match ? read_a.val : FF(0); FF b = tag_match ? read_b.val : FF(0); @@ -781,8 +798,8 @@ void AvmTraceBuilder::op_lte( FF c = tag_match ? alu_trace_builder.op_lte(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, AvmMemoryTag::U8); + auto write_c = + constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, AvmMemoryTag::U8, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::LTE); @@ -791,16 +808,16 @@ void AvmTraceBuilder::op_lte( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -808,9 +825,9 @@ void AvmTraceBuilder::op_lte( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_lte = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), }); @@ -822,15 +839,12 @@ void AvmTraceBuilder::op_shr( auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; FF a = tag_match ? read_a.val : FF(0); FF b = tag_match ? read_b.val : FF(0); @@ -838,8 +852,7 @@ void AvmTraceBuilder::op_shr( FF c = tag_match ? alu_trace_builder.op_shr(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); - + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::SHR); @@ -847,16 +860,16 @@ void AvmTraceBuilder::op_shr( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -864,9 +877,9 @@ void AvmTraceBuilder::op_shr( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_shr = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -877,15 +890,12 @@ void AvmTraceBuilder::op_shl( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_c] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; FF a = tag_match ? read_a.val : FF(0); FF b = tag_match ? read_b.val : FF(0); @@ -893,8 +903,7 @@ void AvmTraceBuilder::op_shl( FF c = tag_match ? alu_trace_builder.op_shl(a, b, in_tag, clk) : FF(0); // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); - + auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::SHL); @@ -902,16 +911,16 @@ void AvmTraceBuilder::op_shl( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, - .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .main_ia = read_a.val, + .main_ib = read_b.val, + .main_ic = write_c.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), .main_rwc = FF(1), @@ -919,9 +928,9 @@ void AvmTraceBuilder::op_shl( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_shl = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -945,19 +954,10 @@ void AvmTraceBuilder::op_set(uint8_t indirect, uint128_t val, uint32_t dst_offse { auto const clk = static_cast(main_trace.size()) + 1; auto const val_ff = FF{ uint256_t::from_uint128(val) }; - uint32_t direct_dst_offset = dst_offset; // Overriden in indirect mode - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - bool tag_match = true; - - if (indirect_dst_flag) { - auto read_ind_c = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_C, dst_offset); - tag_match = read_ind_c.tag_match; - direct_dst_offset = uint32_t(read_ind_c.val); - } + auto [resolved_c] = unpack_indirects<1>(indirect, { dst_offset 
}); - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IC, direct_dst_offset, val_ff, AvmMemoryTag::U0, in_tag); + auto write_c = + constrained_write_to_memory(call_ptr, clk, resolved_c, val_ff, AvmMemoryTag::U0, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::SET); @@ -965,16 +965,16 @@ void AvmTraceBuilder::op_set(uint8_t indirect, uint128_t val, uint32_t dst_offse main_trace.push_back(Row{ .main_clk = clk, .main_call_ptr = call_ptr, - .main_ic = val_ff, - .main_ind_addr_c = indirect_dst_flag ? dst_offset : 0, + .main_ic = write_c.val, + .main_ind_addr_c = FF(write_c.indirect_address), .main_internal_return_ptr = internal_return_ptr, - .main_mem_addr_c = direct_dst_offset, + .main_mem_addr_c = FF(write_c.direct_address), .main_pc = pc++, .main_rwc = 1, .main_sel_mem_op_activate_gas = 1, // TODO: remove in the long term .main_sel_mem_op_c = 1, - .main_sel_resolve_ind_addr_c = static_cast(indirect_dst_flag), - .main_tag_err = static_cast(!tag_match), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), + .main_tag_err = static_cast(!write_c.tag_match), .main_w_in_tag = static_cast(in_tag), }); } @@ -1160,35 +1160,28 @@ void AvmTraceBuilder::op_cmov( // Helper function to add kernel lookup operations into the main trace // TODO: add tag match to kernel_input_lookup opcodes to - it isnt written to - -ve test would catch Row AvmTraceBuilder::create_kernel_lookup_opcode( - bool indirect, uint32_t dst_offset, uint32_t selector, FF value, AvmMemoryTag w_tag) + uint8_t indirect, uint32_t dst_offset, uint32_t selector, FF value, AvmMemoryTag w_tag) { auto const clk = static_cast(main_trace.size()) + 1; - bool tag_match = true; - uint32_t direct_dst_offset = dst_offset; - if (indirect) { - auto read_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, dst_offset); - direct_dst_offset = uint32_t(read_ind_dst.val); - tag_match = 
tag_match && read_ind_dst.tag_match; - } - - AvmMemoryTag r_tag = AvmMemoryTag::U0; - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IA, direct_dst_offset, value, r_tag, w_tag); + auto [resolved_dst] = unpack_indirects<1>(indirect, { dst_offset }); + auto write_dst = + constrained_write_to_memory(call_ptr, clk, resolved_dst, value, AvmMemoryTag::U0, w_tag, IntermRegister::IA); return Row{ .main_clk = clk, .kernel_kernel_in_offset = selector, .main_call_ptr = call_ptr, .main_ia = value, - .main_ind_addr_a = indirect ? FF(dst_offset) : FF(0), + .main_ind_addr_a = FF(write_dst.indirect_address), .main_internal_return_ptr = internal_return_ptr, - .main_mem_addr_a = direct_dst_offset, + .main_mem_addr_a = FF(write_dst.direct_address), .main_pc = pc++, .main_rwa = 1, .main_sel_mem_op_a = 1, .main_sel_q_kernel_lookup = 1, - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect)), + .main_sel_resolve_ind_addr_a = FF(static_cast(write_dst.is_indirect)), + .main_tag_err = FF(static_cast(!write_dst.tag_match)), .main_w_in_tag = static_cast(w_tag), }; } @@ -1196,10 +1189,7 @@ Row AvmTraceBuilder::create_kernel_lookup_opcode( void AvmTraceBuilder::op_storage_address(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_storage_address(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = create_kernel_lookup_opcode( - indirect_dst_flag, dst_offset, STORAGE_ADDRESS_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, STORAGE_ADDRESS_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_storage_address = FF(1); // Constrain gas cost @@ -1211,9 +1201,7 @@ void AvmTraceBuilder::op_storage_address(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_sender(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_sender(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = 
create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, SENDER_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, SENDER_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_sender = FF(1); // Constrain gas cost @@ -1225,9 +1213,7 @@ void AvmTraceBuilder::op_sender(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_address(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_address(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, ADDRESS_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, ADDRESS_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_address = FF(1); // Constrain gas cost @@ -1239,10 +1225,7 @@ void AvmTraceBuilder::op_address(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_fee_per_da_gas(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_fee_per_da_gas(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = - create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, FEE_PER_DA_GAS_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, FEE_PER_DA_GAS_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_fee_per_da_gas = FF(1); // Constrain gas cost @@ -1254,10 +1237,7 @@ void AvmTraceBuilder::op_fee_per_da_gas(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_fee_per_l2_gas(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_fee_per_l2_gas(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = - create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, FEE_PER_L2_GAS_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, FEE_PER_L2_GAS_SELECTOR, ia_value, AvmMemoryTag::FF); 
row.main_sel_op_fee_per_l2_gas = FF(1); // Constrain gas cost @@ -1269,10 +1249,7 @@ void AvmTraceBuilder::op_fee_per_l2_gas(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_transaction_fee(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_transaction_fee(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = create_kernel_lookup_opcode( - indirect_dst_flag, dst_offset, TRANSACTION_FEE_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, TRANSACTION_FEE_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_transaction_fee = FF(1); // Constrain gas cost @@ -1284,9 +1261,7 @@ void AvmTraceBuilder::op_transaction_fee(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_chain_id(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_chain_id(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, CHAIN_ID_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, CHAIN_ID_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_chain_id = FF(1); // Constrain gas cost @@ -1298,9 +1273,7 @@ void AvmTraceBuilder::op_chain_id(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_version(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_version(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, VERSION_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, VERSION_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_version = FF(1); // Constrain gas cost @@ -1312,10 +1285,7 @@ void AvmTraceBuilder::op_version(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_block_number(uint8_t indirect, uint32_t dst_offset) { FF ia_value 
= kernel_trace_builder.op_block_number(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = - create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, BLOCK_NUMBER_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, BLOCK_NUMBER_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_block_number = FF(1); // Constrain gas cost @@ -1327,9 +1297,7 @@ void AvmTraceBuilder::op_block_number(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_coinbase(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_coinbase(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, COINBASE_SELECTOR, ia_value, AvmMemoryTag::FF); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, COINBASE_SELECTOR, ia_value, AvmMemoryTag::FF); row.main_sel_op_coinbase = FF(1); // Constrain gas cost @@ -1341,10 +1309,7 @@ void AvmTraceBuilder::op_coinbase(uint8_t indirect, uint32_t dst_offset) void AvmTraceBuilder::op_timestamp(uint8_t indirect, uint32_t dst_offset) { FF ia_value = kernel_trace_builder.op_timestamp(); - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - Row row = - create_kernel_lookup_opcode(indirect_dst_flag, dst_offset, TIMESTAMP_SELECTOR, ia_value, AvmMemoryTag::U64); + Row row = create_kernel_lookup_opcode(indirect, dst_offset, TIMESTAMP_SELECTOR, ia_value, AvmMemoryTag::U64); row.main_sel_op_timestamp = FF(1); // Constrain gas cost @@ -1356,32 +1321,24 @@ void AvmTraceBuilder::op_timestamp(uint8_t indirect, uint32_t dst_offset) // Helper function to add kernel lookup operations into the main trace Row AvmTraceBuilder::create_kernel_output_opcode(uint8_t indirect, uint32_t clk, uint32_t data_offset) { - bool indirect_data_flag = is_operand_indirect(indirect, 0); - - bool tag_match = true; - uint32_t direct_data_offset = data_offset; - if (indirect) { - auto read_ind_dst 
= - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, data_offset); - direct_data_offset = uint32_t(read_ind_dst.val); - tag_match = tag_match && read_ind_dst.tag_match; - } - - AvmMemTraceBuilder::MemRead read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_data_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); + auto [resolved_data] = unpack_indirects<1>(indirect, { data_offset }); + auto read_a = constrained_read_from_memory( + call_ptr, clk, resolved_data, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); + bool tag_match = read_a.tag_match; return Row{ .main_clk = clk, .main_ia = read_a.val, - .main_ind_addr_a = indirect_data_flag ? FF(data_offset) : FF(0), + .main_ind_addr_a = FF(read_a.indirect_address), .main_internal_return_ptr = internal_return_ptr, - .main_mem_addr_a = direct_data_offset, + .main_mem_addr_a = FF(read_a.direct_address), .main_pc = pc++, .main_r_in_tag = static_cast(AvmMemoryTag::FF), .main_rwa = 0, .main_sel_mem_op_a = 1, .main_sel_q_kernel_output_lookup = 1, - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_tag_err = FF(static_cast(!tag_match)), }; } @@ -1392,43 +1349,23 @@ Row AvmTraceBuilder::create_kernel_output_opcode_with_metadata(uint8_t indirect, uint32_t metadata_offset, AvmMemoryTag metadata_r_tag) { + auto [resolved_data, resolved_metadata] = unpack_indirects<2>(indirect, { data_offset, metadata_offset }); - bool indirect_a_flag = is_operand_indirect(indirect, 0); - bool indirect_b_flag = is_operand_indirect(indirect, 1); - - bool tag_match = true; - uint32_t direct_data_offset = data_offset; - uint32_t direct_metadata_offset = metadata_offset; - if (indirect_a_flag) { - auto read_a_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, data_offset); - direct_data_offset = 
static_cast(read_a_ind_dst.val); - - tag_match = tag_match && read_a_ind_dst.tag_match; - } - if (indirect_b_flag) { - auto read_b_ind_dst = mem_trace_builder.indirect_read_and_load_from_memory( - call_ptr, clk, IndirectRegister::IND_B, metadata_offset); - direct_metadata_offset = static_cast(read_b_ind_dst.val); - - tag_match = tag_match && read_b_ind_dst.tag_match; - } - - AvmMemTraceBuilder::MemRead read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_data_offset, data_r_tag, AvmMemoryTag::U0); - - AvmMemTraceBuilder::MemRead read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, direct_metadata_offset, metadata_r_tag, AvmMemoryTag::U0); + auto read_a = + constrained_read_from_memory(call_ptr, clk, resolved_data, data_r_tag, AvmMemoryTag::U0, IntermRegister::IA); + auto read_b = constrained_read_from_memory( + call_ptr, clk, resolved_metadata, metadata_r_tag, AvmMemoryTag::U0, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; return Row{ .main_clk = clk, .main_ia = read_a.val, .main_ib = read_b.val, - .main_ind_addr_a = indirect_a_flag ? data_offset : FF(0), - .main_ind_addr_b = indirect_b_flag ? 
metadata_offset : FF(0), + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), .main_internal_return_ptr = internal_return_ptr, - .main_mem_addr_a = direct_data_offset, - .main_mem_addr_b = direct_metadata_offset, + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), .main_pc = pc++, .main_r_in_tag = static_cast(data_r_tag), .main_rwa = 0, @@ -1436,8 +1373,9 @@ Row AvmTraceBuilder::create_kernel_output_opcode_with_metadata(uint8_t indirect, .main_sel_mem_op_a = 1, .main_sel_mem_op_b = 1, .main_sel_q_kernel_output_lookup = 1, - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_a_flag)), - .main_sel_resolve_ind_addr_b = FF(static_cast(indirect_b_flag)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_tag_err = FF(static_cast(!tag_match)), }; } @@ -1450,43 +1388,23 @@ Row AvmTraceBuilder::create_kernel_output_opcode_with_set_metadata_output_from_h FF exists = execution_hints.get_side_effect_hints().at(side_effect_counter); // TODO: throw error if incorrect - bool indirect_a_flag = is_operand_indirect(indirect, 0); - bool indirect_b_flag = is_operand_indirect(indirect, 1); - - bool tag_match = true; - uint32_t direct_data_offset = data_offset; - uint32_t direct_metadata_offset = metadata_offset; - if (indirect_a_flag) { - auto read_a_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, data_offset); - direct_data_offset = uint32_t(read_a_ind_dst.val); - - tag_match = tag_match && read_a_ind_dst.tag_match; - } - - if (indirect_b_flag) { - auto read_b_ind_dst = mem_trace_builder.indirect_read_and_load_from_memory( - call_ptr, clk, IndirectRegister::IND_B, metadata_offset); - direct_metadata_offset = uint32_t(read_b_ind_dst.val); - - tag_match = tag_match && read_b_ind_dst.tag_match; - } - - AvmMemTraceBuilder::MemRead read_a = 
mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_data_offset, AvmMemoryTag::FF, AvmMemoryTag::U8); + auto [resolved_data, resolved_metadata] = unpack_indirects<2>(indirect, { data_offset, metadata_offset }); + auto read_a = constrained_read_from_memory( + call_ptr, clk, resolved_data, AvmMemoryTag::FF, AvmMemoryTag::U8, IntermRegister::IA); - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IB, direct_metadata_offset, exists, AvmMemoryTag::FF, AvmMemoryTag::U8); + auto write_b = constrained_write_to_memory( + call_ptr, clk, resolved_metadata, exists, AvmMemoryTag::FF, AvmMemoryTag::U8, IntermRegister::IB); + bool tag_match = read_a.tag_match && write_b.tag_match; return Row{ .main_clk = clk, .main_ia = read_a.val, - .main_ib = exists, - .main_ind_addr_a = indirect_a_flag ? data_offset : FF(0), - .main_ind_addr_b = indirect_b_flag ? metadata_offset : FF(0), + .main_ib = write_b.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(write_b.indirect_address), .main_internal_return_ptr = internal_return_ptr, - .main_mem_addr_a = direct_data_offset, - .main_mem_addr_b = direct_metadata_offset, + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(write_b.direct_address), .main_pc = pc++, .main_r_in_tag = static_cast(AvmMemoryTag::FF), .main_rwa = 0, @@ -1494,8 +1412,9 @@ Row AvmTraceBuilder::create_kernel_output_opcode_with_set_metadata_output_from_h .main_sel_mem_op_a = 1, .main_sel_mem_op_b = 1, .main_sel_q_kernel_output_lookup = 1, - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_a_flag)), - .main_sel_resolve_ind_addr_b = FF(static_cast(indirect_b_flag)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(write_b.is_indirect)), + .main_tag_err = static_cast(!tag_match), .main_w_in_tag = static_cast(AvmMemoryTag::U8), }; } @@ -1508,39 +1427,22 @@ Row 
AvmTraceBuilder::create_kernel_output_opcode_with_set_value_from_hint(uint8_ FF value = execution_hints.get_side_effect_hints().at(side_effect_counter); // TODO: throw error if incorrect - bool indirect_a_flag = is_operand_indirect(indirect, 0); - bool indirect_b_flag = is_operand_indirect(indirect, 1); - - bool tag_match = true; - uint32_t direct_data_offset = data_offset; - uint32_t direct_metadata_offset = metadata_offset; - if (indirect) { - auto read_a_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, data_offset); - auto read_b_ind_dst = mem_trace_builder.indirect_read_and_load_from_memory( - call_ptr, clk, IndirectRegister::IND_B, metadata_offset); - - direct_data_offset = uint32_t(read_a_ind_dst.val); - direct_metadata_offset = uint32_t(read_b_ind_dst.val); - - tag_match = tag_match && read_a_ind_dst.tag_match && read_b_ind_dst.tag_match; - } - - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IA, direct_data_offset, value, AvmMemoryTag::FF, AvmMemoryTag::FF); - - AvmMemTraceBuilder::MemRead read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, direct_metadata_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); + auto [resolved_data, resolved_metadata] = unpack_indirects<2>(indirect, { data_offset, metadata_offset }); + auto write_a = constrained_write_to_memory( + call_ptr, clk, resolved_data, value, AvmMemoryTag::FF, AvmMemoryTag::FF, IntermRegister::IA); + auto read_b = constrained_read_from_memory( + call_ptr, clk, resolved_metadata, AvmMemoryTag::FF, AvmMemoryTag::FF, IntermRegister::IB); + bool tag_match = write_a.tag_match && read_b.tag_match; return Row{ .main_clk = clk, - .main_ia = value, + .main_ia = write_a.val, .main_ib = read_b.val, - .main_ind_addr_a = indirect_a_flag ? data_offset : FF(0), - .main_ind_addr_b = indirect_b_flag ? 
metadata_offset : FF(0), + .main_ind_addr_a = FF(write_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), .main_internal_return_ptr = internal_return_ptr, - .main_mem_addr_a = direct_data_offset, - .main_mem_addr_b = direct_metadata_offset, + .main_mem_addr_a = FF(write_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), .main_pc = pc, // No PC increment here since we do it in the specific ops .main_r_in_tag = static_cast(AvmMemoryTag::FF), .main_rwa = 1, @@ -1548,8 +1450,9 @@ Row AvmTraceBuilder::create_kernel_output_opcode_with_set_value_from_hint(uint8_ .main_sel_mem_op_a = 1, .main_sel_mem_op_b = 1, .main_sel_q_kernel_output_lookup = 1, - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_a_flag)), - .main_sel_resolve_ind_addr_b = FF(static_cast(indirect_b_flag)), + .main_sel_resolve_ind_addr_a = FF(static_cast(write_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_tag_err = static_cast(!tag_match), .main_w_in_tag = static_cast(AvmMemoryTag::FF), }; } @@ -1670,61 +1573,52 @@ void AvmTraceBuilder::op_sload(uint8_t indirect, uint32_t slot_offset, uint32_t { auto clk = static_cast(main_trace.size()) + 1; - // TODO: align usage of indirect with simulator - // TODO: support indirect slot offset - bool dest_offset_is_indirect = is_operand_indirect(indirect, 1); - - auto direct_dest_offset = dest_offset; - if (dest_offset_is_indirect) { - auto read_ind_dest_offset = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, dest_offset); - direct_dest_offset = uint32_t(read_ind_dest_offset.val); - } - auto read_dest_value = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_dest_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); - - AvmMemTraceBuilder::MemRead read_slot = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, slot_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); + auto 
[resolved_slot, resolved_dest] = unpack_indirects<2>(indirect, { slot_offset, dest_offset }); + auto read_slot = constrained_read_from_memory( + call_ptr, clk, resolved_slot, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); + // Read the slot value that we will write hints to in a row main_trace.push_back(Row{ .main_clk = clk, - .main_ia = read_dest_value.val, - .main_ib = read_slot.val, - .main_ind_addr_a = dest_offset_is_indirect ? dest_offset : 0, + .main_ia = read_slot.val, + .main_ind_addr_a = FF(read_slot.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_dest_offset), - .main_mem_addr_b = FF(slot_offset), + .main_mem_addr_a = FF(read_slot.direct_address), .main_pc = pc, // No PC increment here since this is the same opcode as the rows created below .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(dest_offset_is_indirect)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_slot.is_indirect)), + .main_tag_err = FF(static_cast(!read_slot.tag_match)), }); clk++; + AddressWithMode write_dst = resolved_dest; + // Loop over the size and write the hints to memory for (uint32_t i = 0; i < size; i++) { FF value = execution_hints.get_side_effect_hints().at(side_effect_counter); - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IA, direct_dest_offset + i, value, AvmMemoryTag::FF, AvmMemoryTag::FF); + auto write_a = constrained_write_to_memory( + call_ptr, clk, write_dst, value, AvmMemoryTag::U0, AvmMemoryTag::FF, IntermRegister::IA); auto row = Row{ .main_clk = clk, .main_ia = value, .main_ib = read_slot.val + i, // slot increments each time + .main_ind_addr_a = write_a.indirect_address, .main_internal_return_ptr = internal_return_ptr, - .main_mem_addr_a = direct_dest_offset + i, + .main_mem_addr_a = write_a.direct_address, // 
direct address incremented at end of the loop .main_pc = pc, // No PC increment here since this is the same opcode for all loop iterations - .main_r_in_tag = static_cast(AvmMemoryTag::FF), .main_rwa = 1, .main_sel_mem_op_a = 1, .main_sel_op_sload = FF(1), .main_sel_q_kernel_output_lookup = 1, + .main_sel_resolve_ind_addr_a = FF(static_cast(write_a.is_indirect)), + .main_tag_err = FF(static_cast(!write_a.tag_match)), .main_w_in_tag = static_cast(AvmMemoryTag::FF), }; // Output storage read to kernel outputs (performs lookup) + // Tuples of (slot, value) in the kernel lookup kernel_trace_builder.op_sload(clk, side_effect_counter, row.main_ib, row.main_ia); // Constrain gas cost @@ -1733,6 +1627,9 @@ void AvmTraceBuilder::op_sload(uint8_t indirect, uint32_t slot_offset, uint32_t main_trace.push_back(row); side_effect_counter++; clk++; + + // After the first loop, all future write destinations are direct, increment the direct address + write_dst = AddressWithMode{ AddressingMode::DIRECT, write_a.direct_address + 1 }; } pc++; } @@ -1741,54 +1638,47 @@ void AvmTraceBuilder::op_sstore(uint8_t indirect, uint32_t src_offset, uint32_t { auto clk = static_cast(main_trace.size()) + 1; - // TODO: align usage of indirect with simulator - // TODO: support indirect slot offset - bool src_offset_is_indirect = is_operand_indirect(indirect, 0); + auto [resolved_src, resolved_slot] = unpack_indirects<2>(indirect, { src_offset, slot_offset }); - // Resolve loads and indirect - auto direct_src_offset = src_offset; - if (src_offset_is_indirect) { - auto read_ind_src_offset = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, src_offset); - direct_src_offset = uint32_t(read_ind_src_offset.val); - } - auto read_src_value = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_src_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); - - auto read_slot = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, 
IntermRegister::IB, slot_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); + auto read_slot = constrained_read_from_memory( + call_ptr, clk, resolved_slot, AvmMemoryTag::FF, AvmMemoryTag::FF, IntermRegister::IA); main_trace.push_back(Row{ .main_clk = clk, - .main_ia = read_src_value.val, - .main_ib = read_slot.val, - .main_ind_addr_a = src_offset_is_indirect ? src_offset : 0, + .main_ia = read_slot.val, + .main_ind_addr_a = FF(read_slot.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_src_offset), - .main_mem_addr_b = FF(slot_offset), + .main_mem_addr_a = FF(read_slot.direct_address), .main_pc = pc, // No PC increment here since this is the same opcode as the rows created below .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(src_offset_is_indirect)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_slot.is_indirect)), + .main_tag_err = FF(static_cast(!read_slot.tag_match)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), }); clk++; + AddressWithMode read_src = resolved_src; + + // This loop reads a _size_ number of elements from memory and places them into a tuple of (ele, slot) + // in the kernel lookup. 
for (uint32_t i = 0; i < size; i++) { - auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_src_offset + i, AvmMemoryTag::FF, AvmMemoryTag::U0); + auto read_a = constrained_read_from_memory( + call_ptr, clk, read_src, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); Row row = Row{ .main_clk = clk, .main_ia = read_a.val, .main_ib = read_slot.val + i, // slot increments each time + .main_ind_addr_a = read_a.indirect_address, .main_internal_return_ptr = internal_return_ptr, - .main_mem_addr_a = direct_src_offset + i, + .main_mem_addr_a = read_a.direct_address, // direct address incremented at end of the loop .main_pc = pc, .main_r_in_tag = static_cast(AvmMemoryTag::FF), .main_sel_mem_op_a = 1, .main_sel_q_kernel_output_lookup = 1, + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_tag_err = FF(static_cast(!read_a.tag_match)), }; row.main_sel_op_sstore = FF(1); kernel_trace_builder.op_sstore(clk, side_effect_counter, row.main_ib, row.main_ia); @@ -1799,6 +1689,8 @@ void AvmTraceBuilder::op_sstore(uint8_t indirect, uint32_t src_offset, uint32_t main_trace.push_back(row); side_effect_counter++; clk++; + // All future reads are direct, increment the direct address + read_src = AddressWithMode{ AddressingMode::DIRECT, read_a.direct_address + 1 }; } pc++; } @@ -1888,15 +1780,12 @@ void AvmTraceBuilder::op_div( { auto clk = static_cast(main_trace.size()) + 1; - auto const res = resolve_ind_three(call_ptr, clk, indirect, a_offset, b_offset, dst_offset); - bool tag_match = res.tag_match; + auto [resolved_a, resolved_b, resolved_dst] = unpack_indirects<3>(indirect, { a_offset, b_offset, dst_offset }); // Reading from memory and loading into ia resp. ib. 
- auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); - tag_match = read_a.tag_match && read_b.tag_match; + auto read_a = constrained_read_from_memory(call_ptr, clk, resolved_a, in_tag, in_tag, IntermRegister::IA); + auto read_b = constrained_read_from_memory(call_ptr, clk, resolved_b, in_tag, in_tag, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; // a / b = c FF a = read_a.val; @@ -1921,7 +1810,7 @@ void AvmTraceBuilder::op_div( } // Write into memory value c from intermediate register ic. - mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + auto write_dst = constrained_write_to_memory(call_ptr, clk, resolved_dst, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::DIV); @@ -1930,17 +1819,17 @@ void AvmTraceBuilder::op_div( .main_clk = clk, .main_alu_in_tag = FF(static_cast(in_tag)), .main_call_ptr = call_ptr, - .main_ia = a, - .main_ib = b, + .main_ia = read_a.val, + .main_ib = read_b.val, .main_ic = c, - .main_ind_addr_a = res.indirect_flag_a ? FF(a_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(b_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_c ? FF(dst_offset) : FF(0), + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), + .main_ind_addr_c = FF(write_dst.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), .main_inv = tag_match ? 
inv : FF(1), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), + .main_mem_addr_c = FF(write_dst.direct_address), .main_op_err = tag_match ? error : FF(1), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(in_tag)), @@ -1949,9 +1838,9 @@ void AvmTraceBuilder::op_div( .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_div = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(write_dst.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), .main_w_in_tag = FF(static_cast(in_tag)), }); @@ -2243,18 +2132,8 @@ void AvmTraceBuilder::execute_gasleft(OpCode opcode, uint8_t indirect, uint32_t assert(opcode == OpCode::L2GASLEFT || opcode == OpCode::DAGASLEFT); auto clk = static_cast(main_trace.size()) + 1; - bool tag_match = true; - - uint32_t direct_dst_offset = dst_offset; - - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - if (indirect_dst_flag) { - auto read_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, dst_offset); - direct_dst_offset = uint32_t(read_ind_dst.val); - tag_match = tag_match && read_ind_dst.tag_match; - } + auto [resolved_dst] = unpack_indirects<1>(indirect, { dst_offset }); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, opcode); @@ -2268,29 +2147,25 @@ void AvmTraceBuilder::execute_gasleft(OpCode opcode, uint8_t indirect, uint32_t } // Write into memory from intermediate register ia. 
- mem_trace_builder.write_into_memory(call_ptr, - clk, - IntermRegister::IA, - direct_dst_offset, - gas_remaining, - AvmMemoryTag::U0, - AvmMemoryTag::FF); // TODO: probably will be U32 in final version + // TODO: probably will be U32 in final version + auto write_dst = constrained_write_to_memory( + call_ptr, clk, resolved_dst, gas_remaining, AvmMemoryTag::U0, AvmMemoryTag::FF, IntermRegister::IA); main_trace.push_back(Row{ .main_clk = clk, .main_call_ptr = call_ptr, .main_ia = gas_remaining, - .main_ind_addr_a = indirect_dst_flag ? FF(dst_offset) : FF(0), + .main_ind_addr_a = FF(write_dst.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_dst_offset), + .main_mem_addr_a = FF(write_dst.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::U0)), .main_rwa = FF(1), .main_sel_mem_op_a = FF(1), .main_sel_op_dagasleft = (opcode == OpCode::DAGASLEFT) ? FF(1) : FF(0), .main_sel_op_l2gasleft = (opcode == OpCode::L2GASLEFT) ? FF(1) : FF(0), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_dst_flag)), - .main_tag_err = FF(static_cast(!tag_match)), + .main_sel_resolve_ind_addr_a = FF(static_cast(is_operand_indirect(indirect, 0))), + .main_tag_err = FF(static_cast(!write_dst.tag_match)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), // TODO: probably will be U32 in final version // Should the circuit (pil) constrain U32? 
}); @@ -2483,14 +2358,16 @@ void AvmTraceBuilder::internal_return() } // TODO(ilyas: #6383): Temporary way to bulk write slices -void AvmTraceBuilder::write_slice_to_memory(uint8_t space_id, - uint32_t clk, - uint32_t dst_offset, - AvmMemoryTag r_tag, - AvmMemoryTag w_tag, - FF internal_return_ptr, - std::vector const& slice) +uint32_t AvmTraceBuilder::write_slice_to_memory(uint8_t space_id, + uint32_t clk, + AddressWithMode addr, + AvmMemoryTag r_tag, + AvmMemoryTag w_tag, + FF internal_return_ptr, + std::vector const& slice) { + bool is_indirect = addr.mode == AddressingMode::INDIRECT; + auto dst_offset = addr.offset; // We have 4 registers that we are able to use to write to memory within a single main trace row auto register_order = std::array{ IntermRegister::IA, IntermRegister::IB, IntermRegister::IC, IntermRegister::ID }; // If the slice size isnt a multiple of 4, we still need an extra row to write the remainder @@ -2511,33 +2388,53 @@ void AvmTraceBuilder::write_slice_to_memory(uint8_t space_id, if (offset >= slice.size()) { break; } - mem_trace_builder.write_into_memory( - space_id, clk + i, register_order[j], dst_offset + offset, slice.at(offset), r_tag, w_tag); + MemOp mem_write; + if (is_indirect) { + mem_write = constrained_write_to_memory( + space_id, clk + i, addr, slice.at(offset), r_tag, w_tag, IntermRegister::IA); + // Ensure future calls are direct + is_indirect = false; + dst_offset = mem_write.direct_address; + } else { + mem_trace_builder.write_into_memory( + space_id, clk + i, register_order[j], dst_offset + offset, slice.at(offset), r_tag, w_tag); + mem_write = MemOp{ + .is_indirect = false, + .indirect_address = 0, + .direct_address = dst_offset + offset, + .tag = w_tag, + .tag_match = true, + .val = slice.at(offset), + }; + } // This looks a bit gross, but it is fine for now.
if (j == 0) { main_row.main_ia = slice.at(offset); - main_row.main_mem_addr_a = FF(dst_offset + offset); + main_row.main_ind_addr_a = FF(mem_write.indirect_address); + main_row.main_sel_resolve_ind_addr_a = FF(static_cast(mem_write.is_indirect)); + main_row.main_mem_addr_a = FF(mem_write.direct_address); main_row.main_sel_mem_op_a = FF(1); main_row.main_rwa = FF(1); } else if (j == 1) { main_row.main_ib = slice.at(offset); - main_row.main_mem_addr_b = FF(dst_offset + offset); + main_row.main_mem_addr_b = FF(mem_write.direct_address); main_row.main_sel_mem_op_b = FF(1); main_row.main_rwb = FF(1); } else if (j == 2) { main_row.main_ic = slice.at(offset); - main_row.main_mem_addr_c = FF(dst_offset + offset); + main_row.main_mem_addr_c = FF(mem_write.direct_address); main_row.main_sel_mem_op_c = FF(1); main_row.main_rwc = FF(1); } else { main_row.main_id = slice.at(offset); - main_row.main_mem_addr_d = FF(dst_offset + offset); + main_row.main_mem_addr_d = FF(mem_write.direct_address); main_row.main_sel_mem_op_d = FF(1); main_row.main_rwd = FF(1); } } main_trace.emplace_back(main_row); } + return num_main_rows; } template std::array vec_to_arr(std::vector const& vec) @@ -2553,13 +2450,16 @@ template std::array vec_to_arr(std::vector template uint32_t AvmTraceBuilder::read_slice_to_memory(uint8_t space_id, uint32_t clk, - uint32_t src_offset, + AddressWithMode addr, AvmMemoryTag r_tag, AvmMemoryTag w_tag, FF internal_return_ptr, size_t slice_len, std::vector& slice) { + // If the mem_op is indirect, it goes into register A + bool is_indirect = addr.mode == AddressingMode::INDIRECT; + auto src_offset = addr.offset; // We have 4 registers that we are able to use to read from memory within a single main trace row auto register_order = std::array{ IntermRegister::IA, IntermRegister::IB, IntermRegister::IC, IntermRegister::ID }; // If the slice size isnt a multiple of 4, we still need an extra row to write the remainder @@ -2579,28 +2479,47 @@ uint32_t 
AvmTraceBuilder::read_slice_to_memory(uint8_t space_id, if (offset >= slice_len) { break; } - auto mem_read = mem_trace_builder.read_and_load_from_memory( - space_id, clk + i, register_order[j], src_offset + offset, r_tag, w_tag); + MemOp mem_read; + if (is_indirect) { + // If the first address is indirect we read it into register A, this can only happen once per slice read + mem_read = constrained_read_from_memory(space_id, clk + i, addr, r_tag, w_tag, IntermRegister::IA); + // Set this to false for the rest of the reads + is_indirect = false; + src_offset = mem_read.direct_address; + } else { + auto mem_load = mem_trace_builder.read_and_load_from_memory( + space_id, clk + i, register_order[j], src_offset + offset, r_tag, w_tag); + mem_read = MemOp{ + .is_indirect = false, + .indirect_address = 0, + .direct_address = src_offset + offset, + .tag = r_tag, + .tag_match = mem_load.tag_match, + .val = MEM(mem_load.val), + }; + } slice.emplace_back(MEM(mem_read.val)); // This looks a bit gross, but it is fine for now. 
if (j == 0) { main_row.main_ia = slice.at(offset); - main_row.main_mem_addr_a = FF(src_offset + offset); + main_row.main_ind_addr_a = FF(mem_read.indirect_address); + main_row.main_sel_resolve_ind_addr_a = FF(static_cast(mem_read.is_indirect)); + main_row.main_mem_addr_a = FF(mem_read.direct_address); main_row.main_sel_mem_op_a = FF(1); main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); } else if (j == 1) { main_row.main_ib = slice.at(offset); - main_row.main_mem_addr_b = FF(src_offset + offset); + main_row.main_mem_addr_b = FF(mem_read.direct_address); main_row.main_sel_mem_op_b = FF(1); main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); } else if (j == 2) { main_row.main_ic = slice.at(offset); - main_row.main_mem_addr_c = FF(src_offset + offset); + main_row.main_mem_addr_c = FF(mem_read.direct_address); main_row.main_sel_mem_op_c = FF(1); main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); } else { main_row.main_id = slice.at(offset); - main_row.main_mem_addr_d = FF(src_offset + offset); + main_row.main_mem_addr_d = FF(mem_read.direct_address); main_row.main_sel_mem_op_d = FF(1); main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); } @@ -2621,144 +2540,102 @@ uint32_t AvmTraceBuilder::read_slice_to_memory(uint8_t space_id, * @param addr_offset An index in memory pointing to the target contract address * @param args_offset An index in memory pointing to the first value of the input array for the external call * @param args_size The number of values in the input array for the external call - * @param ret_offset An index in memory pointing to where the first value of the external calls return value should be - * stored. + * @param ret_offset An index in memory pointing to where the first value of the external calls return value should + * be stored. 
* @param ret_size The number of values in the return array * @param success_offset An index in memory pointing to where the success flag (U8) of the external call should be * stored * @param function_selector_offset An index in memory pointing to the function selector of the external call (TEMP) */ -void AvmTraceBuilder::op_call([[maybe_unused]] uint8_t indirect, - [[maybe_unused]] uint32_t gas_offset, - [[maybe_unused]] uint32_t addr_offset, - [[maybe_unused]] uint32_t args_offset, - [[maybe_unused]] uint32_t args_size, - [[maybe_unused]] uint32_t ret_offset, - [[maybe_unused]] uint32_t ret_size, - [[maybe_unused]] uint32_t success_offset, +void AvmTraceBuilder::op_call(uint8_t indirect, + uint32_t gas_offset, + uint32_t addr_offset, + uint32_t args_offset, + uint32_t args_size, + uint32_t ret_offset, + uint32_t ret_size, + uint32_t success_offset, [[maybe_unused]] uint32_t function_selector_offset) { - // pc++; auto clk = static_cast(main_trace.size()) + 1; const ExternalCallHint& hint = execution_hints.externalcall_hints.at(external_call_counter); - // We can load up to 4 things per row - auto register_order = std::array{ IntermRegister::IA, IntermRegister::IB, IntermRegister::IC, IntermRegister::ID }; - // Constrain gas cost + gas_trace_builder.constrain_gas_for_external_call( clk, static_cast(hint.l2_gas_used), static_cast(hint.da_gas_used)); - // Indirect is ZEROTH, SECOND and FOURTH bit COME BACK TO MAKING THIS ALL SUPPORTED - auto read_ind_gas_offset = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, gas_offset); - auto read_ind_args_offset = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_C, args_offset); - - std::vector first_row_load = { - uint32_t(read_ind_gas_offset.val), - addr_offset, - uint32_t(read_ind_args_offset.val), - }; - std::vector first_row_values = {}; - for (uint32_t j = 0; j < first_row_load.size(); j++) { - // We just read and load to set up the 
constraints, we dont actually use these values for now. - // info("Register order ", register_order[j]); - auto mem_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, register_order[j], first_row_load[j], AvmMemoryTag::FF, AvmMemoryTag::U0); - first_row_values.emplace_back(mem_read.val); - } + + auto [resolved_gas_offset, + resolved_addr_offset, + resolved_args_offset, + resolved_args_size, + resolved_ret_offset, + resolved_success_offset] = + unpack_indirects<6>(indirect, { gas_offset, addr_offset, args_offset, args_size, ret_offset, success_offset }); + + // Should read the address next to read_gas as well (tuple of gas values (l2Gas, daGas)) + auto read_gas_l2 = constrained_read_from_memory( + call_ptr, clk, resolved_gas_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); + auto read_gas_da = mem_trace_builder.read_and_load_from_memory( + call_ptr, clk, IntermRegister::IB, read_gas_l2.direct_address + 1, AvmMemoryTag::FF, AvmMemoryTag::U0); + auto read_addr = constrained_read_from_memory( + call_ptr, clk, resolved_addr_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IC); + auto read_args = constrained_read_from_memory( + call_ptr, clk, resolved_args_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::ID); + bool tag_match = read_gas_l2.tag_match && read_gas_da.tag_match && read_addr.tag_match && read_args.tag_match; // We read the input and output addresses in one row as they should contain FF elements main_trace.push_back(Row{ .main_clk = clk, - .main_ia = first_row_values[0], /* gas_offset */ - .main_ib = first_row_values[1], /* addr_offset */ - .main_ic = first_row_values[2], /* args_offset */ - .main_ind_addr_a = gas_offset, - .main_ind_addr_c = args_offset, + .main_ia = read_gas_l2.val, /* gas_offset_l2 */ + .main_ib = read_gas_da.val, /* gas_offset_da */ + .main_ic = read_addr.val, /* addr_offset */ + .main_id = read_args.val, /* args_offset */ + .main_ind_addr_a = FF(read_gas_l2.indirect_address), + 
.main_ind_addr_c = FF(read_addr.indirect_address), + .main_ind_addr_d = FF(read_args.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = read_ind_gas_offset.val, - .main_mem_addr_b = addr_offset, - .main_mem_addr_c = read_ind_args_offset.val, - .main_pc = FF(pc++), + .main_mem_addr_a = FF(read_gas_l2.direct_address), + .main_mem_addr_b = FF(read_gas_l2.direct_address + 1), + .main_mem_addr_c = FF(read_addr.direct_address), + .main_mem_addr_d = FF(read_args.direct_address), + .main_pc = FF(pc), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), + .main_sel_mem_op_d = FF(1), .main_sel_op_external_call = FF(1), - .main_sel_resolve_ind_addr_a = FF(1), - .main_sel_resolve_ind_addr_c = FF(1), - }); - clk++; - // Read the rest on a separate line, remember that the 4th operand is indirect - auto read_ind_ret_offset = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, ret_offset); - // We just read and load to set up the constraints, we dont actually use these values for now. 
- auto mem_read_ret = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, uint32_t(read_ind_ret_offset.val), AvmMemoryTag::FF, AvmMemoryTag::U0); - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = mem_read_ret.val, /* ret_offset */ - .main_ind_addr_a = ret_offset, - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = read_ind_ret_offset.val, - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), - .main_sel_mem_op_a = FF(1), - .main_sel_resolve_ind_addr_a = FF(1), - }); - clk++; - auto mem_read_success = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, success_offset, AvmMemoryTag::U32, AvmMemoryTag::U0); - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = mem_read_success.val, /* success_offset */ - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(success_offset), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), - .main_sel_mem_op_a = FF(1), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_gas_l2.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(read_addr.is_indirect)), + .main_sel_resolve_ind_addr_d = FF(static_cast(read_args.is_indirect)), + .main_tag_err = FF(static_cast(!tag_match)), }); clk++; + // The return data hint is used for now; we check it has the same length as the ret_size + ASSERT(hint.return_data.size() == ret_size); + // Write the return data to memory + uint32_t num_rows = write_slice_to_memory( + call_ptr, clk, resolved_ret_offset, AvmMemoryTag::U0, AvmMemoryTag::FF, internal_return_ptr, hint.return_data); + clk += num_rows; + // Write the success flag to memory write_slice_to_memory(call_ptr, clk, - uint32_t(read_ind_ret_offset.val), + resolved_success_offset, AvmMemoryTag::U0, - AvmMemoryTag::FF, + AvmMemoryTag::U8, internal_return_ptr, - hint.return_data); - clk++; - write_slice_to_memory( - call_ptr, clk, success_offset, AvmMemoryTag::U0,
AvmMemoryTag::U8, internal_return_ptr, { hint.success }); + { hint.success }); external_call_counter++; + pc++; } void AvmTraceBuilder::op_get_contract_instance(uint8_t indirect, uint32_t address_offset, uint32_t dst_offset) { auto clk = static_cast(main_trace.size()) + 1; - bool tag_match = true; - uint32_t direct_address_offset = address_offset; - uint32_t direct_dst_offset = dst_offset; - - bool indirect_address_flag = is_operand_indirect(indirect, 0); - bool indirect_dst_flag = is_operand_indirect(indirect, 1); - if (indirect_address_flag) { - auto read_ind_address = mem_trace_builder.indirect_read_and_load_from_memory( - call_ptr, clk, IndirectRegister::IND_A, address_offset); - direct_address_offset = uint32_t(read_ind_address.val); - tag_match = tag_match && read_ind_address.tag_match; - } - - if (indirect_dst_flag) { - auto read_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_B, dst_offset); - direct_dst_offset = uint32_t(read_ind_dst.val); - tag_match = tag_match && read_ind_dst.tag_match; - } - - auto read_address = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_address_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); - auto read_dst = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, direct_dst_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); + auto [resolved_address_offset, resolved_dst_offset] = unpack_indirects<2>(indirect, { address_offset, dst_offset }); + auto read_address = constrained_read_from_memory( + call_ptr, clk, resolved_address_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); + bool tag_match = read_address.tag_match; // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::GETCONTRACTINSTANCE); @@ -2766,20 +2643,16 @@ void AvmTraceBuilder::op_get_contract_instance(uint8_t indirect, uint32_t addres main_trace.push_back(Row{ .main_clk = clk, .main_ia = read_address.val, - .main_ib = 
read_dst.val, - .main_ind_addr_a = indirect_address_flag ? address_offset : 0, - .main_ind_addr_b = indirect_dst_flag ? dst_offset : 0, + .main_ind_addr_a = FF(read_address.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_address_offset), - .main_mem_addr_b = FF(direct_dst_offset), + .main_mem_addr_a = FF(read_address.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_activate_gas = FF(1), // TODO: remove in the long term - .main_sel_mem_op_b = FF(1), .main_sel_op_get_contract_instance = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_address_flag)), - .main_sel_resolve_ind_addr_b = FF(static_cast(indirect_dst_flag)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_address.is_indirect)), + .main_tag_err = FF(static_cast(!tag_match)), }); clk++; // Read the contract instance @@ -2794,7 +2667,7 @@ void AvmTraceBuilder::op_get_contract_instance(uint8_t indirect, uint32_t addres contract_instance.public_key_hash }; write_slice_to_memory(call_ptr, clk, - direct_dst_offset, + resolved_dst_offset, AvmMemoryTag::U0, AvmMemoryTag::FF, internal_return_ptr, @@ -2814,41 +2687,21 @@ void AvmTraceBuilder::op_to_radix_le( uint8_t indirect, uint32_t src_offset, uint32_t dst_offset, uint32_t radix, uint32_t num_limbs) { auto clk = static_cast(main_trace.size()) + 1; - bool tag_match = true; - uint32_t direct_src_offset = src_offset; - uint32_t direct_dst_offset = dst_offset; - - bool indirect_src_flag = is_operand_indirect(indirect, 0); - bool indirect_dst_flag = is_operand_indirect(indirect, 1); - - if (indirect_src_flag) { - auto read_ind_src = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, src_offset); - direct_src_offset = uint32_t(read_ind_src.val); - tag_match = tag_match && read_ind_src.tag_match; - } + auto [resolved_src_offset, resolved_dst_offset] = 
unpack_indirects<2>(indirect, { src_offset, dst_offset }); - if (indirect_dst_flag) { - auto read_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_B, dst_offset); - direct_dst_offset = uint32_t(read_ind_dst.val); - tag_match = tag_match && read_ind_dst.tag_match; - } + auto read_src = constrained_read_from_memory( + call_ptr, clk, resolved_src_offset, AvmMemoryTag::FF, AvmMemoryTag::U8, IntermRegister::IA); - auto read_src = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_src_offset, AvmMemoryTag::FF, AvmMemoryTag::U8); - // Read in the memory address of where the first limb should be stored (the read_tag must be U32 and write tag - // U8) - auto read_dst = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, direct_dst_offset, AvmMemoryTag::FF, AvmMemoryTag::U8); + auto read_dst = constrained_read_from_memory( + call_ptr, clk, resolved_dst_offset, AvmMemoryTag::FF, AvmMemoryTag::U8, IntermRegister::IB); FF input = read_src.val; - FF dst_addr = read_dst.val; // In case of a memory tag error, we do not perform the computation. // Therefore, we do not create any entry in gadget table and return a vector of 0 - std::vector res = tag_match ? conversion_trace_builder.op_to_radix_le(input, radix, num_limbs, clk) - : std::vector(num_limbs, 0); + std::vector res = read_src.tag_match + ? conversion_trace_builder.op_to_radix_le(input, radix, num_limbs, clk) + : std::vector(num_limbs, 0); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::TORADIXLE); @@ -2859,21 +2712,21 @@ void AvmTraceBuilder::op_to_radix_le( .main_clk = clk, .main_call_ptr = call_ptr, .main_ia = input, - .main_ib = dst_addr, + .main_ib = read_dst.val, .main_ic = radix, .main_id = num_limbs, - .main_ind_addr_a = indirect_src_flag ? src_offset : 0, - .main_ind_addr_b = indirect_dst_flag ? 
dst_offset : 0, + .main_ind_addr_a = read_src.indirect_address, + .main_ind_addr_b = read_dst.indirect_address, .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_src_offset), - .main_mem_addr_b = FF(direct_dst_offset), + .main_mem_addr_a = read_src.direct_address, + .main_mem_addr_b = read_dst.direct_address, .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), .main_sel_op_radix_le = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_src_flag)), - .main_sel_resolve_ind_addr_b = FF(static_cast(indirect_dst_flag)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_src.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_dst.is_indirect)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), }); // Increment the clock so we dont write at the same clock cycle @@ -2887,7 +2740,7 @@ void AvmTraceBuilder::op_to_radix_le( ff_res.emplace_back(limb); } write_slice_to_memory( - call_ptr, clk, direct_dst_offset, AvmMemoryTag::FF, AvmMemoryTag::U8, FF(internal_return_ptr), ff_res); + call_ptr, clk, resolved_dst_offset, AvmMemoryTag::FF, AvmMemoryTag::U8, FF(internal_return_ptr), ff_res); } /** @@ -2898,7 +2751,8 @@ void AvmTraceBuilder::op_to_radix_le( * instance of sha256 compression. * @param input_offset An index in memory pointing to the first U32 value of the input array to be used in the next * instance of sha256 compression. - * @param output_offset An index in memory pointing to where the first U32 value of the output array should be stored. + * @param output_offset An index in memory pointing to where the first U32 value of the output array should be + * stored. 
*/ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, uint32_t output_offset, @@ -2910,15 +2764,14 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, // Resolve the indirect flags, the results of this function are used to determine the memory offsets // that point to the starting memory addresses for the input and output values. - // Note::This function will add memory reads at clk in the mem_trace_builder - auto const res = resolve_ind_three(call_ptr, clk, indirect, h_init_offset, input_offset, output_offset); + auto [resolved_h_init_offset, resolved_input_offset, resolved_output_offset] = + unpack_indirects<3>(indirect, { h_init_offset, input_offset, output_offset }); - auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, res.direct_a_offset, AvmMemoryTag::U32, AvmMemoryTag::U32); - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, res.direct_b_offset, AvmMemoryTag::U32, AvmMemoryTag::U32); - auto read_c = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IC, res.direct_c_offset, AvmMemoryTag::U32, AvmMemoryTag::U32); + auto read_a = constrained_read_from_memory( + call_ptr, clk, resolved_h_init_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IA); + auto read_b = constrained_read_from_memory( + call_ptr, clk, resolved_input_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::SHA256COMPRESSION); @@ -2933,26 +2786,21 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, // did not lay down constraints), but this is a simplification main_trace.push_back(Row{ .main_clk = clk, - .main_ia = read_a.val, // First element of output (trivially 0) - .main_ib = read_b.val, // First element of state - .main_ic = read_c.val, // First element of input - .main_ind_addr_a = 
res.indirect_flag_a ? FF(h_init_offset) : FF(0), - .main_ind_addr_b = res.indirect_flag_b ? FF(input_offset) : FF(0), - .main_ind_addr_c = res.indirect_flag_a ? FF(output_offset) : FF(0), + .main_ia = read_a.val, // First element of state + .main_ib = read_b.val, // First element of input + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(res.direct_a_offset), - .main_mem_addr_b = FF(res.direct_b_offset), - .main_mem_addr_c = FF(res.direct_c_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), - .main_sel_mem_op_c = FF(1), .main_sel_op_sha256 = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(res.indirect_flag_a)), - .main_sel_resolve_ind_addr_b = FF(static_cast(res.indirect_flag_b)), - .main_sel_resolve_ind_addr_c = FF(static_cast(res.indirect_flag_c)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U32)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_tag_err = FF(static_cast(!tag_match)), }); // We store the current clk this main trace row occurred so that we can line up the sha256 gadget operation at // the same clk later. @@ -2966,7 +2814,7 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, // Read results are written to h_init array. 
read_slice_to_memory(call_ptr, clk, - res.direct_a_offset, + resolved_h_init_offset, AvmMemoryTag::U32, AvmMemoryTag::U32, FF(internal_return_ptr), @@ -2978,7 +2826,7 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, // Read results are written to input array read_slice_to_memory(call_ptr, clk, - res.direct_b_offset, + resolved_input_offset, AvmMemoryTag::U32, AvmMemoryTag::U32, FF(internal_return_ptr), @@ -3001,15 +2849,21 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, } // Write the result to memory after - write_slice_to_memory( - call_ptr, clk, res.direct_c_offset, AvmMemoryTag::U32, AvmMemoryTag::U32, FF(internal_return_ptr), ff_result); + write_slice_to_memory(call_ptr, + clk, + resolved_output_offset, + AvmMemoryTag::U32, + AvmMemoryTag::U32, + FF(internal_return_ptr), + ff_result); } /** * @brief SHA256 Hash with direct or indirect memory access. * This function is temporary until we have transitioned to sha256Compression * @param indirect byte encoding information about indirect/direct memory access. - * @param output_offset An index in memory pointing to where the first U32 value of the output array should be stored. + * @param output_offset An index in memory pointing to where the first U32 value of the output array should be + * stored. * @param input_offset An index in memory pointing to the first U8 value of the state array to be used in the next * instance of sha256. * @param input_size_offset An index in memory pointing to the U32 value of the input size. 
@@ -3020,144 +2874,62 @@ void AvmTraceBuilder::op_sha256(uint8_t indirect, uint32_t input_size_offset) { auto clk = static_cast(main_trace.size()) + 1; - bool tag_match = true; - uint32_t direct_src_offset = input_offset; - uint32_t direct_dst_offset = output_offset; + auto [resolved_output_offset, resolved_input_offset, resolved_input_size_offset] = + unpack_indirects<3>(indirect, { output_offset, input_offset, input_size_offset }); - bool indirect_src_flag = is_operand_indirect(indirect, 1); - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - - if (indirect_src_flag) { - auto read_ind_src = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, input_offset); - direct_src_offset = uint32_t(read_ind_src.val); - tag_match = tag_match && read_ind_src.tag_match; - } - - if (indirect_dst_flag) { - auto read_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_C, output_offset); - direct_dst_offset = uint32_t(read_ind_dst.val); - tag_match = tag_match && read_ind_dst.tag_match; - } - // Note we load the input and output onto one line in the main trace and the length on the next line - // We do this so we can load two different AvmMemoryTags (u8 for the I/O and u32 for the length) - auto input_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_src_offset, AvmMemoryTag::U8, AvmMemoryTag::U8); - auto output_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IC, direct_dst_offset, AvmMemoryTag::U8, AvmMemoryTag::U8); - - // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::SHA256); + auto input_length_read = constrained_read_from_memory( + call_ptr, clk, resolved_input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IB); + // Store the clock time that we will use to line up the gadget later auto sha256_op_clk = clk; - main_trace.push_back(Row{ - .main_clk = 
clk, - .main_ia = input_read.val, // First element of input - .main_ic = output_read.val, // First element of output - .main_ind_addr_a = indirect_src_flag ? FF(input_offset) : FF(0), - .main_ind_addr_c = indirect_dst_flag ? FF(output_offset) : FF(0), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_src_offset), // input - .main_mem_addr_c = FF(direct_dst_offset), // output - .main_pc = FF(pc++), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U8)), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_c = FF(1), - .main_sel_op_sha256 = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_src_flag)), - .main_sel_resolve_ind_addr_c = FF(static_cast(indirect_dst_flag)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), - }); - clk++; - auto input_length_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U32); main_trace.push_back(Row{ .main_clk = clk, .main_ib = input_length_read.val, // Message Length + .main_ind_addr_b = FF(input_length_read.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_b = FF(input_size_offset), // length - .main_pc = FF(pc), + .main_mem_addr_b = FF(input_length_read.direct_address), + .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), .main_sel_mem_op_b = FF(1), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U32)), + .main_sel_op_sha256 = FF(1), + .main_sel_resolve_ind_addr_b = FF(static_cast(input_length_read.is_indirect)), + .main_tag_err = FF(static_cast(!input_length_read.tag_match)), }); clk++; std::vector input; input.reserve(uint32_t(input_length_read.val)); - - // We unroll this loop because the function typically expects arrays and for this temporary sha256 function we - // have a dynamic amount of input so we will use a vector. 
- auto register_order = std::array{ IntermRegister::IA, IntermRegister::IB, IntermRegister::IC, IntermRegister::ID }; - // If the slice size isnt a multiple of 4, we still need an extra row to write the remainder - uint32_t const num_main_rows = static_cast(input_length_read.val) / 4 + - static_cast(uint32_t(input_length_read.val) % 4 != 0); - for (uint32_t i = 0; i < num_main_rows; i++) { - Row main_row{ - .main_clk = clk + i, - .main_internal_return_ptr = FF(internal_return_ptr), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U8)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), - }; - // Write 4 values to memory in each_row - for (uint32_t j = 0; j < 4; j++) { - auto offset = i * 4 + j; - // If we exceed the slice size, we break - if (offset >= uint32_t(input_length_read.val)) { - break; - } - auto mem_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk + i, register_order[j], direct_src_offset + offset, AvmMemoryTag::U8, AvmMemoryTag::U8); - input.emplace_back(uint8_t(mem_read.val)); - // This looks a bit gross, but it is fine for now. 
- if (j == 0) { - main_row.main_ia = input.at(offset); - main_row.main_mem_addr_a = FF(direct_src_offset + offset); - main_row.main_sel_mem_op_a = FF(1); - main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); - } else if (j == 1) { - main_row.main_ib = input.at(offset); - main_row.main_mem_addr_b = FF(direct_src_offset + offset); - main_row.main_sel_mem_op_b = FF(1); - main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); - } else if (j == 2) { - main_row.main_ic = input.at(offset); - main_row.main_mem_addr_c = FF(direct_src_offset + offset); - main_row.main_sel_mem_op_c = FF(1); - main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); - } else { - main_row.main_id = input.at(offset); - main_row.main_mem_addr_d = FF(direct_src_offset + offset); - main_row.main_sel_mem_op_d = FF(1); - main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); - } - } - main_trace.emplace_back(main_row); - } - + uint32_t num_main_rows = read_slice_to_memory(call_ptr, + clk, + resolved_input_offset, + AvmMemoryTag::U8, + AvmMemoryTag::U0, + FF(internal_return_ptr), + uint32_t(input_length_read.val), + input); clk += num_main_rows; - + // std::array result = sha256_trace_builder.sha256(input, sha256_op_clk); - // We convert the results to field elements here + std::vector ff_result; for (uint32_t i = 0; i < 32; i++) { ff_result.emplace_back(result[i]); } // Write the result to memory after write_slice_to_memory( - call_ptr, clk, direct_dst_offset, AvmMemoryTag::U8, AvmMemoryTag::U8, FF(internal_return_ptr), ff_result); + call_ptr, clk, resolved_output_offset, AvmMemoryTag::U0, AvmMemoryTag::U8, FF(internal_return_ptr), ff_result); } /** * @brief Poseidon2 Permutation with direct or indirect memory access. * * @param indirect byte encoding information about indirect/direct memory access. - * @param input_offset An index in memory pointing to the first Field value of the input array to be used in the next - * instance of poseidon2 permutation. 
- * @param output_offset An index in memory pointing to where the first Field value of the output array should be stored. + * @param input_offset An index in memory pointing to the first Field value of the input array to be used in the + * next instance of poseidon2 permutation. + * @param output_offset An index in memory pointing to where the first Field value of the output array should be + * stored. */ void AvmTraceBuilder::op_poseidon2_permutation(uint8_t indirect, uint32_t input_offset, uint32_t output_offset) { @@ -3166,32 +2938,14 @@ void AvmTraceBuilder::op_poseidon2_permutation(uint8_t indirect, uint32_t input_ // Resolve the indirect flags, the results of this function are used to determine the memory offsets // that point to the starting memory addresses for the input, output and h_init values // Note::This function will add memory reads at clk in the mem_trace_builder - bool tag_match = true; - uint32_t direct_src_offset = input_offset; - uint32_t direct_dst_offset = output_offset; - - bool indirect_src_flag = is_operand_indirect(indirect, 0); - bool indirect_dst_flag = is_operand_indirect(indirect, 1); - - if (indirect_src_flag) { - auto read_ind_src = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, input_offset); - direct_src_offset = uint32_t(read_ind_src.val); - tag_match = tag_match && read_ind_src.tag_match; - } - - if (indirect_dst_flag) { - auto read_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_B, output_offset); - direct_dst_offset = uint32_t(read_ind_dst.val); - tag_match = tag_match && read_ind_dst.tag_match; - } + auto [resolved_input_offset, resolved_output_offset] = + unpack_indirects<2>(indirect, { input_offset, output_offset }); - auto read_a = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_src_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); - // Read in the memory address of where the first limb 
should be stored - auto read_b = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, direct_dst_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); + auto read_a = constrained_read_from_memory( + call_ptr, clk, resolved_input_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); + auto read_b = constrained_read_from_memory( + call_ptr, clk, resolved_output_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IB); + bool tag_match = read_a.tag_match && read_b.tag_match; // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::POSEIDON2); @@ -3200,19 +2954,19 @@ void AvmTraceBuilder::op_poseidon2_permutation(uint8_t indirect, uint32_t input_ .main_clk = clk, .main_ia = read_a.val, // First element of input .main_ib = read_b.val, // First element of output (trivially zero) - .main_ind_addr_a = indirect_src_flag ? FF(input_offset) : FF(0), - .main_ind_addr_b = indirect_dst_flag ? FF(output_offset) : FF(0), + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(read_b.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_src_offset), - .main_mem_addr_b = FF(direct_dst_offset), + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(read_b.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), .main_sel_op_poseidon2 = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_src_flag)), - .main_sel_resolve_ind_addr_b = FF(static_cast(indirect_dst_flag)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(read_b.is_indirect)), + .main_tag_err = FF(static_cast(!tag_match)), }); // We store the current clk this main trace row occurred so that we can line up the poseidon2 gadget operation // at the same clk later. 
@@ -3222,8 +2976,14 @@ void AvmTraceBuilder::op_poseidon2_permutation(uint8_t indirect, uint32_t input_ clk++; // Read results are written to input array. std::vector input_vec; - read_slice_to_memory( - call_ptr, clk, direct_src_offset, AvmMemoryTag::FF, AvmMemoryTag::FF, FF(internal_return_ptr), 4, input_vec); + read_slice_to_memory(call_ptr, + clk, + resolved_input_offset, + AvmMemoryTag::FF, + AvmMemoryTag::U0, + FF(internal_return_ptr), + 4, + input_vec); // Increment the clock by 1 since (4 reads / 4 reads per row = 1) clk += 1; @@ -3235,19 +2995,21 @@ void AvmTraceBuilder::op_poseidon2_permutation(uint8_t indirect, uint32_t input_ } // // Write the result to memory after write_slice_to_memory( - call_ptr, clk, direct_dst_offset, AvmMemoryTag::FF, AvmMemoryTag::FF, FF(internal_return_ptr), ff_result); + call_ptr, clk, resolved_output_offset, AvmMemoryTag::U0, AvmMemoryTag::FF, FF(internal_return_ptr), ff_result); } /** * @brief Keccakf1600 with direct or indirect memory access. - * This function temporarily has the same interface as the kecccak opcode for compatibility, when the keccak migration - * is complete (to keccakf1600) We will update this function call as we will not likely need input_size_offset + * This function temporarily has the same interface as the kecccak opcode for compatibility, when the keccak + * migration is complete (to keccakf1600) We will update this function call as we will not likely need + * input_size_offset * @param indirect byte encoding information about indirect/direct memory access. - * @param output_offset An index in memory pointing to where the first u64 value of the output array should be stored. + * @param output_offset An index in memory pointing to where the first u64 value of the output array should be + * stored. * @param input_offset An index in memory pointing to the first u64 value of the input array to be used in the next * instance of poseidon2 permutation. 
- * @param input_size offset An index in memory pointing to the size of the input array. Temporary while we maintain the - * same interface as keccak (this is fixed to 25) + * @param input_size offset An index in memory pointing to the size of the input array. Temporary while we maintain + * the same interface as keccak (this is fixed to 25) */ void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, uint32_t output_offset, @@ -3256,32 +3018,13 @@ void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, { // What happens if the input_size_offset is > 25 when the state is more that that? auto clk = static_cast(main_trace.size()) + 1; - // bool tag_match = res.tag_match; - bool tag_match = true; - uint32_t direct_src_offset = input_offset; - uint32_t direct_dst_offset = output_offset; - - bool indirect_src_flag = is_operand_indirect(indirect, 1); - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - - if (indirect_src_flag) { - auto read_ind_src = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, input_offset); - direct_src_offset = uint32_t(read_ind_src.val); - tag_match = tag_match && read_ind_src.tag_match; - } - - if (indirect_dst_flag) { - auto read_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_C, output_offset); - direct_dst_offset = uint32_t(read_ind_dst.val); - tag_match = tag_match && read_ind_dst.tag_match; - } - - auto input_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_src_offset, AvmMemoryTag::U64, AvmMemoryTag::U64); - auto output_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IC, direct_dst_offset, AvmMemoryTag::U64, AvmMemoryTag::U64); + auto [resolved_output_offset, resolved_input_offset] = + unpack_indirects<2>(indirect, { output_offset, input_offset }); + auto input_read = constrained_read_from_memory( + call_ptr, clk, resolved_input_offset, 
AvmMemoryTag::U64, AvmMemoryTag::U0, IntermRegister::IA); + auto output_read = constrained_read_from_memory( + call_ptr, clk, resolved_output_offset, AvmMemoryTag::U64, AvmMemoryTag::U0, IntermRegister::IC); + bool tag_match = input_read.tag_match && output_read.tag_match; // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::KECCAKF1600); @@ -3290,19 +3033,19 @@ void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, .main_clk = clk, .main_ia = input_read.val, // First element of input .main_ic = output_read.val, // First element of output - .main_ind_addr_a = indirect_src_flag ? FF(input_offset) : FF(0), - .main_ind_addr_c = indirect_dst_flag ? FF(output_offset) : FF(0), + .main_ind_addr_a = FF(input_read.indirect_address), + .main_ind_addr_c = FF(output_read.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_src_offset), // input - .main_mem_addr_c = FF(direct_dst_offset), // output + .main_mem_addr_a = FF(input_read.direct_address), + .main_mem_addr_c = FF(output_read.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::U64)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_op_keccak = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_src_flag)), - .main_sel_resolve_ind_addr_c = FF(static_cast(indirect_dst_flag)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U64)), + .main_sel_resolve_ind_addr_a = FF(static_cast(input_read.is_indirect)), + .main_sel_resolve_ind_addr_c = FF(static_cast(output_read.is_indirect)), + .main_tag_err = FF(static_cast(!tag_match)), }); // We store the current clk this main trace row occurred so that we can line up the keccak gadget operation // at the same clk later. 
@@ -3310,7 +3053,7 @@ void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, // We need to increment the clk clk++; auto input_length_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U32); + call_ptr, clk, IntermRegister::IB, input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0); main_trace.push_back(Row{ .main_clk = clk, .main_ib = input_length_read.val, // Message Length @@ -3319,18 +3062,24 @@ void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, .main_pc = FF(pc), .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), .main_sel_mem_op_b = FF(1), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U32)), + .main_tag_err = FF(static_cast(!input_length_read.tag_match)), }); clk++; // Array input is fixed to 1600 bits std::vector input_vec; // Read results are written to input array - read_slice_to_memory( - call_ptr, clk, direct_src_offset, AvmMemoryTag::U64, AvmMemoryTag::U64, FF(internal_return_ptr), 25, input_vec); + uint32_t num_main_rows = read_slice_to_memory(call_ptr, + clk, + resolved_input_offset, + AvmMemoryTag::U64, + AvmMemoryTag::U0, + FF(internal_return_ptr), + 25, + input_vec); std::array input = vec_to_arr(input_vec); // Increment the clock by 7 since (25 reads / 4 reads per row = 7) - clk += 7; + clk += num_main_rows; // Now that we have read all the values, we can perform the operation to get the resulting witness. // Note: We use the keccak_op_clk to ensure that the keccakf1600 operation is performed at the same clock cycle @@ -3344,16 +3093,16 @@ void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, // Write the result to memory after write_slice_to_memory( - call_ptr, clk, direct_dst_offset, AvmMemoryTag::U64, AvmMemoryTag::U64, FF(internal_return_ptr), ff_result); + call_ptr, clk, resolved_output_offset, AvmMemoryTag::U0, AvmMemoryTag::U64, FF(internal_return_ptr), ff_result); } /** * @brief Keccak with direct or indirect memory access. 
* Keccak is TEMPORARY while we wait for the transition to keccakf1600, so we do the minimal to store the result * @param indirect byte encoding information about indirect/direct memory access. - * @param output_offset An index in memory pointing to where the first u8 value of the output array should be stored. - * @param input_offset An index in memory pointing to the first u8 value of the input array to be used in the next - * instance of poseidon2 permutation. + * @param output_offset An index in memory pointing to where the first u8 value of the output array should be + * stored. + * @param input_offset An index in memory pointing to the first u8 value of the input array to be used * @param input_size offset An index in memory pointing to the size of the input array. */ void AvmTraceBuilder::op_keccak(uint8_t indirect, @@ -3362,76 +3111,44 @@ void AvmTraceBuilder::op_keccak(uint8_t indirect, uint32_t input_size_offset) { auto clk = static_cast(main_trace.size()) + 1; - bool tag_match = true; - uint32_t direct_src_offset = input_offset; - uint32_t direct_dst_offset = output_offset; - - bool indirect_src_flag = is_operand_indirect(indirect, 1); - bool indirect_dst_flag = is_operand_indirect(indirect, 0); - - if (indirect_src_flag) { - auto read_ind_src = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, input_offset); - direct_src_offset = uint32_t(read_ind_src.val); - tag_match = tag_match && read_ind_src.tag_match; - } - - if (indirect_dst_flag) { - auto read_ind_dst = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_C, output_offset); - direct_dst_offset = uint32_t(read_ind_dst.val); - tag_match = tag_match && read_ind_dst.tag_match; - } - // Note we load the input and output onto one line in the main trace and the length on the next line - // We do this so we can load two different AvmMemoryTags (u8 for the I/O and u32 for the length) - auto input_read = 
mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_src_offset, AvmMemoryTag::U8, AvmMemoryTag::U8); - auto output_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IC, direct_dst_offset, AvmMemoryTag::U8, AvmMemoryTag::U8); + auto [resolved_output_offset, resolved_input_offset, resolved_input_size_offset] = + unpack_indirects<3>(indirect, { output_offset, input_offset, input_size_offset }); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::KECCAK); + // Read the input length first + auto input_length_read = constrained_read_from_memory( + call_ptr, clk, resolved_input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IB); + // Store the clock time that we will use to line up the gadget later auto keccak_op_clk = clk; - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = input_read.val, // First element of input - .main_ic = output_read.val, // First element of output - .main_ind_addr_a = indirect_src_flag ? FF(input_offset) : FF(0), - .main_ind_addr_c = indirect_dst_flag ? 
FF(output_offset) : FF(0), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_src_offset), // input - .main_mem_addr_c = FF(direct_dst_offset), // output - .main_pc = FF(pc++), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U8)), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_c = FF(1), - .main_sel_op_keccak = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_src_flag)), - .main_sel_resolve_ind_addr_c = FF(static_cast(indirect_dst_flag)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), - }); - clk++; - auto input_length_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U32); main_trace.push_back(Row{ .main_clk = clk, .main_ib = input_length_read.val, // Message Length + .main_ind_addr_b = FF(input_length_read.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_b = FF(input_size_offset), // length - .main_pc = FF(pc), + .main_mem_addr_b = FF(input_length_read.direct_address), // length + .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), .main_sel_mem_op_b = FF(1), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U32)), + .main_sel_op_keccak = FF(1), + .main_sel_resolve_ind_addr_b = FF(static_cast(input_length_read.is_indirect)), + .main_tag_err = FF(static_cast(!input_length_read.tag_match)), }); clk++; std::vector input; input.reserve(uint32_t(input_length_read.val)); - - uint32_t num_main_rows = read_slice_to_memory( - call_ptr, clk, direct_src_offset, AvmMemoryTag::U8, AvmMemoryTag::U8, FF(internal_return_ptr), 4, input); + // Read the slice length from memory + uint32_t num_main_rows = read_slice_to_memory(call_ptr, + clk, + resolved_input_offset, + AvmMemoryTag::U8, + AvmMemoryTag::U8, + FF(internal_return_ptr), + uint32_t(input_length_read.val), + input); clk += num_main_rows; @@ -3443,7 +3160,7 @@ void AvmTraceBuilder::op_keccak(uint8_t indirect, } // 
Write the result to memory after write_slice_to_memory( - call_ptr, clk, direct_dst_offset, AvmMemoryTag::U8, AvmMemoryTag::U8, FF(internal_return_ptr), ff_result); + call_ptr, clk, resolved_output_offset, AvmMemoryTag::U8, AvmMemoryTag::U8, FF(internal_return_ptr), ff_result); } /** @@ -3460,19 +3177,10 @@ void AvmTraceBuilder::op_pedersen_hash(uint8_t indirect, uint32_t input_size_offset) { auto clk = static_cast(main_trace.size()) + 1; - bool tag_match = true; - uint32_t direct_src_offset = input_offset; - bool indirect_src_flag = is_operand_indirect(indirect, 2); - - if (indirect_src_flag) { - auto read_ind_src = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, input_offset); - direct_src_offset = uint32_t(read_ind_src.val); - tag_match = tag_match && read_ind_src.tag_match; - } - - auto input_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_src_offset, AvmMemoryTag::FF, AvmMemoryTag::FF); + auto [resolved_gen_ctx_offset, resolved_output_offset, resolved_input_offset, resolved_input_size_offset] = + unpack_indirects<4>(indirect, { gen_ctx_offset, output_offset, input_offset, input_size_offset }); + auto input_read = constrained_read_from_memory( + call_ptr, clk, resolved_input_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); // Constrain gas cost gas_trace_builder.constrain_gas_lookup(clk, OpCode::PEDERSEN); @@ -3482,41 +3190,44 @@ void AvmTraceBuilder::op_pedersen_hash(uint8_t indirect, main_trace.push_back(Row{ .main_clk = clk, .main_ia = input_read.val, // First element of input - .main_ind_addr_a = indirect_src_flag ? 
FF(input_offset) : FF(0), + .main_ind_addr_a = FF(input_read.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_src_offset), // input + .main_mem_addr_a = FF(input_read.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), .main_sel_op_pedersen = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_src_flag)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), + .main_sel_resolve_ind_addr_a = FF(static_cast(input_read.is_indirect)), + .main_tag_err = FF(static_cast(!input_read.tag_match)), }); clk++; // We read the input size and gen_ctx addresses in one row as they should contain U32 elements - auto input_size_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U32); - auto gen_ctx_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, gen_ctx_offset, AvmMemoryTag::U32, AvmMemoryTag::U32); + auto input_size_read = constrained_read_from_memory( + call_ptr, clk, resolved_input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IA); + auto gen_ctx_read = constrained_read_from_memory( + call_ptr, clk, resolved_gen_ctx_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IB); main_trace.push_back(Row{ .main_clk = clk, .main_ia = input_size_read.val, .main_ib = gen_ctx_read.val, + .main_ind_addr_a = FF(input_size_read.indirect_address), + .main_ind_addr_b = FF(gen_ctx_read.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(input_size_offset), - .main_mem_addr_b = FF(gen_ctx_offset), + .main_mem_addr_a = FF(input_size_read.direct_address), + .main_mem_addr_b = FF(gen_ctx_read.direct_address), .main_pc = FF(pc), .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), - .main_w_in_tag = 
FF(static_cast(AvmMemoryTag::U32)), + .main_sel_resolve_ind_addr_a = FF(static_cast(input_size_read.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(gen_ctx_read.is_indirect)), }); clk++; std::vector inputs; uint32_t num_main_rows = read_slice_to_memory(call_ptr, clk, - direct_src_offset, + resolved_input_offset, AvmMemoryTag::FF, AvmMemoryTag::FF, FF(internal_return_ptr), @@ -3525,7 +3236,7 @@ void AvmTraceBuilder::op_pedersen_hash(uint8_t indirect, clk += num_main_rows; FF output = pedersen_trace_builder.pedersen_hash(inputs, uint32_t(gen_ctx_read.val), pedersen_clk); write_slice_to_memory( - call_ptr, clk, output_offset, AvmMemoryTag::FF, AvmMemoryTag::FF, FF(internal_return_ptr), { output }); + call_ptr, clk, resolved_output_offset, AvmMemoryTag::FF, AvmMemoryTag::FF, FF(internal_return_ptr), { output }); } void AvmTraceBuilder::op_ec_add(uint8_t indirect, @@ -3538,16 +3249,31 @@ void AvmTraceBuilder::op_ec_add(uint8_t indirect, uint32_t output_offset) { auto clk = static_cast(main_trace.size()) + 1; + auto [resolved_lhs_x_offset, + resolved_lhs_y_offset, + resolved_lhs_is_inf_offset, + resolved_rhs_x_offset, + resolved_rhs_y_offset, + resolved_rhs_is_inf_offset, + resolved_output_offset] = unpack_indirects<7>(indirect, + { lhs_x_offset, + lhs_y_offset, + lhs_is_inf_offset, + rhs_x_offset, + rhs_y_offset, + rhs_is_inf_offset, + output_offset }); // Load lhs point - auto lhs_x_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, lhs_x_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); - auto lhs_y_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, lhs_y_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); + auto lhs_x_read = constrained_read_from_memory( + call_ptr, clk, resolved_lhs_x_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); + auto lhs_y_read = constrained_read_from_memory( + call_ptr, clk, resolved_lhs_y_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, 
IntermRegister::IB); // Load rhs point - auto rhs_x_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IC, rhs_x_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); - auto rhs_y_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::ID, rhs_y_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); + auto rhs_x_read = constrained_read_from_memory( + call_ptr, clk, resolved_rhs_x_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IC); + auto rhs_y_read = constrained_read_from_memory( + call_ptr, clk, resolved_rhs_y_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::ID); + bool tag_match = lhs_x_read.tag_match && lhs_y_read.tag_match && rhs_x_read.tag_match && rhs_y_read.tag_match; // Save this clk time to line up with the gadget op. auto ecc_clk = clk; @@ -3557,24 +3283,34 @@ void AvmTraceBuilder::op_ec_add(uint8_t indirect, .main_ib = lhs_y_read.val, .main_ic = rhs_x_read.val, .main_id = rhs_y_read.val, + .main_ind_addr_a = FF(lhs_x_read.indirect_address), + .main_ind_addr_b = FF(lhs_y_read.indirect_address), + .main_ind_addr_c = FF(rhs_x_read.indirect_address), + .main_ind_addr_d = FF(rhs_y_read.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(lhs_x_offset), - .main_mem_addr_b = FF(lhs_y_offset), - .main_mem_addr_c = FF(rhs_x_offset), - .main_mem_addr_d = FF(rhs_y_offset), + .main_mem_addr_a = FF(lhs_x_read.direct_address), + .main_mem_addr_b = FF(lhs_y_read.direct_address), + .main_mem_addr_c = FF(rhs_x_read.direct_address), + .main_mem_addr_d = FF(rhs_y_read.direct_address), .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), .main_sel_mem_op_c = FF(1), .main_sel_mem_op_d = FF(1), + .main_sel_resolve_ind_addr_a = FF(static_cast(lhs_x_read.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(lhs_y_read.is_indirect)), + .main_sel_resolve_ind_addr_c = 
FF(static_cast(rhs_x_read.is_indirect)), + .main_sel_resolve_ind_addr_d = FF(static_cast(rhs_y_read.is_indirect)), + .main_tag_err = FF(static_cast(!tag_match)), }); clk++; // Load the infinite bools separately since they have a different memory tag - auto lhs_is_inf_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, lhs_is_inf_offset, AvmMemoryTag::U8, AvmMemoryTag::U0); - auto rhs_is_inf_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, rhs_is_inf_offset, AvmMemoryTag::U8, AvmMemoryTag::U0); + auto lhs_is_inf_read = constrained_read_from_memory( + call_ptr, clk, resolved_lhs_is_inf_offset, AvmMemoryTag::U8, AvmMemoryTag::U0, IntermRegister::IA); + auto rhs_is_inf_read = constrained_read_from_memory( + call_ptr, clk, resolved_rhs_is_inf_offset, AvmMemoryTag::U8, AvmMemoryTag::U0, IntermRegister::IB); + bool tag_match_inf = lhs_is_inf_read.tag_match && rhs_is_inf_read.tag_match; main_trace.push_back(Row{ .main_clk = clk, @@ -3587,6 +3323,7 @@ void AvmTraceBuilder::op_ec_add(uint8_t indirect, .main_r_in_tag = FF(static_cast(AvmMemoryTag::U8)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), + .main_tag_err = FF(static_cast(!tag_match_inf)), }); clk++; grumpkin::g1::affine_element lhs = uint8_t(lhs_is_inf_read.val) == 1 @@ -3596,47 +3333,49 @@ void AvmTraceBuilder::op_ec_add(uint8_t indirect, ? 
grumpkin::g1::affine_element::infinity() : grumpkin::g1::affine_element{ rhs_x_read.val, rhs_y_read.val }; auto result = ecc_trace_builder.embedded_curve_add(lhs, rhs, ecc_clk); - // Write across two lines since we have different mem_tags - uint32_t direct_output_offset = output_offset; - bool indirect_flag_output = is_operand_indirect(indirect, 6); - if (indirect_flag_output) { - auto read_ind_output = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, output_offset); - direct_output_offset = uint32_t(read_ind_output.val); - } + // Write point coordinates + auto write_x = constrained_write_to_memory( + call_ptr, clk, resolved_output_offset, result.x, AvmMemoryTag::U0, AvmMemoryTag::FF, IntermRegister::IA); + // Write y (directly) using the write_x.direct_address + 1 mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IA, direct_output_offset, result.x, AvmMemoryTag::U0, AvmMemoryTag::FF); - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IB, direct_output_offset + 1, result.y, AvmMemoryTag::U0, AvmMemoryTag::FF); + call_ptr, clk, IntermRegister::IB, write_x.direct_address + 1, result.y, AvmMemoryTag::U0, AvmMemoryTag::FF); main_trace.push_back(Row{ .main_clk = clk, .main_ia = result.x, .main_ib = result.y, - .main_ind_addr_a = indirect_flag_output ? 
FF(output_offset) : FF(0), + .main_ind_addr_a = FF(write_x.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_output_offset), - .main_mem_addr_b = FF(direct_output_offset + 1), + .main_mem_addr_a = FF(write_x.direct_address), + .main_mem_addr_b = FF(write_x.direct_address + 1), .main_pc = FF(pc), .main_rwa = FF(1), .main_rwb = FF(1), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_flag_output)), + .main_sel_resolve_ind_addr_a = FF(static_cast(write_x.is_indirect)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), }); clk++; - write_slice_to_memory(call_ptr, - clk, - direct_output_offset + 2, - AvmMemoryTag::U8, - AvmMemoryTag::U8, - FF(internal_return_ptr), - { result.is_point_at_infinity() }); -} + mem_trace_builder.write_into_memory(call_ptr, + clk, + IntermRegister::IA, + write_x.direct_address + 2, + result.is_point_at_infinity(), + AvmMemoryTag::U0, + AvmMemoryTag::U8); -// This function is a bit overloaded with logic around reconstructing points and scalars that could probably be moved to -// the gadget at some stage (although this is another temporary gadget..) 
+ main_trace.push_back(Row{ + .main_clk = clk, + .main_ia = result.is_point_at_infinity(), + .main_internal_return_ptr = FF(internal_return_ptr), + .main_mem_addr_a = FF(write_x.direct_address + 2), + .main_pc = FF(pc), + .main_rwa = FF(1), + .main_sel_mem_op_a = FF(1), + .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), + }); +} void AvmTraceBuilder::op_variable_msm(uint8_t indirect, uint32_t points_offset, uint32_t scalars_offset, @@ -3644,60 +3383,12 @@ void AvmTraceBuilder::op_variable_msm(uint8_t indirect, uint32_t point_length_offset) { auto clk = static_cast(main_trace.size()) + 1; - // This will all get refactored as part of the indirection refactor - bool tag_match = true; - uint32_t direct_points_offset = points_offset; - uint32_t direct_scalars_offset = scalars_offset; - uint32_t direct_output_offset = output_offset; - // Resolve the indirects - bool indirect_points_flag = is_operand_indirect(indirect, 0); - bool indirect_scalars_flag = is_operand_indirect(indirect, 1); - bool indirect_output_flag = is_operand_indirect(indirect, 2); - - // Read in the points first - if (indirect_points_flag) { - auto read_ind_a = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, points_offset); - direct_points_offset = uint32_t(read_ind_a.val); - tag_match = tag_match && read_ind_a.tag_match; - } - - auto read_points = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_points_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); + auto [resolved_points_offset, resolved_scalars_offset, resolved_output_offset] = + unpack_indirects<3>(indirect, { points_offset, scalars_offset, output_offset }); - // Read in the scalars - if (indirect_scalars_flag) { - auto read_ind_b = mem_trace_builder.indirect_read_and_load_from_memory( - call_ptr, clk, IndirectRegister::IND_B, scalars_offset); - direct_scalars_offset = uint32_t(read_ind_b.val); - tag_match = tag_match && read_ind_b.tag_match; - } - auto 
read_scalars = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, direct_scalars_offset, AvmMemoryTag::FF, AvmMemoryTag::U0); - - // In the refactor we will have the read_slice function handle indirects as well - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = read_points.val, - .main_ib = read_scalars.val, - .main_ind_addr_a = indirect_points_flag ? FF(points_offset) : FF(0), - .main_ind_addr_b = indirect_scalars_flag ? FF(scalars_offset) : FF(0), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_points_offset), - .main_mem_addr_b = FF(direct_scalars_offset), - .main_pc = FF(pc++), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_points_flag)), - .main_sel_resolve_ind_addr_b = FF(static_cast(indirect_scalars_flag)), - .main_tag_err = FF(static_cast(!tag_match)), - }); - clk++; - - // Read the points length (different row since it has a different memory tag) auto points_length_read = mem_trace_builder.read_and_load_from_memory( call_ptr, clk, IntermRegister::IA, point_length_offset, AvmMemoryTag::U32, AvmMemoryTag::U0); + main_trace.push_back(Row{ .main_clk = clk, .main_ia = points_length_read.val, @@ -3716,97 +3407,69 @@ void AvmTraceBuilder::op_variable_msm(uint8_t indirect, std::vector points_coords_vec; std::vector points_inf_vec; std::vector scalars_vec; - // Read the coordinates first, +2 since we read 2 points per row - for (uint32_t i = 0; i < num_points; i += 2) { - // We can read up to 4 coordinates per row (x1,y1,x2,y2) - // Each pair of coordinates are separated by 3 memory addressess - auto point_x1_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, direct_points_offset + i * 3, AvmMemoryTag::FF, AvmMemoryTag::U0); + AddressWithMode coords_offset = resolved_points_offset; + // Loading the points is a bit more complex 
since we need to read the coordinates and the infinity flags separately + // The current circuit constraints does not allow for multiple memory tags to be loaded from within the same row. + // If we could we would be able to replace the following loops with a single read_slice_to_memory call. + // For now we load the coordinates first and then the infinity flags, and finally splice them together when creating + // the points + + // Read the coordinates first, +2 since we read 2 points per row, the first load could be indirect + for (uint32_t i = 0; i < num_points; i++) { + auto point_x1_read = constrained_read_from_memory( + call_ptr, clk, coords_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); auto point_y1_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, direct_points_offset + i * 3 + 1, AvmMemoryTag::FF, AvmMemoryTag::U0); - auto point_x2_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IC, direct_points_offset + (i + 1) * 3, AvmMemoryTag::FF, AvmMemoryTag::U0); - auto point_y2_read = mem_trace_builder.read_and_load_from_memory(call_ptr, - clk, - IntermRegister::ID, - direct_points_offset + (i + 1) * 3 + 1, - AvmMemoryTag::FF, - AvmMemoryTag::U0); - bool tag_match = - point_x1_read.tag_match && point_y1_read.tag_match && point_x2_read.tag_match && point_y2_read.tag_match; - points_coords_vec.insert(points_coords_vec.end(), - { point_x1_read.val, point_y1_read.val, point_x2_read.val, point_y2_read.val }); + call_ptr, clk, IntermRegister::IB, point_x1_read.direct_address + 1, AvmMemoryTag::FF, AvmMemoryTag::U0); + + bool tag_match = point_x1_read.tag_match && point_y1_read.tag_match; + points_coords_vec.insert(points_coords_vec.end(), { point_x1_read.val, point_y1_read.val }); main_trace.push_back(Row{ .main_clk = clk, .main_ia = point_x1_read.val, .main_ib = point_y1_read.val, - .main_ic = point_x2_read.val, - .main_id = point_y2_read.val, + .main_ind_addr_a = 
FF(point_x1_read.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_points_offset + i * 3), - .main_mem_addr_b = FF(direct_points_offset + i * 3 + 1), - .main_mem_addr_c = FF(direct_points_offset + (i + 1) * 3), - .main_mem_addr_d = FF(direct_points_offset + (i + 1) * 3 + 1), + .main_mem_addr_a = FF(point_x1_read.direct_address), + .main_mem_addr_b = FF(point_x1_read.direct_address + 1), .main_pc = FF(pc), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), - .main_sel_mem_op_c = FF(1), - .main_sel_mem_op_d = FF(1), + .main_sel_resolve_ind_addr_a = FF(static_cast(point_x1_read.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), }); clk++; + // Update the coords offset to read the next point (subsequent points are always direct and separated by 3 + // addresses) + coords_offset = { AddressingMode::DIRECT, point_x1_read.direct_address + 3 }; } - // Read the Infinities flags, +4 since we read 4 points row - for (uint32_t i = 0; i < num_points; i += 4) { - // We can read up to 4 infinities per row - // Each infinity flag is separated by 3 memory addressess - uint32_t offset = direct_points_offset + i * 3 + 2; - auto point_inf1_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, offset, AvmMemoryTag::U8, AvmMemoryTag::U0); - offset += 3; - - auto point_inf2_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, offset, AvmMemoryTag::U8, AvmMemoryTag::U0); - offset += 3; - - auto point_inf3_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IC, offset, AvmMemoryTag::U8, AvmMemoryTag::U0); - offset += 3; - - auto point_inf4_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::ID, offset, AvmMemoryTag::U8, AvmMemoryTag::U0); - - points_inf_vec.insert(points_inf_vec.end(), - { point_inf1_read.val, point_inf2_read.val, 
point_inf3_read.val, point_inf4_read.val }); - bool tag_match = point_inf1_read.tag_match && point_inf2_read.tag_match && point_inf3_read.tag_match && - point_inf4_read.tag_match; + uint32_t inf_direct_address = resolved_points_offset.offset + 2; + // Read the Infinities flags + for (uint32_t i = 0; i < num_points; i++) { + auto point_inf_read = mem_trace_builder.read_and_load_from_memory( + call_ptr, clk, IntermRegister::IA, inf_direct_address, AvmMemoryTag::U8, AvmMemoryTag::U0); + points_inf_vec.emplace_back(point_inf_read.val); + main_trace.push_back(Row{ .main_clk = clk, - .main_ia = point_inf1_read.val, - .main_ib = point_inf2_read.val, - .main_ic = point_inf3_read.val, - .main_id = point_inf4_read.val, + .main_ia = point_inf_read.val, .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_points_offset + i * 3 + 2), - .main_mem_addr_b = FF(direct_points_offset + (i + 1) * 3 + 2), - .main_mem_addr_c = FF(direct_points_offset + (i + 2) * 3 + 2), - .main_mem_addr_d = FF(direct_points_offset + (i + 3) * 3 + 2), + .main_mem_addr_a = FF(inf_direct_address), .main_pc = FF(pc), .main_r_in_tag = FF(static_cast(AvmMemoryTag::U8)), .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_mem_op_c = FF(1), - .main_sel_mem_op_d = FF(1), - .main_tag_err = FF(static_cast(!tag_match)), + .main_tag_err = FF(static_cast(!point_inf_read.tag_match)), }); clk++; + // Update the inf offset to read the next point (subsequent points are always direct and separated by 3 + inf_direct_address += 3; } // Scalar read length is num_points* 2 since scalars are stored as lo and hi limbs uint32_t scalar_read_length = num_points * 2; + // Scalars are easy to read since they are stored as [lo1, hi1, lo2, hi2, ...] with the types [FF, FF, FF,FF, ...] 
auto num_scalar_rows = read_slice_to_memory(call_ptr, clk, - direct_scalars_offset, + resolved_scalars_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, FF(internal_return_ptr), @@ -3838,29 +3501,25 @@ void AvmTraceBuilder::op_variable_msm(uint8_t indirect, // Perform the variable MSM - could just put the logic in here since there are no constraints. auto result = ecc_trace_builder.variable_msm(points, scalars, clk); // Write the result back to memory [x, y, inf] with tags [FF, FF, U8] - if (indirect_output_flag) { - auto read_ind_a = - mem_trace_builder.indirect_read_and_load_from_memory(call_ptr, clk, IndirectRegister::IND_A, output_offset); - direct_output_offset = uint32_t(read_ind_a.val); - } - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IA, direct_output_offset, result.x, AvmMemoryTag::U0, AvmMemoryTag::FF); + auto write_x = constrained_write_to_memory( + call_ptr, clk, resolved_output_offset, result.x, AvmMemoryTag::U0, AvmMemoryTag::FF, IntermRegister::IA); mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IB, direct_output_offset + 1, result.y, AvmMemoryTag::U0, AvmMemoryTag::FF); + call_ptr, clk, IntermRegister::IB, write_x.direct_address + 1, result.y, AvmMemoryTag::U0, AvmMemoryTag::FF); + main_trace.push_back(Row{ .main_clk = clk, .main_ia = result.x, .main_ib = result.y, - .main_ind_addr_a = indirect_output_flag ? 
FF(output_offset) : FF(0), + .main_ind_addr_a = FF(write_x.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_output_offset), - .main_mem_addr_b = FF(direct_output_offset + 1), + .main_mem_addr_a = FF(write_x.direct_address), + .main_mem_addr_b = FF(write_x.direct_address + 1), .main_pc = FF(pc), .main_rwa = FF(1), .main_rwb = FF(1), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(indirect_output_flag)), + .main_sel_resolve_ind_addr_a = FF(static_cast(write_x.is_indirect)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), }); clk++; @@ -3868,7 +3527,7 @@ void AvmTraceBuilder::op_variable_msm(uint8_t indirect, mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IA, - direct_output_offset + 2, + write_x.direct_address + 2, result.is_point_at_infinity(), AvmMemoryTag::U0, AvmMemoryTag::U8); @@ -3876,12 +3535,14 @@ void AvmTraceBuilder::op_variable_msm(uint8_t indirect, .main_clk = clk, .main_ia = static_cast(result.is_point_at_infinity()), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(direct_output_offset + 2), + .main_mem_addr_a = FF(write_x.direct_address + 2), .main_pc = FF(pc), .main_rwa = FF(1), .main_sel_mem_op_a = FF(1), .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), }); + + pc++; } // Finalise Lookup Counts // @@ -4036,6 +3697,7 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c auto pedersen_trace = pedersen_trace_builder.finalize(); auto bin_trace = bin_trace_builder.finalize(); auto gas_trace = gas_trace_builder.finalize(); + const auto& fixed_gas_table = FixedGasTable::get(); size_t mem_trace_size = mem_trace.size(); size_t main_trace_size = main_trace.size(); size_t alu_trace_size = alu_trace.size(); @@ -4057,11 +3719,11 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c // 2**16 long) size_t const lookup_table_size = (bin_trace_size > 0 && 
range_check_required) ? 3 * (1 << 16) : 0; size_t const range_check_size = range_check_required ? UINT16_MAX + 1 : 0; - std::vector trace_sizes = { mem_trace_size, main_trace_size, alu_trace_size, - range_check_size, conv_trace_size, lookup_table_size, - sha256_trace_size, poseidon2_trace_size, pedersen_trace_size, - gas_trace_size + 1, KERNEL_INPUTS_LENGTH, KERNEL_OUTPUTS_LENGTH, - min_trace_size, GAS_COST_TABLE.size() }; + std::vector trace_sizes = { mem_trace_size, main_trace_size, alu_trace_size, + range_check_size, conv_trace_size, lookup_table_size, + sha256_trace_size, poseidon2_trace_size, pedersen_trace_size, + gas_trace_size + 1, KERNEL_INPUTS_LENGTH, KERNEL_OUTPUTS_LENGTH, + min_trace_size, fixed_gas_table.size() }; auto trace_size = std::max_element(trace_sizes.begin(), trace_sizes.end()); // We only need to pad with zeroes to the size to the largest trace here, pow_2 padding is handled in the @@ -4569,13 +4231,16 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c r.incl_main_tag_err_counts = mem_trace_builder.m_tag_err_lookup_counts[static_cast(counter)]; if (counter <= UINT8_MAX) { - r.lookup_u8_0_counts = alu_trace_builder.u8_range_chk_counters[0][static_cast(counter)]; - r.lookup_u8_1_counts = alu_trace_builder.u8_range_chk_counters[1][static_cast(counter)]; - r.lookup_pow_2_0_counts = alu_trace_builder.u8_pow_2_counters[0][static_cast(counter)]; - r.lookup_pow_2_1_counts = alu_trace_builder.u8_pow_2_counters[1][static_cast(counter)]; - r.lookup_mem_rng_chk_hi_counts = mem_rng_check_hi_counts[static_cast(counter)]; + auto counter_u8 = static_cast(counter); + r.lookup_u8_0_counts = alu_trace_builder.u8_range_chk_counters[0][counter_u8]; + r.lookup_u8_1_counts = alu_trace_builder.u8_range_chk_counters[1][counter_u8]; + r.lookup_pow_2_0_counts = alu_trace_builder.u8_pow_2_counters[0][counter_u8]; + r.lookup_pow_2_1_counts = alu_trace_builder.u8_pow_2_counters[1][counter_u8]; + r.lookup_mem_rng_chk_hi_counts = 
mem_rng_check_hi_counts[counter_u8]; r.main_sel_rng_8 = FF(1); - r.main_table_pow_2 = uint256_t(1) << uint256_t(counter); + + // Also merge the powers of 2 table. + merge_into(r, FixedPowersTable::get().at(counter)); } if (counter <= UINT16_MAX) { @@ -4624,12 +4289,12 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c } // Write the kernel trace into the main trace - // 1. The write offsets are constrained to be non changing over the entire trace, so we fill in the values until - // we + // 1. The write offsets are constrained to be non changing over the entire trace, so we fill in the values + // until we // hit an operation that changes one of the write_offsets (a relevant opcode) // 2. Upon hitting the clk of each kernel operation we copy the values into the main trace - // 3. When an increment is required, we increment the value in the next row, then continue the process until the - // end + // 3. When an increment is required, we increment the value in the next row, then continue the process until + // the end // 4. 
Whenever we hit the last row, we zero all write_offsets such that the shift relation will succeed std::vector kernel_trace = kernel_trace_builder.finalize(); size_t kernel_padding_main_trace_bottom = 1; @@ -4645,9 +4310,9 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c // Check the clock and iterate through the main trace until we hit the clock auto clk = src.clk; - // Until the next kernel changing instruction is encountered we set all of the values of the offset arrays - // to be the same as the previous row This satisfies the `offset' - (offset + operation_selector) = 0` - // constraints + // Until the next kernel changing instruction is encountered we set all of the values of the offset + // arrays to be the same as the previous row This satisfies the `offset' - (offset + operation_selector) + // = 0` constraints for (size_t j = kernel_padding_main_trace_bottom; j < clk; j++) { auto const& prev = main_trace.at(j); auto& dest = main_trace.at(j + 1); @@ -4722,7 +4387,8 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c Row const& prev = main_trace.at(i - 1); Row& dest = main_trace.at(i); - // Setting all of the counters to 0 after the IS_LAST check so we can satisfy the constraints until the end + // Setting all of the counters to 0 after the IS_LAST check so we can satisfy the constraints until the + // end if (i == old_trace_size) { dest.kernel_note_hash_exist_write_offset = 0; dest.kernel_emit_note_hash_write_offset = 0; @@ -4797,12 +4463,8 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c // Add the gas costs table to the main trace // For each opcode we write its l2 gas cost and da gas cost - for (auto const& [opcode, gas_entry] : GAS_COST_TABLE) { - auto& dest = main_trace.at(static_cast(opcode)); - - dest.gas_sel_gas_cost = FF(1); - dest.gas_l2_gas_fixed_table = gas_entry.l2_fixed_gas_cost; - dest.gas_da_gas_fixed_table = gas_entry.da_fixed_gas_cost; + for (size_t i = 
0; i < fixed_gas_table.size(); i++) { + merge_into(main_trace.at(i), fixed_gas_table.at(i)); } // Finalise gas left lookup counts diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp index b0d86f4c5da..3a6e13dbc06 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp @@ -21,6 +21,14 @@ namespace bb::avm_trace { using Row = bb::AvmFullRow; +enum class AddressingMode { + DIRECT, + INDIRECT, +}; +struct AddressWithMode { + AddressingMode mode; + uint32_t offset; +}; // This is the internal context that we keep along the lifecycle of bytecode execution // to iteratively build the whole trace. This is effectively performing witness generation. @@ -210,19 +218,16 @@ class AvmTraceBuilder { uint32_t output_offset, uint32_t point_length_offset); - private: - // Used for the standard indirect address resolution of three operands opcode. 
- struct IndirectThreeResolution { - bool tag_match = false; - uint32_t direct_a_offset; - uint32_t direct_b_offset; - uint32_t direct_c_offset; - - bool indirect_flag_a = false; - bool indirect_flag_b = false; - bool indirect_flag_c = false; + struct MemOp { + bool is_indirect; + uint32_t indirect_address; + uint32_t direct_address; + AvmMemoryTag tag; + bool tag_match; + FF val; }; + private: std::vector main_trace; AvmMemTraceBuilder mem_trace_builder; AvmAluTraceBuilder alu_trace_builder; @@ -249,7 +254,7 @@ class AvmTraceBuilder { * @return Row */ Row create_kernel_lookup_opcode( - bool indirect, uint32_t dst_offset, uint32_t selector, FF value, AvmMemoryTag w_tag); + uint8_t indirect, uint32_t dst_offset, uint32_t selector, FF value, AvmMemoryTag w_tag); /** * @brief Create a kernel output opcode object @@ -321,9 +326,6 @@ class AvmTraceBuilder { void finalise_mem_trace_lookup_counts(); - IndirectThreeResolution resolve_ind_three( - uint8_t space_id, uint32_t clk, uint8_t indirect, uint32_t a_offset, uint32_t b_offset, uint32_t c_offset); - uint32_t pc = 0; uint32_t internal_return_ptr = 0; // After a nested call, it should be initialized with MAX_SIZE_INTERNAL_STACK * call_ptr @@ -339,23 +341,37 @@ class AvmTraceBuilder { // Mapping of side effect counter -> value ExecutionHints execution_hints; + MemOp constrained_read_from_memory(uint8_t space_id, + uint32_t clk, + AddressWithMode addr, + AvmMemoryTag read_tag, + AvmMemoryTag write_tag, + IntermRegister reg); + MemOp constrained_write_to_memory(uint8_t space_id, + uint32_t clk, + AddressWithMode addr, + FF const& value, + AvmMemoryTag read_tag, + AvmMemoryTag write_tag, + IntermRegister reg); + // TODO(ilyas: #6383): Temporary way to bulk read slices template uint32_t read_slice_to_memory(uint8_t space_id, uint32_t clk, - uint32_t src_offset, + AddressWithMode addr, AvmMemoryTag r_tag, AvmMemoryTag w_tag, FF internal_return_ptr, size_t slice_len, std::vector& slice); - void write_slice_to_memory(uint8_t 
space_id, - uint32_t clk, - uint32_t dst_offset, - AvmMemoryTag r_tag, - AvmMemoryTag w_tag, - FF internal_return_ptr, - std::vector const& slice); + uint32_t write_slice_to_memory(uint8_t space_id, + uint32_t clk, + AddressWithMode addr, + AvmMemoryTag r_tag, + AvmMemoryTag w_tag, + FF internal_return_ptr, + std::vector const& slice); }; } // namespace bb::avm_trace diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_gas.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_gas.cpp new file mode 100644 index 00000000000..d27331a4811 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_gas.cpp @@ -0,0 +1,23 @@ +#include "barretenberg/vm/avm_trace/fixed_gas.hpp" + +namespace bb::avm_trace { + +FixedGasTable::FixedGasTable() +{ + for (int i = 0; i < static_cast(OpCode::LAST_OPCODE_SENTINEL); i++) { + table_rows.push_back(GasRow{ + .gas_da_gas_fixed_table = FF(2), + .gas_l2_gas_fixed_table = FF(10), + .gas_sel_gas_cost = FF(1), + }); + } +} + +// Singleton. 
+const FixedGasTable& FixedGasTable::get() +{ + static FixedGasTable table; + return table; +} + +} // namespace bb::avm_trace \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_gas.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_gas.hpp new file mode 100644 index 00000000000..15e7687c385 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_gas.hpp @@ -0,0 +1,36 @@ +#pragma once + +#include +#include + +#include "barretenberg/ecc/curves/bn254/fr.hpp" +#include "barretenberg/relations/generated/avm/gas.hpp" +#include "barretenberg/vm/avm_trace/avm_common.hpp" +#include "barretenberg/vm/avm_trace/avm_opcode.hpp" + +namespace bb::avm_trace { + +class FixedGasTable { + public: + using GasRow = bb::Avm_vm::GasRow; + + static const FixedGasTable& get(); + + size_t size() const { return table_rows.size(); } + const GasRow& at(size_t i) const { return table_rows.at(i); } + const GasRow& at(OpCode o) const { return at(static_cast(o)); } + + private: + FixedGasTable(); + + std::vector table_rows; +}; + +template void merge_into(DestRow& dest, FixedGasTable::GasRow const& src) +{ + dest.gas_sel_gas_cost = src.gas_sel_gas_cost; + dest.gas_l2_gas_fixed_table = src.gas_l2_gas_fixed_table; + dest.gas_da_gas_fixed_table = src.gas_da_gas_fixed_table; +} + +} // namespace bb::avm_trace \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_powers.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_powers.cpp new file mode 100644 index 00000000000..6ef8b8b4248 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_powers.cpp @@ -0,0 +1,25 @@ +#include "barretenberg/vm/avm_trace/fixed_powers.hpp" + +#include + +#include "barretenberg/numeric/uint256/uint256.hpp" + +namespace bb::avm_trace { + +FixedPowersTable::FixedPowersTable() +{ + for (uint64_t i = 0; i < 256; i++) { + table_rows.push_back(PowersRow{ + .powers_power_of_2 = FF(uint256_t(1) << 
uint256_t(i)), + }); + } +} + +// Singleton. +const FixedPowersTable& FixedPowersTable::get() +{ + static FixedPowersTable table; + return table; +} + +} // namespace bb::avm_trace \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_powers.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_powers.hpp new file mode 100644 index 00000000000..d19a1d81ecc --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/fixed_powers.hpp @@ -0,0 +1,32 @@ +#pragma once + +#include +#include + +#include "barretenberg/ecc/curves/bn254/fr.hpp" +#include "barretenberg/relations/generated/avm/powers.hpp" +#include "barretenberg/vm/avm_trace/avm_common.hpp" + +namespace bb::avm_trace { + +class FixedPowersTable { + public: + using PowersRow = bb::Avm_vm::PowersRow; + + static const FixedPowersTable& get(); + + size_t size() const { return table_rows.size(); } + const PowersRow& at(size_t i) const { return table_rows.at(i); } + + private: + FixedPowersTable(); + + std::vector table_rows; +}; + +template void merge_into(DestRow& dest, FixedPowersTable::PowersRow const& src) +{ + dest.powers_power_of_2 = src.powers_power_of_2; +} + +} // namespace bb::avm_trace \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp index 7d58df8071a..448bf350a0e 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp @@ -261,7 +261,6 @@ template std::vector AvmFullRow::names() "main_sel_rng_16", "main_sel_rng_8", "main_space_id", - "main_table_pow_2", "main_tag_err", "main_w_in_tag", "mem_addr", @@ -303,6 +302,7 @@ template std::vector AvmFullRow::names() "poseidon2_input", "poseidon2_output", "poseidon2_sel_poseidon_perm", + "powers_power_of_2", "sha256_clk", "sha256_input", "sha256_output", @@ -541,38 +541,38 @@ 
template std::ostream& operator<<(std::ostream& os, AvmFullRow << field_to_string(row.main_sel_resolve_ind_addr_c) << "," << field_to_string(row.main_sel_resolve_ind_addr_d) << "," << field_to_string(row.main_sel_rng_16) << "," << field_to_string(row.main_sel_rng_8) << "," << field_to_string(row.main_space_id) << "," - << field_to_string(row.main_table_pow_2) << "," << field_to_string(row.main_tag_err) << "," - << field_to_string(row.main_w_in_tag) << "," << field_to_string(row.mem_addr) << "," - << field_to_string(row.mem_clk) << "," << field_to_string(row.mem_diff_hi) << "," - << field_to_string(row.mem_diff_lo) << "," << field_to_string(row.mem_diff_mid) << "," - << field_to_string(row.mem_glob_addr) << "," << field_to_string(row.mem_last) << "," - << field_to_string(row.mem_lastAccess) << "," << field_to_string(row.mem_one_min_inv) << "," - << field_to_string(row.mem_r_in_tag) << "," << field_to_string(row.mem_rw) << "," - << field_to_string(row.mem_sel_mem) << "," << field_to_string(row.mem_sel_mov_ia_to_ic) << "," - << field_to_string(row.mem_sel_mov_ib_to_ic) << "," << field_to_string(row.mem_sel_op_a) << "," - << field_to_string(row.mem_sel_op_b) << "," << field_to_string(row.mem_sel_op_c) << "," - << field_to_string(row.mem_sel_op_cmov) << "," << field_to_string(row.mem_sel_op_d) << "," - << field_to_string(row.mem_sel_resolve_ind_addr_a) << "," << field_to_string(row.mem_sel_resolve_ind_addr_b) - << "," << field_to_string(row.mem_sel_resolve_ind_addr_c) << "," - << field_to_string(row.mem_sel_resolve_ind_addr_d) << "," << field_to_string(row.mem_sel_rng_chk) << "," - << field_to_string(row.mem_skip_check_tag) << "," << field_to_string(row.mem_space_id) << "," + << field_to_string(row.main_tag_err) << "," << field_to_string(row.main_w_in_tag) << "," + << field_to_string(row.mem_addr) << "," << field_to_string(row.mem_clk) << "," + << field_to_string(row.mem_diff_hi) << "," << field_to_string(row.mem_diff_lo) << "," + << field_to_string(row.mem_diff_mid) << 
"," << field_to_string(row.mem_glob_addr) << "," + << field_to_string(row.mem_last) << "," << field_to_string(row.mem_lastAccess) << "," + << field_to_string(row.mem_one_min_inv) << "," << field_to_string(row.mem_r_in_tag) << "," + << field_to_string(row.mem_rw) << "," << field_to_string(row.mem_sel_mem) << "," + << field_to_string(row.mem_sel_mov_ia_to_ic) << "," << field_to_string(row.mem_sel_mov_ib_to_ic) << "," + << field_to_string(row.mem_sel_op_a) << "," << field_to_string(row.mem_sel_op_b) << "," + << field_to_string(row.mem_sel_op_c) << "," << field_to_string(row.mem_sel_op_cmov) << "," + << field_to_string(row.mem_sel_op_d) << "," << field_to_string(row.mem_sel_resolve_ind_addr_a) << "," + << field_to_string(row.mem_sel_resolve_ind_addr_b) << "," << field_to_string(row.mem_sel_resolve_ind_addr_c) + << "," << field_to_string(row.mem_sel_resolve_ind_addr_d) << "," << field_to_string(row.mem_sel_rng_chk) + << "," << field_to_string(row.mem_skip_check_tag) << "," << field_to_string(row.mem_space_id) << "," << field_to_string(row.mem_tag) << "," << field_to_string(row.mem_tag_err) << "," << field_to_string(row.mem_tsp) << "," << field_to_string(row.mem_val) << "," << field_to_string(row.mem_w_in_tag) << "," << field_to_string(row.pedersen_clk) << "," << field_to_string(row.pedersen_input) << "," << field_to_string(row.pedersen_output) << "," << field_to_string(row.pedersen_sel_pedersen) << "," << field_to_string(row.poseidon2_clk) << "," << field_to_string(row.poseidon2_input) << "," << field_to_string(row.poseidon2_output) << "," - << field_to_string(row.poseidon2_sel_poseidon_perm) << "," << field_to_string(row.sha256_clk) << "," - << field_to_string(row.sha256_input) << "," << field_to_string(row.sha256_output) << "," - << field_to_string(row.sha256_sel_sha256_compression) << "," << field_to_string(row.sha256_state) << "," - << field_to_string(row.perm_main_alu) << "," << field_to_string(row.perm_main_bin) << "," - << field_to_string(row.perm_main_conv) << 
"," << field_to_string(row.perm_main_pos2_perm) << "," - << field_to_string(row.perm_main_pedersen) << "," << field_to_string(row.perm_main_mem_a) << "," - << field_to_string(row.perm_main_mem_b) << "," << field_to_string(row.perm_main_mem_c) << "," - << field_to_string(row.perm_main_mem_d) << "," << field_to_string(row.perm_main_mem_ind_addr_a) << "," - << field_to_string(row.perm_main_mem_ind_addr_b) << "," << field_to_string(row.perm_main_mem_ind_addr_c) - << "," << field_to_string(row.perm_main_mem_ind_addr_d) << "," << field_to_string(row.lookup_byte_lengths) - << "," << field_to_string(row.lookup_byte_operations) << "," << field_to_string(row.lookup_opcode_gas) << "," + << field_to_string(row.poseidon2_sel_poseidon_perm) << "," << field_to_string(row.powers_power_of_2) << "," + << field_to_string(row.sha256_clk) << "," << field_to_string(row.sha256_input) << "," + << field_to_string(row.sha256_output) << "," << field_to_string(row.sha256_sel_sha256_compression) << "," + << field_to_string(row.sha256_state) << "," << field_to_string(row.perm_main_alu) << "," + << field_to_string(row.perm_main_bin) << "," << field_to_string(row.perm_main_conv) << "," + << field_to_string(row.perm_main_pos2_perm) << "," << field_to_string(row.perm_main_pedersen) << "," + << field_to_string(row.perm_main_mem_a) << "," << field_to_string(row.perm_main_mem_b) << "," + << field_to_string(row.perm_main_mem_c) << "," << field_to_string(row.perm_main_mem_d) << "," + << field_to_string(row.perm_main_mem_ind_addr_a) << "," << field_to_string(row.perm_main_mem_ind_addr_b) + << "," << field_to_string(row.perm_main_mem_ind_addr_c) << "," + << field_to_string(row.perm_main_mem_ind_addr_d) << "," << field_to_string(row.lookup_byte_lengths) << "," + << field_to_string(row.lookup_byte_operations) << "," << field_to_string(row.lookup_opcode_gas) << "," << field_to_string(row.range_check_l2_gas_hi) << "," << field_to_string(row.range_check_l2_gas_lo) << "," << 
field_to_string(row.range_check_da_gas_hi) << "," << field_to_string(row.range_check_da_gas_lo) << "," << field_to_string(row.kernel_output_lookup) << "," << field_to_string(row.lookup_into_kernel) << "," diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp index b8f8a107901..9bbe9334c85 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp @@ -19,6 +19,7 @@ #include "barretenberg/relations/generated/avm/alu.hpp" #include "barretenberg/relations/generated/avm/binary.hpp" #include "barretenberg/relations/generated/avm/conversion.hpp" +#include "barretenberg/relations/generated/avm/gas.hpp" #include "barretenberg/relations/generated/avm/incl_main_tag_err.hpp" #include "barretenberg/relations/generated/avm/incl_mem_tag_err.hpp" #include "barretenberg/relations/generated/avm/keccakf1600.hpp" @@ -75,6 +76,7 @@ #include "barretenberg/relations/generated/avm/perm_main_pedersen.hpp" #include "barretenberg/relations/generated/avm/perm_main_pos2_perm.hpp" #include "barretenberg/relations/generated/avm/poseidon2.hpp" +#include "barretenberg/relations/generated/avm/powers.hpp" #include "barretenberg/relations/generated/avm/range_check_da_gas_hi.hpp" #include "barretenberg/relations/generated/avm/range_check_da_gas_lo.hpp" #include "barretenberg/relations/generated/avm/range_check_l2_gas_hi.hpp" @@ -328,7 +330,6 @@ template struct AvmFullRow { FF main_sel_rng_16{}; FF main_sel_rng_8{}; FF main_space_id{}; - FF main_table_pow_2{}; FF main_tag_err{}; FF main_w_in_tag{}; FF mem_addr{}; @@ -370,6 +371,7 @@ template struct AvmFullRow { FF poseidon2_input{}; FF poseidon2_output{}; FF poseidon2_sel_poseidon_perm{}; + FF powers_power_of_2{}; FF sha256_clk{}; FF sha256_input{}; FF sha256_output{}; @@ -812,7 +814,6 @@ class AvmCircuitBuilder { polys.main_sel_rng_16[i] = 
rows[i].main_sel_rng_16; polys.main_sel_rng_8[i] = rows[i].main_sel_rng_8; polys.main_space_id[i] = rows[i].main_space_id; - polys.main_table_pow_2[i] = rows[i].main_table_pow_2; polys.main_tag_err[i] = rows[i].main_tag_err; polys.main_w_in_tag[i] = rows[i].main_w_in_tag; polys.mem_addr[i] = rows[i].mem_addr; @@ -854,6 +855,7 @@ class AvmCircuitBuilder { polys.poseidon2_input[i] = rows[i].poseidon2_input; polys.poseidon2_output[i] = rows[i].poseidon2_output; polys.poseidon2_sel_poseidon_perm[i] = rows[i].poseidon2_sel_poseidon_perm; + polys.powers_power_of_2[i] = rows[i].powers_power_of_2; polys.sha256_clk[i] = rows[i].sha256_clk; polys.sha256_input[i] = rows[i].sha256_input; polys.sha256_output[i] = rows[i].sha256_output; @@ -1058,6 +1060,10 @@ class AvmCircuitBuilder { Avm_vm::get_relation_label_conversion); }; + auto gas = [=]() { + return evaluate_relation.template operator()>("gas", Avm_vm::get_relation_label_gas); + }; + auto keccakf1600 = [=]() { return evaluate_relation.template operator()>( "keccakf1600", Avm_vm::get_relation_label_keccakf1600); @@ -1086,6 +1092,11 @@ class AvmCircuitBuilder { Avm_vm::get_relation_label_poseidon2); }; + auto powers = [=]() { + return evaluate_relation.template operator()>("powers", + Avm_vm::get_relation_label_powers); + }; + auto sha256 = [=]() { return evaluate_relation.template operator()>("sha256", Avm_vm::get_relation_label_sha256); @@ -1331,6 +1342,8 @@ class AvmCircuitBuilder { relation_futures.emplace_back(std::async(std::launch::async, conversion)); + relation_futures.emplace_back(std::async(std::launch::async, gas)); + relation_futures.emplace_back(std::async(std::launch::async, keccakf1600)); relation_futures.emplace_back(std::async(std::launch::async, kernel)); @@ -1343,6 +1356,8 @@ class AvmCircuitBuilder { relation_futures.emplace_back(std::async(std::launch::async, poseidon2)); + relation_futures.emplace_back(std::async(std::launch::async, powers)); + 
relation_futures.emplace_back(std::async(std::launch::async, sha256)); relation_futures.emplace_back(std::async(std::launch::async, perm_main_alu)); @@ -1468,6 +1483,8 @@ class AvmCircuitBuilder { conversion(); + gas(); + keccakf1600(); kernel(); @@ -1480,6 +1497,8 @@ class AvmCircuitBuilder { poseidon2(); + powers(); + sha256(); perm_main_alu(); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp index 283812ece0f..e5729066c00 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp @@ -16,6 +16,7 @@ #include "barretenberg/relations/generated/avm/alu.hpp" #include "barretenberg/relations/generated/avm/binary.hpp" #include "barretenberg/relations/generated/avm/conversion.hpp" +#include "barretenberg/relations/generated/avm/gas.hpp" #include "barretenberg/relations/generated/avm/incl_main_tag_err.hpp" #include "barretenberg/relations/generated/avm/incl_mem_tag_err.hpp" #include "barretenberg/relations/generated/avm/keccakf1600.hpp" @@ -72,6 +73,7 @@ #include "barretenberg/relations/generated/avm/perm_main_pedersen.hpp" #include "barretenberg/relations/generated/avm/perm_main_pos2_perm.hpp" #include "barretenberg/relations/generated/avm/poseidon2.hpp" +#include "barretenberg/relations/generated/avm/powers.hpp" #include "barretenberg/relations/generated/avm/range_check_da_gas_hi.hpp" #include "barretenberg/relations/generated/avm/range_check_da_gas_lo.hpp" #include "barretenberg/relations/generated/avm/range_check_l2_gas_hi.hpp" @@ -162,12 +164,14 @@ class AvmFlavor { using Relations = std::tuple, Avm_vm::binary, Avm_vm::conversion, + Avm_vm::gas, Avm_vm::keccakf1600, Avm_vm::kernel, Avm_vm::main, Avm_vm::mem, Avm_vm::pedersen, Avm_vm::poseidon2, + Avm_vm::powers, Avm_vm::sha256, perm_main_alu_relation, perm_main_bin_relation, @@ -497,7 +501,6 @@ class AvmFlavor { main_sel_rng_16, main_sel_rng_8, 
main_space_id, - main_table_pow_2, main_tag_err, main_w_in_tag, mem_addr, @@ -539,6 +542,7 @@ class AvmFlavor { poseidon2_input, poseidon2_output, poseidon2_sel_poseidon_perm, + powers_power_of_2, sha256_clk, sha256_input, sha256_output, @@ -883,7 +887,6 @@ class AvmFlavor { main_sel_rng_16, main_sel_rng_8, main_space_id, - main_table_pow_2, main_tag_err, main_w_in_tag, mem_addr, @@ -925,6 +928,7 @@ class AvmFlavor { poseidon2_input, poseidon2_output, poseidon2_sel_poseidon_perm, + powers_power_of_2, sha256_clk, sha256_input, sha256_output, @@ -1274,7 +1278,6 @@ class AvmFlavor { main_sel_rng_16, main_sel_rng_8, main_space_id, - main_table_pow_2, main_tag_err, main_w_in_tag, mem_addr, @@ -1316,6 +1319,7 @@ class AvmFlavor { poseidon2_input, poseidon2_output, poseidon2_sel_poseidon_perm, + powers_power_of_2, sha256_clk, sha256_input, sha256_output, @@ -1727,7 +1731,6 @@ class AvmFlavor { main_sel_rng_16, main_sel_rng_8, main_space_id, - main_table_pow_2, main_tag_err, main_w_in_tag, mem_addr, @@ -1769,6 +1772,7 @@ class AvmFlavor { poseidon2_input, poseidon2_output, poseidon2_sel_poseidon_perm, + powers_power_of_2, sha256_clk, sha256_input, sha256_output, @@ -2180,7 +2184,6 @@ class AvmFlavor { main_sel_rng_16, main_sel_rng_8, main_space_id, - main_table_pow_2, main_tag_err, main_w_in_tag, mem_addr, @@ -2222,6 +2225,7 @@ class AvmFlavor { poseidon2_input, poseidon2_output, poseidon2_sel_poseidon_perm, + powers_power_of_2, sha256_clk, sha256_input, sha256_output, @@ -2989,7 +2993,6 @@ class AvmFlavor { Base::main_sel_rng_16 = "MAIN_SEL_RNG_16"; Base::main_sel_rng_8 = "MAIN_SEL_RNG_8"; Base::main_space_id = "MAIN_SPACE_ID"; - Base::main_table_pow_2 = "MAIN_TABLE_POW_2"; Base::main_tag_err = "MAIN_TAG_ERR"; Base::main_w_in_tag = "MAIN_W_IN_TAG"; Base::mem_addr = "MEM_ADDR"; @@ -3031,6 +3034,7 @@ class AvmFlavor { Base::poseidon2_input = "POSEIDON2_INPUT"; Base::poseidon2_output = "POSEIDON2_OUTPUT"; Base::poseidon2_sel_poseidon_perm = "POSEIDON2_SEL_POSEIDON_PERM"; + 
Base::powers_power_of_2 = "POWERS_POWER_OF_2"; Base::sha256_clk = "SHA256_CLK"; Base::sha256_input = "SHA256_INPUT"; Base::sha256_output = "SHA256_OUTPUT"; @@ -3391,7 +3395,6 @@ class AvmFlavor { Commitment main_sel_rng_16; Commitment main_sel_rng_8; Commitment main_space_id; - Commitment main_table_pow_2; Commitment main_tag_err; Commitment main_w_in_tag; Commitment mem_addr; @@ -3433,6 +3436,7 @@ class AvmFlavor { Commitment poseidon2_input; Commitment poseidon2_output; Commitment poseidon2_sel_poseidon_perm; + Commitment powers_power_of_2; Commitment sha256_clk; Commitment sha256_input; Commitment sha256_output; @@ -3805,7 +3809,6 @@ class AvmFlavor { main_sel_rng_16 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); main_sel_rng_8 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); main_space_id = deserialize_from_buffer(Transcript::proof_data, num_frs_read); - main_table_pow_2 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); main_tag_err = deserialize_from_buffer(Transcript::proof_data, num_frs_read); main_w_in_tag = deserialize_from_buffer(Transcript::proof_data, num_frs_read); mem_addr = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -3847,6 +3850,7 @@ class AvmFlavor { poseidon2_input = deserialize_from_buffer(Transcript::proof_data, num_frs_read); poseidon2_output = deserialize_from_buffer(Transcript::proof_data, num_frs_read); poseidon2_sel_poseidon_perm = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + powers_power_of_2 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); sha256_clk = deserialize_from_buffer(Transcript::proof_data, num_frs_read); sha256_input = deserialize_from_buffer(Transcript::proof_data, num_frs_read); sha256_output = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -4211,7 +4215,6 @@ class AvmFlavor { serialize_to_buffer(main_sel_rng_16, Transcript::proof_data); serialize_to_buffer(main_sel_rng_8, Transcript::proof_data); 
serialize_to_buffer(main_space_id, Transcript::proof_data); - serialize_to_buffer(main_table_pow_2, Transcript::proof_data); serialize_to_buffer(main_tag_err, Transcript::proof_data); serialize_to_buffer(main_w_in_tag, Transcript::proof_data); serialize_to_buffer(mem_addr, Transcript::proof_data); @@ -4253,6 +4256,7 @@ class AvmFlavor { serialize_to_buffer(poseidon2_input, Transcript::proof_data); serialize_to_buffer(poseidon2_output, Transcript::proof_data); serialize_to_buffer(poseidon2_sel_poseidon_perm, Transcript::proof_data); + serialize_to_buffer(powers_power_of_2, Transcript::proof_data); serialize_to_buffer(sha256_clk, Transcript::proof_data); serialize_to_buffer(sha256_input, Transcript::proof_data); serialize_to_buffer(sha256_output, Transcript::proof_data); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp index c6a7622629b..a10bc668d02 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp @@ -7,7 +7,6 @@ #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/plonk_honk_shared/library/grand_product_library.hpp" #include "barretenberg/polynomials/polynomial.hpp" -#include "barretenberg/relations/lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" @@ -312,7 +311,6 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.main_sel_rng_16 = commitment_key->commit(key->main_sel_rng_16); witness_commitments.main_sel_rng_8 = commitment_key->commit(key->main_sel_rng_8); witness_commitments.main_space_id = commitment_key->commit(key->main_space_id); - witness_commitments.main_table_pow_2 = commitment_key->commit(key->main_table_pow_2); witness_commitments.main_tag_err = commitment_key->commit(key->main_tag_err); witness_commitments.main_w_in_tag = 
commitment_key->commit(key->main_w_in_tag); witness_commitments.mem_addr = commitment_key->commit(key->mem_addr); @@ -354,6 +352,7 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.poseidon2_input = commitment_key->commit(key->poseidon2_input); witness_commitments.poseidon2_output = commitment_key->commit(key->poseidon2_output); witness_commitments.poseidon2_sel_poseidon_perm = commitment_key->commit(key->poseidon2_sel_poseidon_perm); + witness_commitments.powers_power_of_2 = commitment_key->commit(key->powers_power_of_2); witness_commitments.sha256_clk = commitment_key->commit(key->sha256_clk); witness_commitments.sha256_input = commitment_key->commit(key->sha256_input); witness_commitments.sha256_output = commitment_key->commit(key->sha256_output); @@ -694,7 +693,6 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.main_sel_rng_16, witness_commitments.main_sel_rng_16); transcript->send_to_verifier(commitment_labels.main_sel_rng_8, witness_commitments.main_sel_rng_8); transcript->send_to_verifier(commitment_labels.main_space_id, witness_commitments.main_space_id); - transcript->send_to_verifier(commitment_labels.main_table_pow_2, witness_commitments.main_table_pow_2); transcript->send_to_verifier(commitment_labels.main_tag_err, witness_commitments.main_tag_err); transcript->send_to_verifier(commitment_labels.main_w_in_tag, witness_commitments.main_w_in_tag); transcript->send_to_verifier(commitment_labels.mem_addr, witness_commitments.mem_addr); @@ -741,6 +739,7 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.poseidon2_output, witness_commitments.poseidon2_output); transcript->send_to_verifier(commitment_labels.poseidon2_sel_poseidon_perm, witness_commitments.poseidon2_sel_poseidon_perm); + transcript->send_to_verifier(commitment_labels.powers_power_of_2, witness_commitments.powers_power_of_2); 
transcript->send_to_verifier(commitment_labels.sha256_clk, witness_commitments.sha256_clk); transcript->send_to_verifier(commitment_labels.sha256_input, witness_commitments.sha256_input); transcript->send_to_verifier(commitment_labels.sha256_output, witness_commitments.sha256_output); @@ -962,15 +961,16 @@ void AvmProver::execute_relation_check_rounds() * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. * * */ -void AvmProver::execute_zeromorph_rounds() +void AvmProver::execute_pcs_rounds() { - ZeroMorph::prove(prover_polynomials.get_unshifted(), - prover_polynomials.get_to_be_shifted(), - sumcheck_output.claimed_evaluations.get_unshifted(), - sumcheck_output.claimed_evaluations.get_shifted(), - sumcheck_output.challenge, - commitment_key, - transcript); + auto prover_opening_claim = ZeroMorph::prove(prover_polynomials.get_unshifted(), + prover_polynomials.get_to_be_shifted(), + sumcheck_output.claimed_evaluations.get_unshifted(), + sumcheck_output.claimed_evaluations.get_shifted(), + sumcheck_output.challenge, + commitment_key, + transcript); + PCS::compute_opening_proof(commitment_key, prover_opening_claim, transcript); } HonkProof AvmProver::export_proof() @@ -996,7 +996,7 @@ HonkProof AvmProver::construct_proof() // Fiat-Shamir: rho, y, x, z // Execute Zeromorph multilinear PCS - execute_zeromorph_rounds(); + execute_pcs_rounds(); return export_proof(); } diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.hpp index 74d504446a3..3f95563ac06 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.hpp @@ -16,6 +16,8 @@ class AvmProver { using Flavor = AvmFlavor; using FF = Flavor::FF; using PCS = Flavor::PCS; + using Curve = Flavor::Curve; + using ZeroMorph = ZeroMorphProver_; using PCSCommitmentKey = Flavor::CommitmentKey; using 
ProvingKey = Flavor::ProvingKey; using Polynomial = Flavor::Polynomial; @@ -30,7 +32,7 @@ class AvmProver { void execute_wire_commitments_round(); void execute_log_derivative_inverse_round(); void execute_relation_check_rounds(); - void execute_zeromorph_rounds(); + void execute_pcs_rounds(); HonkProof export_proof(); HonkProof construct_proof(); @@ -55,8 +57,6 @@ class AvmProver { std::shared_ptr commitment_key; - using ZeroMorph = ZeroMorphProver_; - private: HonkProof proof; }; diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp index 24a8b6c6f0b..0a863144aad 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp @@ -52,7 +52,8 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vector; + // using Curve = Flavor::Curve; + // using ZeroMorph = ZeroMorphVerifier_; using VerifierCommitments = Flavor::VerifierCommitments; using CommitmentLabels = Flavor::CommitmentLabels; @@ -452,8 +453,6 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vectortemplate receive_from_prover(commitment_labels.main_sel_rng_16); commitments.main_sel_rng_8 = transcript->template receive_from_prover(commitment_labels.main_sel_rng_8); commitments.main_space_id = transcript->template receive_from_prover(commitment_labels.main_space_id); - commitments.main_table_pow_2 = - transcript->template receive_from_prover(commitment_labels.main_table_pow_2); commitments.main_tag_err = transcript->template receive_from_prover(commitment_labels.main_tag_err); commitments.main_w_in_tag = transcript->template receive_from_prover(commitment_labels.main_w_in_tag); commitments.mem_addr = transcript->template receive_from_prover(commitment_labels.mem_addr); @@ -510,6 +509,8 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vectortemplate 
receive_from_prover(commitment_labels.poseidon2_output); commitments.poseidon2_sel_poseidon_perm = transcript->template receive_from_prover(commitment_labels.poseidon2_sel_poseidon_perm); + commitments.powers_power_of_2 = + transcript->template receive_from_prover(commitment_labels.powers_power_of_2); commitments.sha256_clk = transcript->template receive_from_prover(commitment_labels.sha256_clk); commitments.sha256_input = transcript->template receive_from_prover(commitment_labels.sha256_input); commitments.sha256_output = transcript->template receive_from_prover(commitment_labels.sha256_output); @@ -739,13 +740,15 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vectorget_g1_identity(), // transcript); + // auto pairing_points = PCS::reduce_verify(opening_claim, transcript); // auto verified = pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); // return sumcheck_verified.value() && verified; return sumcheck_verified.value(); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp index 0db934ed1ed..fa7c78ab02f 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp @@ -205,20 +205,15 @@ size_t common_validate_div(std::vector const& trace, class AvmArithmeticTests : public ::testing::Test { public: - AvmTraceBuilder trace_builder; - VmPublicInputs public_inputs{}; - - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
- void SetUp() override + AvmArithmeticTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) { srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; + } + + VmPublicInputs public_inputs; + AvmTraceBuilder trace_builder; // Generate a trace with an EQ opcode operation. std::vector gen_trace_eq(uint128_t const& a, @@ -1877,7 +1872,7 @@ TEST_F(AvmArithmeticNegativeTestsFF, fDivisionWrongWInTag) // Test that error flag cannot be raised for a non-relevant operation such as // the addition, subtraction, multiplication. -TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag) +TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag1) { trace_builder.calldata_copy(0, 0, 3, 0, std::vector{ 37, 4, 11 }); @@ -1894,35 +1889,37 @@ TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag) row->main_op_err = FF(1); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), "SUBOP_ERROR_RELEVANT_OP"); +} - trace_builder = AvmTraceBuilder(public_inputs); - +TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag2) +{ trace_builder.calldata_copy(0, 0, 3, 0, std::vector{ 8, 4, 17 }); // Memory layout: [8,4,17,0,0,0,....] trace_builder.op_sub(0, 2, 0, 1, AvmMemoryTag::FF); // [8,9,17,0,0,0....] 
trace_builder.return_op(0, 0, 3); - trace = trace_builder.finalize(); + auto trace = trace_builder.finalize(); // Find the first row enabling the subtraction selector - row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_sub == FF(1); }); + auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_sub == FF(1); }); // Activate the operator error row->main_op_err = FF(1); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), "SUBOP_ERROR_RELEVANT_OP"); +} - trace_builder = AvmTraceBuilder(public_inputs); - +TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag3) +{ trace_builder.calldata_copy(0, 0, 3, 0, std::vector{ 5, 0, 20 }); // Memory layout: [5,0,20,0,0,0,....] trace_builder.op_mul(0, 2, 0, 1, AvmMemoryTag::FF); // [5,100,20,0,0,0....] trace_builder.return_op(0, 0, 3); - trace = trace_builder.finalize(); + auto trace = trace_builder.finalize(); // Find the first row enabling the multiplication selector - row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_mul == FF(1); }); + auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_mul == FF(1); }); // Activate the operator error row->main_op_err = FF(1); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp index ff1c4168616..482d397cbcc 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp @@ -346,20 +346,15 @@ std::vector gen_mutated_trace_bit(std::vector trace, class AvmBitwiseTests : public ::testing::Test { public: - AvmTraceBuilder trace_builder; - VmPublicInputs public_inputs{}; - - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
- void SetUp() override + AvmBitwiseTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) { srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; + } + + VmPublicInputs public_inputs; + AvmTraceBuilder trace_builder; std::vector gen_mutated_trace_not(FF const& a, FF const& c_mutated, avm_trace::AvmMemoryTag tag) { diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_cast.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_cast.test.cpp index 040a1937688..446215b1ec0 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_cast.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_cast.test.cpp @@ -13,25 +13,22 @@ using namespace bb::avm_trace; using namespace testing; class AvmCastTests : public ::testing::Test { - protected: - VmPublicInputs public_inputs{}; + public: + AvmCastTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) + { + srs::init_crs_factory("../srs_db/ignition"); + } + + VmPublicInputs public_inputs; AvmTraceBuilder trace_builder; + std::vector trace; size_t main_addr; size_t alu_addr; size_t mem_addr_c; - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
- void SetUp() override - { - srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; - void gen_trace( uint128_t const& a, uint32_t src_address, uint32_t dst_address, AvmMemoryTag src_tag, AvmMemoryTag dst_tag) { diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp index 38b028306f7..0f0f4ebac52 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp @@ -80,23 +80,20 @@ std::vector positive_op_lte_test_values = { std::vector mem_tag_arr{ { AvmMemoryTag::U8, AvmMemoryTag::U16, AvmMemoryTag::U32, AvmMemoryTag::U64, AvmMemoryTag::U128 } }; + class AvmCmpTests : public ::testing::Test { public: - AvmTraceBuilder trace_builder; - VmPublicInputs public_inputs{}; - - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
- void SetUp() override + AvmCmpTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) { srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; + } + + VmPublicInputs public_inputs; + AvmTraceBuilder trace_builder; }; + class AvmCmpTestsLT : public AvmCmpTests, public testing::WithParamInterface {}; class AvmCmpTestsLTE : public AvmCmpTests, public testing::WithParamInterface {}; diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_control_flow.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_control_flow.test.cpp index eb2b13781d1..616bdd5f126 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_control_flow.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_control_flow.test.cpp @@ -38,20 +38,15 @@ void validate_internal_return(Row const& row, uint32_t current_pc, uint32_t retu class AvmControlFlowTests : public ::testing::Test { public: - AvmTraceBuilder trace_builder; - VmPublicInputs public_inputs{}; - - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
- void SetUp() override + AvmControlFlowTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) { srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; + } + + VmPublicInputs public_inputs; + AvmTraceBuilder trace_builder; }; /****************************************************************************** diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_execution.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_execution.test.cpp index 696d0641ba3..10f971f1f70 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_execution.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_execution.test.cpp @@ -1,4 +1,9 @@ #include "barretenberg/vm/avm_trace/avm_execution.hpp" + +#include +#include +#include + #include "avm_common.test.hpp" #include "barretenberg/common/serialize.hpp" #include "barretenberg/common/utils.hpp" @@ -7,11 +12,10 @@ #include "barretenberg/vm/avm_trace/avm_kernel_trace.hpp" #include "barretenberg/vm/avm_trace/avm_opcode.hpp" #include "barretenberg/vm/avm_trace/aztec_constants.hpp" -#include -#include -#include +#include "barretenberg/vm/avm_trace/fixed_gas.hpp" namespace tests_avm { + using namespace bb; using namespace bb::avm_trace; using namespace testing; @@ -27,6 +31,8 @@ class AvmExecutionTests : public ::testing::Test { : public_inputs_vec(PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH){}; protected: + const FixedGasTable& GAS_COST_TABLE = FixedGasTable::get(); + // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
void SetUp() override { @@ -784,34 +790,12 @@ TEST_F(AvmExecutionTests, toRadixLeOpcode) auto bytecode = hex_to_bytes(bytecode_hex); auto instructions = Deserialization::parse(bytecode); - ASSERT_THAT(instructions, SizeIs(5)); - - // TORADIXLE - EXPECT_THAT(instructions.at(3), - AllOf(Field(&Instruction::op_code, OpCode::TORADIXLE), - Field(&Instruction::operands, - ElementsAre(VariantWith(3), - VariantWith(17), - VariantWith(21), - VariantWith(2), - VariantWith(256))))); - // Assign a vector that we will mutate internally in gen_trace to store the return values; std::vector returndata = std::vector(); auto trace = Execution::gen_trace(instructions, returndata, std::vector{ FF::modulus - FF(1) }, public_inputs_vec); // Find the first row enabling the TORADIXLE selector - auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_radix_le == 1; }); - EXPECT_EQ(row->main_ind_addr_a, 17); - EXPECT_EQ(row->main_ind_addr_b, 21); - EXPECT_EQ(row->main_mem_addr_a, 1); // Indirect(17) -> 1 - EXPECT_EQ(row->main_mem_addr_b, 5); // Indirect(21) -> 5 - EXPECT_EQ(row->main_ia, FF(FF::modulus - FF(1))); // Indirect(17) -> Direct(1) -> FF::modulus - FF(1) - EXPECT_EQ(row->main_ib, 0); // Indirect(21) -> 5 -> Unintialized memory - EXPECT_EQ(row->main_ic, 2); - EXPECT_EQ(row->main_id, 256); - // Expected output is bitwise decomposition of MODULUS - 1..could hardcode the result but it's a bit long std::vector expected_output; // Extract each bit. 
@@ -877,18 +861,6 @@ TEST_F(AvmExecutionTests, sha256CompressionOpcode) auto bytecode = hex_to_bytes(bytecode_hex); auto instructions = Deserialization::parse(bytecode); - // 8 SET for state + 16 SET for input + 3 SET for setting up indirects + 1 SHA256COMPRESSION + 1 RETURN - ASSERT_THAT(instructions, SizeIs(29)); - - // SHA256COMPRESSION - EXPECT_THAT(instructions.at(27), - AllOf(Field(&Instruction::op_code, OpCode::SHA256COMPRESSION), - Field(&Instruction::operands, - ElementsAre(VariantWith(7), - VariantWith(36), - VariantWith(34), - VariantWith(35))))); - // Assign a vector that we will mutate internally in gen_trace to store the return values; std::vector calldata = std::vector(); std::vector returndata = std::vector(); @@ -897,21 +869,8 @@ TEST_F(AvmExecutionTests, sha256CompressionOpcode) // 4091010797,3974542186]), std::vector expected_output = { 1862536192, 526086805, 2067405084, 593147560, 726610467, 813867028, 4091010797ULL, 3974542186ULL }; - auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec); - // Find the first row enabling the Sha256Compression selector - auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_sha256 == 1; }); - EXPECT_EQ(row->main_ind_addr_a, 34); - EXPECT_EQ(row->main_ind_addr_b, 35); - EXPECT_EQ(row->main_ind_addr_c, 36); - EXPECT_EQ(row->main_mem_addr_a, 1); // Indirect(34) -> 9 - EXPECT_EQ(row->main_mem_addr_b, 9); // Indirect(35) -> 9 - EXPECT_EQ(row->main_mem_addr_c, 256); // Indirect(36) -> 256 - EXPECT_EQ(row->main_ia, 1); // Trivially contains 0. 
(See avm_trace for explanation why) - EXPECT_EQ(row->main_ib, 1); // Contains first element of the state - EXPECT_EQ(row->main_ic, 0); // Contains first element of the input - EXPECT_EQ(returndata, expected_output); validate_trace(std::move(trace), public_inputs); @@ -975,36 +934,11 @@ TEST_F(AvmExecutionTests, sha256Opcode) auto bytecode = hex_to_bytes(bytecode_hex); auto instructions = Deserialization::parse(bytecode); - ASSERT_THAT(instructions, SizeIs(8)); - // - // SHA256 - EXPECT_THAT(instructions.at(6), - AllOf(Field(&Instruction::op_code, OpCode::SHA256), - Field(&Instruction::operands, - ElementsAre(VariantWith(3), - VariantWith(35), - VariantWith(36), - VariantWith(37))))); - // Assign a vector that we will mutate internally in gen_trace to store the return values; std::vector returndata = std::vector(); std::vector calldata = std::vector(); auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec); - // Find the first row enabling the sha256 selector - auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_sha256 == 1; }); - EXPECT_EQ(row->main_ind_addr_a, 36); // Register A is indirect - EXPECT_EQ(row->main_ind_addr_c, 35); // Register C is indirect - EXPECT_EQ(row->main_mem_addr_a, 1); // Indirect(36) -> 1 - EXPECT_EQ(row->main_mem_addr_c, 256); // Indirect(35) -> 256 - EXPECT_EQ(row->main_ia, 97); - EXPECT_EQ(row->main_ic, 0); - // Register b checks are done in the next row due to the difference in the memory tag - std::advance(row, 1); - EXPECT_EQ(row->main_ind_addr_b, 0); // Register B is not - EXPECT_EQ(row->main_mem_addr_b, 37); // Load(37) -> input length - EXPECT_EQ(row->main_ib, 3); // Input length - EXPECT_EQ(returndata, expected_output); validate_trace(std::move(trace), public_inputs); @@ -1046,16 +980,6 @@ TEST_F(AvmExecutionTests, poseidon2PermutationOpCode) auto bytecode = hex_to_bytes(bytecode_hex); auto instructions = Deserialization::parse(bytecode); - // 1 
CALLDATACOPY for input + 2 SET for setting up indirects + 1 POSEIDON2 + 1 RETURN - ASSERT_THAT(instructions, SizeIs(5)); - - // POSEIDON2_PERM - EXPECT_THAT( - instructions.at(3), - AllOf(Field(&Instruction::op_code, OpCode::POSEIDON2), - Field(&Instruction::operands, - ElementsAre(VariantWith(3), VariantWith(36), VariantWith(35))))); - // Assign a vector that we will mutate internally in gen_trace to store the return values; std::vector returndata = std::vector(); std::vector expected_output = { @@ -1064,18 +988,8 @@ TEST_F(AvmExecutionTests, poseidon2PermutationOpCode) FF(std::string("0x018555a8eb50cf07f64b019ebaf3af3c925c93e631f3ecd455db07bbb52bbdd3")), FF(std::string("0x0cbea457c91c22c6c31fd89afd2541efc2edf31736b9f721e823b2165c90fd41")) }; - auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec); - // Find the first row enabling the poseidon2 selector - auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_poseidon2 == 1; }); - EXPECT_EQ(row->main_ind_addr_a, 36); - EXPECT_EQ(row->main_ind_addr_b, 35); - EXPECT_EQ(row->main_mem_addr_a, 1); // Indirect(36) -> 1 - EXPECT_EQ(row->main_mem_addr_b, 9); // Indirect(34) -> 9 - EXPECT_EQ(row->main_ia, FF(std::string("9a807b615c4d3e2fa0b1c2d3e4f56789fedcba9876543210abcdef0123456789"))); - EXPECT_EQ(row->main_ib, 0); // Contains first element of the output (trivially 0) - EXPECT_EQ(returndata, expected_output); validate_trace(std::move(trace), public_inputs); @@ -1145,36 +1059,11 @@ TEST_F(AvmExecutionTests, keccakf1600OpCode) auto bytecode = hex_to_bytes(bytecode_hex); auto instructions = Deserialization::parse(bytecode); - // 25 SET for input + 2 SET for setting up indirects + 1 KECCAK + 1 RETURN - ASSERT_THAT(instructions, SizeIs(30)); - // - // KECCAKF1600 - EXPECT_THAT(instructions.at(28), - AllOf(Field(&Instruction::op_code, OpCode::KECCAKF1600), - Field(&Instruction::operands, - ElementsAre(VariantWith(3), - VariantWith(35), - 
VariantWith(36), - VariantWith(37))))); - // // Assign a vector that we will mutate internally in gen_trace to store the return values; std::vector calldata = std::vector(); std::vector returndata = std::vector(); auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec); - // Find the first row enabling the keccak selector - auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_keccak == 1; }); - EXPECT_EQ(row->main_ind_addr_a, 36); // Register A is indirect - EXPECT_EQ(row->main_ind_addr_c, 35); // Register C is indirect - EXPECT_EQ(row->main_mem_addr_a, 1); // Indirect(36) -> 1 - EXPECT_EQ(row->main_mem_addr_c, 256); // Indirect(35) -> 256 - EXPECT_EQ(row->main_ia, (0xF1258F7940E1DDE7LLU)); - EXPECT_EQ(row->main_ic, 0); - - std::advance(row, 1); - EXPECT_EQ(row->main_ind_addr_b, 0); // Register B is not - EXPECT_EQ(row->main_mem_addr_b, 37); // Load(37) -> input length - EXPECT_EQ(row->main_ib, 25); // Input length EXPECT_EQ(returndata, expected_output); validate_trace(std::move(trace), public_inputs); @@ -1228,36 +1117,11 @@ TEST_F(AvmExecutionTests, keccakOpCode) auto bytecode = hex_to_bytes(bytecode_hex); auto instructions = Deserialization::parse(bytecode); - ASSERT_THAT(instructions, SizeIs(6)); - // - // KECCAK - EXPECT_THAT(instructions.at(4), - AllOf(Field(&Instruction::op_code, OpCode::KECCAK), - Field(&Instruction::operands, - ElementsAre(VariantWith(3), - VariantWith(35), - VariantWith(36), - VariantWith(37))))); - // Assign a vector that we will mutate internally in gen_trace to store the return values; std::vector calldata = std::vector(); std::vector returndata = std::vector(); auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec); - // Find the first row enabling the keccak selector - auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_keccak == 1; }); - EXPECT_EQ(row->main_ind_addr_a, 36); // Register A 
is indirect - EXPECT_EQ(row->main_ind_addr_c, 35); // Register C is indirect - EXPECT_EQ(row->main_mem_addr_a, 1); // Indirect(36) -> 1 - EXPECT_EQ(row->main_mem_addr_c, 256); // Indirect(35) -> 256 - EXPECT_EQ(row->main_ia, 189); - EXPECT_EQ(row->main_ic, 0); - // Register b checks are done in the next row due to the difference in the memory tag - std::advance(row, 1); - EXPECT_EQ(row->main_ind_addr_b, 0); // Register B is not - EXPECT_EQ(row->main_mem_addr_b, 37); // Load(37) -> input length - EXPECT_EQ(row->main_ib, 1); // Input length - EXPECT_EQ(returndata, expected_output); validate_trace(std::move(trace), public_inputs); @@ -1306,32 +1170,11 @@ TEST_F(AvmExecutionTests, pedersenHashOpCode) auto bytecode = hex_to_bytes(bytecode_hex); auto instructions = Deserialization::parse(bytecode); - ASSERT_THAT(instructions, SizeIs(6)); - // Pedersen - EXPECT_THAT(instructions.at(4), - AllOf(Field(&Instruction::op_code, OpCode::PEDERSEN), - Field(&Instruction::operands, - ElementsAre(VariantWith(4), - VariantWith(2), - VariantWith(3), - VariantWith(4), - VariantWith(5))))); - // Assign a vector that we will mutate internally in gen_trace to store the return values; std::vector returndata = std::vector(); std::vector calldata = { FF(1), FF(1) }; auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec); - // Find the first row enabling the pedersen selector - auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_pedersen == 1; }); - EXPECT_EQ(row->main_ind_addr_a, 4); // Register A is indirect - EXPECT_EQ(row->main_mem_addr_a, 0); // Indirect(4) -> 1 - EXPECT_EQ(row->main_ia, 1); // The first input - // The second row loads the U32 values - std::advance(row, 1); - EXPECT_EQ(row->main_ia, 2); // Input length is 2 - EXPECT_EQ(row->main_ib, 5); // Hash offset is 5 - EXPECT_EQ(returndata[0], expected_output); validate_trace(std::move(trace), public_inputs); @@ -1714,8 +1557,9 @@ TEST_F(AvmExecutionTests, 
l2GasLeft) // Find the first row enabling the L2GASLEFT selector auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_l2gasleft == 1; }); - uint32_t expected_rem_gas = DEFAULT_INITIAL_L2_GAS - GAS_COST_TABLE.at(OpCode::SET).l2_fixed_gas_cost - - GAS_COST_TABLE.at(OpCode::L2GASLEFT).l2_fixed_gas_cost; + uint32_t expected_rem_gas = DEFAULT_INITIAL_L2_GAS - + static_cast(GAS_COST_TABLE.at(OpCode::SET).gas_l2_gas_fixed_table) - + static_cast(GAS_COST_TABLE.at(OpCode::L2GASLEFT).gas_l2_gas_fixed_table); EXPECT_EQ(row->main_ia, expected_rem_gas); EXPECT_EQ(row->main_mem_addr_a, 257); // Resolved direct address: 257 @@ -1755,8 +1599,9 @@ TEST_F(AvmExecutionTests, daGasLeft) // Find the first row enabling the DAGASLEFT selector auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_dagasleft == 1; }); - uint32_t expected_rem_gas = DEFAULT_INITIAL_DA_GAS - GAS_COST_TABLE.at(OpCode::ADD).da_fixed_gas_cost - - GAS_COST_TABLE.at(OpCode::DAGASLEFT).da_fixed_gas_cost; + uint32_t expected_rem_gas = DEFAULT_INITIAL_DA_GAS - + static_cast(GAS_COST_TABLE.at(OpCode::ADD).gas_da_gas_fixed_table) - + static_cast(GAS_COST_TABLE.at(OpCode::DAGASLEFT).gas_da_gas_fixed_table); EXPECT_EQ(row->main_ia, expected_rem_gas); EXPECT_EQ(row->main_mem_addr_a, 39); @@ -1886,13 +1731,12 @@ TEST_F(AvmExecutionTests, kernelOutputEmitOpcodes) TEST_F(AvmExecutionTests, kernelOutputStorageLoadOpcodeSimple) { // Sload from a value that has not previously been written to will require a hint to process - std::string bytecode_hex = to_hex(OpCode::SET) + // opcode SET - "00" // Indirect flag - "03" // U32 - "00000009" // value 9 - "00000001" // dst_offset 1 - // Cast set to field - + to_hex(OpCode::CAST) + // opcode CAST + std::string bytecode_hex = to_hex(OpCode::SET) + // opcode SET + "00" // Indirect flag + "03" // U32 + "00000009" // value 9 + "00000001" // dst_offset 1 + + to_hex(OpCode::CAST) + // opcode CAST (Cast set to 
field) "00" // Indirect flag "06" // tag field "00000001" // dst 1 @@ -1900,7 +1744,7 @@ TEST_F(AvmExecutionTests, kernelOutputStorageLoadOpcodeSimple) + to_hex(OpCode::SLOAD) + // opcode SLOAD "00" // Indirect flag "00000001" // slot offset 1 - "00000001" // slot offset 1 + "00000001" // slot size 1 "00000002" // write storage value to offset 2 + to_hex(OpCode::RETURN) + // opcode RETURN "00" // Indirect flag @@ -1957,7 +1801,7 @@ TEST_F(AvmExecutionTests, kernelOutputStorageLoadOpcodeComplex) + to_hex(OpCode::SLOAD) + // opcode SLOAD "00" // Indirect flag (second operand indirect - dest offset) "00000001" // slot offset 1 - "00000002" // slot offset 2 + "00000002" // slot size 2 "00000002" // write storage value to offset 2 + to_hex(OpCode::RETURN) + // opcode RETURN "00" // Indirect flag @@ -2292,74 +2136,83 @@ TEST_F(AvmExecutionTests, kernelOutputHashExistsOpcodes) validate_trace(std::move(trace), public_inputs); } -// TEST_F(AvmExecutionTests, opCallOpcodes) -// { -// std::string bytecode_preamble; -// // Gas offset preamble -// bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for gas offset indirect -// "00" // Indirect flag -// "03" // U32 -// "00000010" // val 16 (address where gas offset is located) -// "00000011" + // dst_offset 17 -// to_hex(OpCode::SET) + // opcode SET for value stored in gas offset -// "00" // Indirect flag -// "03" // U32 -// "00000011" // val i -// "00000000"; -// // args offset preamble -// bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for args offset indirect -// "00" // Indirect flag -// "03" // U32 -// "00000100" // val i -// "00000012" + // dst_offset 0 -// to_hex(OpCode::SET) + // opcode SET for value stored in args offset -// "00" // Indirect flag -// "03" // U32 -// "00000012" // val i -// "00000001"; -// // ret offset preamble -// bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for ret offset indirect -// "00" // Indirect flag -// "03" // U32 -// "00000008" // val i -// "00000004" + // dst_offset 
0 -// to_hex(OpCode::SET) + // opcode SET for value stored in ret offset -// "00" // Indirect flag -// "03" // U32 -// "00000002" // val i -// "00000007"; -// std::string bytecode_hex = bytecode_preamble // SET gas, addr, args size, ret offset, success, function -// selector -// + to_hex(OpCode::CALL) + // opcode CALL -// "15" // Indirect flag -// "00000000" // gas offset -// "00000001" // addr offset -// "00000002" // args offset -// "00000003" // args size offset -// "00000004" // ret offset -// "00000007" // ret size -// "0000000a" // success offset -// "00000006" // function_selector_offset -// + to_hex(OpCode::RETURN) + // opcode RETURN -// "00" // Indirect flag -// "00000008" // ret offset 8 -// "00000003"; // ret size 3 - -// auto bytecode = hex_to_bytes(bytecode_hex); -// auto instructions = Deserialization::parse(bytecode); - -// std::vector calldata = {}; -// std::vector returndata = {}; - -// // Generate Hint for call operation -// auto execution_hints = ExecutionHints().with_externalcall_hints( -// { { .success = 1, .return_data = { 9, 8 }, .l2_gas_used = 0, .da_gas_used = 0 } }); - -// auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec, execution_hints); -// EXPECT_EQ(returndata, std::vector({ 9, 8, 1 })); // The 1 represents the success - -// validate_trace(std::move(trace), public_inputs); -// } +TEST_F(AvmExecutionTests, opCallOpcodes) +{ + // Calldata for l2_gas, da_gas, contract_address, nested_call_args (4 elements), + std::vector calldata = { 17, 10, 34802342, 1, 2, 3, 4 }; + std::string bytecode_preamble; + // Set up Gas offsets + bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for gas offset indirect + "00" // Indirect flag + "03" // U32 + "00000000" // val 0 (address where gas tuple is located) + "00000011"; // dst_offset 17 + // Set up contract address offset + bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for args offset indirect + "00" // Indirect flag + "03" // U32 + "00000002" // 
val 2 (where contract address is located) + "00000012"; // dst_offset 18 + // Set up args offset + bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for ret offset indirect + "00" // Indirect flag + "03" // U32 + "00000003" // val 3 (the start of the args array) + "00000013"; // dst_offset 19 + // Set up args size offset + bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for ret offset indirect + "00" // Indirect flag + "03" // U32 + "00000004" // val 4 (the length of the args array) + "00000014"; // dst_offset 20 + // Set up the ret offset + bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for ret offset indirect + "00" // Indirect flag + "03" // U32 + "00000100" // val 256 (the start of where to write the return data) + "00000015"; // dst_offset 21 + // Set up the success offset + bytecode_preamble += to_hex(OpCode::SET) + // opcode SET for ret offset indirect + "00" // Indirect flag + "03" // U32 + "00000102" // val 258 (write the success flag at ret_offset + ret_size) + "00000016"; // dst_offset 22 + + std::string bytecode_hex = to_hex(OpCode::CALLDATACOPY) + // opcode CALLDATACOPY + "00" // Indirect flag + "00000000" // cd_offset + "00000007" // copy_size + "00000000" // dst_offset + + bytecode_preamble // Load up memory offsets + + to_hex(OpCode::CALL) + // opcode CALL + "3f" // Indirect flag + "00000011" // gas offset + "00000012" // addr offset + "00000013" // args offset + "00000014" // args size offset + "00000015" // ret offset + "00000002" // ret size + "00000016" // success offset + "00000017" // function_selector_offset + + to_hex(OpCode::RETURN) + // opcode RETURN + "00" // Indirect flag + "00000100" // ret offset 8 + "00000003"; // ret size 3 (extra read is for the success flag) + + auto bytecode = hex_to_bytes(bytecode_hex); + auto instructions = Deserialization::parse(bytecode); + + std::vector returndata = {}; + + // Generate Hint for call operation + auto execution_hints = ExecutionHints().with_externalcall_hints( + { { 
.success = 1, .return_data = { 9, 8 }, .l2_gas_used = 0, .da_gas_used = 0 } }); + + auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec, execution_hints); + EXPECT_EQ(returndata, std::vector({ 9, 8, 1 })); // The 1 represents the success + + validate_trace(std::move(trace), public_inputs); +} TEST_F(AvmExecutionTests, opGetContractInstanceOpcodes) { diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_gas.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_gas.test.cpp index 75ebab12270..2666bb40e01 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_gas.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_gas.test.cpp @@ -9,7 +9,6 @@ using namespace bb; using namespace bb::avm_trace; class AvmGasTests : public ::testing::Test { - protected: // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. void SetUp() override { srs::init_crs_factory("../srs_db/ignition"); }; @@ -34,7 +33,7 @@ void test_gas(StartGas startGas, OpcodesFunc apply_opcodes, CheckFunc check_trac kernel_inputs[L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET] = FF(startGas.l2_gas); kernel_inputs[DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET] = FF(startGas.da_gas); - VmPublicInputs public_inputs{}; + VmPublicInputs public_inputs; std::get<0>(public_inputs) = kernel_inputs; AvmTraceBuilder trace_builder(public_inputs); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_indirect_mem.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_indirect_mem.test.cpp index d75c53f49eb..f4592476924 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_indirect_mem.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_indirect_mem.test.cpp @@ -7,20 +7,15 @@ using namespace bb::avm_trace; class AvmIndirectMemTests : public ::testing::Test { public: - AvmTraceBuilder trace_builder; - VmPublicInputs public_inputs{}; - - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless 
the SRS is initialised for every test. - void SetUp() override + AvmIndirectMemTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) { srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; + } + + VmPublicInputs public_inputs; + AvmTraceBuilder trace_builder; }; /****************************************************************************** diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_inter_table.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_inter_table.test.cpp index 9a4855892e5..098dbb0c181 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_inter_table.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_inter_table.test.cpp @@ -14,20 +14,15 @@ using namespace bb::avm_trace; class AvmInterTableTests : public ::testing::Test { public: - AvmTraceBuilder trace_builder; - VmPublicInputs public_inputs{}; - - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
- void SetUp() override + AvmInterTableTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) { srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; + } + + VmPublicInputs public_inputs; + AvmTraceBuilder trace_builder; }; /****************************************************************************** diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_kernel.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_kernel.test.cpp index a985bb45a63..786902dc110 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_kernel.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_kernel.test.cpp @@ -12,7 +12,6 @@ using namespace bb; using namespace bb::avm_trace; class AvmKernelTests : public ::testing::Test { - protected: // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
void SetUp() override { srs::init_crs_factory("../srs_db/ignition"); }; @@ -1084,7 +1083,7 @@ TEST_F(AvmKernelOutputPositiveTests, kernelSload) /*ib=*/slot, /*mem_addr_b=*/0, /*ind_b=*/false, - /*r_in_tag=*/AvmMemoryTag::FF, + /*r_in_tag=*/AvmMemoryTag::U0, // Kernel Sload is writing to memory /*side_effect_counter=*/0, /*rwa=*/1, /*no_b=*/true); @@ -1126,7 +1125,7 @@ TEST_F(AvmKernelOutputPositiveTests, kernelSstore) /*ib=*/slot, /*mem_addr_b=*/0, /*ind_b*/ false, - /*w_in_tag=*/AvmMemoryTag::FF, + /*r_in_tag=*/AvmMemoryTag::FF, /*side_effect_counter=*/0, /*rwa=*/0, /*no_b=*/true); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_mem_opcodes.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_mem_opcodes.test.cpp index 3f0538411a2..71bfcbbe39e 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_mem_opcodes.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_mem_opcodes.test.cpp @@ -17,8 +17,15 @@ using namespace testing; class AvmMemOpcodeTests : public ::testing::Test { public: + AvmMemOpcodeTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) + { + srs::init_crs_factory("../srs_db/ignition"); + } + + VmPublicInputs public_inputs; AvmTraceBuilder trace_builder; - VmPublicInputs public_inputs{}; protected: std::vector trace; @@ -32,17 +39,6 @@ class AvmMemOpcodeTests : public ::testing::Test { size_t mem_ind_c_addr; size_t mem_ind_d_addr; - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
- void SetUp() override - { - srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; - void build_mov_trace(bool indirect, uint128_t const& val, uint32_t src_offset, @@ -173,7 +169,8 @@ class AvmMemOpcodeTests : public ::testing::Test { uint32_t dst_offset, AvmMemoryTag tag, uint32_t dir_src_offset = 0, - uint32_t dir_dst_offset = 0) + uint32_t dir_dst_offset = 0, + bool indirect_uninitialized = false) { compute_mov_indices(indirect); FF const val_ff = uint256_t::from_uint128(val); @@ -220,7 +217,9 @@ class AvmMemOpcodeTests : public ::testing::Test { EXPECT_THAT(mem_ind_a_row, AllOf(MEM_ROW_FIELD_EQ(tag_err, 0), MEM_ROW_FIELD_EQ(r_in_tag, static_cast(AvmMemoryTag::U32)), - MEM_ROW_FIELD_EQ(tag, static_cast(AvmMemoryTag::U32)), + MEM_ROW_FIELD_EQ(tag, + indirect_uninitialized ? 
static_cast(AvmMemoryTag::U0) + : static_cast(AvmMemoryTag::U32)), MEM_ROW_FIELD_EQ(addr, src_offset), MEM_ROW_FIELD_EQ(val, dir_src_offset), MEM_ROW_FIELD_EQ(sel_resolve_ind_addr_a, 1))); @@ -376,7 +375,7 @@ TEST_F(AvmMemOpcodeTests, indUninitializedValueMov) trace_builder.return_op(0, 0, 0); trace = trace_builder.finalize(); - validate_mov_trace(true, 0, 2, 3, AvmMemoryTag::U0, 0, 1); + validate_mov_trace(true, 0, 2, 3, AvmMemoryTag::U0, 0, 1, true); } TEST_F(AvmMemOpcodeTests, indirectMov) diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_memory.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_memory.test.cpp index 38428559194..5dcfe52e0df 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_memory.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_memory.test.cpp @@ -2,25 +2,21 @@ #include "barretenberg/vm/avm_trace/avm_common.hpp" namespace tests_avm { + using namespace bb; using namespace bb::avm_trace; class AvmMemoryTests : public ::testing::Test { public: - AvmTraceBuilder trace_builder; - VmPublicInputs public_inputs{}; - - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. 
- void SetUp() override + AvmMemoryTests() + : public_inputs(generate_base_public_inputs()) + , trace_builder(AvmTraceBuilder(public_inputs)) { srs::init_crs_factory("../srs_db/ignition"); - std::array kernel_inputs{}; - kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; - kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; - std::get<0>(public_inputs) = kernel_inputs; - trace_builder = AvmTraceBuilder(public_inputs); - }; + } + + VmPublicInputs public_inputs; + AvmTraceBuilder trace_builder; }; /****************************************************************************** diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp index 8d31f30ce9a..1657ba8ce0d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp @@ -5,8 +5,8 @@ #include "barretenberg/vm/generated/avm_flavor.hpp" #include -using namespace bb; namespace tests_avm { + using namespace bb; std::vector gen_three_op_params(std::vector operands, @@ -241,4 +241,14 @@ void clear_range_check_counters(std::vector& trace, uint256_t previous_valu previous_value >>= 16; } +VmPublicInputs generate_base_public_inputs() +{ + VmPublicInputs public_inputs; + std::array kernel_inputs{}; + kernel_inputs.at(DA_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_DA_GAS; + kernel_inputs.at(L2_GAS_LEFT_CONTEXT_INPUTS_OFFSET) = DEFAULT_INITIAL_L2_GAS; + std::get<0>(public_inputs) = kernel_inputs; + return public_inputs; +} + } // namespace tests_avm diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp index c3c665f3490..0dcf6381502 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp @@ -41,4 +41,6 @@ void update_slice_registers(Row& row, uint256_t 
a); std::vector gen_three_op_params(std::vector> operands, std::vector mem_tags); +VmPublicInputs generate_base_public_inputs(); + } // namespace tests_avm diff --git a/barretenberg/ts/src/types/fields.ts b/barretenberg/ts/src/types/fields.ts index 9305f0d6142..ef9d9188ced 100644 --- a/barretenberg/ts/src/types/fields.ts +++ b/barretenberg/ts/src/types/fields.ts @@ -15,7 +15,7 @@ export class Fr { const valueBigInt = typeof value === 'bigint' ? value : toBigIntBE(value); if (valueBigInt > Fr.MAX_VALUE) { - throw new Error(`Fr out of range: ${valueBigInt}`); + throw new Error(`Value 0x${valueBigInt.toString(16)} is greater or equal to field modulus.`); } this.value = typeof value === 'bigint' ? toBufferBE(value) : value; diff --git a/boxes/boxes/react/package.json b/boxes/boxes/react/package.json index 863d7cf3eac..8e574163eb5 100644 --- a/boxes/boxes/react/package.json +++ b/boxes/boxes/react/package.json @@ -76,7 +76,6 @@ "ts-jest": "^29.1.0", "ts-loader": "^9.4.4", "ts-node": "^10.9.1", - "tty-browserify": "^0.0.1", "typescript": "^5.0.4", "util": "^0.12.5", "webpack": "^5.88.2", diff --git a/boxes/boxes/react/src/contracts/src/main.nr b/boxes/boxes/react/src/contracts/src/main.nr index 944c66a75e9..6cbe8afbba8 100644 --- a/boxes/boxes/react/src/contracts/src/main.nr +++ b/boxes/boxes/react/src/contracts/src/main.nr @@ -1,7 +1,7 @@ contract BoxReact { use dep::aztec::prelude::{AztecAddress, PrivateMutable, Map, NoteInterface, NoteHeader}; use dep::aztec::protocol_types::grumpkin_point::GrumpkinPoint; - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_with_keys; + use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys; use dep::value_note::value_note::{ValueNote, VALUE_NOTE_LEN}; #[aztec(storage)] @@ -20,7 +20,7 @@ contract BoxReact { ) { let numbers = storage.numbers; let mut new_number = ValueNote::new(number, owner_npk_m_hash); - numbers.at(owner).initialize(&mut 
new_number).emit(encode_and_encrypt_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); + numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_note_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); } #[aztec(private)] @@ -33,7 +33,7 @@ contract BoxReact { ) { let numbers = storage.numbers; let mut new_number = ValueNote::new(number, owner_npk_m_hash); - numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); + numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_note_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); } unconstrained fn getNumber(owner: AztecAddress) -> pub ValueNote { diff --git a/boxes/boxes/react/webpack.config.js b/boxes/boxes/react/webpack.config.js index d5e6fc11e01..4db17529911 100644 --- a/boxes/boxes/react/webpack.config.js +++ b/boxes/boxes/react/webpack.config.js @@ -48,7 +48,6 @@ export default (_, argv) => ({ util: require.resolve('util/'), stream: require.resolve('stream-browserify'), string_decoder: require.resolve('string_decoder/'), - tty: require.resolve('tty-browserify'), }, }, devServer: { diff --git a/boxes/boxes/vanilla/package.json b/boxes/boxes/vanilla/package.json index 0921135a760..045d0e976fa 100644 --- a/boxes/boxes/vanilla/package.json +++ b/boxes/boxes/vanilla/package.json @@ -28,7 +28,6 @@ "html-webpack-plugin": "^5.6.0", "stream-browserify": "^3.0.0", "ts-loader": "^9.5.1", - "tty-browserify": "^0.0.1", "typescript": "^5.0.4", "util": "^0.12.5", "webpack": "^5.90.1", diff --git a/boxes/boxes/vanilla/src/contracts/src/main.nr b/boxes/boxes/vanilla/src/contracts/src/main.nr index 424471e8bfd..c8090a00f88 100644 --- a/boxes/boxes/vanilla/src/contracts/src/main.nr +++ b/boxes/boxes/vanilla/src/contracts/src/main.nr @@ -1,7 +1,7 @@ contract Vanilla { use dep::aztec::prelude::{AztecAddress, PrivateMutable, Map, NoteInterface, NoteHeader}; use dep::aztec::protocol_types::grumpkin_point::GrumpkinPoint; - use 
dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_with_keys; + use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys; use dep::value_note::value_note::{ValueNote, VALUE_NOTE_LEN}; #[aztec(storage)] @@ -20,7 +20,7 @@ contract Vanilla { ) { let numbers = storage.numbers; let mut new_number = ValueNote::new(number, owner_npk_m_hash); - numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); + numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_note_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); } #[aztec(private)] @@ -33,7 +33,7 @@ contract Vanilla { ) { let numbers = storage.numbers; let mut new_number = ValueNote::new(number, owner_npk_m_hash); - numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); + numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_note_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); } unconstrained fn getNumber(owner: AztecAddress) -> pub ValueNote { diff --git a/boxes/boxes/vanilla/webpack.config.js b/boxes/boxes/vanilla/webpack.config.js index aa9f974b3a2..6fe89595fe0 100644 --- a/boxes/boxes/vanilla/webpack.config.js +++ b/boxes/boxes/vanilla/webpack.config.js @@ -44,7 +44,6 @@ export default (_, argv) => ({ util: require.resolve('util/'), stream: require.resolve('stream-browserify'), string_decoder: require.resolve('string_decoder/'), - tty: require.resolve('tty-browserify'), }, }, devServer: { diff --git a/boxes/contract-only/package.json b/boxes/contract-only/package.json index f262a39f6fc..f68de951d8e 100644 --- a/boxes/contract-only/package.json +++ b/boxes/contract-only/package.json @@ -43,7 +43,6 @@ "jest": "^29.6.4", "stream-browserify": "^3.0.0", "ts-loader": "^9.5.1", - "tty-browserify": "^0.0.1", "typescript": "^5.0.4", "util": "^0.12.5", "webpack": "^5.90.1", diff --git a/boxes/yarn.lock 
b/boxes/yarn.lock index 47091d6cb8c..526dda9a34f 100644 --- a/boxes/yarn.lock +++ b/boxes/yarn.lock @@ -104,7 +104,6 @@ __metadata: ts-jest: "npm:^29.1.0" ts-loader: "npm:^9.4.4" ts-node: "npm:^10.9.1" - tty-browserify: "npm:^0.0.1" typescript: "npm:^5.0.4" util: "npm:^0.12.5" webpack: "npm:^5.88.2" @@ -133,7 +132,6 @@ __metadata: html-webpack-plugin: "npm:^5.6.0" stream-browserify: "npm:^3.0.0" ts-loader: "npm:^9.5.1" - tty-browserify: "npm:^0.0.1" typescript: "npm:^5.0.4" util: "npm:^0.12.5" webpack: "npm:^5.90.1" @@ -8957,13 +8955,6 @@ __metadata: languageName: node linkType: hard -"tty-browserify@npm:^0.0.1": - version: 0.0.1 - resolution: "tty-browserify@npm:0.0.1" - checksum: 5e34883388eb5f556234dae75b08e069b9e62de12bd6d87687f7817f5569430a6dfef550b51dbc961715ae0cd0eb5a059e6e3fc34dc127ea164aa0f9b5bb033d - languageName: node - linkType: hard - "type-check@npm:^0.4.0, type-check@npm:~0.4.0": version: 0.4.0 resolution: "type-check@npm:0.4.0" diff --git a/build-images/Earthfile b/build-images/Earthfile index ca3fa661e1b..bde05449320 100644 --- a/build-images/Earthfile +++ b/build-images/Earthfile @@ -71,7 +71,7 @@ osxcross: && apt-get -y autoremove \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - RUN git clone --depth=1 https://github.com/tpoechtrager/osxcross.git \ + RUN git clone https://github.com/tpoechtrager/osxcross.git \ && cd /osxcross \ && git reset --hard ff8d100f3f026b4ffbe4ce96d8aac4ce06f1278b \ && export OSX_SDK="MacOSX14.0.sdk" \ @@ -116,7 +116,6 @@ foundry: # It acts as the base image for all CI builds, and we build on it to produce a developer box. build: BUILD +wasi-sdk - BUILD +osxcross BUILD +foundry FROM +base-build RUN apt update && \ @@ -154,11 +153,6 @@ build: # Install wasi-sdk. COPY +wasi-sdk/opt/wasi-sdk /opt/wasi-sdk - # Install osxcross. Requires developer to mount SDK from their mac host. 
- COPY +osxcross/opt/osxcross /opt/osxcross - ENV PATH="/opt/osxcross/bin:$PATH" - ENV LD_LIBRARY_PATH="/opt/osxcross/lib:$LD_LIBRARY_PATH" - # Install foundry. COPY +foundry-build/opt/foundry /opt/foundry ENV PATH="/opt/foundry/bin:$PATH" @@ -204,6 +198,7 @@ build: # We want to produce downstream images: devbox and sysbox. This image is the base image for each. # It contains a suite of tools that developers might use to develop aztec. basebox: + BUILD +osxcross BUILD +build FROM +build RUN yes | unminimize @@ -237,6 +232,11 @@ basebox: RUN wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-$(dpkg --print-architecture) -O /usr/local/bin/earthly && \ chmod +x /usr/local/bin/earthly + # Install osxcross. Requires developer to mount SDK from their mac host. + COPY +osxcross/opt/osxcross /opt/osxcross + ENV PATH="/opt/osxcross/bin:$PATH" + ENV LD_LIBRARY_PATH="/opt/osxcross/lib:$LD_LIBRARY_PATH" + # Install gh (github cli). RUN mkdir -p -m 755 /etc/apt/keyrings && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg > /etc/apt/keyrings/githubcli-archive-keyring.gpg \ && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ diff --git a/cspell.json b/cspell.json index 78f4bd23c72..9a41fb4eb43 100644 --- a/cspell.json +++ b/cspell.json @@ -169,6 +169,9 @@ "nullifer", "offchain", "onchain", + "opentelemetry", + "otel", + "OTLP", "otterscan", "outdir", "overlayfs", @@ -219,6 +222,7 @@ "rushstack", "schnorr", "secp", + "SEMRESATTRS", "sigchld", "Signerless", "siloes", @@ -253,6 +257,7 @@ "typegen", "typeparam", "undeployed", + "undici", "unexclude", "unexcluded", "unprefixed", @@ -270,6 +275,7 @@ "viem", "wasms", "webassembly", + "WITGEN", "workdir", "yamux", "yarnrc", @@ -301,5 +307,7 @@ "lib", "*.cmake" ], - "flagWords": ["anonymous"] -} \ No newline at end of file + "flagWords": [ + "anonymous" + ] +} diff --git a/docker-compose.yml b/docker-compose.yml index 952fd382939..b65e980c58d 100644 --- a/docker-compose.yml 
+++ b/docker-compose.yml @@ -5,8 +5,8 @@ services: # need to run bb for proofs and bb is only built for x86 platform: linux/amd64 environment: - LOG_LEVEL: info - DEBUG: aztec:* + LOG_LEVEL: ${LOG_LEVEL:-info} + DEBUG: ${DEBUG:-aztec:*,-json-rpc:*,-aztec:circuits:artifact_hash,-aztec:randomness_singleton} DEBUG_COLORS: 1 CHAIN_ID: 31337 VERSION: 1 @@ -28,14 +28,16 @@ services: - aztec:/var/lib/aztec ports: - 8080:8080/tcp + profiles: + - pxe node: image: aztecprotocol/aztec${AZTEC_DOCKER_TAG:-@sha256:03feac60e91f1aabf678cecbcd13271dda229120ec6007f2c1bac718ff550c70} # need to run bb for proofs and bb is only built for x86 platform: linux/amd64 environment: - LOG_LEVEL: info - DEBUG: aztec:* + LOG_LEVEL: ${LOG_LEVEL:-info} + DEBUG: ${DEBUG:-aztec:*,-json-rpc:*,-aztec:circuits:artifact_hash,-aztec:randomness_singleton,-aztec:avm_simulator:*} DEBUG_COLORS: 1 CHAIN_ID: 31337 VERSION: 1 @@ -59,18 +61,34 @@ services: P2P_ENABLED: true PEER_ID_PRIVATE_KEY: AZTEC_PORT: 8999 + TEL_COLLECTOR_BASE_URL: ${TEL_COLLECTOR_BASE_URL:-http://otel-collector:4318} secrets: - ethereum-host - p2p-boot-node - entrypoint: [ - "/bin/sh", - "-c", - "export ETHEREUM_HOST=$$(cat /var/run/secrets/ethereum-host);\ - export BOOTSTRAP_NODES=$$(cat /var/run/secrets/p2p-boot-node);\ - test -z \"$$PEER_ID_PRIVATE_KEY\" -a ! -f /var/lib/aztec/p2p-private-key && node /usr/src/yarn-project/cli/dest/bin/index.js generate-p2p-private-key | head -1 | cut -d' ' -f 3 | tee /var/lib/aztec/p2p-private-key || echo 'Re-using existing P2P private key';\ - test -z \"$$PEER_ID_PRIVATE_KEY\" && export PEER_ID_PRIVATE_KEY=$$(cat /var/lib/aztec/p2p-private-key);\ - node /usr/src/yarn-project/aztec/dest/bin/index.js start --node --archiver", - ] + entrypoint: | + /bin/sh -c ' + export ETHEREUM_HOST=$$(cat /var/run/secrets/ethereum-host) + export BOOTSTRAP_NODES=$$(cat /var/run/secrets/p2p-boot-node) + + test -z "$$PEER_ID_PRIVATE_KEY" -a ! 
-f /var/lib/aztec/p2p-private-key && node /usr/src/yarn-project/cli/dest/bin/index.js generate-p2p-private-key | head -1 | cut -d" " -f 3 | tee /var/lib/aztec/p2p-private-key || echo "Re-using existing P2P private key" + test -z "$$PEER_ID_PRIVATE_KEY" && export PEER_ID_PRIVATE_KEY=$$(cat /var/lib/aztec/p2p-private-key) + + # if the stack is started with --profile metrics --profile node, give the collector a chance to start before the node + i=0 + max=3 + while ! curl --head --silent $$TEL_COLLECTOR_BASE_URL > /dev/null; do + echo "OpenTelemetry collector not up. Retrying after 1s"; + sleep 1; + i=$$((i+1)); + if [ $$i -eq $$max ]; then + echo "OpenTelemetry collector at $$TEL_COLLECTOR_BASE_URL not up after $${max}s. Running without metrics"; + unset TEL_COLLECTOR_BASE_URL; + break + fi; + done; + + node /usr/src/yarn-project/aztec/dest/bin/index.js start --node --archiver + ' volumes: - aztec:/var/lib/aztec profiles: @@ -94,8 +112,103 @@ services: profiles: - cli + otel-collector: + image: otel/opentelemetry-collector-contrib + configs: + - source: otel-collector-config + target: /etc/otelcol-contrib/config.yaml + profiles: + - metrics + ports: + - 4318:4318 + + prometheus: + image: prom/prometheus + profiles: + - metrics + configs: + - source: prometheus-config + target: /etc/prometheus/prometheus.yml + + grafana: + image: grafana/grafana + ports: + - 3000:3000 + profiles: + - metrics + volumes: + - ./grafana_dashboards:/etc/grafana/provisioning/dashboards + - grafana:/var/lib/grafana + configs: + - source: grafana-sources + target: /etc/grafana/provisioning/datasources/default.yml + + jaeger: + image: jaegertracing/all-in-one + ports: + - 16686:16686 + profiles: + - metrics + volumes: aztec: + grafana: + +configs: + grafana-sources: + content: | + apiVersion: 1 + datasources: + - name: Prometheus + uid: aztec-node-metrics + type: prometheus + url: http://prometheus:9090 + editable: false + isDefault: true + jsonData: + timeInterval: 10s + + prometheus-config: + 
content: | + global: + evaluation_interval: 30s + scrape_interval: 10s + scrape_configs: + - job_name: otel-collector + static_configs: + - targets: ['otel-collector:8888'] + - job_name: aztec + static_configs: + - targets: ['otel-collector:8889'] + otel-collector-config: + content: | + receivers: + otlp: + protocols: + http: + + processors: + batch: + + exporters: + prometheus: + endpoint: 0.0.0.0:8889 + metric_expiration: 5m + otlp/jaeger: + endpoint: "jaeger:4317" + tls: + insecure: true + + service: + pipelines: + traces: + receivers: [otlp] + processors: [batch] + exporters: [otlp/jaeger] + metrics: + receivers: [otlp] + processors: [batch] + exporters: [prometheus] secrets: aztec-node-url: diff --git a/docs/.gitignore b/docs/.gitignore index 07d9321798d..bfd44418b7d 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -24,3 +24,4 @@ yarn-error.log* docs/reference/aztecjs docs/reference/smart_contract_reference/aztec-nr +test-results diff --git a/docs/docs/aztec/_category_.json b/docs/docs/aztec/_category_.json index 336394a563a..22b47d59039 100644 --- a/docs/docs/aztec/_category_.json +++ b/docs/docs/aztec/_category_.json @@ -1,6 +1,6 @@ { "label": "Aztec", - "position": 0, + "position": 1, "collapsible": true, "collapsed": true } diff --git a/docs/docs/getting_started.md b/docs/docs/getting_started.md index 163fa2588a5..9b78a80b8e1 100644 --- a/docs/docs/getting_started.md +++ b/docs/docs/getting_started.md @@ -2,43 +2,45 @@ title: Quickstart --- -The easiest way to start developing on Aztec is simply to click on one of these buttons: +The easiest way to start developing on Aztec locally is through `npx aztec-app`. This is a convenient way of installing the development environment (A.K.A. Sandbox) and starting new projects from a boilerplate. 
-[![One-Click React Starter](/img/codespaces_badges/react_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Freact%2Fdevcontainer.json) [![One-Click HTML/TS Starter](/img/codespaces_badges/vanilla_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Fvanilla%2Fdevcontainer.json) [![One-Click Token Starter](/img/codespaces_badges/token_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Ftoken%2Fdevcontainer.json) +To locally install the Sandbox without other tools, see [here](./getting_started/manual_install.md). -That's it! +## Prerequisites -This creates a codespace with a prebuilt image containing one of the "Aztec Boxes" and a development network (sandbox). -- You can develop directly on the codespace, push it to a repo, make yourself at home. -- You can also just use the sandbox that comes with it. The URL will be logged, you just need to use it as your `PXE_URL`. +- Node.js >= v18 (recommend installing with [nvm](https://github.com/nvm-sh/nvm)) +- Docker (visit [this page of the Docker docs](https://docs.docker.com/get-docker/) on how to install it) -## Develop Locally +### Run the `npx` script -The above method uses Aztec boxes to install the sandbox and clone the repo. You can use it too to get started on your own machine and use your own IDE. +Thanks to Node, you can run the recommended `npx script`: + +```bash +npx aztec-app +``` -You can also [install the sandbox manually](/reference/sandbox_reference). +This script gives you some options to bootstrap a new project, start/stop the sandbox, or see the logs. Run `npx aztec-app -h` for a list of options. 
-### Prerequisites +## Install Noir LSP (recommended) -- Node.js >= v18 (recommend installing with [nvm](https://github.com/nvm-sh/nvm)) -- Docker (visit [this page of the Docker docs](https://docs.docker.com/get-docker/) on how to install it) +Install the [Noir Language Support extension](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir) to get syntax highlighting, syntax error detection and go-to definitions for your Aztec contracts. -### Run the `npx` script +Once the extension is installed, check your nargo binary by hovering over `Nargo` in the status bar on the bottom right of the application window. Click to choose the path to `aztec-nargo` (or regular `nargo`, if you have that installed). -With the node installation, you now should have `npm` and be able to run `npx` scripts. You can do that by running: +You can print the path of your `aztec-nargo` executable by running: ```bash -npx create-aztec-app +which aztec-nargo ``` -And follow the instructions. If all goes well, you should now have a development environment running locally on your machine. - -You can run `npx create-aztec-app sandbox -h` to start, stop, update and output logs from the sandbox. +To specify a custom nargo executable, go to the VSCode settings and search for "noir", or click extension settings on the `noir-lang` LSP plugin. +Update the `Noir: Nargo Path` field to point to your desired `aztec-nargo` executable. ## What's next? -To deploy a smart contract to your sandbox and interact with it using Aztec.js, go to the [next page](getting_started/aztecjs-getting-started.md). +Now you have a development network running, so you're ready to start coding your first app with Aztec.nr and Aztec.js! -To skip this and write your first smart contract, go to the [Aztec.nr getting started page](getting_started/aztecnr-getting-started.md). 
+To follow the series of tutorials, start with the private voting contract [here](./tutorials/contract_tutorials/private_voting_contract.md). +If you want to just keep learning, you can read about the high level architecture on the [Core Components page](./aztec/concepts/state_model/index.md) and [the lifecycle of a transaction](./aztec/concepts/transactions.md). diff --git a/docs/docs/getting_started/codespaces.md b/docs/docs/getting_started/codespaces.md new file mode 100644 index 00000000000..5d57ac291e2 --- /dev/null +++ b/docs/docs/getting_started/codespaces.md @@ -0,0 +1,25 @@ +--- +title: Codespaces +sidebar_position: 0 +draft: true +--- + +All machines are different, and you may not want to run the sandbox locally (for example when using Windows). We thought about you exactly ❤️ + +[Codespaces](https://github.com/features/codespaces) are a quick way to develop: they provision a remote machine with all tooling you need for Aztec in just a few minutes. We're big fans, so we prepared some prebuilt images to make it easier and faster. + +Just choose a boilerplate and click "create new codespace": + +[![One-Click React Starter](/img/codespaces_badges/react_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Freact%2Fdevcontainer.json) [![One-Click HTML/TS Starter](/img/codespaces_badges/vanilla_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Fvanilla%2Fdevcontainer.json) [![One-Click Token Starter](/img/codespaces_badges/token_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Ftoken%2Fdevcontainer.json) + +This creates a codespace with a prebuilt image containing one of the "Aztec Boxes" and a development network (sandbox). +- You can develop directly on the codespace, push it to a repo, make yourself at home. +- You can also just use the sandbox that comes with it. 
The URL will be logged, you just need to use it as your `PXE_URL`. + +You can then start, stop, or see the logs of your sandbox just by calling `sandbox` or `npx aztec-app sandbox`. Run `sandbox -h` for a list of commands. + +## More about codespaces + +Codespaces are way more powerful than you may initially think. For example, you can connect your local `vscode` to a remote codespace, for a fully contained development environment that doesn't use any of your computer resources! + +Visit the [codespaces documentation](https://docs.github.com/en/codespaces/overview) for more specific documentation around codespaces. diff --git a/docs/docs/getting_started/manual_install.md b/docs/docs/getting_started/manual_install.md new file mode 100644 index 00000000000..a5e3f3ad93b --- /dev/null +++ b/docs/docs/getting_started/manual_install.md @@ -0,0 +1,77 @@ +--- +title: Manual install +sidebar_position: 1 +--- + +You can have some more control over the sandbox by installing it manually through the underlying script used by [`npx aztec-app`](../getting_started.md). + +This involves some knowledge on Docker if you want to stop, restart, or detach from logs. But it also gives you better control over things such as environment variables. + +### Prerequisites + +- Node.js >= v18 (recommend installing with [nvm](https://github.com/nvm-sh/nvm)) +- Docker (visit [this page of the Docker docs](https://docs.docker.com/get-docker/) on how to install it) + +### Install the sandbox + +To install the latest Sandbox version, run: + +```bash +bash -i <(curl -s install.aztec.network) +``` + +This will install the following tools: + +- **aztec** - launches various infrastructure subsystems (sequencer, prover, pxe, etc). +- **aztec-nargo** - aztec's build of nargo, the noir compiler toolchain. +- **aztec-sandbox** - a wrapper around docker-compose that launches services needed for sandbox testing. +- **aztec-up** - a tool to upgrade the aztec toolchain to the latest, or specific versions. 
+- **aztec-builder** - A useful tool for projects to generate ABIs and update their dependencies. + +Once these have been installed, to start the sandbox, run: + +```bash +aztec-sandbox +``` + +### Have fun + +**Congratulations, you have just installed and run the Aztec Sandbox!** + +```bash + /\ | | + / \ ___| |_ ___ ___ + / /\ \ |_ / __/ _ \/ __| + / ____ \ / /| || __/ (__ + /_/___ \_\/___|\__\___|\___| + +``` + +In the terminal, you will see some logs: + +1. Sandbox version +2. Contract addresses of rollup contracts +3. PXE (private execution environment) setup logs +4. Initial accounts that are shipped with the sandbox and can be used in tests + +## Running Aztec PXE / Node / P2P-Bootstrap node + +If you wish to run components of the Aztec network stack separately, you can use the `aztec start` command with various options for enabling components. + +```bash +aztec start --node [nodeOptions] --pxe [pxeOptions] --archiver [archiverOptions] --sequencer [sequencerOptions] --prover [proverOptions] ----p2p-bootstrap [p2pOptions] +``` + +Starting the aztec node alongside a PXE, sequencer or archiver, will attach the components to the node.Eg if you want to run a PXE separately to a node, you can [read this guide](../aztec/concepts/pxe/index.md)/ + +## Update the sandbox + +To update the sandbox, you can just run: + +```bash +aztec-up +``` + +## Next steps + +Visit the [sandbox reference](../reference/sandbox_reference/index.md) for more info on which environment variables you can set, which cheat codes you can use, and learn about what exactly is the Aztec Sandbox. 
diff --git a/docs/docs/guides/smart_contracts/how_to_compile_contract.md b/docs/docs/guides/smart_contracts/how_to_compile_contract.md index 39e91a84166..605d8050421 100644 --- a/docs/docs/guides/smart_contracts/how_to_compile_contract.md +++ b/docs/docs/guides/smart_contracts/how_to_compile_contract.md @@ -222,7 +222,7 @@ export class TokenContract extends ContractBase { } ``` -Read more about interacting with contracts using `aztec.js` [here](../../getting_started/aztecjs-getting-started.md). +Read more about interacting with contracts using `aztec.js` [here](../../tutorials/aztecjs-getting-started.md). ### Aztec.nr interfaces diff --git a/docs/docs/guides/smart_contracts/writing_contracts/authwit.md b/docs/docs/guides/smart_contracts/writing_contracts/authwit.md index 18b21cd75b7..8f74c3bf340 100644 --- a/docs/docs/guides/smart_contracts/writing_contracts/authwit.md +++ b/docs/docs/guides/smart_contracts/writing_contracts/authwit.md @@ -74,7 +74,7 @@ As part of `AuthWit` we are assuming that the `on_behalf_of` implements the priv ```rust #[aztec(private)] -fn spend_private_authwit(inner_hash: Field) -> Field; +fn verify_private_authwit(inner_hash: Field) -> Field; ``` For public authwit, we have a shared registry that is used, there we are using a `consume` function. @@ -101,10 +101,8 @@ To make it convenient to compute the message hashes in TypeScript, the `aztec.js For private calls where we allow execution on behalf of others, we generally want to check if the current call is authenticated by `on_behalf_of`. To easily do so, we can use the `assert_current_call_valid_authwit` which fetches information from the current context without us needing to provide much beyond the `on_behalf_of`. -This function will then make a to `on_behalf_of` to execute the `spend_private_authwit` function which validates that the call is authenticated. 
-The `on_behalf_of` should assert that we are indeed authenticated and then emit a nullifier when we are spending the authwit to prevent replay attacks. -If the return value is not as expected, we throw an error. -This is to cover the case where the `on_behalf_of` might implemented some function with the same selector as the `spend_private_authwit` that could be used to authenticate unintentionally. +This function will then make a call to `on_behalf_of` to execute the `verify_private_authwit` function which validates that the call is authenticated. +The `on_behalf_of` should assert that we are indeed authenticated and then return the `IS_VALID` selector. If the return value is not as expected, we throw an error. This is to cover the case where the `on_behalf_of` might implemented some function with the same selector as the `verify_private_authwit` that could be used to authenticate unintentionally. #### Example diff --git a/docs/docs/guides/smart_contracts/writing_contracts/common_patterns/index.md b/docs/docs/guides/smart_contracts/writing_contracts/common_patterns/index.md index 5e0997a595e..a44b376779a 100644 --- a/docs/docs/guides/smart_contracts/writing_contracts/common_patterns/index.md +++ b/docs/docs/guides/smart_contracts/writing_contracts/common_patterns/index.md @@ -31,7 +31,7 @@ E.g. you don't want a user to subscribe once they have subscribed already. Or yo Emit a nullifier in your function. By adding this nullifier into the tree, you prevent another nullifier from being added again. This is also why in authwit, we emit a nullifier, to prevent someone from reusing their approval. -#include_code spend_private_authwit /noir-projects/aztec-nr/authwit/src/account.nr rust +#include_code verify_private_authwit /noir-projects/aztec-nr/authwit/src/account.nr rust Note be careful to ensure that the nullifier is not deterministic and that no one could do a preimage analysis attack. 
More in [the anti pattern section on deterministic nullifiers](#deterministic-nullifiers) diff --git a/docs/docs/guides/smart_contracts/writing_contracts/initializers.md b/docs/docs/guides/smart_contracts/writing_contracts/initializers.md index 7428af87fbf..80f7d3de38d 100644 --- a/docs/docs/guides/smart_contracts/writing_contracts/initializers.md +++ b/docs/docs/guides/smart_contracts/writing_contracts/initializers.md @@ -27,4 +27,4 @@ Initializers are commonly used to set an admin, such as this example: Here, the initializer is calling a public function. It can also call a private function. Learn more about calling functions from functions [here](./call_functions.md). -To see constructors in action, check out the [Aztec.nr getting started guide](../../../getting_started/aztecnr-getting-started.md). +To see an initializer in action, check out the [Counter Contract Tutorial](../../../tutorials/contract_tutorials/counter_contract.md). diff --git a/docs/docs/reference/sandbox_reference/index.md b/docs/docs/reference/sandbox_reference/index.md index 1697ca3d415..ba339531a85 100644 --- a/docs/docs/reference/sandbox_reference/index.md +++ b/docs/docs/reference/sandbox_reference/index.md @@ -5,26 +5,6 @@ sidebar_position: 0 The Aztec Sandbox is an environment for local development on the Aztec Network. It's easy to get setup with just a single, simple command, and contains all the components needed to develop and test Aztec contracts and applications. -## Components of the Aztec network - -Aztec's Layer 2 network is a fully programmable combined private/public ZK rollup. To achieve this, the network contains the following primary components: - -- Aztec Node - Aggregates all of the 'backend' services necessary for the building and publishing of rollups. This package is currently in development and much of the functionality is mocked. 
-- [Private Execution Environment (PXE)](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/pxe) - Normally residing with the end client, this decrypts and stores a client's private state, executes simulations and submits transactions to the Aztec Node. -- [Aztec.js](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/aztec.js) - Aztec's client library for interacting with the PXE (think Ethers.js). See the getting started guide [here](../../getting_started/aztecjs-getting-started.md). - -All of this is included in the Sandbox, with the exception of Aztec.js which you can use to interact with it. - -With the help of Aztec.js you will be able to: - -- Create an account -- Deploy a contract -- Call view methods on contracts -- Simulate the calling of contract functions -- Send transactions to the network -- Be notified when transactions settle -- Query chain state such as chain id, block number etc. - ## What's in the Sandbox? The sandbox contains a local Ethereum instance running [Anvil](https://book.getfoundry.sh/anvil/), a local instance of the Aztec rollup and an aztec private execution client for handling user transactions and state. diff --git a/docs/docs/reference/sandbox_reference/sandbox-reference.md b/docs/docs/reference/sandbox_reference/sandbox-reference.md index b20ffd89175..5e9ef0fbfbd 100644 --- a/docs/docs/reference/sandbox_reference/sandbox-reference.md +++ b/docs/docs/reference/sandbox_reference/sandbox-reference.md @@ -8,67 +8,6 @@ For a quick start, follow the [guide](../../getting_started.md) to install the s ::: -## Manual Install - -You can manually install the sandbox via the underlying script used in the [Aztec Boxes](getting_started.md#run-the-npx-script). 
- -### Prerequisites - -- Node.js >= v18 (recommend installing with [nvm](https://github.com/nvm-sh/nvm)) -- Docker (visit [this page of the Docker docs](https://docs.docker.com/get-docker/) on how to install it) - -### Install the sandbox - -To install the latest Sandbox version, run: - -```bash -bash -i <(curl -s install.aztec.network) -``` - -This will install the following tools: - -- **aztec** - launches various infrastructure subsystems (sequencer, prover, pxe, etc). -- **aztec-nargo** - aztec's build of nargo, the noir compiler toolchain. -- **aztec-sandbox** - a wrapper around docker-compose that launches services needed for sandbox testing. -- **aztec-up** - a tool to upgrade the aztec toolchain to the latest, or specific versions. -- **aztec-builder** - A useful tool for projects to generate ABIs and update their dependencies. - -Once these have been installed, to start the sandbox, run: - -```bash -aztec-sandbox -``` - -### Have fun! - -**Congratulations, you have just installed and run the Aztec Sandbox!** - -```bash - /\ | | - / \ ___| |_ ___ ___ - / /\ \ |_ / __/ _ \/ __| - / ____ \ / /| || __/ (__ - /_/___ \_\/___|\__\___|\___| - -``` - -In the terminal, you will see some logs: - -1. Sandbox version -2. Contract addresses of rollup contracts -3. PXE (private execution environment) setup logs -4. Initial accounts that are shipped with the sandbox and can be used in tests - -## Running Aztec PXE / Node / P2P-Bootstrap node - -If you wish to run components of the Aztec network stack separately, you can use the `aztec start` command with various options for enabling components. 
- -```bash -aztec start --node [nodeOptions] --pxe [pxeOptions] --archiver [archiverOptions] --sequencer [sequencerOptions] --prover [proverOptions] ----p2p-bootstrap [p2pOptions] -``` - -Starting the aztec node alongside a PXE, sequencer or archiver, will attach the components to the node.Eg if you want to run a PXE separately to a node, you can [read this guide](../../aztec/concepts/pxe/index.md)/ - ## Environment Variables There are various environment variables you can use when running the whole sandbox or when running on of the available modes. diff --git a/docs/docs/getting_started/aztecjs-getting-started.md b/docs/docs/tutorials/aztecjs-getting-started.md similarity index 99% rename from docs/docs/getting_started/aztecjs-getting-started.md rename to docs/docs/tutorials/aztecjs-getting-started.md index 7d435faae95..6f9d145da5e 100644 --- a/docs/docs/getting_started/aztecjs-getting-started.md +++ b/docs/docs/tutorials/aztecjs-getting-started.md @@ -357,4 +357,4 @@ That's it! We have successfully deployed a token contract to an instance of the ## Next Steps -Write your first smart contract on the [next page](./aztecnr-getting-started.md). +Write your first account contract on the [next page](./write_accounts_contract.md). 
diff --git a/docs/docs/tutorials/contract_tutorials/advanced/_category_.json b/docs/docs/tutorials/contract_tutorials/advanced/_category_.json index b867f5fd363..81a03772f7d 100644 --- a/docs/docs/tutorials/contract_tutorials/advanced/_category_.json +++ b/docs/docs/tutorials/contract_tutorials/advanced/_category_.json @@ -1,6 +1,6 @@ { "label": "Advanced", - "position": 3, + "position": 5, "collapsible": true, "collapsed": true } diff --git a/docs/docs/getting_started/aztecnr-getting-started.md b/docs/docs/tutorials/contract_tutorials/counter_contract.md similarity index 78% rename from docs/docs/getting_started/aztecnr-getting-started.md rename to docs/docs/tutorials/contract_tutorials/counter_contract.md index 000e1c0d1d8..8d140da25f3 100644 --- a/docs/docs/getting_started/aztecnr-getting-started.md +++ b/docs/docs/tutorials/contract_tutorials/counter_contract.md @@ -1,15 +1,15 @@ --- -title: Writing Your First Smart Contract -sidebar_position: 2 +title: Counter Contract +sidebar_position: 0 --- In this guide, we will create our first Aztec.nr smart contract. We will build a simple private counter. This contract will get you started with the basic setup and syntax of Aztec.nr, but doesn't showcase the awesome stuff Aztec is capable of. -If you already have some experience with Noir and want to build a cooler contract that utilizes both private and public state, you might want to check out the [token contract tutorial instead](../tutorials/contract_tutorials/token_contract.md). +If you already have some experience with Noir and want to build a cooler contract that utilizes both private and public state, you might want to check out the [token contract tutorial instead](../../tutorials/contract_tutorials/token_contract.md). 
## Prerequisites -- You have followed the [quickstart](../getting_started.md) +- You have followed the [quickstart](../../getting_started.md) - Running Aztec Sandbox ## Set up a project @@ -116,7 +116,7 @@ Let’s create a constructor method to run on deployment that assigns an initial This function accesses the counts from storage. Then it assigns the passed initial counter to the `owner`'s counter privately using `at().add()`. -We have annotated this and other functions with `#[aztec(private)]` which are ABI macros so the compiler understands it will handle private inputs. Learn more about functions and annotations [here](../aztec/concepts/smart_contracts/functions/index.md). +We have annotated this and other functions with `#[aztec(private)]` which are ABI macros so the compiler understands it will handle private inputs. Learn more about functions and annotations [here](../../aztec/concepts/smart_contracts/functions/index.md). ## Incrementing our counter @@ -160,28 +160,4 @@ In the same directory, run this: aztec-builder codegen -o src/artifacts target ``` -You can now use the artifact and/or the TS class in your Aztec.js! If you skipped the Aztec.js getting-started guide, you can follow it [here](aztecjs-getting-started.md). This will teach you about deploying and calling contracts in Aztec.js. - -## Install Noir LSP (recommended) - -Install the [Noir Language Support extension](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir) to get syntax highlighting, syntax error detection and go-to definitions for your Aztec contracts. - -Once the extension is installed, check your nargo binary by hovering over `Nargo` in the status bar on the bottom right of the application window. Click to choose the path to `aztec-nargo` (or regular `nargo`, if you have that installed). 
- -You can print the path of your `aztec-nargo` executable by running: - -```bash -which aztec-nargo -``` - -To specify a custom nargo executable, go to the VSCode settings and search for "noir", or click extension settings on the `noir-lang` LSP plugin. -Update the `Noir: Nargo Path` field to point to your desired `aztec-nargo` executable. - -## What's next? - -The next recommmended steps are follow the tutorials in order. They will teach you more about contracts, Aztec.js, and how Aztec works in general. - -To follow the series of tutorials, start with the private voting contract [here](../tutorials/contract_tutorials/private_voting_contract.md). - -Alternatively, you can read about the high level architecture on the [Core Components page](../aztec/concepts/state_model/index.md) and [the lifecycle of a transaction](../aztec/concepts/transactions.md). - +You can now use the artifact and/or the TS class in your Aztec.js! diff --git a/docs/docs/tutorials/contract_tutorials/crowdfunding_contract.md b/docs/docs/tutorials/contract_tutorials/crowdfunding_contract.md index 11735715ce3..ce858d65ef1 100644 --- a/docs/docs/tutorials/contract_tutorials/crowdfunding_contract.md +++ b/docs/docs/tutorials/contract_tutorials/crowdfunding_contract.md @@ -1,6 +1,6 @@ --- title: "Crowdfunding contract" -sidebar_position: 2 +sidebar_position: 3 tags: [developers, tutorial, example] --- diff --git a/docs/docs/tutorials/contract_tutorials/private_voting_contract.md b/docs/docs/tutorials/contract_tutorials/private_voting_contract.md index fa6becef831..16606e5c508 100644 --- a/docs/docs/tutorials/contract_tutorials/private_voting_contract.md +++ b/docs/docs/tutorials/contract_tutorials/private_voting_contract.md @@ -1,6 +1,6 @@ --- title: "Private voting contract" -sidebar_position: 0 +sidebar_position: 1 --- import Image from '@theme/IdealImage'; @@ -164,12 +164,11 @@ Once it is compiled you can [deploy](../../reference/sandbox_reference/index.md) aztec-builder target -o src/artifacts 
``` -Once it is compiled you can [deploy](../../guides/smart_contracts/how_to_deploy_contract.md) it to the sandbox. This is out of scope for this tutorial but you can learn how to do this in the [Aztec.js getting-started guide](../../getting_started/aztecjs-getting-started.md). +Once it is compiled you can [deploy](../../guides/smart_contracts/how_to_deploy_contract.md) it to the sandbox just like you did in the [counter contract tutorial](./counter_contract.md). ## Next steps Now you have learned the foundations of Aztec smart contracts, you can start to play around with some more advanced features. Some ideas: - Add some more features into this contract, like the admin can distribute votes, people can delegate their votes, or voteIds can have more data like names, descriptions, etc -- Create a frontend for this contract using [Aztec.js](../../getting_started/aztecjs-getting-started.md). - Go to the [next tutorial](token_contract.md) and learn how to write a token contract diff --git a/docs/docs/tutorials/contract_tutorials/token_contract.md b/docs/docs/tutorials/contract_tutorials/token_contract.md index 2a21152707f..eb5618f3d52 100644 --- a/docs/docs/tutorials/contract_tutorials/token_contract.md +++ b/docs/docs/tutorials/contract_tutorials/token_contract.md @@ -1,6 +1,6 @@ --- title: "Private token contract" -sidebar_position: 1 +sidebar_position: 4 --- In this tutorial we will go through writing an L2 native token contract diff --git a/docs/docs/vision.mdx b/docs/docs/vision.mdx index 2e4ff847897..a7e748579fa 100644 --- a/docs/docs/vision.mdx +++ b/docs/docs/vision.mdx @@ -1,7 +1,7 @@ --- title: Aztec's Vision sidebar_label: Vision -sidebar_position: 1 +sidebar_position: 0 --- import Disclaimer from "@site/src/components/Disclaimers/_wip_disclaimer.mdx"; diff --git a/docs/sidebars.js b/docs/sidebars.js index 1b84e84f303..9e9383590d8 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -59,10 +59,6 @@ export default { { label: "Proving System", type: 
"category", - link: { - type: "doc", - id: "protocol-specs/cryptography/proving-system/performance-targets", - }, items: [ "protocol-specs/cryptography/proving-system/performance-targets", "protocol-specs/cryptography/proving-system/overview", @@ -72,10 +68,6 @@ export default { { label: "Hashing", type: "category", - link: { - type: "doc", - id: "protocol-specs/cryptography/hashing/hashing", - }, items: [ "protocol-specs/cryptography/hashing/hashing", "protocol-specs/cryptography/hashing/poseidon2", @@ -217,7 +209,6 @@ export default { { label: "Decentralization", type: "category", - link: { type: "doc", id: "protocol-specs/decentralization/governance" }, items: [ "protocol-specs/decentralization/actors", "protocol-specs/decentralization/governance", diff --git a/grafana_dashboards/aztec/aztec-node-dashboard.json b/grafana_dashboards/aztec/aztec-node-dashboard.json new file mode 100644 index 00000000000..863e0079d49 --- /dev/null +++ b/grafana_dashboards/aztec/aztec-node-dashboard.json @@ -0,0 +1,576 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "description": "Stats from the Aztec Node", + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 6, + "panels": [], + "title": "Node status", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "decimals": 2, + "fieldMinMax": false, + "mappings": [], + "max": 1, + "min": 0, + "noValue": "0", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + 
"unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 5, + "x": 0, + "y": 1 + }, + "id": 7, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": { + "valueSize": 64 + }, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(process_cpu_utilization)", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "CPU utilization", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 15, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineStyle": { + "fill": "solid" + }, + "lineWidth": 1, + "pointSize": 1, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 7, + "x": 5, + "y": 1 + }, + "id": 8, + "options": { + "legend": { + "calcs": 
[], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "system_memory_usage{system_memory_state=\"used\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Memory use", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 5, + "x": 12, + "y": 1 + }, + "id": 9, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "aztec_archiver_block_height", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Current block height", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "decimals": 0, + "displayName": "txs/block", + "mappings": [], + 
"min": 0, + "noValue": "0", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 7, + "x": 17, + "y": 1 + }, + "id": 10, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["lastNotNull"], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": { + "titleSize": 12 + }, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "rate(aztec_archiver_block_size_sum[$__rate_interval]) / rate(aztec_archiver_block_size_count[$__rate_interval])", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Average block size", + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 7 + }, + "id": 3, + "panels": [], + "title": "Mempool", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, 
+ "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "noValue": "0", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 8 + }, + "id": 5, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "rate(aztec_mempool_tx_size_bytes_sum[$__rate_interval]) / rate(aztec_mempool_tx_size_bytes_count[$__rate_interval])", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Tx size", + "range": true, + "refId": "Avg tx size", + "useBackend": false + } + ], + "title": "Average transaction size ", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "min": 0, + "noValue": "0", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 8 + }, + "id": 2, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["last"], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.0.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": 
"builder", + "expr": "aztec_mempool_tx_count", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "tx", + "useBackend": false + } + ], + "title": "Transactions in mempool", + "type": "stat" + } + ], + "refresh": "", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-15m", + "to": "now" + }, + "timeRangeUpdatedDuringEditOrView": false, + "timepicker": {}, + "timezone": "browser", + "title": "Aztec Node", + "uid": "edp4qxqgjoav4e", + "version": 1, + "weekStart": "" +} diff --git a/grafana_dashboards/aztec/protocol-circuits-dashboard.json b/grafana_dashboards/aztec/protocol-circuits-dashboard.json new file mode 100644 index 00000000000..1849485d30c --- /dev/null +++ b/grafana_dashboards/aztec/protocol-circuits-dashboard.json @@ -0,0 +1,747 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "description": "Metrics relating to protocol circuits", + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 3, + "panels": [], + "title": "Circuit proving", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + 
"lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "noValue": "0", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 1 + }, + "id": 4, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"base-parity\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Base parity", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"root-parity\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Root parity", + "range": true, + "refId": "B", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"base-rollup\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Base rollup", + "range": true, + "refId": "C", + "useBackend": false + 
}, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"merge-rollup\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Merge rollup", + "range": true, + "refId": "D", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"root-rollup\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Root rollup", + "range": true, + "refId": "E", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"public-kernel-setup\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Public Kernel - Setup", + "range": true, + "refId": "F", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"public-kernel-app-logic\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Public Kernel - App logic", + "range": true, + "refId": "G", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": 
"aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"public-kernel-teardown\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Public Kernel - Teardown", + "range": true, + "refId": "H", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_proving_duration_seconds{aztec_circuit_protocol_circuit_name=\"public-kernel-tail\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Public Kernel - Tail", + "range": true, + "refId": "I", + "useBackend": false + } + ], + "title": "Circuit proving", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "noValue": "0", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 1 + }, + "id": 5, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": 
{ + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"base-parity\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Base parity", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"root-parity\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Root parity", + "range": true, + "refId": "B", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"base-rollup\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Base rollup", + "range": true, + "refId": "C", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"merge-rollup\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Merge rollup", + "range": true, + "refId": "D", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": 
"aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"root-rollup\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Root rollup", + "range": true, + "refId": "E", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"public-kernel-setup\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Public Kernel - Setup", + "range": true, + "refId": "F", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"public-kernel-app-logic\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Public Kernel - App logic", + "range": true, + "refId": "G", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"public-kernel-teardown\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Public Kernel - Teardown", + "range": true, + "refId": "H", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "aztec_circuit_witness_generation_duration_seconds{aztec_circuit_protocol_circuit_name=\"public-kernel-tail\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": 
true, + "instant": false, + "legendFormat": "Public Kernel - Tail", + "range": true, + "refId": "I", + "useBackend": false + } + ], + "title": "Circuit witness generation", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 9 + }, + "id": 2, + "panels": [], + "title": "Circuit simulation", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "noValue": "0", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 10 + }, + "id": 6, + "options": { + "legend": { + "calcs": [], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "exemplar": false, + "expr": "rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"base-parity\"}[$__rate_interval]) / 
rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"base-parity\"}[$__rate_interval])", + "instant": false, + "legendFormat": "Base paritiy", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "expr": "rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"root-parity\"}[$__rate_interval]) / rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"root-parity\"}[$__rate_interval])", + "hide": true, + "instant": false, + "legendFormat": "Root paritiy", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "expr": "rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"base-rollup\"}[$__rate_interval]) / rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"base-rollup\"}[$__rate_interval])", + "hide": true, + "instant": false, + "legendFormat": "Base rollup", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "expr": "rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"merge-rollup\"}[$__rate_interval]) / rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"merge-rollup\"}[$__rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "Merge rollup", + "range": true, + "refId": "D" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "expr": "rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"root-rollup\"}[$__rate_interval]) / 
rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"root-rollup\"}[$__rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "Root rollup", + "range": true, + "refId": "E" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "expr": "rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"public-kernel-setup\"}[$__rate_interval]) / rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"public-kernel-setup\"}[$__rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "Public kernel - Setup", + "range": true, + "refId": "F" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "expr": "rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"public-kernel-app-logic\"}[$__rate_interval]) / rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"public-kernel-app-logic\"}[$__rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "Public kernel - App logic", + "range": true, + "refId": "H" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "expr": "rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"public-kernel-teardown\"}[$__rate_interval]) / rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"public-kernel-teardown\"}[$__rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "Public kernel - Teardown", + "range": true, + "refId": "I" + }, + { + "datasource": { + "type": "prometheus", + "uid": "aztec-node-metrics" + }, + "editorMode": "code", + "expr": 
"rate(aztec_circuit_simulation_duration_seconds_sum{aztec_circuit_protocol_circuit_name=\"public-kernel-tail\"}[$__rate_interval]) / rate(aztec_circuit_simulation_duration_seconds_count{aztec_circuit_protocol_circuit_name=\"public-kernel-tail\"}[$__rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "Public kernel - Tail", + "range": true, + "refId": "G" + } + ], + "title": "Circuit simulation (only when faking proofs)", + "type": "timeseries" + } + ], + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-15m", + "to": "now" + }, + "timeRangeUpdatedDuringEditOrView": false, + "timepicker": {}, + "timezone": "browser", + "title": "Protocol circuits", + "uid": "ddp5sfpkscb9cf", + "version": 3, + "weekStart": "" +} diff --git a/grafana_dashboards/default.yml b/grafana_dashboards/default.yml new file mode 100644 index 00000000000..d83924c0ffb --- /dev/null +++ b/grafana_dashboards/default.yml @@ -0,0 +1,11 @@ +apiVersion: 1 + +providers: + - name: "Aztec" + orgId: 1 + folder: "Aztec" + type: file + disableDeletion: false + editable: true + options: + path: /etc/grafana/provisioning/dashboards/aztec diff --git a/noir-projects/Dockerfile.test b/noir-projects/Dockerfile.test index 40edcbaaf35..91adc723c6a 100644 --- a/noir-projects/Dockerfile.test +++ b/noir-projects/Dockerfile.test @@ -28,7 +28,9 @@ RUN cd /usr/src/yarn-project/txe && yarn start & echo $! 
> /tmp/txe.pid && \ # Wait for TXE to initialize sleep 5 && \ cd ./noir-contracts && \ - ./bootstrap.sh && nargo test --silence-warnings --oracle-resolver http://localhost:8080 ; \ + # We need to increase the timeout since all tests running in parallel hammer TXE at the same time, and processing slows down leading to timeouts + # The only way we currently have to batch tests is via RAYON_NUM_THREADS, which is not ideal + ./bootstrap.sh && NARGO_FOREIGN_CALL_TIMEOUT=300000 nargo test --silence-warnings --oracle-resolver http://localhost:8080 ; \ kill $(cat /tmp/txe.pid) RUN cd /usr/src/yarn-project/txe && yarn start & echo $! > /tmp/txe.pid && \ diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index df8db2aa046..a828544fea2 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -58,7 +58,10 @@ test: RUN cd /usr/src/yarn-project/txe && yarn start & echo $! > /tmp/txe.pid && \ # Wait for TXE to initialize sleep 5 && \ - cd /usr/src/noir-projects/noir-contracts && nargo test --silence-warnings --oracle-resolver http://localhost:8080 ; \ + cd /usr/src/noir-projects/noir-contracts && \ + # We need to increase the timeout since all tests running in parallel hammer TXE at the same time and processing slows down, leading to timeouts + # The only way we currently have to batch tests is via RAYON_NUM_THREADS, which is not ideal + NARGO_FOREIGN_CALL_TIMEOUT=300000 nargo test --silence-warnings --oracle-resolver http://localhost:8080 ; \ kill $(cat /tmp/txe.pid) format: diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index a9b40658cc8..3f39ac213c5 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 2e5113eff6fa3209baf95f4053cda88f821a664f + commit = 65a04245e871878b76ba738dc13d2a8cc1cda2e3 method = merge cmdver = 0.4.6 - parent = 4913192d0539a407399ad77e31ab1346930c1361 + parent = 
f6b4d721f92ed87d3f865254c240e80f26a36c30 diff --git a/noir-projects/aztec-nr/authwit/src/account.nr b/noir-projects/aztec-nr/authwit/src/account.nr index c00592520a3..3f63137f08c 100644 --- a/noir-projects/aztec-nr/authwit/src/account.nr +++ b/noir-projects/aztec-nr/authwit/src/account.nr @@ -31,8 +31,8 @@ impl AccountActions<&mut PrivateContext> { } // docs:end:entrypoint - // docs:start:spend_private_authwit - pub fn spend_private_authwit(self, inner_hash: Field) -> Field { + // docs:start:verify_private_authwit + pub fn verify_private_authwit(self, inner_hash: Field) -> Field { // The `inner_hash` is "siloed" with the `msg_sender` to ensure that only it can // consume the message. // This ensures that contracts cannot consume messages that are not intended for them. @@ -44,8 +44,7 @@ impl AccountActions<&mut PrivateContext> { ); let valid_fn = self.is_valid_impl; assert(valid_fn(self.context, message_hash) == true, "Message not authorized by account"); - self.context.push_new_nullifier(message_hash, 0); IS_VALID_SELECTOR } - // docs:end:spend_private_authwit + // docs:end:verify_private_authwit } diff --git a/noir-projects/aztec-nr/authwit/src/auth.nr b/noir-projects/aztec-nr/authwit/src/auth.nr index b24bad76c17..18342ce4f7b 100644 --- a/noir-projects/aztec-nr/authwit/src/auth.nr +++ b/noir-projects/aztec-nr/authwit/src/auth.nr @@ -1,6 +1,9 @@ use dep::aztec::protocol_types::{ abis::function_selector::FunctionSelector, address::AztecAddress, - constants::{GENERATOR_INDEX__AUTHWIT_INNER, GENERATOR_INDEX__AUTHWIT_OUTER, CANONICAL_AUTH_REGISTRY_ADDRESS}, + constants::{ + GENERATOR_INDEX__AUTHWIT_INNER, GENERATOR_INDEX__AUTHWIT_OUTER, GENERATOR_INDEX__AUTHWIT_NULLIFIER, + CANONICAL_AUTH_REGISTRY_ADDRESS +}, hash::pedersen_hash }; use dep::aztec::{prelude::Deserialize, context::{PrivateContext, PublicContext, gas::GasOpts}, hash::hash_args_array}; @@ -10,20 +13,36 @@ global IS_VALID_SELECTOR = 0xabf64ad4; // 4 first bytes of keccak256("IS_VALID() // 
docs:start:assert_current_call_valid_authwit // Assert that `on_behalf_of` have authorized the current call with a valid authentication witness pub fn assert_current_call_valid_authwit(context: &mut PrivateContext, on_behalf_of: AztecAddress) { - let function_selector = FunctionSelector::from_signature("spend_private_authwit(Field)"); let inner_hash = compute_inner_authwit_hash([context.msg_sender().to_field(), context.selector().to_field(), context.args_hash]); - let result: Field = context.call_private_function(on_behalf_of, function_selector, [inner_hash]).unpack_into(); - assert(result == IS_VALID_SELECTOR, "Message not authorized by account"); + assert_inner_hash_valid_authwit(context, on_behalf_of, inner_hash); } // docs:end:assert_current_call_valid_authwit +pub fn assert_inner_hash_valid_authwit(context: &mut PrivateContext, on_behalf_of: AztecAddress, inner_hash: Field) { + // We perform a static call here and not a standard one to ensure that the account contract cannot re-enter. + let result: Field = context.static_call_private_function( + on_behalf_of, + FunctionSelector::from_signature("verify_private_authwit(Field)"), + [inner_hash] + ).unpack_into(); + assert(result == IS_VALID_SELECTOR, "Message not authorized by account"); + // Compute the nullifier, similar computation to the outer hash, but without the chain_id and version. + // Those should already be handled in the verification, so we just need something to nullify, that allow same inner_hash for multiple actors. 
+ let nullifier = compute_authwit_nullifier(on_behalf_of, inner_hash); + context.push_new_nullifier(nullifier, 0); +} + // docs:start:assert_current_call_valid_authwit_public // Assert that `on_behalf_of` have authorized the current call in a public context pub fn assert_current_call_valid_authwit_public(context: &mut PublicContext, on_behalf_of: AztecAddress) { let inner_hash = compute_inner_authwit_hash( [(*context).msg_sender().to_field(), (*context).selector().to_field(), (*context).get_args_hash()] ); + assert_inner_hash_valid_authwit_public(context, on_behalf_of, inner_hash); +} +// docs:end:assert_current_call_valid_authwit_public +pub fn assert_inner_hash_valid_authwit_public(context: &mut PublicContext, on_behalf_of: AztecAddress, inner_hash: Field) { let result: Field = context.call_public_function( AztecAddress::from_field(CANONICAL_AUTH_REGISTRY_ADDRESS), FunctionSelector::from_signature("consume((Field),Field)"), @@ -32,7 +51,6 @@ pub fn assert_current_call_valid_authwit_public(context: &mut PublicContext, on_ ).deserialize_into(); assert(result == IS_VALID_SELECTOR, "Message not authorized by account"); } -// docs:end:assert_current_call_valid_authwit_public // docs:start:compute_call_authwit_hash // Compute the message hash to be used by an authentication witness @@ -54,6 +72,13 @@ pub fn compute_inner_authwit_hash(args: [Field; N]) -> Field { pedersen_hash(args, GENERATOR_INDEX__AUTHWIT_INNER) } +pub fn compute_authwit_nullifier(on_behalf_of: AztecAddress, inner_hash: Field) -> Field { + pedersen_hash( + [on_behalf_of.to_field(), inner_hash], + GENERATOR_INDEX__AUTHWIT_NULLIFIER + ) +} + pub fn compute_outer_authwit_hash( consumer: AztecAddress, chain_id: Field, diff --git a/noir-projects/aztec-nr/authwit/src/cheatcodes.nr b/noir-projects/aztec-nr/authwit/src/cheatcodes.nr new file mode 100644 index 00000000000..f673a203277 --- /dev/null +++ b/noir-projects/aztec-nr/authwit/src/cheatcodes.nr @@ -0,0 +1,44 @@ +use dep::aztec::{ + 
protocol_types::address::AztecAddress, + context::{public_context::PublicContext, call_interfaces::CallInterface}, test::helpers::cheatcodes, + hash::hash_args +}; + +use crate::auth::{compute_inner_authwit_hash, compute_outer_authwit_hash, set_authorized}; + +pub fn add_private_authwit_from_call_interface( + on_behalf_of: AztecAddress, + caller: AztecAddress, + call_interface: C +) where C: CallInterface { + let target = call_interface.get_contract_address(); + let inputs = cheatcodes::get_private_context_inputs(cheatcodes::get_block_number()); + let chain_id = inputs.tx_context.chain_id; + let version = inputs.tx_context.version; + let args_hash = hash_args(call_interface.get_args()); + let selector = call_interface.get_selector(); + let inner_hash = compute_inner_authwit_hash([caller.to_field(), selector.to_field(), args_hash]); + let message_hash = compute_outer_authwit_hash(target, chain_id, version, inner_hash); + cheatcodes::add_authwit(on_behalf_of, message_hash); +} + +pub fn add_public_authwit_from_call_interface( + on_behalf_of: AztecAddress, + caller: AztecAddress, + call_interface: C +) where C: CallInterface { + let current_contract = cheatcodes::get_contract_address(); + cheatcodes::set_contract_address(on_behalf_of); + let target = call_interface.get_contract_address(); + let inputs = cheatcodes::get_private_context_inputs(cheatcodes::get_block_number()); + let chain_id = inputs.tx_context.chain_id; + let version = inputs.tx_context.version; + let args_hash = hash_args(call_interface.get_args()); + let selector = call_interface.get_selector(); + let inner_hash = compute_inner_authwit_hash([caller.to_field(), selector.to_field(), args_hash]); + let message_hash = compute_outer_authwit_hash(target, chain_id, version, inner_hash); + let mut inputs = cheatcodes::get_public_context_inputs(); + let mut context = PublicContext::new(inputs); + set_authorized(&mut context, message_hash, true); + cheatcodes::set_contract_address(current_contract); +} diff 
--git a/noir-projects/aztec-nr/authwit/src/lib.nr b/noir-projects/aztec-nr/authwit/src/lib.nr index e56460fd701..c4d792a4a26 100644 --- a/noir-projects/aztec-nr/authwit/src/lib.nr +++ b/noir-projects/aztec-nr/authwit/src/lib.nr @@ -2,3 +2,4 @@ mod account; mod auth_witness; mod auth; mod entrypoint; +mod cheatcodes; diff --git a/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr b/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr index dd1374f9eb0..35151d1427d 100644 --- a/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr +++ b/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr @@ -16,6 +16,7 @@ trait CallInterface { fn get_selector(self) -> FunctionSelector; fn get_name(self) -> str; fn get_contract_address(self) -> AztecAddress; + fn get_is_static(self) -> bool; } impl CallInterface for PrivateCallInterface { @@ -38,6 +39,10 @@ impl CallInterface AztecAddress { self.target_contract } + + fn get_is_static(self) -> bool { + self.is_static + } } struct PrivateCallInterface { @@ -46,7 +51,8 @@ struct PrivateCallInterface { name: str, args_hash: Field, args: [Field], - original: fn[Env](PrivateContextInputs) -> PrivateCircuitPublicInputs + original: fn[Env](PrivateContextInputs) -> PrivateCircuitPublicInputs, + is_static: bool } impl PrivateCallInterface { @@ -93,6 +99,10 @@ impl CallInterface AztecAddress { self.target_contract } + + fn get_is_static(self) -> bool { + self.is_static + } } struct PrivateVoidCallInterface { @@ -101,7 +111,8 @@ struct PrivateVoidCallInterface { name: str, args_hash: Field, args: [Field], - original: fn[Env](PrivateContextInputs) -> PrivateCircuitPublicInputs + original: fn[Env](PrivateContextInputs) -> PrivateCircuitPublicInputs, + is_static: bool } impl PrivateVoidCallInterface { @@ -144,6 +155,10 @@ impl CallInterface AztecAddress { self.target_contract } + + fn get_is_static(self) -> bool { + self.is_static + } } struct PrivateStaticCallInterface { @@ -152,7 +167,8 @@ struct 
PrivateStaticCallInterface { name: str, args_hash: Field, args: [Field], - original: fn[Env](PrivateContextInputs) -> PrivateCircuitPublicInputs + original: fn[Env](PrivateContextInputs) -> PrivateCircuitPublicInputs, + is_static: bool } impl PrivateStaticCallInterface { @@ -182,6 +198,10 @@ impl CallInterface AztecAddress { self.target_contract } + + fn get_is_static(self) -> bool { + self.is_static + } } struct PrivateStaticVoidCallInterface { @@ -190,7 +210,8 @@ struct PrivateStaticVoidCallInterface { name: str, args_hash: Field, args: [Field], - original: fn[Env](PrivateContextInputs) -> PrivateCircuitPublicInputs + original: fn[Env](PrivateContextInputs) -> PrivateCircuitPublicInputs, + is_static: bool } impl PrivateStaticVoidCallInterface { @@ -219,6 +240,10 @@ impl CallInterface for PublicCallI fn get_contract_address(self) -> AztecAddress { self.target_contract } + + fn get_is_static(self) -> bool { + self.is_static + } } struct PublicCallInterface { @@ -227,7 +252,8 @@ struct PublicCallInterface { name: str, args: [Field], gas_opts: GasOpts, - original: fn[Env](PublicContextInputs) -> T + original: fn[Env](PublicContextInputs) -> T, + is_static: bool } impl PublicCallInterface { @@ -308,6 +334,10 @@ impl CallInterface for PublicVoid fn get_contract_address(self) -> AztecAddress { self.target_contract } + + fn get_is_static(self) -> bool { + self.is_static + } } struct PublicVoidCallInterface { @@ -316,7 +346,8 @@ struct PublicVoidCallInterface { name: str, args: [Field], gas_opts: GasOpts, - original: fn[Env](PublicContextInputs) -> () + original: fn[Env](PublicContextInputs) -> (), + is_static: bool } impl PublicVoidCallInterface { @@ -378,7 +409,7 @@ impl PublicVoidCallInterface { } impl CallInterface for PublicStaticCallInterface { - fn get_args(self) -> [Field] { + fn get_args(self) -> [Field] { self.args } @@ -397,6 +428,10 @@ impl CallInterface for PublicStati fn get_contract_address(self) -> AztecAddress { self.target_contract } + + fn 
get_is_static(self) -> bool { + self.is_static + } } struct PublicStaticCallInterface { @@ -405,7 +440,8 @@ struct PublicStaticCallInterface { name: str, args: [Field], gas_opts: GasOpts, - original: fn[Env](PublicContextInputs) -> T + original: fn[Env](PublicContextInputs) -> T, + is_static: bool } impl PublicStaticCallInterface { @@ -453,6 +489,10 @@ impl CallInterface for PublicStat fn get_contract_address(self) -> AztecAddress { self.target_contract } + + fn get_is_static(self) -> bool { + self.is_static + } } struct PublicStaticVoidCallInterface { @@ -461,7 +501,8 @@ struct PublicStaticVoidCallInterface { name: str, args: [Field], gas_opts: GasOpts, - original: fn[Env](PublicContextInputs) -> () + original: fn[Env](PublicContextInputs) -> (), + is_static: bool } impl PublicStaticVoidCallInterface { diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index 71d69f484a6..25bb33ba663 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -1,5 +1,3 @@ -use crate::encrypted_logs::{payload::compute_encrypted_note_log}; - use crate::{ context::{inputs::PrivateContextInputs, packed_returns::PackedReturns}, messaging::process_l1_to_l2_message, @@ -10,7 +8,7 @@ use crate::{ key_validation_request::get_key_validation_request, arguments, returns::pack_returns, call_private_function::call_private_function_internal, header::get_header_at, logs::{ - emit_encrypted_note_log, emit_encrypted_event_log, compute_encrypted_event_log, + emit_encrypted_note_log, emit_encrypted_event_log, emit_contract_class_unencrypted_log_private_internal, emit_unencrypted_log_private_internal }, logs_traits::{LensForEncryptedLog, ToBytesForUnencryptedLog}, @@ -276,10 +274,7 @@ impl PrivateContext { // --> might be a better approach to force devs to make a public function call that emits the log if needed then // it would be less 
easy to accidentally leak information. // If we decide to keep this function around would make sense to wait for traits and then merge it with emit_unencrypted_log. - pub fn emit_unencrypted_log( - &mut self, - log: T - ) where T: ToBytesForUnencryptedLog { + pub fn emit_unencrypted_log(&mut self, log: T) where T: ToBytesForUnencryptedLog { let event_selector = 5; // TODO: compute actual event selector. let contract_address = self.this_address(); let counter = self.next_counter(); @@ -313,36 +308,7 @@ impl PrivateContext { // NB: A randomness value of 0 signals that the kernels should not mask the contract address // used in siloing later on e.g. 'handshaking' contract w/ known address. - pub fn encrypt_and_emit_event( - &mut self, - randomness: Field, // Secret random value used later for masked_contract_address - event_type_id: Field, - ovpk_m: GrumpkinPoint, - ivpk_m: GrumpkinPoint, - preimage: [Field; N] - ) where [Field; N]: LensForEncryptedLog { - let ovsk_app = self.request_ovsk_app(ovpk_m.hash()); - let contract_address = self.this_address(); - - // We are currently just encrypting it unconstrained, but otherwise the same way as if it was a note. 
- let encrypted_log: [u8; M] = compute_encrypted_event_log( - contract_address, - randomness, - event_type_id, - ovsk_app, - ovpk_m, - ivpk_m, - preimage - ); - - self.emit_raw_event_log_with_masked_address(randomness, encrypted_log); - } - - pub fn emit_raw_event_log_with_masked_address( - &mut self, - randomness: Field, - encrypted_log: [u8; M] - ) { + pub fn emit_raw_event_log_with_masked_address(&mut self, randomness: Field, encrypted_log: [u8; M]) { let counter = self.next_counter(); let contract_address = self.this_address(); let len = encrypted_log.len() as Field + 4; diff --git a/noir-projects/aztec-nr/aztec/src/context/unconstrained_context.nr b/noir-projects/aztec-nr/aztec/src/context/unconstrained_context.nr index 514995eedd8..8811a048c0d 100644 --- a/noir-projects/aztec-nr/aztec/src/context/unconstrained_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/unconstrained_context.nr @@ -2,7 +2,9 @@ use dep::protocol_types::address::AztecAddress; struct UnconstrainedContext { block_number: u32, - contract_address: AztecAddress, + contract_address: AztecAddress, + version: Field, + chain_id: Field, } impl UnconstrainedContext { @@ -13,7 +15,9 @@ impl UnconstrainedContext { // available. 
let block_number = block_number_oracle(); let contract_address = contract_address_oracle(); - Self { block_number, contract_address } + let chain_id = chain_id_oracle(); + let version = version_oracle(); + Self { block_number, contract_address, version, chain_id } } fn block_number(self) -> u32 { @@ -23,6 +27,14 @@ impl UnconstrainedContext { fn this_address(self) -> AztecAddress { self.contract_address } + + fn version(self) -> Field { + self.version + } + + fn chain_id(self) -> Field { + self.chain_id + } } #[oracle(getContractAddress)] @@ -30,3 +42,9 @@ unconstrained fn contract_address_oracle() -> AztecAddress {} #[oracle(getBlockNumber)] unconstrained fn block_number_oracle() -> u32 {} + +#[oracle(getChainId)] +unconstrained fn chain_id_oracle() -> Field {} + +#[oracle(getVersion)] +unconstrained fn version_oracle() -> Field {} diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr index 2f1b93d9aad..dfc49fc0a86 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr @@ -3,3 +3,4 @@ mod incoming_body; mod outgoing_body; mod payload; mod encrypted_note_emission; +mod encrypted_event_emission; diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr new file mode 100644 index 00000000000..a027c168515 --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr @@ -0,0 +1,45 @@ +use crate::{ + context::PrivateContext, event::event_interface::EventInterface, + encrypted_logs::payload::compute_encrypted_event_log, oracle::logs_traits::LensForEncryptedEvent +}; +use dep::protocol_types::{address::AztecAddress, grumpkin_point::GrumpkinPoint}; + +fn emit_with_keys( + context: &mut PrivateContext, + randomness: Field, + event: Event, + ovpk: GrumpkinPoint, + ivpk: GrumpkinPoint +) where Event: 
EventInterface, [u8; NB]: LensForEncryptedEvent { + let contract_address: AztecAddress = context.this_address(); + let ovsk_app: Field = context.request_ovsk_app(ovpk.hash()); + + let encrypted_log: [u8; OB] = compute_encrypted_event_log(contract_address, randomness, ovsk_app, ovpk, ivpk, event); + + context.emit_raw_event_log_with_masked_address(randomness, encrypted_log); +} + +pub fn encode_and_encrypt_event( + context: &mut PrivateContext, + randomness: Field, + ov: AztecAddress, + iv: AztecAddress +) -> fn[(&mut PrivateContext, AztecAddress, AztecAddress, Field)](Event) -> () where Event: EventInterface, [u8; NB]: LensForEncryptedEvent { + | e: Event | { + let header = context.get_header(); + let ovpk = header.get_ovpk_m(context, ov); + let ivpk = header.get_ivpk_m(context, iv); + emit_with_keys(context, randomness, e, ovpk, ivpk); + } +} + +pub fn encode_and_encrypt_event_with_keys( + context: &mut PrivateContext, + randomness: Field, + ovpk: GrumpkinPoint, + ivpk: GrumpkinPoint +) -> fn[(&mut PrivateContext, Field, GrumpkinPoint, GrumpkinPoint)](Event) -> () where Event: EventInterface, [u8; NB]: LensForEncryptedEvent { + | e: Event | { + emit_with_keys(context, randomness, e, ovpk, ivpk); + } +} diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr index 12994a4b4fa..5b66e2e8027 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr @@ -33,7 +33,7 @@ fn emit_with_keys( context.emit_raw_note_log(note_hash_counter, encrypted_log); } -pub fn encode_and_encrypt( +pub fn encode_and_encrypt_note( context: &mut PrivateContext, ov: AztecAddress, iv: AztecAddress @@ -46,7 +46,7 @@ pub fn encode_and_encrypt( } } -pub fn encode_and_encrypt_with_keys( +pub fn encode_and_encrypt_note_with_keys( context: &mut PrivateContext, ovpk: GrumpkinPoint, 
ivpk: GrumpkinPoint diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr index 98c2d42d5b9..8819906d2b0 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr @@ -44,7 +44,7 @@ fn test_encrypted_log_header() { let ciphertext = header.compute_ciphertext(secret, point); let expected_header_ciphertext = [ - 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 179, 36, 250, 95, 56, 167, 171, 16, 195, 164, 223, 57, 75, 5, 24, 119, 198, 34, 99, 189, 193, 183, 227, 43, 79, 204, 214, 89, 221, 153, 246, 64 + 228, 9, 65, 81, 62, 59, 249, 207, 90, 196, 206, 72, 39, 199, 82, 196, 23, 131, 32, 226, 26, 176, 43, 39, 239, 177, 177, 192, 85, 216, 17, 15, 18, 187, 35, 225, 135, 192, 63, 88, 29, 173, 232, 46, 72, 82, 187, 139 ]; assert_eq(ciphertext, expected_header_ciphertext); diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr index 871f5fd7771..07e0cb74b12 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr @@ -15,8 +15,8 @@ impl EncryptedLogIncomingBody { EncryptedLogIncomingBody { plaintext } } - pub fn from_event(event: T, randomness: Field) -> Self where T: EventInterface { - let mut plaintext = event.to_be_bytes(randomness); + pub fn from_event(event: T, randomness: Field) -> Self where T: EventInterface { + let mut plaintext = event.private_to_be_bytes(randomness); EncryptedLogIncomingBody { plaintext } } @@ -38,7 +38,7 @@ mod test { use dep::protocol_types::{ address::AztecAddress, traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER, grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint, traits::Serialize, - abis::function_selector::FunctionSelector + abis::event_selector::EventSelector 
}; use crate::{ @@ -60,7 +60,9 @@ mod test { impl NoteInterface for AddressNote { fn compute_note_content_hash(self) -> Field {1} - fn get_note_type_id() -> Field {1} + fn get_note_type_id() -> Field { + 1 + } fn get_header(self) -> NoteHeader { self.header} @@ -131,7 +133,7 @@ mod test { let ciphertext = body.compute_ciphertext(eph_sk, ivpk_app); let expected_note_body_ciphertext = [ - 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 47, 232, 95, 17, 240, 230, 80, 129, 174, 158, 23, 76, 114, 185, 43, 18, 254, 148, 147, 230, 66, 216, 167, 62, 180, 213, 238, 33, 108, 29, 84, 139, 99, 206, 212, 253, 92, 116, 137, 31, 0, 104, 45, 91, 250, 109, 141, 114, 189, 53, 35, 60, 108, 156, 170, 206, 150, 114, 150, 187, 198, 13, 62, 153, 133, 13, 169, 167, 242, 221, 40, 168, 186, 203, 104, 82, 47, 238, 142, 179, 90, 37, 9, 70, 245, 176, 122, 247, 42, 87, 75, 7, 20, 89, 166, 123, 14, 26, 230, 156, 49, 94, 0, 94, 72, 58, 171, 239, 115, 174, 155, 7, 151, 17, 60, 206, 193, 134, 70, 87, 215, 88, 21, 194, 63, 26, 106, 105, 124, 213, 252, 152, 192, 71, 115, 13, 181, 5, 169, 15, 170, 196, 174, 228, 170, 192, 91, 76, 110, 220, 89, 47, 248, 144, 189, 251, 167, 149, 248, 226 + 228, 9, 65, 81, 62, 59, 249, 207, 90, 196, 206, 72, 39, 199, 82, 196, 63, 127, 188, 251, 150, 188, 238, 205, 3, 86, 102, 164, 175, 12, 137, 158, 163, 111, 205, 10, 229, 230, 46, 202, 110, 107, 156, 180, 67, 192, 161, 201, 48, 153, 169, 1, 25, 182, 93, 39, 39, 207, 251, 218, 234, 147, 156, 13, 110, 180, 190, 199, 41, 6, 211, 203, 176, 110, 165, 186, 110, 127, 199, 22, 201, 149, 92, 249, 219, 68, 145, 68, 179, 29, 233, 34, 98, 123, 197, 234, 169, 53, 44, 14, 81, 60, 92, 27, 250, 134, 49, 248, 57, 119, 236, 118, 158, 104, 82, 243, 98, 164, 60, 72, 74, 27, 177, 194, 221, 225, 193, 150, 67, 235, 205, 106, 150, 24, 126, 186, 220, 178, 199, 189, 113, 54, 181, 55, 46, 15, 236, 236, 9, 159, 5, 172, 237, 154, 110, 50, 241, 64, 92, 13, 37, 53, 20, 140, 42, 146, 229, 63, 97, 25, 159, 63, 235, 104, 68, 100 
]; assert_eq(expected_note_body_ciphertext.len(), ciphertext.len()); @@ -155,17 +157,18 @@ mod test { global TEST_EVENT_LEN: Field = 3; global TEST_EVENT_BYTES_LEN = 32 * 3 + 64; + global TEST_EVENT_BYTES_LEN_WITHOUT_RANDOMNESS = 32 * 3 + 32; - impl EventInterface for TestEvent { - fn _selector(self) -> FunctionSelector { - FunctionSelector::from_signature("TestEvent(Field,Field,Field)") + impl EventInterface for TestEvent { + fn get_event_type_id() -> EventSelector { + EventSelector::from_signature("TestEvent(Field,Field,Field)") } - fn to_be_bytes(self, randomness: Field) -> [u8; TEST_EVENT_BYTES_LEN] { + fn private_to_be_bytes(self, randomness: Field) -> [u8; TEST_EVENT_BYTES_LEN] { let mut buffer: [u8; TEST_EVENT_BYTES_LEN] = [0; TEST_EVENT_BYTES_LEN]; let randomness_bytes = randomness.to_be_bytes(32); - let event_type_id_bytes = self._selector().to_field().to_be_bytes(32); + let event_type_id_bytes = TestEvent::get_event_type_id().to_field().to_be_bytes(32); for i in 0..32 { buffer[i] = randomness_bytes[i]; @@ -183,6 +186,31 @@ mod test { buffer } + + fn to_be_bytes(self) -> [u8; TEST_EVENT_BYTES_LEN_WITHOUT_RANDOMNESS] { + let mut buffer: [u8; TEST_EVENT_BYTES_LEN_WITHOUT_RANDOMNESS] = [0; TEST_EVENT_BYTES_LEN_WITHOUT_RANDOMNESS]; + + let event_type_id_bytes = TestEvent::get_event_type_id().to_field().to_be_bytes(32); + + for i in 0..32 { + buffer[i] = event_type_id_bytes[i]; + } + + let serialized_event = self.serialize(); + + for i in 0..serialized_event.len() { + let bytes = serialized_event[i].to_be_bytes(32); + for j in 0..32 { + buffer[32 + i * 32 + j] = bytes[j]; + } + } + + buffer + } + + fn emit(self, _emit: fn[Env](Self) -> ()) { + _emit(self); + } } #[test] @@ -206,7 +234,7 @@ mod test { let ciphertext = body.compute_ciphertext(eph_sk, ivpk_app); let expected_event_body_ciphertext = [ - 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 47, 232, 95, 17, 240, 230, 80, 129, 174, 158, 23, 76, 114, 185, 43, 18, 254, 148, 147, 230, 
66, 216, 167, 62, 180, 213, 238, 33, 108, 29, 84, 139, 157, 165, 187, 138, 35, 3, 236, 75, 197, 105, 102, 247, 224, 253, 13, 217, 145, 62, 96, 167, 93, 23, 18, 198, 187, 91, 8, 3, 197, 195, 127, 9, 218, 111, 125, 97, 141, 129, 142, 1, 230, 108, 35, 211, 170, 170, 170, 249, 249, 104, 68, 191, 245, 207, 182, 245, 248, 82, 175, 83, 155, 138, 208, 65, 31, 129, 251, 242, 219, 76, 17, 61, 178, 187, 108, 114, 177, 215, 175, 189, 166, 221, 94, 9, 22, 57, 151, 204, 57, 220, 129, 243, 217, 18, 101, 128, 229, 40, 254, 175, 2, 21, 31, 198, 18, 152, 169, 32, 113, 92, 37, 65, 169, 119, 95, 149, 239, 8, 23, 182, 22, 209, 207, 120, 133, 90, 252, 106 + 228, 9, 65, 81, 62, 59, 249, 207, 90, 196, 206, 72, 39, 199, 82, 196, 63, 127, 188, 251, 150, 188, 238, 205, 3, 86, 102, 164, 175, 12, 137, 158, 163, 111, 205, 10, 229, 230, 46, 202, 110, 107, 156, 180, 67, 192, 161, 201, 66, 122, 29, 35, 42, 33, 153, 216, 199, 208, 103, 207, 126, 153, 189, 136, 19, 220, 238, 15, 169, 29, 255, 11, 123, 107, 70, 192, 53, 40, 36, 93, 187, 32, 123, 136, 104, 23, 229, 245, 152, 90, 84, 2, 136, 112, 42, 27, 82, 214, 104, 14, 250, 48, 199, 245, 88, 22, 200, 77, 38, 51, 127, 56, 138, 255, 16, 46, 179, 129, 215, 185, 185, 116, 148, 16, 133, 62, 56, 180, 10, 132, 109, 77, 206, 199, 21, 167, 7, 163, 171, 158, 244, 23, 18, 121, 108, 42, 107, 7, 48, 84, 212, 104, 39, 16, 109, 7, 108, 129, 60, 80, 112, 241, 223, 140, 186, 158, 38, 74, 230, 213, 159, 175, 142, 228, 128, 160 ]; assert_eq(expected_event_body_ciphertext.len(), ciphertext.len()); diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr index 4aa90d6d282..460cc73bb85 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr @@ -99,7 +99,7 @@ mod test { let ciphertext = body.compute_ciphertext(sender_ovsk_app, eph_pk); let expected_outgoing_body_ciphertext = [ - 126, 10, 214, 
39, 130, 143, 96, 143, 79, 143, 22, 36, 55, 41, 234, 255, 226, 26, 138, 236, 91, 188, 204, 216, 172, 133, 134, 69, 161, 237, 134, 5, 75, 192, 10, 6, 229, 54, 194, 56, 103, 243, 57, 248, 147, 237, 4, 3, 39, 28, 226, 30, 237, 228, 212, 115, 246, 244, 105, 39, 129, 119, 126, 207, 176, 14, 75, 134, 241, 23, 2, 187, 239, 86, 47, 56, 239, 20, 92, 176, 70, 12, 219, 226, 150, 70, 192, 43, 125, 53, 230, 153, 135, 228, 210, 197, 76, 123, 185, 190, 61, 172, 29, 168, 241, 191, 205, 71, 136, 72, 52, 115, 232, 246, 87, 42, 50, 150, 134, 108, 225, 90, 191, 191, 182, 150, 124, 147, 78, 249, 144, 111, 122, 187, 187, 5, 249, 167, 186, 14, 228, 128, 158, 138, 55, 99, 228, 46, 219, 187, 248, 122, 70, 31, 39, 209, 127, 23, 244, 84, 14, 93, 86, 208, 155, 151, 238, 70, 63, 3, 137, 59, 206, 230, 4, 20 + 127, 84, 96, 176, 101, 107, 236, 57, 68, 8, 53, 202, 138, 74, 186, 54, 74, 193, 245, 7, 109, 59, 218, 33, 1, 31, 205, 225, 241, 209, 64, 222, 94, 245, 4, 150, 47, 241, 187, 64, 152, 20, 102, 158, 200, 217, 213, 82, 1, 240, 170, 185, 51, 80, 27, 109, 63, 231, 235, 120, 174, 44, 133, 248, 10, 97, 60, 40, 222, 190, 147, 76, 187, 48, 91, 206, 48, 106, 56, 118, 38, 127, 82, 4, 182, 188, 44, 224, 31, 129, 47, 107, 134, 252, 20, 25, 122, 191, 158, 69, 35, 255, 215, 171, 196, 45, 91, 184, 83, 80, 238, 201, 1, 233, 235, 159, 171, 130, 158, 64, 176, 165, 132, 30, 84, 81, 71, 195, 145, 47, 82, 247, 210, 192, 23, 4, 220, 90, 56, 109, 46, 105, 79, 251, 165, 141, 185, 233, 191, 118, 219, 153, 191, 162, 99, 238, 241, 249, 9, 74, 210, 241, 54, 28, 126, 226, 85, 235, 174, 75, 239, 207, 100, 184, 248, 194 ]; for i in 0..expected_outgoing_body_ciphertext.len() { diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr index 273142bca9d..16454145ec4 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr @@ -7,6 +7,7 @@ use 
dep::std::{embedded_curve_ops::{embedded_curve_add, EmbeddedCurvePoint}, fie use crate::oracle::unsafe_rand::unsafe_rand; +use crate::event::event_interface::EventInterface; use crate::note::note_interface::NoteInterface; use crate::encrypted_logs::{ @@ -14,6 +15,63 @@ use crate::encrypted_logs::{ outgoing_body::EncryptedLogOutgoingBody }; +pub fn compute_encrypted_event_log( + contract_address: AztecAddress, + randomness: Field, + ovsk_app: Field, + ovpk: GrumpkinPoint, + ivpk: GrumpkinPoint, + event: Event +) -> [u8; OB] where Event: EventInterface { + // @todo Need to draw randomness from the full domain of Fq not only Fr + let eph_sk: GrumpkinPrivateKey = fr_to_private_key(unsafe_rand()); + let eph_pk = eph_sk.derive_public_key(); + + // TODO: (#7177) This value needs to be populated! + let recipient = AztecAddress::from_field(0); + + let ivpk_app = compute_ivpk_app(ivpk, contract_address); + + let header = EncryptedLogHeader::new(contract_address); + + let incoming_header_ciphertext: [u8; 48] = header.compute_ciphertext(eph_sk, ivpk); + let outgoing_Header_ciphertext: [u8; 48] = header.compute_ciphertext(eph_sk, ovpk); + let incoming_body_ciphertext = EncryptedLogIncomingBody::from_event(event, randomness).compute_ciphertext(eph_sk, ivpk_app); + let outgoing_body_ciphertext: [u8; 176] = EncryptedLogOutgoingBody::new(eph_sk, recipient, ivpk_app).compute_ciphertext(fr_to_private_key(ovsk_app), eph_pk); + + let mut encrypted_bytes: [u8; OB] = [0; OB]; + // @todo We ignore the tags for now + + let eph_pk_bytes = eph_pk.to_be_bytes(); + for i in 0..64 { + encrypted_bytes[64 + i] = eph_pk_bytes[i]; + } + for i in 0..48 { + encrypted_bytes[128 + i] = incoming_header_ciphertext[i]; + encrypted_bytes[176 + i] = outgoing_Header_ciphertext[i]; + } + for i in 0..176 { + encrypted_bytes[224 + i] = outgoing_body_ciphertext[i]; + } + // Then we fill in the rest as the incoming body ciphertext + let size = OB - 400; + assert_eq(size, incoming_body_ciphertext.len(), 
"ciphertext length mismatch"); + for i in 0..size { + encrypted_bytes[400 + i] = incoming_body_ciphertext[i]; + } + + // Current unoptimized size of the encrypted log + // incoming_tag (32 bytes) + // outgoing_tag (32 bytes) + // eph_pk (64 bytes) + // incoming_header (48 bytes) + // outgoing_header (48 bytes) + // outgoing_body (176 bytes) + // incoming_body_fixed (64 bytes) + // incoming_body_variable (N * 32 bytes + 16 bytes padding) + encrypted_bytes +} + pub fn compute_encrypted_note_log( contract_address: AztecAddress, storage_slot: Field, @@ -26,7 +84,7 @@ pub fn compute_encrypted_note_log( let eph_sk: GrumpkinPrivateKey = fr_to_private_key(unsafe_rand()); let eph_pk = eph_sk.derive_public_key(); - // @todo This value needs to be populated! + // TODO: (#7177) This value needs to be populated! let recipient = AztecAddress::from_field(0); let ivpk_app = compute_ivpk_app(ivpk, contract_address); diff --git a/noir-projects/aztec-nr/aztec/src/event/event_interface.nr b/noir-projects/aztec-nr/aztec/src/event/event_interface.nr index fe4b63fedd7..4505dedd1ab 100644 --- a/noir-projects/aztec-nr/aztec/src/event/event_interface.nr +++ b/noir-projects/aztec-nr/aztec/src/event/event_interface.nr @@ -1,9 +1,10 @@ use crate::context::PrivateContext; use crate::note::note_header::NoteHeader; -use dep::protocol_types::{grumpkin_point::GrumpkinPoint, abis::function_selector::FunctionSelector}; +use dep::protocol_types::{grumpkin_point::GrumpkinPoint, abis::event_selector::EventSelector}; -trait EventInterface { - // Should be autogenerated by the #[aztec(event)] macro unless it is overridden by a custom implementation - fn _selector(self) -> FunctionSelector; - fn to_be_bytes(self, randomness: Field) -> [u8; N]; +trait EventInterface { + fn private_to_be_bytes(self, randomness: Field) -> [u8; NB]; + fn to_be_bytes(self) -> [u8; MB]; + fn get_event_type_id() -> EventSelector; + fn emit(self, _emit: fn[Env](Self) -> ()); } diff --git 
a/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr b/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr index 934306e32ab..4fa31d5813e 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr @@ -34,7 +34,7 @@ fn check_point_to_symmetric_key() { let key = point_to_symmetric_key(secret, point); // The following value gets updated when running encrypt_buffer.test.ts with AZTEC_GENERATE_TEST_DATA=1 let expected_key = [ - 198, 74, 242, 51, 177, 36, 183, 8, 2, 246, 197, 138, 59, 166, 86, 96, 155, 50, 186, 34, 242, 3, 208, 144, 161, 64, 69, 165, 70, 57, 226, 139 + 49, 167, 146, 222, 151, 129, 138, 184, 87, 210, 245, 249, 99, 100, 1, 59, 223, 180, 5, 99, 14, 7, 177, 236, 159, 203, 231, 72, 220, 180, 241, 23 ]; assert_eq(key, expected_key); } diff --git a/noir-projects/aztec-nr/aztec/src/keys/public_keys.nr b/noir-projects/aztec-nr/aztec/src/keys/public_keys.nr index e6c82b833d0..fe65ff9e37e 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/public_keys.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/public_keys.nr @@ -82,7 +82,8 @@ fn compute_public_keys_hash() { }; let actual = keys.hash(); - let expected_public_keys_hash = 0x1936abe4f6a920d16a9f6917f10a679507687e2cd935dd1f1cdcb1e908c027f3; + let expected_public_keys_hash = 0x2406c1c88b7afc13052335bb9af43fd35034b5ba0a9caab76eda2833cf8ec717; + assert(actual.to_field() == expected_public_keys_hash); } diff --git a/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr b/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr index 4a7a3a95e94..7fe6021326a 100644 --- a/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr +++ b/noir-projects/aztec-nr/aztec/src/note/lifecycle.nr @@ -15,12 +15,10 @@ pub fn create_note( let note_hash_counter = context.side_effect_counter; let header = NoteHeader { contract_address, storage_slot, nonce: 0, note_hash_counter }; - // TODO: change this to note.set_header(header) once 
https://github.com/noir-lang/noir/issues/4095 is fixed - Note::set_header(note, header); + note.set_header(header); let inner_note_hash = compute_inner_note_hash(*note); - // TODO: Strong typing required because of https://github.com/noir-lang/noir/issues/4088 - let serialized_note: [Field; N] = Note::serialize_content(*note); + let serialized_note = Note::serialize_content(*note); assert( notify_created_note( storage_slot, diff --git a/noir-projects/aztec-nr/aztec/src/oracle/enqueue_public_function_call.nr b/noir-projects/aztec-nr/aztec/src/oracle/enqueue_public_function_call.nr index 4bb26359cab..d512a3bf070 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/enqueue_public_function_call.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/enqueue_public_function_call.nr @@ -79,6 +79,7 @@ pub fn parse_public_call_stack_item_from_oracle(fields: [Field; ENQUEUE_PUBLIC_F // Note: Not using PublicCirclePublicInputs::deserialize here, because everything below args_hash is 0 and // there is no more data in fields because there is only ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_SIZE fields! 
+ // WARNING: if updating, see comment in public_call_stack_item.ts's PublicCallStackItem.hash() let item = PublicCallStackItem { contract_address: AztecAddress::from_field(reader.read()), function_data: FunctionData { selector: FunctionSelector::from_field(reader.read()), is_private: false }, diff --git a/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr b/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr index c6632f5a4d3..899f7f0d0d1 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr @@ -31,7 +31,6 @@ impl LensForEncryptedLog<3, 576> for [Field; 3] { impl LensForEncryptedLog<4, 608> for [Field; 4] { fn output_fields(self) -> [Field; 4] {[self[0]; 4]} fn output_bytes(self) -> [u8; 608] {[self[0] as u8; 608]} - } impl LensForEncryptedLog<5, 640> for [Field; 5] { fn output_fields(self) -> [Field; 5] {[self[0]; 5]} @@ -40,7 +39,31 @@ impl LensForEncryptedLog<5, 640> for [Field; 5] { impl LensForEncryptedLog<6, 672> for [Field; 6] { fn output_fields(self) -> [Field; 6] {[self[0]; 6]} fn output_bytes(self) -> [u8; 672] {[self[0] as u8; 672]} +} +trait LensForEncryptedEvent { + // N = event preimage input in bytes + // M = encryption output len in bytes (= 480 + M) + fn output(self: [u8; N]) -> [u8; M]; +} + +impl LensForEncryptedEvent<96, 512> for [u8; 96] { + fn output(self) -> [u8; 512] {[self[0] as u8; 512]} +} +impl LensForEncryptedEvent<128, 544> for [u8; 128] { + fn output(self) -> [u8; 544] {[self[0] as u8; 544]} +} +impl LensForEncryptedEvent<160, 576> for [u8; 160] { + fn output(self) -> [u8; 576] {[self[0] as u8; 576]} +} +impl LensForEncryptedEvent<192, 608> for [u8; 192] { + fn output(self) -> [u8; 608] {[self[0] as u8; 608]} +} +impl LensForEncryptedEvent<224, 640> for [u8; 224] { + fn output(self) -> [u8; 640] {[self[0] as u8; 640]} +} +impl LensForEncryptedEvent<256, 672> for [u8; 256] { + fn output(self) -> [u8; 672] {[self[0] as u8; 672]} } // This trait defines 
the length of the inputs in bytes to diff --git a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr index 42c6bcdb7ee..4d7aad6f6e2 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr @@ -145,8 +145,7 @@ unconstrained pub fn get_notes( let header = NoteHeader { contract_address, nonce, storage_slot, note_hash_counter }; let serialized_note = arr_copy_slice(fields, [0; N], read_offset + 2); let mut note = Note::deserialize_content(serialized_note); - // TODO: change this to note.set_header(header) once https://github.com/noir-lang/noir/issues/4095 is fixed - Note::set_header(&mut note, header); + note.set_header(header); placeholder_opt_notes[i] = Option::some(note); }; } diff --git a/noir-projects/aztec-nr/aztec/src/test/helpers.nr b/noir-projects/aztec-nr/aztec/src/test/helpers.nr index b28a85add1c..b7164a82359 100644 --- a/noir-projects/aztec-nr/aztec/src/test/helpers.nr +++ b/noir-projects/aztec-nr/aztec/src/test/helpers.nr @@ -1,4 +1,4 @@ mod test_environment; mod cheatcodes; -mod types; +mod utils; mod keys; diff --git a/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr b/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr index 014757cf9b0..db5e13ed424 100644 --- a/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr +++ b/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr @@ -1,6 +1,9 @@ -use dep::protocol_types::{abis::function_selector::FunctionSelector, address::{AztecAddress, PartialAddress}}; +use dep::protocol_types::{ + abis::function_selector::FunctionSelector, address::{AztecAddress, PartialAddress}, + constants::CONTRACT_INSTANCE_LENGTH, contract_instance::ContractInstance +}; use crate::context::inputs::{PublicContextInputs, PrivateContextInputs}; -use crate::test::helpers::types::{Deployer, TestAccount}; +use crate::test::helpers::utils::{Deployer, TestAccount}; use 
crate::keys::public_keys::PublicKeys; unconstrained pub fn reset() { @@ -19,8 +22,8 @@ unconstrained pub fn get_block_number() -> u32 { oracle_get_block_number() } -unconstrained pub fn advance_blocks(blocks: u32) { - oracle_time_travel(blocks); +unconstrained pub fn advance_blocks_by(blocks: u32) { + oracle_advance_blocks_by(blocks); } unconstrained pub fn get_private_context_inputs(historical_block_number: u32) -> PrivateContextInputs { @@ -31,20 +34,12 @@ unconstrained pub fn get_public_context_inputs() -> PublicContextInputs { oracle_get_public_context_inputs() } -unconstrained pub fn deploy( - path: str, - initializer: str, - args: [Field], - public_keys_hash: Field -) -> AztecAddress { - oracle_deploy(path, initializer, args, public_keys_hash) +unconstrained pub fn deploy(path: str, initializer: str, args: [Field], public_keys_hash: Field) -> ContractInstance { + let instance_fields = oracle_deploy(path, initializer, args, public_keys_hash); + ContractInstance::deserialize(instance_fields) } -unconstrained pub fn direct_storage_write( - contract_address: AztecAddress, - storage_slot: Field, - fields: [Field; N] -) { +unconstrained pub fn direct_storage_write(contract_address: AztecAddress, storage_slot: Field, fields: [Field; N]) { let _hash = direct_storage_write_oracle(contract_address, storage_slot, fields); } @@ -72,6 +67,40 @@ unconstrained pub fn get_side_effects_counter() -> u32 { oracle_get_side_effects_counter() } +unconstrained pub fn add_authwit(address: AztecAddress, message_hash: Field) { + orable_add_authwit(address, message_hash) +} + +unconstrained pub fn assert_public_call_fails(target_address: AztecAddress, function_selector: FunctionSelector, args: [Field]) { + oracle_assert_public_call_fails(target_address, function_selector, args) +} + +unconstrained pub fn assert_private_call_fails( + target_address: AztecAddress, + function_selector: FunctionSelector, + argsHash: Field, + sideEffectsCounter: Field, + isStaticCall: bool, + 
isDelegateCall: bool +) { + oracle_assert_private_call_fails( + target_address, + function_selector, + argsHash, + sideEffectsCounter, + isStaticCall, + isDelegateCall + ) +} + +unconstrained pub fn add_nullifiers(contractAddress: AztecAddress, nullifiers: [Field]) { + oracle_add_nullifiers(contractAddress, nullifiers) +} + +unconstrained pub fn add_note_hashes(contractAddress: AztecAddress, inner_note_hashes: [Field]) { + oracle_add_note_hashes(contractAddress, inner_note_hashes) +} + #[oracle(reset)] fn oracle_reset() {} @@ -84,8 +113,8 @@ fn oracle_set_contract_address(address: AztecAddress) {} #[oracle(getBlockNumber)] fn oracle_get_block_number() -> u32 {} -#[oracle(timeTravel)] -fn oracle_time_travel(blocks: u32) {} +#[oracle(advanceBlocksBy)] +fn oracle_advance_blocks_by(blocks: u32) {} #[oracle(getPrivateContextInputs)] fn oracle_get_private_context_inputs(historical_block_number: u32) -> PrivateContextInputs {} @@ -99,7 +128,7 @@ fn oracle_deploy( initializer: str, args: [Field], public_keys_hash: Field -) -> AztecAddress {} +) -> [Field; CONTRACT_INSTANCE_LENGTH] {} #[oracle(directStorageWrite)] fn direct_storage_write_oracle( @@ -125,3 +154,30 @@ fn oracle_set_msg_sender(msg_sender: AztecAddress) {} #[oracle(getSideEffectsCounter)] fn oracle_get_side_effects_counter() -> u32 {} + +#[oracle(addAuthWitness)] +fn orable_add_authwit(address: AztecAddress, message_hash: Field) {} + +#[oracle(assertPublicCallFails)] +fn oracle_assert_public_call_fails( + target_address: AztecAddress, + function_selector: FunctionSelector, + args: [Field] +) {} + +#[oracle(assertPrivateCallFails)] +fn oracle_assert_private_call_fails( + target_address: AztecAddress, + function_selector: FunctionSelector, + argsHash: Field, + sideEffectsCounter: Field, + isStaticCall: bool, + isDelegateCall: bool +) {} + +#[oracle(addNullifiers)] +fn oracle_add_nullifiers(contractAddress: AztecAddress, nullifiers: [Field]) {} + +#[oracle(addNoteHashes)] +fn 
oracle_add_note_hashes(contractAddress: AztecAddress, inner_note_hashes: [Field]) {} + diff --git a/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr b/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr index 4f2800b19fc..9b66e64264b 100644 --- a/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr +++ b/noir-projects/aztec-nr/aztec/src/test/helpers/test_environment.nr @@ -8,9 +8,9 @@ use crate::context::inputs::{PublicContextInputs, PrivateContextInputs}; use crate::context::{packed_returns::PackedReturns, call_interfaces::CallInterface}; use crate::context::{PrivateContext, PublicContext, PrivateVoidCallInterface}; -use crate::test::helpers::{cheatcodes, types::{Deployer, TestAccount}, keys}; +use crate::test::helpers::{cheatcodes, utils::{apply_side_effects_private, Deployer, TestAccount}, keys}; use crate::keys::constants::{NULLIFIER_INDEX, INCOMING_INDEX, OUTGOING_INDEX, TAGGING_INDEX}; -use crate::hash::hash_args; +use crate::hash::{hash_args, hash_args_array}; use crate::note::{ note_header::NoteHeader, note_interface::NoteInterface, @@ -18,16 +18,12 @@ use crate::note::{ }; use crate::oracle::notes::notify_created_note; -struct TestEnvironment { - contract_address: Option, - args_hash: Option, - function_selector: Option -} +struct TestEnvironment {} impl TestEnvironment { fn new() -> Self { cheatcodes::reset(); - Self { contract_address: Option::none(), args_hash: Option::none(), function_selector: Option::none() } + Self {} } fn block_number(self) -> u32 { @@ -40,7 +36,7 @@ impl TestEnvironment { } fn advance_block_by(&mut self, blocks: u32) { - cheatcodes::advance_blocks(blocks); + cheatcodes::advance_blocks_by(blocks); } fn public(self) -> PublicContext { @@ -74,26 +70,41 @@ impl TestEnvironment { test_account.address } - fn create_account_contract(self, secret: Field) -> AztecAddress { + fn create_account_contract(&mut self, secret: Field) -> AztecAddress { let public_keys = cheatcodes::derive_keys(secret); - 
let args = &[public_keys.ivpk_m.x, public_keys.ivpk_m.y]; - let address = cheatcodes::deploy( + let args = [public_keys.ivpk_m.x, public_keys.ivpk_m.y]; + let instance = cheatcodes::deploy( "@aztec/noir-contracts.js/SchnorrAccount", "constructor", - args, + args.as_slice(), public_keys.hash().to_field() ); - cheatcodes::advance_blocks(1); - let test_account = cheatcodes::add_account(secret, PartialAddress::from_field(address.to_field())); - let address = test_account.address; + cheatcodes::advance_blocks_by(1); + let test_account = cheatcodes::add_account( + secret, + PartialAddress::compute( + instance.contract_class_id, + instance.salt, + instance.initialization_hash, + instance.deployer + ) + ); let keys = test_account.keys; + let address = instance.to_address(); + keys::store_master_key(NULLIFIER_INDEX, address, keys.npk_m); keys::store_master_key(INCOMING_INDEX, address, keys.ivpk_m); keys::store_master_key(OUTGOING_INDEX, address, keys.ovpk_m); keys::store_master_key(TAGGING_INDEX, address, keys.tpk_m); - test_account.address + let selector = FunctionSelector::from_signature("constructor(Field,Field)"); + + let mut context = self.private_at(cheatcodes::get_block_number()); + + let _ = context.call_private_function(address, selector, args); + + address } fn deploy(self, path: str) -> Deployer { @@ -113,7 +124,9 @@ impl TestEnvironment { cheatcodes::set_msg_sender(original_contract_address); let mut inputs = cheatcodes::get_private_context_inputs(cheatcodes::get_block_number() - 1); inputs.call_context.function_selector = call_interface.get_selector(); + inputs.call_context.is_static_call = call_interface.get_is_static(); let public_inputs = original_fn(inputs); + apply_side_effects_private(target_address, public_inputs); cheatcodes::set_contract_address(original_contract_address); cheatcodes::set_msg_sender(original_msg_sender); @@ -133,7 +146,9 @@ impl TestEnvironment { cheatcodes::set_msg_sender(original_contract_address); let mut inputs = 
cheatcodes::get_private_context_inputs(cheatcodes::get_block_number() - 1); inputs.call_context.function_selector = call_interface.get_selector(); + inputs.call_context.is_static_call = call_interface.get_is_static(); let public_inputs = original_fn(inputs); + apply_side_effects_private(target_address, public_inputs); cheatcodes::set_contract_address(original_contract_address); cheatcodes::set_msg_sender(original_msg_sender); @@ -151,6 +166,7 @@ impl TestEnvironment { let mut inputs = cheatcodes::get_public_context_inputs(); inputs.selector = call_interface.get_selector().to_field(); inputs.args_hash = hash_args(call_interface.get_args()); + inputs.is_static_call = call_interface.get_is_static(); let result = original_fn(inputs); cheatcodes::set_contract_address(original_contract_address); @@ -158,21 +174,23 @@ impl TestEnvironment { result } - fn call_public_void(self, call_interface: C) where C: CallInterface { - let original_fn = call_interface.get_original(); - let original_msg_sender = cheatcodes::get_msg_sender(); - let original_contract_address = cheatcodes::get_contract_address(); - let target_address = call_interface.get_contract_address(); - - cheatcodes::set_contract_address(target_address); - cheatcodes::set_msg_sender(original_contract_address); - let mut inputs = cheatcodes::get_public_context_inputs(); - inputs.selector = call_interface.get_selector().to_field(); - inputs.args_hash = hash_args(call_interface.get_args()); - original_fn(inputs); + fn assert_public_call_fails(self, call_interface: C) where C: CallInterface { + cheatcodes::assert_public_call_fails( + call_interface.get_contract_address(), + call_interface.get_selector(), + call_interface.get_args() + ); + } - cheatcodes::set_contract_address(original_contract_address); - cheatcodes::set_msg_sender(original_msg_sender); + fn assert_private_call_fails(self, call_interface: C) where C: CallInterface { + cheatcodes::assert_private_call_fails( + call_interface.get_contract_address(), + 
call_interface.get_selector(), + hash_args(call_interface.get_args()), + cheatcodes::get_side_effects_counter() as Field, + call_interface.get_is_static(), + false + ); } pub fn store_note_in_cache( @@ -186,12 +204,9 @@ impl TestEnvironment { let note_hash_counter = cheatcodes::get_side_effects_counter(); let header = NoteHeader { contract_address, storage_slot, nonce: 0, note_hash_counter }; - // TODO: change this to note.set_header(header) once https://github.com/noir-lang/noir/issues/4095 is fixed - Note::set_header(note, header); + note.set_header(header); let inner_note_hash = compute_inner_note_hash(*note); - - // TODO: Strong typing required because of https://github.com/noir-lang/noir/issues/4088 - let serialized_note: [Field; N] = Note::serialize_content(*note); + let serialized_note = Note::serialize_content(*note); assert( notify_created_note( storage_slot, diff --git a/noir-projects/aztec-nr/aztec/src/test/helpers/types.nr b/noir-projects/aztec-nr/aztec/src/test/helpers/utils.nr similarity index 67% rename from noir-projects/aztec-nr/aztec/src/test/helpers/types.nr rename to noir-projects/aztec-nr/aztec/src/test/helpers/utils.nr index 7baec3523d8..808b5ad37f5 100644 --- a/noir-projects/aztec-nr/aztec/src/test/helpers/types.nr +++ b/noir-projects/aztec-nr/aztec/src/test/helpers/utils.nr @@ -1,6 +1,7 @@ use dep::protocol_types::{ traits::{Deserialize, Serialize}, address::AztecAddress, - abis::{function_selector::FunctionSelector, private_circuit_public_inputs::PrivateCircuitPublicInputs} + abis::{function_selector::FunctionSelector, private_circuit_public_inputs::PrivateCircuitPublicInputs}, + contract_instance::ContractInstance }; use crate::context::inputs::{PublicContextInputs, PrivateContextInputs}; @@ -9,6 +10,25 @@ use crate::test::helpers::cheatcodes; use crate::keys::public_keys::{PUBLIC_KEYS_LENGTH, PublicKeys}; use crate::hash::hash_args; +use crate::oracle::notes::notify_nullified_note; + +pub fn apply_side_effects_private(contract_address: 
AztecAddress, public_inputs: PrivateCircuitPublicInputs) { + let mut nullifiers = &[]; + for nullifier in public_inputs.new_nullifiers { + if nullifier.value != 0 { + nullifiers = nullifiers.push_back(nullifier.value); + } + } + cheatcodes::add_nullifiers(contract_address, nullifiers); + let mut note_hashes = &[]; + for note_hash in public_inputs.new_note_hashes { + if note_hash.value != 0 { + note_hashes = note_hashes.push_back(note_hash.value); + } + } + cheatcodes::add_note_hashes(contract_address, note_hashes); +} + struct Deployer { path: str, public_keys_hash: Field @@ -18,14 +38,15 @@ impl Deployer { pub fn with_private_initializer( self, call_interface: C - ) -> AztecAddress where C: CallInterface { - let address = cheatcodes::deploy( + ) -> ContractInstance where C: CallInterface { + let instance = cheatcodes::deploy( self.path, call_interface.get_name(), call_interface.get_args(), self.public_keys_hash ); - cheatcodes::advance_blocks(1); + let address = instance.to_address(); + cheatcodes::advance_blocks_by(1); let block_number = cheatcodes::get_block_number(); let original_fn = call_interface.get_original(); let original_msg_sender = cheatcodes::get_msg_sender(); @@ -35,29 +56,30 @@ impl Deployer { cheatcodes::set_msg_sender(original_contract_address); let mut inputs = cheatcodes::get_private_context_inputs(block_number - 1); inputs.call_context.function_selector = call_interface.get_selector(); - let _result = original_fn(inputs); - + let public_inputs = original_fn(inputs); + apply_side_effects_private(address, public_inputs); + cheatcodes::advance_blocks_by(1); cheatcodes::set_contract_address(original_contract_address); cheatcodes::set_msg_sender(original_msg_sender); - address + instance } pub fn with_public_initializer( self, call_interface: C - ) -> AztecAddress where C: CallInterface { - let address = cheatcodes::deploy( + ) -> ContractInstance where C: CallInterface { + let instance = cheatcodes::deploy( self.path, call_interface.get_name(), 
call_interface.get_args(), self.public_keys_hash ); - cheatcodes::advance_blocks(1); + cheatcodes::advance_blocks_by(1); let original_fn = call_interface.get_original(); let original_msg_sender = cheatcodes::get_msg_sender(); let original_contract_address = cheatcodes::get_contract_address(); - cheatcodes::set_contract_address(address); + cheatcodes::set_contract_address(instance.to_address()); cheatcodes::set_msg_sender(original_contract_address); let mut inputs = cheatcodes::get_public_context_inputs(); inputs.selector = call_interface.get_selector().to_field(); @@ -66,12 +88,11 @@ impl Deployer { cheatcodes::set_contract_address(original_contract_address); cheatcodes::set_msg_sender(original_msg_sender); - address + instance } - pub fn without_initializer(self) -> AztecAddress { - let address = cheatcodes::deploy(self.path, "", &[], self.public_keys_hash); - address + pub fn without_initializer(self) -> ContractInstance { + cheatcodes::deploy(self.path, "", &[], self.public_keys_hash) } } diff --git a/noir-projects/aztec-nr/easy-private-state/src/easy_private_uint.nr b/noir-projects/aztec-nr/easy-private-state/src/easy_private_uint.nr index ff23dd0e159..1a84b86acba 100644 --- a/noir-projects/aztec-nr/easy-private-state/src/easy_private_uint.nr +++ b/noir-projects/aztec-nr/easy-private-state/src/easy_private_uint.nr @@ -1,7 +1,7 @@ use dep::aztec::{ context::PrivateContext, protocol_types::{address::AztecAddress, grumpkin_point::GrumpkinPoint}, note::note_getter_options::NoteGetterOptions, state_vars::PrivateSet, - encrypted_logs::encrypted_note_emission::encode_and_encrypt + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note }; use dep::value_note::{filter::filter_notes_min_sum, value_note::ValueNote}; @@ -30,7 +30,7 @@ impl EasyPrivateUint<&mut PrivateContext> { // Insert the new note to the owner's set of notes. 
// docs:start:insert - self.set.insert(&mut addend_note).emit(encode_and_encrypt(self.context, outgoing_viewer, owner)); + self.set.insert(&mut addend_note).emit(encode_and_encrypt_note(self.context, outgoing_viewer, owner)); // docs:end:insert } @@ -63,6 +63,6 @@ impl EasyPrivateUint<&mut PrivateContext> { // Creates change note for the owner. let result_value = minuend - subtrahend; let mut result_note = ValueNote::new(result_value as Field, owner_npk_m_hash); - self.set.insert(&mut result_note).emit(encode_and_encrypt(self.context, outgoing_viewer, owner)); + self.set.insert(&mut result_note).emit(encode_and_encrypt_note(self.context, outgoing_viewer, owner)); } } diff --git a/noir-projects/aztec-nr/value-note/src/utils.nr b/noir-projects/aztec-nr/value-note/src/utils.nr index 927f8325281..44094999c76 100644 --- a/noir-projects/aztec-nr/value-note/src/utils.nr +++ b/noir-projects/aztec-nr/value-note/src/utils.nr @@ -1,7 +1,7 @@ use dep::aztec::prelude::{AztecAddress, PrivateContext, PrivateSet, NoteGetterOptions}; use dep::aztec::note::note_getter_options::SortOrder; use dep::aztec::protocol_types::grumpkin_point::GrumpkinPoint; -use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt; +use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note; use crate::{filter::filter_notes_min_sum, value_note::{ValueNote, VALUE_NOTE_LEN, VALUE_NOTE_BYTES_LEN}}; // Sort the note values (0th field) in descending order. @@ -23,7 +23,7 @@ pub fn increment( let mut note = ValueNote::new(amount, recipient_npk_m_hash); // Insert the new note to the owner's set of notes and emit the log if value is non-zero. - balance.insert(&mut note).emit(encode_and_encrypt(balance.context, outgoing_viewer, recipient)); + balance.insert(&mut note).emit(encode_and_encrypt_note(balance.context, outgoing_viewer, recipient)); } // Find some of the `owner`'s notes whose values add up to the `amount`. 
diff --git a/noir-projects/noir-contracts/Nargo.toml b/noir-projects/noir-contracts/Nargo.toml index 53dd5747074..4e0dae683c9 100644 --- a/noir-projects/noir-contracts/Nargo.toml +++ b/noir-projects/noir-contracts/Nargo.toml @@ -3,6 +3,7 @@ members = [ "contracts/app_subscription_contract", "contracts/auth_contract", "contracts/auth_registry_contract", + "contracts/auth_wit_test_contract", "contracts/avm_initializer_test_contract", "contracts/avm_test_contract", "contracts/fpc_contract", diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index df46548453e..d9c5e6e6b1d 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -9,7 +9,7 @@ contract AppSubscription { AztecAddress, FunctionSelector, PrivateContext, NoteHeader, Map, PrivateMutable, PublicMutable, SharedImmutable }, - encrypted_logs::encrypted_note_emission::encode_and_encrypt, + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note, protocol_types::{traits::is_empty, grumpkin_point::GrumpkinPoint} }, authwit::{auth_witness::get_auth_witness, auth::assert_current_call_valid_authwit}, @@ -45,7 +45,7 @@ contract AppSubscription { // We are emitting both the outgoing and the incoming logs to the subscriber here because passing a separate // outgoing_viewer arg to entrypoint function is impractical and the outgoing are not so valuable here. 
- storage.subscriptions.at(user_address).replace(&mut note).emit(encode_and_encrypt(&mut context, user_address, user_address)); + storage.subscriptions.at(user_address).replace(&mut note).emit(encode_and_encrypt_note(&mut context, user_address, user_address)); context.set_as_fee_payer(); @@ -116,7 +116,7 @@ contract AppSubscription { let subscriber_npk_m_hash = header.get_npk_m_hash(&mut context, subscriber_address); let mut subscription_note = SubscriptionNote::new(subscriber_npk_m_hash, expiry_block_number, tx_count); - storage.subscriptions.at(subscriber_address).initialize_or_replace(&mut subscription_note).emit(encode_and_encrypt(&mut context, context.msg_sender(), subscriber_address)); + storage.subscriptions.at(subscriber_address).initialize_or_replace(&mut subscription_note).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), subscriber_address)); } unconstrained fn is_initialized(subscriber_address: AztecAddress) -> pub bool { diff --git a/noir-projects/noir-contracts/contracts/auth_wit_test_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/auth_wit_test_contract/Nargo.toml new file mode 100644 index 00000000000..dc0fb24920c --- /dev/null +++ b/noir-projects/noir-contracts/contracts/auth_wit_test_contract/Nargo.toml @@ -0,0 +1,9 @@ +[package] +name = "auth_wit_test_contract" +authors = [""] +compiler_version = ">=0.25.0" +type = "contract" + +[dependencies] +aztec = { path = "../../../aztec-nr/aztec" } +authwit = { path = "../../../aztec-nr/authwit" } diff --git a/noir-projects/noir-contracts/contracts/auth_wit_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/auth_wit_test_contract/src/main.nr new file mode 100644 index 00000000000..997d53439a6 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/auth_wit_test_contract/src/main.nr @@ -0,0 +1,14 @@ +contract AuthWitTest { + use dep::aztec::protocol_types::address::AztecAddress; + use dep::authwit::auth::{assert_inner_hash_valid_authwit, 
assert_inner_hash_valid_authwit_public}; + + #[aztec(private)] + fn consume(on_behalf_of: AztecAddress, inner_hash: Field) { + assert_inner_hash_valid_authwit(&mut context, on_behalf_of, inner_hash); + } + + #[aztec(public)] + fn consume_public(on_behalf_of: AztecAddress, inner_hash: Field) { + assert_inner_hash_valid_authwit_public(&mut context, on_behalf_of, inner_hash); + } +} diff --git a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr index d870e8564f8..a9dd932bdac 100644 --- a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr @@ -363,19 +363,19 @@ contract AvmTest { // Use the standard context interface to check for a nullifier #[aztec(public)] fn nullifier_exists(nullifier: Field) -> bool { - context.nullifier_exists(nullifier, context.this_address()) + context.nullifier_exists(nullifier, context.storage_address()) } #[aztec(public)] fn assert_nullifier_exists(nullifier: Field) { - assert(context.nullifier_exists(nullifier, context.this_address()), "Nullifier doesn't exist!"); + assert(context.nullifier_exists(nullifier, context.storage_address()), "Nullifier doesn't exist!"); } // Use the standard context interface to emit a new nullifier #[aztec(public)] fn emit_nullifier_and_check(nullifier: Field) { context.push_new_nullifier(nullifier, 0); - let exists = context.nullifier_exists(nullifier, context.this_address()); + let exists = context.nullifier_exists(nullifier, context.storage_address()); assert(exists, "Nullifier was just created, but its existence wasn't detected!"); } diff --git a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr index e8ae03dd953..7065267bc2e 100644 --- a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr +++ 
b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr @@ -6,7 +6,7 @@ use dep::aztec::{ traits::{ToField, Serialize, FromField}, grumpkin_point::GrumpkinPoint, constants::MAX_NOTE_HASH_READ_REQUESTS_PER_CALL }, - encrypted_logs::encrypted_note_emission::encode_and_encrypt_with_keys, + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys, note::note_getter::view_notes, state_vars::PrivateSet, note::constants::MAX_NOTES_PER_PAGE }; use dep::std; @@ -114,7 +114,7 @@ impl Deck<&mut PrivateContext> { let mut inserted_cards = &[]; for card in cards { let mut card_note = CardNote::from_card(card, owner_npk_m_hash); - self.set.insert(&mut card_note.note).emit(encode_and_encrypt_with_keys(self.set.context, msg_sender_ovpk_m, owner_ivpk_m)); + self.set.insert(&mut card_note.note).emit(encode_and_encrypt_note_with_keys(self.set.context, msg_sender_ovpk_m, owner_ivpk_m)); inserted_cards = inserted_cards.push_back(card_note); } diff --git a/noir-projects/noir-contracts/contracts/child_contract/src/main.nr b/noir-projects/noir-contracts/contracts/child_contract/src/main.nr index ab9483cce2a..bd7220461a1 100644 --- a/noir-projects/noir-contracts/contracts/child_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/child_contract/src/main.nr @@ -6,7 +6,7 @@ contract Child { context::gas::GasOpts, protocol_types::{abis::call_context::CallContext, grumpkin_point::GrumpkinPoint}, note::{note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - encrypted_logs::encrypted_note_emission::encode_and_encrypt + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note }; use dep::value_note::value_note::ValueNote; @@ -56,7 +56,7 @@ contract Child { let owner_npk_m_hash = header.get_npk_m_hash(&mut context, owner); let mut note = ValueNote::new(new_value, owner_npk_m_hash); - storage.a_map_with_private_values.at(owner).insert(&mut note).emit(encode_and_encrypt(&mut context, owner, owner)); + 
storage.a_map_with_private_values.at(owner).insert(&mut note).emit(encode_and_encrypt_note(&mut context, owner, owner)); new_value } diff --git a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr index b843313be4b..27631ddbe72 100644 --- a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr @@ -44,15 +44,19 @@ contract Counter { use dep::aztec::note::note_viewer_options::NoteViewerOptions; #[test] - fn test_initialize() { + fn test_increment() { // Setup env, generate keys let mut env = TestEnvironment::new(); let owner = env.create_account(); let outgoing_viewer = env.create_account(); + let initial_value: Field = 5; + cheatcodes::set_contract_address(owner); // Deploy contract and initialize - let initializer = Counter::interface().initialize(5, owner, outgoing_viewer); - let contract_address = env.deploy("@aztec/noir-contracts.js/Counter").with_private_initializer(initializer); + let initializer = Counter::interface().initialize(initial_value as u64, owner, outgoing_viewer); + let counter_contract = env.deploy("@aztec/noir-contracts.js/Counter").with_private_initializer(initializer); + let contract_address = counter_contract.to_address(); + // Read the stored value in the note cheatcodes::set_contract_address(contract_address); @@ -60,6 +64,18 @@ contract Counter { let owner_slot = derive_storage_slot_in_map(counter_slot, owner); let mut options = NoteViewerOptions::new(); let notes: BoundedVec = view_notes(owner_slot, options); - assert(notes.get(0).value == 5); + let initial_note_value = notes.get(0).value; + assert( + initial_note_value == initial_value, f"Expected {initial_value} but got {initial_note_value}" + ); + + // Increment the counter + let increment_call_interface = Counter::at(contract_address).increment(owner, outgoing_viewer); + 
env.call_private_void(increment_call_interface); + let current_value_for_owner = get_counter(owner); + let expected_current_value = initial_value + 1; + assert( + expected_current_value == current_value_for_owner, f"Expected {expected_current_value} but got {current_value_for_owner}" + ); } } diff --git a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr index d5f932c4a75..9e43661a329 100644 --- a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr @@ -8,7 +8,7 @@ contract Crowdfunding { abis::function_selector::FunctionSelector, address::AztecAddress, traits::Serialize, grumpkin_point::GrumpkinPoint }, - encrypted_logs::encrypted_note_emission::encode_and_encrypt, + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note, state_vars::{PrivateSet, PublicImmutable, SharedImmutable} }; use dep::value_note::value_note::ValueNote; @@ -17,14 +17,8 @@ contract Crowdfunding { #[aztec(event)] struct WithdrawalProcessed { - who: AztecAddress, - amount: u64, - } - - impl Serialize<2> for WithdrawalProcessed { - fn serialize(self: Self) -> [Field; 2] { - [self.who.to_field(), self.amount as Field] - } + who: Field, + amount: Field, } // docs:start:storage @@ -87,7 +81,7 @@ contract Crowdfunding { // contract by proving that the hash of this note exists in the note hash tree. 
let donor_npk_m_hash = header.get_npk_m_hash(&mut context, donor); let mut note = ValueNote::new(amount as Field, donor_npk_m_hash); - storage.donation_receipts.insert(&mut note).emit(encode_and_encrypt(&mut context, donor, donor)); + storage.donation_receipts.insert(&mut note).emit(encode_and_encrypt_note(&mut context, donor, donor)); } // docs:end:donate @@ -103,7 +97,7 @@ contract Crowdfunding { Token::at(storage.donation_token.read_private()).transfer(operator_address, amount as Field).call(&mut context); // 3) Emit an unencrypted event so that anyone can audit how much the operator has withdrawn - let event = WithdrawalProcessed { amount, who: operator_address }; + let event = WithdrawalProcessed { amount: amount as Field, who: operator_address.to_field() }; context.emit_unencrypted_log(event.serialize()); } // docs:end:operator-withdrawals diff --git a/noir-projects/noir-contracts/contracts/delegated_on_contract/src/main.nr b/noir-projects/noir-contracts/contracts/delegated_on_contract/src/main.nr index da7a24ce09b..7a848446055 100644 --- a/noir-projects/noir-contracts/contracts/delegated_on_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/delegated_on_contract/src/main.nr @@ -4,7 +4,7 @@ contract DelegatedOn { AztecAddress, FunctionSelector, NoteHeader, NoteGetterOptions, NoteViewerOptions, PublicMutable, PrivateSet, PrivateContext, Map }; - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt; + use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note; use dep::aztec::{protocol_types::grumpkin_point::GrumpkinPoint}; use dep::value_note::value_note::ValueNote; @@ -20,7 +20,7 @@ contract DelegatedOn { let owner_npk_m_hash = header.get_npk_m_hash(&mut context, owner); let mut note = ValueNote::new(new_value, owner_npk_m_hash); - storage.a_map_with_private_values.at(owner).insert(&mut note).emit(encode_and_encrypt(&mut context, context.msg_sender(), owner)); + 
storage.a_map_with_private_values.at(owner).insert(&mut note).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), owner)); new_value } diff --git a/noir-projects/noir-contracts/contracts/docs_example_contract/src/main.nr b/noir-projects/noir-contracts/contracts/docs_example_contract/src/main.nr index 9653946054c..22242e9404a 100644 --- a/noir-projects/noir-contracts/contracts/docs_example_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/docs_example_contract/src/main.nr @@ -18,7 +18,7 @@ contract DocsExample { PrivateContext, Map, PublicMutable, PublicImmutable, PrivateMutable, PrivateImmutable, PrivateSet, SharedImmutable, Deserialize }; - use dep::aztec::encrypted_logs::encrypted_note_emission::{encode_and_encrypt, encode_and_encrypt_with_keys}; + use dep::aztec::encrypted_logs::encrypted_note_emission::{encode_and_encrypt_note, encode_and_encrypt_note_with_keys}; use dep::aztec::note::note_getter_options::Comparator; use dep::aztec::protocol_types::grumpkin_point::GrumpkinPoint; // how to import methods from other files/folders within your workspace @@ -173,7 +173,7 @@ contract DocsExample { let msg_sender_npk_m_hash = header.get_npk_m_hash(&mut context, context.msg_sender()); let mut new_card = CardNote::new(points, randomness, msg_sender_npk_m_hash); - storage.private_immutable.initialize(&mut new_card).emit(encode_and_encrypt(&mut context, context.msg_sender(), context.msg_sender())); + storage.private_immutable.initialize(&mut new_card).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), context.msg_sender())); } // docs:end:initialize-private-mutable @@ -185,7 +185,7 @@ contract DocsExample { let mut legendary_card = CardNote::new(points, randomness, msg_sender_npk_m_hash); // create and broadcast note - storage.legendary_card.initialize(&mut legendary_card).emit(encode_and_encrypt(&mut context, context.msg_sender(), context.msg_sender())); + storage.legendary_card.initialize(&mut 
legendary_card).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), context.msg_sender())); } #[aztec(private)] @@ -197,7 +197,7 @@ contract DocsExample { for i in 0..amounts.len() { let mut note = CardNote::new(amounts[i], 1, msg_sender_npk_m_hash); - storage.set.insert(&mut note).emit(encode_and_encrypt_with_keys(&mut context, msg_sender_ovpk_m, msg_sender_ivpk_m)); + storage.set.insert(&mut note).emit(encode_and_encrypt_note_with_keys(&mut context, msg_sender_ovpk_m, msg_sender_ivpk_m)); } } @@ -207,7 +207,7 @@ contract DocsExample { let msg_sender_npk_m_hash = header.get_npk_m_hash(&mut context, context.msg_sender()); let mut note = CardNote::new(amount, randomness, msg_sender_npk_m_hash); - storage.set.insert(&mut note).emit(encode_and_encrypt(&mut context, context.msg_sender(), context.msg_sender())); + storage.set.insert(&mut note).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), context.msg_sender())); } // docs:start:state_vars-NoteGetterOptionsComparatorExampleNoir @@ -229,7 +229,7 @@ contract DocsExample { let msg_sender_npk_m_hash = header.get_npk_m_hash(&mut context, context.msg_sender()); let mut new_card = CardNote::new(points, randomness, msg_sender_npk_m_hash); - storage.legendary_card.replace(&mut new_card).emit(encode_and_encrypt(&mut context, context.msg_sender(), context.msg_sender())); + storage.legendary_card.replace(&mut new_card).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), context.msg_sender())); DocsExample::at(context.this_address()).update_leader(context.msg_sender(), points).enqueue(&mut context); } @@ -250,14 +250,15 @@ contract DocsExample { let mut new_card = CardNote::new(points, card.randomness, msg_sender_npk_m_hash); // docs:start:state_vars-PrivateMutableReplace - storage.legendary_card.replace(&mut new_card).emit(encode_and_encrypt(&mut context, context.msg_sender(), context.msg_sender())); + storage.legendary_card.replace(&mut new_card).emit(encode_and_encrypt_note(&mut 
context, context.msg_sender(), context.msg_sender())); // docs:end:state_vars-PrivateMutableReplace DocsExample::at(context.this_address()).update_leader(context.msg_sender(), points).enqueue(&mut context); } #[aztec(private)] - fn spend_private_authwit(inner_hash: Field) -> Field { + #[aztec(view)] + fn verify_private_authwit(inner_hash: Field) -> Field { 1 } diff --git a/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/main.nr index 0bb6c9c0076..f4495e48217 100644 --- a/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/main.nr @@ -4,7 +4,7 @@ mod ecdsa_public_key_note; // The signing key is stored in an immutable private note and should be different from the signing key. contract EcdsaAccount { use dep::aztec::prelude::{AztecAddress, FunctionSelector, NoteHeader, NoteGetterOptions, PrivateContext, PrivateImmutable}; - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt; + use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note; use dep::aztec::protocol_types::abis::call_context::CallContext; use dep::std; @@ -33,7 +33,7 @@ contract EcdsaAccount { // important. 
let mut pub_key_note = EcdsaPublicKeyNote::new(signing_pub_key_x, signing_pub_key_y, this_npk_m_hash); - storage.public_key.initialize(&mut pub_key_note).emit(encode_and_encrypt(&mut context, this, this)); + storage.public_key.initialize(&mut pub_key_note).emit(encode_and_encrypt_note(&mut context, this, this)); } // Note: If you globally change the entrypoint signature don't forget to update default_entrypoint.ts @@ -45,15 +45,10 @@ contract EcdsaAccount { #[aztec(private)] #[aztec(noinitcheck)] - fn spend_private_authwit(inner_hash: Field) -> Field { + #[aztec(view)] + fn verify_private_authwit(inner_hash: Field) -> Field { let actions = AccountActions::init(&mut context, is_valid_impl); - actions.spend_private_authwit(inner_hash) - } - - #[aztec(private)] - #[aztec(internal)] - fn cancel_authwit(outer_hash: Field) { - context.push_new_nullifier(outer_hash, 0); + actions.verify_private_authwit(inner_hash) } #[contract_library_method] diff --git a/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr b/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr index 661283b1f85..27159437c1b 100644 --- a/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr @@ -1,7 +1,7 @@ // Sample escrow contract that stores a balance of a private token on behalf of an owner. 
contract Escrow { use dep::aztec::prelude::{AztecAddress, EthAddress, FunctionSelector, NoteHeader, PrivateContext, PrivateImmutable}; - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt; + use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note; use dep::address_note::address_note::AddressNote; @@ -20,7 +20,7 @@ contract Escrow { let owner_npk_m_hash = header.get_npk_m_hash(&mut context, owner); let mut note = AddressNote::new(owner, owner_npk_m_hash); - storage.owner.initialize(&mut note).emit(encode_and_encrypt(&mut context, context.msg_sender(), owner)); + storage.owner.initialize(&mut note).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), owner)); } // Withdraws balance. Requires that msg.sender is the owner. diff --git a/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr b/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr index 372b11fbd5b..71cc8d803f7 100644 --- a/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr @@ -4,7 +4,7 @@ contract InclusionProofs { AztecAddress, EthAddress, FunctionSelector, NoteHeader, NoteGetterOptions, PrivateContext, Map, PrivateSet, PublicMutable }; - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt; + use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note; use dep::aztec::protocol_types::{grumpkin_point::GrumpkinPoint, contract_class_id::ContractClassId, header::Header}; use dep::aztec::{note::note_getter_options::NoteStatus}; @@ -36,7 +36,7 @@ contract InclusionProofs { let owner_npk_m_hash = header.get_npk_m_hash(&mut context, owner); let mut note = ValueNote::new(value, owner_npk_m_hash); - owner_private_values.insert(&mut note).emit(encode_and_encrypt(&mut context, context.msg_sender(), owner)); + owner_private_values.insert(&mut 
note).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), owner)); } // docs:end:create_note diff --git a/noir-projects/noir-contracts/contracts/parent_contract/src/main.nr b/noir-projects/noir-contracts/contracts/parent_contract/src/main.nr index b8789b55e6f..efeae7bcda8 100644 --- a/noir-projects/noir-contracts/contracts/parent_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/parent_contract/src/main.nr @@ -257,8 +257,9 @@ contract Parent { let owner = env.create_account(); // Deploy child contract - let child_contract_address = env.deploy("@aztec/noir-contracts.js/Child").without_initializer(); - cheatcodes::advance_blocks(1); + let child_contract = env.deploy("@aztec/noir-contracts.js/Child").without_initializer(); + let child_contract_address = child_contract.to_address(); + cheatcodes::advance_blocks_by(1); // Set value in child through parent let value_to_set = 7; diff --git a/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr b/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr index 6f5076d4ddf..c35c36d6eb5 100644 --- a/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr @@ -8,7 +8,7 @@ contract PendingNoteHashes { use dep::value_note::{balance_utils, filter::filter_notes_min_sum, value_note::{VALUE_NOTE_LEN, ValueNote}}; use dep::aztec::protocol_types::grumpkin_point::GrumpkinPoint; use dep::aztec::protocol_types::constants::{MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, MAX_NEW_NOTE_HASHES_PER_CALL}; - use dep::aztec::encrypted_logs::encrypted_note_emission::{encode_and_encrypt, encode_and_encrypt_with_keys}; + use dep::aztec::encrypted_logs::encrypted_note_emission::{encode_and_encrypt_note, encode_and_encrypt_note_with_keys}; use dep::aztec::note::note_emission::NoteEmission; #[aztec(storage)] @@ -36,7 +36,7 @@ contract PendingNoteHashes { let mut note = 
ValueNote::new(amount, owner_npk_m_hash); // Insert note - owner_balance.insert(&mut note).emit(encode_and_encrypt(&mut context, outgoing_viewer, owner)); + owner_balance.insert(&mut note).emit(encode_and_encrypt_note(&mut context, outgoing_viewer, owner)); let options = NoteGetterOptions::with_filter(filter_notes_min_sum, amount); // get note inserted above @@ -71,7 +71,7 @@ contract PendingNoteHashes { // Insert note let mut note = ValueNote::new(amount, owner_npk_m_hash); - owner_balance.insert(&mut note).emit(encode_and_encrypt(&mut context, context.msg_sender(), owner)); + owner_balance.insert(&mut note).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), owner)); 0 } @@ -91,7 +91,7 @@ contract PendingNoteHashes { let mut note = ValueNote::new(amount, owner_npk_m_hash); // Insert note - owner_balance.insert(&mut note).emit(encode_and_encrypt(&mut context, outgoing_viewer, owner)); + owner_balance.insert(&mut note).emit(encode_and_encrypt_note(&mut context, outgoing_viewer, owner)); } // Nested/inner function to create and insert a note @@ -112,7 +112,7 @@ contract PendingNoteHashes { note.randomness = 2; // Insert note - owner_balance.insert(&mut note).emit(encode_and_encrypt(&mut context, outgoing_viewer, owner)); + owner_balance.insert(&mut note).emit(encode_and_encrypt_note(&mut context, outgoing_viewer, owner)); } // Nested/inner function to create and insert a note @@ -129,10 +129,10 @@ contract PendingNoteHashes { // Insert note let emission = owner_balance.insert(&mut note); - emission.emit(encode_and_encrypt(&mut context, outgoing_viewer, owner)); + emission.emit(encode_and_encrypt_note(&mut context, outgoing_viewer, owner)); // Emit note again - emission.emit(encode_and_encrypt(&mut context, outgoing_viewer, owner)); + emission.emit(encode_and_encrypt_note(&mut context, outgoing_viewer, owner)); } // Nested/inner function to get a note and confirm it matches the expected value @@ -351,7 +351,7 @@ contract PendingNoteHashes { let 
owner_ivpk_m = header.get_ivpk_m(&mut context, owner); let mut good_note = ValueNote::new(10, owner_npk_m_hash); // Insert good note with real log - owner_balance.insert(&mut good_note).emit(encode_and_encrypt(&mut context, outgoing_viewer, owner)); + owner_balance.insert(&mut good_note).emit(encode_and_encrypt_note(&mut context, outgoing_viewer, owner)); // We will emit a note log with an incorrect preimage to ensure the pxe throws // This note has not been inserted... @@ -360,7 +360,7 @@ contract PendingNoteHashes { let existing_note_header = good_note.get_header(); bad_note.set_header(existing_note_header); - NoteEmission::new(bad_note).emit(encode_and_encrypt_with_keys(&mut context, outgoing_viewer_ovpk_m, owner_ivpk_m)); + NoteEmission::new(bad_note).emit(encode_and_encrypt_note_with_keys(&mut context, outgoing_viewer_ovpk_m, owner_ivpk_m)); } #[contract_library_method] @@ -378,7 +378,7 @@ contract PendingNoteHashes { for i in 0..max_notes_per_call() { let mut note = ValueNote::new(i as Field, owner_npk_m_hash); - owner_balance.insert(&mut note).emit(encode_and_encrypt_with_keys(context, outgoing_viewer_ovpk_m, owner_ivpk_m)); + owner_balance.insert(&mut note).emit(encode_and_encrypt_note_with_keys(context, outgoing_viewer_ovpk_m, owner_ivpk_m)); } } diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr index 9334077bf57..8632a64bc62 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr @@ -6,10 +6,10 @@ contract SchnorrAccount { use dep::std; use dep::aztec::prelude::{AztecAddress, FunctionSelector, NoteHeader, PrivateContext, PrivateImmutable}; - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt; + use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note; use dep::authwit::{ 
entrypoint::{app::AppPayload, fee::FeePayload}, account::AccountActions, - auth_witness::get_auth_witness + auth_witness::get_auth_witness, auth::{compute_authwit_nullifier, compute_outer_authwit_hash} }; use dep::aztec::hash::compute_siloed_nullifier; use dep::aztec::oracle::get_nullifier_membership_witness::get_low_nullifier_membership_witness; @@ -36,7 +36,7 @@ contract SchnorrAccount { // docs:start:initialize let mut pub_key_note = PublicKeyNote::new(signing_pub_key_x, signing_pub_key_y, this_npk_m_hash); - storage.signing_public_key.initialize(&mut pub_key_note).emit(encode_and_encrypt(&mut context, this, this)); + storage.signing_public_key.initialize(&mut pub_key_note).emit(encode_and_encrypt_note(&mut context, this, this)); // docs:end:initialize } @@ -50,15 +50,10 @@ contract SchnorrAccount { #[aztec(private)] #[aztec(noinitcheck)] - fn spend_private_authwit(inner_hash: Field) -> Field { + #[aztec(view)] + fn verify_private_authwit(inner_hash: Field) -> Field { let actions = AccountActions::init(&mut context, is_valid_impl); - actions.spend_private_authwit(inner_hash) - } - - #[aztec(private)] - #[aztec(internal)] - fn cancel_authwit(outer_hash: Field) { - context.push_new_nullifier(outer_hash, 0); + actions.verify_private_authwit(inner_hash) } #[contract_library_method] @@ -90,11 +85,15 @@ contract SchnorrAccount { /** * @notice Helper function to check validity of private authwitnesses + * @param consumer The address of the consumer of the message * @param message_hash The message hash of the message to check the validity * @return True if the message_hash can be consumed, false otherwise */ - unconstrained fn lookup_validity(message_hash: Field) -> pub bool { + unconstrained fn lookup_validity(consumer: AztecAddress, inner_hash: Field) -> pub bool { let public_key = storage.signing_public_key.view_note(); + + let message_hash = compute_outer_authwit_hash(consumer, context.chain_id(), context.version(), inner_hash); + let witness: [Field; 64] = 
get_auth_witness(message_hash); let mut signature: [u8; 64] = [0; 64]; for i in 0..64 { @@ -107,14 +106,12 @@ contract SchnorrAccount { message_hash.to_be_bytes(32) ); - let block_number = context.block_number(); - let myself = context.this_address(); - // Compute the nullifier and check if it is spent // This will BLINDLY TRUST the oracle, but the oracle is us, and // it is not as part of execution of the contract, so we are good. - let siloed_nullifier = compute_siloed_nullifier(myself, message_hash); - let lower_wit = get_low_nullifier_membership_witness(block_number, siloed_nullifier); + let nullifier = compute_authwit_nullifier(context.this_address(), inner_hash); + let siloed_nullifier = compute_siloed_nullifier(consumer, nullifier); + let lower_wit = get_low_nullifier_membership_witness(context.block_number(), siloed_nullifier); let is_spent = lower_wit.leaf_preimage.nullifier == siloed_nullifier; !is_spent & valid_in_private diff --git a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr index 3441779536b..8d776eab233 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr @@ -20,15 +20,10 @@ contract SchnorrHardcodedAccount { } #[aztec(private)] - fn spend_private_authwit(inner_hash: Field) -> Field { + #[aztec(view)] + fn verify_private_authwit(inner_hash: Field) -> Field { let actions = AccountActions::init(&mut context, is_valid_impl); - actions.spend_private_authwit(inner_hash) - } - - #[aztec(private)] - #[aztec(internal)] - fn cancel_authwit(outer_hash: Field) { - context.push_new_nullifier(outer_hash, 0); + actions.verify_private_authwit(inner_hash) } // docs:start:is-valid diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/main.nr 
b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/main.nr index ca795fca252..fbf81afb5fc 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/main.nr @@ -16,15 +16,10 @@ contract SchnorrSingleKeyAccount { } #[aztec(private)] - fn spend_private_authwit(inner_hash: Field) -> Field { + #[aztec(view)] + fn verify_private_authwit(inner_hash: Field) -> Field { let actions = AccountActions::init(&mut context, is_valid_impl); - actions.spend_private_authwit(inner_hash) - } - - #[aztec(private)] - #[aztec(internal)] - fn cancel_authwit(outer_hash: Field) { - context.push_new_nullifier(outer_hash, 0); + actions.verify_private_authwit(inner_hash) } #[contract_library_method] diff --git a/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr b/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr index 9ed71ce290e..e0d113569fe 100644 --- a/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr @@ -5,7 +5,7 @@ contract StaticChild { use dep::aztec::{ context::{PublicContext, gas::GasOpts}, protocol_types::{abis::{call_context::CallContext}}, note::{note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - encrypted_logs::encrypted_note_emission::encode_and_encrypt + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note }; use dep::value_note::value_note::ValueNote; @@ -43,7 +43,7 @@ contract StaticChild { let header = context.get_header(); let owner_npk_m_hash = header.get_npk_m_hash(&mut context, owner); let mut note = ValueNote::new(new_value, owner_npk_m_hash); - storage.a_private_value.insert(&mut note).emit(encode_and_encrypt(&mut context, context.msg_sender(), owner)); + storage.a_private_value.insert(&mut note).emit(encode_and_encrypt_note(&mut context, 
context.msg_sender(), owner)); new_value } @@ -57,7 +57,7 @@ contract StaticChild { let header = context.get_header(); let owner_npk_m_hash = header.get_npk_m_hash(&mut context, owner); let mut note = ValueNote::new(new_value, owner_npk_m_hash); - storage.a_private_value.insert(&mut note).emit(encode_and_encrypt(&mut context, outgoing_viewer, owner)); + storage.a_private_value.insert(&mut note).emit(encode_and_encrypt_note(&mut context, outgoing_viewer, owner)); new_value } diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 0ec38297670..eb34674b095 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -7,7 +7,8 @@ contract Test { AztecAddress, EthAddress, FunctionSelector, NoteHeader, NoteGetterOptions, NoteViewerOptions, PrivateContext, PrivateImmutable, PrivateSet, SharedImmutable }; - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt; + use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note; + use dep::aztec::encrypted_logs::encrypted_event_emission::encode_and_encrypt_event_with_keys; use dep::aztec::protocol_types::{ abis::private_circuit_public_inputs::PrivateCircuitPublicInputs, @@ -41,7 +42,11 @@ contract Test { #[aztec(event)] struct ExampleEvent { - value: Field, + value0: Field, + value1: Field, + value2: Field, + value3: Field, + value4: Field, } #[aztec(storage)] @@ -97,7 +102,7 @@ contract Test { let owner_npk_m_hash = header.get_npk_m_hash(&mut context, owner); let mut note = ValueNote::new(value, owner_npk_m_hash); - create_note(&mut context, storage_slot, &mut note).emit(encode_and_encrypt(&mut context, outgoing_viewer, owner)); + create_note(&mut context, storage_slot, &mut note).emit(encode_and_encrypt_note(&mut context, outgoing_viewer, owner)); } #[aztec(private)] @@ -262,23 +267,34 @@ contract Test 
{ let header = context.get_header(); let outgoing_viewer_ovpk_m = header.get_ovpk_m(&mut context, outgoing_viewer); let owner_ivpk_m = header.get_ivpk_m(&mut context, owner); - context.encrypt_and_emit_event( - 5, // testing only - this should be a secret random value to salt the addr - 1, - outgoing_viewer_ovpk_m, - owner_ivpk_m, - fields + + let event = ExampleEvent { value0: fields[0], value1: fields[1], value2: fields[2], value3: fields[3], value4: fields[4] }; + + event.emit( + encode_and_encrypt_event_with_keys( + &mut context, + // testing only - a secret random value is passed in here to salt / mask the address + 5, + outgoing_viewer_ovpk_m, + owner_ivpk_m + ) ); + // this contract has reached max number of functions, so using this one fn // to test nested and non nested encrypted logs if nest { Test::at(context.this_address()).emit_array_as_encrypted_log([0, 0, 0, 0, 0], owner, outgoing_viewer, false).call(&mut context); - context.encrypt_and_emit_event( - 0, // testing only - this signals to the kerels to not mask the address - 1, - outgoing_viewer_ovpk_m, - owner_ivpk_m, - [1, 2, 3, 4, 5] + + let otherEvent = ExampleEvent { value0: 1, value1: 2, value2: 3, value3: 4, value4: 5 }; + + otherEvent.emit( + encode_and_encrypt_event_with_keys( + &mut context, + // testing only - a randomness of 0 signals the kerels to not mask the address + 0, + outgoing_viewer_ovpk_m, + owner_ivpk_m + ) ); } } @@ -312,7 +328,7 @@ contract Test { let owner_npk_m_hash = header.get_npk_m_hash(&mut context, owner); let mut note = ValueNote::new(value + 1, owner_npk_m_hash); - create_note(&mut context, storage_slot, &mut note).emit(encode_and_encrypt(&mut context, context.msg_sender(), owner)); + create_note(&mut context, storage_slot, &mut note).emit(encode_and_encrypt_note(&mut context, context.msg_sender(), owner)); storage_slot += 1; Test::at(context.this_address()).call_create_note(value + 2, owner, outgoing_viewer, storage_slot).call(&mut context); } diff --git 
a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr index 9412f8e19eb..f42cb2ffd7a 100644 --- a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr @@ -1,12 +1,10 @@ contract TestLog { use dep::aztec::prelude::PrivateSet; - use dep::aztec::protocol_types::{ - traits::Serialize, grumpkin_point::GrumpkinPoint, grumpkin_private_key::GrumpkinPrivateKey, - abis::function_selector::FunctionSelector - }; + use dep::aztec::protocol_types::{traits::Serialize, grumpkin_point::GrumpkinPoint, grumpkin_private_key::GrumpkinPrivateKey}; use dep::value_note::value_note::ValueNote; use dep::aztec::encrypted_logs::incoming_body::EncryptedLogIncomingBody; use dep::aztec::event::event_interface::EventInterface; + use dep::aztec::encrypted_logs::encrypted_event_emission::{encode_and_encrypt_event, encode_and_encrypt_event_with_keys}; #[aztec(event)] struct ExampleEvent0 { @@ -14,56 +12,12 @@ contract TestLog { value1: Field, } - // This should be autogenerated by the macros - global EXAMPLE_EVENT_0_BYTES_LEN = 32 * 2 + 32 + 32; - - impl EventInterface for ExampleEvent0 { - fn _selector(self) -> FunctionSelector { - FunctionSelector::from_signature("TestEvent(Field,Field,Field)") - } - - fn to_be_bytes(self, randomness: Field) -> [u8; EXAMPLE_EVENT_0_BYTES_LEN] { - let mut buffer: [u8; EXAMPLE_EVENT_0_BYTES_LEN] = [0; EXAMPLE_EVENT_0_BYTES_LEN]; - - let randomness_bytes = randomness.to_be_bytes(32); - let event_type_id_bytes = self._selector().to_field().to_be_bytes(32); - - for i in 0..32 { - buffer[i] = randomness_bytes[i]; - buffer[32 + i] = event_type_id_bytes[i]; - } - - let serialized_event = self.serialize(); - - for i in 0..serialized_event.len() { - let bytes = serialized_event[i].to_be_bytes(32); - for j in 0..32 { - buffer[64 + i * 32 + j] = bytes[j]; - } - } - - buffer - } - } - #[aztec(event)] 
struct ExampleEvent1 { value2: Field, value3: Field, } - impl Serialize<2> for ExampleEvent0 { - fn serialize(self) -> [Field; 2] { - [self.value0, self.value1] - } - } - - impl Serialize<2> for ExampleEvent1 { - fn serialize(self) -> [Field; 2] { - [self.value2, self.value3] - } - } - #[aztec(storage)] struct Storage { example_set: PrivateSet, @@ -86,41 +40,28 @@ contract TestLog { ).compute_ciphertext(secret, point).as_array() } - #[aztec(private)] - fn emit_encrypted_log(randomness: Field, event_type_id: Field, preimage: [Field; 6]) { - let header = context.get_header(); - let msg_sender_ivpk_m = header.get_ivpk_m(&mut context, context.msg_sender()); - let msg_sender_ovpk_m = header.get_ovpk_m(&mut context, context.msg_sender()); - - context.encrypt_and_emit_event( - randomness, - event_type_id, - msg_sender_ovpk_m, - msg_sender_ivpk_m, - preimage - ); - } - #[aztec(private)] fn emit_encrypted_events(randomness: [Field; 2], preimages: [Field; 4]) { - let header = context.get_header(); - let msg_sender_ivpk_m = header.get_ivpk_m(&mut context, context.msg_sender()); - let msg_sender_ovpk_m = header.get_ovpk_m(&mut context, context.msg_sender()); - - context.encrypt_and_emit_event( - randomness[0], - ExampleEvent0::selector().to_field(), - msg_sender_ovpk_m, - msg_sender_ivpk_m, - ExampleEvent0 { value0: preimages[0], value1: preimages[1] }.serialize() + let event0 = ExampleEvent0 { value0: preimages[0], value1: preimages[1] }; + + event0.emit( + encode_and_encrypt_event( + &mut context, + randomness[0], + context.msg_sender(), + context.msg_sender() + ) ); - context.encrypt_and_emit_event( - randomness[1], - ExampleEvent1::selector().to_field(), - msg_sender_ovpk_m, - msg_sender_ivpk_m, - ExampleEvent1 { value2: preimages[2], value3: preimages[3] }.serialize() + let event1 = ExampleEvent1 { value2: preimages[2], value3: preimages[3] }; + + event1.emit( + encode_and_encrypt_event( + &mut context, + randomness[1], + context.msg_sender(), + context.msg_sender() + ) 
); } } diff --git a/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/main.nr index defab65772e..114fa764515 100644 --- a/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/main.nr @@ -13,7 +13,7 @@ contract TokenBlacklist { use dep::aztec::{ hash::compute_secret_hash, prelude::{AztecAddress, FunctionSelector, Map, NoteGetterOptions, PrivateSet, PublicMutable, SharedMutable}, - encrypted_logs::encrypted_note_emission::encode_and_encrypt + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note }; use dep::authwit::{auth::{assert_current_call_valid_authwit, assert_current_call_valid_authwit_public}}; @@ -179,7 +179,7 @@ contract TokenBlacklist { // Add the token note to user's balances set let caller = context.msg_sender(); - storage.balances.add(to, U128::from_integer(amount)).emit(encode_and_encrypt(&mut context, caller, to)); + storage.balances.add(to, U128::from_integer(amount)).emit(encode_and_encrypt_note(&mut context, caller, to)); } #[aztec(private)] @@ -195,7 +195,7 @@ contract TokenBlacklist { assert(nonce == 0, "invalid nonce"); } - storage.balances.sub(from, U128::from_integer(amount)).emit(encode_and_encrypt(&mut context, from, from)); + storage.balances.sub(from, U128::from_integer(amount)).emit(encode_and_encrypt_note(&mut context, from, from)); TokenBlacklist::at(context.this_address())._increase_public_balance(to, amount).enqueue(&mut context); } @@ -215,8 +215,8 @@ contract TokenBlacklist { } let amount = U128::from_integer(amount); - storage.balances.sub(from, amount).emit(encode_and_encrypt(&mut context, from, from)); - storage.balances.add(to, amount).emit(encode_and_encrypt(&mut context, from, to)); + storage.balances.sub(from, amount).emit(encode_and_encrypt_note(&mut context, from, from)); + storage.balances.add(to, 
amount).emit(encode_and_encrypt_note(&mut context, from, to)); } #[aztec(private)] @@ -230,7 +230,7 @@ contract TokenBlacklist { assert(nonce == 0, "invalid nonce"); } - storage.balances.sub(from, U128::from_integer(amount)).emit(encode_and_encrypt(&mut context, from, from)); + storage.balances.sub(from, U128::from_integer(amount)).emit(encode_and_encrypt_note(&mut context, from, from)); TokenBlacklist::at(context.this_address())._reduce_total_supply(amount).enqueue(&mut context); } diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index b263abeff01..d13145c89e2 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -1,6 +1,7 @@ // docs:start:token_all // docs:start:imports mod types; +mod test; // Minimal token implementation that supports `AuthWit` accounts. // The auth message follows a similar pattern to the cross-chain message and includes a designated caller. 
@@ -17,11 +18,11 @@ contract Token { use dep::aztec::{ hash::compute_secret_hash, prelude::{NoteGetterOptions, Map, PublicMutable, SharedImmutable, PrivateSet, AztecAddress}, - encrypted_logs::encrypted_note_emission::{encode_and_encrypt, encode_and_encrypt_with_keys} + encrypted_logs::encrypted_note_emission::{encode_and_encrypt_note, encode_and_encrypt_note_with_keys} }; // docs:start:import_authwit - use dep::authwit::{auth::{assert_current_call_valid_authwit, assert_current_call_valid_authwit_public}}; + use dep::authwit::auth::{assert_current_call_valid_authwit, assert_current_call_valid_authwit_public, compute_authwit_nullifier}; // docs:end:import_authwit use crate::types::{transparent_note::TransparentNote, token_note::{TokenNote, TOKEN_NOTE_LEN}, balances_map::BalancesMap}; @@ -195,7 +196,7 @@ contract Token { #[aztec(private)] fn privately_mint_private_note(amount: Field) { let caller = context.msg_sender(); - storage.balances.add(caller, U128::from_integer(amount)).emit(encode_and_encrypt(&mut context, caller, caller)); + storage.balances.add(caller, U128::from_integer(amount)).emit(encode_and_encrypt_note(&mut context, caller, caller)); Token::at(context.this_address()).assert_minter_and_mint(context.msg_sender(), amount).enqueue(&mut context); } @@ -289,7 +290,7 @@ contract Token { // Note: Using context.msg_sender() as a sender below makes this incompatible with escrows because we send // outgoing logs to that address and to send outgoing logs you need to get a hold of ovsk_m. 
let from = context.msg_sender(); - storage.balances.add(to, U128::from_integer(amount)).emit(encode_and_encrypt(&mut context, from, to)); + storage.balances.add(to, U128::from_integer(amount)).emit(encode_and_encrypt_note(&mut context, from, to)); } // docs:end:redeem_shield @@ -302,7 +303,7 @@ contract Token { assert(nonce == 0, "invalid nonce"); } - storage.balances.sub(from, U128::from_integer(amount)).emit(encode_and_encrypt(&mut context, from, from)); + storage.balances.sub(from, U128::from_integer(amount)).emit(encode_and_encrypt_note(&mut context, from, from)); Token::at(context.this_address())._increase_public_balance(to, amount).enqueue(&mut context); } @@ -321,11 +322,22 @@ contract Token { let to_ivpk = header.get_ivpk_m(&mut context, to); let amount = U128::from_integer(amount); - storage.balances.sub(from, amount).emit(encode_and_encrypt_with_keys(&mut context, from_ovpk, from_ivpk)); - storage.balances.add(to, amount).emit(encode_and_encrypt_with_keys(&mut context, from_ovpk, to_ivpk)); + storage.balances.sub(from, amount).emit(encode_and_encrypt_note_with_keys(&mut context, from_ovpk, from_ivpk)); + storage.balances.add(to, amount).emit(encode_and_encrypt_note_with_keys(&mut context, from_ovpk, to_ivpk)); } // docs:end:transfer + /** + * Cancel a private authentication witness. + * @param inner_hash The inner hash of the authwit to cancel. 
+ */ + #[aztec(private)] + fn cancel_authwit(inner_hash: Field) { + let on_behalf_of = context.msg_sender(); + let nullifier = compute_authwit_nullifier(on_behalf_of, inner_hash); + context.push_new_nullifier(nullifier, 0); + } + #[aztec(private)] fn transfer_from(from: AztecAddress, to: AztecAddress, amount: Field, nonce: Field) { // docs:start:assert_current_call_valid_authwit @@ -346,10 +358,10 @@ contract Token { let amount = U128::from_integer(amount); // docs:start:increase_private_balance // docs:start:encrypted - storage.balances.sub(from, amount).emit(encode_and_encrypt_with_keys(&mut context, from_ovpk, from_ivpk)); + storage.balances.sub(from, amount).emit(encode_and_encrypt_note_with_keys(&mut context, from_ovpk, from_ivpk)); // docs:end:encrypted // docs:end:increase_private_balance - storage.balances.add(to, amount).emit(encode_and_encrypt_with_keys(&mut context, from_ovpk, to_ivpk)); + storage.balances.add(to, amount).emit(encode_and_encrypt_note_with_keys(&mut context, from_ovpk, to_ivpk)); } // docs:start:burn @@ -361,7 +373,7 @@ contract Token { assert(nonce == 0, "invalid nonce"); } - storage.balances.sub(from, U128::from_integer(amount)).emit(encode_and_encrypt(&mut context, from, from)); + storage.balances.sub(from, U128::from_integer(amount)).emit(encode_and_encrypt_note(&mut context, from, from)); Token::at(context.this_address())._reduce_total_supply(amount).enqueue(&mut context); } @@ -395,75 +407,5 @@ contract Token { storage.balances.balance_of(owner).to_field() } // docs:end:balance_of_private - - use dep::aztec::test::{helpers::{cheatcodes, test_environment::TestEnvironment}}; - use dep::aztec::protocol_types::storage::map::derive_storage_slot_in_map; - use dep::aztec::note::note_getter::{MAX_NOTES_PER_PAGE, view_notes}; - use dep::aztec::note::note_viewer_options::NoteViewerOptions; - - #[test] - fn test_private_transfer() { - // Setup env, generate keys - let mut env = TestEnvironment::new(); - let owner = env.create_account(); - let 
recipient = env.create_account(); - let mint_amount = 10000; - - // Start the test in the account contract address - cheatcodes::set_contract_address(owner); - - // Deploy token contract - let initializer_call_interface = Token::interface().constructor( - owner, - "TestToken0000000000000000000000", - "TT00000000000000000000000000000", - 18 - ); - let token_contract_address = env.deploy("@aztec/noir-contracts.js/Token").with_public_initializer(initializer_call_interface); - env.advance_block_by(1); - - // Mint some tokens - let secret = 1; - let secret_hash = compute_secret_hash(secret); - let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); - env.call_public(mint_private_call_interface); - - // Time travel so we can read keys from the registry - env.advance_block_by(6); - - // Store a note in the cache so we can redeem it - env.store_note_in_cache( - &mut TransparentNote::new(mint_amount, secret_hash), - Token::storage().pending_shields.slot, - token_contract_address - ); - - // Redeem our shielded tokens - let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret); - env.call_private_void(redeem_shield_call_interface); - - // Not really sure why this is needed? Nullifier inclusion in contract initializer fails otherwise. 
- // If it were to fail, it should do it at line 443, investigation required - env.advance_block_by(1); - - // Transfer tokens - let transfer_amount = 1000; - let private_token_transfer_call_interface = Token::at(token_contract_address).transfer(recipient, transfer_amount); - env.call_private_void(private_token_transfer_call_interface); - - // Check balances - cheatcodes::set_contract_address(token_contract_address); - - let balances_slot = Token::storage().balances.slot; - let recipient_slot = derive_storage_slot_in_map(balances_slot, recipient); - let mut options = NoteViewerOptions::new(); - let notes: BoundedVec = view_notes(recipient_slot, options); - assert(notes.get(0).amount.to_field() == transfer_amount); - - let owner_slot = derive_storage_slot_in_map(balances_slot, owner); - let mut options = NoteViewerOptions::new(); - let notes: BoundedVec = view_notes(owner_slot, options); - assert(notes.get(0).amount.to_field() == mint_amount - transfer_amount); - } } // docs:end:token_all \ No newline at end of file diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test.nr new file mode 100644 index 00000000000..cf797ce3bcc --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test.nr @@ -0,0 +1,9 @@ +mod access_control; +mod burn; +mod utils; +mod transfer_public; +mod transfer_private; +mod unshielding; +mod minting; +mod reading_constants; +mod shielding; diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr new file mode 100644 index 00000000000..37a84e09a7b --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr @@ -0,0 +1,52 @@ +use crate::test::utils; +use dep::aztec::test::helpers::cheatcodes; +use crate::Token; + +#[test] +unconstrained fn access_control() { + // Setup without 
account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient) = utils::setup(/* with_account_contracts */ false); + + // Set a new admin + let set_admin_call_interface = Token::at(token_contract_address).set_admin(recipient); + env.call_public(set_admin_call_interface); + + // Check it worked + let get_admin_call_interface = Token::at(token_contract_address).admin(); + let admin = env.call_public(get_admin_call_interface); + assert(admin == recipient.to_field()); + + // Impersonate new admin + cheatcodes::set_contract_address(recipient); + + // Check new admin is not a minter + let is_minter_call_interface = Token::at(token_contract_address).is_minter(recipient); + let is_minter = env.call_public(is_minter_call_interface); + assert(is_minter == false); + // Set admin as minter + let set_minter_call_interface = Token::at(token_contract_address).set_minter(recipient, true); + env.call_public(set_minter_call_interface); + + // Check it worked + let is_minter = env.call_public(is_minter_call_interface); + assert(is_minter == true); + + // Revoke minter as admin + let set_minter_call_interface = Token::at(token_contract_address).set_minter(recipient, false); + env.call_public(set_minter_call_interface); + + // Check it worked + let is_minter = env.call_public(is_minter_call_interface); + assert(is_minter == false); + + // Impersonate original admin + cheatcodes::set_contract_address(owner); + + // Try to set ourselves as admin, fail miserably + let set_admin_call_interface = Token::at(token_contract_address).set_admin(recipient); + env.assert_public_call_fails(set_admin_call_interface); + + // Try to revoke minter status to recipient, fail miserably + let set_minter_call_interface = Token::at(token_contract_address).set_minter(recipient, false); + env.assert_public_call_fails(set_minter_call_interface); +} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/burn.nr 
b/noir-projects/noir-contracts/contracts/token_contract/src/test/burn.nr new file mode 100644 index 00000000000..af0e6cb3c31 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/burn.nr @@ -0,0 +1,179 @@ +use crate::test::utils; +use dep::aztec::{test::helpers::cheatcodes, oracle::unsafe_rand::unsafe_rand}; +use dep::authwit::cheatcodes as authwit_cheatcodes; +use crate::Token; + +#[test] +unconstrained fn burn_public_success() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + let burn_amount = mint_amount / 10; + + // Burn less than balance + let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, 0); + env.call_public(burn_call_interface); + utils::check_public_balance(token_contract_address, owner, mint_amount - burn_amount); +} + +#[test] +unconstrained fn burn_public_on_behalf_of_other() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let burn_amount = mint_amount / 10; + + // Burn on behalf of other + let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, unsafe_rand()); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, recipient, burn_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Burn tokens + env.call_public(burn_call_interface); + utils::check_public_balance(token_contract_address, owner, mint_amount - burn_amount); +} + +#[test] +unconstrained fn burn_public_failure_more_than_balance() { + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + + // Burn more than balance + let burn_amount = mint_amount * 10; + let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, 0); + 
env.assert_public_call_fails(burn_call_interface); + utils::check_public_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn burn_public_failure_on_behalf_of_self_non_zero_nonce() { + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + + // Burn on behalf of self with non-zero nonce + let burn_amount = mint_amount / 10; + let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, unsafe_rand()); + env.assert_public_call_fails(burn_call_interface); + utils::check_public_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn burn_public_failure_on_behalf_of_other_without_approval() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + + // Burn on behalf of other without approval + let burn_amount = mint_amount / 10; + let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, unsafe_rand()); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + env.assert_public_call_fails(burn_call_interface); + utils::check_public_balance(token_contract_address, owner, mint_amount); + + // Burn on behalf of other, wrong designated caller + let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, unsafe_rand()); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, owner, burn_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + env.assert_public_call_fails(burn_call_interface); + utils::check_public_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn burn_public_failure_on_behalf_of_other_wrong_caller() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + 
+ // Burn on behalf of other, wrong designated caller + let burn_amount = mint_amount / 10; + let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, unsafe_rand()); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, owner, burn_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + env.assert_public_call_fails(burn_call_interface); + utils::check_public_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn burn_private_on_behalf_of_self() { + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + let burn_amount = mint_amount / 10; + + // Burn less than balance + let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, 0); + env.call_private_void(burn_call_interface); + utils::check_private_balance(token_contract_address, owner, mint_amount - burn_amount); +} + +#[test] +unconstrained fn burn_private_on_behalf_of_other() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let burn_amount = mint_amount / 10; + + // Burn on behalf of other + let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); + authwit_cheatcodes::add_private_authwit_from_call_interface(owner, recipient, burn_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Burn tokens + env.call_private_void(burn_call_interface); + utils::check_private_balance(token_contract_address, owner, mint_amount - burn_amount); +} + +#[test(should_fail_with="Balance too low")] +unconstrained fn burn_private_failure_more_than_balance() { + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + + // Burn more than balance + let burn_amount = 
mint_amount * 10; + let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, 0); + env.call_private_void(burn_call_interface); + // Private doesnt revert, so we cannot check balances here since notes have already been nullified. Test is done. +} + +#[test(should_fail_with="invalid nonce")] +unconstrained fn burn_private_failure_on_behalf_of_self_non_zero_nonce() { + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + + // Burn more than balance + let burn_amount = mint_amount / 10; + let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); + env.call_private_void(burn_call_interface); + // Private doesnt revert, so we cannot check balances here since notes have already been nullified. Test is done. +} + +#[test(should_fail)] +unconstrained fn burn_private_failure_on_behalf_of_other_more_than_balance() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + + // Burn more than balance + let burn_amount = mint_amount * 10; + // Burn on behalf of other + let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); + authwit_cheatcodes::add_private_authwit_from_call_interface(owner, recipient, burn_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + env.call_private_void(burn_call_interface); + // Private doesnt revert, so we cannot check balances here since notes have already been nullified. Test is done. 
+} + +#[test(should_fail)] +unconstrained fn burn_private_failure_on_behalf_of_other_without_approval() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + + // Burn more than balance + let burn_amount = mint_amount / 10; + // Burn on behalf of other + let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + env.call_private_void(burn_call_interface); + // Private doesnt revert, so we cannot check balances here since notes have already been nullified. Test is done. +} + +#[test(should_fail)] +unconstrained fn burn_private_failure_on_behalf_of_other_wrong_designated_caller() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + + // Burn more than balance + let burn_amount = mint_amount / 10; + // Burn on behalf of other + let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); + authwit_cheatcodes::add_private_authwit_from_call_interface(owner, owner, burn_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + env.call_private_void(burn_call_interface); + // Private doesnt revert, so we cannot check balances here since notes have already been nullified. Test is done. 
+} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/minting.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/minting.nr new file mode 100644 index 00000000000..4e92489a59a --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/minting.nr @@ -0,0 +1,239 @@ +use crate::test::utils; +use dep::aztec::{test::helpers::cheatcodes, oracle::unsafe_rand::unsafe_rand, hash::compute_secret_hash}; +use crate::{types::transparent_note::TransparentNote, Token}; + +#[test] +unconstrained fn mint_public_success() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _) = utils::setup(/* with_account_contracts */ false); + + let mint_amount = 10000; + let mint_public_call_interface = Token::at(token_contract_address).mint_public(owner, mint_amount); + env.call_public(mint_public_call_interface); + + utils::check_public_balance(token_contract_address, owner, mint_amount); + + let total_supply_call_interface = Token::at(token_contract_address).total_supply(); + let total_supply = env.call_public(total_supply_call_interface); + + assert(total_supply == mint_amount); +} + +#[test] +unconstrained fn mint_public_failures() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient) = utils::setup(/* with_account_contracts */ false); + + // As non-minter + let mint_amount = 10000; + cheatcodes::set_contract_address(recipient); + let mint_public_call_interface = Token::at(token_contract_address).mint_public(owner, mint_amount); + env.assert_public_call_fails(mint_public_call_interface); + + utils::check_public_balance(token_contract_address, owner, 0); + + cheatcodes::set_contract_address(owner); + + // Overflow recipient + + let mint_amount = 2.pow_32(128); + let mint_public_call_interface = Token::at(token_contract_address).mint_public(owner, mint_amount); + env.assert_public_call_fails(mint_public_call_interface); + + utils::check_public_balance(token_contract_address, owner, 0); + + // Overflow total supply + + let mint_for_recipient_amount = 1000; + + let mint_public_call_interface = Token::at(token_contract_address).mint_public(recipient, mint_for_recipient_amount); + env.call_public(mint_public_call_interface); + + let mint_amount = 2.pow_32(128) - mint_for_recipient_amount; + let mint_public_call_interface = Token::at(token_contract_address).mint_public(owner, mint_amount); + env.assert_public_call_fails(mint_public_call_interface); + + utils::check_public_balance(token_contract_address, recipient, mint_for_recipient_amount); + utils::check_public_balance(token_contract_address, owner, 0); +} + +#[test] +unconstrained fn mint_private_success() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _) = utils::setup(/* with_account_contracts */ false); + let mint_amount = 10000; + // Mint some tokens + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); + env.call_public(mint_private_call_interface); + + let mint_public_call_interface = Token::at(token_contract_address).mint_public(owner, mint_amount); + env.call_public(mint_public_call_interface); + + // Time travel so we can read keys from the registry + env.advance_block_by(6); + + // Store a note in the cache so we can redeem it + env.store_note_in_cache( + &mut TransparentNote::new(mint_amount, secret_hash), + Token::storage().pending_shields.slot, + token_contract_address + ); + + // Redeem our shielded tokens + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret); + env.call_private_void(redeem_shield_call_interface); + + utils::check_private_balance(token_contract_address, owner, mint_amount); +} + +#[test(should_fail_with="Cannot return zero notes")] +unconstrained fn mint_private_failure_double_spend() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient) = utils::setup(/* with_account_contracts */ false); + let mint_amount = 10000; + // Mint some tokens + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); + env.call_public(mint_private_call_interface); + + let mint_public_call_interface = Token::at(token_contract_address).mint_public(owner, mint_amount); + env.call_public(mint_public_call_interface); + + // Time travel so we can read keys from the registry + env.advance_block_by(6); + + // Store a note in the cache so we can redeem it + env.store_note_in_cache( + &mut TransparentNote::new(mint_amount, secret_hash), + Token::storage().pending_shields.slot, + token_contract_address + ); + + // Redeem our shielded tokens + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret); + env.call_private_void(redeem_shield_call_interface); + + utils::check_private_balance(token_contract_address, owner, mint_amount); + + // Attempt to double spend + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(recipient, mint_amount, secret); + env.call_private_void(redeem_shield_call_interface); +} + +#[test(should_fail_with="caller is not minter")] +unconstrained fn mint_private_failure_non_minter() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, _, recipient) = utils::setup(/* with_account_contracts */ false); + let mint_amount = 10000; + // Try to mint some tokens impersonating recipient + cheatcodes::set_contract_address(recipient); + + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); + env.call_public(mint_private_call_interface); +} + +#[test(should_fail_with="call to assert_max_bit_size")] +unconstrained fn mint_private_failure_overflow() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, _, _) = utils::setup(/* with_account_contracts */ false); + + // Overflow recipient + let mint_amount = 2.pow_32(128); + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); + env.call_public(mint_private_call_interface); +} + +#[test(should_fail_with="attempt to add with overflow")] +unconstrained fn mint_private_failure_overflow_recipient() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _) = utils::setup(/* with_account_contracts */ false); + let mint_amount = 10000; + // Mint some tokens + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); + env.call_public(mint_private_call_interface); + + // Time travel so we can read keys from the registry + env.advance_block_by(6); + + // Store a note in the cache so we can redeem it + env.store_note_in_cache( + &mut TransparentNote::new(mint_amount, secret_hash), + Token::storage().pending_shields.slot, + token_contract_address + ); + + // Redeem our shielded tokens + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret); + env.call_private_void(redeem_shield_call_interface); + + utils::check_private_balance(token_contract_address, owner, mint_amount); + + let mint_amount = 2.pow_32(128) - mint_amount; + // Mint some tokens + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); + env.call_public(mint_private_call_interface); +} + +#[test(should_fail_with="attempt to add with overflow")] +unconstrained fn mint_private_failure_overflow_total_supply() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient) = utils::setup(/* with_account_contracts */ false); + let mint_amount = 10000; + // Mint some tokens + let secret_owner = unsafe_rand(); + let secret_recipient = unsafe_rand(); + let secret_hash_owner = compute_secret_hash(secret_owner); + let secret_hash_recipient = compute_secret_hash(secret_recipient); + + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash_owner); + env.call_public(mint_private_call_interface); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash_recipient); + env.call_public(mint_private_call_interface); + + // Time travel so we can read keys from the registry + env.advance_block_by(6); + + // Store 2 notes in the cache so we can redeem it for owner and recipient + env.store_note_in_cache( + &mut TransparentNote::new(mint_amount, secret_hash_owner), + Token::storage().pending_shields.slot, + token_contract_address + ); + env.store_note_in_cache( + &mut TransparentNote::new(mint_amount, secret_hash_recipient), + Token::storage().pending_shields.slot, + token_contract_address + ); + + // Redeem owner's shielded tokens + cheatcodes::set_contract_address(owner); + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret_owner); + env.call_private_void(redeem_shield_call_interface); + + // Redeem recipient's shielded tokens + cheatcodes::set_contract_address(recipient); + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(recipient, mint_amount, secret_recipient); + env.call_private_void(redeem_shield_call_interface); + + utils::check_private_balance(token_contract_address, owner, mint_amount); + utils::check_private_balance(token_contract_address, recipient, mint_amount); + + cheatcodes::set_contract_address(owner); + let mint_amount = 
2.pow_32(128) - 2 * mint_amount; + // Try to mint some tokens + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); + env.call_public(mint_private_call_interface); +} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/reading_constants.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/reading_constants.nr new file mode 100644 index 00000000000..469ff747590 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/reading_constants.nr @@ -0,0 +1,29 @@ +use crate::test::utils; +use dep::aztec::test::helpers::cheatcodes; +use crate::Token; + +// It is not possible to deserialize strings in Noir ATM, so name and symbol cannot be checked yet. + +#[test] +unconstrained fn check_decimals_private() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, _, _) = utils::setup(/* with_account_contracts */ false); + + // Check decimals + let private_get_decimals_call_interface = Token::at(token_contract_address).private_get_decimals(); + let result = env.call_private(private_get_decimals_call_interface); + + assert(result == 18); +} + +#[test] +unconstrained fn check_decimals_public() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, _, _) = utils::setup(/* with_account_contracts */ false); + + // Check decimals + let public_get_decimals_call_interface = Token::at(token_contract_address).public_get_decimals(); + let result = env.call_public(public_get_decimals_call_interface); + + assert(result == 18 as u8); +} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/shielding.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/shielding.nr new file mode 100644 index 00000000000..66280304481 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/shielding.nr @@ -0,0 +1,156 @@ +use crate::test::utils; +use dep::aztec::{test::helpers::cheatcodes, oracle::unsafe_rand::unsafe_rand, hash::compute_secret_hash}; +use dep::authwit::cheatcodes as authwit_cheatcodes; +use crate::{types::transparent_note::TransparentNote, Token}; + +#[test] +unconstrained fn shielding_on_behalf_of_self() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + // Shield tokens + let shield_amount = mint_amount / 10; + let shield_call_interface = Token::at(token_contract_address).shield(owner, shield_amount, secret_hash, 0); + env.call_public(shield_call_interface); + + // Store a note in the cache so we can redeem it + env.store_note_in_cache( + &mut TransparentNote::new(shield_amount, secret_hash), + Token::storage().pending_shields.slot, + token_contract_address + ); + + // Redeem our shielded tokens + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(owner, shield_amount, secret); + env.call_private_void(redeem_shield_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount - shield_amount); + utils::check_private_balance(token_contract_address, owner, mint_amount + shield_amount); +} + +#[test] +unconstrained fn shielding_on_behalf_of_other() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + + // Shield tokens on behalf of owner + let shield_amount = 1000; + let shield_call_interface = Token::at(token_contract_address).shield(owner, shield_amount, secret_hash, 0); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, recipient, shield_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Shield tokens + env.call_public(shield_call_interface); + + // Become owner again + cheatcodes::set_contract_address(owner); + // Store a note in the cache so we can redeem it + env.store_note_in_cache( + &mut TransparentNote::new(shield_amount, secret_hash), 
+ Token::storage().pending_shields.slot, + token_contract_address + ); + + // Redeem our shielded tokens + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(owner, shield_amount, secret); + env.call_private_void(redeem_shield_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount - shield_amount); + utils::check_private_balance(token_contract_address, owner, mint_amount + shield_amount); +} + +#[test] +unconstrained fn shielding_failure_on_behalf_of_self_more_than_balance() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + // Shield tokens + let shield_amount = mint_amount + 1; + let shield_call_interface = Token::at(token_contract_address).shield(owner, shield_amount, secret_hash, 0); + env.assert_public_call_fails(shield_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); + utils::check_private_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn shielding_failure_on_behalf_of_self_invalid_nonce() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + // Shield tokens + let shield_amount = mint_amount / 10; + let shield_call_interface = Token::at(token_contract_address).shield(owner, shield_amount, secret_hash, unsafe_rand()); + env.assert_public_call_fails(shield_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); + utils::check_private_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn shielding_failure_on_behalf_of_other_more_than_balance() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + // Shield tokens on behalf of owner + let shield_amount = mint_amount + 1; + let shield_call_interface = Token::at(token_contract_address).shield(owner, shield_amount, secret_hash, 0); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, recipient, shield_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Shield tokens + env.assert_public_call_fails(shield_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); + utils::check_private_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn shielding_failure_on_behalf_of_other_wrong_caller() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + // Shield tokens on behalf of owner + let shield_amount = mint_amount + 1; + let shield_call_interface = Token::at(token_contract_address).shield(owner, shield_amount, secret_hash, 0); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, owner, shield_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Shield tokens + env.assert_public_call_fails(shield_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); + utils::check_private_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn shielding_failure_on_behalf_of_other_without_approval() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + // Shield tokens on behalf of owner + let shield_amount = mint_amount + 1; + let shield_call_interface = Token::at(token_contract_address).shield(owner, shield_amount, secret_hash, 0); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Shield tokens + env.assert_public_call_fails(shield_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); + utils::check_private_balance(token_contract_address, owner, mint_amount); +} + diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_private.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_private.nr new file mode 100644 index 00000000000..47e04809114 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_private.nr @@ -0,0 +1,131 @@ +use crate::test::utils; +use dep::aztec::{test::helpers::cheatcodes, oracle::unsafe_rand::unsafe_rand, protocol_types::address::AztecAddress}; +use dep::authwit::cheatcodes as authwit_cheatcodes; +use crate::Token; + +#[test] +unconstrained fn transfer_private() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + // Transfer tokens + let transfer_amount = 1000; + let transfer_private_call_interface = Token::at(token_contract_address).transfer(recipient, transfer_amount); + env.call_private_void(transfer_private_call_interface); + + // Check balances + utils::check_private_balance(token_contract_address, owner, mint_amount - transfer_amount); + utils::check_private_balance(token_contract_address, recipient, transfer_amount); +} + +#[test] +unconstrained fn transfer_private_to_self() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + // Transfer tokens + let transfer_amount = 1000; + let transfer_private_call_interface = Token::at(token_contract_address).transfer(owner, transfer_amount); + env.call_private_void(transfer_private_call_interface); + + // Check balances + utils::check_private_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn transfer_private_to_non_deployed_account() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + let not_deployed = cheatcodes::create_account(); + // Transfer tokens + let transfer_amount = 1000; + let transfer_private_call_interface = Token::at(token_contract_address).transfer(not_deployed.address, transfer_amount); + env.call_private_void(transfer_private_call_interface); + + // Check balances + utils::check_private_balance(token_contract_address, owner, mint_amount - transfer_amount); + utils::check_private_balance(token_contract_address, not_deployed.address, transfer_amount); +} + +#[test] +unconstrained fn transfer_private_on_behalf_of_other() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + // Add authwit + let transfer_amount = 1000; + let transfer_private_from_call_interface = Token::at(token_contract_address).transfer_from(owner, recipient, transfer_amount, 1); + authwit_cheatcodes::add_private_authwit_from_call_interface(owner, recipient, transfer_private_from_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Transfer tokens + env.call_private_void(transfer_private_from_call_interface); + // Check balances + utils::check_private_balance(token_contract_address, owner, mint_amount - transfer_amount); + utils::check_private_balance(token_contract_address, recipient, transfer_amount); +} + +#[test(should_fail_with="Balance too low")] +unconstrained fn transfer_private_failure_more_than_balance() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, _, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + // Transfer tokens + let transfer_amount = mint_amount + 1; + let transfer_private_call_interface = Token::at(token_contract_address).transfer(recipient, transfer_amount); + env.call_private_void(transfer_private_call_interface); +} + +#[test(should_fail_with="invalid nonce")] +unconstrained fn transfer_private_failure_on_behalf_of_self_non_zero_nonce() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. + let (env, token_contract_address, owner, recipient, _) = utils::setup_and_mint(/* with_account_contracts */ true); + // Add authwit + let transfer_amount = 1000; + let transfer_private_from_call_interface = Token::at(token_contract_address).transfer_from(owner, recipient, transfer_amount, 1); + // Transfer tokens + env.call_private_void(transfer_private_from_call_interface); +} + +#[test(should_fail_with="Balance too low")] +unconstrained fn transfer_private_failure_on_behalf_of_more_than_balance() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. 
+ let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + // Add authwit + let transfer_amount = mint_amount + 1; + let transfer_private_from_call_interface = Token::at(token_contract_address).transfer_from(owner, recipient, transfer_amount, 1); + authwit_cheatcodes::add_private_authwit_from_call_interface(owner, recipient, transfer_private_from_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Transfer tokens + env.call_private_void(transfer_private_from_call_interface); +} + +#[test(should_fail)] +unconstrained fn transfer_private_failure_on_behalf_of_other_without_approval() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + // Add authwit + let transfer_amount = 1000; + let transfer_private_from_call_interface = Token::at(token_contract_address).transfer_from(owner, recipient, transfer_amount, 1); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Transfer tokens + env.call_private_void(transfer_private_from_call_interface); + // Check balances + utils::check_private_balance(token_contract_address, owner, mint_amount - transfer_amount); + utils::check_private_balance(token_contract_address, recipient, transfer_amount); +} + +#[test(should_fail)] +unconstrained fn transfer_private_failure_on_behalf_of_other_wrong_caller() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. 
+ let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + // Add authwit + let transfer_amount = 1000; + let transfer_private_from_call_interface = Token::at(token_contract_address).transfer_from(owner, recipient, transfer_amount, 1); + authwit_cheatcodes::add_private_authwit_from_call_interface(owner, owner, transfer_private_from_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Transfer tokens + env.call_private_void(transfer_private_from_call_interface); + // Check balances + utils::check_private_balance(token_contract_address, owner, mint_amount - transfer_amount); + utils::check_private_balance(token_contract_address, recipient, transfer_amount); +} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_public.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_public.nr new file mode 100644 index 00000000000..ae0b631ce37 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_public.nr @@ -0,0 +1,122 @@ +use crate::test::utils; +use dep::aztec::{test::helpers::cheatcodes, oracle::unsafe_rand::unsafe_rand}; +use dep::authwit::cheatcodes as authwit_cheatcodes; +use crate::Token; + +#[test] +unconstrained fn public_transfer() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + // Transfer tokens + let transfer_amount = mint_amount / 10; + let public_transfer_call_interface = Token::at(token_contract_address).transfer_public(owner, recipient, transfer_amount, 0); + env.call_public(public_transfer_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount - transfer_amount); + utils::check_public_balance(token_contract_address, recipient, transfer_amount); +} + +#[test] +unconstrained fn public_transfer_to_self() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + // Transfer tokens + let transfer_amount = mint_amount / 10; + let public_transfer_call_interface = Token::at(token_contract_address).transfer_public(owner, owner, transfer_amount, 0); + env.call_public(public_transfer_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn public_transfer_on_behalf_of_other() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. 
+ let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let transfer_amount = mint_amount / 10; + let public_transfer_from_call_interface = Token::at(token_contract_address).transfer_public(owner, recipient, transfer_amount, 1); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, recipient, public_transfer_from_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Transfer tokens + env.call_public(public_transfer_from_call_interface); + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount - transfer_amount); + utils::check_public_balance(token_contract_address, recipient, transfer_amount); +} + +#[test] +unconstrained fn public_transfer_failure_more_than_balance() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + // Transfer tokens + let transfer_amount = mint_amount + 1; + let public_transfer_call_interface = Token::at(token_contract_address).transfer_public(owner, recipient, transfer_amount, 0); + // Try to transfer tokens + env.assert_public_call_fails(public_transfer_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn public_transfer_failure_on_behalf_of_self_non_zero_nonce() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + // Transfer tokens + let transfer_amount = mint_amount / 10; + let public_transfer_call_interface = Token::at(token_contract_address).transfer_public(owner, recipient, transfer_amount, unsafe_rand()); + // Try to transfer tokens + env.assert_public_call_fails(public_transfer_call_interface); + + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); +} + +#[test] +unconstrained fn public_transfer_failure_on_behalf_of_other_without_approval() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let transfer_amount = mint_amount / 10; + let public_transfer_from_call_interface = Token::at(token_contract_address).transfer_public(owner, recipient, transfer_amount, 1); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Try to transfer tokens + env.assert_public_call_fails(public_transfer_from_call_interface); + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); + utils::check_public_balance(token_contract_address, recipient, 0); +} + +#[test] +unconstrained fn public_transfer_failure_on_behalf_of_other_more_than_balance() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. 
+ let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let transfer_amount = mint_amount + 1; + let public_transfer_from_call_interface = Token::at(token_contract_address).transfer_public(owner, recipient, transfer_amount, 1); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, recipient, public_transfer_from_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Try to transfer tokens + env.assert_public_call_fails(public_transfer_from_call_interface); + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); + utils::check_public_balance(token_contract_address, recipient, 0); +} + +#[test] +unconstrained fn public_transfer_failure_on_behalf_of_other_wrong_caller() { + // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + let transfer_amount = mint_amount / 10; + let public_transfer_from_call_interface = Token::at(token_contract_address).transfer_public(owner, recipient, transfer_amount, 1); + authwit_cheatcodes::add_public_authwit_from_call_interface(owner, owner, public_transfer_from_call_interface); + // Impersonate recipient to perform the call + cheatcodes::set_contract_address(recipient); + // Try to transfer tokens + env.assert_public_call_fails(public_transfer_from_call_interface); + // Check balances + utils::check_public_balance(token_contract_address, owner, mint_amount); + utils::check_public_balance(token_contract_address, recipient, 0); +} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/unshielding.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/unshielding.nr new file mode 100644 index 00000000000..52987cb1736 --- /dev/null +++ 
b/noir-projects/noir-contracts/contracts/token_contract/src/test/unshielding.nr @@ -0,0 +1,89 @@ +use crate::test::utils; +use dep::aztec::{oracle::unsafe_rand::unsafe_rand, test::helpers::cheatcodes}; +use dep::authwit::cheatcodes as authwit_cheatcodes; +use crate::Token; + +#[test] +unconstrained fn unshield_on_behalf_of_self() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + + let unshield_amount = mint_amount / 10; + let unshield_call_interface = Token::at(token_contract_address).unshield(owner, owner, unshield_amount, 0); + env.call_private_void(unshield_call_interface); + utils::check_private_balance(token_contract_address, owner, mint_amount - unshield_amount); + utils::check_public_balance(token_contract_address, owner, mint_amount + unshield_amount); +} + +#[test] +unconstrained fn unshield_on_behalf_of_other() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + + let unshield_amount = mint_amount / 10; + let unshield_call_interface = Token::at(token_contract_address).unshield(owner, recipient, unshield_amount, 0); + authwit_cheatcodes::add_private_authwit_from_call_interface(owner, recipient, unshield_call_interface); + // Impersonate recipient + cheatcodes::set_contract_address(recipient); + // Unshield tokens + env.call_private_void(unshield_call_interface); + utils::check_private_balance(token_contract_address, owner, mint_amount - unshield_amount); + utils::check_public_balance(token_contract_address, recipient, unshield_amount); +} + +#[test(should_fail_with="Balance too low")] +unconstrained fn unshield_failure_more_than_balance() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + + let unshield_amount = mint_amount + 1; + let unshield_call_interface = Token::at(token_contract_address).unshield(owner, owner, unshield_amount, 0); + env.call_private_void(unshield_call_interface); +} + +#[test(should_fail_with="invalid nonce")] +unconstrained fn unshield_failure_on_behalf_of_self_non_zero_nonce() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); + + let unshield_amount = mint_amount + 1; + let unshield_call_interface = Token::at(token_contract_address).unshield(owner, owner, unshield_amount, unsafe_rand()); + env.call_private_void(unshield_call_interface); +} + +#[test(should_fail_with="Balance too low")] +unconstrained fn unshield_failure_on_behalf_of_other_more_than_balance() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + + let unshield_amount = mint_amount + 1; + let unshield_call_interface = Token::at(token_contract_address).unshield(owner, recipient, unshield_amount, 0); + authwit_cheatcodes::add_private_authwit_from_call_interface(owner, recipient, unshield_call_interface); + // Impersonate recipient + cheatcodes::set_contract_address(recipient); + // Unshield tokens + env.call_private_void(unshield_call_interface); +} + +#[test(should_fail)] +unconstrained fn unshield_failure_on_behalf_of_other_invalid_designated_caller() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + + let unshield_amount = mint_amount + 1; + let unshield_call_interface = Token::at(token_contract_address).unshield(owner, recipient, unshield_amount, 0); + 
authwit_cheatcodes::add_private_authwit_from_call_interface(owner, owner, unshield_call_interface); + // Impersonate recipient + cheatcodes::set_contract_address(recipient); + // Unshield tokens + env.call_private_void(unshield_call_interface); +} + +#[test(should_fail)] +unconstrained fn unshield_failure_on_behalf_of_other_no_approval() { + let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); + + let unshield_amount = mint_amount + 1; + let unshield_call_interface = Token::at(token_contract_address).unshield(owner, recipient, unshield_amount, 0); + // Impersonate recipient + cheatcodes::set_contract_address(recipient); + // Unshield tokens + env.call_private_void(unshield_call_interface); +} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr new file mode 100644 index 00000000000..1801ddd7213 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr @@ -0,0 +1,89 @@ +use dep::aztec::{ + hash::compute_secret_hash, prelude::AztecAddress, + test::helpers::{cheatcodes, test_environment::TestEnvironment}, + protocol_types::storage::map::derive_storage_slot_in_map, + note::{note_getter::{MAX_NOTES_PER_PAGE, view_notes}, note_viewer_options::NoteViewerOptions}, + oracle::{unsafe_rand::unsafe_rand, storage::storage_read} +}; + +use crate::{types::{token_note::TokenNote, transparent_note::TransparentNote}, Token}; + +pub fn setup(with_account_contracts: bool) -> (&mut TestEnvironment, AztecAddress, AztecAddress, AztecAddress) { + // Setup env, generate keys + let mut env = TestEnvironment::new(); + let (owner, recipient) = if with_account_contracts { + let owner = env.create_account_contract(1); + let recipient = env.create_account_contract(2); + // Deploy canonical auth registry + let _auth_registry = 
env.deploy("@aztec/noir-contracts.js/AuthRegistry").without_initializer(); + (owner, recipient) + } else { + let owner = env.create_account(); + let recipient = env.create_account(); + (owner, recipient) + }; + + // Start the test in the account contract address + cheatcodes::set_contract_address(owner); + + // Deploy token contract + let initializer_call_interface = Token::interface().constructor( + owner, + "TestToken0000000000000000000000", + "TT00000000000000000000000000000", + 18 + ); + let token_contract = env.deploy("@aztec/noir-contracts.js/Token").with_public_initializer(initializer_call_interface); + let token_contract_address = token_contract.to_address(); + env.advance_block_by(1); + (&mut env, token_contract_address, owner, recipient) +} + +pub fn setup_and_mint(with_account_contracts: bool) -> (&mut TestEnvironment, AztecAddress, AztecAddress, AztecAddress, Field) { + // Setup + let (env, token_contract_address, owner, recipient) = setup(with_account_contracts); + let mint_amount = 10000; + // Mint some tokens + let secret = unsafe_rand(); + let secret_hash = compute_secret_hash(secret); + let mint_private_call_interface = Token::at(token_contract_address).mint_private(mint_amount, secret_hash); + env.call_public(mint_private_call_interface); + + let mint_public_call_interface = Token::at(token_contract_address).mint_public(owner, mint_amount); + env.call_public(mint_public_call_interface); + + // Time travel so we can read keys from the registry + env.advance_block_by(6); + + // Store a note in the cache so we can redeem it + env.store_note_in_cache( + &mut TransparentNote::new(mint_amount, secret_hash), + Token::storage().pending_shields.slot, + token_contract_address + ); + + // Redeem our shielded tokens + let redeem_shield_call_interface = Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret); + env.call_private_void(redeem_shield_call_interface); + + (env, token_contract_address, owner, recipient, mint_amount) +} + +pub fn 
check_public_balance(token_contract_address: AztecAddress, address: AztecAddress, address_amount: Field) { + let current_contract_address = cheatcodes::get_contract_address(); + cheatcodes::set_contract_address(token_contract_address); + + let balances_slot = Token::storage().public_balances.slot; + let address_slot = derive_storage_slot_in_map(balances_slot, address); + let fields = storage_read(address_slot); + assert(U128::deserialize(fields).to_field() == address_amount, "Public balance is not correct"); + cheatcodes::set_contract_address(current_contract_address); +} + +pub fn check_private_balance(token_contract_address: AztecAddress, address: AztecAddress, address_amount: Field) { + let current_contract_address = cheatcodes::get_contract_address(); + cheatcodes::set_contract_address(token_contract_address); + let balance_of_private = Token::balance_of_private(address); + assert(balance_of_private == address_amount, "Private balance is not correct"); + cheatcodes::set_contract_address(current_contract_address); +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr index 22746ed644d..2434f8ffdeb 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr @@ -2,6 +2,7 @@ mod append_only_tree_snapshot; mod contract_class_function_leaf_preimage; +mod event_selector; mod function_selector; mod function_data; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/event_selector.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/event_selector.nr new file mode 100644 index 00000000000..b03a9dfba1c --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/event_selector.nr @@ -0,0 +1,70 @@ +use crate::utils::field::field_from_bytes; +use dep::std::cmp::Eq; +use crate::traits::{Serialize, Deserialize, FromField, ToField, Empty}; + +global 
SELECTOR_SIZE = 4; + +struct EventSelector { + // 1st 4-bytes (big-endian leftmost) of abi-encoding of an event. + inner: u32, +} + +impl Eq for EventSelector { + fn eq(self, other: EventSelector) -> bool { + other.inner == self.inner + } +} + +impl Serialize<1> for EventSelector { + fn serialize(self: Self) -> [Field; 1] { + [self.inner as Field] + } +} + +impl Deserialize<1> for EventSelector { + fn deserialize(fields: [Field; 1]) -> Self { + Self { + inner: fields[0] as u32 + } + } +} + +impl FromField for EventSelector { + fn from_field(field: Field) -> Self { + Self { inner: field as u32 } + } +} + +impl ToField for EventSelector { + fn to_field(self) -> Field { + self.inner as Field + } +} + +impl Empty for EventSelector { + fn empty() -> Self { + Self { inner: 0 as u32 } + } +} + +impl EventSelector { + pub fn from_u32(value: u32) -> Self { + Self { inner: value } + } + + pub fn from_signature(signature: str) -> Self { + let bytes = signature.as_bytes(); + let hash = dep::std::hash::keccak256(bytes, bytes.len() as u32); + + let mut selector_be_bytes = [0; SELECTOR_SIZE]; + for i in 0..SELECTOR_SIZE { + selector_be_bytes[i] = hash[i]; + } + + EventSelector::from_field(field_from_bytes(selector_be_bytes, true)) + } + + pub fn zero() -> Self { + Self { inner: 0 } + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr index 5c98a585441..9572f179dd1 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr @@ -30,6 +30,7 @@ impl Hash for PublicCallStackItem { impl PublicCallStackItem { fn as_execution_request(self) -> Self { + // WARNING: if updating, see comment in public_call_stack_item.ts's `PublicCallStackItem.hash()` let public_inputs = self.public_inputs; let mut request_public_inputs = 
PublicCircuitPublicInputs::empty(); request_public_inputs.call_context = public_inputs.call_context; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 2d48394e3af..8d450bce84d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -299,13 +299,14 @@ global GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS = 43; global GENERATOR_INDEX__FUNCTION_ARGS = 44; global GENERATOR_INDEX__AUTHWIT_INNER = 45; global GENERATOR_INDEX__AUTHWIT_OUTER = 46; +global GENERATOR_INDEX__AUTHWIT_NULLIFIER = 47; // Key related generators follow -global GENERATOR_INDEX__NSK_M = 47; -global GENERATOR_INDEX__IVSK_M = 48; -global GENERATOR_INDEX__OVSK_M = 49; -global GENERATOR_INDEX__TSK_M = 50; -global GENERATOR_INDEX__PUBLIC_KEYS_HASH = 51; -global GENERATOR_INDEX__NOTE_NULLIFIER = 52; -global GENERATOR_INDEX__INNER_NOTE_HASH = 53; -global GENERATOR_INDEX__NOTE_CONTENT_HASH = 54; -global GENERATOR_INDEX__SYMMETRIC_KEY: u8 = 55; +global GENERATOR_INDEX__NSK_M = 48; +global GENERATOR_INDEX__IVSK_M = 49; +global GENERATOR_INDEX__OVSK_M = 50; +global GENERATOR_INDEX__TSK_M = 51; +global GENERATOR_INDEX__PUBLIC_KEYS_HASH = 52; +global GENERATOR_INDEX__NOTE_NULLIFIER = 53; +global GENERATOR_INDEX__INNER_NOTE_HASH = 54; +global GENERATOR_INDEX__NOTE_CONTENT_HASH = 55; +global GENERATOR_INDEX__SYMMETRIC_KEY: u8 = 56; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/utils.nr b/noir-projects/noir-protocol-circuits/crates/types/src/utils.nr index 95561df1094..88624e25476 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/utils.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/utils.nr @@ -13,7 +13,8 @@ pub fn conditional_assign(predicate: bool, lhs: Field, rhs: Field) -> Field { } pub fn arr_copy_slice(src: [T; N], mut dst: [T; M], offset: u32) -> [T; M] { 
- for i in 0..dst.len() { + let iterator_len = if N > M { M } else { N }; + for i in 0..iterator_len { dst[i] = src[i + offset]; } dst diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index a427e7cc298..50b65919f1e 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -451,6 +451,7 @@ dependencies = [ "noirc_errors", "noirc_frontend", "regex", + "tiny-keccak", ] [[package]] diff --git a/noir/noir-repo/aztec_macros/Cargo.toml b/noir/noir-repo/aztec_macros/Cargo.toml index ed70066af22..a99a654aeed 100644 --- a/noir/noir-repo/aztec_macros/Cargo.toml +++ b/noir/noir-repo/aztec_macros/Cargo.toml @@ -16,4 +16,4 @@ noirc_errors.workspace = true iter-extended.workspace = true convert_case = "0.6.0" regex = "1.10" - +tiny-keccak = { version = "2.0.0", features = ["keccak"] } diff --git a/noir/noir-repo/aztec_macros/src/lib.rs b/noir/noir-repo/aztec_macros/src/lib.rs index d79c7b190ed..580a132aa5a 100644 --- a/noir/noir-repo/aztec_macros/src/lib.rs +++ b/noir/noir-repo/aztec_macros/src/lib.rs @@ -7,7 +7,7 @@ use transforms::{ contract_interface::{ generate_contract_interface, stub_function, update_fn_signatures_in_contract_interface, }, - events::{generate_selector_impl, transform_events}, + events::{generate_event_impls, transform_event_abi}, functions::{ check_for_public_args, export_fn_abi, transform_function, transform_unconstrained, }, @@ -72,6 +72,7 @@ fn transform( } } + generate_event_impls(&mut ast).map_err(|err| (err.into(), file_id))?; generate_note_interface_impl(&mut ast).map_err(|err| (err.into(), file_id))?; Ok(ast) @@ -101,13 +102,6 @@ fn transform_module( generate_storage_layout(module, storage_struct_name.clone(), module_name)?; } - for structure in module.types.iter_mut() { - if structure.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)")) { - module.impls.push(generate_selector_impl(structure)); - has_transformed_module = true; - } - } - let has_initializer = module.functions.iter().any(|func| { 
func.def .attributes @@ -222,7 +216,7 @@ fn transform_hir( context: &mut HirContext, ) -> Result<(), (AztecMacroError, FileId)> { if has_aztec_dependency(crate_id, context) { - transform_events(crate_id, context)?; + transform_event_abi(crate_id, context)?; inject_compute_note_hash_and_optionally_a_nullifier(crate_id, context)?; assign_storage_slots(crate_id, context)?; inject_note_exports(crate_id, context)?; diff --git a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs index 1875ab0b252..8b763dfcc57 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs @@ -155,9 +155,17 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call name: \"{}\", args_hash, args: args_acc, - original: {} + original: {}, + is_static: {} }}", - args_hash, fn_selector, aztec_visibility, is_static, is_void, fn_name, original + args_hash, + fn_selector, + aztec_visibility, + is_static, + is_void, + fn_name, + original, + is_static_call ) } else { let args = format!( @@ -175,9 +183,17 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call name: \"{}\", args: args_acc, gas_opts: dep::aztec::context::gas::GasOpts::default(), - original: {} + original: {}, + is_static: {} }}", - args, fn_selector, aztec_visibility, is_static, is_void, fn_name, original + args, + fn_selector, + aztec_visibility, + is_static, + is_void, + fn_name, + original, + is_static_call ) }; diff --git a/noir/noir-repo/aztec_macros/src/transforms/events.rs b/noir/noir-repo/aztec_macros/src/transforms/events.rs index 69cb6ddafc3..05861b96eb4 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/events.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/events.rs @@ -1,178 +1,333 @@ -use iter_extended::vecmap; -use noirc_errors::Span; -use noirc_frontend::ast::{ - ExpressionKind, 
FunctionDefinition, FunctionReturnType, ItemVisibility, Literal, NoirFunction, - Visibility, -}; +use noirc_frontend::ast::{ItemVisibility, NoirFunction, NoirTraitImpl, TraitImplItem}; +use noirc_frontend::macros_api::{NodeInterner, StructId}; +use noirc_frontend::token::SecondaryAttribute; use noirc_frontend::{ graph::CrateId, - macros_api::{ - BlockExpression, FileId, HirContext, HirExpression, HirLiteral, HirStatement, NodeInterner, - NoirStruct, PathKind, StatementKind, StructId, StructType, Type, TypeImpl, - UnresolvedTypeData, - }, - token::SecondaryAttribute, + macros_api::{FileId, HirContext}, + parse_program, + parser::SortedModule, }; -use crate::{ - chained_dep, - utils::{ - ast_utils::{ - call, expression, ident, ident_path, is_custom_attribute, make_statement, make_type, - path, variable_path, - }, - constants::SIGNATURE_PLACEHOLDER, - errors::AztecMacroError, - hir_utils::{collect_crate_structs, signature_of_type}, - }, -}; +use crate::utils::hir_utils::collect_crate_structs; +use crate::utils::{ast_utils::is_custom_attribute, errors::AztecMacroError}; + +// Automatic implementation of most of the methods in the EventInterface trait, guiding the user with meaningful error messages in case some +// methods must be implemented manually. +pub fn generate_event_impls(module: &mut SortedModule) -> Result<(), AztecMacroError> { + // Find structs annotated with #[aztec(event)] + // Why doesn't this work ? 
Events are not tagged and do not appear, it seems only going through the submodule works + // let annotated_event_structs = module + // .types + // .iter_mut() + // .filter(|typ| typ.attributes.iter().any(|attr: &SecondaryAttribute| is_custom_attribute(attr, "aztec(event)"))); + // This did not work because I needed the submodule itself to add the trait impl back in to, but it would be nice if it was tagged on the module level + // let mut annotated_event_structs = module.submodules.iter_mut() + // .flat_map(|submodule| submodule.contents.types.iter_mut()) + // .filter(|typ| typ.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)"))); + + // To diagnose + // let test = module.types.iter_mut(); + // for event_struct in test { + // print!("\ngenerate_event_interface_impl COUNT: {}\n", event_struct.name.0.contents); + // } + + for submodule in module.submodules.iter_mut() { + let annotated_event_structs = submodule.contents.types.iter_mut().filter(|typ| { + typ.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)")) + }); + + for event_struct in annotated_event_structs { + // event_struct.attributes.push(SecondaryAttribute::Abi("events".to_string())); + // If one impl is pushed, this doesn't throw the "#[abi(tag)] attributes can only be used in contracts" error + // But if more than one impl is pushed, we get an increasing amount of "#[abi(tag)] attributes can only be used in contracts" errors + // We work around this by doing this addition in the HIR pass via transform_event_abi below. 
+ + let event_type = event_struct.name.0.contents.to_string(); + let event_len = event_struct.fields.len() as u32; + // event_byte_len = event fields * 32 + randomness (32) + event_type_id (32) + let event_byte_len = event_len * 32 + 64; + + let mut event_fields = vec![]; + + for (field_ident, field_type) in event_struct.fields.iter() { + event_fields.push(( + field_ident.0.contents.to_string(), + field_type.typ.to_string().replace("plain::", ""), + )); + } -/// Generates the impl for an event selector -/// -/// Inserts the following code: -/// ```noir -/// impl SomeStruct { -/// fn selector() -> FunctionSelector { -/// aztec::protocol_types::abis::function_selector::FunctionSelector::from_signature("SIGNATURE_PLACEHOLDER") -/// } -/// } -/// ``` -/// -/// This allows developers to emit events without having to write the signature of the event every time they emit it. -/// The signature cannot be known at this point since types are not resolved yet, so we use a signature placeholder. -/// It'll get resolved after by transforming the HIR. 
-pub fn generate_selector_impl(structure: &mut NoirStruct) -> TypeImpl { - structure.attributes.push(SecondaryAttribute::Abi("events".to_string())); - let struct_type = - make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![], true)); - - let selector_path = - chained_dep!("aztec", "protocol_types", "abis", "function_selector", "FunctionSelector"); - let mut from_signature_path = selector_path.clone(); - from_signature_path.segments.push(ident("from_signature")); - - let selector_fun_body = BlockExpression { - statements: vec![make_statement(StatementKind::Expression(call( - variable_path(from_signature_path), - vec![expression(ExpressionKind::Literal(Literal::Str( - SIGNATURE_PLACEHOLDER.to_string(), - )))], - )))], - }; - - // Define `FunctionSelector` return type - let return_type = - FunctionReturnType::Ty(make_type(UnresolvedTypeData::Named(selector_path, vec![], true))); - - let mut selector_fn_def = FunctionDefinition::normal( - &ident("selector"), - &vec![], - &[], - &selector_fun_body, - &[], - &return_type, - ); - - selector_fn_def.visibility = ItemVisibility::Public; - - // Seems to be necessary on contract modules - selector_fn_def.return_visibility = Visibility::Public; - - TypeImpl { - object_type: struct_type, - type_span: structure.span, - generics: vec![], - methods: vec![(NoirFunction::normal(selector_fn_def), Span::default())], + let mut event_interface_trait_impl = + generate_trait_impl_stub_event_interface(event_type.as_str(), event_byte_len)?; + event_interface_trait_impl.items.push(TraitImplItem::Function( + generate_fn_get_event_type_id(event_type.as_str(), event_len)?, + )); + event_interface_trait_impl.items.push(TraitImplItem::Function( + generate_fn_private_to_be_bytes(event_type.as_str(), event_byte_len)?, + )); + event_interface_trait_impl.items.push(TraitImplItem::Function( + generate_fn_to_be_bytes(event_type.as_str(), event_byte_len)?, + )); + event_interface_trait_impl + .items + 
.push(TraitImplItem::Function(generate_fn_emit(event_type.as_str())?)); + submodule.contents.trait_impls.push(event_interface_trait_impl); + + let serialize_trait_impl = + generate_trait_impl_serialize(event_type.as_str(), event_len, &event_fields)?; + submodule.contents.trait_impls.push(serialize_trait_impl); + + let deserialize_trait_impl = + generate_trait_impl_deserialize(event_type.as_str(), event_len, &event_fields)?; + submodule.contents.trait_impls.push(deserialize_trait_impl); + } } + + Ok(()) } -/// Computes the signature for a resolved event type. -/// It has the form 'EventName(Field,(Field),[u8;2])' -fn event_signature(event: &StructType) -> String { - let fields = vecmap(event.get_fields(&[]), |(_, typ)| signature_of_type(&typ)); - format!("{}({})", event.name.0.contents, fields.join(",")) +fn generate_trait_impl_stub_event_interface( + event_type: &str, + byte_length: u32, +) -> Result { + let byte_length_without_randomness = byte_length - 32; + let trait_impl_source = format!( + " +impl dep::aztec::event::event_interface::EventInterface<{byte_length}, {byte_length_without_randomness}> for {event_type} {{ + }} + " + ) + .to_string(); + + let (parsed_ast, errors) = parse_program(&trait_impl_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of {event_type} for EventInterface). This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut sorted_ast = parsed_ast.into_sorted(); + let event_interface_impl = sorted_ast.trait_impls.remove(0); + + Ok(event_interface_impl) } -/// Substitutes the signature literal that was introduced in the selector method previously with the actual signature. 
-fn transform_event( - struct_id: StructId, - interner: &mut NodeInterner, -) -> Result<(), (AztecMacroError, FileId)> { - let struct_type = interner.get_struct(struct_id); - let selector_id = interner - .lookup_method(&Type::Struct(struct_type.clone(), vec![]), struct_id, "selector", false) - .ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Selector method not found".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - let selector_function = interner.function(&selector_id); - - let compute_selector_statement = interner.statement( - selector_function.block(interner).statements().first().ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement not found".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?, - ); - - let compute_selector_expression = match compute_selector_statement { - HirStatement::Expression(expression_id) => match interner.expression(&expression_id) { - HirExpression::Call(hir_call_expression) => Some(hir_call_expression), - _ => None, - }, - _ => None, +fn generate_trait_impl_serialize( + event_type: &str, + event_len: u32, + event_fields: &[(String, String)], +) -> Result { + let field_names = + event_fields.iter().map(|field| format!("self.{}", field.0)).collect::>(); + let field_input = field_names.join(","); + + let trait_impl_source = format!( + " + impl dep::aztec::protocol_types::traits::Serialize<{event_len}> for {event_type} {{ + fn serialize(self: {event_type}) -> [Field; {event_len}] {{ + [{field_input}] + }} + }} + " + ) + .to_string(); + + let (parsed_ast, errors) = parse_program(&trait_impl_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of Serialize for {event_type}). 
This is either a bug in the compiler or the Noir macro code")), + }); } - .ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement is not a call expression".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - - let first_arg_id = compute_selector_expression.arguments.first().ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement is not a call expression".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - - match interner.expression(first_arg_id) { - HirExpression::Literal(HirLiteral::Str(signature)) - if signature == SIGNATURE_PLACEHOLDER => - { - let selector_literal_id = *first_arg_id; - - let structure = interner.get_struct(struct_id); - let signature = event_signature(&structure.borrow()); - interner.update_expression(selector_literal_id, |expr| { - *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); - }); - - // Also update the type! It might have a different length now than the placeholder. 
- interner.push_expr_type( - selector_literal_id, - Type::String(Box::new(Type::Constant(signature.len() as u32))), - ); - Ok(()) - } - _ => Err(( - AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Signature placeholder literal does not match".to_owned(), - }, - struct_type.borrow().location.file, - )), + + let mut sorted_ast = parsed_ast.into_sorted(); + let serialize_impl = sorted_ast.trait_impls.remove(0); + + Ok(serialize_impl) +} + +fn generate_trait_impl_deserialize( + event_type: &str, + event_len: u32, + event_fields: &[(String, String)], +) -> Result { + let field_names: Vec = event_fields + .iter() + .enumerate() + .map(|(index, field)| format!("{}: fields[{}]", field.0, index)) + .collect::>(); + let field_input = field_names.join(","); + + let trait_impl_source = format!( + " + impl dep::aztec::protocol_types::traits::Deserialize<{event_len}> for {event_type} {{ + fn deserialize(fields: [Field; {event_len}]) -> {event_type} {{ + {event_type} {{ {field_input} }} + }} + }} + " + ) + .to_string(); + + let (parsed_ast, errors) = parse_program(&trait_impl_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of Deserialize for {event_type}). 
This is either a bug in the compiler or the Noir macro code")), + }); } + + let mut sorted_ast = parsed_ast.into_sorted(); + let deserialize_impl = sorted_ast.trait_impls.remove(0); + + Ok(deserialize_impl) } -pub fn transform_events( +fn generate_fn_get_event_type_id( + event_type: &str, + field_length: u32, +) -> Result { + let from_signature_input = + std::iter::repeat("Field").take(field_length as usize).collect::>().join(","); + let function_source = format!( + " + fn get_event_type_id() -> dep::aztec::protocol_types::abis::event_selector::EventSelector {{ + dep::aztec::protocol_types::abis::event_selector::EventSelector::from_signature(\"{event_type}({from_signature_input})\") + }} + ", + ) + .to_string(); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (fn get_event_type_id, implemented for EventInterface of {event_type}). 
This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut function_ast = function_ast.into_sorted(); + let mut noir_fn = function_ast.functions.remove(0); + noir_fn.def.visibility = ItemVisibility::Public; + Ok(noir_fn) +} + +fn generate_fn_private_to_be_bytes( + event_type: &str, + byte_length: u32, +) -> Result { + let function_source = format!( + " + fn private_to_be_bytes(self: {event_type}, randomness: Field) -> [u8; {byte_length}] {{ + let mut buffer: [u8; {byte_length}] = [0; {byte_length}]; + + let randomness_bytes = randomness.to_be_bytes(32); + let event_type_id_bytes = {event_type}::get_event_type_id().to_field().to_be_bytes(32); + + for i in 0..32 {{ + buffer[i] = randomness_bytes[i]; + buffer[32 + i] = event_type_id_bytes[i]; + }} + + let serialized_event = self.serialize(); + + for i in 0..serialized_event.len() {{ + let bytes = serialized_event[i].to_be_bytes(32); + for j in 0..32 {{ + buffer[64 + i * 32 + j] = bytes[j]; + }} + }} + + buffer + }} + " + ) + .to_string(); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (fn private_to_be_bytes, implemented for EventInterface of {event_type}). 
This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut function_ast = function_ast.into_sorted(); + let mut noir_fn = function_ast.functions.remove(0); + noir_fn.def.visibility = ItemVisibility::Public; + Ok(noir_fn) +} + +fn generate_fn_to_be_bytes( + event_type: &str, + byte_length: u32, +) -> Result { + let byte_length_without_randomness = byte_length - 32; + let function_source = format!( + " + fn to_be_bytes(self: {event_type}) -> [u8; {byte_length_without_randomness}] {{ + let mut buffer: [u8; {byte_length_without_randomness}] = [0; {byte_length_without_randomness}]; + + let event_type_id_bytes = {event_type}::get_event_type_id().to_field().to_be_bytes(32); + + for i in 0..32 {{ + buffer[i] = event_type_id_bytes[i]; + }} + + let serialized_event = self.serialize(); + + for i in 0..serialized_event.len() {{ + let bytes = serialized_event[i].to_be_bytes(32); + for j in 0..32 {{ + buffer[32 + i * 32 + j] = bytes[j]; + }} + }} + + buffer + }} + ") + .to_string(); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (fn to_be_bytes, implemented for EventInterface of {event_type}). 
This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut function_ast = function_ast.into_sorted(); + let mut noir_fn = function_ast.functions.remove(0); + noir_fn.def.visibility = ItemVisibility::Public; + Ok(noir_fn) +} + +fn generate_fn_emit(event_type: &str) -> Result { + let function_source = format!( + " + fn emit(self: {event_type}, _emit: fn[Env](Self) -> ()) {{ + _emit(self); + }} + " + ) + .to_string(); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (fn emit, implemented for EventInterface of {event_type}). This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut function_ast = function_ast.into_sorted(); + let mut noir_fn = function_ast.functions.remove(0); + noir_fn.def.visibility = ItemVisibility::Public; + Ok(noir_fn) +} + +// We do this pass in the HIR to work around the "#[abi(tag)] attributes can only be used in contracts" error +pub fn transform_event_abi( crate_id: &CrateId, context: &mut HirContext, ) -> Result<(), (AztecMacroError, FileId)> { @@ -184,3 +339,14 @@ pub fn transform_events( } Ok(()) } + +fn transform_event( + struct_id: StructId, + interner: &mut NodeInterner, +) -> Result<(), (AztecMacroError, FileId)> { + interner.update_struct_attributes(struct_id, |struct_attributes| { + struct_attributes.push(SecondaryAttribute::Abi("events".to_string())); + }); + + Ok(()) +} diff --git a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs index fdce8b81db2..3ace22a89c3 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs @@ -11,7 +11,10 @@ use noirc_frontend::{ Type, }; +use acvm::AcirField; use regex::Regex; +// TODO(#7165): nuke the 
following dependency from here and Cargo.toml +use tiny_keccak::{Hasher, Keccak}; use crate::{ chained_dep, @@ -97,7 +100,6 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt .collect::, _>>()?; let [note_serialized_len, note_bytes_len]: [_; 2] = note_interface_generics.try_into().unwrap(); - let note_type_id = note_type_id(¬e_type); // Automatically inject the header field if it's not present let (header_field_name, _) = if let Some(existing_header) = @@ -184,25 +186,26 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt } if !check_trait_method_implemented(trait_impl, "get_note_type_id") { + let note_type_id = compute_note_type_id(¬e_type); let get_note_type_id_fn = - generate_note_get_type_id(¬e_type_id, note_interface_impl_span)?; + generate_get_note_type_id(note_type_id, note_interface_impl_span)?; trait_impl.items.push(TraitImplItem::Function(get_note_type_id_fn)); } if !check_trait_method_implemented(trait_impl, "compute_note_content_hash") { - let get_header_fn = + let compute_note_content_hash_fn = generate_compute_note_content_hash(¬e_type, note_interface_impl_span)?; - trait_impl.items.push(TraitImplItem::Function(get_header_fn)); + trait_impl.items.push(TraitImplItem::Function(compute_note_content_hash_fn)); } if !check_trait_method_implemented(trait_impl, "to_be_bytes") { - let get_header_fn = generate_note_to_be_bytes( + let to_be_bytes_fn = generate_note_to_be_bytes( ¬e_type, note_bytes_len.as_str(), note_serialized_len.as_str(), note_interface_impl_span, )?; - trait_impl.items.push(TraitImplItem::Function(get_header_fn)); + trait_impl.items.push(TraitImplItem::Function(to_be_bytes_fn)); } } @@ -324,16 +327,17 @@ fn generate_note_set_header( // Automatically generate the note type id getter method. The id itself its calculated as the concatenation // of the conversion of the characters in the note's struct name to unsigned integers. 
-fn generate_note_get_type_id( - note_type_id: &str, +fn generate_get_note_type_id( + note_type_id: u32, impl_span: Option, ) -> Result { + // TODO(#7165): replace {} with dep::aztec::protocol_types::abis::note_selector::compute_note_selector(\"{}\") in the function source below let function_source = format!( " - fn get_note_type_id() -> Field {{ - {} - }} - ", + fn get_note_type_id() -> Field {{ + {} + }} + ", note_type_id ) .to_string(); @@ -387,7 +391,7 @@ fn generate_note_properties_struct( // Generate the deserialize_content method as // -// fn deserialize_content(serialized_note: [Field; NOTE_SERILIZED_LEN]) -> Self { +// fn deserialize_content(serialized_note: [Field; NOTE_SERIALIZED_LEN]) -> Self { // NoteType { // note_field1: serialized_note[0] as Field, // note_field2: NoteFieldType2::from_field(serialized_note[1])... @@ -525,10 +529,10 @@ fn generate_note_exports_global( let struct_source = format!( " #[abi(notes)] - global {0}_EXPORTS: (Field, str<{1}>) = ({2},\"{0}\"); + global {0}_EXPORTS: (Field, str<{1}>) = (0x{2},\"{0}\"); ", note_type, - note_type_id.len(), + note_type.len(), note_type_id ) .to_string(); @@ -685,10 +689,18 @@ fn generate_note_deserialize_content_source( .to_string() } +// TODO(#7165): nuke this function // Utility function to generate the note type id as a Field -fn note_type_id(note_type: &str) -> String { +fn compute_note_type_id(note_type: &str) -> u32 { // TODO(#4519) Improve automatic note id generation and assignment - note_type.chars().map(|c| (c as u32).to_string()).collect::>().join("") + let mut keccak = Keccak::v256(); + let mut result = [0u8; 32]; + keccak.update(note_type.as_bytes()); + keccak.finalize(&mut result); + // Take the first 4 bytes of the hash and convert them to an integer + // If you change the following value you have to change NUM_BYTES_PER_NOTE_TYPE_ID in l1_note_payload.ts as well + let num_bytes_per_note_type_id = 4; + u32::from_be_bytes(result[0..num_bytes_per_note_type_id].try_into().unwrap()) } 
pub fn inject_note_exports( @@ -717,29 +729,42 @@ pub fn inject_note_exports( }, file_id, ))?; - let init_function = + let get_note_type_id_function = context.def_interner.function(&func_id).block(&context.def_interner); - let init_function_statement_id = init_function.statements().first().ok_or(( - AztecMacroError::CouldNotExportStorageLayout { - span: None, - secondary_message: Some(format!( - "Could not retrieve note id statement from function for note {}", - note.borrow().name.0.contents - )), - }, - file_id, - ))?; - let note_id_statement = context.def_interner.statement(init_function_statement_id); + let get_note_type_id_statement_id = + get_note_type_id_function.statements().first().ok_or(( + AztecMacroError::CouldNotExportStorageLayout { + span: None, + secondary_message: Some(format!( + "Could not retrieve note id statement from function for note {}", + note.borrow().name.0.contents + )), + }, + file_id, + ))?; + let note_type_id_statement = + context.def_interner.statement(get_note_type_id_statement_id); - let note_id_value = match note_id_statement { + let note_type_id = match note_type_id_statement { HirStatement::Expression(expression_id) => { match context.def_interner.expression(&expression_id) { HirExpression::Literal(HirLiteral::Integer(value, _)) => Ok(value), + HirExpression::Literal(_) => Err(( + AztecMacroError::CouldNotExportStorageLayout { + span: None, + secondary_message: Some( + "note_type_id statement must be a literal integer expression" + .to_string(), + ), + }, + file_id, + )), _ => Err(( AztecMacroError::CouldNotExportStorageLayout { span: None, secondary_message: Some( - "note_id statement must be a literal expression".to_string(), + "note_type_id statement must be a literal expression" + .to_string(), ), }, file_id, @@ -747,9 +772,10 @@ pub fn inject_note_exports( } } _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { + AztecMacroError::CouldNotExportStorageLayout { + span: None, secondary_message: Some( - "note_id statement 
must be an expression".to_string(), + "note_type_id statement must be an expression".to_string(), ), }, file_id, @@ -757,7 +783,7 @@ pub fn inject_note_exports( }?; let global = generate_note_exports_global( ¬e.borrow().name.0.contents, - ¬e_id_value.to_string(), + ¬e_type_id.to_hex(), ) .map_err(|err| (err, file_id))?; diff --git a/noir/noir-repo/aztec_macros/src/utils/constants.rs b/noir/noir-repo/aztec_macros/src/utils/constants.rs index 848cca0477d..2178f7a2526 100644 --- a/noir/noir-repo/aztec_macros/src/utils/constants.rs +++ b/noir/noir-repo/aztec_macros/src/utils/constants.rs @@ -1,4 +1,3 @@ pub const FUNCTION_TREE_HEIGHT: u32 = 5; pub const MAX_CONTRACT_PRIVATE_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); -pub const SIGNATURE_PLACEHOLDER: &str = "SIGNATURE_PLACEHOLDER"; pub const SELECTOR_PLACEHOLDER: &str = "SELECTOR_PLACEHOLDER"; diff --git a/noir/noir-repo/aztec_macros/src/utils/errors.rs b/noir/noir-repo/aztec_macros/src/utils/errors.rs index 852b5f1e57a..557d065cb25 100644 --- a/noir/noir-repo/aztec_macros/src/utils/errors.rs +++ b/noir/noir-repo/aztec_macros/src/utils/errors.rs @@ -14,6 +14,7 @@ pub enum AztecMacroError { CouldNotAssignStorageSlots { secondary_message: Option }, CouldNotImplementComputeNoteHashAndOptionallyANullifier { secondary_message: Option }, CouldNotImplementNoteInterface { span: Option, secondary_message: Option }, + CouldNotImplementEventInterface { secondary_message: Option }, MultipleStorageDefinitions { span: Option }, CouldNotExportStorageLayout { span: Option, secondary_message: Option }, CouldNotInjectContextGenericInStorage { secondary_message: Option }, @@ -67,6 +68,11 @@ impl From for MacroError { secondary_message, span }, + AztecMacroError::CouldNotImplementEventInterface { secondary_message } => MacroError { + primary_message: "Could not implement automatic methods for event, please provide an implementation of the EventInterface trait".to_string(), + secondary_message, + span: None, + }, 
AztecMacroError::MultipleStorageDefinitions { span } => MacroError { primary_message: "Only one struct can be tagged as #[aztec(storage)]".to_string(), secondary_message: None, diff --git a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs index 71dd1b18761..e959c61732a 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs @@ -1,6 +1,7 @@ use std::collections::BTreeMap; use acvm::acir::circuit::ErrorSelector; +use acvm::AcirField; use iter_extended::vecmap; use noirc_abi::{Abi, AbiErrorType, AbiParameter, AbiReturnType, AbiType, AbiValue}; use noirc_frontend::ast::Visibility; @@ -107,9 +108,7 @@ pub(super) fn value_from_hir_expression(context: &Context, expression: HirExpres }, HirLiteral::Bool(value) => AbiValue::Boolean { value }, HirLiteral::Str(value) => AbiValue::String { value }, - HirLiteral::Integer(field, sign) => { - AbiValue::Integer { value: field.to_string(), sign } - } + HirLiteral::Integer(field, sign) => AbiValue::Integer { value: field.to_hex(), sign }, _ => unreachable!("Literal cannot be used in the abi"), }, _ => unreachable!("Type cannot be used in the abi {:?}", expression), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs index 343113836ed..9a0be775c30 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -88,15 +88,12 @@ pub fn resolve_import( import_directive: &ImportDirective, def_maps: &BTreeMap, ) -> Result { - let allow_contracts = - allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); - let module_scope = import_directive.module_id; let NamespaceResolution { module_id: resolved_module, namespace: resolved_namespace, mut error, - } = resolve_path_to_ns(import_directive, crate_id, 
crate_id, def_maps, allow_contracts)?; + } = resolve_path_to_ns(import_directive, crate_id, crate_id, def_maps)?; let name = resolve_path_name(import_directive); @@ -129,20 +126,11 @@ pub fn resolve_import( }) } -fn allow_referencing_contracts( - def_maps: &BTreeMap, - krate: CrateId, - local_id: LocalModuleId, -) -> bool { - ModuleId { krate, local_id }.module(def_maps).is_contract -} - fn resolve_path_to_ns( import_directive: &ImportDirective, crate_id: CrateId, importing_crate: CrateId, def_maps: &BTreeMap, - allow_contracts: bool, ) -> NamespaceResolutionResult { let import_path = &import_directive.path.segments; let def_map = &def_maps[&crate_id]; @@ -150,21 +138,11 @@ fn resolve_path_to_ns( match import_directive.path.kind { crate::ast::PathKind::Crate => { // Resolve from the root of the crate - resolve_path_from_crate_root( - crate_id, - importing_crate, - import_path, - def_maps, - allow_contracts, - ) + resolve_path_from_crate_root(crate_id, importing_crate, import_path, def_maps) + } + crate::ast::PathKind::Dep => { + resolve_external_dep(def_map, import_directive, def_maps, importing_crate) } - crate::ast::PathKind::Dep => resolve_external_dep( - def_map, - import_directive, - def_maps, - allow_contracts, - importing_crate, - ), crate::ast::PathKind::Plain => { // Plain paths are only used to import children modules. It's possible to allow import of external deps, but maybe this distinction is better? 
// In Rust they can also point to external Dependencies, if no children can be found with the specified name @@ -174,7 +152,6 @@ fn resolve_path_to_ns( import_path, import_directive.module_id, def_maps, - allow_contracts, ) } } @@ -186,7 +163,6 @@ fn resolve_path_from_crate_root( import_path: &[Ident], def_maps: &BTreeMap, - allow_contracts: bool, ) -> NamespaceResolutionResult { resolve_name_in_module( crate_id, @@ -194,7 +170,6 @@ fn resolve_path_from_crate_root( import_path, def_maps[&crate_id].root, def_maps, - allow_contracts, ) } @@ -204,7 +179,6 @@ fn resolve_name_in_module( import_path: &[Ident], starting_mod: LocalModuleId, def_maps: &BTreeMap, - allow_contracts: bool, ) -> NamespaceResolutionResult { let def_map = &def_maps[&krate]; let mut current_mod_id = ModuleId { krate, local_id: starting_mod }; @@ -267,10 +241,6 @@ fn resolve_name_in_module( return Err(PathResolutionError::Unresolved(current_segment.clone())); } - // Check if it is a contract and we're calling from a non-contract context - if current_mod.is_contract && !allow_contracts { - return Err(PathResolutionError::ExternalContractUsed(current_segment.clone())); - } current_ns = found_ns; } @@ -288,7 +258,6 @@ fn resolve_external_dep( current_def_map: &CrateDefMap, directive: &ImportDirective, def_maps: &BTreeMap, - allow_contracts: bool, importing_crate: CrateId, ) -> NamespaceResolutionResult { // Use extern_prelude to get the dep @@ -316,7 +285,7 @@ fn resolve_external_dep( is_prelude: false, }; - resolve_path_to_ns(&dep_directive, dep_module.krate, importing_crate, def_maps, allow_contracts) + resolve_path_to_ns(&dep_directive, dep_module.krate, importing_crate, def_maps) } // Issue an error if the given private function is being called from a non-child module, or diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs index cef49332b00..cd82685c31e 100644 --- 
a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs @@ -623,6 +623,15 @@ impl NodeInterner { f(&mut value); } + pub fn update_struct_attributes( + &mut self, + type_id: StructId, + f: impl FnOnce(&mut StructAttributes), + ) { + let value = self.struct_attributes.get_mut(&type_id).unwrap(); + f(value); + } + pub fn update_trait(&mut self, trait_id: TraitId, f: impl FnOnce(&mut Trait)) { let value = self.traits.get_mut(&trait_id).unwrap(); f(value); diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml index 1ebc77c5a5f..4619fd298dd 100644 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml @@ -1,4 +1,4 @@ key_hash = "0x096129b1c6e108252fc5c829c4cc9b7e8f0d1fd9f29c2532b563d6396645e08f" -proof = 
["0x0000000000000000000000000000000000000000000000000000000000000020","0x0000000000000000000000000000000000000000000000000000000000000011","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf","0x00000000000000000000000000000000000000000000000b75c020998797da78","0x0000000000000000000000000000000000000000000000005a107acb64952eca","0x000000000000000000000000000000000000000000000000000031e97a575e9d","0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4","0x00000000000000000000000000000000000000000000000c410db10a01750aeb","0x00000000000000000000000000000000000000000000000d722669117f9758a4","0x000000000000000000000000000000000000000000000000000178cbf4206471","0x000000000000000000000000000000000000000000000000e91b8a11e7842c38","0x000000000000000000000000000000000000000000000007fd51009034b3357f","0x000000000000000000000000000000000000000000000009889939f81e9c7402","0x0000000000000000000000000000000000000000000000000000f94656a2ca48","0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f","0x0000000000000000000000000000000000000000000000093fe27776f50224bd","0x000000000000000000000000000000000000000000000004a0c80c0da527a081","0x0000000000000000000000000000000000000000000000000001b52c2020d746","0x0000000000000000000000000000005a9bae947e1e91af9e4033d8d6aa6ed632","0x000000000000000000000000000000000025e485e013446d4ac7981c88ba6ecc","0x000000000000000000000000000000ff1e0496e30ab24a63b32b2d1120b76e62","0x00000000000000000000000000000000001afe0a8a685d7cd85d1010e55d9d7c","0x000000000000000000000000000000b0804efd6573805f991458295f510a2004","0x00000000000000000000000000000000000c81a178016e2fe18605022d5a8b0e","0x000000000000000000000000000000eba51e76eb1cfff60a53a0092a3c3dea47","0x000000000000000000000000000000000022e7466247b533282f5936ac4e6c15","0x00000000000000000000000000000071b1d76edf770edff98f00ff4deec264cd","0x00000000000000000000000000000000001e48128e68794d8861fcbb2986a383
","0x000000000000000000000000000000d3a2af4915ae6d86b097adc377fafda2d4","0x000000000000000000000000000000000006359de9ca452dab3a4f1f8d9c9d98","0x00000000000000000000000000000044d7ca77b464f03aa44f6f8d49a0d3ada5","0x00000000000000000000000000000000002a36959f550517d82d0af666bcd7dc","0x0000000000000000000000000000000566b28c19f0b1732b95e0381bc5d6dbdd","0x00000000000000000000000000000000002511360b7a8c6a823559f0ac9eb02b","0x000000000000000000000000000000f968b227a358a305607f3efc933823d288","0x00000000000000000000000000000000000eaf8adb390375a76d95e918b65e08","0x000000000000000000000000000000bb34b4b447aae56f5e24f81c3acd6d547f","0x00000000000000000000000000000000002175d012746260ebcfe339a91a81e1","0x00000000000000000000000000000058035b1ed115023f42bf4ee93d2dc29dcb","0x00000000000000000000000000000000002de4b004225be4e68938b0db546287","0x0000000000000000000000000000003d18d72585ef033ab3663d1944abb2054a","0x0000000000000000000000000000000000149a1974c0c2b5f0639970cda1af83","0x000000000000000000000000000000bb1eb2b1fc10b55295ed6c1ae54e8a40da","0x000000000000000000000000000000000026da80059472ac8c64e437d6fe6134","0x000000000000000000000000000000d1f101b72ee710423ca44548910676a4fe","0x00000000000000000000000000000000000323378ad6b5aec67af99e522095a0","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x2622384e4b4688a3ad115007c1c09de1a141aeca06c31925898cf746038a5897","0x2f743e893a3880004db1ff3492279d89c025b9815f16e129d15f7a3687b6f833","0x03e05487307f18e3afb90cc524e56809e478039d317a3757433bfc8e06a32b73","0x099ba7011747dd2d8b5ac03ed02b93c9803d51899677409931d5b1571c3041b5","0x189ef108e334c5173619eac1067b99526a5cc6e47cbffaa3c117f0c3eb8bebd4","0x0b5f77b69ac2955ecc44a73e18b2ea8403224cf769657d53acc9a5d302d0b86e","0x1b81353a160e985e8a1fb09d3a3827fe68d03585757530dcec1b8038ac829a21","0x175e75cef1b974011de38e6e631f42bffd4dcb6fad6680930388cffaa60d940e","0x1631945a2aa39032cfa8cf379d18a983d4b5a487adab67252c6514b35bc8809
5","0x181b639e465a6f9842c5d75f6f5b855a065f498595146df3bd2b9c0ef66042a1","0x0c6e5af7add3e12f610c13d8066896d08882a7c50cfe33676fda8a75e250e9b9","0x28f94cd060c45a2e6b423831302deb456d0964879db5008a2be0957a2c749e2a","0x1c81fb20cea508580aa962e5b4736a43382816e7abac7e478e6c080cf896798d","0x23dea53784aa14dcf7e1cce5ee480796e67b2dd69a8e20c5c09558001640edfa","0x149c2548f8b0d96fefecab53e31aa3902341c903fa0ef863ef64610315de993b","0x16ad81b3129ccebe1682d14b726bc9b86acd0f0be8c304594ce5a87e756add27","0x2c1ef938516edccc0cd1d4d812644d72b6ead3c85e1c8500fc54e77e5652b23f","0x0eecb7fba3395b21197cb24bb9b733b1985d81f35a1ee944714ffd781a7bd136","0x06e2a96ecf1e8419198eca10133954f3560102467f40a234cf071d23c6cf411a","0x1e6bfa2adcbdc50313408ef28a77b76dd915fa372c093c4484ba662695a3eadc","0x28ccaf4d4759c1f4bb49429b961a59cdefbc445017ffa807e90c54b27e1ee657","0x22803d537311e757a146ae7a2fc396d42d67f27e73efca82e3e324dc493da4de","0x196255f687cede05f326204bfaead7a54f8d48b67ce8522cb8af6a7fffaffcb6","0x147ea42988386b944f006be242ccc6b099fadd7f450955d252768667bbaee4f9","0x1f9ccb05e508b1d08c79c11acbc0677fdc18d5d40827e2e1eaae60fee51b940f","0x28ea76870d22eea72821da25f9b7a89341347afcd6c077387986a82dc8afa833","0x0e6ef82d3e5a318a9c6233dffbb00d130599f4ac979a89b034ce9d930b11165a","0x2e97fa9299a218c982504199ada3278270b9cb566bf46fe1ecc1d151e06b8745","0x1a41ac9b1032ac24c11720407c253a866e9c75a4ec233f15f968b206ea1e5d0e","0x0b31b541bb044c1bc2428c2a57ba29438f620050d1628389ff1fa90c494d7c58","0x050fec8d69f182768a9b34eca8c3f4695dad8bc20a10904090cfe18777d44d25","0x069283ac40daaafff76c3679f54a0aa773c8d71152fbb9c3219906113fc4f683","0x25c3ec4e8b90214aafe3b5416abf11a98bd34b8acb449df8424f159ddf858bc1","0x1a3884f3a922d0da758cb7ed9a5ddc3c3c2132dde8d913753fa3e6b766be5697","0x222d05a0fce0565bf9cc490f97bd4eff53858f2ca6afe9d91c5c8d7de8076f39","0x054698b045b439467a3067a8dc2b4d020b2bb44df3d98a19f9cfb04c9ee5ffd1","0x0e39d66cded0f3df40e04124e36c827bcaf15fbe9fb6e9bbc3af889f8bd1ebf0","0x145aea47dc97ec35ac67f135aac37f8bc6eaf149551a2f48901529d10e25c8
60","0x1894877b2769ae2c288738f8aa33acfc7ca9a7d1e26a76908ca2909bf25aa59a","0x27e8c702be67be467f052abd180464a468b7d5d5d8a4961e56e8561f7863c91a","0x0326d3e4607d54a30c7fa99d1609f386aeb8c8094cabd7397246074f634dcec8","0x17eb8f62b5ba2dad391e3f81d3a6b9d03ff723a7d6a4d77b98b18ddd0debf4fd","0x1a5d3e8a27c1f69d6e4558b3c89cd9347c62182ce90fb6e34392bc4e7b7c178c","0x2293034bed3d33d5ad0d150f64d493c9be554f640103621f9ae56034a7323d84","0x13d75ffbb9d2ceb2daa6d42f3618d4ea9775befa1cf5f9df141dfebf794abc35","0x2ec339c42fbb2d50221ec907779e72be3eab2960d110a90d36cc6c0afcf5857e","0x15e9c913fa84a2657571831d5d7a90f6534ca67a1617b4063fa5bf09f46cd7a2","0x10f56fbe9fefd59d2acd49fa641fedcfb65d96d54cf47207e2c8ab34f22bbabe","0x117fa3859a400040ebe8dee4a60ddcb04484ff5cfb5294c6530354c3c8cb35f3","0x123260b824df2f8bbe6a351ba2fa94c61aa754741eb198b768a699b2d1cc2b6f","0x1e51d9a653adc6b67287d35bb60584261f57363177c6b54a56dbd39834d851ba","0x18a9b2e2fce77bdb5e41215e2caeb7e77e946dbb2f381c8e7974709e03a6c216","0x2b2640870195a40e374cfa834e37ad9a5e17cb687bd2119a63ac02c3769b0f1e","0x2da73263fef362dfc79dd1066fd7ec294b765e2533f3ac4320e8d1540f2639a8","0x0cc9f299e5291bb1bc0951ce510a634c418af9f9802a291fe6d951768c0a1b2d","0x02a940acb788df42cc9219531776d45465be19087fc3f523fe92df771e5efc10","0x2d5976cc5540e761824bdacf69a2dddabe104fdbb235985ae9080b488f642fa9","0x284c18d1574d2cb7b4ee45b3ff30176eff2ab9c7b7f60cd8a87cef599379244d","0x12a38d659bf38da09af8f445505baa16bcb036d83173f6f45a7e46cac511e5a1","0x0852ef710b2396ba5b7fd69a95b336908d3a368262ec41e0d972564f784201a4","0x240c467a31ed3bb7c4cef09407750d2d89b3750e6cebb4aaa9d0f1f92be77249","0x04edf7595087745abc11fe7780afd4754c5013725653a4cec31f039b77e7b3c7","0x080d04b50ae3acd787f33f8f4a639a58677b5c04ef8a352fd4dd9236883f0e81","0x0cd745e7540fe230038f024ab1269177599ad94e8d8099a010eb7eebd3e41ec8","0x25e2394f90f5b3e3046b8876a6b3ef19a03ef9e9aeae4813fcb14907decc0393","0x03df12a6e39c606d70d3d470aff710d9daa86dece773a6f6f057725b57d6d115","0x0f744082aecf54f55db19dfbe56a81c17b3eb48417305c129beb6c97a22c7
05b","0x244a80d6d82e82fc416e8e4694deb4e08b81c32bb90cb2f96ff3f687298322d1","0x251eb4d8692f49523e3972096264ee770b295fb62a970fbfdd8aa1fff661ef50","0x0c4d9200120430618493a9151d632faa95c9ae842b7d97103a4afb3330cafbed","0x09e970a55dd7335db16a3823b6489c77cb7785f674cb7c924994ee121122e514","0x19e5bd1113959463be673ee72103bfe7559f423c632fbf9701ff099e165c429b","0x071eb2916ba30652a328c98353f69f239c41a4913c34931f18e91e5414b3270a","0x2a0cd2ebac904b7ebd82b6509dfcaf9ecf32175758c691d01f4fb32dad6371c4","0x1aa43a3009417d95904ebecd4189545e52ca7e9c7dfa3bde5f255ddefed5c754","0x29fd7a93212d60af81b810dad13a240bbbe16966a4977408b1d64c5d692b50b4","0x000000000000000000000000000000bef7cad70fa62891e6329cb7c17d0c5459","0x0000000000000000000000000000000000209177f2a04609421c1f23c04b454e","0x00000000000000000000000000000060dec389686170618e2490100f3fcf39e2","0x0000000000000000000000000000000000213368873145aad5f93798c31730af","0x000000000000000000000000000000c0f21a470488d9cbe53650d941c25cd569","0x000000000000000000000000000000000016d6f88e6b319553f5948886a6bd5e","0x000000000000000000000000000000d6dbb8a54a071e01c46d648c8c555ec352","0x0000000000000000000000000000000000130a7ce06ad74eb6c83f5565e2f821","0x00000000000000000000000000000058ca3aa788bd6ff37a5da3ecefdc896601","0x00000000000000000000000000000000001381bddcf8fb976cc52fee0d920598","0x00000000000000000000000000000082bdd94acd10edf22e09b1a42be500f8f8","0x00000000000000000000000000000000002f27815e28b2bc0699336893abdc0f","0x000000000000000000000000000000eb1d6973a54f8848f4c0630370d6181e49","0x000000000000000000000000000000000000129c1889d64ab66303bf17bfc864","0x000000000000000000000000000000155918aa9f6d352b847bf860a261266282","0x0000000000000000000000000000000000216e687d2f85a811f67573cbf311ba","0x0000000000000000000000000000002d2662f79a7ba21a95f44e67ed0b5abf3b","0x00000000000000000000000000000000001351870a81dc6edff235df110fe798","0x000000000000000000000000000000b113a55b86f59b21fe419ed8518dfddfc6","0x00000000000000000000000000000000002f26cd920f79b0d72a49897acc
521c","0x0000000000000000000000000000002a4e1689c65dcae73ed1a33b03c611a7fe","0x00000000000000000000000000000000001c5093a8ae791c00fdd763c95800c5","0x0000000000000000000000000000006231d049ec3683c06ec6b00348e0669c61","0x0000000000000000000000000000000000237bfd7ec06c28f22ce84db9bb17ed","0x0000000000000000000000000000008afa7fa0842467bded20491950c3c1cde0","0x00000000000000000000000000000000000194ab5c71154605b8483cb40d00b8","0x00000000000000000000000000000066709af193591e93e8be3b833f63cb8597","0x000000000000000000000000000000000008ab9091bb9225b00ca0c011dff12f"] +proof = ["0x0000000000000000000000000000000000000000000000000000000000000020","0x0000000000000000000000000000000000000000000000000000000000000011","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf","0x00000000000000000000000000000000000000000000000b75c020998797da78","0x0000000000000000000000000000000000000000000000005a107acb64952eca","0x000000000000000000000000000000000000000000000000000031e97a575e9d","0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4","0x00000000000000000000000000000000000000000000000c410db10a01750aeb","0x00000000000000000000000000000000000000000000000d722669117f9758a4","0x000000000000000000000000000000000000000000000000000178cbf4206471","0x000000000000000000000000000000000000000000000000e91b8a11e7842c38","0x000000000000000000000000000000000000000000000007fd51009034b3357f","0x000000000000000000000000000000000000000000000009889939f81e9c7402","0x0000000000000000000000000000000000000000000000000000f94656a2ca48","0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f","0x0000000000000000000000000000000000000000000000093fe27776f50224bd","0x000000000000000000000000000000000000000000000004a0c80c0da527a081","0x0000000000000000000000000000000000000000000000000001b52c2020d746","0x0000000000000000000000000000005a9bae947e1e91af9e4033d8d6aa6ed632","0x000000000000000000000000000000000025e485e013446d
4ac7981c88ba6ecc","0x000000000000000000000000000000ff1e0496e30ab24a63b32b2d1120b76e62","0x00000000000000000000000000000000001afe0a8a685d7cd85d1010e55d9d7c","0x000000000000000000000000000000b0804efd6573805f991458295f510a2004","0x00000000000000000000000000000000000c81a178016e2fe18605022d5a8b0e","0x000000000000000000000000000000eba51e76eb1cfff60a53a0092a3c3dea47","0x000000000000000000000000000000000022e7466247b533282f5936ac4e6c15","0x00000000000000000000000000000071b1d76edf770edff98f00ff4deec264cd","0x00000000000000000000000000000000001e48128e68794d8861fcbb2986a383","0x000000000000000000000000000000d3a2af4915ae6d86b097adc377fafda2d4","0x000000000000000000000000000000000006359de9ca452dab3a4f1f8d9c9d98","0x0000000000000000000000000000000d9d719a8b9f020ad3642d60fe704e696f","0x00000000000000000000000000000000000ddfdbbdefc4ac1580ed38e12cfa49","0x0000000000000000000000000000008289fe9754ce48cd01b7be96a861b5e157","0x00000000000000000000000000000000000ff3e0896bdea021253b3d360fa678","0x0000000000000000000000000000000d9d719a8b9f020ad3642d60fe704e696f","0x00000000000000000000000000000000000ddfdbbdefc4ac1580ed38e12cfa49","0x0000000000000000000000000000008289fe9754ce48cd01b7be96a861b5e157","0x00000000000000000000000000000000000ff3e0896bdea021253b3d360fa678","0x000000000000000000000000000000f968b227a358a305607f3efc933823d288","0x00000000000000000000000000000000000eaf8adb390375a76d95e918b65e08","0x000000000000000000000000000000bb34b4b447aae56f5e24f81c3acd6d547f","0x00000000000000000000000000000000002175d012746260ebcfe339a91a81e1","0x0000000000000000000000000000005b739ed2075f2b046062b8fc6a2d1e9863","0x00000000000000000000000000000000001285cd1030d338c0e1603b4da2c838","0x00000000000000000000000000000027447d6c281eb38b2b937af4a516d60c04","0x000000000000000000000000000000000019bc3d980465fbb4a656a74296fc58","0x000000000000000000000000000000b484788ace8f7df86dd5e325d2e9b12599","0x00000000000000000000000000000000000a2ca0d10eb7b767114ae230b728d3","0x000000000000000000000000000000c6dfc7092f16f9579
5e437664498b88d53","0x0000000000000000000000000000000000131067b4e4d95a4f6f8cf5c9b5450a","0x0f413f22eec51f2a02800e0cafaeec1d92d744fbbaef213c687b9edabd6985f5","0x21230f4ff26c80ffb5d037a9d1d26c3f955ca34cbeca4f54db6656b932967a0c","0x0521f877fe35535767f99597cc50effbd283dcae6812ee0a7620d796ccbfd642","0x202b01350a9cc5c20ec0f3eaada338c0a3b793811bd539418ffa3cc4302615e2","0x2d1214d9b0d41058ad4a172d9c0aecc5bdabe95e687c3465050c6b5396509be4","0x1113b344a151b0af091cb28d728b752ebb4865da6cd7ee68471b961ca5cf69b9","0x2aa66d0954bb83e17bd5c9928d3aa7a7df75d741d409f7c15ba596804ba643fb","0x2e26bc7a530771ef7a95d5360d537e41cf94d8a0942764ff09881c107f91a106","0x0f14f32b921bb63ad1df00adab7c82af58ea8aa7f353f14b281208d8c5fab504","0x13429515c0c53b6502bbcdf545defb3cb69a986c9263e070fcbb397391aae1a3","0x1f21cac5e2f262afc1006a21454cc6bcb018c44e53ad8ab61cebbac99e539176","0x2a9886a6ddc8a61b097c668cd362fc8acdee8dde74f7b1af192c3e060bb2948f","0x2d718181e408ead2e9bcd30a84ad1fccbaf8d48ab6d1820bad4933d284b503c4","0x2634c1aafc902f14508f34d3d7e9d485f42d1a4c95b5a1ef73711ed0d3c68d77","0x092ede9777e6472ce5ffd8c963d466006189e960e2c591d338dc8d4af1a057fb","0x1cba45b17fd24f1cb1b4ab7b83eee741f6c77ba70a497dc4de259eceb7d5ea26","0x246e887c7bf2e17f919b2393b6e9b00b33e8822d862544a775aac05cb7bff710","0x04c3f539fe8689971948afcb437f1ecbd444a5bddaca1c8a450348dcd8480047","0x20c6a423ae4fd58e8951aa378d02d77baf90508ceb48856db2319d70938b186e","0x1bcf8786b554b3316d8ebdbc9d006a4e5d4865aad512ffd404b7f83550d3d030","0x09ab038260518f0970564afcd6bf22e2abf6b1fa5e12a327bbf195b6ca5edd78","0x1024e32554746f89c195286ba6ccfc9765e5d14bbe8064bc6fdf22d16ec6b495","0x17706656f8dbd7e47bb257a6428f0cb7278ea02fa9e6ce431d7bcc9133fba9c7","0x25a3e8a33c15ef2a4dd16313a6049bf1d468b4cdc141f238f2d51a1e8e1c22b3","0x1198863f08006edb27aee23164fb117a4ddec1bf1ed89807aa907e5cd24bf068","0x1862b4856b5b4d4a064f873e221703e4e2cd1ebfca1337dedca56485c38ed5a0","0x062214af1ea6dd6bf8895b92d394571c43970b6f967e1c794624d96071b25ad3","0x1e5be9428ddcf1f9b0cbafc28101e792ec5cf73852b0cd
0b84fbff71b4490e09","0x2d4189bea5b1e30f63c64bd26df82f18bcaf885ec8887b54634b2557869ce87f","0x0f2e5d9a908850e9d44925e17d8b12d1adb1ed029799c9b5858598504242bbc0","0x3050dc85746a57931d99f3f35e77c2ba561fba0baa018b79ff1fd544026833ae","0x2a591a32437e5e0b875a137fd868bd1b6dbc003ff1b661f26e00627cc7c5cf47","0x27946841e1670ad9c65717016d0cedf524724217236e81b9fd0a264a36ebfb0e","0x0fc396e9d19d6e68e289602e292ee345542d0d28bf6de34fa62cc577cbdfb1df","0x08e7433a07a44c0c9c4dd4b273a2685bbd1a91fd5cf2b43409458fab42a23e1b","0x12bd9bfb029c3503a5c6deea87b0a0f11bb9f7ea584af2d48f3e48d7e09247ae","0x2ccc4810748c0a82dfc0f063d0b8c7999ffe9474653080e6ef92b3cb7a428784","0x08eb574d7fecadadb508c8bd35fdad06b99110609d679763c2e3645229b1b95a","0x0f1a65e747c8021ed7c454a4be1e89b1bce66ead9ed980fa98a7a050eafe98a1","0x1c8ff9e36684ec71614dee4c17859b06c742089f6029d3694a16e00dac9b57f1","0x0303101a8ba712aeca4da85b767ab8d3ecf489ec7d746f8ee20041717cc000e9","0x0aaf64c65e7088e5596108c9601467911fea809ca6540d79af77e6e66e36cd99","0x17caf164ce74ea7edfb1390e07763d2197797ec26661b92cde18a98d61d2fddc","0x18cb055c7ad6d01437725bb457681d81f3ecadc4f35d838a3c13daf25a44456a","0x2d78602b8bbcd32b36a99a6e2d248e7fe044ef1b50813133370412f9ef5299f0","0x2b139276ea86d426a115479e4154f72a6bd83a6253bf13e9670dc6b4664378f0","0x127c7837b384902c39a104036c09546728571c46c8166b1b9b13b3a615ebb781","0x05faa4816f83cf0189a482ad943c94b9ec6474002f2b327f8698763ad0ea0985","0x2f90359cc30ee693fb3aced96523cf7aebd152c22329eee56a398d9a4ac0628e","0x0a71beaf17a59c5a238f04c1f203848d87502c5057a78c13f0cfb0f9876e7714","0x2696c1e6d089556adaeb95c8a5e3065b00a393a38c2d69e9bd6ce8cdc49d87da","0x1f3d165a7dc6564a036e451eb9cb7f1e1cb1e6d29daa75e3f135ea3e58a79ccd","0x1473a660819bdd838d56122b72b32b267211e9f1103239480ec50fa85c9e1035","0x0a8ccaeb22451f391b3fc3467c8e6e900270a7afb7b510e8acf5a4f06f1c0888","0x03b3080afc0658cc87e307758cebc171921f43eca159b9dedf7f72aa8dd926bd","0x2dd7d6663fa0e1755dfafac352c361fcd64c7f4d53627e3646870ac169cc4a07","0x1ec54b883f5f35ccad0e75695af20790d9860104095ba
b34c9bf01628dd40cb9","0x193dff50f83c241f7a9e087a29ce72ecf3f6d8563593f786dcd04c32bcfd4ced","0x135122c0dae26cda8ca1c09de8225064ad86d10423ab0aaa53b481aa4626e1d6","0x08d5a56cbfab5aeed56d3cdd7fb6b30fc26b0c1a5b63fccd7fa44c53ba6fd35a","0x0d12f126dfa2daad3726d00ca339284cc22e36c6d81bb7a4b95c6f9598b60e7c","0x2e8b24bbdf2fd839d3c7cae1f0eeb96bfcfaeef30b27476f2fafcb17da78cd5e","0x2364acfe0cea39b7f749c5f303b99504977357925f810f684c60f35d16315211","0x06ca062eb70b8c51cfac35345e7b6b51f33a8ec9ebe204fb9b4911200bf508b7","0x266c0aa1ccb97186815bf69084f600d06ddd934e59a38dfe602ee5d6b9487f22","0x1d817537a49c6d0e3b4b65c6665334b91d7593142e60065048be9e55ceb5e7ab","0x05e9b7256a368df053c691952b59e9327a7c12ed322bbd6f72c669b9b9c26d49","0x05e9b7256a368df053c691952b59e9327a7c12ed322bbd6f72c669b9b9c26d49","0x25b77026673a1e613e50df0e88fb510973739d5f9064bd364079a9f884209632","0x25c9bc7a3f6aae3d43ff68b5614b34b5eaceff37157b37347995d231784ac1fd","0x085f69baef22680ae15f4801ef4361ebe9c7fc24a94b5bc2527dce8fb705439e","0x0d7c6b9ce31bfc32238a205455baf5ffe99cd30eb0f7bb5b504e1d4501e01382","0x1001a8cc4bc1221c814fba0eddcf3c40619b133373640c600de5bed0a0a05b10","0x20f5894be90e52977cb70f4f4cbd5101693db0360848939750db7e91109d54b6","0x22c09cb26db43f0599408b4daed0f4f496c66424e6affa41c14387d8e0af851b","0x24e5f41357798432426a9549d71e8cc681eaebacbe87f6e3bf38e85de5aa2f3d","0x06eb90100c736fbf2b87432d7821ecdc0b365024739bc36363d48b905973f5b9","0x0000000000000000000000000000007f36e0b4f59927ebbb2302e76cbe8bd44e","0x00000000000000000000000000000000001b95777c6c98640c80638c195909ca","0x0000000000000000000000000000006d4b1ad71244248cb2070fbbbb0ac9df88","0x00000000000000000000000000000000001abada4d5d816a67b6fc75746cb723","0x000000000000000000000000000000465811089df032ceb5269254547a101e57","0x000000000000000000000000000000000011a4a909c59776a6df9c7615e8e87d","0x000000000000000000000000000000311f6f724e7199351c9774225f15c25f20","0x00000000000000000000000000000000001ddba8eb0ab208ad3d96c70941fcbc","0x0000000000000000000000000000000dfa80bdf5be15
1b21ad89466b7201b63d","0x000000000000000000000000000000000015ca7dc258adab8ea406d94e00c56d","0x000000000000000000000000000000507ea3454165f92295b6e435c7d30d14f0","0x00000000000000000000000000000000002f522608db7b7d389d1df67eab104d","0x000000000000000000000000000000950102cce743fadb23965fc72e31efd36c","0x000000000000000000000000000000000018b4a7ec90df68dfe97d3c5367d1bf","0x000000000000000000000000000000118d90258b25dba8bc0f99d9f7547c6a62","0x000000000000000000000000000000000012d78638701da6322abbf325693b0f","0x000000000000000000000000000000144743e0d082f35295b51561af65f94c6b","0x00000000000000000000000000000000002322a615615e5405836374bb3c5336","0x000000000000000000000000000000e6f08dd5904ee42f826cde680919b41a96","0x00000000000000000000000000000000002d3f823ea255b68465e4b5360bf864","0x00000000000000000000000000000076d4db93683b6363ae92a5a20d8bb9922e","0x00000000000000000000000000000000002f8a7009cac72c9599b81cb9054308","0x00000000000000000000000000000085c12dd2be9f2b29e54c1a4bc3cbf9b6ce","0x000000000000000000000000000000000024e3688a1f4f50b0c6bd6c068f32b2","0x00000000000000000000000000000023a2015e7ea351e444c9405adfbd81e84d","0x00000000000000000000000000000000001fb3e4228c15dc4380db796925ec49","0x000000000000000000000000000000834ad9406b8ded7208b872373be7445e47","0x0000000000000000000000000000000000267544d6a9f5cc46d10555f2617c65"] public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] verification_key = 
["0x0000000000000000000000000000000000000000000000000000000000000020","0x0000000000000000000000000000000000000000000000000000000000000011","0x0000000000000000000000000000000000000000000000000000000000000001","0x00000000000000000000000000000060e430ad1c23bfcf3514323aae3f206e84","0x00000000000000000000000000000000001b5c3ff4c2458d8f481b1c068f27ae","0x000000000000000000000000000000bb510ab2112def34980e4fc6998ad9dd16","0x00000000000000000000000000000000000576e7c105b43e061e13cb877fefe1","0x000000000000000000000000000000ced074785d11857b065d8199e6669a601c","0x00000000000000000000000000000000000053b48a4098c1c0ae268f273952f7","0x000000000000000000000000000000d1d4b26e941db8168cee8f6de548ae0fd8","0x00000000000000000000000000000000001a9adf5a6dadc3d948bb61dfd63f4c","0x0000000000000000000000000000009ce1faac6f8de6ebb18f1db17372c82ad5","0x00000000000000000000000000000000002002681bb417184b2df070a16a3858","0x000000000000000000000000000000161baa651a8092e0e84725594de5aba511","0x00000000000000000000000000000000000be0064399c2a1efff9eb0cdcb2223","0x0000000000000000000000000000008673be6fd1bdbe980a29d8c1ded54381e7","0x000000000000000000000000000000000008a5158a7d9648cf1d234524c9fa0c","0x0000000000000000000000000000002b4fce6e4b1c72062b296d49bca2aa4130","0x00000000000000000000000000000000002e45a9eff4b6769e55fb710cded44f","0x00000000000000000000000000000072b85bf733758b76bcf97333efb85a23e3","0x000000000000000000000000000000000017da0ea508994fc82862715e4b5592","0x00000000000000000000000000000094fa74695cf058dba8ff35aec95456c6c3","0x0000000000000000000000000000000000211acddb851061c24b8f159e832bd1","0x000000000000000000000000000000303b5e5c531384b9a792e11702ad3bcab0","0x00000000000000000000000000000000000d336dff51a60b8833d5d7f6d4314c","0x0000000000000000000000000000009f825dde88092070747180d581c342444a","0x0000000000000000000000000000000000237fbd6511a03cca8cac01b555fe01","0x0000000000000000000000000000007c313205159495df6d8de292079a4844ff","0x000000000000000000000000000000000018facdfc468530dd45e8f7a1d38ce9
","0x0000000000000000000000000000000d1ce33446fc3dc4ab40ca38d92dac74e1","0x00000000000000000000000000000000000852d8e3e0e8f4435af3e94222688b","0x0000000000000000000000000000006c04ee19ec1dfec87ed47d6d04aa158de2","0x000000000000000000000000000000000013240f97a584b45184c8ec31319b5f","0x000000000000000000000000000000cefb5d240b07ceb4be26ea429b6dc9d9e0","0x00000000000000000000000000000000002dad22022121d689f57fb38ca21349","0x000000000000000000000000000000c9f189f2a91aeb664ce376d8b157ba98f8","0x00000000000000000000000000000000002531a51ad54f124d58094b219818d2","0x000000000000000000000000000000ef1e6db71809307f677677e62b4163f556","0x0000000000000000000000000000000000272da4396fb2a7ee0638b9140e523d","0x0000000000000000000000000000002e54c0244a7732c87bc4712a76dd8c83fb","0x000000000000000000000000000000000007db77b3e04b7eba9643da57cbbe4d","0x000000000000000000000000000000e0dfe1ddd7f74ae0d636c910c3e85830d8","0x00000000000000000000000000000000000466fa9b57ec4664abd1505b490862","0x0000000000000000000000000000009ee55ae8a32fe5384c79907067cc27192e","0x00000000000000000000000000000000000799d0e465cec07ecb5238c854e830","0x0000000000000000000000000000001d5910ad361e76e1c241247a823733c39f","0x00000000000000000000000000000000002b03f2ccf7507564da2e6678bef8fe","0x000000000000000000000000000000231147211b3c75e1f47d150e4bbd2fb22e","0x00000000000000000000000000000000000d19ee104a10d3c701cfd87473cbbe","0x0000000000000000000000000000006705f3f382637d00f698e2c5c94ed05ae9","0x00000000000000000000000000000000000b9c792da28bb60601dd7ce4b74e68","0x000000000000000000000000000000ac5acc8cc21e4ddb225c510670f80c80b3","0x00000000000000000000000000000000002da9d3fa57343e6998aba19429b9fa","0x0000000000000000000000000000004bacbf54b7c17a560df0af18b6d0d527be","0x00000000000000000000000000000000000faea33aeca2025b22c288964b21eb","0x000000000000000000000000000000492e756298d68d6e95de096055cc0336c3","0x00000000000000000000000000000000001a12a12f004859e5a3675c7315121b","0x000000000000000000000000000000893d521d512f30e6d32afbbc0cecd8ee0
0","0x00000000000000000000000000000000001674b3c1ef12c6da690631e0d86c04","0x000000000000000000000000000000aa6cb02a52e7a613873d4ac9b411349945","0x00000000000000000000000000000000001ecb1fe9c493add46751f9940f73e1","0x00000000000000000000000000000045b3d362ca82cba69fb2b9c733a5b8c351","0x000000000000000000000000000000000019a683586af466e331945b732d2f8c","0x000000000000000000000000000000fc79b052dfdfe67c0ecfc06b4267ffd694","0x00000000000000000000000000000000001336a70c396393038d5e9913744ac2","0x0000000000000000000000000000005450d29af1e9438e91cd33ddeb2548226e","0x000000000000000000000000000000000000993a602891cfd0e6f6ecf7404933","0x000000000000000000000000000000498efddab90a32e9b2db729ed6e9b40192","0x00000000000000000000000000000000002425efebe9628c63ca6fc28bdb5901","0x000000000000000000000000000000d8488157f875a21ab5f93f1c2b641f3de9","0x0000000000000000000000000000000000290f95ada3936604dc4b14df7504e3","0x0000000000000000000000000000005d6902187f3ed60dcce06fca211b40329a","0x00000000000000000000000000000000002b5870a6ba0b20aaa0178e5adfbc36","0x000000000000000000000000000000e5c2519171fa0e548fc3c4966ffc1ce570","0x00000000000000000000000000000000001cb8d8f4793b7debbdc429389dbf2d","0x000000000000000000000000000000a3ee22dd60456277b86c32a18982dcb185","0x00000000000000000000000000000000002493c99a3d068b03f8f2b8d28b57ce","0x000000000000000000000000000000f6c3731486320082c20ec71bbdc92196c1","0x00000000000000000000000000000000001ded39c4c8366469843cd63f09ecac","0x000000000000000000000000000000494997477ab161763e46601d95844837ef","0x00000000000000000000000000000000002e0cddbc5712d79b59cb3b41ebbcdd","0x000000000000000000000000000000426db4c64531d350750df62dbbc41a1bd9","0x0000000000000000000000000000000000303126892f664d8d505964d14315ec","0x00000000000000000000000000000076a6b2c6040c0c62bd59acfe3e3e125672","0x000000000000000000000000000000000000874a5ad262eecc6b565e0b085074","0x000000000000000000000000000000ef082fb517183c9c6841c2b8ef2ca1df04","0x0000000000000000000000000000000000127b2a745a1b74968c3edc18982b
9b","0x000000000000000000000000000000c9efd4f8c3d56e1eb23d789a8f710d5be6","0x000000000000000000000000000000000015a18748490ff4c2b1871081954e86","0x000000000000000000000000000000a0011ef987dc016ab110eacd554a1d8bbf","0x00000000000000000000000000000000002097c84955059442a95df075833071","0x000000000000000000000000000000d38e9426ad3085b68b00a93c17897c2877","0x00000000000000000000000000000000002aecd48089890ea0798eb952c66824","0x00000000000000000000000000000078d8a9ce405ce559f441f2e71477ff3ddb","0x00000000000000000000000000000000001216bdb2f0d961bb8a7a23331d2150","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000000000ee40d90bea71fba7a412dd61fcf34e8ceb","0x0000000000000000000000000000000000140b0936c323fd2471155617b6af56","0x0000000000000000000000000000002b90071823185c5ff8e440fd3d73b6fefc","0x00000000000000000000000000000000002b6c10790a5f6631c87d652e059df4"] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr index d25fd804ce4..c534b07fc77 100644 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr @@ -6,7 +6,7 @@ fn main( // This is the proof without public inputs attached. // // This means: the size of this does not change with the number of public inputs. - proof: [Field; 153], + proof: [Field; 156], public_inputs: pub [Field; 1], // This is currently not public. It is fine given that the vk is a part of the circuit definition. // I believe we want to eventually make it public too though. 
diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 82f52d6e6eb..1f4e2fdc790 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -202,7 +202,7 @@ anvil: FROM ../build-images+build SAVE ARTIFACT /opt/foundry/bin/anvil -end-to-end: +end-to-end-base: FROM ubuntu:noble # add repository for chromium RUN apt-get update && apt-get install -y software-properties-common \ @@ -221,10 +221,15 @@ end-to-end: ENV ACVM_BINARY_PATH=/usr/src/noir/noir-repo/target/release/acvm ENV PROVER_AGENT_CONCURRENCY=8 RUN mkdir -p $BB_WORKING_DIRECTORY $ACVM_WORKING_DIRECTORY + + RUN ln -s /usr/src/yarn-project/.yarn/releases/yarn-3.6.3.cjs /usr/local/bin/yarn + +end-to-end: + FROM +end-to-end-base + COPY +anvil/anvil /opt/foundry/bin/anvil COPY +end-to-end-prod/usr/src /usr/src WORKDIR /usr/src/yarn-project/end-to-end - RUN ln -s /usr/src/yarn-project/.yarn/releases/yarn-3.6.3.cjs /usr/local/bin/yarn ENTRYPOINT ["yarn", "test"] scripts-prod: diff --git a/yarn-project/accounts/package.json b/yarn-project/accounts/package.json index 2b3cde6cec1..90d2d36ab83 100644 --- a/yarn-project/accounts/package.json +++ b/yarn-project/accounts/package.json @@ -45,7 +45,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json index 40f0a179937..9514bf2b7e4 100644 --- a/yarn-project/archiver/package.json +++ b/yarn-project/archiver/package.json @@ -34,7 +34,15 @@ "workerThreads": true, "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ @@ -57,6 +65,7 @@ "@aztec/kv-store": "workspace:^", "@aztec/l1-artifacts": "workspace:^", "@aztec/protocol-contracts": "workspace:^", + "@aztec/telemetry-client": "workspace:^", 
"@aztec/types": "workspace:^", "debug": "^4.3.4", "lodash.groupby": "^4.6.0", diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 9c83e57981d..1040f308fae 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -10,6 +10,7 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { sleep } from '@aztec/foundation/sleep'; import { AvailabilityOracleAbi, type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type MockProxy, mock } from 'jest-mock-extended'; import { @@ -49,6 +50,7 @@ describe('Archiver', () => { registryAddress, archiverStore, 1000, + new NoopTelemetryClient(), ); let latestBlockNum = await archiver.getBlockNumber(); @@ -152,6 +154,7 @@ describe('Archiver', () => { registryAddress, archiverStore, 1000, + new NoopTelemetryClient(), ); let latestBlockNum = await archiver.getBlockNumber(); diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 9face3a26ae..b03ce4d2115 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -29,6 +29,7 @@ import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; import { ClassRegistererAddress } from '@aztec/protocol-contracts/class-registerer'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import { type ContractClassPublic, type ContractDataSource, @@ -49,6 +50,7 @@ import { retrieveBlockMetadataFromRollup, retrieveL1ToL2Messages, } from './data_retrieval.js'; +import { ArchiverInstrumentation } from './instrumentation.js'; /** * Helper interface to combine all sources this 
archiver implementation provides. @@ -66,6 +68,9 @@ export class Archiver implements ArchiveSource { */ private runningPromise?: RunningPromise; + /** Capture runtime metrics */ + private instrumentation: ArchiverInstrumentation; + /** * Creates a new instance of the Archiver. * @param publicClient - A client for interacting with the Ethereum node. @@ -84,8 +89,11 @@ export class Archiver implements ArchiveSource { private readonly registryAddress: EthAddress, private readonly store: ArchiverDataStore, private readonly pollingIntervalMs = 10_000, + telemetry: TelemetryClient, private readonly log: DebugLogger = createDebugLogger('aztec:archiver'), - ) {} + ) { + this.instrumentation = new ArchiverInstrumentation(telemetry); + } /** * Creates a new instance of the Archiver and blocks until it syncs from chain. @@ -97,6 +105,7 @@ export class Archiver implements ArchiveSource { public static async createAndSync( config: ArchiverConfig, archiverStore: ArchiverDataStore, + telemetry: TelemetryClient, blockUntilSynced = true, ): Promise { const chain = createEthereumChain(config.rpcUrl, config.apiKey); @@ -114,6 +123,7 @@ export class Archiver implements ArchiveSource { config.l1Contracts.registryAddress, archiverStore, config.archiverPollingIntervalMS, + telemetry, ); await archiver.start(blockUntilSynced); return archiver; @@ -286,6 +296,7 @@ export class Archiver implements ArchiveSource { ); await this.store.addBlocks(retrievedBlocks); + this.instrumentation.processNewBlocks(retrievedBlocks.retrievedData); } /** diff --git a/yarn-project/archiver/src/archiver/instrumentation.ts b/yarn-project/archiver/src/archiver/instrumentation.ts new file mode 100644 index 00000000000..837b00af7f2 --- /dev/null +++ b/yarn-project/archiver/src/archiver/instrumentation.ts @@ -0,0 +1,30 @@ +import { type L2Block } from '@aztec/circuit-types'; +import { type Gauge, type Histogram, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client'; + +export class 
ArchiverInstrumentation { + private blockHeight: Gauge; + private blockSize: Histogram; + + constructor(telemetry: TelemetryClient) { + const meter = telemetry.getMeter('Archiver'); + this.blockHeight = meter.createGauge(Metrics.ARCHIVER_BLOCK_HEIGHT, { + description: 'The height of the latest block processed by the archiver', + valueType: ValueType.INT, + }); + + this.blockSize = meter.createHistogram(Metrics.ARCHIVER_BLOCK_SIZE, { + description: 'The number of transactions processed per block', + valueType: ValueType.INT, + advice: { + explicitBucketBoundaries: [2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192], + }, + }); + } + + public processNewBlocks(blocks: L2Block[]) { + this.blockHeight.record(Math.max(...blocks.map(b => b.number))); + for (const block of blocks) { + this.blockSize.record(block.body.txEffects.length); + } + } +} diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts index 693b1e9c60c..d22537ae824 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts @@ -187,7 +187,6 @@ export class BlockStore { } if (start < INITIAL_L2_BLOCK_NUM) { - this.#log.verbose(`Clamping start block ${start} to ${INITIAL_L2_BLOCK_NUM}`); start = INITIAL_L2_BLOCK_NUM; } diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts index fb3f8da310a..cf4549e81a0 100644 --- a/yarn-project/archiver/src/index.ts +++ b/yarn-project/archiver/src/index.ts @@ -1,5 +1,6 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { fileURLToPath } from '@aztec/foundation/url'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { createPublicClient, http } from 'viem'; import { localhost } from 'viem/chains'; @@ -34,6 +35,8 @@ async function main() { l1Contracts.inboxAddress, l1Contracts.registryAddress, archiverStore, 
+ 1000, + new NoopTelemetryClient(), ); const shutdown = async () => { diff --git a/yarn-project/archiver/tsconfig.json b/yarn-project/archiver/tsconfig.json index ea0bb3a5469..dbe9915c010 100644 --- a/yarn-project/archiver/tsconfig.json +++ b/yarn-project/archiver/tsconfig.json @@ -27,6 +27,9 @@ { "path": "../protocol-contracts" }, + { + "path": "../telemetry-client" + }, { "path": "../types" }, diff --git a/yarn-project/aztec-faucet/package.json b/yarn-project/aztec-faucet/package.json index 957b2203b87..567a3afb5dc 100644 --- a/yarn-project/aztec-faucet/package.json +++ b/yarn-project/aztec-faucet/package.json @@ -31,7 +31,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/aztec-node/package.json b/yarn-project/aztec-node/package.json index 6689da13001..6189671d529 100644 --- a/yarn-project/aztec-node/package.json +++ b/yarn-project/aztec-node/package.json @@ -32,7 +32,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ @@ -62,6 +70,7 @@ "@aztec/prover-client": "workspace:^", "@aztec/sequencer-client": "workspace:^", "@aztec/simulator": "workspace:^", + "@aztec/telemetry-client": "workspace:^", "@aztec/types": "workspace:^", "@aztec/world-state": "workspace:^", "koa": "^2.14.2", diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 309a9ef3bcc..a1d559bf498 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -1,4 +1,5 @@ import { createEthereumChain } from '@aztec/ethereum'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type AztecNodeConfig, 
AztecNodeService } from '../index.js'; @@ -10,7 +11,9 @@ describe('aztec node service', () => { chainId: 12345, // not the testnet chain id }; const ethereumChain = createEthereumChain(config.rpcUrl!, config.apiKey); - await expect(() => AztecNodeService.createAndSync(config as AztecNodeConfig)).rejects.toThrow( + await expect(() => + AztecNodeService.createAndSync(config as AztecNodeConfig, new NoopTelemetryClient()), + ).rejects.toThrow( `RPC URL configured for chain id ${ethereumChain.chainInfo.id} but expected id ${config.chainId}`, ); }); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index da82aa420e7..4e1eb36c75d 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -63,6 +63,8 @@ import { getCanonicalMultiCallEntrypointAddress } from '@aztec/protocol-contract import { TxProver } from '@aztec/prover-client'; import { type GlobalVariableBuilder, SequencerClient, getGlobalVariableBuilder } from '@aztec/sequencer-client'; import { PublicProcessorFactory, WASMSimulator } from '@aztec/simulator'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type ContractClassPublic, type ContractDataSource, @@ -104,6 +106,7 @@ export class AztecNodeService implements AztecNode { protected readonly merkleTreesDb: AztecKVStore, private readonly prover: ProverClient | undefined, private txValidator: TxValidator, + private telemetry: TelemetryClient, private log = createDebugLogger('aztec:node'), ) { this.packageVersion = getPackageInfo().version; @@ -124,9 +127,11 @@ export class AztecNodeService implements AztecNode { */ public static async createAndSync( config: AztecNodeConfig, + telemetry?: TelemetryClient, log = createDebugLogger('aztec:node'), storeLog = createDebugLogger('aztec:node:lmdb'), - ) { + ): Promise { + telemetry ??= new 
NoopTelemetryClient(); const ethereumChain = createEthereumChain(config.rpcUrl, config.apiKey); //validate that the actual chain id matches that specified in configuration if (config.chainId !== ethereumChain.chainInfo.id) { @@ -145,7 +150,7 @@ export class AztecNodeService implements AztecNode { if (!config.archiverUrl) { // first create and sync the archiver const archiverStore = new KVArchiverDataStore(store, config.maxLogs); - archiver = await Archiver.createAndSync(config, archiverStore, true); + archiver = await Archiver.createAndSync(config, archiverStore, telemetry, true); } else { archiver = createArchiverClient(config.archiverUrl); } @@ -155,7 +160,7 @@ export class AztecNodeService implements AztecNode { config.transactionProtocol = `/aztec/tx/${config.l1Contracts.rollupAddress.toString()}`; // create the tx pool and the p2p client, which will need the l2 block source - const p2pClient = await createP2PClient(store, config, new AztecKVTxPool(store), archiver); + const p2pClient = await createP2PClient(store, config, new AztecKVTxPool(store, telemetry), archiver); // now create the merkle trees and the world state synchronizer const merkleTrees = await MerkleTrees.new(store); @@ -179,6 +184,7 @@ export class AztecNodeService implements AztecNode { config, await proofVerifier.getVerificationKeys(), worldStateSynchronizer, + telemetry, await archiver .getBlock(-1) .then(b => b?.header ?? 
worldStateSynchronizer.getCommitted().buildInitialHeader()), @@ -200,6 +206,7 @@ export class AztecNodeService implements AztecNode { archiver, prover!, simulationProvider, + telemetry, ); return new AztecNodeService( @@ -218,6 +225,7 @@ export class AztecNodeService implements AztecNode { store, prover, txValidator, + telemetry, log, ); } @@ -756,6 +764,7 @@ export class AztecNodeService implements AztecNode { merkleTrees.asLatest(), this.contractDataSource, new WASMSimulator(), + this.telemetry, ); const processor = await publicProcessorFactory.create(prevHeader, newGlobalVariables); // REFACTOR: Consider merging ProcessReturnValues into ProcessedTx diff --git a/yarn-project/aztec-node/src/bin/index.ts b/yarn-project/aztec-node/src/bin/index.ts index e1688b79198..41aba729aeb 100644 --- a/yarn-project/aztec-node/src/bin/index.ts +++ b/yarn-project/aztec-node/src/bin/index.ts @@ -1,5 +1,6 @@ #!/usr/bin/env -S node --no-warnings import { createDebugLogger } from '@aztec/foundation/log'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import http from 'http'; @@ -15,7 +16,7 @@ const logger = createDebugLogger('aztec:node'); async function createAndDeployAztecNode() { const aztecNodeConfig: AztecNodeConfig = { ...getConfigEnvVars() }; - return await AztecNodeService.createAndSync(aztecNodeConfig); + return await AztecNodeService.createAndSync(aztecNodeConfig, new NoopTelemetryClient()); } /** diff --git a/yarn-project/aztec-node/tsconfig.json b/yarn-project/aztec-node/tsconfig.json index f023c003bff..5a6637a7bac 100644 --- a/yarn-project/aztec-node/tsconfig.json +++ b/yarn-project/aztec-node/tsconfig.json @@ -48,6 +48,9 @@ { "path": "../simulator" }, + { + "path": "../telemetry-client" + }, { "path": "../types" }, diff --git a/yarn-project/aztec.js/package.json b/yarn-project/aztec.js/package.json index 96748862412..367a1216ab2 100644 --- a/yarn-project/aztec.js/package.json +++ b/yarn-project/aztec.js/package.json @@ -49,7 +49,15 @@ "rootDir": 
"./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ @@ -86,7 +94,6 @@ "stream-browserify": "^3.0.0", "ts-loader": "^9.4.4", "ts-node": "^10.9.1", - "tty-browserify": "^0.0.1", "typescript": "^5.0.4", "util": "^0.12.5", "webpack": "^5.88.2", diff --git a/yarn-project/aztec.js/src/account/interface.ts b/yarn-project/aztec.js/src/account/interface.ts index 8919c3aa403..cafef217f9e 100644 --- a/yarn-project/aztec.js/src/account/interface.ts +++ b/yarn-project/aztec.js/src/account/interface.ts @@ -1,44 +1,25 @@ -import { type AuthWitness, type CompleteAddress, type FunctionCall } from '@aztec/circuit-types'; +import { type AuthWitness, type CompleteAddress } from '@aztec/circuit-types'; import { type AztecAddress } from '@aztec/circuits.js'; import { type Fq, type Fr } from '@aztec/foundation/fields'; -import { type ContractFunctionInteraction } from '../contract/contract_function_interaction.js'; import { type EntrypointInterface } from '../entrypoint/entrypoint.js'; // docs:start:account-interface /** Creates authorization witnesses. */ export interface AuthWitnessProvider { /** - * Computes an authentication witness from either a message hash or an intent (caller and an action). - * If a message hash is provided, it will create a witness for that directly. - * Otherwise, it will compute the message hash using the caller and the action of the intent. 
- * @param messageHashOrIntent - The message hash or the intent (caller and action) to approve - * @param chainId - The chain id for the message, will default to the current chain id - * @param version - The version for the message, will default to the current protocol version + * Computes an authentication witness from either a message hash + * @param messageHash - The message hash to approve * @returns The authentication witness */ - createAuthWit( - messageHashOrIntent: - | Fr - | Buffer - | { - /** The caller to approve */ - caller: AztecAddress; - /** The action to approve */ - action: ContractFunctionInteraction | FunctionCall; - /** The chain id to approve */ - chainId?: Fr; - /** The version to approve */ - version?: Fr; - }, - ): Promise; + createAuthWit(messageHash: Fr | Buffer): Promise; } /** * Handler for interfacing with an account. Knows how to create transaction execution * requests and authorize actions for its corresponding account. */ -export interface AccountInterface extends AuthWitnessProvider, EntrypointInterface { +export interface AccountInterface extends EntrypointInterface, AuthWitnessProvider { /** Returns the complete address for this account. */ getCompleteAddress(): CompleteAddress; diff --git a/yarn-project/aztec.js/src/account/wallet.ts b/yarn-project/aztec.js/src/account/wallet.ts index d9d78aea434..5dc257bca01 100644 --- a/yarn-project/aztec.js/src/account/wallet.ts +++ b/yarn-project/aztec.js/src/account/wallet.ts @@ -1,8 +1,13 @@ -import { type PXE } from '@aztec/circuit-types'; +import { type AuthWitness, type PXE } from '@aztec/circuit-types'; +import { type IntentAction, type IntentInnerHash } from '../utils/authwit.js'; import { type AccountInterface, type AccountKeyRotationInterface } from './interface.js'; /** * The wallet interface. 
*/ -export type Wallet = AccountInterface & PXE & AccountKeyRotationInterface; +export type Wallet = AccountInterface & + PXE & + AccountKeyRotationInterface & { + createAuthWit(intent: IntentInnerHash | IntentAction): Promise; + }; diff --git a/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts b/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts index 10e1b36de4c..93618e26136 100644 --- a/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts +++ b/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts @@ -6,7 +6,6 @@ import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { type Wallet } from '../account/wallet.js'; -import { computeAuthWitMessageHash } from '../utils/authwit.js'; import { type FeePaymentMethod } from './fee_payment_method.js'; /** @@ -55,11 +54,9 @@ export class PrivateFeePaymentMethod implements FeePaymentMethod { async getFunctionCalls(gasSettings: GasSettings): Promise { const nonce = Fr.random(); const maxFee = gasSettings.getFeeLimit(); - const messageHash = computeAuthWitMessageHash( - this.paymentContract, - this.wallet.getChainId(), - this.wallet.getVersion(), - { + await this.wallet.createAuthWit({ + caller: this.paymentContract, + action: { name: 'unshield', args: [this.wallet.getCompleteAddress().address, this.paymentContract, maxFee, nonce], selector: FunctionSelector.fromSignature('unshield((Field),(Field),Field,Field)'), @@ -68,8 +65,7 @@ export class PrivateFeePaymentMethod implements FeePaymentMethod { to: this.asset, returnTypes: [], }, - ); - await this.wallet.createAuthWit(messageHash); + }); const secretHashForRebate = computeSecretHash(this.rebateSecret); diff --git a/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts b/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts index 32e10f31be7..dae20d5fe8a 100644 --- a/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts +++ 
b/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts @@ -4,7 +4,6 @@ import { FunctionSelector, FunctionType } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; -import { computeAuthWitMessageHash } from '../utils/authwit.js'; import { type AccountWallet } from '../wallet/account_wallet.js'; import { type FeePaymentMethod } from './fee_payment_method.js'; @@ -47,23 +46,25 @@ export class PublicFeePaymentMethod implements FeePaymentMethod { getFunctionCalls(gasSettings: GasSettings): Promise { const nonce = Fr.random(); const maxFee = gasSettings.getFeeLimit(); - const messageHash = computeAuthWitMessageHash( - this.paymentContract, - this.wallet.getChainId(), - this.wallet.getVersion(), - { - name: 'transfer_public', - args: [this.wallet.getAddress(), this.paymentContract, maxFee, nonce], - selector: FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), - type: FunctionType.PUBLIC, - isStatic: false, - to: this.asset, - returnTypes: [], - }, - ); return Promise.resolve([ - this.wallet.setPublicAuthWit(messageHash, true).request(), + this.wallet + .setPublicAuthWit( + { + caller: this.paymentContract, + action: { + name: 'transfer_public', + args: [this.wallet.getAddress(), this.paymentContract, maxFee, nonce], + selector: FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), + type: FunctionType.PUBLIC, + isStatic: false, + to: this.asset, + returnTypes: [], + }, + }, + true, + ) + .request(), { name: 'fee_entrypoint_public', to: this.paymentContract, diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index 8fabb0e8f91..4e3d71e4d8f 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -46,12 +46,13 @@ export { FunctionSelectorLike, WrappedFieldLike, computeAuthWitMessageHash, + computeInnerAuthWitHashFromAction, computeInnerAuthWitHash, - 
computeOuterAuthWitHash, generatePublicKey, waitForAccountSynch, waitForPXE, } from './utils/index.js'; +export { NoteSelector } from '@aztec/foundation/abi'; export { createPXEClient } from './rpc_clients/index.js'; diff --git a/yarn-project/aztec.js/src/rpc_clients/pxe_client.ts b/yarn-project/aztec.js/src/rpc_clients/pxe_client.ts index c54a9674c4d..2b6871e885e 100644 --- a/yarn-project/aztec.js/src/rpc_clients/pxe_client.ts +++ b/yarn-project/aztec.js/src/rpc_clients/pxe_client.ts @@ -25,6 +25,7 @@ import { GrumpkinScalar, Point, } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { createJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client'; /** @@ -53,6 +54,7 @@ export const createPXEClient = (url: string, fetch = makeFetch([1, 2, 3], false) Point, TxExecutionRequest, TxHash, + NoteSelector, }, { Tx, SimulatedTx, TxReceipt, EncryptedNoteL2BlockL2Logs, UnencryptedL2BlockL2Logs, NullifierMembershipWitness }, false, diff --git a/yarn-project/aztec.js/src/utils/authwit.ts b/yarn-project/aztec.js/src/utils/authwit.ts index 41655da79fb..e3df2b9a527 100644 --- a/yarn-project/aztec.js/src/utils/authwit.ts +++ b/yarn-project/aztec.js/src/utils/authwit.ts @@ -1,32 +1,77 @@ import { type FunctionCall, PackedValues } from '@aztec/circuit-types'; -import { type AztecAddress, type Fr, GeneratorIndex } from '@aztec/circuits.js'; +import { type AztecAddress, Fr, GeneratorIndex } from '@aztec/circuits.js'; import { pedersenHash } from '@aztec/foundation/crypto'; +import { ContractFunctionInteraction } from '../contract/contract_function_interaction.js'; + +/** Metadata for the intent */ +export type IntentMetadata = { + /** The chain id to approve */ + chainId: Fr; + /** The version to approve */ + version: Fr; +}; + +/** Intent with an inner hash */ +export type IntentInnerHash = { + /** The consumer */ + consumer: AztecAddress; + /** The action to approve */ + innerHash: Buffer | Fr; +}; + +/** Intent with an action */ 
+export type IntentAction = { + /** The caller to approve */ + caller: AztecAddress; + /** The action to approve */ + action: ContractFunctionInteraction | FunctionCall; +}; + // docs:start:authwit_computeAuthWitMessageHash /** - * Compute an authentication witness message hash from a caller and a request - * H(target: AztecAddress, chainId: Field, version: Field, H(caller: AztecAddress, selector: Field, args_hash: Field)) - * Example usage would be `bob` authenticating `alice` to perform a transfer of `10` - * tokens from his account to herself: - * H(token, 1, 1, H(alice, transfer_selector, H(bob, alice, 10, nonce))) - * `bob` then signs the message hash and gives it to `alice` who can then perform the - * action. - * @param caller - The caller approved to make the call - * @param chainId - The chain id for the message - * @param version - The version for the message - * @param action - The request to be made (function call) - * @returns The message hash for the witness + * Compute an authentication witness message hash from an intent and metadata + * + * If using the `IntentInnerHash`, the consumer is the address that can "consume" the authwit, for token approvals it is the token contract itself. + * The `innerHash` itself will be the message that a contract is allowed to execute. + * At the point of "approval checking", the validating contract (account for private and registry for public) will be computing the message hash + * (`H(consumer, chainid, version, inner_hash)`) where the all but the `inner_hash` is injected from the context (consumer = msg_sender), + * and use it for the authentication check. + * Therefore, any allowed `innerHash` will therefore also have information around where it can be spent (version, chainId) and who can spend it (consumer). + * + * If using the `IntentAction`, the caller is the address that is making the call, for a token approval from Alice to Bob, this would be Bob. 
+ * The action is then used along with the `caller` to compute the `innerHash` and the consumer. + * + * + * @param intent - The intent to approve (consumer and innerHash or caller and action) + * The consumer is the address that can "consume" the authwit, for token approvals it is the token contract itself. + * The caller is the address that is making the call, for a token approval from Alice to Bob, this would be Bob. + * The caller becomes part of the `inner_hash` and is dealt with entirely in application logic. + * @param metadata - The metadata for the intent (chainId, version) + * @returns The message hash for the action */ -export const computeAuthWitMessageHash = (caller: AztecAddress, chainId: Fr, version: Fr, action: FunctionCall) => { - return computeOuterAuthWitHash( - action.to.toField(), - chainId, - version, - computeInnerAuthWitHash([caller.toField(), action.selector.toField(), PackedValues.fromValues(action.args).hash]), - ); +export const computeAuthWitMessageHash = (intent: IntentInnerHash | IntentAction, metadata: IntentMetadata) => { + const chainId = metadata.chainId; + const version = metadata.version; + + if ('caller' in intent) { + const action = intent.action instanceof ContractFunctionInteraction ? intent.action.request() : intent.action; + return computeOuterAuthWitHash( + action.to.toField(), + chainId, + version, + computeInnerAuthWitHashFromAction(intent.caller, action), + ); + } else { + const inner = Buffer.isBuffer(intent.innerHash) ? Fr.fromBuffer(intent.innerHash) : intent.innerHash; + return computeOuterAuthWitHash(intent.consumer, chainId, version, inner); + } }; // docs:end:authwit_computeAuthWitMessageHash +export const computeInnerAuthWitHashFromAction = (caller: AztecAddress, action: FunctionCall) => + computeInnerAuthWitHash([caller.toField(), action.selector.toField(), PackedValues.fromValues(action.args).hash]); + /** * Compute the inner hash for an authentication witness. 
* This is the "intent" of the message, before siloed with the consumer. @@ -53,6 +98,6 @@ export const computeInnerAuthWitHash = (args: Fr[]) => { * @param innerHash - The inner hash for the witness * @returns The outer hash for the witness */ -export const computeOuterAuthWitHash = (consumer: AztecAddress, chainId: Fr, version: Fr, innerHash: Fr) => { +const computeOuterAuthWitHash = (consumer: AztecAddress, chainId: Fr, version: Fr, innerHash: Fr) => { return pedersenHash([consumer.toField(), chainId, version, innerHash], GeneratorIndex.AUTHWIT_OUTER); }; diff --git a/yarn-project/aztec.js/src/wallet/account_wallet.ts b/yarn-project/aztec.js/src/wallet/account_wallet.ts index 4452b15f953..bf1098e4896 100644 --- a/yarn-project/aztec.js/src/wallet/account_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/account_wallet.ts @@ -1,4 +1,4 @@ -import { type AuthWitness, type FunctionCall, type PXE, type TxExecutionRequest } from '@aztec/circuit-types'; +import { type AuthWitness, type PXE, type TxExecutionRequest } from '@aztec/circuit-types'; import { AztecAddress, CANONICAL_KEY_REGISTRY_ADDRESS, Fq, Fr, derivePublicKeyFromSecretKey } from '@aztec/circuits.js'; import { type ABIParameterVisibility, type FunctionAbi, FunctionType } from '@aztec/foundation/abi'; import { AuthRegistryAddress } from '@aztec/protocol-contracts/auth-registry'; @@ -6,7 +6,12 @@ import { AuthRegistryAddress } from '@aztec/protocol-contracts/auth-registry'; import { type AccountInterface } from '../account/interface.js'; import { ContractFunctionInteraction } from '../contract/contract_function_interaction.js'; import { type ExecutionRequestInit } from '../entrypoint/entrypoint.js'; -import { computeAuthWitMessageHash } from '../utils/authwit.js'; +import { + type IntentAction, + type IntentInnerHash, + computeAuthWitMessageHash, + computeInnerAuthWitHashFromAction, +} from '../utils/authwit.js'; import { BaseWallet } from './base_wallet.js'; /** @@ -30,28 +35,25 @@ export class AccountWallet 
extends BaseWallet { } /** - * Computes an authentication witness from either a message or a caller and an action. - * If a message is provided, it will create a witness for the message directly. - * Otherwise, it will compute the message using the caller and the action. - * @param messageHashOrIntent - The message or the caller and action to approve + * Computes an authentication witness from either a message hash or an intent. + * + * If a message hash is provided, it will create a witness for the hash directly. + * Otherwise, it will compute the message hash using the intent, along with the + * chain id and the version values provided by the wallet. + * + * @param messageHashOrIntent - The message hash of the intent to approve * @returns The authentication witness */ - async createAuthWit( - messageHashOrIntent: - | Fr - | Buffer - | { - /** The caller to approve */ - caller: AztecAddress; - /** The action to approve */ - action: ContractFunctionInteraction | FunctionCall; - /** The chain id to approve */ - chainId?: Fr; - /** The version to approve */ - version?: Fr; - }, - ): Promise { - const messageHash = this.getMessageHash(messageHashOrIntent); + async createAuthWit(messageHashOrIntent: Fr | Buffer | IntentAction | IntentInnerHash): Promise { + let messageHash: Fr; + if (Buffer.isBuffer(messageHashOrIntent)) { + messageHash = Fr.fromBuffer(messageHashOrIntent); + } else if (messageHashOrIntent instanceof Fr) { + messageHash = messageHashOrIntent; + } else { + messageHash = this.getMessageHash(messageHashOrIntent); + } + const witness = await this.account.createAuthWit(messageHash); await this.pxe.addAuthWitness(witness); return witness; @@ -59,129 +61,92 @@ export class AccountWallet extends BaseWallet { /** * Returns a function interaction to set a message hash as authorized or revoked in this account. + * * Public calls can then consume this authorization. 
- * @param messageHashOrIntent - The message or the caller and action to authorize/revoke + * + * @param messageHashOrIntent - The message hash or intent to authorize/revoke * @param authorized - True to authorize, false to revoke authorization. * @returns - A function interaction. */ public setPublicAuthWit( - messageHashOrIntent: - | Fr - | Buffer - | { - /** The caller to approve */ - caller: AztecAddress; - /** The action to approve */ - action: ContractFunctionInteraction | FunctionCall; - /** The chain id to approve */ - chainId?: Fr; - /** The version to approve */ - version?: Fr; - }, + messageHashOrIntent: Fr | Buffer | IntentInnerHash | IntentAction, authorized: boolean, ): ContractFunctionInteraction { - const message = this.getMessageHash(messageHashOrIntent); + let messageHash: Fr; + if (Buffer.isBuffer(messageHashOrIntent)) { + messageHash = Fr.fromBuffer(messageHashOrIntent); + } else if (messageHashOrIntent instanceof Fr) { + messageHash = messageHashOrIntent; + } else { + messageHash = this.getMessageHash(messageHashOrIntent); + } + return new ContractFunctionInteraction(this, AuthRegistryAddress, this.getSetAuthorizedAbi(), [ - message, + messageHash, authorized, ]); } - /** - * Returns a function interaction to cancel a message hash as authorized or revoked. - * @param messageHashOrIntent - The message or the caller and action to revoke - * @returns - A function interaction. 
- */ - public cancelPublicAuthWit( - messageHashOrIntent: - | Fr - | Buffer - | { - /** The caller to approve */ - caller: AztecAddress; - /** The action to approve */ - action: ContractFunctionInteraction | FunctionCall; - /** The chain id to approve */ - chainId?: Fr; - /** The version to approve */ - version?: Fr; - }, - ): ContractFunctionInteraction { - return this.setPublicAuthWit(messageHashOrIntent, false); + private getInnerHashAndConsumer(intent: IntentInnerHash | IntentAction): { + /** The inner hash */ + innerHash: Fr; + /** The consumer of the authwit */ + consumer: AztecAddress; + } { + if ('caller' in intent && 'action' in intent) { + const action = intent.action instanceof ContractFunctionInteraction ? intent.action.request() : intent.action; + return { + innerHash: computeInnerAuthWitHashFromAction(intent.caller, action), + consumer: action.to, + }; + } else if (Buffer.isBuffer(intent.innerHash)) { + return { innerHash: Fr.fromBuffer(intent.innerHash), consumer: intent.consumer }; + } + return { innerHash: intent.innerHash, consumer: intent.consumer }; } /** - * Returns the message hash for the given message or authwit input. 
- * @param messageHashOrIntent - The message hash or the caller and action to authorize + * Returns the message hash for the given intent + * + * @param intent - A tuple of (consumer and inner hash) or (caller and action) * @returns The message hash */ - private getMessageHash( - messageHashOrIntent: - | Fr - | Buffer - | { - /** The caller to approve */ - caller: AztecAddress; - /** The action to approve */ - action: ContractFunctionInteraction | FunctionCall; - /** The chain id to approve */ - chainId?: Fr; - /** The version to approve */ - version?: Fr; - }, - ): Fr { - if (Buffer.isBuffer(messageHashOrIntent)) { - return Fr.fromBuffer(messageHashOrIntent); - } else if (messageHashOrIntent instanceof Fr) { - return messageHashOrIntent; - } else { - return computeAuthWitMessageHash( - messageHashOrIntent.caller, - messageHashOrIntent.chainId || this.getChainId(), - messageHashOrIntent.version || this.getVersion(), - messageHashOrIntent.action instanceof ContractFunctionInteraction - ? messageHashOrIntent.action.request() - : messageHashOrIntent.action, - ); - } + private getMessageHash(intent: IntentInnerHash | IntentAction): Fr { + const chainId = this.getChainId(); + const version = this.getVersion(); + return computeAuthWitMessageHash(intent, { chainId, version }); } /** * Lookup the validity of an authwit in private and public contexts. - * If the authwit have been consumed already (nullifier spent), will return false in both contexts. - * @param target - The target contract address - * @param messageHashOrIntent - The message hash or the caller and action to authorize/revoke + * + * Uses the chain id and version of the wallet. + * + * @param onBehalfOf - The address of the "approver" + * @param intent - The consumer and inner hash or the caller and action to lookup + * * @returns - A struct containing the validity of the authwit in private and public contexts. 
*/ async lookupValidity( - target: AztecAddress, - messageHashOrIntent: - | Fr - | Buffer - | { - /** The caller to approve */ - caller: AztecAddress; - /** The action to approve */ - action: ContractFunctionInteraction | FunctionCall; - /** The chain id to approve */ - chainId?: Fr; - /** The version to approve */ - version?: Fr; - }, + onBehalfOf: AztecAddress, + intent: IntentInnerHash | IntentAction, ): Promise<{ /** boolean flag indicating if the authwit is valid in private context */ isValidInPrivate: boolean; /** boolean flag indicating if the authwit is valid in public context */ isValidInPublic: boolean; }> { - const messageHash = this.getMessageHash(messageHashOrIntent); + const { innerHash, consumer } = this.getInnerHashAndConsumer(intent); + + const messageHash = this.getMessageHash(intent); const results = { isValidInPrivate: false, isValidInPublic: false }; // Check private const witness = await this.getAuthWitness(messageHash); if (witness !== undefined) { - results.isValidInPrivate = (await new ContractFunctionInteraction(this, target, this.getLookupValidityAbi(), [ - messageHash, + results.isValidInPrivate = (await new ContractFunctionInteraction(this, onBehalfOf, this.getLookupValidityAbi(), [ + consumer, + innerHash, ]).simulate()) as boolean; } @@ -190,7 +155,7 @@ export class AccountWallet extends BaseWallet { this, AuthRegistryAddress, this.getIsConsumableAbi(), - [target, messageHash], + [onBehalfOf, messageHash], ).simulate()) as boolean; return results; @@ -220,31 +185,6 @@ export class AccountWallet extends BaseWallet { await interaction.send().wait(); } - /** - * Returns a function interaction to cancel a message hash as authorized in this account. - * @param messageHashOrIntent - The message or the caller and action to authorize/revoke - * @returns - A function interaction. 
- */ - public cancelAuthWit( - messageHashOrIntent: - | Fr - | Buffer - | { - /** The caller to approve */ - caller: AztecAddress; - /** The action to approve */ - action: ContractFunctionInteraction | FunctionCall; - /** The chain id to approve */ - chainId?: Fr; - /** The version to approve */ - version?: Fr; - }, - ): ContractFunctionInteraction { - const message = this.getMessageHash(messageHashOrIntent); - const args = [message]; - return new ContractFunctionInteraction(this, this.getAddress(), this.getCancelAuthwitAbi(), args); - } - /** Returns the complete address of the account that implements this wallet. */ public getCompleteAddress() { return this.account.getCompleteAddress(); @@ -278,24 +218,6 @@ export class AccountWallet extends BaseWallet { }; } - private getCancelAuthwitAbi(): FunctionAbi { - return { - name: 'cancel_authwit', - isInitializer: false, - functionType: FunctionType.PRIVATE, - isInternal: true, - isStatic: false, - parameters: [ - { - name: 'message_hash', - type: { kind: 'field' }, - visibility: 'private' as ABIParameterVisibility, - }, - ], - returnTypes: [], - }; - } - private getLookupValidityAbi(): FunctionAbi { return { name: 'lookup_validity', diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index 974895fa96e..247b509fbaf 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -2,7 +2,6 @@ import { type AuthWitness, type EventMetadata, type ExtendedNote, - type FunctionCall, type GetUnencryptedLogsResponse, type IncomingNotesFilter, type L2Block, @@ -32,8 +31,8 @@ import { type ContractClassWithId, type ContractInstanceWithAddress } from '@azt import { type NodeInfo } from '@aztec/types/interfaces'; import { type Wallet } from '../account/wallet.js'; -import { type ContractFunctionInteraction } from '../contract/contract_function_interaction.js'; import { type ExecutionRequestInit } from 
'../entrypoint/entrypoint.js'; +import { type IntentAction, type IntentInnerHash } from '../utils/authwit.js'; /** * A base class for Wallet implementations @@ -49,21 +48,7 @@ export abstract class BaseWallet implements Wallet { abstract createTxExecutionRequest(exec: ExecutionRequestInit): Promise; - abstract createAuthWit( - messageHashOrIntent: - | Fr - | Buffer - | { - /** The caller to approve */ - caller: AztecAddress; - /** The action to approve */ - action: ContractFunctionInteraction | FunctionCall; - /** The chain id to approve */ - chainId?: Fr; - /** The version to approve */ - version?: Fr; - }, - ): Promise; + abstract createAuthWit(intent: Fr | Buffer | IntentInnerHash | IntentAction): Promise; abstract rotateNullifierKeys(newNskM: Fq): Promise; diff --git a/yarn-project/aztec.js/src/wallet/signerless_wallet.ts b/yarn-project/aztec.js/src/wallet/signerless_wallet.ts index bba8c3ec66e..f69c78d5f33 100644 --- a/yarn-project/aztec.js/src/wallet/signerless_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/signerless_wallet.ts @@ -3,6 +3,7 @@ import { type CompleteAddress, type Fq, type Fr } from '@aztec/circuits.js'; import { DefaultEntrypoint } from '../entrypoint/default_entrypoint.js'; import { type EntrypointInterface, type ExecutionRequestInit } from '../entrypoint/entrypoint.js'; +import { type IntentAction, type IntentInnerHash } from '../utils/authwit.js'; import { BaseWallet } from './base_wallet.js'; /** @@ -12,7 +13,6 @@ export class SignerlessWallet extends BaseWallet { constructor(pxe: PXE, private entrypoint?: EntrypointInterface) { super(pxe); } - async createTxExecutionRequest(execution: ExecutionRequestInit): Promise { let entrypoint = this.entrypoint; if (!entrypoint) { @@ -39,7 +39,7 @@ export class SignerlessWallet extends BaseWallet { throw new Error('SignerlessWallet: Method getCompleteAddress not implemented.'); } - createAuthWit(_messageHash: Fr): Promise { + createAuthWit(_intent: Fr | Buffer | IntentInnerHash | IntentAction): 
Promise { throw new Error('SignerlessWallet: Method createAuthWit not implemented.'); } diff --git a/yarn-project/aztec.js/webpack.config.js b/yarn-project/aztec.js/webpack.config.js index 5e56741b688..1b60153b54b 100644 --- a/yarn-project/aztec.js/webpack.config.js +++ b/yarn-project/aztec.js/webpack.config.js @@ -66,7 +66,6 @@ export default { buffer: require.resolve('buffer/'), util: require.resolve('util/'), stream: require.resolve('stream-browserify'), - tty: require.resolve('tty-browserify'), }, }, }; diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 6caffb8c0ce..989cf37a039 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -46,6 +46,7 @@ "@aztec/protocol-contracts": "workspace:^", "@aztec/prover-client": "workspace:^", "@aztec/pxe": "workspace:^", + "@aztec/telemetry-client": "workspace:^", "abitype": "^0.8.11", "commander": "^11.1.0", "koa": "^2.14.2", @@ -76,7 +77,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/aztec/src/cli/cmds/start_archiver.ts b/yarn-project/aztec/src/cli/cmds/start_archiver.ts index 567f2b160b0..ad2f296d958 100644 --- a/yarn-project/aztec/src/cli/cmds/start_archiver.ts +++ b/yarn-project/aztec/src/cli/cmds/start_archiver.ts @@ -9,6 +9,10 @@ import { createDebugLogger } from '@aztec/aztec.js'; import { type ServerList } from '@aztec/foundation/json-rpc/server'; import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; import { initStoreForRollup } from '@aztec/kv-store/utils'; +import { + createAndStartTelemetryClient, + getConfigEnvVars as getTelemetryClientConfig, +} from '@aztec/telemetry-client/start'; import { mergeEnvVarsAndCliOptions, parseModuleOptions } from '../util.js'; @@ -30,7 +34,8 @@ export const startArchiver = async (options: any, signalHandlers: (() => Promise ); const 
archiverStore = new KVArchiverDataStore(store, archiverConfig.maxLogs); - const archiver = await Archiver.createAndSync(archiverConfig, archiverStore, true); + const telemetry = createAndStartTelemetryClient(getTelemetryClientConfig()); + const archiver = await Archiver.createAndSync(archiverConfig, archiverStore, telemetry, true); const archiverServer = createArchiverRpcServer(archiver); services.push({ archiver: archiverServer }); signalHandlers.push(archiver.stop); diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index 9245bd32708..6ef8e67fe4b 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -8,6 +8,10 @@ import { type ServerList } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; import { createProvingJobSourceServer } from '@aztec/prover-client/prover-agent'; import { type PXEServiceConfig, createPXERpcServer, getPXEServiceConfig } from '@aztec/pxe'; +import { + createAndStartTelemetryClient, + getConfigEnvVars as getTelemetryClientConfig, +} from '@aztec/telemetry-client/start'; import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; @@ -81,7 +85,8 @@ export const startNode = async ( } // Create and start Aztec Node. 
- const node = await createAztecNode(nodeConfig); + const telemetryClient = createAndStartTelemetryClient(getTelemetryClientConfig()); + const node = await createAztecNode(telemetryClient, nodeConfig); const nodeServer = createAztecNodeRpcServer(node); // Add node to services list diff --git a/yarn-project/aztec/src/cli/cmds/start_prover.ts b/yarn-project/aztec/src/cli/cmds/start_prover.ts index 4b299ab5661..64fd693f9ed 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover.ts @@ -2,6 +2,10 @@ import { BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; import { type ServerCircuitProver } from '@aztec/circuit-types'; import { getProverEnvVars } from '@aztec/prover-client'; import { ProverAgent, createProvingJobSourceClient } from '@aztec/prover-client/prover-agent'; +import { + createAndStartTelemetryClient, + getConfigEnvVars as getTelemetryClientConfig, +} from '@aztec/telemetry-client/start'; import { type ServiceStarter, parseModuleOptions } from '../util.js'; @@ -30,20 +34,24 @@ export const startProver: ServiceStarter = async (options, signalHandlers, logge ? 
parseInt(proverOptions.proverAgentPollInterval, 10) : proverOptions.proverAgentPollInterval; + const telemetry = createAndStartTelemetryClient(getTelemetryClientConfig()); let circuitProver: ServerCircuitProver; if (proverOptions.realProofs) { if (!proverOptions.acvmBinaryPath || !proverOptions.bbBinaryPath) { throw new Error('Cannot start prover without simulation or native prover options'); } - circuitProver = await BBNativeRollupProver.new({ - acvmBinaryPath: proverOptions.acvmBinaryPath, - bbBinaryPath: proverOptions.bbBinaryPath, - acvmWorkingDirectory: proverOptions.acvmWorkingDirectory, - bbWorkingDirectory: proverOptions.bbWorkingDirectory, - }); + circuitProver = await BBNativeRollupProver.new( + { + acvmBinaryPath: proverOptions.acvmBinaryPath, + bbBinaryPath: proverOptions.bbBinaryPath, + acvmWorkingDirectory: proverOptions.acvmWorkingDirectory, + bbWorkingDirectory: proverOptions.bbWorkingDirectory, + }, + telemetry, + ); } else { - circuitProver = new TestCircuitProver(); + circuitProver = new TestCircuitProver(telemetry); } const agent = new ProverAgent(circuitProver, agentConcurrency, pollInterval); diff --git a/yarn-project/aztec/src/sandbox.ts b/yarn-project/aztec/src/sandbox.ts index d9c7f0d113d..79d6e6e3218 100644 --- a/yarn-project/aztec/src/sandbox.ts +++ b/yarn-project/aztec/src/sandbox.ts @@ -36,6 +36,8 @@ import { getCanonicalAuthRegistry } from '@aztec/protocol-contracts/auth-registr import { GasTokenAddress, getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; import { type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type HDAccount, type PrivateKeyAccount, createPublicClient, http as httpViemTransport } from 'viem'; import { mnemonicToAccount } from 'viem/accounts'; @@ 
-252,7 +254,7 @@ export async function createSandbox(config: Partial = {}) { await deployContractsToL1(aztecNodeConfig, hdAccount); } - const node = await createAztecNode(aztecNodeConfig); + const node = await createAztecNode(new NoopTelemetryClient(), aztecNodeConfig); const pxe = await createAztecPXE(node); await deployCanonicalKeyRegistry( @@ -281,9 +283,9 @@ export async function createSandbox(config: Partial = {}) { * Create and start a new Aztec RPC HTTP Server * @param config - Optional Aztec node settings. */ -export async function createAztecNode(config: Partial = {}) { +export async function createAztecNode(telemetryClient: TelemetryClient, config: Partial = {}) { const aztecNodeConfig: AztecNodeConfig = { ...getConfigEnvVars(), ...config }; - const node = await AztecNodeService.createAndSync(aztecNodeConfig); + const node = await AztecNodeService.createAndSync(aztecNodeConfig, telemetryClient); return node; } diff --git a/yarn-project/aztec/tsconfig.json b/yarn-project/aztec/tsconfig.json index ef88fd56147..557c0080199 100644 --- a/yarn-project/aztec/tsconfig.json +++ b/yarn-project/aztec/tsconfig.json @@ -62,6 +62,9 @@ }, { "path": "../pxe" + }, + { + "path": "../telemetry-client" } ], "include": ["src"] diff --git a/yarn-project/bb-prover/package.json b/yarn-project/bb-prover/package.json index 0d46748757b..89ad0498089 100644 --- a/yarn-project/bb-prover/package.json +++ b/yarn-project/bb-prover/package.json @@ -35,7 +35,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ @@ -56,6 +64,7 @@ "@aztec/foundation": "workspace:^", "@aztec/noir-protocol-circuits-types": "workspace:^", "@aztec/simulator": "workspace:^", + "@aztec/telemetry-client": "workspace:^", "@noir-lang/noirc_abi": "portal:../../noir/packages/noirc_abi", "@noir-lang/types": "portal:../../noir/packages/types", "commander": "^9.0.0", 
diff --git a/yarn-project/bb-prover/src/avm_proving.test.ts b/yarn-project/bb-prover/src/avm_proving.test.ts index 917850189ff..53987df2e8c 100644 --- a/yarn-project/bb-prover/src/avm_proving.test.ts +++ b/yarn-project/bb-prover/src/avm_proving.test.ts @@ -35,6 +35,7 @@ import { initContext, initExecutionEnvironment, initHostStorage, + initPersistableStateManager, } from '@aztec/simulator/avm/fixtures'; import { jest } from '@jest/globals'; @@ -43,11 +44,7 @@ import fs from 'node:fs/promises'; import { tmpdir } from 'node:os'; import path from 'path'; -import { AvmPersistableStateManager } from '../../simulator/src/avm/journal/journal.js'; -import { - convertAvmResultsToPxResult, - createPublicExecution, -} from '../../simulator/src/public/transitional_adaptors.js'; +import { PublicSideEffectTrace } from '../../simulator/src/public/side_effect_trace.js'; import { SerializableContractInstance } from '../../types/src/contracts/contract_instance.js'; import { type BBSuccess, BB_RESULT, generateAvmProof, verifyAvmProof } from './bb/execute.js'; import { extractVkData } from './verification_key/verification_key_data.js'; @@ -224,15 +221,13 @@ const proveAndVerifyAvmTestContract = async ( storageDb.storageRead.mockResolvedValue(Promise.resolve(storageValue)); const hostStorage = initHostStorage({ contractsDb }); - const persistableState = new AvmPersistableStateManager(hostStorage); + const trace = new PublicSideEffectTrace(startSideEffectCounter); + const persistableState = initPersistableStateManager({ hostStorage, trace }); const context = initContext({ env: environment, persistableState }); const nestedCallBytecode = getAvmTestContractBytecode('add_args_return'); - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(nestedCallBytecode)); + jest.spyOn(hostStorage.contractsDb, 'getBytecode').mockResolvedValue(nestedCallBytecode); const startGas = new Gas(context.machineState.gasLeft.daGas, 
context.machineState.gasLeft.l2Gas); - const oldPublicExecution = createPublicExecution(startSideEffectCounter, environment, calldata); const internalLogger = createDebugLogger('aztec:avm-proving-test'); const logger = (msg: string, _data?: any) => internalLogger.verbose(msg); @@ -255,25 +250,21 @@ const proveAndVerifyAvmTestContract = async ( expect(avmResult.revertReason?.message).toContain(assertionErrString); } - const pxResult = convertAvmResultsToPxResult( - avmResult, - startSideEffectCounter, - oldPublicExecution, + const pxResult = trace.toPublicExecutionResult( + environment, startGas, - context, - simulator.getBytecode(), + /*endGasLeft=*/ Gas.from(context.machineState.gasLeft), + /*bytecode=*/ simulator.getBytecode()!, + avmResult, functionName, ); - // TODO(dbanks12): public inputs should not be empty.... Need to construct them from AvmContext? - const uncompressedBytecode = simulator.getBytecode()!; - const publicInputs = getPublicInputs(pxResult); const avmCircuitInputs = new AvmCircuitInputs( functionName, - uncompressedBytecode, - context.environment.calldata, - publicInputs, - pxResult.avmHints, + /*bytecode=*/ simulator.getBytecode()!, // uncompressed bytecode + /*calldata=*/ context.environment.calldata, + /*publicInputs=*/ getPublicInputs(pxResult), + /*avmHints=*/ pxResult.avmCircuitHints, ); // Then we prove. diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index 60113d1142b..c3b28317377 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -21,7 +21,7 @@ export enum BB_RESULT { export type BBSuccess = { status: BB_RESULT.SUCCESS | BB_RESULT.ALREADY_PRESENT; - duration: number; + durationMs: number; /** Full path of the public key. */ pkPath?: string; /** Base directory for the VKs (raw, fields). 
*/ @@ -155,7 +155,7 @@ export async function generateKeyForNoirCircuit( if (result.status == BB_RESULT.SUCCESS) { return { status: BB_RESULT.SUCCESS, - duration, + durationMs: duration, pkPath: key === 'pk' ? outputPath : undefined, vkPath: key === 'vk' ? outputPath : undefined, proofPath: undefined, @@ -174,7 +174,7 @@ export async function generateKeyForNoirCircuit( if (!res) { return { status: BB_RESULT.ALREADY_PRESENT, - duration: 0, + durationMs: 0, pkPath: key === 'pk' ? outputPath : undefined, vkPath: key === 'vk' ? outputPath : undefined, }; @@ -237,7 +237,7 @@ export async function generateProof( if (result.status == BB_RESULT.SUCCESS) { return { status: BB_RESULT.SUCCESS, - duration, + durationMs: duration, proofPath: `${outputPath}`, pkPath: undefined, vkPath: `${outputPath}`, @@ -346,7 +346,7 @@ export async function generateAvmProof( if (result.status == BB_RESULT.SUCCESS) { return { status: BB_RESULT.SUCCESS, - duration, + durationMs: duration, proofPath: join(outputPath, PROOF_FILENAME), pkPath: undefined, vkPath: outputPath, @@ -426,7 +426,7 @@ async function verifyProofInternal( const result = await executeBB(pathToBB, command, args, log); const duration = timer.ms(); if (result.status == BB_RESULT.SUCCESS) { - return { status: BB_RESULT.SUCCESS, duration }; + return { status: BB_RESULT.SUCCESS, durationMs: duration }; } // Not a great error message here but it is difficult to decipher what comes from bb return { @@ -466,7 +466,7 @@ export async function writeVkAsFields( const result = await executeBB(pathToBB, 'vk_as_fields', args, log); const duration = timer.ms(); if (result.status == BB_RESULT.SUCCESS) { - return { status: BB_RESULT.SUCCESS, duration, vkPath: verificationKeyPath }; + return { status: BB_RESULT.SUCCESS, durationMs: duration, vkPath: verificationKeyPath }; } // Not a great error message here but it is difficult to decipher what comes from bb return { @@ -508,7 +508,7 @@ export async function writeProofAsFields( const result = 
await executeBB(pathToBB, 'proof_as_fields', args, log); const duration = timer.ms(); if (result.status == BB_RESULT.SUCCESS) { - return { status: BB_RESULT.SUCCESS, duration, proofPath: proofPath }; + return { status: BB_RESULT.SUCCESS, durationMs: duration, proofPath: proofPath }; } // Not a great error message here but it is difficult to decipher what comes from bb return { @@ -549,7 +549,7 @@ export async function generateContractForVerificationKey( const result = await executeBB(pathToBB, 'contract', args, log); const duration = timer.ms(); if (result.status == BB_RESULT.SUCCESS) { - return { status: BB_RESULT.SUCCESS, duration, contractPath }; + return { status: BB_RESULT.SUCCESS, durationMs: duration, contractPath }; } // Not a great error message here but it is difficult to decipher what comes from bb return { @@ -564,7 +564,7 @@ export async function generateContractForVerificationKey( if (!res) { return { status: BB_RESULT.ALREADY_PRESENT, - duration: 0, + durationMs: 0, contractPath, }; } diff --git a/yarn-project/bb-prover/src/instrumentation.ts b/yarn-project/bb-prover/src/instrumentation.ts new file mode 100644 index 00000000000..a510388a428 --- /dev/null +++ b/yarn-project/bb-prover/src/instrumentation.ts @@ -0,0 +1,149 @@ +import { type CircuitName } from '@aztec/circuit-types/stats'; +import { type Timer } from '@aztec/foundation/timer'; +import { + Attributes, + type Gauge, + type Histogram, + Metrics, + type TelemetryClient, + type Tracer, + ValueType, +} from '@aztec/telemetry-client'; + +/** + * Instrumentation class for Prover implementations. 
+ */ +export class ProverInstrumentation { + private simulationDuration: Histogram; + private witGenDuration: Gauge; + private provingDuration: Gauge; + + private witGenInputSize: Gauge; + private witGenOutputSize: Gauge; + + private proofSize: Gauge; + private circuitSize: Gauge; + private circuitPublicInputCount: Gauge; + + public readonly tracer: Tracer; + + constructor(telemetry: TelemetryClient, name: string) { + this.tracer = telemetry.getTracer(name); + const meter = telemetry.getMeter(name); + + this.simulationDuration = meter.createHistogram(Metrics.CIRCUIT_SIMULATION_DURATION, { + description: 'Records how long it takes to simulate a circuit', + unit: 's', + valueType: ValueType.DOUBLE, + advice: { + explicitBucketBoundaries: [0.1, 0.25, 0.5, 1, 2.5, 5, 10, 30, 60], + }, + }); + + this.witGenDuration = meter.createGauge(Metrics.CIRCUIT_WITNESS_GEN_DURATION, { + description: 'Records how long it takes to generate the partial witness for a circuit', + unit: 's', + valueType: ValueType.DOUBLE, + }); + + // ideally this would be a histogram, but proving takes a long time on the server + // and they don't happen that often so Prometheus & Grafana have a hard time handling it + this.provingDuration = meter.createGauge(Metrics.CIRCUIT_PROVING_DURATION, { + unit: 's', + description: 'Records how long it takes to prove a circuit', + valueType: ValueType.DOUBLE, + }); + + this.witGenInputSize = meter.createGauge(Metrics.CIRCUIT_WITNESS_GEN_INPUT_SIZE, { + unit: 'By', + description: 'Records the size of the input to the witness generation', + valueType: ValueType.INT, + }); + + this.witGenOutputSize = meter.createGauge(Metrics.CIRCUIT_WITNESS_GEN_OUTPUT_SIZE, { + unit: 'By', + description: 'Records the size of the output of the witness generation', + valueType: ValueType.INT, + }); + + this.proofSize = meter.createGauge(Metrics.CIRCUIT_PROVING_PROOF_SIZE, { + unit: 'By', + description: 'Records the size of the proof generated for a circuit', + valueType: 
ValueType.INT, + }); + + this.circuitPublicInputCount = meter.createGauge(Metrics.CIRCUIT_PUBLIC_INPUTS_COUNT, { + description: 'Records the number of public inputs in a circuit', + valueType: ValueType.INT, + }); + + this.circuitSize = meter.createGauge(Metrics.CIRCUIT_SIZE, { + description: 'Records the size of the circuit in gates', + valueType: ValueType.INT, + }); + } + + /** + * Records the duration of a circuit operation. + * @param metric - The metric to record + * @param circuitName - The name of the circuit + * @param timerOrS - The duration + */ + recordDuration( + metric: 'simulationDuration' | 'witGenDuration' | 'provingDuration', + circuitName: CircuitName, + timerOrS: Timer | number, + ) { + const s = typeof timerOrS === 'number' ? timerOrS : timerOrS.s(); + this[metric].record(s, { + [Attributes.PROTOCOL_CIRCUIT_NAME]: circuitName, + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + }); + } + + /** + * Records the duration of an AVM circuit operation. + * @param metric - The metric to record + * @param appCircuitName - The name of the function circuit (should be a `contract:function` string) + * @param timerOrS - The duration + */ + recordAvmDuration(metric: 'witGenDuration' | 'provingDuration', appCircuitName: string, timerOrS: Timer | number) { + const s = typeof timerOrS === 'number' ? timerOrS : timerOrS.s(); + this[metric].record(s, { + [Attributes.APP_CIRCUIT_NAME]: appCircuitName, + }); + } + + /** + * Records the size of a circuit operation. + * @param metric - Records the size of a circuit operation. 
+ * @param circuitName - The name of the circuit + * @param size - The size + */ + recordSize( + metric: 'witGenInputSize' | 'witGenOutputSize' | 'proofSize' | 'circuitSize' | 'circuitPublicInputCount', + circuitName: CircuitName, + size: number, + ) { + this[metric].record(Math.ceil(size), { + [Attributes.PROTOCOL_CIRCUIT_NAME]: circuitName, + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + }); + } + + /** + * Records the size of an AVM circuit operation. + * @param metric - The metric to record + * @param appCircuitName - The name of the function circuit (should be a `contract:function` string) + * @param size - The size + */ + recordAvmSize( + metric: 'witGenInputSize' | 'witGenOutputSize' | 'proofSize' | 'circuitSize' | 'circuitPublicInputCount', + appCircuitName: string, + size: number, + ) { + this[metric].record(Math.ceil(size), { + [Attributes.APP_CIRCUIT_NAME]: appCircuitName, + }); + } +} diff --git a/yarn-project/bb-prover/src/prover/bb_native_proof_creator.ts b/yarn-project/bb-prover/src/prover/bb_native_proof_creator.ts index 3cc08097278..ea93f78fe77 100644 --- a/yarn-project/bb-prover/src/prover/bb_native_proof_creator.ts +++ b/yarn-project/bb-prover/src/prover/bb_native_proof_creator.ts @@ -176,7 +176,7 @@ export class BBNativeProofCreator implements ProofCreator { throw new Error(errorMessage); } - this.log.info(`Successfully verified ${circuitType} proof in ${Math.ceil(result.duration)} ms`); + this.log.info(`Successfully verified ${circuitType} proof in ${Math.ceil(result.durationMs)} ms`); } private async verifyProofFromKey( @@ -339,7 +339,7 @@ export class BBNativeProofCreator implements ProofCreator { this.log.debug(`Generated proof`, { eventName: 'circuit-proving', circuitName: 'app-circuit', - duration: provingResult.duration, + duration: provingResult.durationMs, inputSize: compressedBincodedWitness.length, proofSize: proof.binaryProof.buffer.length, appCircuitName, @@ -358,7 +358,7 @@ export class BBNativeProofCreator implements 
ProofCreator { this.log.debug(`Generated proof`, { circuitName: mapProtocolArtifactNameToCircuitName(circuitType), - duration: provingResult.duration, + duration: provingResult.durationMs, eventName: 'circuit-proving', inputSize: compressedBincodedWitness.length, proofSize: proof.binaryProof.buffer.length, diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts index 33210ed6444..7eed17fbe13 100644 --- a/yarn-project/bb-prover/src/prover/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_prover.ts @@ -57,6 +57,7 @@ import { convertRootRollupOutputsFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; import { NativeACVMSimulator } from '@aztec/simulator'; +import { Attributes, type TelemetryClient, trackSpan } from '@aztec/telemetry-client'; import { abiEncode } from '@noir-lang/noirc_abi'; import { type Abi, type WitnessMap } from '@noir-lang/types'; @@ -78,6 +79,7 @@ import { writeProofAsFields, } from '../bb/execute.js'; import type { ACVMConfig, BBConfig } from '../config.js'; +import { ProverInstrumentation } from '../instrumentation.js'; import { PublicKernelArtifactMapping } from '../mappings/mappings.js'; import { mapProtocolArtifactNameToCircuitName } from '../stats.js'; import { extractVkData } from '../verification_key/verification_key_data.js'; @@ -102,9 +104,18 @@ export class BBNativeRollupProver implements ServerCircuitProver { ServerProtocolArtifact, Promise >(); - constructor(private config: BBProverConfig) {} - static async new(config: BBProverConfig) { + private instrumentation: ProverInstrumentation; + + constructor(private config: BBProverConfig, telemetry: TelemetryClient) { + this.instrumentation = new ProverInstrumentation(telemetry, 'BBNativeRollupProver'); + } + + get tracer() { + return this.instrumentation.tracer; + } + + static async new(config: BBProverConfig, telemetry: TelemetryClient) { await fs.access(config.acvmBinaryPath, fs.constants.R_OK); await 
fs.mkdir(config.acvmWorkingDirectory, { recursive: true }); await fs.access(config.bbBinaryPath, fs.constants.R_OK); @@ -112,7 +123,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { logger.info(`Using native BB at ${config.bbBinaryPath} and working directory ${config.bbWorkingDirectory}`); logger.info(`Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`); - return new BBNativeRollupProver(config); + return new BBNativeRollupProver(config, telemetry); } /** @@ -120,6 +131,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { * @param inputs - Inputs to the circuit. * @returns The public inputs of the parity circuit. */ + @trackSpan('BBNativeRollupProver.getBaseParityProof', { [Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-parity' }) public async getBaseParityProof(inputs: BaseParityInputs): Promise> { const { circuitOutput, proof } = await this.createRecursiveProof( inputs, @@ -141,6 +153,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { * @param inputs - Inputs to the circuit. * @returns The public inputs of the parity circuit. */ + @trackSpan('BBNativeRollupProver.getRootParityProof', { [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-parity' }) public async getRootParityProof( inputs: RootParityInputs, ): Promise> { @@ -164,6 +177,9 @@ export class BBNativeRollupProver implements ServerCircuitProver { * @param inputs - The inputs to the AVM circuit. * @returns The proof. 
*/ + @trackSpan('BBNativeRollupProver.getAvmProof', inputs => ({ + [Attributes.APP_CIRCUIT_NAME]: inputs.functionName, + })) public async getAvmProof(inputs: AvmCircuitInputs): Promise { const proofAndVk = await this.createAvmProof(inputs); await this.verifyAvmProof(proofAndVk.proof, proofAndVk.verificationKey); @@ -175,6 +191,11 @@ export class BBNativeRollupProver implements ServerCircuitProver { * @param kernelRequest - The object encapsulating the request for a proof * @returns The requested circuit's public inputs and proof */ + @trackSpan('BBNativeRollupProver.getPublicKernelProof', kernelReq => ({ + [Attributes.PROTOCOL_CIRCUIT_NAME]: mapProtocolArtifactNameToCircuitName( + PublicKernelArtifactMapping[kernelReq.type]!.artifact, + ), + })) public async getPublicKernelProof( kernelRequest: PublicKernelNonTailRequest, ): Promise> { @@ -385,11 +406,16 @@ export class BBNativeRollupProver implements ServerCircuitProver { const inputWitness = convertInput(input); const timer = new Timer(); const outputWitness = await simulator.simulateCircuit(inputWitness, artifact); - const witnessGenerationDuration = timer.ms(); const output = convertOutput(outputWitness); + + const circuitName = mapProtocolArtifactNameToCircuitName(circuitType); + this.instrumentation.recordDuration('witGenDuration', circuitName, timer); + this.instrumentation.recordSize('witGenInputSize', circuitName, input.toBuffer().length); + this.instrumentation.recordSize('witGenOutputSize', circuitName, output.toBuffer().length); + logger.debug(`Generated witness`, { - circuitName: mapProtocolArtifactNameToCircuitName(circuitType), - duration: witnessGenerationDuration, + circuitName, + duration: timer.ms(), inputSize: input.toBuffer().length, outputSize: output.toBuffer().length, eventName: 'circuit-witness-generation', @@ -439,10 +465,17 @@ export class BBNativeRollupProver implements ServerCircuitProver { const rawProof = await fs.readFile(`${provingResult.proofPath!}/${PROOF_FILENAME}`); const proof 
= new Proof(rawProof, vkData.numPublicInputs); - logger.info(`Generated proof for ${circuitType} in ${Math.ceil(provingResult.duration)} ms`, { - circuitName: mapProtocolArtifactNameToCircuitName(circuitType), + const circuitName = mapProtocolArtifactNameToCircuitName(circuitType); + + this.instrumentation.recordDuration('provingDuration', circuitName, provingResult.durationMs / 1000); + this.instrumentation.recordSize('proofSize', circuitName, proof.buffer.length); + this.instrumentation.recordSize('circuitPublicInputCount', circuitName, vkData.numPublicInputs); + this.instrumentation.recordSize('circuitSize', circuitName, vkData.circuitSize); + + logger.info(`Generated proof for ${circuitType} in ${Math.ceil(provingResult.durationMs)} ms`, { + circuitName, // does not include reading the proof from disk - duration: provingResult.duration, + duration: provingResult.durationMs, proofSize: proof.buffer.length, eventName: 'circuit-proving', // circuitOutput is the partial witness that became the input to the proof @@ -484,13 +517,19 @@ export class BBNativeRollupProver implements ServerCircuitProver { const proof = new Proof(rawProof, verificationKey.numPublicInputs); const circuitType = 'avm-circuit' as const; + const appCircuitName = 'unknown' as const; + this.instrumentation.recordAvmDuration('provingDuration', appCircuitName, provingResult.durationMs); + this.instrumentation.recordAvmSize('proofSize', appCircuitName, proof.buffer.length); + this.instrumentation.recordAvmSize('circuitPublicInputCount', appCircuitName, verificationKey.numPublicInputs); + this.instrumentation.recordAvmSize('circuitSize', appCircuitName, verificationKey.circuitSize); + logger.info( - `Generated proof for ${circuitType}(${input.functionName}) in ${Math.ceil(provingResult.duration)} ms`, + `Generated proof for ${circuitType}(${input.functionName}) in ${Math.ceil(provingResult.durationMs)} ms`, { circuitName: circuitType, appCircuitName: input.functionName, // does not include reading 
the proof from disk - duration: provingResult.duration, + duration: provingResult.durationMs, proofSize: proof.buffer.length, eventName: 'circuit-proving', inputSize: input.toBuffer().length, @@ -534,14 +573,19 @@ export class BBNativeRollupProver implements ServerCircuitProver { // Read the proof as fields const proof = await this.readProofAsFields(provingResult.proofPath!, circuitType, proofLength); + const circuitName = mapProtocolArtifactNameToCircuitName(circuitType); + this.instrumentation.recordDuration('provingDuration', circuitName, provingResult.durationMs / 1000); + this.instrumentation.recordSize('proofSize', circuitName, proof.binaryProof.buffer.length); + this.instrumentation.recordSize('circuitPublicInputCount', circuitName, vkData.numPublicInputs); + this.instrumentation.recordSize('circuitSize', circuitName, vkData.circuitSize); logger.info( - `Generated proof for ${circuitType} in ${Math.ceil(provingResult.duration)} ms, size: ${ + `Generated proof for ${circuitType} in ${Math.ceil(provingResult.durationMs)} ms, size: ${ proof.proof.length } fields`, { - circuitName: mapProtocolArtifactNameToCircuitName(circuitType), + circuitName, circuitSize: vkData.circuitSize, - duration: provingResult.duration, + duration: provingResult.durationMs, inputSize: output.toBuffer().length, proofSize: proof.binaryProof.buffer.length, eventName: 'circuit-proving', @@ -603,7 +647,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { throw new Error(errorMessage); } - logger.debug(`Successfully verified proof from key in ${result.duration} ms`); + logger.debug(`Successfully verified proof from key in ${result.durationMs} ms`); }; await runInDirectory(this.config.bbWorkingDirectory, operation); diff --git a/yarn-project/bb-prover/src/stats.ts b/yarn-project/bb-prover/src/stats.ts index c61b3d5ccca..f31e611dd8c 100644 --- a/yarn-project/bb-prover/src/stats.ts +++ b/yarn-project/bb-prover/src/stats.ts @@ -1,21 +1,7 @@ -import { type PublicKernelRequest, 
PublicKernelType } from '@aztec/circuit-types'; import type { CircuitName } from '@aztec/circuit-types/stats'; import { type ClientProtocolArtifact, type ServerProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; -export function mapPublicKernelToCircuitName(kernelType: PublicKernelRequest['type']): CircuitName { - switch (kernelType) { - case PublicKernelType.SETUP: - return 'public-kernel-setup'; - case PublicKernelType.APP_LOGIC: - return 'public-kernel-app-logic'; - case PublicKernelType.TEARDOWN: - return 'public-kernel-teardown'; - case PublicKernelType.TAIL: - return 'public-kernel-tail'; - default: - throw new Error(`Unknown kernel type: ${kernelType}`); - } -} +export { mapPublicKernelToCircuitName } from '@aztec/circuit-types'; export function mapProtocolArtifactNameToCircuitName( artifact: ServerProtocolArtifact | ClientProtocolArtifact, diff --git a/yarn-project/bb-prover/src/test/test_circuit_prover.ts b/yarn-project/bb-prover/src/test/test_circuit_prover.ts index c4c24794e8f..85ce58a9580 100644 --- a/yarn-project/bb-prover/src/test/test_circuit_prover.ts +++ b/yarn-project/bb-prover/src/test/test_circuit_prover.ts @@ -57,7 +57,9 @@ import { convertSimulatedPublicTailOutputFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; import { type SimulationProvider, WASMSimulator, emitCircuitSimulationStats } from '@aztec/simulator'; +import { type TelemetryClient, trackSpan } from '@aztec/telemetry-client'; +import { ProverInstrumentation } from '../instrumentation.js'; import { SimulatedPublicKernelArtifactMapping } from '../mappings/mappings.js'; import { mapPublicKernelToCircuitName } from '../stats.js'; @@ -81,11 +83,19 @@ const VERIFICATION_KEYS: Record */ export class TestCircuitProver implements ServerCircuitProver { private wasmSimulator = new WASMSimulator(); + private instrumentation: ProverInstrumentation; constructor( + telemetry: TelemetryClient, private simulationProvider?: SimulationProvider, private logger = 
createDebugLogger('aztec:test-prover'), - ) {} + ) { + this.instrumentation = new ProverInstrumentation(telemetry, 'TestCircuitProver'); + } + + get tracer() { + return this.instrumentation.tracer; + } public async getEmptyPrivateKernelProof( inputs: PrivateKernelEmptyInputData, @@ -111,6 +121,7 @@ export class TestCircuitProver implements ServerCircuitProver { * @param inputs - Inputs to the circuit. * @returns The public inputs of the parity circuit. */ + @trackSpan('TestCircuitProver.getBaseParityProof') public async getBaseParityProof(inputs: BaseParityInputs): Promise> { const timer = new Timer(); const witnessMap = convertBaseParityInputsToWitnessMap(inputs); @@ -125,6 +136,8 @@ export class TestCircuitProver implements ServerCircuitProver { result, ); + this.instrumentation.recordDuration('simulationDuration', 'base-parity', timer); + emitCircuitSimulationStats( 'base-parity', timer.ms(), @@ -141,6 +154,7 @@ export class TestCircuitProver implements ServerCircuitProver { * @param inputs - Inputs to the circuit. * @returns The public inputs of the parity circuit. */ + @trackSpan('TestCircuitProver.getRootParityProof') public async getRootParityProof( inputs: RootParityInputs, ): Promise> { @@ -158,6 +172,7 @@ export class TestCircuitProver implements ServerCircuitProver { result, ); + this.instrumentation.recordDuration('simulationDuration', 'root-parity', timer); emitCircuitSimulationStats( 'root-parity', timer.ms(), @@ -174,6 +189,7 @@ export class TestCircuitProver implements ServerCircuitProver { * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. 
*/ + @trackSpan('TestCircuitProver.getBaseRollupProof') public async getBaseRollupProof( input: BaseRollupInputs, ): Promise> { @@ -185,6 +201,7 @@ export class TestCircuitProver implements ServerCircuitProver { const result = convertSimulatedBaseRollupOutputsFromWitnessMap(witness); + this.instrumentation.recordDuration('simulationDuration', 'base-rollup', timer); emitCircuitSimulationStats( 'base-rollup', timer.ms(), @@ -203,6 +220,7 @@ export class TestCircuitProver implements ServerCircuitProver { * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. */ + @trackSpan('TestCircuitProver.getMergeRollupProof') public async getMergeRollupProof( input: MergeRollupInputs, ): Promise> { @@ -214,6 +232,7 @@ export class TestCircuitProver implements ServerCircuitProver { const result = convertMergeRollupOutputsFromWitnessMap(witness); + this.instrumentation.recordDuration('simulationDuration', 'merge-rollup', timer); emitCircuitSimulationStats( 'merge-rollup', timer.ms(), @@ -233,6 +252,7 @@ export class TestCircuitProver implements ServerCircuitProver { * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. 
*/ + @trackSpan('TestCircuitProver.getRootRollupProof') public async getRootRollupProof( input: RootRollupInputs, ): Promise> { @@ -244,6 +264,7 @@ export class TestCircuitProver implements ServerCircuitProver { const result = convertRootRollupOutputsFromWitnessMap(witness); + this.instrumentation.recordDuration('simulationDuration', 'root-rollup', timer); emitCircuitSimulationStats( 'root-rollup', timer.ms(), @@ -258,6 +279,7 @@ export class TestCircuitProver implements ServerCircuitProver { ); } + @trackSpan('TestCircuitProver.getPublicKernelProof') public async getPublicKernelProof( kernelRequest: PublicKernelNonTailRequest, ): Promise> { @@ -274,8 +296,10 @@ export class TestCircuitProver implements ServerCircuitProver { ); const result = kernelOps.convertOutputs(witness); + const circuitName = mapPublicKernelToCircuitName(kernelRequest.type); + this.instrumentation.recordDuration('simulationDuration', circuitName, timer); emitCircuitSimulationStats( - mapPublicKernelToCircuitName(kernelRequest.type), + circuitName, timer.ms(), kernelRequest.inputs.toBuffer().length, result.toBuffer().length, @@ -289,6 +313,7 @@ export class TestCircuitProver implements ServerCircuitProver { ); } + @trackSpan('TestCircuitProver.getPublicTailProof') public async getPublicTailProof( kernelRequest: PublicKernelTailRequest, ): Promise> { @@ -301,6 +326,7 @@ export class TestCircuitProver implements ServerCircuitProver { ); const result = convertSimulatedPublicTailOutputFromWitnessMap(witness); + this.instrumentation.recordDuration('simulationDuration', 'public-kernel-tail', timer); emitCircuitSimulationStats( 'public-kernel-tail', timer.ms(), diff --git a/yarn-project/bb-prover/tsconfig.json b/yarn-project/bb-prover/tsconfig.json index d2906818893..e0e59ed584c 100644 --- a/yarn-project/bb-prover/tsconfig.json +++ b/yarn-project/bb-prover/tsconfig.json @@ -20,6 +20,9 @@ }, { "path": "../simulator" + }, + { + "path": "../telemetry-client" } ], "include": ["src"] diff --git 
a/yarn-project/builder/package.json b/yarn-project/builder/package.json index 29b648700ff..c6f153b7f05 100644 --- a/yarn-project/builder/package.json +++ b/yarn-project/builder/package.json @@ -41,7 +41,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/builder/src/contract-interface-gen/typescript.ts b/yarn-project/builder/src/contract-interface-gen/typescript.ts index 5fc2828c500..d092f4b0688 100644 --- a/yarn-project/builder/src/contract-interface-gen/typescript.ts +++ b/yarn-project/builder/src/contract-interface-gen/typescript.ts @@ -225,7 +225,7 @@ function generateNotesGetter(input: ContractArtifact) { .map( ([name, { id }]) => `${name}: { - id: new Fr(${id.toBigInt()}n), + id: new NoteSelector(${id.value}), }`, ) .join(',\n'); @@ -281,11 +281,7 @@ function generateEvents(events: any[] | undefined) { if (payload === undefined) { return undefined; } - if ( - !eventSelector.equals( - EventSelector.fromField(payload.eventTypeId), - ) - ) { + if (!eventSelector.equals(payload.eventTypeId)) { return undefined; } if (payload.event.items.length !== fieldsLength) { @@ -349,14 +345,14 @@ import { DeployMethod, EthAddress, EthAddressLike, + EventSelector, FieldLike, Fr, - EventSelector, - FunctionSelector, FunctionSelectorLike, L1EventPayload, loadContractArtifact, NoirCompiledContract, + NoteSelector, Point, PublicKey, Wallet, diff --git a/yarn-project/circuit-types/package.json b/yarn-project/circuit-types/package.json index 8779c385af2..09e852caff3 100644 --- a/yarn-project/circuit-types/package.json +++ b/yarn-project/circuit-types/package.json @@ -36,7 +36,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git 
a/yarn-project/circuit-types/src/logs/encrypted_l2_note_log.ts b/yarn-project/circuit-types/src/logs/encrypted_l2_note_log.ts index fea25ec838d..a84039e3829 100644 --- a/yarn-project/circuit-types/src/logs/encrypted_l2_note_log.ts +++ b/yarn-project/circuit-types/src/logs/encrypted_l2_note_log.ts @@ -52,6 +52,10 @@ export class EncryptedL2NoteLog { return sha256Trunc(preimage); } + public getSiloedHash(): Buffer { + return this.hash(); + } + /** * Crates a random log. * @returns A random log. diff --git a/yarn-project/circuit-types/src/logs/function_l2_logs.ts b/yarn-project/circuit-types/src/logs/function_l2_logs.ts index a176af8bf00..e45fcbef4e9 100644 --- a/yarn-project/circuit-types/src/logs/function_l2_logs.ts +++ b/yarn-project/circuit-types/src/logs/function_l2_logs.ts @@ -37,9 +37,8 @@ export abstract class FunctionL2Logs acc + log.length + 4, 0) + 4; + return this.getKernelLength() + 4; } /** diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_incoming_body/encrypted_note_log_incoming_body.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_incoming_body/encrypted_note_log_incoming_body.test.ts index 939ca41ea65..d6a3dbfd110 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_incoming_body/encrypted_note_log_incoming_body.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_incoming_body/encrypted_note_log_incoming_body.test.ts @@ -1,5 +1,6 @@ import { Fr, GrumpkinScalar } from '@aztec/circuits.js'; import { Grumpkin } from '@aztec/circuits.js/barretenberg'; +import { NoteSelector } from '@aztec/foundation/abi'; import { updateInlineTestData } from '@aztec/foundation/testing'; import { Note } from '../payload.js'; @@ -20,10 +21,10 @@ describe('encrypt log incoming body', () => { const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); const note = Note.random(); - const noteTypeId = Fr.random(); const storageSlot = Fr.random(); + const noteTypeId = 
NoteSelector.random(); - const body = new EncryptedNoteLogIncomingBody(noteTypeId, storageSlot, note); + const body = new EncryptedNoteLogIncomingBody(storageSlot, noteTypeId, note); const encrypted = body.computeCiphertext(ephSecretKey, viewingPubKey); @@ -44,7 +45,7 @@ describe('encrypt log incoming body', () => { const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); const note = new Note([new Fr(1), new Fr(2), new Fr(3)]); - const noteTypeId = new Fr(1); + const noteTypeId = new NoteSelector(1); const storageSlot = new Fr(2); const body = new EncryptedNoteLogIncomingBody(storageSlot, noteTypeId, note); diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_incoming_body/encrypted_note_log_incoming_body.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_incoming_body/encrypted_note_log_incoming_body.ts index 83bd9edb479..2edaba57db4 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_incoming_body/encrypted_note_log_incoming_body.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_incoming_body/encrypted_note_log_incoming_body.ts @@ -1,11 +1,12 @@ import { Fr, type GrumpkinPrivateKey, type PublicKey } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { Note } from '../payload.js'; import { EncryptedLogIncomingBody } from './encrypted_log_incoming_body.js'; export class EncryptedNoteLogIncomingBody extends EncryptedLogIncomingBody { - constructor(public storageSlot: Fr, public noteTypeId: Fr, public note: Note) { + constructor(public storageSlot: Fr, public noteTypeId: NoteSelector, public note: Note) { super(); } @@ -16,7 +17,8 @@ export class EncryptedNoteLogIncomingBody extends EncryptedLogIncomingBody { */ public toBuffer(): Buffer { const noteBufferWithoutLength = this.note.toBuffer().subarray(4); - return serializeToBuffer(this.storageSlot, 
this.noteTypeId, noteBufferWithoutLength); + // Note: We serialize note type to field first because that's how it's done in Noir + return serializeToBuffer(this.storageSlot, this.noteTypeId.toField(), noteBufferWithoutLength); } /** @@ -28,7 +30,7 @@ export class EncryptedNoteLogIncomingBody extends EncryptedLogIncomingBody { public static fromBuffer(buf: Buffer): EncryptedNoteLogIncomingBody { const reader = BufferReader.asReader(buf); const storageSlot = Fr.fromBuffer(reader); - const noteTypeId = Fr.fromBuffer(reader); + const noteTypeId = NoteSelector.fromField(Fr.fromBuffer(reader)); // 2 Fields (storage slot and note type id) are not included in the note buffer const fieldsInNote = reader.getLength() / 32 - 2; diff --git a/yarn-project/circuit-types/src/logs/l1_payload/l1_event_payload.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/l1_event_payload.test.ts index ea7f49391b6..1598b9c02f1 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/l1_event_payload.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/l1_event_payload.test.ts @@ -1,4 +1,5 @@ import { AztecAddress, KeyValidationRequest, computeOvskApp, derivePublicKeyFromSecretKey } from '@aztec/circuits.js'; +import { EventSelector } from '@aztec/foundation/abi'; import { pedersenHash } from '@aztec/foundation/crypto'; import { Fr, GrumpkinScalar } from '@aztec/foundation/fields'; @@ -29,7 +30,7 @@ describe('L1 Event Payload', () => { randomness = Fr.random(); maskedContractAddress = pedersenHash([contractAddress, randomness], 0); - payload = new L1EventPayload(Event.random(), contractAddress, randomness, Fr.random()); + payload = new L1EventPayload(Event.random(), contractAddress, randomness, EventSelector.random()); ovskM = GrumpkinScalar.random(); ivskM = GrumpkinScalar.random(); diff --git a/yarn-project/circuit-types/src/logs/l1_payload/l1_event_payload.ts b/yarn-project/circuit-types/src/logs/l1_payload/l1_event_payload.ts index e3cd80ba061..741a0128475 100644 --- 
a/yarn-project/circuit-types/src/logs/l1_payload/l1_event_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/l1_event_payload.ts @@ -1,4 +1,5 @@ import { AztecAddress, type GrumpkinPrivateKey, type KeyValidationRequest, type PublicKey } from '@aztec/circuits.js'; +import { EventSelector } from '@aztec/foundation/abi'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -25,9 +26,9 @@ export class L1EventPayload extends L1Payload { */ public randomness: Fr, /** - * Type identifier for the underlying event, (calculated as a function selector). + * Type identifier for the underlying event. */ - public eventTypeId: Fr, + public eventTypeId: EventSelector, ) { super(); } @@ -43,7 +44,7 @@ export class L1EventPayload extends L1Payload { reader.readObject(Event), reader.readObject(AztecAddress), Fr.fromBuffer(reader), - Fr.fromBuffer(reader), + reader.readObject(EventSelector), ); } @@ -60,7 +61,7 @@ export class L1EventPayload extends L1Payload { * @returns A random L1EventPayload object. 
*/ static random() { - return new L1EventPayload(Event.random(), AztecAddress.random(), Fr.random(), Fr.random()); + return new L1EventPayload(Event.random(), AztecAddress.random(), Fr.random(), EventSelector.random()); } public encrypt(ephSk: GrumpkinPrivateKey, recipient: AztecAddress, ivpk: PublicKey, ovKeys: KeyValidationRequest) { @@ -70,7 +71,7 @@ export class L1EventPayload extends L1Payload { recipient, ivpk, ovKeys, - new EncryptedEventLogIncomingBody(this.randomness, this.eventTypeId, this.event), + new EncryptedEventLogIncomingBody(this.randomness, this.eventTypeId.toField(), this.event), ); } @@ -100,9 +101,13 @@ export class L1EventPayload extends L1Payload { EncryptedEventLogIncomingBody.fromCiphertext, ); + // We instantiate selector before checking the address because instantiating the selector validates that + // the selector is valid (and that's the preferred way of detecting decryption failure). + const selector = EventSelector.fromField(incomingBody.eventTypeId); + this.ensureMatchedMaskedContractAddress(address, incomingBody.randomness, encryptedLog.maskedContractAddress); - return new L1EventPayload(incomingBody.event, address, incomingBody.randomness, incomingBody.eventTypeId); + return new L1EventPayload(incomingBody.event, address, incomingBody.randomness, selector); } /** @@ -131,8 +136,12 @@ export class L1EventPayload extends L1Payload { EncryptedEventLogIncomingBody.fromCiphertext, ); + // We instantiate selector before checking the address because instantiating the selector validates that + // the selector is valid (and that's the preferred way of detecting decryption failure). 
+ const selector = EventSelector.fromField(incomingBody.eventTypeId); + this.ensureMatchedMaskedContractAddress(address, incomingBody.randomness, encryptedLog.maskedContractAddress); - return new L1EventPayload(incomingBody.event, address, incomingBody.randomness, incomingBody.eventTypeId); + return new L1EventPayload(incomingBody.event, address, incomingBody.randomness, selector); } } diff --git a/yarn-project/circuit-types/src/logs/l1_payload/l1_note_payload.ts b/yarn-project/circuit-types/src/logs/l1_payload/l1_note_payload.ts index ee28010c1bf..b0dadca6ffe 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/l1_note_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/l1_note_payload.ts @@ -1,4 +1,5 @@ import { AztecAddress, type GrumpkinPrivateKey, type KeyValidationRequest, type PublicKey } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -28,7 +29,7 @@ export class L1NotePayload extends L1Payload { /** * Type identifier for the underlying note, required to determine how to compute its hash and nullifier. */ - public noteTypeId: Fr, + public noteTypeId: NoteSelector, ) { super(); } @@ -44,7 +45,7 @@ export class L1NotePayload extends L1Payload { reader.readObject(Note), reader.readObject(AztecAddress), Fr.fromBuffer(reader), - Fr.fromBuffer(reader), + reader.readObject(NoteSelector), ); } @@ -62,7 +63,7 @@ export class L1NotePayload extends L1Payload { * @returns A random L1NotePayload object. 
*/ static random(contract = AztecAddress.random()) { - return new L1NotePayload(Note.random(), contract, Fr.random(), Fr.random()); + return new L1NotePayload(Note.random(), contract, Fr.random(), NoteSelector.random()); } public encrypt(ephSk: GrumpkinPrivateKey, recipient: AztecAddress, ivpk: PublicKey, ovKeys: KeyValidationRequest) { diff --git a/yarn-project/circuit-types/src/logs/l1_payload/tagged_log.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/tagged_log.test.ts index c5c7968c965..a7b736db73c 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/tagged_log.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/tagged_log.test.ts @@ -1,4 +1,5 @@ import { AztecAddress, KeyValidationRequest, computeOvskApp, derivePublicKeyFromSecretKey } from '@aztec/circuits.js'; +import { EventSelector } from '@aztec/foundation/abi'; import { pedersenHash } from '@aztec/foundation/crypto'; import { Fr, GrumpkinScalar } from '@aztec/foundation/fields'; @@ -86,7 +87,7 @@ describe('L1 Event Payload', () => { randomness = Fr.random(); maskedContractAddress = pedersenHash([contractAddress, randomness], 0); - const payload = new L1EventPayload(Event.random(), contractAddress, randomness, Fr.random()); + const payload = new L1EventPayload(Event.random(), contractAddress, randomness, EventSelector.random()); ovskM = GrumpkinScalar.random(); ivskM = GrumpkinScalar.random(); diff --git a/yarn-project/circuit-types/src/logs/l1_payload/tagged_log.ts b/yarn-project/circuit-types/src/logs/l1_payload/tagged_log.ts index 4904479acdb..c4d2ec5fe73 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/tagged_log.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/tagged_log.ts @@ -78,8 +78,6 @@ export class TaggedLog { ivsk: GrumpkinPrivateKey, payloadType: typeof L1NotePayload | typeof L1EventPayload = L1NotePayload, ): TaggedLog | undefined { - // Right now heavily abusing that we will likely fail if bad decryption - // as some field will likely end up not 
being in the field etc. try { if (payloadType === L1EventPayload) { const reader = BufferReader.asReader((data as EncryptedL2Log).data); @@ -96,7 +94,17 @@ export class TaggedLog { const payload = L1NotePayload.decryptAsIncoming(reader.readToEnd(), ivsk); return new TaggedLog(payload, incomingTag, outgoingTag); } - } catch (e) { + } catch (e: any) { + // Following error messages are expected to occur when decryption fails + if ( + !e.message.endsWith('is greater or equal to field modulus.') && + !e.message.startsWith('Invalid AztecAddress length') && + !e.message.startsWith('Selector must fit in') && + !e.message.startsWith('Attempted to read beyond buffer length') + ) { + // If we encounter an unexpected error, we rethrow it + throw e; + } return; } } @@ -116,8 +124,6 @@ export class TaggedLog { ovsk: GrumpkinPrivateKey, payloadType: typeof L1NotePayload | typeof L1EventPayload = L1NotePayload, ) { - // Right now heavily abusing that we will likely fail if bad decryption - // as some field will likely end up not being in the field etc. 
try { if (payloadType === L1EventPayload) { const reader = BufferReader.asReader((data as EncryptedL2Log).data); @@ -133,7 +139,17 @@ export class TaggedLog { const payload = L1NotePayload.decryptAsOutgoing(reader.readToEnd(), ovsk); return new TaggedLog(payload, incomingTag, outgoingTag); } - } catch (e) { + } catch (e: any) { + // Following error messages are expected to occur when decryption fails + if ( + !e.message.endsWith('is greater or equal to field modulus.') && + !e.message.startsWith('Invalid AztecAddress length') && + !e.message.startsWith('Selector must fit in') && + !e.message.startsWith('Attempted to read beyond buffer length') + ) { + // If we encounter an unexpected error, we rethrow it + throw e; + } return; } } diff --git a/yarn-project/circuit-types/src/logs/tx_l2_logs.ts b/yarn-project/circuit-types/src/logs/tx_l2_logs.ts index f17004efa7c..90610fa5331 100644 --- a/yarn-project/circuit-types/src/logs/tx_l2_logs.ts +++ b/yarn-project/circuit-types/src/logs/tx_l2_logs.ts @@ -1,4 +1,6 @@ import { + Fr, + type LogHash, MAX_ENCRYPTED_LOGS_PER_TX, MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, @@ -22,6 +24,8 @@ import { type UnencryptedL2Log } from './unencrypted_l2_log.js'; * Data container of logs emitted in 1 tx. */ export abstract class TxL2Logs { + abstract hash(): Buffer; + constructor( /** * An array containing logs emitted in individual function invocations in this tx. 
*/ public readonly functionLogs: FunctionL2Logs[], @@ -94,6 +98,28 @@ export abstract class TxL2Logs): boolean { return isEqual(this, other); } + + /** + * Filter the logs from functions from this TxL2Logs that + * appear in the provided logHashes + * @param logHashes hashes we want to keep + * @param output our aggregation + * @returns our aggregation + */ + public filter(logHashes: LogHash[], output: TxL2Logs): TxL2Logs { + for (const fnLogs of this.functionLogs) { + let include = false; + for (const log of fnLogs.logs) { + if (logHashes.findIndex(lh => lh.value.equals(Fr.fromBuffer(log.getSiloedHash()))) !== -1) { + include = true; + } + } + if (include) { + output.addFunctionLogs([fnLogs]); + } + } + return output; + } } export class UnencryptedTxL2Logs extends TxL2Logs { @@ -156,17 +182,18 @@ export class UnencryptedTxL2Logs extends TxL2Logs { * Note: This is a TS implementation of `computeKernelUnencryptedLogsHash` function in Decoder.sol. See that function documentation * for more details. */ - public hash(): Buffer { - if (this.unrollLogs().length == 0) { + public override hash(): Buffer { + const unrolledLogs = this.unrollLogs(); + if (unrolledLogs.length == 0) { return Buffer.alloc(32); } let flattenedLogs = Buffer.alloc(0); - for (const logsFromSingleFunctionCall of this.unrollLogs()) { + for (const logsFromSingleFunctionCall of unrolledLogs) { flattenedLogs = Buffer.concat([flattenedLogs, logsFromSingleFunctionCall.getSiloedHash()]); } // pad the end of logs with 0s - for (let i = 0; i < MAX_UNENCRYPTED_LOGS_PER_TX - this.unrollLogs().length; i++) { + for (let i = 0; i < MAX_UNENCRYPTED_LOGS_PER_TX - unrolledLogs.length; i++) { flattenedLogs = Buffer.concat([flattenedLogs, Buffer.alloc(32)]); } @@ -234,17 +261,18 @@ export class EncryptedNoteTxL2Logs extends TxL2Logs { * Note: This is a TS implementation of `computeKernelNoteEncryptedLogsHash` function in Decoder.sol. See that function documentation * for more details. 
*/ - public hash(): Buffer { - if (this.unrollLogs().length == 0) { + public override hash(): Buffer { + const unrolledLogs = this.unrollLogs(); + if (unrolledLogs.length == 0) { return Buffer.alloc(32); } let flattenedLogs = Buffer.alloc(0); - for (const logsFromSingleFunctionCall of this.unrollLogs()) { + for (const logsFromSingleFunctionCall of unrolledLogs) { flattenedLogs = Buffer.concat([flattenedLogs, logsFromSingleFunctionCall.hash()]); } // pad the end of logs with 0s - for (let i = 0; i < MAX_NOTE_ENCRYPTED_LOGS_PER_TX - this.unrollLogs().length; i++) { + for (let i = 0; i < MAX_NOTE_ENCRYPTED_LOGS_PER_TX - unrolledLogs.length; i++) { flattenedLogs = Buffer.concat([flattenedLogs, Buffer.alloc(32)]); } @@ -312,17 +340,18 @@ export class EncryptedTxL2Logs extends TxL2Logs { * Note: This is a TS implementation of `computeKernelEncryptedLogsHash` function in Decoder.sol. See that function documentation * for more details. */ - public hash(): Buffer { - if (this.unrollLogs().length == 0) { + public override hash(): Buffer { + const unrolledLogs = this.unrollLogs(); + if (unrolledLogs.length == 0) { return Buffer.alloc(32); } let flattenedLogs = Buffer.alloc(0); - for (const logsFromSingleFunctionCall of this.unrollLogs()) { + for (const logsFromSingleFunctionCall of unrolledLogs) { flattenedLogs = Buffer.concat([flattenedLogs, logsFromSingleFunctionCall.getSiloedHash()]); } // pad the end of logs with 0s - for (let i = 0; i < MAX_ENCRYPTED_LOGS_PER_TX - this.unrollLogs().length; i++) { + for (let i = 0; i < MAX_ENCRYPTED_LOGS_PER_TX - unrolledLogs.length; i++) { flattenedLogs = Buffer.concat([flattenedLogs, Buffer.alloc(32)]); } diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index 1c4989c97b0..99df4ebcfa5 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -19,7 +19,7 @@ import { makeCombinedConstantData, makePublicCallRequest, } from '@aztec/circuits.js/testing'; 
-import { type ContractArtifact } from '@aztec/foundation/abi'; +import { type ContractArtifact, NoteSelector } from '@aztec/foundation/abi'; import { makeTuple } from '@aztec/foundation/array'; import { times } from '@aztec/foundation/collection'; import { randomBytes } from '@aztec/foundation/crypto'; @@ -107,24 +107,50 @@ export const mockTx = ( if (hasLogs) { let i = 1; // 0 used in first nullifier - encryptedLogs.functionLogs.forEach((log, j) => { - // ts complains if we dont check .forPublic here, even though it is defined ^ - if (data.forPublic) { - data.forPublic.end.encryptedLogsHashes[j] = new LogHash( - Fr.fromBuffer(log.hash()), - i++, - new Fr(log.toBuffer().length), - ); - } + let nonRevertibleIndex = 0; + let revertibleIndex = 0; + let functionCount = 0; + encryptedLogs.functionLogs.forEach(functionLog => { + functionLog.logs.forEach(log => { + // ts complains if we dont check .forPublic here, even though it is defined ^ + if (data.forPublic) { + const hash = new LogHash( + Fr.fromBuffer(log.getSiloedHash()), + i++, + // +4 for encoding the length of the buffer + new Fr(log.length + 4), + ); + // make the first log non-revertible + if (functionCount === 0) { + data.forPublic.endNonRevertibleData.encryptedLogsHashes[nonRevertibleIndex++] = hash; + } else { + data.forPublic.end.encryptedLogsHashes[revertibleIndex++] = hash; + } + } + }); + functionCount++; }); - unencryptedLogs.functionLogs.forEach((log, j) => { - if (data.forPublic) { - data.forPublic.end.unencryptedLogsHashes[j] = new LogHash( - Fr.fromBuffer(log.hash()), - i++, - new Fr(log.toBuffer().length), - ); - } + nonRevertibleIndex = 0; + revertibleIndex = 0; + functionCount = 0; + unencryptedLogs.functionLogs.forEach(functionLog => { + functionLog.logs.forEach(log => { + if (data.forPublic) { + const hash = new LogHash( + Fr.fromBuffer(log.getSiloedHash()), + i++, + // +4 for encoding the length of the buffer + new Fr(log.length + 4), + ); + // make the first log non-revertible + if 
(functionCount === 0) { + data.forPublic.endNonRevertibleData.unencryptedLogsHashes[nonRevertibleIndex++] = hash; + } else { + data.forPublic.end.unencryptedLogsHashes[revertibleIndex++] = hash; + } + } + }); + functionCount++; }); } } else { @@ -177,8 +203,10 @@ export const randomContractArtifact = (): ContractArtifact => ({ notes: {}, }); -export const randomContractInstanceWithAddress = (opts: { contractClassId?: Fr } = {}): ContractInstanceWithAddress => - SerializableContractInstance.random(opts).withAddress(AztecAddress.random()); +export const randomContractInstanceWithAddress = ( + opts: { contractClassId?: Fr } = {}, + address: AztecAddress = AztecAddress.random(), +): ContractInstanceWithAddress => SerializableContractInstance.random(opts).withAddress(address); export const randomDeployedContract = () => { const artifact = randomContractArtifact(); @@ -192,7 +220,7 @@ export const randomExtendedNote = ({ contractAddress = AztecAddress.random(), txHash = randomTxHash(), storageSlot = Fr.random(), - noteTypeId = Fr.random(), + noteTypeId = NoteSelector.random(), }: Partial = {}) => { return new ExtendedNote(note, owner, contractAddress, storageSlot, noteTypeId, txHash); }; diff --git a/yarn-project/circuit-types/src/notes/extended_note.ts b/yarn-project/circuit-types/src/notes/extended_note.ts index caee60e8be9..bf91e2dc49d 100644 --- a/yarn-project/circuit-types/src/notes/extended_note.ts +++ b/yarn-project/circuit-types/src/notes/extended_note.ts @@ -1,4 +1,5 @@ import { AztecAddress, Fr } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { BufferReader } from '@aztec/foundation/serialize'; import { Note } from '../logs/l1_payload/payload.js'; @@ -18,7 +19,7 @@ export class ExtendedNote { /** The specific storage location of the note on the contract. */ public storageSlot: Fr, /** The type identifier of the note on the contract. 
*/ - public noteTypeId: Fr, + public noteTypeId: NoteSelector, /** The hash of the tx the note was created in. */ public txHash: TxHash, ) {} @@ -33,6 +34,7 @@ export class ExtendedNote { this.txHash.buffer, ]); } + static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); @@ -40,7 +42,7 @@ export class ExtendedNote { const owner = AztecAddress.fromBuffer(reader); const contractAddress = AztecAddress.fromBuffer(reader); const storageSlot = Fr.fromBuffer(reader); - const noteTypeId = Fr.fromBuffer(reader); + const noteTypeId = reader.readObject(NoteSelector); const txHash = new TxHash(reader.readBytes(TxHash.SIZE)); return new this(note, owner, contractAddress, storageSlot, noteTypeId, txHash); diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index cf6bb977423..d20983f261b 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -27,6 +27,8 @@ import { makeEmptyProof, } from '@aztec/circuits.js'; +import { type CircuitName } from '../stats/stats.js'; + /** * Used to communicate to the prover which type of circuit to prove */ @@ -160,9 +162,9 @@ export function makeProcessedTx( data: kernelOutput, proof, // TODO(4712): deal with non-revertible logs here - noteEncryptedLogs: revertReason ? EncryptedNoteTxL2Logs.empty() : tx.noteEncryptedLogs, - encryptedLogs: revertReason ? EncryptedTxL2Logs.empty() : tx.encryptedLogs, - unencryptedLogs: revertReason ? 
UnencryptedTxL2Logs.empty() : tx.unencryptedLogs, + noteEncryptedLogs: tx.noteEncryptedLogs, + encryptedLogs: tx.encryptedLogs, + unencryptedLogs: tx.unencryptedLogs, isEmpty: false, revertReason, publicProvingRequests, @@ -304,3 +306,18 @@ export function validateProcessedTx(tx: ProcessedTx): void { validateProcessedTxLogs(tx); // TODO: validate other fields } + +export function mapPublicKernelToCircuitName(kernelType: PublicKernelRequest['type']): CircuitName { + switch (kernelType) { + case PublicKernelType.SETUP: + return 'public-kernel-setup'; + case PublicKernelType.APP_LOGIC: + return 'public-kernel-app-logic'; + case PublicKernelType.TEARDOWN: + return 'public-kernel-teardown'; + case PublicKernelType.TAIL: + return 'public-kernel-tail'; + default: + throw new Error(`Unknown kernel type: ${kernelType}`); + } +} diff --git a/yarn-project/circuit-types/src/tx/tx.ts b/yarn-project/circuit-types/src/tx/tx.ts index d10e8fdb5f6..8cb40f57d62 100644 --- a/yarn-project/circuit-types/src/tx/tx.ts +++ b/yarn-project/circuit-types/src/tx/tx.ts @@ -3,6 +3,7 @@ import { PrivateKernelTailCircuitPublicInputs, Proof, PublicCallRequest, + type PublicKernelCircuitPublicInputs, } from '@aztec/circuits.js'; import { arraySerializedSizeOfNonEmpty } from '@aztec/foundation/collection'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -29,15 +30,15 @@ export class Tx { /** * Encrypted note logs generated by the tx. */ - public readonly noteEncryptedLogs: EncryptedNoteTxL2Logs, + public noteEncryptedLogs: EncryptedNoteTxL2Logs, /** * Encrypted logs generated by the tx. */ - public readonly encryptedLogs: EncryptedTxL2Logs, + public encryptedLogs: EncryptedTxL2Logs, /** * Unencrypted logs generated by the tx. */ - public readonly unencryptedLogs: UnencryptedTxL2Logs, + public unencryptedLogs: UnencryptedTxL2Logs, /** * Enqueued public functions from the private circuit to be run by the sequencer. 
* Preimages of the public call stack entries from the private kernel circuit output. @@ -249,6 +250,37 @@ publicTeardownFunctionCall, ); } + + /** + * Filters out logs from functions that are not present in the provided kernel output. + * + * The purpose of this is to remove logs that got dropped due to a revert, + * in which case, we only have the kernel's hashes to go on, as opposed to + * this grouping by function maintained in this class. + * + * The logic therefore is to drop all FunctionLogs if any constituent hash + * does not appear in the provided hashes: it is impossible for part of a + * function to revert. + * + * @param kernelOutput the public kernel output whose non-revertible log hashes identify the logs to keep + */ + public filterRevertedLogs(kernelOutput: PublicKernelCircuitPublicInputs) { + this.encryptedLogs = this.encryptedLogs.filter( + kernelOutput.endNonRevertibleData.encryptedLogsHashes, + EncryptedTxL2Logs.empty(), + ); + + this.unencryptedLogs = this.unencryptedLogs.filter( + kernelOutput.endNonRevertibleData.unencryptedLogsHashes, + UnencryptedTxL2Logs.empty(), + ); + + this.noteEncryptedLogs = this.noteEncryptedLogs.filter( + kernelOutput.endNonRevertibleData.noteEncryptedLogsHashes, + EncryptedNoteTxL2Logs.empty(), + ); + } } /** Utility type for an entity that has a hash property for a txhash */ diff --git a/yarn-project/circuits.js/package.json b/yarn-project/circuits.js/package.json index 84f9e873016..524b4a62210 100644 --- a/yarn-project/circuits.js/package.json +++ b/yarn-project/circuits.js/package.json @@ -72,7 +72,15 @@ ], "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "moduleNameMapper": { diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 77ce004e452..f226f6d2e16 100644 ---
a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -206,13 +206,14 @@ export enum GeneratorIndex { FUNCTION_ARGS = 44, AUTHWIT_INNER = 45, AUTHWIT_OUTER = 46, - NSK_M = 47, - IVSK_M = 48, - OVSK_M = 49, - TSK_M = 50, - PUBLIC_KEYS_HASH = 51, - NOTE_NULLIFIER = 52, - INNER_NOTE_HASH = 53, - NOTE_CONTENT_HASH = 54, - SYMMETRIC_KEY = 55, + AUTHWIT_NULLIFIER = 47, + NSK_M = 48, + IVSK_M = 49, + OVSK_M = 50, + TSK_M = 51, + PUBLIC_KEYS_HASH = 52, + NOTE_NULLIFIER = 53, + INNER_NOTE_HASH = 54, + NOTE_CONTENT_HASH = 55, + SYMMETRIC_KEY = 56, } diff --git a/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap b/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap index 2d413b4089c..10e93a7af31 100644 --- a/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap +++ b/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap @@ -1,6 +1,6 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`ContractAddress computeContractAddressFromInstance 1`] = `"0x0bed63221d281713007bfb0c063e1f61d0646404fb3701b99bb92f41b6390604"`; +exports[`ContractAddress computeContractAddressFromInstance 1`] = `"0x2a192ee63791ad5e219b63db872bf54ba245afbc2c1287f4ba036b8f58fad740"`; exports[`ContractAddress computeInitializationHash 1`] = `Fr<0x109865e4b959adba34b722e72a69baaf9ee78e31bb1042318f0d91006ed86780>`; diff --git a/yarn-project/circuits.js/src/keys/derivation.test.ts b/yarn-project/circuits.js/src/keys/derivation.test.ts index f41aa0c0d3f..c3c1e0bb59d 100644 --- a/yarn-project/circuits.js/src/keys/derivation.test.ts +++ b/yarn-project/circuits.js/src/keys/derivation.test.ts @@ -11,7 +11,7 @@ describe('🔑', () => { const masterOutgoingViewingPublicKey = new Point(new Fr(5), new Fr(6)); const masterTaggingPublicKey = new Point(new Fr(7), new Fr(8)); - const expected = 
Fr.fromString('0x1936abe4f6a920d16a9f6917f10a679507687e2cd935dd1f1cdcb1e908c027f3'); + const expected = Fr.fromString('0x2406c1c88b7afc13052335bb9af43fd35034b5ba0a9caab76eda2833cf8ec717'); expect( new PublicKeys( masterNullifierPublicKey, diff --git a/yarn-project/circuits.js/src/structs/avm/avm.ts b/yarn-project/circuits.js/src/structs/avm/avm.ts index f33335f800c..907e41ad4f2 100644 --- a/yarn-project/circuits.js/src/structs/avm/avm.ts +++ b/yarn-project/circuits.js/src/structs/avm/avm.ts @@ -243,6 +243,7 @@ export class AvmContractInstanceHint { } } +// TODO(dbanks12): rename AvmCircuitHints export class AvmExecutionHints { public readonly storageValues: Vector; public readonly noteHashExists: Vector; @@ -267,6 +268,14 @@ export class AvmExecutionHints { this.contractInstances = new Vector(contractInstances); } + /** + * Return an empty instance. + * @returns an empty instance. + */ + empty() { + return new AvmExecutionHints([], [], [], [], [], []); + } + /** * Serializes the inputs to a buffer. * @returns - The inputs serialized to a buffer. 
diff --git a/yarn-project/circuits.js/src/structs/complete_address.test.ts b/yarn-project/circuits.js/src/structs/complete_address.test.ts index 25c0de180c8..4fd6d528283 100644 --- a/yarn-project/circuits.js/src/structs/complete_address.test.ts +++ b/yarn-project/circuits.js/src/structs/complete_address.test.ts @@ -38,11 +38,11 @@ describe('CompleteAddress', () => { // docs:start:instantiate-complete-address // Typically a recipient would share their complete address with the sender const completeAddressFromString = CompleteAddress.fromString( - '0x09bc7031bb21627cce6aac1dc710ecc92acd8475149c530a4bb57df63d9d6fe902a9372135ce5b49b46102732fabd742c31642543396013dde5b460075864607264c605bc115c6cb92a4db0a6b893fd3777341078693d0af22e3ff53f4c2ee2a2fae73914fc50d325e2707a8e996f1ad498429f715f998225dc6bd2ede05aaee055ee137d28b634322e0ea98afc42dfc48833e8d2879c34d23d6d1d337069cca212af0f28b7865b339e202a0077fd3bd8dddc472d055945ad99c02dcccd28bb22bb3585fca3e5751c9913521a390458d63e4d9b292e4872582f3b13da214470c14083a4567cf4f1e92696e6c01923bc6a8b414159446268b12fe8669ce44f1f5196561aca6c654d2405a5653002cba5552b50b6ce1afc9515ed6682507abcb3010040d791aeb30138efc9c7d36b47684af2f26f686672448349f05934ae7bbbf', + '0x1de12596818ab6bc3584b943f791b206ff588d3c307358ab6918f59ed7d381bc02a9372135ce5b49b46102732fabd742c31642543396013dde5b460075864607264c605bc115c6cb92a4db0a6b893fd3777341078693d0af22e3ff53f4c2ee2a2fae73914fc50d325e2707a8e996f1ad498429f715f998225dc6bd2ede05aaee055ee137d28b634322e0ea98afc42dfc48833e8d2879c34d23d6d1d337069cca212af0f28b7865b339e202a0077fd3bd8dddc472d055945ad99c02dcccd28bb22bb3585fca3e5751c9913521a390458d63e4d9b292e4872582f3b13da214470c14083a4567cf4f1e92696e6c01923bc6a8b414159446268b12fe8669ce44f1f5196561aca6c654d2405a5653002cba5552b50b6ce1afc9515ed6682507abcb3010040d791aeb30138efc9c7d36b47684af2f26f686672448349f05934ae7bbbf', ); // Alternatively, a recipient could share the individual components with the sender - const address = 
Fr.fromString('0x09bc7031bb21627cce6aac1dc710ecc92acd8475149c530a4bb57df63d9d6fe9'); + const address = Fr.fromString('0x1de12596818ab6bc3584b943f791b206ff588d3c307358ab6918f59ed7d381bc'); const npkM = Point.fromString( '0x02a9372135ce5b49b46102732fabd742c31642543396013dde5b460075864607264c605bc115c6cb92a4db0a6b893fd3777341078693d0af22e3ff53f4c2ee2a', ); diff --git a/yarn-project/circuits.js/src/structs/contract_storage_read.ts b/yarn-project/circuits.js/src/structs/contract_storage_read.ts index 5a679a75bf7..56f0f95aa1d 100644 --- a/yarn-project/circuits.js/src/structs/contract_storage_read.ts +++ b/yarn-project/circuits.js/src/structs/contract_storage_read.ts @@ -23,30 +23,19 @@ export class ContractStorageRead { /** * Side effect counter tracking position of this event in tx execution. */ - public readonly sideEffectCounter: number, + public readonly counter: number, + /** + * Contract address whose storage is being read. + */ public contractAddress?: AztecAddress, // TODO: Should not be optional. This is a temporary hack to silo the storage slot with the correct address for nested executions. ) {} - static from(args: { - /** - * Storage slot we are reading from. - */ - storageSlot: Fr; - /** - * Value read from the storage slot. - */ - currentValue: Fr; - /** - * Side effect counter tracking position of this event in tx execution. 
- */ - sideEffectCounter: number; - contractAddress?: AztecAddress; - }) { - return new ContractStorageRead(args.storageSlot, args.currentValue, args.sideEffectCounter, args.contractAddress); + static from(args: { storageSlot: Fr; currentValue: Fr; counter: number; contractAddress?: AztecAddress }) { + return new ContractStorageRead(args.storageSlot, args.currentValue, args.counter, args.contractAddress); } toBuffer() { - return serializeToBuffer(this.storageSlot, this.currentValue, new Fr(this.sideEffectCounter)); + return serializeToBuffer(this.storageSlot, this.currentValue, new Fr(this.counter)); } static fromBuffer(buffer: Buffer | BufferReader) { @@ -59,7 +48,7 @@ export class ContractStorageRead { } isEmpty() { - return this.storageSlot.isZero() && this.currentValue.isZero() && this.sideEffectCounter == 0; + return this.storageSlot.isZero() && this.currentValue.isZero() && this.counter == 0; } toFriendlyJSON() { @@ -67,7 +56,7 @@ export class ContractStorageRead { } toFields(): Fr[] { - const fields = [this.storageSlot, this.currentValue, new Fr(this.sideEffectCounter)]; + const fields = [this.storageSlot, this.currentValue, new Fr(this.counter)]; if (fields.length !== CONTRACT_STORAGE_READ_LENGTH) { throw new Error( `Invalid number of fields for ContractStorageRead. 
Expected ${CONTRACT_STORAGE_READ_LENGTH}, got ${fields.length}`, @@ -81,8 +70,8 @@ const storageSlot = reader.readField(); const currentValue = reader.readField(); - const sideEffectCounter = reader.readField().toNumber(); + const counter = reader.readField().toNumber(); - return new ContractStorageRead(storageSlot, currentValue, sideEffectCounter); + return new ContractStorageRead(storageSlot, currentValue, counter); } } diff --git a/yarn-project/circuits.js/src/structs/contract_storage_update_request.ts b/yarn-project/circuits.js/src/structs/contract_storage_update_request.ts index 04be2dd24f9..4d7d3d665c0 100644 --- a/yarn-project/circuits.js/src/structs/contract_storage_update_request.ts +++ b/yarn-project/circuits.js/src/structs/contract_storage_update_request.ts @@ -22,14 +22,17 @@ */ public readonly newValue: Fr, /** - * Optional side effect counter tracking position of this event in tx execution. + * Side effect counter tracking position of this event in tx execution. + */ + public readonly counter: number, + /** + * Contract address whose storage is being written. */ - public readonly sideEffectCounter: number, public contractAddress?: AztecAddress, // TODO: Should not be optional. This is a temporary hack to silo the storage slot with the correct address for nested executions. ) {} toBuffer() { - return serializeToBuffer(this.storageSlot, this.newValue, this.sideEffectCounter); + return serializeToBuffer(this.storageSlot, this.newValue, this.counter); } static fromBuffer(buffer: Buffer | BufferReader) { @@ -52,7 +55,7 @@ * @returns The array.
*/ static getFields(fields: FieldsOf) { - return [fields.storageSlot, fields.newValue, fields.sideEffectCounter, fields.contractAddress] as const; + return [fields.storageSlot, fields.newValue, fields.counter, fields.contractAddress] as const; } static empty() { @@ -65,12 +68,12 @@ export class ContractStorageUpdateRequest { toFriendlyJSON() { return `Slot=${this.storageSlot.toFriendlyJSON()}: ${this.newValue.toFriendlyJSON()}, sideEffectCounter=${ - this.sideEffectCounter + this.counter }`; } toFields(): Fr[] { - const fields = [this.storageSlot, this.newValue, new Fr(this.sideEffectCounter)]; + const fields = [this.storageSlot, this.newValue, new Fr(this.counter)]; if (fields.length !== CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH) { throw new Error( `Invalid number of fields for ContractStorageUpdateRequest. Expected ${CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH}, got ${fields.length}`, diff --git a/yarn-project/circuits.js/src/structs/public_call_stack_item.ts b/yarn-project/circuits.js/src/structs/public_call_stack_item.ts index 3223909d287..170f3dc84b6 100644 --- a/yarn-project/circuits.js/src/structs/public_call_stack_item.ts +++ b/yarn-project/circuits.js/src/structs/public_call_stack_item.ts @@ -90,15 +90,24 @@ export class PublicCallStackItem { * @returns Hash. */ public hash() { + let publicInputsToHash = this.publicInputs; if (this.isExecutionRequest) { + // An execution request (such as an enqueued call from private) is hashed with + // only the publicInput members present in a PublicCallRequest. + // This allows us to check that the request (which is created/hashed before + // side-effects and output info are unknown for public calls) matches the call + // being processed by a kernel iteration. 
+ // WARNING: This subset of publicInputs that is set here must align with + // `parse_public_call_stack_item_from_oracle` in enqueue_public_function_call.nr + // and `PublicCallStackItem::as_execution_request()` in public_call_stack_item.ts const { callContext, argsHash } = this.publicInputs; - this.publicInputs = PublicCircuitPublicInputs.empty(); - this.publicInputs.callContext = callContext; - this.publicInputs.argsHash = argsHash; + publicInputsToHash = PublicCircuitPublicInputs.empty(); + publicInputsToHash.callContext = callContext; + publicInputsToHash.argsHash = argsHash; } return pedersenHash( - [this.contractAddress, this.functionData.hash(), this.publicInputs.hash()], + [this.contractAddress, this.functionData.hash(), publicInputsToHash.hash()], GeneratorIndex.CALL_STACK_ITEM, ); } diff --git a/yarn-project/cli/package.json b/yarn-project/cli/package.json index 007477eecff..303d4d9c336 100644 --- a/yarn-project/cli/package.json +++ b/yarn-project/cli/package.json @@ -37,7 +37,15 @@ ], "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "reporters": [ diff --git a/yarn-project/cli/src/cmds/add_note.ts b/yarn-project/cli/src/cmds/add_note.ts index f6359bd5c1c..68debccd90c 100644 --- a/yarn-project/cli/src/cmds/add_note.ts +++ b/yarn-project/cli/src/cmds/add_note.ts @@ -1,4 +1,4 @@ -import { type AztecAddress, type Fr } from '@aztec/aztec.js'; +import { type AztecAddress, type Fr, type NoteSelector } from '@aztec/aztec.js'; import { ExtendedNote, Note, type TxHash } from '@aztec/circuit-types'; import { type DebugLogger } from '@aztec/foundation/log'; @@ -9,7 +9,7 @@ export async function addNote( address: AztecAddress, contractAddress: AztecAddress, storageSlot: Fr, - noteTypeId: Fr, + noteTypeId: NoteSelector, txHash: TxHash, noteFields: string[], rpcUrl: string, diff --git a/yarn-project/cli/src/inspect.ts b/yarn-project/cli/src/inspect.ts index 
f8ff880f1ee..53c424680fc 100644 --- a/yarn-project/cli/src/inspect.ts +++ b/yarn-project/cli/src/inspect.ts @@ -142,7 +142,7 @@ export async function inspectTx( function inspectNote(note: ExtendedNote, artifactMap: ArtifactMap, log: LogFn, text = 'Note') { const artifact = artifactMap[note.contractAddress.toString()]; const contract = artifact?.name ?? note.contractAddress.toString(); - const type = artifact?.notes[note.noteTypeId.toString()]?.typ ?? note.noteTypeId.toShortString(); + const type = artifact?.notes[note.noteTypeId.toString()]?.typ ?? note.noteTypeId.toField().toShortString(); log(` ${text} type ${type} at ${contract}`); log(` Owner: ${toFriendlyAddress(note.owner, artifactMap)}`); for (const field of note.note.items) { diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index b4c138967f7..6444cf51694 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -40,6 +40,7 @@ "@aztec/pxe": "workspace:^", "@aztec/sequencer-client": "workspace:^", "@aztec/simulator": "workspace:^", + "@aztec/telemetry-client": "workspace:^", "@aztec/types": "workspace:^", "@aztec/world-state": "workspace:^", "@jest/globals": "^29.5.0", @@ -79,7 +80,6 @@ "ts-loader": "^9.4.4", "ts-node": "^10.9.1", "tslib": "^2.4.0", - "tty-browserify": "^0.0.1", "typescript": "^5.0.4", "util": "^0.12.5", "viem": "^2.7.15", @@ -116,7 +116,15 @@ ], "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "reporters": [ diff --git a/yarn-project/end-to-end/src/composed/e2e_aztec_js_browser.test.ts b/yarn-project/end-to-end/src/composed/e2e_aztec_js_browser.test.ts index d96397f05c8..1d465504b8b 100644 --- a/yarn-project/end-to-end/src/composed/e2e_aztec_js_browser.test.ts +++ b/yarn-project/end-to-end/src/composed/e2e_aztec_js_browser.test.ts @@ -25,10 +25,12 @@ const pageLogger = 
createDebugLogger('aztec:e2e_aztec_browser.js:web:page'); * 2) go to `yarn-project/end-to-end` and build the web packed package with `yarn build:web`, * 3) start anvil: `anvil`, * 4) if you intend to use a remotely running environment then export the URL of your PXE e.g. `export PXE_URL='http://localhost:8080'` - * 7) go to `yarn-project/end-to-end` and run the test: `yarn test aztec_js_browser` + * 5) go to `yarn-project/end-to-end` and run the test: `yarn test aztec_js_browser` + * 6) If you get dependency error run `apt install libssn3 libatk1.0-0 libatk-bridge2.0-0 libcups2 libxdamage1 libxkbcommon0 libpango-1.0-0 libcairo2`. * - * NOTE: If you see the logs spammed with unexpected logs there is probably a chrome process with a webpage + * NOTE 1: If you see the logs spammed with unexpected logs there is probably a chrome process with a webpage * unexpectedly running in the background. Kill it with `killall chrome` + * NOTE 2: Don't forget to run `yarn build:web` once you make changes! 
*/ const setupApp = async () => { diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 4445829658c..17340f06675 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -40,6 +40,7 @@ import { AvailabilityOracleAbi, InboxAbi, OutboxAbi, RollupAbi } from '@aztec/l1 import { SHA256Trunc, StandardTree } from '@aztec/merkle-tree'; import { TxProver } from '@aztec/prover-client'; import { type L1Publisher, getL1Publisher } from '@aztec/sequencer-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { MerkleTrees, ServerWorldStateSynchronizer, type WorldStateConfig } from '@aztec/world-state'; import { beforeEach, describe, expect, it } from '@jest/globals'; @@ -145,7 +146,7 @@ describe('L1Publisher integration', () => { }; const worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, worldStateConfig); await worldStateSynchronizer.start(); - builder = await TxProver.new(config, getMockVerificationKeys(), worldStateSynchronizer); + builder = await TxProver.new(config, getMockVerificationKeys(), worldStateSynchronizer, new NoopTelemetryClient()); l2Proof = makeEmptyProof(); publisher = getL1Publisher({ diff --git a/yarn-project/end-to-end/src/e2e_authwit.test.ts b/yarn-project/end-to-end/src/e2e_authwit.test.ts index 8200ce331b7..471aca74685 100644 --- a/yarn-project/end-to-end/src/e2e_authwit.test.ts +++ b/yarn-project/end-to-end/src/e2e_authwit.test.ts @@ -1,5 +1,5 @@ -import { type AccountWallet, Fr, computeInnerAuthWitHash, computeOuterAuthWitHash } from '@aztec/aztec.js'; -import { AuthRegistryContract, SchnorrAccountContract } from '@aztec/noir-contracts.js'; +import { type AccountWallet, Fr, computeAuthWitMessageHash, computeInnerAuthWitHash } from '@aztec/aztec.js'; +import { 
AuthRegistryContract, AuthWitTestContract } from '@aztec/noir-contracts.js'; import { getCanonicalAuthRegistry } from '@aztec/protocol-contracts/auth-registry'; import { jest } from '@jest/globals'; @@ -16,6 +16,7 @@ describe('e2e_authwit_tests', () => { let chainId: Fr; let version: Fr; + let auth: AuthWitTestContract; beforeAll(async () => { ({ wallets } = await setup(2)); @@ -26,164 +27,122 @@ describe('e2e_authwit_tests', () => { const nodeInfo = await wallets[0].getNodeInfo(); chainId = new Fr(nodeInfo.chainId); version = new Fr(nodeInfo.protocolVersion); + + auth = await AuthWitTestContract.deploy(wallets[0]).send().deployed(); }); describe('Private', () => { describe('arbitrary data', () => { it('happy path', async () => { + // What are we doing here: + // 1. We compute an inner hash which is here just a hash of random data + // 2. We then compute the outer, which is binding it to a "consumer" (here the "auth" contract) + // 3. We then create an authwit for this outer hash. + // 4. We add this authwit to the wallet[1] + // 5. We check that the authwit is valid in private for wallet[0] (check that it is signed by 0) + // 6. 
We check that the authwit is NOT valid in private for wallet[1] (check that it is not signed by 1) + // docs:start:compute_inner_authwit_hash const innerHash = computeInnerAuthWitHash([Fr.fromString('0xdead')]); // docs:end:compute_inner_authwit_hash // docs:start:compute_outer_authwit_hash - const outerHash = computeOuterAuthWitHash(wallets[1].getAddress(), chainId, version, innerHash); + + const intent = { consumer: auth.address, innerHash }; // docs:end:compute_outer_authwit_hash // docs:start:create_authwit - const witness = await wallets[0].createAuthWit(outerHash); + const witness = await wallets[0].createAuthWit(intent); // docs:end:create_authwit await wallets[1].addAuthWitness(witness); // Check that the authwit is valid in private for wallets[0] - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: true, isValidInPublic: false, }); // Check that the authwit is NOT valid in private for wallets[1] - expect(await wallets[0].lookupValidity(wallets[1].getAddress(), outerHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[1].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - const c = await SchnorrAccountContract.at(wallets[0].getAddress(), wallets[0]); - await c.withWallet(wallets[1]).methods.spend_private_authwit(innerHash).send().wait(); + // Consume the inner hash using the wallets[0] as the "on behalf of". 
+ await auth.withWallet(wallets[1]).methods.consume(wallets[0].getAddress(), innerHash).send().wait(); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - }); + // Try to consume the same authwit again, it should fail + await expect( + auth.withWallet(wallets[1]).methods.consume(wallets[0].getAddress(), innerHash).send().wait(), + ).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); + }); describe('failure case', () => { - it('cancel before usage', async () => { - const innerHash = computeInnerAuthWitHash([Fr.fromString('0xdead'), Fr.fromString('0xbeef')]); - const outerHash = computeOuterAuthWitHash(wallets[1].getAddress(), chainId, version, innerHash); - - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: false, - isValidInPublic: false, - }); - - const witness = await wallets[0].createAuthWit(outerHash); - await wallets[1].addAuthWitness(witness); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: true, - isValidInPublic: false, - }); - await wallets[0].cancelAuthWit(outerHash).send().wait(); - - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: false, - isValidInPublic: false, - }); - - const c = await SchnorrAccountContract.at(wallets[0].getAddress(), wallets[0]); - const txCancelledAuthwit = c.withWallet(wallets[1]).methods.spend_private_authwit(innerHash).send(); - - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: false, - isValidInPublic: false, - }); - - // The transaction should be dropped because of a cancelled authwit (duplicate nullifier) - await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); - }); - it('invalid chain id', async () 
=> { - const invalidChainId = Fr.random(); - const innerHash = computeInnerAuthWitHash([Fr.fromString('0xdead'), Fr.fromString('0xbeef')]); - const outerHash = computeOuterAuthWitHash(wallets[1].getAddress(), invalidChainId, version, innerHash); - const outerCorrectHash = computeOuterAuthWitHash(wallets[1].getAddress(), chainId, version, innerHash); + const intent = { consumer: auth.address, innerHash }; - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: false, - isValidInPublic: false, - }); + const messageHash = computeAuthWitMessageHash(intent, { chainId: Fr.random(), version }); + const expectedMessageHash = computeAuthWitMessageHash(intent, { chainId, version }); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerCorrectHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - const witness = await wallets[0].createAuthWit(outerHash); + const witness = await wallets[0].createAuthWit(messageHash); await wallets[1].addAuthWitness(witness); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: true, - isValidInPublic: false, - }); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerCorrectHash)).toEqual({ + + // We should NOT see it as valid, even though we have the authwit, since the chain id is wrong + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - const c = await SchnorrAccountContract.at(wallets[0].getAddress(), wallets[0]); - const txCancelledAuthwit = c.withWallet(wallets[1]).methods.spend_private_authwit(innerHash).send(); + // The transaction should be dropped because of the invalid chain id + await expect( + auth.withWallet(wallets[1]).methods.consume(wallets[0].getAddress(), innerHash).simulate(), + 
).rejects.toThrow(`Unknown auth witness for message hash ${expectedMessageHash.toString()}`); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: true, - isValidInPublic: false, - }); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerCorrectHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - - // The transaction should be dropped because of the invalid chain id - await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); it('invalid version', async () => { - const invalidVersion = Fr.random(); - const innerHash = computeInnerAuthWitHash([Fr.fromString('0xdead'), Fr.fromString('0xbeef')]); - const outerHash = computeOuterAuthWitHash(wallets[1].getAddress(), chainId, invalidVersion, innerHash); - const outerCorrectHash = computeOuterAuthWitHash(wallets[1].getAddress(), chainId, version, innerHash); + const intent = { consumer: auth.address, innerHash }; - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: false, - isValidInPublic: false, - }); + const messageHash = computeAuthWitMessageHash(intent, { chainId, version: Fr.random() }); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerCorrectHash)).toEqual({ + const expectedMessageHash = computeAuthWitMessageHash(intent, { chainId, version }); + + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - const witness = await wallets[0].createAuthWit(outerHash); + const witness = await wallets[0].createAuthWit(messageHash); await wallets[1].addAuthWitness(witness); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: true, - isValidInPublic: false, - }); - expect(await 
wallets[0].lookupValidity(wallets[0].getAddress(), outerCorrectHash)).toEqual({ + + // We should NOT see it as valid, even though we have the authwit, since the version is wrong + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - const c = await SchnorrAccountContract.at(wallets[0].getAddress(), wallets[0]); - const txCancelledAuthwit = c.withWallet(wallets[1]).methods.spend_private_authwit(innerHash).send(); + // The transaction should be dropped because of the invalid version + await expect( + auth.withWallet(wallets[1]).methods.consume(wallets[0].getAddress(), innerHash).simulate(), + ).rejects.toThrow(`Unknown auth witness for message hash ${expectedMessageHash.toString()}`); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ - isValidInPrivate: true, - isValidInPublic: false, - }); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerCorrectHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - - // The transaction should be dropped because of the invalid version - await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); }); }); @@ -193,16 +152,18 @@ describe('e2e_authwit_tests', () => { describe('arbitrary data', () => { it('happy path', async () => { const innerHash = computeInnerAuthWitHash([Fr.fromString('0xdead'), Fr.fromString('0x01')]); - const outerHash = computeOuterAuthWitHash(wallets[1].getAddress(), chainId, version, innerHash); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ + + const intent = { consumer: wallets[1].getAddress(), innerHash }; + + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); // docs:start:set_public_authwit - await 
wallets[0].setPublicAuthWit(outerHash, true).send().wait(); + await wallets[0].setPublicAuthWit(intent, true).send().wait(); // docs:end:set_public_authwit - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: true, }); @@ -210,7 +171,7 @@ describe('e2e_authwit_tests', () => { const registry = await AuthRegistryContract.at(getCanonicalAuthRegistry().instance.address, wallets[1]); await registry.methods.consume(wallets[0].getAddress(), innerHash).send().wait(); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); @@ -219,23 +180,23 @@ describe('e2e_authwit_tests', () => { describe('failure case', () => { it('cancel before usage', async () => { const innerHash = computeInnerAuthWitHash([Fr.fromString('0xdead'), Fr.fromString('0x02')]); - const outerHash = computeOuterAuthWitHash(wallets[1].getAddress(), chainId, version, innerHash); + const intent = { consumer: auth.address, innerHash }; - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); - await wallets[0].setPublicAuthWit(outerHash, true).send().wait(); + await wallets[0].setPublicAuthWit(intent, true).send().wait(); - expect(await wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: true, }); - await wallets[0].cancelPublicAuthWit(outerHash).send().wait(); + await wallets[0].setPublicAuthWit(intent, false).send().wait(); - expect(await 
wallets[0].lookupValidity(wallets[0].getAddress(), outerHash)).toEqual({ + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ isValidInPrivate: false, isValidInPublic: false, }); diff --git a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts index cf6cd9fdcb1..ddbcceaca4f 100644 --- a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts +++ b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts @@ -38,7 +38,7 @@ describe('e2e_avm_simulator', () => { }); it('PXE processes failed assertions and fills in the error message with the expression (even complex ones)', async () => { await expect(avmContract.methods.assert_nullifier_exists(123).simulate()).rejects.toThrow( - "Assertion failed: Nullifier doesn't exist! 'context.nullifier_exists(nullifier, context.this_address())'", + "Assertion failed: Nullifier doesn't exist! 'context.nullifier_exists(nullifier, context.storage_address())'", ); }); }); diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts index 5e336d1b63d..6c6d95ab7be 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts @@ -194,10 +194,8 @@ describe('e2e_blacklist_token_contract burn', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset.withWallet(wallets[1]).methods.burn(wallets[0].getAddress(), amount, nonce); const messageHash = computeAuthWitMessageHash( - wallets[1].getAddress(), - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: wallets[1].getAddress(), action: action.request() }, + { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); await expect(action.prove()).rejects.toThrow(`Unknown auth witness for message hash 
${messageHash.toString()}`); @@ -212,10 +210,8 @@ describe('e2e_blacklist_token_contract burn', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset.withWallet(wallets[2]).methods.burn(wallets[0].getAddress(), amount, nonce); const expectedMessageHash = computeAuthWitMessageHash( - wallets[2].getAddress(), - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: wallets[2].getAddress(), action: action.request() }, + { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); const witness = await wallets[0].createAuthWit({ caller: wallets[1].getAddress(), action }); diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts index ac8176b9e68..75f8c919bad 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts @@ -136,10 +136,8 @@ describe('e2e_blacklist_token_contract transfer private', () => { .withWallet(wallets[1]) .methods.transfer(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce); const messageHash = computeAuthWitMessageHash( - wallets[1].getAddress(), - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: wallets[1].getAddress(), action: action.request() }, + { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); await expect(action.prove()).rejects.toThrow(`Unknown auth witness for message hash ${messageHash.toString()}`); @@ -156,10 +154,8 @@ describe('e2e_blacklist_token_contract transfer private', () => { .withWallet(wallets[2]) .methods.transfer(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce); const expectedMessageHash = computeAuthWitMessageHash( - wallets[2].getAddress(), - wallets[0].getChainId(), - 
wallets[0].getVersion(), - action.request(), + { caller: wallets[2].getAddress(), action: action.request() }, + { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); const witness = await wallets[0].createAuthWit({ caller: wallets[1].getAddress(), action }); diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts index 8d11daf3c5e..4d877859f53 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts @@ -113,10 +113,8 @@ describe('e2e_blacklist_token_contract unshielding', () => { .withWallet(wallets[2]) .methods.unshield(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce); const expectedMessageHash = computeAuthWitMessageHash( - wallets[2].getAddress(), - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: wallets[2].getAddress(), action: action.request() }, + { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); // Both wallets are connected to same node and PXE so we could just insert directly diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging.test.ts index 4ba3f4990f0..f65ba985a72 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging.test.ts @@ -201,10 +201,11 @@ describe('e2e_cross_chain_messaging', () => { const withdrawAmount = 9n; const nonce = Fr.random(); const expectedBurnMessageHash = computeAuthWitMessageHash( - l2Bridge.address, - user1Wallet.getChainId(), - user1Wallet.getVersion(), - l2Token.methods.burn(user1Wallet.getAddress(), withdrawAmount, nonce).request(), + { + caller: l2Bridge.address, + action: l2Token.methods.burn(user1Wallet.getAddress(), withdrawAmount, 
nonce).request(), + }, + { chainId: user1Wallet.getChainId(), version: user1Wallet.getVersion() }, ); // Should fail as owner has not given approval to bridge burn their funds. await expect( diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index 3455b0166f6..02122e165d1 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -1,5 +1,6 @@ import { type AccountWalletWithSecretKey, type AztecNode, Fr, L1EventPayload, TaggedLog } from '@aztec/aztec.js'; import { deriveMasterIncomingViewingSecretKey } from '@aztec/circuits.js'; +import { EventSelector } from '@aztec/foundation/abi'; import { makeTuple } from '@aztec/foundation/array'; import { type Tuple } from '@aztec/foundation/serialize'; import { type ExampleEvent0, type ExampleEvent1, TestLogContract } from '@aztec/noir-contracts.js'; @@ -30,30 +31,6 @@ describe('Logs', () => { afterAll(() => teardown()); describe('functionality around emitting an encrypted log', () => { - it('emits a generic encrypted log and checks for correctness', async () => { - const randomness = Fr.random(); - const eventTypeId = Fr.random(); - const preimage = makeTuple(6, Fr.random); - - const tx = await testLogContract.methods.emit_encrypted_log(randomness, eventTypeId, preimage).send().wait(); - - const txEffect = await node.getTxEffect(tx.txHash); - - const encryptedLogs = txEffect!.encryptedLogs.unrollLogs(); - expect(encryptedLogs.length).toBe(1); - - const decryptedLog = TaggedLog.decryptAsIncoming( - encryptedLogs[0], - deriveMasterIncomingViewingSecretKey(wallets[0].getSecretKey()), - L1EventPayload, - ); - - expect(decryptedLog?.payload.contractAddress).toStrictEqual(testLogContract.address); - expect(decryptedLog?.payload.randomness).toStrictEqual(randomness); - expect(decryptedLog?.payload.eventTypeId).toStrictEqual(eventTypeId); - 
expect(decryptedLog?.payload.event.items).toStrictEqual(preimage); - }); - it('emits multiple events as encrypted logs and decodes them', async () => { const randomness = makeTuple(2, Fr.random); const preimage = makeTuple(4, Fr.random); @@ -74,7 +51,7 @@ describe('Logs', () => { expect(decryptedLog0?.payload.contractAddress).toStrictEqual(testLogContract.address); expect(decryptedLog0?.payload.randomness).toStrictEqual(randomness[0]); expect(decryptedLog0?.payload.eventTypeId).toStrictEqual( - new Fr(0x00000000000000000000000000000000000000000000000000000000aa533f60), + EventSelector.fromField(new Fr(0x00000000000000000000000000000000000000000000000000000000aa533f60)), ); // We decode our event into the event type @@ -97,7 +74,7 @@ describe('Logs', () => { expect(decryptedLog1?.payload.contractAddress).toStrictEqual(testLogContract.address); expect(decryptedLog1?.payload.randomness).toStrictEqual(randomness[1]); expect(decryptedLog1?.payload.eventTypeId).toStrictEqual( - new Fr(0x00000000000000000000000000000000000000000000000000000000d1be0447), + EventSelector.fromField(new Fr(0x00000000000000000000000000000000000000000000000000000000d1be0447)), ); // We check our second event, which is a different type diff --git a/yarn-project/end-to-end/src/e2e_fees/failures.test.ts b/yarn-project/end-to-end/src/e2e_fees/failures.test.ts index 160c49e6d2c..dd0bb68635c 100644 --- a/yarn-project/end-to-end/src/e2e_fees/failures.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/failures.test.ts @@ -4,9 +4,10 @@ import { Fr, type FunctionCall, FunctionSelector, + PrivateFeePaymentMethod, PublicFeePaymentMethod, TxStatus, - computeAuthWitMessageHash, + computeSecretHash, } from '@aztec/aztec.js'; import { Gas, GasSettings } from '@aztec/circuits.js'; import { FunctionType } from '@aztec/foundation/abi'; @@ -35,6 +36,99 @@ describe('e2e_fees failures', () => { await t.teardown(); }); + it('reverts transactions but still pays fees using PrivateFeePaymentMethod', async () => { + 
const OutrageousPublicAmountAliceDoesNotHave = BigInt(1e8); + const PrivateMintedAlicePrivateBananas = BigInt(1e15); + + const [initialAlicePrivateBananas, initialFPCPrivateBananas] = await t.bananaPrivateBalances( + aliceAddress, + bananaFPC.address, + ); + const [initialAlicePublicBananas, initialFPCPublicBananas] = await t.bananaPublicBalances( + aliceAddress, + bananaFPC.address, + ); + const [initialAliceGas, initialFPCGas] = await t.gasBalances(aliceAddress, bananaFPC.address); + + await t.mintPrivateBananas(PrivateMintedAlicePrivateBananas, aliceAddress); + + // if we simulate locally, it throws an error + await expect( + bananaCoin.methods + // still use a public transfer so as to fail in the public app logic phase + .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) + .send({ + fee: { + gasSettings, + paymentMethod: new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, aliceWallet), + }, + }) + .wait(), + ).rejects.toThrow(/attempt to subtract with underflow 'hi == high'/); + + // we did not pay the fee, because we did not submit the TX + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bananaFPC.address], + [initialAlicePrivateBananas + PrivateMintedAlicePrivateBananas, initialFPCPrivateBananas], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address], + [initialAlicePublicBananas, initialFPCPublicBananas], + ); + await expectMapping(t.gasBalances, [aliceAddress, bananaFPC.address], [initialAliceGas, initialFPCGas]); + + // if we skip simulation, it includes the failed TX + const rebateSecret = Fr.random(); + const currentSequencerL1Gas = await t.getCoinbaseBalance(); + const txReceipt = await bananaCoin.methods + .transfer_public(aliceAddress, sequencerAddress, OutrageousPublicAmountAliceDoesNotHave, 0) + .send({ + skipPublicSimulation: true, + fee: { + gasSettings, + paymentMethod: new PrivateFeePaymentMethod(bananaCoin.address, bananaFPC.address, 
aliceWallet, rebateSecret), + }, + }) + .wait({ dontThrowOnRevert: true }); + + expect(txReceipt.status).toBe(TxStatus.APP_LOGIC_REVERTED); + const feeAmount = txReceipt.transactionFee!; + const newSequencerL1Gas = await t.getCoinbaseBalance(); + expect(newSequencerL1Gas).toEqual(currentSequencerL1Gas + feeAmount); + + // and thus we paid the fee + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bananaFPC.address], + [ + // alice paid the maximum amount in private bananas + initialAlicePrivateBananas + PrivateMintedAlicePrivateBananas - gasSettings.getFeeLimit().toBigInt(), + initialFPCPrivateBananas, + ], + ); + await expectMapping( + t.bananaPublicBalances, + [aliceAddress, bananaFPC.address], + [initialAlicePublicBananas, initialFPCPublicBananas + feeAmount], + ); + await expectMapping(t.gasBalances, [aliceAddress, bananaFPC.address], [initialAliceGas, initialFPCGas - feeAmount]); + + // Alice can redeem her shield to get the rebate + const refund = gasSettings.getFeeLimit().toBigInt() - feeAmount; + expect(refund).toBeGreaterThan(0n); + const secretHashForRebate = computeSecretHash(rebateSecret); + await t.addPendingShieldNoteToPXE(t.aliceWallet, refund, secretHashForRebate, txReceipt.txHash); + await bananaCoin.methods.redeem_shield(aliceAddress, refund, rebateSecret).send().wait(); + + await expectMapping( + t.bananaPrivateBalances, + [aliceAddress, bananaFPC.address], + [initialAlicePrivateBananas + PrivateMintedAlicePrivateBananas - feeAmount, initialFPCPrivateBananas], + ); + }); + it('reverts transactions but still pays fees using PublicFeePaymentMethod', async () => { const OutrageousPublicAmountAliceDoesNotHave = BigInt(1e15); const PublicMintedAlicePublicBananas = BigInt(1e12); @@ -115,9 +209,6 @@ describe('e2e_fees failures', () => { [aliceAddress, bananaFPC.address, sequencerAddress], [initialAliceGas, initialFPCGas - feeAmount, initialSequencerGas], ); - - // TODO(#4712) - demonstrate reverts with the PrivateFeePaymentMethod. 
- // Can't do presently because all logs are "revertible" so we lose notes that get broadcasted during unshielding. }); it('fails transaction that error in setup', async () => { @@ -234,25 +325,27 @@ class BuggedSetupFeePaymentMethod extends PublicFeePaymentMethod { override getFunctionCalls(gasSettings: GasSettings): Promise { const maxFee = gasSettings.getFeeLimit(); const nonce = Fr.random(); - const messageHash = computeAuthWitMessageHash( - this.paymentContract, - this.wallet.getChainId(), - this.wallet.getVersion(), - { - name: 'transfer_public', - args: [this.wallet.getAddress(), this.paymentContract, maxFee, nonce], - selector: FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), - type: FunctionType.PUBLIC, - isStatic: false, - to: this.asset, - returnTypes: [], - }, - ); const tooMuchFee = new Fr(maxFee.toBigInt() * 2n); return Promise.resolve([ - this.wallet.setPublicAuthWit(messageHash, true).request(), + this.wallet + .setPublicAuthWit( + { + caller: this.paymentContract, + action: { + name: 'transfer_public', + args: [this.wallet.getAddress(), this.paymentContract, maxFee, nonce], + selector: FunctionSelector.fromSignature('transfer_public((Field),(Field),Field,Field)'), + type: FunctionType.PUBLIC, + isStatic: false, + to: this.asset, + returnTypes: [], + }, + }, + true, + ) + .request(), { name: 'fee_entrypoint_public', to: this.paymentContract, diff --git a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts index 7e22eecf358..9d1c0007931 100644 --- a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts @@ -5,7 +5,6 @@ import { ExtendedNote, Fr, Note, - computeAuthWitMessageHash, computeSecretHash, } from '@aztec/aztec.js'; import { LendingContract, PriceFeedContract, TokenContract } from '@aztec/noir-contracts.js'; @@ -320,17 +319,19 @@ describe('e2e_lending_contract', () => { it('Repay: 🍌 -> 
🏦', async () => { const repayAmount = 20n; - const nonce = Fr.random(); - const messageHash = computeAuthWitMessageHash( - lendingContract.address, - wallet.getChainId(), - wallet.getVersion(), - stableCoin.methods.burn_public(lendingAccount.address, repayAmount, nonce).request(), - ); // Add it to the wallet as approved - await wallet.setPublicAuthWit(messageHash, true).send().wait(); + await wallet + .setPublicAuthWit( + { + caller: lendingContract.address, + action: stableCoin.methods.burn_public(lendingAccount.address, repayAmount, nonce).request(), + }, + true, + ) + .send() + .wait(); await lendingSim.progressTime(TIME_JUMP); lendingSim.repayPublic(lendingAccount.address, lendingAccount.address.toField(), repayAmount); diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 42a5f26cb8b..024857ab6e2 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -13,6 +13,7 @@ import { } from '@aztec/aztec.js'; import { type BootNodeConfig, BootstrapNode, createLibP2PPeerId } from '@aztec/p2p'; import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import fs from 'fs'; import { mnemonicToAccount } from 'viem/accounts'; @@ -203,7 +204,11 @@ describe('e2e_p2p_network', () => { dataDirectory, bootstrapNodes: bootstrapNode ? [bootstrapNode] : [], }; - return await AztecNodeService.createAndSync(newConfig, createDebugLogger(`aztec:node-${tcpListenPort}`)); + return await AztecNodeService.createAndSync( + newConfig, + new NoopTelemetryClient(), + createDebugLogger(`aztec:node-${tcpListenPort}`), + ); }; // creates an instance of the PXE and submit a given number of transactions to it. 
diff --git a/yarn-project/end-to-end/src/e2e_prover/full.test.ts b/yarn-project/end-to-end/src/e2e_prover/full.test.ts index bbc1c8b7404..485c6d3d847 100644 --- a/yarn-project/end-to-end/src/e2e_prover/full.test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/full.test.ts @@ -60,8 +60,8 @@ describe('full_prover', () => { logger.info(`Verifying private kernel tail proof`); await expect(t.circuitProofVerifier?.verifyProof(privateTx)).resolves.not.toThrow(); - const sentPrivateTx = privateInteraction.send(); - const sentPublicTx = publicInteraction.send(); + const sentPrivateTx = privateInteraction.send({ skipPublicSimulation: true }); + const sentPublicTx = publicInteraction.send({ skipPublicSimulation: true }); await Promise.all([ sentPrivateTx.wait({ timeout: 1200, interval: 10 }), sentPublicTx.wait({ timeout: 1200, interval: 10 }), diff --git a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts index 306e9e7bb06..60e6edc6c3f 100644 --- a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts +++ b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging/deposits.test.ts @@ -1,4 +1,4 @@ -import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { Fr } from '@aztec/aztec.js'; import { NO_L1_TO_L2_MSG_ERROR } from '../fixtures/fixtures.js'; import { PublicCrossChainMessagingContractTest } from './public_cross_chain_messaging_contract_test.js'; @@ -7,7 +7,6 @@ describe('e2e_public_cross_chain_messaging deposits', () => { const t = new PublicCrossChainMessagingContractTest('deposits'); let { - wallets, crossChainTestHarness, ethAccount, aztecNode, @@ -23,7 +22,7 @@ describe('e2e_public_cross_chain_messaging deposits', () => { await t.applyBaseSnapshots(); await t.setup(); // Have to destructure again to ensure we have latest refs. 
- ({ wallets, crossChainTestHarness, user1Wallet, user2Wallet } = t); + ({ crossChainTestHarness, user1Wallet, user2Wallet } = t); ethAccount = crossChainTestHarness.ethAccount; aztecNode = crossChainTestHarness.aztecNode; @@ -75,13 +74,16 @@ describe('e2e_public_cross_chain_messaging deposits', () => { // 4. Give approval to bridge to burn owner's funds: const withdrawAmount = 9n; const nonce = Fr.random(); - const burnMessageHash = computeAuthWitMessageHash( - l2Bridge.address, - wallets[0].getChainId(), - wallets[0].getVersion(), - l2Token.methods.burn_public(ownerAddress, withdrawAmount, nonce).request(), - ); - await user1Wallet.setPublicAuthWit(burnMessageHash, true).send().wait(); + await user1Wallet + .setPublicAuthWit( + { + caller: l2Bridge.address, + action: l2Token.methods.burn_public(ownerAddress, withdrawAmount, nonce).request(), + }, + true, + ) + .send() + .wait(); // 5. Withdraw owner's funds from L2 to L1 logger.verbose('5. Withdraw owner funds from L2 to L1'); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts index f736b47a64c..1c89c74ac20 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts @@ -187,10 +187,8 @@ describe('e2e_token_contract burn', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset.withWallet(wallets[1]).methods.burn(accounts[0].address, amount, nonce); const messageHash = computeAuthWitMessageHash( - accounts[1].address, - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: accounts[1].address, action: action.request() }, + { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); await expect(action.simulate()).rejects.toThrow( @@ -207,10 +205,8 @@ describe('e2e_token_contract burn', () => { // We need to compute the message we want to sign and add 
it to the wallet as approved const action = asset.withWallet(wallets[2]).methods.burn(accounts[0].address, amount, nonce); const expectedMessageHash = computeAuthWitMessageHash( - accounts[2].address, - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: accounts[2].address, action: action.request() }, + { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); const witness = await wallets[0].createAuthWit({ caller: accounts[1].address, action }); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts index a85f2de98fe..4d1536e4df4 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts @@ -1,4 +1,10 @@ -import { AztecAddress, CompleteAddress, Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { + AztecAddress, + CompleteAddress, + Fr, + computeAuthWitMessageHash, + computeInnerAuthWitHashFromAction, +} from '@aztec/aztec.js'; import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; @@ -147,10 +153,11 @@ describe('e2e_token_contract transfer private', () => { .withWallet(wallets[1]) .methods.transfer_from(accounts[0].address, accounts[1].address, amount, nonce); const messageHash = computeAuthWitMessageHash( - accounts[1].address, - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: accounts[1].address, action: action.request() }, + { + chainId: wallets[0].getChainId(), + version: wallets[0].getVersion(), + }, ); await expect(action.simulate()).rejects.toThrow( @@ -169,10 +176,11 @@ describe('e2e_token_contract transfer private', () => { .withWallet(wallets[2]) .methods.transfer_from(accounts[0].address, accounts[1].address, amount, nonce); const expectedMessageHash = 
computeAuthWitMessageHash( - accounts[2].address, - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: accounts[2].address, action: action.request() }, + { + chainId: wallets[0].getChainId(), + version: wallets[0].getVersion(), + }, ); const witness = await wallets[0].createAuthWit({ caller: accounts[1].address, action }); @@ -195,44 +203,33 @@ describe('e2e_token_contract transfer private', () => { .withWallet(wallets[1]) .methods.transfer_from(accounts[0].address, accounts[1].address, amount, nonce); - const witness = await wallets[0].createAuthWit({ caller: accounts[1].address, action }); - await wallets[1].addAuthWitness(witness); - - await wallets[0].cancelAuthWit(witness.requestHash).send().wait(); - - // Perform the transfer, should fail because nullifier already emitted - const txCancelledAuthwit = asset - .withWallet(wallets[1]) - .methods.transfer_from(accounts[0].address, accounts[1].address, amount, nonce) - .send(); - await expect(txCancelledAuthwit.wait()).rejects.toThrowError(DUPLICATE_NULLIFIER_ERROR); - }); + const intent = { caller: accounts[1].address, action }; - it('transfer on behalf of other, cancelled authwit, flow 2', async () => { - const balance0 = await asset.methods.balance_of_private(accounts[0].address).simulate(); - const amount = balance0 / 2n; - const nonce = Fr.random(); - expect(amount).toBeGreaterThan(0n); + const witness = await wallets[0].createAuthWit(intent); + await wallets[1].addAuthWitness(witness); - // We need to compute the message we want to sign and add it to the wallet as approved - const action = asset - .withWallet(wallets[1]) - .methods.transfer_from(accounts[0].address, accounts[1].address, amount, nonce); + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ + isValidInPrivate: true, + isValidInPublic: false, + }); - const witness = await wallets[0].createAuthWit({ caller: accounts[1].address, action }); - await wallets[1].addAuthWitness(witness); 
+ const innerHash = computeInnerAuthWitHashFromAction(accounts[1].address, action.request()); + await asset.withWallet(wallets[0]).methods.cancel_authwit(innerHash).send().wait(); - await wallets[0].cancelAuthWit({ caller: accounts[1].address, action }).send().wait(); + expect(await wallets[0].lookupValidity(wallets[0].getAddress(), intent)).toEqual({ + isValidInPrivate: false, + isValidInPublic: false, + }); // Perform the transfer, should fail because nullifier already emitted const txCancelledAuthwit = asset .withWallet(wallets[1]) .methods.transfer_from(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); + await expect(txCancelledAuthwit.wait()).rejects.toThrowError(DUPLICATE_NULLIFIER_ERROR); }); - it('transfer on behalf of other, invalid spend_private_authwit on "from"', async () => { + it('transfer on behalf of other, invalid verify_private_authwit on "from"', async () => { const nonce = Fr.random(); // Should fail as the returned value from the badAccount is malformed diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_public.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_public.test.ts index c828a6bdb15..5ba38158564 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_public.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_public.test.ts @@ -1,4 +1,4 @@ -import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { Fr } from '@aztec/aztec.js'; import { U128_UNDERFLOW_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; @@ -188,7 +188,7 @@ describe('e2e_token_contract transfer public', () => { await wallets[0].setPublicAuthWit({ caller: accounts[1].address, action }, true).send().wait(); - await wallets[0].cancelPublicAuthWit({ caller: accounts[1].address, action }).send().wait(); + await wallets[0].setPublicAuthWit({ caller: 
accounts[1].address, action }, false).send().wait(); await expect( asset @@ -212,40 +212,7 @@ describe('e2e_token_contract transfer public', () => { await wallets[0].setPublicAuthWit({ caller: accounts[1].address, action }, false).send().wait(); - await expect( - asset - .withWallet(wallets[1]) - .methods.transfer_public(accounts[0].address, accounts[1].address, amount, nonce) - .simulate(), - ).rejects.toThrowError(/unauthorized/); - }); - - it('transfer on behalf of other, cancelled authwit, flow 3', async () => { - const balance0 = await asset.methods.balance_of_public(accounts[0].address).simulate(); - const amount = balance0 / 2n; - expect(amount).toBeGreaterThan(0n); - const nonce = Fr.random(); - - const action = asset - .withWallet(wallets[1]) - .methods.transfer_public(accounts[0].address, accounts[1].address, amount, nonce); - const messageHash = computeAuthWitMessageHash( - accounts[1].address, - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), - ); - - await wallets[0].setPublicAuthWit(messageHash, true).send().wait(); - - await wallets[0].cancelPublicAuthWit(messageHash).send().wait(); - - await expect( - asset - .withWallet(wallets[1]) - .methods.transfer_public(accounts[0].address, accounts[1].address, amount, nonce) - .simulate(), - ).rejects.toThrow(/unauthorized/); + await expect(action.simulate()).rejects.toThrow(/unauthorized/); }); it('transfer on behalf of other, invalid spend_public_authwit on "from"', async () => { diff --git a/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts index d52b3ce214e..6507f4aeeca 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts @@ -111,10 +111,8 @@ describe('e2e_token_contract unshielding', () => { .withWallet(wallets[2]) .methods.unshield(accounts[0].address, accounts[1].address, amount, nonce); const 
expectedMessageHash = computeAuthWitMessageHash( - accounts[2].address, - wallets[0].getChainId(), - wallets[0].getVersion(), - action.request(), + { caller: accounts[2].address, action }, + { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, ); // Both wallets are connected to same node and PXE so we could just insert directly diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index c897648c62c..fc985e66290 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -20,6 +20,7 @@ import { type Logger, createDebugLogger } from '@aztec/foundation/log'; import { makeBackoff, retry } from '@aztec/foundation/retry'; import { resolver, reviver } from '@aztec/foundation/serialize'; import { type PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; +import { createAndStartTelemetryClient, getConfigEnvVars as getTelemetryConfig } from '@aztec/telemetry-client/start'; import { type Anvil, createAnvil } from '@viem/anvil'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; @@ -270,8 +271,9 @@ async function setupFromFresh(statePath: string | undefined, logger: Logger): Pr aztecNodeConfig.bbWorkingDirectory = bbConfig.bbWorkingDirectory; } + const telemetry = createAndStartTelemetryClient(getTelemetryConfig()); logger.verbose('Creating and synching an aztec node...'); - const aztecNode = await AztecNodeService.createAndSync(aztecNodeConfig); + const aztecNode = await AztecNodeService.createAndSync(aztecNodeConfig, telemetry); logger.verbose('Creating pxe...'); const pxeConfig = getPXEServiceConfig(); @@ -343,7 +345,8 @@ async function setupFromState(statePath: string, logger: Logger): Promise { + await telemetry.stop(); + }); +} + const getAztecUrl = () => { return PXE_URL; }; @@ -369,7 +377,7 @@ export async function setup( config.bbWorkingDirectory = 
bbConfig.bbWorkingDirectory; } config.l1BlockPublishRetryIntervalMS = 100; - const aztecNode = await AztecNodeService.createAndSync(config); + const aztecNode = await AztecNodeService.createAndSync(config, telemetry); const sequencer = aztecNode.getSequencer(); const prover = aztecNode.getProver(); diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index 86e10d41786..143698eb996 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -145,14 +145,14 @@ export const browserTestSuite = ( it('Can access CompleteAddress class in browser', async () => { const result: string = await page.evaluate(() => { const completeAddress = window.AztecJs.CompleteAddress.fromString( - '0x06f73ae2ba011a157808a670dd52231347a3b46897ea00945d69fb35d08e68d02c93b9572b35f9c9e07e9003ae1ca444442a165f927bce00e347dab57cc19391148730d0deec722eb6c54747df7345bc2ab3bd8e81f438b17b81ccabd9e6a3ac0708920251ccaf6664d769cbc47c8d767f64912639e13d9f9e441b225066161900c48a65eea83f1dbf217c43daf1be6ba9cefd2754f07e3cc13e81e5432e47f30dfb47c8b1e11368bec638fd9d22c696bf9c323a0fd09050745f4b7cf150bfa529a9f3062ee5f9d0a099ac53b4e1130653fb797ed2b59914a8915951d13ad8252521211957a854707af85ad40e9ab4d474a4fcbdcbe7a47866cae0db4fd86ed2261669d85a9cfbd09365a6db5d7acfe5560104a0cb893a375d6c08ffb9cbb8270be446a16361f271ac11899ee19f990c68035da18703ba00c8e9773dfe6a784a', + 
'0x0f4b920040c48062d5cd72f0f1b6f331468940ab8651420de8080dfc7fa0f3dc2c93b9572b35f9c9e07e9003ae1ca444442a165f927bce00e347dab57cc19391148730d0deec722eb6c54747df7345bc2ab3bd8e81f438b17b81ccabd9e6a3ac0708920251ccaf6664d769cbc47c8d767f64912639e13d9f9e441b225066161900c48a65eea83f1dbf217c43daf1be6ba9cefd2754f07e3cc13e81e5432e47f30dfb47c8b1e11368bec638fd9d22c696bf9c323a0fd09050745f4b7cf150bfa529a9f3062ee5f9d0a099ac53b4e1130653fb797ed2b59914a8915951d13ad8252521211957a854707af85ad40e9ab4d474a4fcbdcbe7a47866cae0db4fd86ed2261669d85a9cfbd09365a6db5d7acfe5560104a0cb893a375d6c08ffb9cbb8270be446a16361f271ac11899ee19f990c68035da18703ba00c8e9773dfe6a784a', ); // NOTE: browser does not know how to serialize CompleteAddress for return, so return a string // otherwise returning a CompleteAddress makes result undefined. return completeAddress.toString(); }); expect(result).toBe( - '0x06f73ae2ba011a157808a670dd52231347a3b46897ea00945d69fb35d08e68d02c93b9572b35f9c9e07e9003ae1ca444442a165f927bce00e347dab57cc19391148730d0deec722eb6c54747df7345bc2ab3bd8e81f438b17b81ccabd9e6a3ac0708920251ccaf6664d769cbc47c8d767f64912639e13d9f9e441b225066161900c48a65eea83f1dbf217c43daf1be6ba9cefd2754f07e3cc13e81e5432e47f30dfb47c8b1e11368bec638fd9d22c696bf9c323a0fd09050745f4b7cf150bfa529a9f3062ee5f9d0a099ac53b4e1130653fb797ed2b59914a8915951d13ad8252521211957a854707af85ad40e9ab4d474a4fcbdcbe7a47866cae0db4fd86ed2261669d85a9cfbd09365a6db5d7acfe5560104a0cb893a375d6c08ffb9cbb8270be446a16361f271ac11899ee19f990c68035da18703ba00c8e9773dfe6a784a', + 
'0x0f4b920040c48062d5cd72f0f1b6f331468940ab8651420de8080dfc7fa0f3dc2c93b9572b35f9c9e07e9003ae1ca444442a165f927bce00e347dab57cc19391148730d0deec722eb6c54747df7345bc2ab3bd8e81f438b17b81ccabd9e6a3ac0708920251ccaf6664d769cbc47c8d767f64912639e13d9f9e441b225066161900c48a65eea83f1dbf217c43daf1be6ba9cefd2754f07e3cc13e81e5432e47f30dfb47c8b1e11368bec638fd9d22c696bf9c323a0fd09050745f4b7cf150bfa529a9f3062ee5f9d0a099ac53b4e1130653fb797ed2b59914a8915951d13ad8252521211957a854707af85ad40e9ab4d474a4fcbdcbe7a47866cae0db4fd86ed2261669d85a9cfbd09365a6db5d7acfe5560104a0cb893a375d6c08ffb9cbb8270be446a16361f271ac11899ee19f990c68035da18703ba00c8e9773dfe6a784a', ); }); diff --git a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts index f62fa9bc3c5..6caf5b59482 100644 --- a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts +++ b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts @@ -425,15 +425,24 @@ export const uniswapL1L2TestSuite = ( // 3. Owner gives uniswap approval to transfer funds on its behalf const nonceForWETHTransferApproval = new Fr(1n); - const transferMessageHash = computeAuthWitMessageHash( - uniswapL2Contract.address, - ownerWallet.getChainId(), - ownerWallet.getVersion(), - wethCrossChainHarness.l2Token.methods - .transfer_public(ownerAddress, uniswapL2Contract.address, wethAmountToBridge, nonceForWETHTransferApproval) - .request(), - ); - await ownerWallet.setPublicAuthWit(transferMessageHash, true).send().wait(); + + await ownerWallet + .setPublicAuthWit( + { + caller: uniswapL2Contract.address, + action: wethCrossChainHarness.l2Token.methods + .transfer_public( + ownerAddress, + uniswapL2Contract.address, + wethAmountToBridge, + nonceForWETHTransferApproval, + ) + .request(), + }, + true, + ) + .send() + .wait(); // 4. Swap on L1 - sends L2 to L1 message to withdraw WETH to L1 and another message to swap assets. 
const [secretForDepositingSwappedDai, secretHashForDepositingSwappedDai] = @@ -456,13 +465,7 @@ export const uniswapL1L2TestSuite = ( ownerEthAddress, nonceForSwap, ); - const swapMessageHash = computeAuthWitMessageHash( - sponsorAddress, - ownerWallet.getChainId(), - ownerWallet.getVersion(), - action.request(), - ); - await ownerWallet.setPublicAuthWit(swapMessageHash, true).send().wait(); + await ownerWallet.setPublicAuthWit({ caller: sponsorAddress, action }, true).send().wait(); // 4.2 Call swap_public from user2 on behalf of owner const uniswapL2Interaction = await action.send().wait(); @@ -619,13 +622,13 @@ export const uniswapL1L2TestSuite = ( const nonceForWETHUnshieldApproval = new Fr(2n); const expectedMessageHash = computeAuthWitMessageHash( - uniswapL2Contract.address, - ownerWallet.getChainId(), - ownerWallet.getVersion(), - - wethCrossChainHarness.l2Token.methods - .unshield(ownerAddress, uniswapL2Contract.address, wethAmountToBridge, nonceForWETHUnshieldApproval) - .request(), + { + caller: uniswapL2Contract.address, + action: wethCrossChainHarness.l2Token.methods + .unshield(ownerAddress, uniswapL2Contract.address, wethAmountToBridge, nonceForWETHUnshieldApproval) + .request(), + }, + { chainId: ownerWallet.getChainId(), version: ownerWallet.getVersion() }, ); await expect( @@ -694,16 +697,23 @@ export const uniswapL1L2TestSuite = ( // 2. 
Give approval to uniswap to transfer funds to itself const nonceForWETHTransferApproval = new Fr(2n); - const transferMessageHash = computeAuthWitMessageHash( - uniswapL2Contract.address, - ownerWallet.getChainId(), - ownerWallet.getVersion(), - - wethCrossChainHarness.l2Token.methods - .transfer_public(ownerAddress, uniswapL2Contract.address, wethAmountToBridge, nonceForWETHTransferApproval) - .request(), - ); - await ownerWallet.setPublicAuthWit(transferMessageHash, true).send().wait(); + await ownerWallet + .setPublicAuthWit( + { + caller: uniswapL2Contract.address, + action: wethCrossChainHarness.l2Token.methods + .transfer_public( + ownerAddress, + uniswapL2Contract.address, + wethAmountToBridge, + nonceForWETHTransferApproval, + ) + .request(), + }, + true, + ) + .send() + .wait(); // No approval to call `swap` but should work even without it: const [_, secretHashForDepositingSwappedDai] = daiCrossChainHarness.generateClaimSecret(); @@ -750,13 +760,7 @@ export const uniswapL1L2TestSuite = ( ownerEthAddress, nonceForSwap, ); - const swapMessageHash = computeAuthWitMessageHash( - approvedUser, - ownerWallet.getChainId(), - ownerWallet.getVersion(), - action.request(), - ); - await ownerWallet.setPublicAuthWit(swapMessageHash, true).send().wait(); + await ownerWallet.setPublicAuthWit({ caller: approvedUser, action }, true).send().wait(); await expect(action.simulate()).rejects.toThrow(/unauthorized/); }); @@ -765,15 +769,23 @@ export const uniswapL1L2TestSuite = ( // swap should fail since no transfer approval to uniswap: const nonceForWETHTransferApproval = new Fr(4n); - const transferMessageHash = computeAuthWitMessageHash( - uniswapL2Contract.address, - ownerWallet.getChainId(), - ownerWallet.getVersion(), - wethCrossChainHarness.l2Token.methods - .transfer_public(ownerAddress, uniswapL2Contract.address, wethAmountToBridge, nonceForWETHTransferApproval) - .request(), - ); - await ownerWallet.setPublicAuthWit(transferMessageHash, true).send().wait(); + await 
ownerWallet + .setPublicAuthWit( + { + caller: uniswapL2Contract.address, + action: wethCrossChainHarness.l2Token.methods + .transfer_public( + ownerAddress, + uniswapL2Contract.address, + wethAmountToBridge, + nonceForWETHTransferApproval, + ) + .request(), + }, + true, + ) + .send() + .wait(); await expect( uniswapL2Contract.methods @@ -931,15 +943,23 @@ export const uniswapL1L2TestSuite = ( // Owner gives uniswap approval to transfer funds on its behalf const nonceForWETHTransferApproval = new Fr(5n); - const transferMessageHash = computeAuthWitMessageHash( - uniswapL2Contract.address, - ownerWallet.getChainId(), - ownerWallet.getVersion(), - wethCrossChainHarness.l2Token.methods - .transfer_public(ownerAddress, uniswapL2Contract.address, wethAmountToBridge, nonceForWETHTransferApproval) - .request(), - ); - await ownerWallet.setPublicAuthWit(transferMessageHash, true).send().wait(); + await ownerWallet + .setPublicAuthWit( + { + caller: uniswapL2Contract.address, + action: wethCrossChainHarness.l2Token.methods + .transfer_public( + ownerAddress, + uniswapL2Contract.address, + wethAmountToBridge, + nonceForWETHTransferApproval, + ) + .request(), + }, + true, + ) + .send() + .wait(); // Call swap_public on L2 const secretHashForDepositingSwappedDai = Fr.random(); diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 7273cee65f5..28bde215732 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -66,6 +66,9 @@ { "path": "../simulator" }, + { + "path": "../telemetry-client" + }, { "path": "../types" }, diff --git a/yarn-project/end-to-end/webpack.config.js b/yarn-project/end-to-end/webpack.config.js index 6fe97604e7e..88f6bb5178c 100644 --- a/yarn-project/end-to-end/webpack.config.js +++ b/yarn-project/end-to-end/webpack.config.js @@ -64,7 +64,6 @@ export default { buffer: require.resolve('buffer/'), util: require.resolve('util/'), stream: require.resolve('stream-browserify'), - 
tty: require.resolve('tty-browserify'), }, }, }; diff --git a/yarn-project/entrypoints/package.json b/yarn-project/entrypoints/package.json index 63470f19789..48c6c5535a4 100644 --- a/yarn-project/entrypoints/package.json +++ b/yarn-project/entrypoints/package.json @@ -35,7 +35,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/entrypoints/src/dapp_entrypoint.ts b/yarn-project/entrypoints/src/dapp_entrypoint.ts index 18a217f2eda..f680cb26e28 100644 --- a/yarn-project/entrypoints/src/dapp_entrypoint.ts +++ b/yarn-project/entrypoints/src/dapp_entrypoint.ts @@ -1,4 +1,4 @@ -import { computeInnerAuthWitHash, computeOuterAuthWitHash } from '@aztec/aztec.js'; +import { computeAuthWitMessageHash, computeInnerAuthWitHash } from '@aztec/aztec.js'; import { type AuthWitnessProvider } from '@aztec/aztec.js/account'; import { type EntrypointInterface, EntrypointPayload, type ExecutionRequestInit } from '@aztec/aztec.js/entrypoint'; import { PackedValues, TxExecutionRequest } from '@aztec/circuit-types'; @@ -34,11 +34,9 @@ export class DefaultDappEntrypoint implements EntrypointInterface { const functionSelector = FunctionSelector.fromNameAndParameters(abi.name, abi.parameters); const innerHash = computeInnerAuthWitHash([Fr.ZERO, functionSelector.toField(), entrypointPackedArgs.hash]); - const outerHash = computeOuterAuthWitHash( - this.dappEntrypointAddress, - new Fr(this.chainId), - new Fr(this.version), - innerHash, + const outerHash = computeAuthWitMessageHash( + { consumer: this.dappEntrypointAddress, innerHash }, + { chainId: new Fr(this.chainId), version: new Fr(this.version) }, ); const authWitness = await this.userAuthWitnessProvider.createAuthWit(outerHash); diff --git a/yarn-project/ethereum/package.json b/yarn-project/ethereum/package.json index efd72f2d3da..b7518c9547f 100644 --- 
a/yarn-project/ethereum/package.json +++ b/yarn-project/ethereum/package.json @@ -51,7 +51,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/foundation/.prettierrc.json b/yarn-project/foundation/.prettierrc.json index 0f8d94093a8..b39dea73e43 100644 --- a/yarn-project/foundation/.prettierrc.json +++ b/yarn-project/foundation/.prettierrc.json @@ -6,5 +6,5 @@ "importOrder": ["^@aztec/(.*)$", "", "^\\./|\\.\\./"], "importOrderSeparation": true, "importOrderSortSpecifiers": true, - "importOrderParserPlugins": ["importAssertions", "typescript"] + "importOrderParserPlugins": ["importAssertions", "typescript", "decorators"] } diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json index ec8d34c3480..17295e60f26 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -59,7 +59,15 @@ "jest": { "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "moduleNameMapper": { diff --git a/yarn-project/foundation/src/abi/abi.ts b/yarn-project/foundation/src/abi/abi.ts index 2b608086604..9e996fc4249 100644 --- a/yarn-project/foundation/src/abi/abi.ts +++ b/yarn-project/foundation/src/abi/abi.ts @@ -2,6 +2,7 @@ import { inflate } from 'pako'; import { type Fr } from '../fields/fields.js'; import { type FunctionSelector } from './function_selector.js'; +import { type NoteSelector } from './note_selector.js'; /** * A basic value. 
@@ -275,7 +276,7 @@ export type ContractNote = { /** * Note identifier */ - id: Fr; + id: NoteSelector; /** * Type of the note (e.g., 'TransparentNote') */ diff --git a/yarn-project/foundation/src/abi/index.ts b/yarn-project/foundation/src/abi/index.ts index 476d3da8850..cab81b750c4 100644 --- a/yarn-project/foundation/src/abi/index.ts +++ b/yarn-project/foundation/src/abi/index.ts @@ -1,7 +1,8 @@ export * from './abi.js'; export * from './buffer.js'; +export * from './decoder.js'; export * from './encoder.js'; export * from './event_selector.js'; -export * from './decoder.js'; export * from './function_selector.js'; +export * from './note_selector.js'; export * from './utils.js'; diff --git a/yarn-project/foundation/src/abi/note_selector.ts b/yarn-project/foundation/src/abi/note_selector.ts new file mode 100644 index 00000000000..392399f7ee1 --- /dev/null +++ b/yarn-project/foundation/src/abi/note_selector.ts @@ -0,0 +1,73 @@ +import { toBigIntBE } from '../bigint-buffer/index.js'; +import { randomBytes } from '../crypto/index.js'; +import { type Fr } from '../fields/fields.js'; +import { BufferReader } from '../serialize/buffer_reader.js'; +import { TypeRegistry } from '../serialize/type_registry.js'; +import { Selector } from './selector.js'; + +/* eslint-disable @typescript-eslint/no-unsafe-declaration-merging */ + +/** Note selector branding */ +export interface NoteSelector { + /** Brand. */ + _branding: 'NoteSelector'; +} + +/** A note selector is the first 4 bytes of the hash of a note signature. */ +export class NoteSelector extends Selector { + /** + * Deserializes from a buffer or reader, corresponding to a write in cpp. + * @param buffer - Buffer or BufferReader to read from. + * @returns The Selector. 
+ */ + static fromBuffer(buffer: Buffer | BufferReader) { + const reader = BufferReader.asReader(buffer); + const value = Number(toBigIntBE(reader.readBytes(Selector.SIZE))); + return new NoteSelector(value); + } + + static fromString(buf: string) { + const withoutPrefix = buf.replace(/^0x/i, ''); + const buffer = Buffer.from(withoutPrefix, 'hex'); + return NoteSelector.fromBuffer(buffer); + } + + /** + * Converts a field to selector. + * @param fr - The field to convert. + * @returns The selector. + */ + static fromField(fr: Fr) { + return new NoteSelector(Number(fr.toBigInt())); + } + + /** + * Creates an empty selector. + * @returns An empty selector. + */ + static empty() { + return new NoteSelector(0); + } + + /** + * Creates a random selector. + * @returns A random selector. + */ + static random() { + return NoteSelector.fromBuffer(randomBytes(Selector.SIZE)); + } + + toJSON() { + return { + type: 'NoteSelector', + value: this.toString(), + }; + } + + static fromJSON(json: any): NoteSelector { + return NoteSelector.fromString(json.value); + } +} + +// For deserializing JSON. 
+TypeRegistry.register('NoteSelector', NoteSelector); diff --git a/yarn-project/foundation/src/crypto/random/randomness_singleton.ts b/yarn-project/foundation/src/crypto/random/randomness_singleton.ts index 667db265df1..f226874a921 100644 --- a/yarn-project/foundation/src/crypto/random/randomness_singleton.ts +++ b/yarn-project/foundation/src/crypto/random/randomness_singleton.ts @@ -18,10 +18,10 @@ export class RandomnessSingleton { private readonly log = createDebugLogger('aztec:randomness_singleton'), ) { if (seed !== undefined) { - this.log.verbose(`Using pseudo-randomness with seed: ${seed}`); + this.log.debug(`Using pseudo-randomness with seed: ${seed}`); this.counter = seed; } else { - this.log.verbose('Using true randomness'); + this.log.debug('Using true randomness'); } } diff --git a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts index 9c342805d09..60d07291cc0 100644 --- a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts +++ b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts @@ -25,7 +25,7 @@ export class JsonRpcServer { private objectClassMap: JsonClassConverterInput, /** List of methods to disallow from calling remotely */ public readonly disallowedMethods: string[] = [], - private log = createDebugLogger('aztec:foundation:json-rpc:server'), + private log = createDebugLogger('json-rpc:server'), ) { this.proxy = new JsonProxy(handler, stringClassMap, objectClassMap); } @@ -226,7 +226,7 @@ export type ServerList = { */ export function createNamespacedJsonRpcServer( servers: ServerList, - log = createDebugLogger('aztec:foundation:json-rpc:multi-server'), + log = createDebugLogger('json-rpc:multi-server'), ): JsonRpcServer { const handler = {} as any; const disallowedMethods: string[] = []; diff --git a/yarn-project/foundation/src/log/logger.ts b/yarn-project/foundation/src/log/logger.ts index b2cfbc31b39..3b28a279d2c 100644 --- 
a/yarn-project/foundation/src/log/logger.ts +++ b/yarn-project/foundation/src/log/logger.ts @@ -1,6 +1,4 @@ import debug from 'debug'; -import isNode from 'detect-node'; -import { isatty } from 'tty'; import { type LogData, type LogFn } from './log_fn.js'; @@ -15,6 +13,9 @@ export type LogLevel = (typeof LogLevels)[number]; const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() as LogLevel; const currentLevel = LogLevels.includes(envLogLevel) ? envLogLevel : DefaultLogLevel; +const namespaces = process.env.DEBUG ?? 'aztec:*'; +debug.enable(namespaces); + /** Log function that accepts an exception object */ type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void; @@ -38,9 +39,6 @@ export type DebugLogger = Logger; */ export function createDebugLogger(name: string): DebugLogger { const debugLogger = debug(name); - if (currentLevel === 'debug') { - debugLogger.enabled = true; - } const logger = { silent: () => {}, @@ -78,42 +76,11 @@ function logWithDebug(debug: debug.Debugger, level: LogLevel, msg: string, data? } msg = data ? `${msg} ${fmtLogData(data)}` : msg; - if (debug.enabled) { - if (level !== 'debug') { - msg = `${level.toUpperCase()} ${msg}`; - } - debug(msg); - } else if (LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel)) { - printLog(`${getPrefix(debug, level)} ${msg}`); + if (debug.enabled && LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel)) { + debug('[%s] %s', level.toUpperCase(), msg); } } -/** - * Returns a log prefix that emulates that of npm debug. Uses colors if in node and in a tty. - * @param debugLogger - Instance of npm debug logger. - * @param level - Intended log level (printed out if strictly above current log level). - * @returns Log prefix. - */ -function getPrefix(debugLogger: debug.Debugger, level: LogLevel) { - const levelLabel = currentLevel !== level ? 
` ${level.toUpperCase()}` : ''; - const prefix = `${debugLogger.namespace.replace(/^aztec:/, '')}${levelLabel}`; - if ((!isNode || !isatty(process.stderr.fd)) && !process.env.DEBUG_COLORS) { - return prefix; - } - const colorIndex = debug.selectColor(debugLogger.namespace) as number; - const colorCode = '\u001B[3' + (colorIndex < 8 ? colorIndex : '8;5;' + colorIndex); - return ` ${colorCode};1m${prefix}\u001B[0m`; -} - -/** - * Outputs to console error. - * @param msg - What to log. - */ -function printLog(msg: string) { - // eslint-disable-next-line no-console - isNode ? process.stderr.write(msg + '\n') : console.error(msg); -} - /** * Concatenates a log message and an exception. * @param msg - Log message diff --git a/yarn-project/foundation/src/serialize/buffer_reader.test.ts b/yarn-project/foundation/src/serialize/buffer_reader.test.ts index f600942aba9..0845ff591e2 100644 --- a/yarn-project/foundation/src/serialize/buffer_reader.test.ts +++ b/yarn-project/foundation/src/serialize/buffer_reader.test.ts @@ -173,4 +173,72 @@ describe('buffer reader', () => { expect(bufferReader.peekBytes(10)).toEqual(Buffer.from(ARRAY.slice(0, 10))); }); }); + + describe('error handling', () => { + let smallBuffer: Buffer; + let smallBufferReader: BufferReader; + + beforeEach(() => { + smallBuffer = Buffer.from([1, 2, 3]); // 3-byte buffer + smallBufferReader = new BufferReader(smallBuffer); + }); + + it('should throw error when reading number beyond buffer length', () => { + expect(() => smallBufferReader.readNumber()).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when reading numbers beyond buffer length', () => { + expect(() => smallBufferReader.readNumbers(1)).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when reading UInt16 beyond buffer length', () => { + smallBufferReader.readBytes(2); + expect(() => smallBufferReader.readUInt16()).toThrow('Attempted to read beyond buffer length'); + }); + + 
it('should throw error when reading UInt8 beyond buffer length', () => { + smallBufferReader.readBytes(3); // Read all bytes + expect(() => smallBufferReader.readUInt8()).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when reading boolean beyond buffer length', () => { + smallBufferReader.readBytes(3); // Read all bytes + expect(() => smallBufferReader.readBoolean()).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when reading bytes beyond buffer length', () => { + expect(() => smallBufferReader.readBytes(4)).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when reading buffer beyond buffer length', () => { + // First, read a number (4 bytes) which is already beyond the buffer length + expect(() => smallBufferReader.readBuffer()).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when peeking beyond buffer length', () => { + expect(() => smallBufferReader.peekBytes(4)).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when reading vector beyond buffer length', () => { + expect(() => smallBufferReader.readVector({ fromBuffer: () => 1 })).toThrow( + 'Attempted to read beyond buffer length', + ); + }); + + it('should throw error when reading array beyond buffer length', () => { + expect(() => + smallBufferReader.readArray(4, { fromBuffer: (reader: BufferReader) => reader.readBytes(1) }), + ).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when reading string beyond buffer length', () => { + expect(() => smallBufferReader.readString()).toThrow('Attempted to read beyond buffer length'); + }); + + it('should throw error when reading map beyond buffer length', () => { + expect(() => smallBufferReader.readMap({ fromBuffer: () => 1 })).toThrow( + 'Attempted to read beyond buffer length', + ); + }); + }); }); diff --git 
a/yarn-project/foundation/src/serialize/buffer_reader.ts b/yarn-project/foundation/src/serialize/buffer_reader.ts index be1bf669a82..caee2973dfc 100644 --- a/yarn-project/foundation/src/serialize/buffer_reader.ts +++ b/yarn-project/foundation/src/serialize/buffer_reader.ts @@ -55,6 +55,7 @@ export class BufferReader { * @returns The read 32-bit unsigned integer value. */ public readNumber(): number { + this.#rangeCheck(4); this.index += 4; return this.buffer.readUint32BE(this.index - 4); } @@ -76,6 +77,7 @@ export class BufferReader { * @returns The read 16 bit value. */ public readUInt16(): number { + this.#rangeCheck(2); this.index += 2; return this.buffer.readUInt16BE(this.index - 2); } @@ -87,6 +89,7 @@ export class BufferReader { * @returns The read 8 bit value. */ public readUInt8(): number { + this.#rangeCheck(1); this.index += 1; return this.buffer.readUInt8(this.index - 1); } @@ -99,6 +102,7 @@ export class BufferReader { * @returns A boolean value representing the byte at the current index. */ public readBoolean(): boolean { + this.#rangeCheck(1); this.index += 1; return Boolean(this.buffer.at(this.index - 1)); } @@ -112,6 +116,7 @@ export class BufferReader { * @returns A new Buffer containing the read bytes. */ public readBytes(n: number): Buffer { + this.#rangeCheck(n); this.index += n; return Buffer.from(this.buffer.subarray(this.index - n, this.index)); } @@ -215,6 +220,7 @@ export class BufferReader { public readBufferArray(size = -1): Buffer[] { const result: Buffer[] = []; const end = size >= 0 ? this.index + size : this.buffer.length; + this.#rangeCheck(end - this.index); while (this.index < end) { const item = this.readBuffer(); result.push(item); @@ -252,6 +258,7 @@ export class BufferReader { * @returns A Buffer with the next n bytes or the remaining bytes if n is not provided or exceeds the buffer length. */ public peekBytes(n?: number): Buffer { + this.#rangeCheck(n || 0); return this.buffer.subarray(this.index, n ? 
this.index + n : undefined); } @@ -276,6 +283,7 @@ export class BufferReader { */ public readBuffer(): Buffer { const size = this.readNumber(); + this.#rangeCheck(size); return this.readBytes(size); } @@ -311,6 +319,14 @@ export class BufferReader { public getLength(): number { return this.buffer.length; } + + #rangeCheck(numBytes: number) { + if (this.index + numBytes > this.buffer.length) { + throw new Error( + `Attempted to read beyond buffer length. Start index: ${this.index}, Num bytes to read: ${numBytes}, Buffer length: ${this.buffer.length}`, + ); + } + } } /** diff --git a/yarn-project/key-store/package.json b/yarn-project/key-store/package.json index 79ce75204c3..0bf868d644d 100644 --- a/yarn-project/key-store/package.json +++ b/yarn-project/key-store/package.json @@ -29,7 +29,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/key-store/src/key_store.test.ts b/yarn-project/key-store/src/key_store.test.ts index 5c13479d406..dc8edfa7275 100644 --- a/yarn-project/key-store/src/key_store.test.ts +++ b/yarn-project/key-store/src/key_store.test.ts @@ -24,7 +24,7 @@ describe('KeyStore', () => { const { address: accountAddress } = await keyStore.addAccount(sk, partialAddress); expect(accountAddress.toString()).toMatchInlineSnapshot( - `"0x1a8a9a1d91cbb353d8df4f1bbfd0283f7fc63766f671edd9443a1270a7b2a954"`, + `"0x15565e4a5f3aff35f8eafa364cec1c11aaa84a5f7fcdf64a373614fdc8add52e"`, ); const { pkM: masterNullifierPublicKey } = await keyStore.getKeyValidationRequest( @@ -32,22 +32,22 @@ describe('KeyStore', () => { AztecAddress.random(), // Address is random because we are not interested in the app secret key here ); expect(masterNullifierPublicKey.toString()).toMatchInlineSnapshot( - 
`"0x2ef5d15dd65d29546680ab72846fb071f41cb9f2a0212215e6c560e29df4ff650ce764818364b376be92dc2f49577fe440e64a16012584f7c4ee94f7edbc323a"`, + `"0x1c088f4e4a711f236a88b55da9ddf388de0bc00d56a5ceca96cea3a5cbe75bf32db0a333ba30c36b844d9fc6d2fb0de8d10e4371f0c5baebae452d90ff366798"`, ); const masterIncomingViewingPublicKey = await keyStore.getMasterIncomingViewingPublicKey(accountAddress); expect(masterIncomingViewingPublicKey.toString()).toMatchInlineSnapshot( - `"0x1c088f4e4a711f236a88b55da9ddf388de0bc00d56a5ceca96cea3a5cbe75bf32db0a333ba30c36b844d9fc6d2fb0de8d10e4371f0c5baebae452d90ff366798"`, + `"0x232d0b445d097fbc2046012c3fc474f6a9beef97eda1d8d1f2487dbe501ee1e70e8db9a824531a14e8717dee54cbb7abfec29a88c550a49617258bd6fd858242"`, ); const masterOutgoingViewingPublicKey = await keyStore.getMasterOutgoingViewingPublicKey(accountAddress); expect(masterOutgoingViewingPublicKey.toString()).toMatchInlineSnapshot( - `"0x232d0b445d097fbc2046012c3fc474f6a9beef97eda1d8d1f2487dbe501ee1e70e8db9a824531a14e8717dee54cbb7abfec29a88c550a49617258bd6fd858242"`, + `"0x076429010fdebfa522b053267f654a4c5daf18589915d96f7e5001d63ea2033f27f915f254560c84450aa38e93c3162be52492d05b316e75f542e3b302117360"`, ); const masterTaggingPublicKey = await keyStore.getMasterTaggingPublicKey(accountAddress); expect(masterTaggingPublicKey.toString()).toMatchInlineSnapshot( - `"0x076429010fdebfa522b053267f654a4c5daf18589915d96f7e5001d63ea2033f27f915f254560c84450aa38e93c3162be52492d05b316e75f542e3b302117360"`, + `"0x07cec19d32f1cbaaacf16edc081021b696c86dff14160779373ffc77b04568e7076f25b0e7f0d02fd6433d788483e2262c1e45c5962790b40d1cd7efbd5253d3"`, ); // Arbitrary app contract address @@ -56,36 +56,36 @@ describe('KeyStore', () => { const { pkM: obtainedMasterNullifierPublicKey, skApp: appNullifierSecretKey } = await keyStore.getKeyValidationRequest(computedMasterNullifierPublicKeyHash, appAddress); expect(appNullifierSecretKey.toString()).toMatchInlineSnapshot( - 
`"0x230a44dfe7cfec7a735c89f7289c5cb5d2c3dc0bf5d3505917fd2476f67873a8"`, + `"0x0084c92262407236c992dcea10cf3406a642074cad6c6034d2990ffb073207a7"`, ); expect(obtainedMasterNullifierPublicKey).toEqual(masterNullifierPublicKey); const appIncomingViewingSecretKey = await keyStore.getAppIncomingViewingSecretKey(accountAddress, appAddress); expect(appIncomingViewingSecretKey.toString()).toMatchInlineSnapshot( - `"0x0084c92262407236c992dcea10cf3406a642074cad6c6034d2990ffb073207a7"`, + `"0x2639b26510f9d30b7e173d301b263b246b7a576186be1f44cd7c86bc06773f8a"`, ); const appOutgoingViewingSecretKey = await keyStore.getAppOutgoingViewingSecretKey(accountAddress, appAddress); expect(appOutgoingViewingSecretKey.toString()).toMatchInlineSnapshot( - `"0x2639b26510f9d30b7e173d301b263b246b7a576186be1f44cd7c86bc06773f8a"`, + `"0x13b400d2fccab28a04a4df9fe541d242e6b518d03137ef0ffa57c3d98cc56e67"`, ); // Returned accounts are as expected const accounts = await keyStore.getAccounts(); expect(accounts.toString()).toMatchInlineSnapshot( - `"0x1a8a9a1d91cbb353d8df4f1bbfd0283f7fc63766f671edd9443a1270a7b2a954"`, + `"0x15565e4a5f3aff35f8eafa364cec1c11aaa84a5f7fcdf64a373614fdc8add52e"`, ); // Manages to find master nullifer secret key for pub key const masterNullifierSecretKey = await keyStore.getMasterSecretKey(masterNullifierPublicKey); expect(masterNullifierSecretKey.toString()).toMatchInlineSnapshot( - `"0x0fde74d5e504c73b58aad420dd72590fc6004571411e7f77c45378714195a52b"`, + `"0x1f1f43082427fed511393bbabf8a471eb87af09f0e95bb740dc33e1ced1a54c1"`, ); // Manages to find master incoming viewing secret key for pub key const masterIncomingViewingSecretKey = await keyStore.getMasterSecretKey(masterIncomingViewingPublicKey); expect(masterIncomingViewingSecretKey.toString()).toMatchInlineSnapshot( - `"0x1f1f43082427fed511393bbabf8a471eb87af09f0e95bb740dc33e1ced1a54c1"`, + `"0x1d1d920024dd64e019c23de36d27aefe4d9d4d05983b99cf85bea9e85fd60020"`, ); }); @@ -98,7 +98,7 @@ describe('KeyStore', () => { const { 
address: accountAddress } = await keyStore.addAccount(sk, partialAddress); expect(accountAddress.toString()).toMatchInlineSnapshot( - `"0x1a8a9a1d91cbb353d8df4f1bbfd0283f7fc63766f671edd9443a1270a7b2a954"`, + `"0x15565e4a5f3aff35f8eafa364cec1c11aaa84a5f7fcdf64a373614fdc8add52e"`, ); // Arbitrary fixed values @@ -146,21 +146,21 @@ describe('KeyStore', () => { appAddress, ); expect(appNullifierSecretKey0.toString()).toMatchInlineSnapshot( - `"0x296e42f1039b62290372d608fcab55b00a3f96c1c8aa347b2a830639c5a12757"`, + `"0x21e3ca4bc7ae2b5e9fe343f4eec5c0aa7391857333821a4b0a1c7d4cb0055bf0"`, ); const { skApp: appNullifierSecretKey1 } = await keyStore.getKeyValidationRequest( newComputedMasterNullifierPublicKeyHashes[1], appAddress, ); expect(appNullifierSecretKey1.toString()).toMatchInlineSnapshot( - `"0x019f2a705b68683f1d86da639a543411fa779af41896c3920d0c2d5226c686dd"`, + `"0x0900aea4825d057e5bc916063a535520a7c6283740eaf218cd6961b10cba46fd"`, ); const { skApp: appNullifierSecretKey2 } = await keyStore.getKeyValidationRequest( newComputedMasterNullifierPublicKeyHashes[2], appAddress, ); expect(appNullifierSecretKey2.toString()).toMatchInlineSnapshot( - `"0x117445c8819c06b9a0889e5cce1f550e32ec6993c23f57bc9fc5cda05df520ae"`, + `"0x27ccbe41ff5f33fa78348533da9d4a79e8fea8805771e61748ea42be4202f168"`, ); expect(appNullifierSecretKey0).toEqual(computeAppNullifierSecretKey(newMasterNullifierSecretKeys[0], appAddress)); diff --git a/yarn-project/kv-store/package.json b/yarn-project/kv-store/package.json index 0fcb06fd0e3..2ca6477b149 100644 --- a/yarn-project/kv-store/package.json +++ b/yarn-project/kv-store/package.json @@ -28,7 +28,15 @@ "workerThreads": true, "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/merkle-tree/package.json b/yarn-project/merkle-tree/package.json index 8d19e74c5a5..0446f9a9d02 100644 --- 
a/yarn-project/merkle-tree/package.json +++ b/yarn-project/merkle-tree/package.json @@ -31,7 +31,15 @@ "testTimeout": 15000, "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts index aa374542a9f..75679d4904a 100644 --- a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts +++ b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts @@ -44,7 +44,7 @@ describe('IndexedTreeSnapshotBuilder', () => { describe('getSnapshot', () => { it('returns historical leaf data', async () => { - await tree.appendLeaves([Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]); + await tree.appendLeaves([Fr.random().toBuffer(), Fr.random().toBuffer(), Fr.random().toBuffer()]); await tree.commit(); const expectedLeavesAtBlock1 = await Promise.all([ tree.getLatestLeafPreimageCopy(0n, false), @@ -59,7 +59,7 @@ describe('IndexedTreeSnapshotBuilder', () => { await snapshotBuilder.snapshot(1); - await tree.appendLeaves([Buffer.from('d'), Buffer.from('e'), Buffer.from('f')]); + await tree.appendLeaves([Fr.random().toBuffer(), Fr.random().toBuffer(), Fr.random().toBuffer()]); await tree.commit(); const expectedLeavesAtBlock2 = [ tree.getLatestLeafPreimageCopy(0n, false), @@ -98,12 +98,12 @@ describe('IndexedTreeSnapshotBuilder', () => { describe('findIndexOfPreviousValue', () => { it('returns the index of the leaf with the closest value to the given value', async () => { - await tree.appendLeaves([Buffer.from('a'), Buffer.from('f'), Buffer.from('d')]); + await tree.appendLeaves([Fr.random().toBuffer(), Fr.random().toBuffer(), Fr.random().toBuffer()]); await tree.commit(); const snapshot = await snapshotBuilder.snapshot(1); const historicalPrevValue = tree.findIndexOfPreviousKey(2n, 
false); - await tree.appendLeaves([Buffer.from('c'), Buffer.from('b'), Buffer.from('e')]); + await tree.appendLeaves([Fr.random().toBuffer(), Fr.random().toBuffer(), Fr.random().toBuffer()]); await tree.commit(); expect(snapshot.findIndexOfPreviousKey(2n)).toEqual(historicalPrevValue); diff --git a/yarn-project/noir-contracts.js/package.json b/yarn-project/noir-contracts.js/package.json index 138a1771247..881226f0a93 100644 --- a/yarn-project/noir-contracts.js/package.json +++ b/yarn-project/noir-contracts.js/package.json @@ -29,7 +29,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/noir-protocol-circuits-types/package.json b/yarn-project/noir-protocol-circuits-types/package.json index 1565d182094..dab1d3a0e72 100644 --- a/yarn-project/noir-protocol-circuits-types/package.json +++ b/yarn-project/noir-protocol-circuits-types/package.json @@ -33,7 +33,15 @@ ], "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "reporters": [ diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index 0685ef195c3..b6ba9bd4097 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -572,7 +572,7 @@ export function convertSimulatedPublicSetupInputsToWitnessMap(inputs: PublicKern } /** - * Converts the inputs of the public setup circuit into a witness map + * Converts the inputs of the public app logic circuit into a witness map * @param inputs - The public kernel inputs. 
* @returns The witness map */ diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index bc8db0da20f..c276fc8a16d 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -1826,7 +1826,7 @@ export function mapStorageUpdateRequestToNoir( return { storage_slot: mapFieldToNoir(storageUpdateRequest.storageSlot), new_value: mapFieldToNoir(storageUpdateRequest.newValue), - counter: mapNumberToNoir(storageUpdateRequest.sideEffectCounter), + counter: mapNumberToNoir(storageUpdateRequest.counter), }; } /** @@ -1855,7 +1855,7 @@ export function mapStorageReadToNoir(storageRead: ContractStorageRead): StorageR return { storage_slot: mapFieldToNoir(storageRead.storageSlot), current_value: mapFieldToNoir(storageRead.currentValue), - counter: mapNumberToNoir(storageRead.sideEffectCounter), + counter: mapNumberToNoir(storageRead.counter), }; } /** diff --git a/yarn-project/p2p-bootstrap/package.json b/yarn-project/p2p-bootstrap/package.json index 86c7b7ff4c5..9e9d564c9ae 100644 --- a/yarn-project/p2p-bootstrap/package.json +++ b/yarn-project/p2p-bootstrap/package.json @@ -55,7 +55,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 30a9520f63c..3595f7a102b 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -104,7 +104,7 @@ resource "aws_ecs_task_definition" "p2p-bootstrap" { container_definitions = < { let txPool: AztecKVTxPool; beforeEach(() => { - txPool = new AztecKVTxPool(openTmpStore()); + txPool = new AztecKVTxPool(openTmpStore(), new NoopTelemetryClient()); }); 
describeTxPool(() => txPool); diff --git a/yarn-project/p2p/src/tx_pool/aztec_kv_tx_pool.ts b/yarn-project/p2p/src/tx_pool/aztec_kv_tx_pool.ts index 13729720692..f3756f83713 100644 --- a/yarn-project/p2p/src/tx_pool/aztec_kv_tx_pool.ts +++ b/yarn-project/p2p/src/tx_pool/aztec_kv_tx_pool.ts @@ -2,7 +2,9 @@ import { Tx, TxHash } from '@aztec/circuit-types'; import { type TxAddedToPoolStats } from '@aztec/circuit-types/stats'; import { type Logger, createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { TxPoolInstrumentation } from './instrumentation.js'; import { type TxPool } from './tx_pool.js'; /** @@ -18,15 +20,18 @@ export class AztecKVTxPool implements TxPool { #log: Logger; + #metrics: TxPoolInstrumentation; + /** * Class constructor for in-memory TxPool. Initiates our transaction pool as a JS Map. * @param store - A KV store. * @param log - A logger. */ - constructor(store: AztecKVStore, log = createDebugLogger('aztec:tx_pool')) { + constructor(store: AztecKVStore, telemetry: TelemetryClient, log = createDebugLogger('aztec:tx_pool')) { this.#txs = store.openMap('txs'); this.#store = store; this.#log = log; + this.#metrics = new TxPoolInstrumentation(telemetry, 'AztecKVTxPool'); } /** @@ -44,8 +49,8 @@ export class AztecKVTxPool implements TxPool { * @param txs - An array of txs to be added to the pool. * @returns Empty promise. 
*/ - public async addTxs(txs: Tx[]): Promise { - const txHashes = await Promise.all(txs.map(tx => tx.getTxHash())); + public addTxs(txs: Tx[]): Promise { + const txHashes = txs.map(tx => tx.getTxHash()); return this.#store.transaction(() => { for (const [i, tx] of txs.entries()) { const txHash = txHashes[i]; @@ -56,6 +61,8 @@ export class AztecKVTxPool implements TxPool { void this.#txs.set(txHash.toString(), tx.toBuffer()); } + + this.#metrics.recordTxs(txs); }); } @@ -69,6 +76,8 @@ export class AztecKVTxPool implements TxPool { for (const hash of txHashes) { void this.#txs.delete(hash.toString()); } + + this.#metrics.removeTxs(txHashes.length); }); } diff --git a/yarn-project/p2p/src/tx_pool/instrumentation.ts b/yarn-project/p2p/src/tx_pool/instrumentation.ts new file mode 100644 index 00000000000..099afe22522 --- /dev/null +++ b/yarn-project/p2p/src/tx_pool/instrumentation.ts @@ -0,0 +1,58 @@ +import { type Tx } from '@aztec/circuit-types'; +import { type Histogram, Metrics, type TelemetryClient, type UpDownCounter } from '@aztec/telemetry-client'; + +/** + * Instrumentation class for the TxPool. + */ +export class TxPoolInstrumentation { + /** The number of txs in the mempool */ + private txInMempool: UpDownCounter; + /** Tracks tx size */ + private txSize: Histogram; + + constructor(telemetry: TelemetryClient, name: string) { + const meter = telemetry.getMeter(name); + this.txInMempool = meter.createUpDownCounter(Metrics.MEMPOOL_TX_COUNT, { + description: 'The current number of transactions in the mempool', + }); + + this.txSize = meter.createHistogram(Metrics.MEMPOOL_TX_SIZE, { + unit: 'By', + description: 'The size of transactions in the mempool', + advice: { + explicitBucketBoundaries: [ + 5_000, // 5KB + 10_000, + 20_000, + 50_000, + 75_000, + 100_000, // 100KB + 200_000, + ], + }, + }); + } + + /** + * Updates the metrics with the new transactions. 
+ * @param txs - The transactions to record + */ + public recordTxs(txs: Tx[]) { + for (const tx of txs) { + this.txSize.record(tx.getSize()); + } + + this.txInMempool.add(txs.length); + } + + /** + * Updates the metrics by removing transactions from the mempool. + * @param count - The number of transactions to remove from the mempool + */ + public removeTxs(count = 1) { + if (count < 0) { + throw new Error('Count must be positive'); + } + this.txInMempool.add(-1 * count); + } +} diff --git a/yarn-project/p2p/src/tx_pool/memory_tx_pool.test.ts b/yarn-project/p2p/src/tx_pool/memory_tx_pool.test.ts index fb910b4755c..c4435a5613a 100644 --- a/yarn-project/p2p/src/tx_pool/memory_tx_pool.test.ts +++ b/yarn-project/p2p/src/tx_pool/memory_tx_pool.test.ts @@ -1,10 +1,12 @@ +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + import { InMemoryTxPool } from './index.js'; import { describeTxPool } from './tx_pool_test_suite.js'; describe('In-Memory TX pool', () => { let inMemoryTxPool: InMemoryTxPool; beforeEach(() => { - inMemoryTxPool = new InMemoryTxPool(); + inMemoryTxPool = new InMemoryTxPool(new NoopTelemetryClient()); }); describeTxPool(() => inMemoryTxPool); diff --git a/yarn-project/p2p/src/tx_pool/memory_tx_pool.ts b/yarn-project/p2p/src/tx_pool/memory_tx_pool.ts index 858af51370c..924f907214f 100644 --- a/yarn-project/p2p/src/tx_pool/memory_tx_pool.ts +++ b/yarn-project/p2p/src/tx_pool/memory_tx_pool.ts @@ -1,7 +1,9 @@ import { Tx, TxHash } from '@aztec/circuit-types'; import { type TxAddedToPoolStats } from '@aztec/circuit-types/stats'; import { createDebugLogger } from '@aztec/foundation/log'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { TxPoolInstrumentation } from './instrumentation.js'; import { type TxPool } from './tx_pool.js'; /** @@ -13,12 +15,15 @@ export class InMemoryTxPool implements TxPool { */ private txs: Map; + private metrics: TxPoolInstrumentation; + /** * Class constructor for in-memory TxPool. 
Initiates our transaction pool as a JS Map. * @param log - A logger. */ - constructor(private log = createDebugLogger('aztec:tx_pool')) { + constructor(telemetry: TelemetryClient, private log = createDebugLogger('aztec:tx_pool')) { this.txs = new Map(); + this.metrics = new TxPoolInstrumentation(telemetry, 'InMemoryTxPool'); } /** @@ -37,6 +42,7 @@ export class InMemoryTxPool implements TxPool { * @returns Empty promise. */ public addTxs(txs: Tx[]): Promise { + this.metrics.recordTxs(txs); for (const tx of txs) { const txHash = tx.getTxHash(); this.log.debug(`Adding tx with id ${txHash.toString()}`, { @@ -54,6 +60,7 @@ export class InMemoryTxPool implements TxPool { * @returns The number of transactions that was deleted from the pool. */ public deleteTxs(txHashes: TxHash[]): Promise { + this.metrics.removeTxs(txHashes.length); for (const txHash of txHashes) { this.txs.delete(txHash.toBigInt()); } diff --git a/yarn-project/p2p/tsconfig.json b/yarn-project/p2p/tsconfig.json index 4e0866fd521..fcbafbb11d0 100644 --- a/yarn-project/p2p/tsconfig.json +++ b/yarn-project/p2p/tsconfig.json @@ -17,6 +17,9 @@ }, { "path": "../kv-store" + }, + { + "path": "../telemetry-client" } ], "include": ["src"] diff --git a/yarn-project/package.common.json b/yarn-project/package.common.json index 6fcf1a28315..7ef660b828e 100644 --- a/yarn-project/package.common.json +++ b/yarn-project/package.common.json @@ -20,7 +20,19 @@ }, "jest": { "extensionsToTreatAsEsm": [".ts"], - "transform": { "^.+\\.tsx?$": ["@swc/jest"] }, + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } + ] + }, "moduleNameMapper": { "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" }, diff --git a/yarn-project/package.json b/yarn-project/package.json index 388c8f4d6df..f9a72eb8eea 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -52,7 +52,8 @@ "scripts", "types", "txe", - "world-state" + "world-state", + "telemetry-client" ], 
"prettier": "@aztec/foundation/prettier", "devDependencies": { diff --git a/yarn-project/protocol-contracts/package.json b/yarn-project/protocol-contracts/package.json index d05a83250fd..48007fea086 100644 --- a/yarn-project/protocol-contracts/package.json +++ b/yarn-project/protocol-contracts/package.json @@ -40,7 +40,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 85080060741..04cc3185a95 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -34,7 +34,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ @@ -57,6 +65,7 @@ "@aztec/kv-store": "workspace:^", "@aztec/noir-protocol-circuits-types": "workspace:^", "@aztec/simulator": "workspace:^", + "@aztec/telemetry-client": "workspace:^", "@aztec/world-state": "workspace:^", "@noir-lang/types": "portal:../../noir/packages/types", "commander": "^9.0.0", diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 1af8c556c48..507068e36ae 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -31,6 +31,7 @@ import { WASMSimulator, type WorldStatePublicDB, } from '@aztec/simulator'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import * as fs from 'fs/promises'; @@ -85,7 +86,7 @@ export class TestContext { logger: DebugLogger, proverCount = 4, createProver: (bbConfig: BBProverConfig) => Promise = _ => - Promise.resolve(new 
TestCircuitProver(new WASMSimulator())), + Promise.resolve(new TestCircuitProver(new NoopTelemetryClient(), new WASMSimulator())), blockNumber = 3, ) { const globalVariables = makeGlobals(blockNumber); @@ -95,6 +96,7 @@ export class TestContext { const publicWorldStateDB = mock(); const publicKernel = new RealPublicKernelCircuitSimulator(new WASMSimulator()); const actualDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + const telemetry = new NoopTelemetryClient(); const processor = new PublicProcessor( actualDb, publicExecutor, @@ -103,6 +105,7 @@ export class TestContext { Header.empty(), publicContractsDB, publicWorldStateDB, + telemetry, ); let localProver: ServerCircuitProver; @@ -112,7 +115,7 @@ export class TestContext { acvmBinaryPath: config?.expectedAcvmPath, }); if (!config) { - localProver = new TestCircuitProver(simulationProvider); + localProver = new TestCircuitProver(new NoopTelemetryClient(), simulationProvider); } else { const bbConfig: BBProverConfig = { acvmBinaryPath: config.expectedAcvmPath, @@ -124,7 +127,7 @@ export class TestContext { } const queue = new MemoryProvingQueue(); - const orchestrator = new ProvingOrchestrator(actualDb, queue); + const orchestrator = new ProvingOrchestrator(actualDb, queue, telemetry); const agent = new ProverAgent(localProver, proverCount); queue.start(); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 99526b8dfd1..c6159039a72 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -9,6 +9,7 @@ import { type TxEffect, makeEmptyProcessedTx, makePaddingProcessedTx, + mapPublicKernelToCircuitName, toTxEffect, } from '@aztec/circuit-types'; import { @@ -20,6 +21,7 @@ import { type PublicInputsAndRecursiveProof, type ServerCircuitProver, } from '@aztec/circuit-types/interfaces'; +import { type CircuitName } from 
'@aztec/circuit-types/stats'; import { AGGREGATION_OBJECT_LENGTH, AvmCircuitInputs, @@ -53,6 +55,7 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { promiseWithResolvers } from '@aztec/foundation/promise'; import { BufferReader, type Tuple } from '@aztec/foundation/serialize'; import { pushTestData } from '@aztec/foundation/testing'; +import { Attributes, type TelemetryClient, type Tracer, trackSpan, wrapCallbackInSpan } from '@aztec/telemetry-client'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { inspect } from 'util'; @@ -91,7 +94,16 @@ export class ProvingOrchestrator { private pendingProvingJobs: AbortController[] = []; private paddingTx: PaddingProcessedTx | undefined = undefined; - constructor(private db: MerkleTreeOperations, private prover: ServerCircuitProver, private initialHeader?: Header) {} + public readonly tracer: Tracer; + + constructor( + private db: MerkleTreeOperations, + private prover: ServerCircuitProver, + telemetryClient: TelemetryClient, + private initialHeader?: Header, + ) { + this.tracer = telemetryClient.getTracer('ProvingOrchestrator'); + } /** * Resets the orchestrator's cached padding tx. 
@@ -108,6 +120,10 @@ export class ProvingOrchestrator { * @param verificationKeys - The private kernel verification keys * @returns A proving ticket, containing a promise notifying of proving completion */ + @trackSpan('ProvingOrchestrator.startNewBlock', (numTxs, globalVariables) => ({ + [Attributes.BLOCK_SIZE]: numTxs, + [Attributes.BLOCK_NUMBER]: globalVariables.blockNumber.toNumber(), + })) public async startNewBlock( numTxs: number, globalVariables: GlobalVariables, @@ -193,6 +209,9 @@ export class ProvingOrchestrator { * The interface to add a simulated transaction to the scheduler * @param tx - The transaction to be proven */ + @trackSpan('ProvingOrchestrator.addNewTx', tx => ({ + [Attributes.TX_HASH]: tx.hash.toString(), + })) public async addNewTx(tx: ProcessedTx): Promise { if (!this.provingState) { throw new Error(`Invalid proving state, call startNewBlock before adding transactions`); @@ -213,6 +232,17 @@ export class ProvingOrchestrator { /** * Marks the block as full and pads it to the full power of 2 block size, no more transactions will be accepted. 
*/ + @trackSpan('ProvingOrchestrator.setBlockCompleted', function () { + if (!this.provingState) { + return {}; + } + + return { + [Attributes.BLOCK_NUMBER]: this.provingState!.globalVariables.blockNumber.toNumber(), + [Attributes.BLOCK_SIZE]: this.provingState!.totalNumTxs, + [Attributes.BLOCK_TXS_COUNT]: this.provingState!.transactionsReceived, + }; + }) public async setBlockCompleted() { if (!this.provingState) { throw new Error(`Invalid proving state, call startNewBlock before adding transactions or completing the block`); @@ -264,18 +294,26 @@ export class ProvingOrchestrator { logger.debug(`Enqueuing deferred proving for padding txs to enqueue ${txInputs.length} paddings`); this.deferredProving( provingState, - signal => - this.prover.getEmptyPrivateKernelProof( - { - // Chain id and version should not change even if the proving state does, so it's safe to use them for the padding tx - // which gets cached across multiple runs of the orchestrator with different proving states. If they were to change, - // we'd have to clear out the paddingTx here and regenerate it when they do. - chainId: unprovenPaddingTx.data.constants.txContext.chainId, - version: unprovenPaddingTx.data.constants.txContext.version, - header: unprovenPaddingTx.data.constants.historicalHeader, - }, - signal, - ), + wrapCallbackInSpan( + this.tracer, + 'ProvingOrchestrator.prover.getEmptyPrivateKernelProof', + { + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + [Attributes.PROTOCOL_CIRCUIT_NAME]: 'private-kernel-empty' as CircuitName, + }, + signal => + this.prover.getEmptyPrivateKernelProof( + { + // Chain id and version should not change even if the proving state does, so it's safe to use them for the padding tx + // which gets cached across multiple runs of the orchestrator with different proving states. If they were to change, + // we'd have to clear out the paddingTx here and regenerate it when they do. 
+ chainId: unprovenPaddingTx.data.constants.txContext.chainId, + version: unprovenPaddingTx.data.constants.txContext.version, + header: unprovenPaddingTx.data.constants.historicalHeader, + }, + signal, + ), + ), result => { logger.debug(`Completed proof for padding tx, now enqueuing ${txInputs.length} padding txs`); this.paddingTx = makePaddingProcessedTx(result); @@ -319,6 +357,13 @@ export class ProvingOrchestrator { * Performs the final tree update for the block and returns the fully proven block. * @returns The fully proven block and proof. */ + @trackSpan('ProvingOrchestrator.finaliseBlock', function () { + return { + [Attributes.BLOCK_NUMBER]: this.provingState!.globalVariables.blockNumber.toNumber(), + [Attributes.BLOCK_TXS_COUNT]: this.provingState!.transactionsReceived, + [Attributes.BLOCK_SIZE]: this.provingState!.totalNumTxs, + }; + }) public async finaliseBlock() { try { if ( @@ -496,6 +541,9 @@ export class ProvingOrchestrator { } // Updates the merkle trees for a transaction. 
The first enqueued job for a transaction + @trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (_, tx) => ({ + [Attributes.TX_HASH]: tx.hash.toString(), + })) private async prepareBaseRollupInputs( provingState: ProvingState | undefined, tx: ProcessedTx, @@ -593,7 +641,16 @@ export class ProvingOrchestrator { this.deferredProving( provingState, - signal => this.prover.getBaseRollupProof(tx.baseRollupInputs, signal), + wrapCallbackInSpan( + this.tracer, + 'ProvingOrchestrator.prover.getBaseRollupProof', + { + [Attributes.TX_HASH]: tx.processedTx.hash.toString(), + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + [Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-rollup' as CircuitName, + }, + signal => this.prover.getBaseRollupProof(tx.baseRollupInputs, signal), + ), result => { logger.debug(`Completed proof for base rollup for tx ${tx.processedTx.hash.toString()}`); validatePartialState(result.inputs.end, tx.treeSnapshots); @@ -622,7 +679,15 @@ export class ProvingOrchestrator { this.deferredProving( provingState, - signal => this.prover.getMergeRollupProof(inputs, signal), + wrapCallbackInSpan( + this.tracer, + 'ProvingOrchestrator.prover.getMergeRollupProof', + { + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + [Attributes.PROTOCOL_CIRCUIT_NAME]: 'merge-rollup' as CircuitName, + }, + signal => this.prover.getMergeRollupProof(inputs, signal), + ), result => { this.storeAndExecuteNextMergeLevel(provingState, level, index, [ result.inputs, @@ -658,7 +723,15 @@ export class ProvingOrchestrator { this.deferredProving( provingState, - signal => this.prover.getRootRollupProof(inputs, signal), + wrapCallbackInSpan( + this.tracer, + 'ProvingOrchestrator.prover.getRootRollupProof', + { + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-rollup' as CircuitName, + }, + signal => this.prover.getRootRollupProof(inputs, signal), + ), result => { provingState.rootRollupPublicInputs = result.inputs; provingState.finalAggregationObject = 
extractAggregationObject( @@ -680,7 +753,15 @@ export class ProvingOrchestrator { private enqueueBaseParityCircuit(provingState: ProvingState, inputs: BaseParityInputs, index: number) { this.deferredProving( provingState, - signal => this.prover.getBaseParityProof(inputs, signal), + wrapCallbackInSpan( + this.tracer, + 'ProvingOrchestrator.prover.getBaseParityProof', + { + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + [Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-parity' as CircuitName, + }, + signal => this.prover.getBaseParityProof(inputs, signal), + ), rootInput => { provingState.setRootParityInputs(rootInput, index); if (provingState.areRootParityInputsReady()) { @@ -701,7 +782,15 @@ export class ProvingOrchestrator { private enqueueRootParityCircuit(provingState: ProvingState | undefined, inputs: RootParityInputs) { this.deferredProving( provingState, - signal => this.prover.getRootParityProof(inputs, signal), + wrapCallbackInSpan( + this.tracer, + 'ProvingOrchestrator.prover.getRootParityProof', + { + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-parity' as CircuitName, + }, + signal => this.prover.getRootParityProof(inputs, signal), + ), async rootInput => { provingState!.finalRootParityInput = rootInput; await this.checkAndEnqueueRootRollup(provingState); @@ -770,26 +859,34 @@ export class ProvingOrchestrator { if (publicFunction.vmRequest) { // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined. // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit. 
- const doAvmProving = async (signal: AbortSignal) => { - const inputs: AvmCircuitInputs = new AvmCircuitInputs( - publicFunction.vmRequest!.functionName, - publicFunction.vmRequest!.bytecode, - publicFunction.vmRequest!.calldata, - publicFunction.vmRequest!.kernelRequest.inputs.publicCall.callStackItem.publicInputs, - publicFunction.vmRequest!.avmHints, - ); - try { - return await this.prover.getAvmProof(inputs, signal); - } catch (err) { - if (process.env.AVM_PROVING_STRICT) { - throw err; - } else { - logger.warn(`Error thrown when proving AVM circuit: ${err}`); - logger.warn(`AVM_PROVING_STRICT is off, faking AVM proof and carrying on...`); - return { proof: makeEmptyProof(), verificationKey: VerificationKeyData.makeFake() }; + const doAvmProving = wrapCallbackInSpan( + this.tracer, + 'ProvingOrchestrator.prover.getAvmProof', + { + [Attributes.TX_HASH]: txProvingState.processedTx.hash.toString(), + [Attributes.APP_CIRCUIT_NAME]: publicFunction.vmRequest!.functionName, + }, + async (signal: AbortSignal) => { + const inputs: AvmCircuitInputs = new AvmCircuitInputs( + publicFunction.vmRequest!.functionName, + publicFunction.vmRequest!.bytecode, + publicFunction.vmRequest!.calldata, + publicFunction.vmRequest!.kernelRequest.inputs.publicCall.callStackItem.publicInputs, + publicFunction.vmRequest!.avmHints, + ); + try { + return await this.prover.getAvmProof(inputs, signal); + } catch (err) { + if (process.env.AVM_PROVING_STRICT) { + throw err; + } else { + logger.warn(`Error thrown when proving AVM circuit: ${err}`); + logger.warn(`AVM_PROVING_STRICT is off, faking AVM proof and carrying on...`); + return { proof: makeEmptyProof(), verificationKey: VerificationKeyData.makeFake() }; + } } - } - }; + }, + ); this.deferredProving(provingState, doAvmProving, proofAndVk => { logger.debug(`Proven VM for function index ${functionIndex} of tx index ${txIndex}`); this.checkAndEnqueuePublicKernel(provingState, txIndex, functionIndex, proofAndVk.proof); @@ -835,13 +932,25 @@ 
export class ProvingOrchestrator { this.deferredProving( provingState, - (signal): Promise> => { - if (request.type === PublicKernelType.TAIL) { - return this.prover.getPublicTailProof(request, signal); - } else { - return this.prover.getPublicKernelProof(request, signal); - } - }, + wrapCallbackInSpan( + this.tracer, + request.type === PublicKernelType.TAIL + ? 'ProvingOrchestrator.prover.getPublicTailProof' + : 'ProvingOrchestrator.prover.getPublicKernelProof', + { + [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', + [Attributes.PROTOCOL_CIRCUIT_NAME]: mapPublicKernelToCircuitName(request.type), + }, + ( + signal, + ): Promise> => { + if (request.type === PublicKernelType.TAIL) { + return this.prover.getPublicTailProof(request, signal); + } else { + return this.prover.getPublicKernelProof(request, signal); + } + }, + ), result => { const nextKernelRequest = txProvingState.getNextPublicKernelFromKernelProof( functionIndex, @@ -880,5 +989,9 @@ function extractAggregationObject(proof: Proof, numPublicInputs: number): Fr[] { Fr.SIZE_IN_BYTES * (numPublicInputs - AGGREGATION_OBJECT_LENGTH), Fr.SIZE_IN_BYTES * numPublicInputs, ); + // TODO(#7159): Remove the following workaround + if (buffer.length === 0) { + return Array.from({ length: AGGREGATION_OBJECT_LENGTH }, () => Fr.ZERO); + } return BufferReader.asReader(buffer).readArray(AGGREGATION_OBJECT_LENGTH, Fr); } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts index 2c6a6b52118..d53cb3aff11 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts @@ -2,6 +2,7 @@ import { PROVING_STATUS, type ServerCircuitProver } from '@aztec/circuit-types'; import { getMockVerificationKeys } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { WASMSimulator } from 
'@aztec/simulator'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { jest } from '@jest/globals'; @@ -28,8 +29,8 @@ describe('prover/orchestrator/failures', () => { let mockProver: ServerCircuitProver; beforeEach(() => { - mockProver = new TestCircuitProver(new WASMSimulator()); - orchestrator = new ProvingOrchestrator(context.actualDb, mockProver); + mockProver = new TestCircuitProver(new NoopTelemetryClient(), new WASMSimulator()); + orchestrator = new ProvingOrchestrator(context.actualDb, mockProver, new NoopTelemetryClient()); }); it.each([ diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts index 3e68baee196..5814ae93b20 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts @@ -10,6 +10,7 @@ import { range } from '@aztec/foundation/array'; import { createDebugLogger } from '@aztec/foundation/log'; import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise'; import { sleep } from '@aztec/foundation/sleep'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { jest } from '@jest/globals'; @@ -141,8 +142,8 @@ describe('prover/orchestrator/lifecycle', () => { }, 60000); it('cancels proving requests', async () => { - const prover: ServerCircuitProver = new TestCircuitProver(); - const orchestrator = new ProvingOrchestrator(context.actualDb, prover); + const prover: ServerCircuitProver = new TestCircuitProver(new NoopTelemetryClient()); + const orchestrator = new ProvingOrchestrator(context.actualDb, prover, new NoopTelemetryClient()); const spy = jest.spyOn(prover, 'getBaseParityProof'); const deferredPromises: PromiseWithResolvers[] = []; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts 
b/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts index 07158f9aeec..e139b16d18f 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts @@ -11,6 +11,7 @@ import { makeGlobalVariables, makeRootParityInput } from '@aztec/circuits.js/tes import { promiseWithResolvers } from '@aztec/foundation/promise'; import { sleep } from '@aztec/foundation/sleep'; import { openTmpStore } from '@aztec/kv-store/utils'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -25,7 +26,7 @@ describe('prover/orchestrator', () => { beforeEach(async () => { actualDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); mockProver = mock(); - orchestrator = new ProvingOrchestrator(actualDb, mockProver); + orchestrator = new ProvingOrchestrator(actualDb, mockProver, new NoopTelemetryClient()); }); it('calls root parity circuit only when ready', async () => { diff --git a/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts index 0f41135091f..2bc202a947b 100644 --- a/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts @@ -1,6 +1,7 @@ import { BBNativeRollupProver, type BBProverConfig } from '@aztec/bb-prover'; import { makePaddingProcessedTx } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { TestContext } from '../mocks/test_context.js'; import { buildBaseRollupInput } from '../orchestrator/block-building-helpers.js'; @@ -13,7 +14,7 @@ describe('prover/bb_prover/base-rollup', () => { beforeAll(async () => { const 
buildProver = async (bbConfig: BBProverConfig) => { - prover = await BBNativeRollupProver.new(bbConfig); + prover = await BBNativeRollupProver.new(bbConfig, new NoopTelemetryClient()); return prover; }; context = await TestContext.new(logger, 1, buildProver); diff --git a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts index f7e6ad99910..5b6791a1e58 100644 --- a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts @@ -4,6 +4,7 @@ import { Fr, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, getMockVerificationKeys } from import { makeTuple } from '@aztec/foundation/array'; import { times } from '@aztec/foundation/collection'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { TestContext } from '../mocks/test_context.js'; @@ -14,7 +15,7 @@ describe('prover/bb_prover/full-rollup', () => { beforeAll(async () => { const buildProver = async (bbConfig: BBProverConfig) => { - prover = await BBNativeRollupProver.new(bbConfig); + prover = await BBNativeRollupProver.new(bbConfig, new NoopTelemetryClient()); return prover; }; logger = createDebugLogger('aztec:bb-prover-full-rollup'); diff --git a/yarn-project/prover-client/src/test/bb_prover_parity.test.ts b/yarn-project/prover-client/src/test/bb_prover_parity.test.ts index 595723e49db..b43f1c8aafd 100644 --- a/yarn-project/prover-client/src/test/bb_prover_parity.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_parity.test.ts @@ -15,6 +15,7 @@ import { makeTuple } from '@aztec/foundation/array'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { type Tuple } from '@aztec/foundation/serialize'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { TestContext } 
from '../mocks/test_context.js'; @@ -27,7 +28,7 @@ describe('prover/bb_prover/parity', () => { beforeAll(async () => { const buildProver = async (bbConfig: BBProverConfig) => { bbConfig.circuitFilter = ['BaseParityArtifact', 'RootParityArtifact']; - bbProver = await BBNativeRollupProver.new(bbConfig); + bbProver = await BBNativeRollupProver.new(bbConfig, new NoopTelemetryClient()); return bbProver; }; context = await TestContext.new(logger, 1, buildProver); diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index 6008cfe9db5..09392412912 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -9,6 +9,7 @@ import { } from '@aztec/circuit-types/interfaces'; import { type Fr, type GlobalVariables, type Header, type VerificationKeys } from '@aztec/circuits.js'; import { NativeACVMSimulator } from '@aztec/simulator'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import { type WorldStateSynchronizer } from '@aztec/world-state'; import { type ProverClientConfig } from '../config.js'; @@ -28,11 +29,17 @@ export class TxProver implements ProverClient { private config: ProverClientConfig, private worldStateSynchronizer: WorldStateSynchronizer, private vks: VerificationKeys, + private telemetry: TelemetryClient, private agent?: ProverAgent, initialHeader?: Header, ) { this.queue = new MemoryProvingQueue(config.proverJobTimeoutMs, config.proverJobPollIntervalMs); - this.orchestrator = new ProvingOrchestrator(worldStateSynchronizer.getLatest(), this.queue, initialHeader); + this.orchestrator = new ProvingOrchestrator( + worldStateSynchronizer.getLatest(), + this.queue, + telemetry, + initialHeader, + ); } async updateProverConfig(config: Partial): Promise { @@ -43,7 +50,7 @@ export class TxProver implements ProverClient { } if (newConfig.realProofs !== this.config.realProofs && this.agent) { - const circuitProver = 
await TxProver.buildCircuitProver(newConfig); + const circuitProver = await TxProver.buildCircuitProver(newConfig, this.telemetry); this.agent.setCircuitProver(circuitProver); } @@ -95,31 +102,35 @@ export class TxProver implements ProverClient { config: ProverClientConfig, vks: VerificationKeys, worldStateSynchronizer: WorldStateSynchronizer, + telemetry: TelemetryClient, initialHeader?: Header, ) { const agent = config.proverAgentEnabled ? new ProverAgent( - await TxProver.buildCircuitProver(config), + await TxProver.buildCircuitProver(config, telemetry), config.proverAgentConcurrency, config.proverAgentPollInterval, ) : undefined; - const prover = new TxProver(config, worldStateSynchronizer, vks, agent, initialHeader); + const prover = new TxProver(config, worldStateSynchronizer, vks, telemetry, agent, initialHeader); await prover.start(); return prover; } - private static async buildCircuitProver(config: ProverClientConfig): Promise { + private static async buildCircuitProver( + config: ProverClientConfig, + telemetry: TelemetryClient, + ): Promise { if (config.realProofs) { - return await BBNativeRollupProver.new(config); + return await BBNativeRollupProver.new(config, telemetry); } const simulationProvider = config.acvmBinaryPath ? 
new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath) : undefined; - return new TestCircuitProver(simulationProvider); + return new TestCircuitProver(telemetry, simulationProvider); } /** diff --git a/yarn-project/prover-client/tsconfig.json b/yarn-project/prover-client/tsconfig.json index 5f4666ebf03..9a0e67ac6c2 100644 --- a/yarn-project/prover-client/tsconfig.json +++ b/yarn-project/prover-client/tsconfig.json @@ -27,6 +27,9 @@ { "path": "../simulator" }, + { + "path": "../telemetry-client" + }, { "path": "../world-state" } diff --git a/yarn-project/pxe/package.json b/yarn-project/pxe/package.json index 9d3e04fee0c..588fb9fec9b 100644 --- a/yarn-project/pxe/package.json +++ b/yarn-project/pxe/package.json @@ -32,7 +32,15 @@ "workerThreads": true, "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/pxe/src/database/deferred_note_dao.test.ts b/yarn-project/pxe/src/database/deferred_note_dao.test.ts index d3c1e5d520b..efe57f5a681 100644 --- a/yarn-project/pxe/src/database/deferred_note_dao.test.ts +++ b/yarn-project/pxe/src/database/deferred_note_dao.test.ts @@ -1,5 +1,6 @@ import { Note, randomTxHash } from '@aztec/circuit-types'; import { AztecAddress, Fr, Point } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { randomInt } from '@aztec/foundation/crypto'; import { DeferredNoteDao } from './deferred_note_dao.js'; @@ -10,7 +11,7 @@ export const randomDeferredNoteDao = ({ contractAddress = AztecAddress.random(), txHash = randomTxHash(), storageSlot = Fr.random(), - noteTypeId = Fr.random(), + noteTypeId = NoteSelector.random(), newNoteHashes = [Fr.random(), Fr.random()], dataStartIndexForTx = randomInt(100), }: Partial = {}) => { diff --git a/yarn-project/pxe/src/database/deferred_note_dao.ts b/yarn-project/pxe/src/database/deferred_note_dao.ts 
index 6e73db1e237..d1d0c551209 100644 --- a/yarn-project/pxe/src/database/deferred_note_dao.ts +++ b/yarn-project/pxe/src/database/deferred_note_dao.ts @@ -1,5 +1,6 @@ import { Note, TxHash } from '@aztec/circuit-types'; import { AztecAddress, Fr, Point, type PublicKey, Vector } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; /** @@ -18,7 +19,7 @@ export class DeferredNoteDao { /** The specific storage location of the note on the contract. */ public storageSlot: Fr, /** The type ID of the note on the contract. */ - public noteTypeId: Fr, + public noteTypeId: NoteSelector, /** The hash of the tx the note was created in. Equal to the first nullifier */ public txHash: TxHash, /** New note hashes in this transaction, one of which belongs to this note */ @@ -46,7 +47,7 @@ export class DeferredNoteDao { reader.readObject(Note), reader.readObject(AztecAddress), reader.readObject(Fr), - reader.readObject(Fr), + reader.readObject(NoteSelector), reader.readObject(TxHash), reader.readVector(Fr), reader.readNumber(), diff --git a/yarn-project/pxe/src/database/incoming_note_dao.test.ts b/yarn-project/pxe/src/database/incoming_note_dao.test.ts index ae8d562a381..f20e957fc1b 100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.test.ts +++ b/yarn-project/pxe/src/database/incoming_note_dao.test.ts @@ -1,5 +1,6 @@ import { Note, randomTxHash } from '@aztec/circuit-types'; import { AztecAddress, Fr, Point } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { IncomingNoteDao } from './incoming_note_dao.js'; @@ -8,7 +9,7 @@ export const randomIncomingNoteDao = ({ contractAddress = AztecAddress.random(), txHash = randomTxHash(), storageSlot = Fr.random(), - noteTypeId = Fr.random(), + noteTypeId = NoteSelector.random(), nonce = Fr.random(), innerNoteHash = Fr.random(), siloedNullifier = Fr.random(), diff --git 
a/yarn-project/pxe/src/database/incoming_note_dao.ts b/yarn-project/pxe/src/database/incoming_note_dao.ts index 6db39e1b455..0a128a74259 100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.ts +++ b/yarn-project/pxe/src/database/incoming_note_dao.ts @@ -1,5 +1,6 @@ import { Note, TxHash } from '@aztec/circuit-types'; import { AztecAddress, Fr, Point, type PublicKey } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { type NoteData } from '@aztec/simulator'; @@ -16,7 +17,7 @@ export class IncomingNoteDao implements NoteData { /** The specific storage location of the note on the contract. */ public storageSlot: Fr, /** The note type identifier for the contract. */ - public noteTypeId: Fr, + public noteTypeId: NoteSelector, /** The hash of the tx the note was created in. */ public txHash: TxHash, /** The nonce of the note. 
*/ @@ -57,8 +58,8 @@ export class IncomingNoteDao implements NoteData { const note = Note.fromBuffer(reader); const contractAddress = AztecAddress.fromBuffer(reader); const storageSlot = Fr.fromBuffer(reader); - const noteTypeId = Fr.fromBuffer(reader); - const txHash = new TxHash(reader.readBytes(TxHash.SIZE)); + const noteTypeId = reader.readObject(NoteSelector); + const txHash = reader.readObject(TxHash); const nonce = Fr.fromBuffer(reader); const innerNoteHash = Fr.fromBuffer(reader); const siloedNullifier = Fr.fromBuffer(reader); diff --git a/yarn-project/pxe/src/database/outgoing_note_dao.test.ts b/yarn-project/pxe/src/database/outgoing_note_dao.test.ts index 166a5e9b51b..9e7241760ff 100644 --- a/yarn-project/pxe/src/database/outgoing_note_dao.test.ts +++ b/yarn-project/pxe/src/database/outgoing_note_dao.test.ts @@ -1,5 +1,6 @@ import { Note, randomTxHash } from '@aztec/circuit-types'; import { AztecAddress, Fr, Point } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { OutgoingNoteDao } from './outgoing_note_dao.js'; @@ -8,7 +9,7 @@ export const randomOutgoingNoteDao = ({ contractAddress = AztecAddress.random(), txHash = randomTxHash(), storageSlot = Fr.random(), - noteTypeId = Fr.random(), + noteTypeId = NoteSelector.random(), nonce = Fr.random(), innerNoteHash = Fr.random(), index = Fr.random().toBigInt(), diff --git a/yarn-project/pxe/src/database/outgoing_note_dao.ts b/yarn-project/pxe/src/database/outgoing_note_dao.ts index e7e2b8c263d..03075f9f7df 100644 --- a/yarn-project/pxe/src/database/outgoing_note_dao.ts +++ b/yarn-project/pxe/src/database/outgoing_note_dao.ts @@ -1,5 +1,6 @@ import { Note, TxHash } from '@aztec/circuit-types'; import { AztecAddress, Fr, Point, type PublicKey } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -15,7 
+16,7 @@ export class OutgoingNoteDao { /** The specific storage location of the note on the contract. */ public storageSlot: Fr, /** The note type identifier for the contract. */ - public noteTypeId: Fr, + public noteTypeId: NoteSelector, /** The hash of the tx the note was created in. */ public txHash: TxHash, /** The nonce of the note. */ @@ -50,7 +51,7 @@ export class OutgoingNoteDao { const note = Note.fromBuffer(reader); const contractAddress = AztecAddress.fromBuffer(reader); const storageSlot = Fr.fromBuffer(reader); - const noteTypeId = Fr.fromBuffer(reader); + const noteTypeId = reader.readObject(NoteSelector); const txHash = new TxHash(reader.readBytes(TxHash.SIZE)); const nonce = Fr.fromBuffer(reader); const innerNoteHash = Fr.fromBuffer(reader); diff --git a/yarn-project/pxe/src/index.ts b/yarn-project/pxe/src/index.ts index 7c62b24d3ff..86b3f1205e7 100644 --- a/yarn-project/pxe/src/index.ts +++ b/yarn-project/pxe/src/index.ts @@ -11,3 +11,4 @@ export * from '@aztec/foundation/aztec-address'; export * from '@aztec/key-store'; export * from './database/index.js'; export { ContractDataOracle } from './contract_data_oracle/index.js'; +export { PrivateFunctionsTree } from './contract_data_oracle/private_functions_tree.js'; diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts index e96547fbbd7..e60353cd405 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts @@ -21,6 +21,7 @@ import { makeRecursiveProof, } from '@aztec/circuits.js'; import { makeTxRequest } from '@aztec/circuits.js/testing'; +import { NoteSelector } from '@aztec/foundation/abi'; import { makeTuple } from '@aztec/foundation/array'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; @@ -45,7 +46,7 @@ describe('Kernel Prover', () => { .map(() => ({ note: new Note([Fr.random(), 
Fr.random(), Fr.random()]), storageSlot: Fr.random(), - noteTypeId: Fr.random(), + noteTypeId: NoteSelector.random(), owner: { x: Fr.random(), y: Fr.random() }, })); diff --git a/yarn-project/pxe/src/note_processor/note_processor.ts b/yarn-project/pxe/src/note_processor/note_processor.ts index 61b1a820ba0..859309439d0 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.ts @@ -151,18 +151,17 @@ export class NoteProcessor { const outgoingTaggedNote = TaggedLog.decryptAsOutgoing(log.data, ovskM)!; if (incomingTaggedNote || outgoingTaggedNote) { - // TODO(#7053): Re-enable this check - // if ( - // incomingTaggedNote && - // outgoingTaggedNote && - // !incomingTaggedNote.payload.equals(outgoingTaggedNote.payload) - // ) { - // throw new Error( - // `Incoming and outgoing note payloads do not match. Incoming: ${JSON.stringify( - // incomingTaggedNote.payload, - // )}, Outgoing: ${JSON.stringify(outgoingTaggedNote.payload)}`, - // ); - // } + if ( + incomingTaggedNote && + outgoingTaggedNote && + !incomingTaggedNote.payload.equals(outgoingTaggedNote.payload) + ) { + throw new Error( + `Incoming and outgoing note payloads do not match. 
Incoming: ${JSON.stringify( + incomingTaggedNote.payload, + )}, Outgoing: ${JSON.stringify(outgoingTaggedNote.payload)}`, + ); + } const payload = incomingTaggedNote?.payload || outgoingTaggedNote?.payload; diff --git a/yarn-project/pxe/src/pxe_http/pxe_http_server.ts b/yarn-project/pxe/src/pxe_http/pxe_http_server.ts index b8e2500c57f..f429337c763 100644 --- a/yarn-project/pxe/src/pxe_http/pxe_http_server.ts +++ b/yarn-project/pxe/src/pxe_http/pxe_http_server.ts @@ -18,6 +18,7 @@ import { UnencryptedL2BlockL2Logs, } from '@aztec/circuit-types'; import { FunctionSelector } from '@aztec/circuits.js'; +import { NoteSelector } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr, GrumpkinScalar, Point } from '@aztec/foundation/fields'; @@ -49,6 +50,7 @@ export function createPXERpcServer(pxeService: PXE): JsonRpcServer { L2Block, TxEffect, LogId, + NoteSelector, }, { SimulatedTx, Tx, TxReceipt, EncryptedNoteL2BlockL2Logs, UnencryptedL2BlockL2Logs, NullifierMembershipWitness }, ['start', 'stop'], diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 5422eea4f00..cf351c5bf9b 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -35,13 +35,7 @@ import { getContractClassFromArtifact, } from '@aztec/circuits.js'; import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; -import { - type ContractArtifact, - type DecodedReturn, - EventSelector, - FunctionSelector, - encodeArguments, -} from '@aztec/foundation/abi'; +import { type ContractArtifact, type DecodedReturn, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; import { type Fq, Fr, type Point } from '@aztec/foundation/fields'; import { SerialQueue } from '@aztec/foundation/fifo'; import { type DebugLogger, createDebugLogger } from 
'@aztec/foundation/log'; @@ -859,7 +853,7 @@ export class PXEService implements PXE { if (visibleEvent.payload === undefined) { return undefined; } - if (!EventSelector.fromField(visibleEvent.payload.eventTypeId).equals(eventMetadata.eventSelector)) { + if (!visibleEvent.payload.eventTypeId.equals(eventMetadata.eventSelector)) { return undefined; } if (visibleEvent.payload.event.items.length !== eventMetadata.fieldNames.length) { diff --git a/yarn-project/scripts/package.json b/yarn-project/scripts/package.json index e7a327778e6..0f5849a2ab2 100644 --- a/yarn-project/scripts/package.json +++ b/yarn-project/scripts/package.json @@ -59,7 +59,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index a8479023370..96574ecbb1b 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -35,6 +35,7 @@ "@aztec/p2p": "workspace:^", "@aztec/protocol-contracts": "workspace:^", "@aztec/simulator": "workspace:^", + "@aztec/telemetry-client": "workspace:^", "@aztec/types": "workspace:^", "@aztec/world-state": "workspace:^", "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js", @@ -78,7 +79,15 @@ ], "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "moduleNameMapper": { diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index dae754f04a4..5250d962aca 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -2,6 +2,7 @@ import { type L1ToL2MessageSource, type L2BlockSource } from '@aztec/circuit-typ 
import { type BlockProver } from '@aztec/circuit-types/interfaces'; import { type P2P } from '@aztec/p2p'; import { PublicProcessorFactory, type SimulationProvider } from '@aztec/simulator'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import { type ContractDataSource } from '@aztec/types/contracts'; import { type WorldStateSynchronizer } from '@aztec/world-state'; @@ -38,12 +39,18 @@ export class SequencerClient { l1ToL2MessageSource: L1ToL2MessageSource, prover: BlockProver, simulationProvider: SimulationProvider, + telemetryClient: TelemetryClient, ) { const publisher = getL1Publisher(config); const globalsBuilder = getGlobalVariableBuilder(config); const merkleTreeDb = worldStateSynchronizer.getLatest(); - const publicProcessorFactory = new PublicProcessorFactory(merkleTreeDb, contractDataSource, simulationProvider); + const publicProcessorFactory = new PublicProcessorFactory( + merkleTreeDb, + contractDataSource, + simulationProvider, + telemetryClient, + ); const sequencer = new Sequencer( publisher, @@ -55,6 +62,7 @@ export class SequencerClient { l1ToL2MessageSource, publicProcessorFactory, new TxValidatorFactory(merkleTreeDb, contractDataSource, !!config.enforceFees), + telemetryClient, config, ); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 4292e85e838..a0bdf943af3 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -27,6 +27,7 @@ import { randomBytes } from '@aztec/foundation/crypto'; import { type Writeable } from '@aztec/foundation/types'; import { type P2P, P2PClientState } from '@aztec/p2p'; import { type PublicProcessor, type PublicProcessorFactory } from '@aztec/simulator'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type ContractDataSource } from '@aztec/types/contracts'; import { type 
MerkleTreeOperations, WorldStateRunningState, type WorldStateSynchronizer } from '@aztec/world-state'; @@ -115,6 +116,7 @@ describe('sequencer', () => { l1ToL2MessageSource, publicProcessorFactory, new TxValidatorFactory(merkleTreeOps, contractSource, false), + new NoopTelemetryClient(), ); }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index aaa1831c190..0a441453780 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -13,13 +13,14 @@ import { PROVING_STATUS, } from '@aztec/circuit-types/interfaces'; import { type L2BlockBuiltStats } from '@aztec/circuit-types/stats'; -import { AztecAddress, EthAddress, type Proof } from '@aztec/circuits.js'; +import { AztecAddress, EthAddress, type GlobalVariables, type Header, type Proof } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; import { Timer, elapsed } from '@aztec/foundation/timer'; import { type P2P } from '@aztec/p2p'; import { type PublicProcessorFactory } from '@aztec/simulator'; +import { Attributes, type TelemetryClient, type Tracer, trackSpan } from '@aztec/telemetry-client'; import { type WorldStateStatus, type WorldStateSynchronizer } from '@aztec/world-state'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; @@ -50,6 +51,8 @@ export class Sequencer { private allowedInTeardown: AllowedElement[] = []; private maxBlockSizeInBytes: number = 1024 * 1024; + public readonly tracer: Tracer; + constructor( private publisher: L1Publisher, private globalsBuilder: GlobalVariableBuilder, @@ -60,10 +63,12 @@ export class Sequencer { private l1ToL2MessageSource: L1ToL2MessageSource, private publicProcessorFactory: PublicProcessorFactory, private txValidatorFactory: 
TxValidatorFactory, + telemetry: TelemetryClient, config: SequencerConfig = {}, private log = createDebugLogger('aztec:sequencer'), ) { this.updateConfig(config); + this.tracer = telemetry.getTracer('Sequencer'); this.log.verbose(`Initialized sequencer with ${this.minTxsPerBLock}-${this.maxTxsPerBlock} txs per block.`); } @@ -174,7 +179,6 @@ export class Sequencer { return; } - const workTimer = new Timer(); this.state = SequencerState.WAITING_FOR_TXS; // Get txs to build the new block @@ -184,19 +188,6 @@ export class Sequencer { } this.log.debug(`Retrieved ${pendingTxs.length} txs from P2P pool`); - /** - * We'll call this function before running expensive operations to avoid wasted work. - */ - const assertBlockHeight = async () => { - const currentBlockNumber = await this.l2BlockSource.getBlockNumber(); - if (currentBlockNumber + 1 !== newBlockNumber) { - throw new Error('New block was emitted while building block'); - } - if (!(await this.publisher.isItMyTurnToSubmit(newBlockNumber))) { - throw new Error(`Not this sequencer turn to submit block`); - } - }; - const newGlobalVariables = await this.globalsBuilder.buildGlobalVariables( new Fr(newBlockNumber), this._coinbase, @@ -220,72 +211,7 @@ export class Sequencer { return; } - this.log.info(`Building block ${newBlockNumber} with ${validTxs.length} transactions`); - this.state = SequencerState.CREATING_BLOCK; - - // Get l1 to l2 messages from the contract - this.log.debug('Requesting L1 to L2 messages from contract'); - const l1ToL2Messages = await this.l1ToL2MessageSource.getL1ToL2Messages(BigInt(newBlockNumber)); - this.log.verbose(`Retrieved ${l1ToL2Messages.length} L1 to L2 messages for block ${newBlockNumber}`); - - // We create a fresh processor each time to reset any cached state (eg storage writes) - const processor = await this.publicProcessorFactory.create(historicalHeader, newGlobalVariables); - - const blockBuildingTimer = new Timer(); - - // We must initialise the block to be a power of 2 in size 
- const numRealTxs = validTxs.length; - const pow2 = Math.log2(numRealTxs); - // TODO turn this back into a Math.ceil once we can pad blocks to the next-power-of-2 with empty txs - const totalTxs = 2 ** Math.ceil(pow2); - const blockSize = Math.max(2, totalTxs); - const blockTicket = await this.prover.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); - - const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => - processor.process(validTxs, blockSize, this.prover, this.txValidatorFactory.validatorForProcessedTxs()), - ); - if (failedTxs.length > 0) { - const failedTxData = failedTxs.map(fail => fail.tx); - this.log.debug(`Dropping failed txs ${Tx.getHashes(failedTxData).join(', ')}`); - await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); - } - - if (processedTxs.length === 0) { - this.log.verbose('No txs processed correctly to build block. Exiting'); - this.prover.cancelBlock(); - return; - } - - await assertBlockHeight(); - - // All real transactions have been added, set the block as full and complete the proving. - await this.prover.setBlockCompleted(); - - // Here we are now waiting for the block to be proven. - // TODO(@PhilWindle) We should probably periodically check for things like another - // block being published before ours instead of just waiting on our block - const result = await blockTicket.provingPromise; - if (result.status === PROVING_STATUS.FAILURE) { - throw new Error(`Block proving failed, reason: ${result.reason}`); - } - - await assertBlockHeight(); - - // Block is proven, now finalise and publish! 
- const { block, aggregationObject, proof } = await this.prover.finaliseBlock(); - - await assertBlockHeight(); - - this.log.verbose(`Assembled block ${block.number}`, { - eventName: 'l2-block-built', - duration: workTimer.ms(), - publicProcessDuration: publicProcessorDuration, - rollupCircuitsDuration: blockBuildingTimer.ms(), - ...block.getStats(), - } satisfies L2BlockBuiltStats); - - await this.publishL2Block(block, aggregationObject, proof); - this.log.info(`Submitted rollup block ${block.number} with ${processedTxs.length} transactions`); + await this.buildBlockAndPublish(validTxs, newGlobalVariables, historicalHeader); } catch (err) { if (BlockProofError.isBlockProofError(err)) { const txHashes = err.txHashes.filter(h => !h.isZero()); @@ -299,10 +225,100 @@ export class Sequencer { } } + @trackSpan('Sequencer.buildBlockAndPublish', (_validTxs, newGlobalVariables, _historicalHeader) => ({ + [Attributes.BLOCK_NUMBER]: newGlobalVariables.blockNumber.toNumber(), + })) + private async buildBlockAndPublish( + validTxs: Tx[], + newGlobalVariables: GlobalVariables, + historicalHeader: Header | undefined, + ): Promise { + const workTimer = new Timer(); + this.state = SequencerState.CREATING_BLOCK; + this.log.info(`Building block ${newGlobalVariables.blockNumber.toNumber()} with ${validTxs.length} transactions`); + + const assertBlockHeight = async () => { + const currentBlockNumber = await this.l2BlockSource.getBlockNumber(); + if (currentBlockNumber + 1 !== newGlobalVariables.blockNumber.toNumber()) { + throw new Error('New block was emitted while building block'); + } + if (!(await this.publisher.isItMyTurnToSubmit(newGlobalVariables.blockNumber.toNumber()))) { + throw new Error(`Not this sequencer turn to submit block`); + } + }; + + // Get l1 to l2 messages from the contract + this.log.debug('Requesting L1 to L2 messages from contract'); + const l1ToL2Messages = await this.l1ToL2MessageSource.getL1ToL2Messages(newGlobalVariables.blockNumber.toBigInt()); + 
this.log.verbose( + `Retrieved ${l1ToL2Messages.length} L1 to L2 messages for block ${newGlobalVariables.blockNumber.toNumber()}`, + ); + + // We create a fresh processor each time to reset any cached state (eg storage writes) + const processor = await this.publicProcessorFactory.create(historicalHeader, newGlobalVariables); + + const numRealTxs = validTxs.length; + const pow2 = Math.log2(numRealTxs); + const totalTxs = 2 ** Math.ceil(pow2); + const blockSize = Math.max(2, totalTxs); + + const blockBuildingTimer = new Timer(); + const blockTicket = await this.prover.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); + + const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => + processor.process(validTxs, blockSize, this.prover, this.txValidatorFactory.validatorForProcessedTxs()), + ); + if (failedTxs.length > 0) { + const failedTxData = failedTxs.map(fail => fail.tx); + this.log.debug(`Dropping failed txs ${Tx.getHashes(failedTxData).join(', ')}`); + await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); + } + + if (processedTxs.length === 0) { + this.log.verbose('No txs processed correctly to build block. Exiting'); + this.prover.cancelBlock(); + return; + } + + await assertBlockHeight(); + + // All real transactions have been added, set the block as full and complete the proving. + await this.prover.setBlockCompleted(); + + // Here we are now waiting for the block to be proven. + // TODO(@PhilWindle) We should probably periodically check for things like another + // block being published before ours instead of just waiting on our block + const result = await blockTicket.provingPromise; + if (result.status === PROVING_STATUS.FAILURE) { + throw new Error(`Block proving failed, reason: ${result.reason}`); + } + + await assertBlockHeight(); + + // Block is proven, now finalise and publish! 
+ const { block, aggregationObject, proof } = await this.prover.finaliseBlock(); + + await assertBlockHeight(); + + this.log.verbose(`Assembled block ${block.number}`, { + eventName: 'l2-block-built', + duration: workTimer.ms(), + publicProcessDuration: publicProcessorDuration, + rollupCircuitsDuration: blockBuildingTimer.ms(), + ...block.getStats(), + } satisfies L2BlockBuiltStats); + + await this.publishL2Block(block, aggregationObject, proof); + this.log.info(`Submitted rollup block ${block.number} with ${processedTxs.length} transactions`); + } + /** * Publishes the L2Block to the rollup contract. * @param block - The L2Block to be published. */ + @trackSpan('Sequencer.publishL2Block', block => ({ + [Attributes.BLOCK_NUMBER]: block.number, + })) protected async publishL2Block(block: L2Block, aggregationObject: Fr[], proof: Proof) { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; diff --git a/yarn-project/sequencer-client/tsconfig.json b/yarn-project/sequencer-client/tsconfig.json index 4ec1ceda867..b4140a80da4 100644 --- a/yarn-project/sequencer-client/tsconfig.json +++ b/yarn-project/sequencer-client/tsconfig.json @@ -39,6 +39,9 @@ { "path": "../simulator" }, + { + "path": "../telemetry-client" + }, { "path": "../types" }, diff --git a/yarn-project/simulator/package.json b/yarn-project/simulator/package.json index f99e74aa4b2..8356b976f28 100644 --- a/yarn-project/simulator/package.json +++ b/yarn-project/simulator/package.json @@ -32,7 +32,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ @@ -53,6 +61,7 @@ "@aztec/foundation": "workspace:^", "@aztec/noir-protocol-circuits-types": "workspace:^", "@aztec/protocol-contracts": "workspace:^", + "@aztec/telemetry-client": "workspace:^", "@aztec/types": "workspace:^", "@aztec/world-state": 
"workspace:^", "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js", diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index 590f0542e1b..d2cd8ddf7d1 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -1,6 +1,6 @@ import { MerkleTreeId, UnencryptedL2Log } from '@aztec/circuit-types'; import { KeyValidationRequest } from '@aztec/circuits.js'; -import { EventSelector, FunctionSelector } from '@aztec/foundation/abi'; +import { EventSelector, FunctionSelector, NoteSelector } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr, Point } from '@aztec/foundation/fields'; @@ -49,6 +49,14 @@ export class Oracle { return toACVMField(await this.typedOracle.getContractAddress()); } + async getVersion(): Promise { + return toACVMField(await this.typedOracle.getVersion()); + } + + async getChainId(): Promise { + return toACVMField(await this.typedOracle.getChainId()); + } + async getKeyValidationRequest([pkMHash]: ACVMField[]): Promise { const { pkM, skApp } = await this.typedOracle.getKeyValidationRequest(fromACVMField(pkMHash)); @@ -244,7 +252,7 @@ export class Oracle { ): ACVMField { this.typedOracle.notifyCreatedNote( fromACVMField(storageSlot), - fromACVMField(noteTypeId), + NoteSelector.fromField(fromACVMField(noteTypeId)), note.map(fromACVMField), fromACVMField(innerNoteHash), +counter, @@ -357,7 +365,7 @@ export class Oracle { const encLog = this.typedOracle.computeEncryptedNoteLog( AztecAddress.fromString(contractAddress), Fr.fromString(storageSlot), - Fr.fromString(noteTypeId), + NoteSelector.fromField(Fr.fromString(noteTypeId)), ovKeys, ivpkM, preimage.map(fromACVMField), diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index 41fd2f7e37b..690ccf8ac86 100644 --- 
a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -16,7 +16,7 @@ import { type PrivateCallStackItem, type PublicCallRequest, } from '@aztec/circuits.js'; -import { type FunctionSelector } from '@aztec/foundation/abi'; +import { type FunctionSelector, type NoteSelector } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { type ContractInstance } from '@aztec/types/contracts'; @@ -90,6 +90,14 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('getContractAddress'); } + getChainId(): Promise { + throw new OracleMethodNotAvailableError('getChainId'); + } + + getVersion(): Promise { + throw new OracleMethodNotAvailableError('getVersion'); + } + getKeyValidationRequest(_pkMHash: Fr): Promise { throw new OracleMethodNotAvailableError('getKeyValidationRequest'); } @@ -156,7 +164,13 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('getNotes'); } - notifyCreatedNote(_storageSlot: Fr, _noteTypeId: Fr, _note: Fr[], _innerNoteHash: Fr, _counter: number): void { + notifyCreatedNote( + _storageSlot: Fr, + _noteTypeId: NoteSelector, + _note: Fr[], + _innerNoteHash: Fr, + _counter: number, + ): void { throw new OracleMethodNotAvailableError('notifyCreatedNote'); } @@ -211,7 +225,7 @@ export abstract class TypedOracle { computeEncryptedNoteLog( _contractAddress: AztecAddress, _storageSlot: Fr, - _noteTypeId: Fr, + _noteTypeId: NoteSelector, _ovKeys: KeyValidationRequest, _ivpkM: PublicKey, _preimage: Fr[], diff --git a/yarn-project/simulator/src/avm/avm_context.test.ts b/yarn-project/simulator/src/avm/avm_context.test.ts index bea44afec38..a96d8898306 100644 --- a/yarn-project/simulator/src/avm/avm_context.test.ts +++ b/yarn-project/simulator/src/avm/avm_context.test.ts @@ -16,6 +16,7 @@ describe('Avm Context', () => { allSameExcept(context.environment, { 
address: newAddress, storageAddress: newAddress, + contractCallDepth: Fr.ONE, // Calldata also includes AvmContextInputs calldata: anyAvmContextInputs().concat(newCalldata), isStaticCall: false, @@ -46,6 +47,7 @@ describe('Avm Context', () => { allSameExcept(context.environment, { address: newAddress, storageAddress: newAddress, + contractCallDepth: Fr.ONE, // Calldata also includes AvmContextInputs calldata: anyAvmContextInputs().concat(newCalldata), isStaticCall: true, diff --git a/yarn-project/simulator/src/avm/avm_execution_environment.test.ts b/yarn-project/simulator/src/avm/avm_execution_environment.test.ts index 68bde3962fb..e13f3f248d7 100644 --- a/yarn-project/simulator/src/avm/avm_execution_environment.test.ts +++ b/yarn-project/simulator/src/avm/avm_execution_environment.test.ts @@ -16,6 +16,7 @@ describe('Execution Environment', () => { allSameExcept(executionEnvironment, { address: newAddress, storageAddress: newAddress, + contractCallDepth: Fr.ONE, // Calldata also includes AvmContextInputs calldata: anyAvmContextInputs().concat(calldata), }), @@ -30,6 +31,7 @@ describe('Execution Environment', () => { expect(newExecutionEnvironment).toEqual( allSameExcept(executionEnvironment, { address: newAddress, + contractCallDepth: Fr.ONE, isDelegateCall: true, // Calldata also includes AvmContextInputs calldata: anyAvmContextInputs().concat(calldata), @@ -49,6 +51,7 @@ describe('Execution Environment', () => { allSameExcept(executionEnvironment, { address: newAddress, storageAddress: newAddress, + contractCallDepth: Fr.ONE, isStaticCall: true, // Calldata also includes AvmContextInputs calldata: anyAvmContextInputs().concat(calldata), diff --git a/yarn-project/simulator/src/avm/avm_execution_environment.ts b/yarn-project/simulator/src/avm/avm_execution_environment.ts index 411b9d60ff4..c4794b1a02b 100644 --- a/yarn-project/simulator/src/avm/avm_execution_environment.ts +++ b/yarn-project/simulator/src/avm/avm_execution_environment.ts @@ -19,6 +19,7 @@ export 
class AvmContextInputs { */ // TODO(https://github.com/AztecProtocol/aztec-packages/issues/3992): gas not implemented export class AvmExecutionEnvironment { + private readonly calldataPrefixLength; constructor( public readonly address: AztecAddress, public readonly storageAddress: AztecAddress, @@ -45,8 +46,9 @@ export class AvmExecutionEnvironment { temporaryFunctionSelector.toField(), computeVarArgsHash(calldata), isStaticCall, - ); - this.calldata = [...inputs.toFields(), ...calldata]; + ).toFields(); + this.calldata = [...inputs, ...calldata]; + this.calldataPrefixLength = inputs.length; } private deriveEnvironmentForNestedCallInternal( @@ -62,7 +64,7 @@ export class AvmExecutionEnvironment { /*sender=*/ this.address, this.feePerL2Gas, this.feePerDaGas, - this.contractCallDepth, + this.contractCallDepth.add(Fr.ONE), this.header, this.globals, isStaticCall, @@ -109,4 +111,9 @@ export class AvmExecutionEnvironment { ): AvmExecutionEnvironment { throw new Error('Delegate calls not supported!'); } + + public getCalldataWithoutPrefix(): Fr[] { + // clip off the first few entries + return this.calldata.slice(this.calldataPrefixLength); + } } diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 1614305b0be..e28d9d9b8e7 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -1,15 +1,16 @@ -import { UnencryptedL2Log } from '@aztec/circuit-types'; import { Grumpkin } from '@aztec/circuits.js/barretenberg'; import { computeVarArgsHash } from '@aztec/circuits.js/hash'; -import { EventSelector, FunctionSelector } from '@aztec/foundation/abi'; +import { FunctionSelector } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { keccak256, pedersenHash, poseidon2Hash, sha256 } from '@aztec/foundation/crypto'; import { Fq, Fr } from '@aztec/foundation/fields'; import { type Fieldable } 
from '@aztec/foundation/serialize'; -import { jest } from '@jest/globals'; +import { mock } from 'jest-mock-extended'; +import { type PublicSideEffectTraceInterface } from '../public/side_effect_trace_interface.js'; import { isAvmBytecode, markBytecodeAsAvm } from '../public/transitional_adaptors.js'; +import { type AvmExecutionEnvironment } from './avm_execution_environment.js'; import { AvmMachineState } from './avm_machine_state.js'; import { type MemoryValue, TypeTag, type Uint8 } from './avm_memory_types.js'; import { AvmSimulator } from './avm_simulator.js'; @@ -19,12 +20,26 @@ import { initContext, initExecutionEnvironment, initGlobalVariables, + initHostStorage, initMachineState, + initPersistableStateManager, randomMemoryBytes, randomMemoryFields, } from './fixtures/index.js'; +import { type HostStorage } from './journal/host_storage.js'; +import { type AvmPersistableStateManager } from './journal/journal.js'; import { Add, CalldataCopy, Return } from './opcodes/index.js'; import { encodeToBytecode } from './serialization/bytecode_serialization.js'; +import { + mockGetBytecode, + mockGetContractInstance, + mockL1ToL2MessageExists, + mockNoteHashExists, + mockNullifierExists, + mockStorageRead, + mockStorageReadWithMap, + mockTraceFork, +} from './test_utils.js'; describe('AVM simulator: injected bytecode', () => { let calldata: Fr[]; @@ -314,634 +329,565 @@ describe('AVM simulator: transpiled Noir contracts', () => { }); }); - describe('Tree access (notes & nullifiers)', () => { - it(`Note hash exists (it does not)`, async () => { - const noteHash = new Fr(42); - const leafIndex = new Fr(7); - const calldata = [noteHash, leafIndex]; - - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('note_hash_exists'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); - expect(results.output).toEqual([/*exists=false*/ new 
Fr(0)]); + it('conversions', async () => { + const calldata: Fr[] = [new Fr(0b1011101010100)]; + const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - // Note hash existence check should be in trace - const trace = context.persistableState.flush(); - expect(trace.noteHashChecks).toEqual([expect.objectContaining({ noteHash, leafIndex, exists: false })]); - }); + const bytecode = getAvmTestContractBytecode('to_radix_le'); + const results = await new AvmSimulator(context).executeBytecode(bytecode); - it(`Note hash exists (it does)`, async () => { - const noteHash = new Fr(42); - const leafIndex = new Fr(7); - const calldata = [noteHash, leafIndex]; + expect(results.reverted).toBe(false); + const expectedResults = Buffer.concat('0010101011'.split('').map(c => new Fr(Number(c)).toBuffer())); + const resultBuffer = Buffer.concat(results.output.map(f => f.toBuffer())); - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - // note hash exists! 
- jest - .spyOn(context.persistableState.hostStorage.commitmentsDb, 'getCommitmentIndex') - .mockReturnValue(Promise.resolve(BigInt(7))); - const bytecode = getAvmTestContractBytecode('note_hash_exists'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(resultBuffer.equals(expectedResults)).toBe(true); + }); - expect(results.reverted).toBe(false); - expect(results.output).toEqual([/*exists=true*/ new Fr(1)]); + describe('Side effects, world state, nested calls', () => { + const address = new Fr(1); + // TODO(dbanks12): should be able to make address and storage address different + const storageAddress = new Fr(1); + const sender = new Fr(42); + const leafIndex = new Fr(7); + const slotNumber = 1; // must update Noir contract if changing this + const slot = new Fr(slotNumber); + const listSlotNumber0 = 2; // must update Noir contract if changing this + const listSlotNumber1 = listSlotNumber0 + 1; + const listSlot0 = new Fr(listSlotNumber0); + const listSlot1 = new Fr(listSlotNumber1); + const value0 = new Fr(420); + const value1 = new Fr(69); + + let hostStorage: HostStorage; + let trace: PublicSideEffectTraceInterface; + let persistableState: AvmPersistableStateManager; + + beforeEach(() => { + hostStorage = initHostStorage(); + trace = mock(); + persistableState = initPersistableStateManager({ hostStorage, trace }); + }); + + const createContext = (calldata: Fr[] = []) => { + return initContext({ + persistableState, + env: initExecutionEnvironment({ address, storageAddress, sender, calldata }), + }); + }; - // Note hash existence check should be in trace - const trace = context.persistableState.flush(); - expect(trace.noteHashChecks).toEqual([expect.objectContaining({ noteHash, leafIndex, exists: true })]); + // Will check existence at leafIndex, but nothing may be found there and/or something may be found at mockAtLeafIndex + describe.each([ + [/*mockAtLeafIndex=*/ undefined], // doesn't exist at all + [/*mockAtLeafIndex=*/ 
leafIndex], // should be found! + [/*mockAtLeafIndex=*/ leafIndex.add(Fr.ONE)], // won't be found! (checking leafIndex+1, but it exists at leafIndex) + ])('Note hash checks', (mockAtLeafIndex?: Fr) => { + const expectFound = mockAtLeafIndex !== undefined && mockAtLeafIndex.equals(leafIndex); + const existsElsewhere = mockAtLeafIndex !== undefined && !mockAtLeafIndex.equals(leafIndex); + const existsStr = expectFound ? 'DOES exist' : 'does NOT exist'; + const foundAtStr = existsElsewhere + ? `at leafIndex=${mockAtLeafIndex.toNumber()} (exists at leafIndex=${leafIndex.toNumber()})` + : ''; + it(`Should return ${expectFound} (and be traced) when noteHash ${existsStr} ${foundAtStr}`, async () => { + const calldata = [value0, leafIndex]; + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('note_hash_exists'); + if (mockAtLeafIndex !== undefined) { + mockNoteHashExists(hostStorage, mockAtLeafIndex, value0); + } + + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([expectFound ? 
Fr.ONE : Fr.ZERO]); + + expect(trace.traceNoteHashCheck).toHaveBeenCalledTimes(1); + expect(trace.traceNoteHashCheck).toHaveBeenCalledWith( + storageAddress, + /*noteHash=*/ value0, + leafIndex, + /*exists=*/ expectFound, + ); + }); }); - it(`Emit unencrypted logs (should be traced)`, async () => { - const context = initContext(); - const bytecode = getAvmTestContractBytecode('emit_unencrypted_log'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); - - const expectedFields = [new Fr(10), new Fr(20), new Fr(30)]; - const expectedString = 'Hello, world!'.split('').map(c => new Fr(c.charCodeAt(0))); - const expectedCompressedString = Buffer.from( - '\0A long time ago, in a galaxy fa' + '\0r far away...\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', - ); - expect(context.persistableState.flush().newLogs).toEqual([ - new UnencryptedL2Log( - context.environment.address, - new EventSelector(5), - Buffer.concat(expectedFields.map(f => f.toBuffer())), - ), - new UnencryptedL2Log( - context.environment.address, - new EventSelector(5), - Buffer.concat(expectedString.map(f => f.toBuffer())), - ), - new UnencryptedL2Log(context.environment.address, new EventSelector(5), expectedCompressedString), - ]); + describe.each([[/*exists=*/ false], [/*exists=*/ true]])('Nullifier checks', (exists: boolean) => { + const existsStr = exists ? 'DOES exist' : 'does NOT exist'; + it(`Should return ${exists} (and be traced) when noteHash ${existsStr}`, async () => { + const calldata = [value0]; + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('nullifier_exists'); + + if (exists) { + mockNullifierExists(hostStorage, leafIndex, value0); + } + + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([exists ? 
Fr.ONE : Fr.ZERO]); + + expect(trace.traceNullifierCheck).toHaveBeenCalledTimes(1); + const isPending = false; + // leafIndex is returned from DB call for nullifiers, so it is absent on DB miss + const tracedLeafIndex = exists && !isPending ? leafIndex : Fr.ZERO; + expect(trace.traceNullifierCheck).toHaveBeenCalledWith( + storageAddress, + value0, + tracedLeafIndex, + exists, + isPending, + ); + }); }); - it(`Emit note hash (should be traced)`, async () => { - const utxo = new Fr(42); - const calldata = [utxo]; - - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('new_note_hash'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); - - expect(context.persistableState.flush().newNoteHashes).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress, - noteHash: utxo, - }), - ]); + // Will check existence at leafIndex, but nothing may be found there and/or something may be found at mockAtLeafIndex + describe.each([ + [/*mockAtLeafIndex=*/ undefined], // doesn't exist at all + [/*mockAtLeafIndex=*/ leafIndex], // should be found! + [/*mockAtLeafIndex=*/ leafIndex.add(Fr.ONE)], // won't be found! (checking leafIndex+1, but it exists at leafIndex) + ])('L1ToL2 message checks', (mockAtLeafIndex?: Fr) => { + const expectFound = mockAtLeafIndex !== undefined && mockAtLeafIndex.equals(leafIndex); + const existsElsewhere = mockAtLeafIndex !== undefined && !mockAtLeafIndex.equals(leafIndex); + const existsStr = expectFound ? 'DOES exist' : 'does NOT exist'; + const foundAtStr = existsElsewhere + ? 
`at leafIndex=${mockAtLeafIndex.toNumber()} (exists at leafIndex=${leafIndex.toNumber()})` + : ''; + + it(`Should return ${expectFound} (and be traced) when noteHash ${existsStr} ${foundAtStr}`, async () => { + const calldata = [value0, leafIndex]; + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('l1_to_l2_msg_exists'); + if (mockAtLeafIndex !== undefined) { + mockL1ToL2MessageExists(hostStorage, mockAtLeafIndex, value0, /*valueAtOtherIndices=*/ value1); + } + + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([expectFound ? Fr.ONE : Fr.ZERO]); + + expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledTimes(1); + expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledWith( + address, + /*noteHash=*/ value0, + leafIndex, + /*exists=*/ expectFound, + ); + }); }); - it(`Emit nullifier (should be traced)`, async () => { - const utxo = new Fr(42); - const calldata = [utxo]; + it('Should append a new note hash correctly', async () => { + const calldata = [value0]; + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('new_note_hash'); - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('new_nullifier'); const results = await new AvmSimulator(context).executeBytecode(bytecode); - expect(results.reverted).toBe(false); + expect(results.output).toEqual([]); - expect(context.persistableState.flush().newNullifiers).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress, - nullifier: utxo, - }), - ]); + expect(trace.traceNewNoteHash).toHaveBeenCalledTimes(1); + expect(trace.traceNewNoteHash).toHaveBeenCalledWith( + expect.objectContaining(storageAddress), + /*nullifier=*/ value0, + ); }); - it(`Nullifier exists (it does not)`, async () => { - const utxo = new Fr(42); - const calldata = [utxo]; + 
it('Should append a new nullifier correctly', async () => { + const calldata = [value0]; + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('new_nullifier'); - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('nullifier_exists'); const results = await new AvmSimulator(context).executeBytecode(bytecode); - expect(results.reverted).toBe(false); - expect(results.output).toEqual([/*exists=false*/ new Fr(0)]); - - // Nullifier existence check should be in trace - const trace = context.persistableState.flush(); - expect(trace.nullifierChecks).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress, - nullifier: utxo, - exists: false, - counter: expect.any(Fr), - isPending: false, - leafIndex: expect.any(Fr), - }), - ]); - }); + expect(results.output).toEqual([]); - it(`Nullifier exists (it does)`, async () => { - const utxo = new Fr(42); - const calldata = [utxo]; - - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - // nullifier exists! 
- jest - .spyOn(context.persistableState.hostStorage.commitmentsDb, 'getNullifierIndex') - .mockReturnValue(Promise.resolve(BigInt(42))); - const bytecode = getAvmTestContractBytecode('nullifier_exists'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); - expect(results.output).toEqual([/*exists=true*/ new Fr(1)]); - - // Nullifier existence check should be in trace - const trace = context.persistableState.flush(); - expect(trace.nullifierChecks).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress, - nullifier: utxo, - exists: true, - counter: expect.any(Fr), - isPending: false, - leafIndex: expect.any(Fr), - }), - ]); + expect(trace.traceNewNullifier).toHaveBeenCalledTimes(1); + expect(trace.traceNewNullifier).toHaveBeenCalledWith( + expect.objectContaining(storageAddress), + /*nullifier=*/ value0, + ); }); - it(`Emits a nullifier and checks its existence`, async () => { - const utxo = new Fr(42); - const calldata = [utxo]; - - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('emit_nullifier_and_check'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); - // Nullifier existence check should be in trace - const trace = context.persistableState.flush(); - expect(trace.newNullifiers).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress, - nullifier: utxo, - }), - ]); - expect(trace.nullifierChecks).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress, - nullifier: utxo, - exists: true, - counter: expect.any(Fr), - isPending: true, - leafIndex: expect.any(Fr), - }), - ]); + describe('Cached nullifiers', () => { + it(`Emits a nullifier and checks its existence`, async () => { + const calldata = [value0]; + + const context = 
createContext(calldata); + const bytecode = getAvmTestContractBytecode('emit_nullifier_and_check'); + + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); + + // New nullifier and nullifier existence check should be traced + expect(trace.traceNewNullifier).toHaveBeenCalledTimes(1); + expect(trace.traceNewNullifier).toHaveBeenCalledWith( + expect.objectContaining(storageAddress), + /*nullifier=*/ value0, + ); + expect(trace.traceNullifierCheck).toHaveBeenCalledTimes(1); + // leafIndex is returned from DB call for nullifiers, so it is absent on DB miss + expect(trace.traceNullifierCheck).toHaveBeenCalledWith( + storageAddress, + value0, + /*leafIndex=*/ Fr.ZERO, + /*exists=*/ true, + /*isPending=*/ true, + ); + }); + it(`Emits same nullifier twice (expect failure)`, async () => { + const calldata = [value0]; + + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('nullifier_collision'); + + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(true); + expect(results.revertReason?.message).toMatch(/Attempted to emit duplicate nullifier/); + + // Nullifier should be traced exactly once + expect(trace.traceNewNullifier).toHaveBeenCalledTimes(1); + expect(trace.traceNewNullifier).toHaveBeenCalledWith( + expect.objectContaining(storageAddress), + /*nullifier=*/ value0, + ); + }); }); - it(`Emits same nullifier twice (should fail)`, async () => { - const utxo = new Fr(42); - const calldata = [utxo]; + describe('Unencrypted Logs', () => { + it(`Emit unencrypted logs (should be traced)`, async () => { + const context = createContext(); + const bytecode = getAvmTestContractBytecode('emit_unencrypted_log'); - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('nullifier_collision'); - const results = await new 
AvmSimulator(context).executeBytecode(bytecode); + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); - expect(results.reverted).toBe(true); - expect(results.revertReason?.message).toMatch(/Attempted to emit duplicate nullifier/); - // Only the first nullifier should be in the trace, second one failed to add - expect(context.persistableState.flush().newNullifiers).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress, - nullifier: utxo, - }), - ]); - }); - }); + const eventSelector = new Fr(5); + const expectedFields = [new Fr(10), new Fr(20), new Fr(30)]; + const expectedString = 'Hello, world!'.split('').map(c => new Fr(c.charCodeAt(0))); + const expectedCompressedString = [ + '\0A long time ago, in a galaxy fa', + '\0r far away...\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0', + ].map(s => new Fr(Buffer.from(s))); - describe('Test tree access (l1ToL2 messages)', () => { - it(`Message exists (it does not)`, async () => { - const msgHash = new Fr(42); - const leafIndex = new Fr(24); - const calldata = [msgHash, leafIndex]; - - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('l1_to_l2_msg_exists'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); - expect(results.output).toEqual([/*exists=false*/ new Fr(0)]); - // Message existence check should be in trace - const trace = context.persistableState.flush(); - expect(trace.l1ToL2MessageChecks.length).toEqual(1); - expect(trace.l1ToL2MessageChecks[0].exists).toEqual(false); + expect(trace.traceUnencryptedLog).toHaveBeenCalledTimes(3); + expect(trace.traceUnencryptedLog).toHaveBeenCalledWith(address, eventSelector, expectedFields); + expect(trace.traceUnencryptedLog).toHaveBeenCalledWith(address, eventSelector, expectedString); + 
expect(trace.traceUnencryptedLog).toHaveBeenCalledWith(address, eventSelector, expectedCompressedString); + }); }); - it(`Message exists (it does)`, async () => { - const msgHash = new Fr(42); - const leafIndex = new Fr(24); - const calldata = [msgHash, leafIndex]; + describe('Public storage accesses', () => { + it('Should set value in storage (single)', async () => { + const calldata = [value0]; - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - jest.spyOn(context.persistableState.hostStorage.commitmentsDb, 'getL1ToL2LeafValue').mockResolvedValue(msgHash); - const bytecode = getAvmTestContractBytecode('l1_to_l2_msg_exists'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('set_storage_single'); - expect(results.reverted).toBe(false); - expect(results.output).toEqual([/*exists=false*/ new Fr(1)]); - // Message existence check should be in trace - const trace = context.persistableState.flush(); - expect(trace.l1ToL2MessageChecks.length).toEqual(1); - expect(trace.l1ToL2MessageChecks[0].exists).toEqual(true); - }); - }); + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); - describe('Storage accesses', () => { - it('Should set value in storage (single)', async () => { - const slot = 1n; - const address = AztecAddress.fromField(new Fr(420)); - const value = new Fr(88); - const calldata = [value]; + expect(await context.persistableState.peekStorage(storageAddress, slot)).toEqual(value0); - const context = initContext({ - env: initExecutionEnvironment({ calldata, address, storageAddress: address }), + expect(trace.tracePublicStorageWrite).toHaveBeenCalledTimes(1); + expect(trace.tracePublicStorageWrite).toHaveBeenCalledWith(storageAddress, slot, value0); }); - const bytecode = getAvmTestContractBytecode('set_storage_single'); - const results = await new 
AvmSimulator(context).executeBytecode(bytecode); - expect(results.reverted).toBe(false); + it('Should read value in storage (single)', async () => { + const context = createContext(); + mockStorageRead(hostStorage, value0); - // World state - const worldState = context.persistableState.flush(); - const storageSlot = worldState.currentStorageValue.get(address.toBigInt())!; - const adminSlotValue = storageSlot.get(slot); - expect(adminSlotValue).toEqual(value); - - // Tracing - expect(worldState.storageWrites).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slot), - value: value, - }), - ]); - }); + const bytecode = getAvmTestContractBytecode('read_storage_single'); - it('Should read value in storage (single)', async () => { - const slot = 1n; - const value = new Fr(12345); - const address = AztecAddress.fromField(new Fr(420)); - const storage = new Map([[slot, value]]); + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([value0]); - const context = initContext({ - env: initExecutionEnvironment({ storageAddress: address }), + expect(trace.tracePublicStorageRead).toHaveBeenCalledTimes(1); + expect(trace.tracePublicStorageRead).toHaveBeenCalledWith( + storageAddress, + slot, + value0, + /*exists=*/ true, + /*cached=*/ false, + ); }); - jest - .spyOn(context.persistableState.hostStorage.publicStateDb, 'storageRead') - .mockImplementation((_address, slot) => Promise.resolve(storage.get(slot.toBigInt())!)); - const bytecode = getAvmTestContractBytecode('read_storage_single'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - // Get contract function artifact - expect(results.reverted).toBe(false); - expect(results.output).toEqual([value]); - - // Tracing - const worldState = context.persistableState.flush(); - expect(worldState.storageReads).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: 
new Fr(slot), - value: value, - exists: true, - }), - ]); - }); + it('Should set and read a value from storage (single)', async () => { + const calldata = [value0]; + + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('set_read_storage_single'); + const results = await new AvmSimulator(context).executeBytecode(bytecode); + + expect(results.reverted).toBe(false); + expect(results.output).toEqual([value0]); + + expect(trace.tracePublicStorageWrite).toHaveBeenCalledTimes(1); + expect(trace.tracePublicStorageWrite).toHaveBeenCalledWith(storageAddress, slot, value0); + expect(trace.tracePublicStorageRead).toHaveBeenCalledTimes(1); + expect(trace.tracePublicStorageRead).toHaveBeenCalledWith( + storageAddress, + slot, + value0, + /*exists=*/ true, + /*cached=*/ true, + ); + }); - it('Should set and read a value from storage (single)', async () => { - const slot = 1n; - const value = new Fr(12345); - const address = AztecAddress.fromField(new Fr(420)); - const calldata = [value]; + it('Should set a value in storage (list)', async () => { + const calldata = [value0, value1]; - const context = initContext({ - env: initExecutionEnvironment({ calldata, address, storageAddress: address }), - }); - const bytecode = getAvmTestContractBytecode('set_read_storage_single'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('set_storage_list'); - expect(results.reverted).toBe(false); - expect(results.output).toEqual([value]); - - // Test read trace - const worldState = context.persistableState.flush(); - expect(worldState.storageReads).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slot), - value: value, - exists: true, - }), - ]); - expect(worldState.storageWrites).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slot), - value: value, - }), - ]); - }); + const results 
= await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); - it('Should set a value in storage (list)', async () => { - const slot = 2n; - const sender = AztecAddress.fromField(new Fr(1)); - const address = AztecAddress.fromField(new Fr(420)); - const calldata = [new Fr(1), new Fr(2)]; + expect(await context.persistableState.peekStorage(address, listSlot0)).toEqual(calldata[0]); + expect(await context.persistableState.peekStorage(address, listSlot1)).toEqual(calldata[1]); - const context = initContext({ - env: initExecutionEnvironment({ sender, address, calldata, storageAddress: address }), + expect(trace.tracePublicStorageWrite).toHaveBeenCalledTimes(2); + expect(trace.tracePublicStorageWrite).toHaveBeenCalledWith(storageAddress, listSlot0, value0); + expect(trace.tracePublicStorageWrite).toHaveBeenCalledWith(storageAddress, listSlot1, value1); }); - const bytecode = getAvmTestContractBytecode('set_storage_list'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - expect(results.reverted).toBe(false); + it('Should read a value in storage (list)', async () => { + const context = createContext(); + const mockedStorage = new Map([ + [listSlot0.toBigInt(), value0], + [listSlot1.toBigInt(), value1], + ]); + mockStorageReadWithMap(hostStorage, mockedStorage); + + const bytecode = getAvmTestContractBytecode('read_storage_list'); + + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([value0, value1]); + + expect(trace.tracePublicStorageRead).toHaveBeenCalledWith( + storageAddress, + listSlot0, + value0, + /*exists=*/ true, + /*cached=*/ false, + ); + expect(trace.tracePublicStorageRead).toHaveBeenCalledWith( + storageAddress, + listSlot1, + value1, + /*exists=*/ true, + /*cached=*/ false, + ); + }); - const worldState = context.persistableState.flush(); - const storageSlot = 
worldState.currentStorageValue.get(address.toBigInt())!; - expect(storageSlot.get(slot)).toEqual(calldata[0]); - expect(storageSlot.get(slot + 1n)).toEqual(calldata[1]); - - // Tracing - expect(worldState.storageWrites).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slot), - value: calldata[0], - }), - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slot + 1n), - value: calldata[1], - }), - ]); - }); + it('Should set a value in storage (map)', async () => { + const calldata = [storageAddress, value0]; - it('Should read a value in storage (list)', async () => { - const slot = 2n; - const address = AztecAddress.fromField(new Fr(420)); - const values = [new Fr(1), new Fr(2)]; - const storage = new Map([ - [slot, values[0]], - [slot + 1n, values[1]], - ]); + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('set_storage_map'); - const context = initContext({ - env: initExecutionEnvironment({ address, storageAddress: address }), - }); - jest - .spyOn(context.persistableState.hostStorage.publicStateDb, 'storageRead') - .mockImplementation((_address, slot) => Promise.resolve(storage.get(slot.toBigInt())!)); - const bytecode = getAvmTestContractBytecode('read_storage_list'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); - expect(results.reverted).toBe(false); - expect(results.output).toEqual(values); - - // Tracing - const worldState = context.persistableState.flush(); - expect(worldState.storageReads).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slot), - value: values[0], - exists: true, - }), - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slot + 1n), - value: values[1], - exists: true, - }), - ]); - }); + // returns the storage slot for modified key + const mapSlotNumber = 
results.output[0].toBigInt(); + const mapSlot = new Fr(mapSlotNumber); - it('Should set a value in storage (map)', async () => { - const address = AztecAddress.fromField(new Fr(420)); - const value = new Fr(12345); - const calldata = [address.toField(), value]; + expect(await context.persistableState.peekStorage(storageAddress, mapSlot)).toEqual(value0); - const context = initContext({ - env: initExecutionEnvironment({ address, calldata, storageAddress: address }), + expect(trace.tracePublicStorageWrite).toHaveBeenCalledTimes(1); + expect(trace.tracePublicStorageWrite).toHaveBeenCalledWith(storageAddress, mapSlot, value0); }); - const bytecode = getAvmTestContractBytecode('set_storage_map'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - expect(results.reverted).toBe(false); - // returns the storage slot for modified key - const slotNumber = results.output[0].toBigInt(); - - const worldState = context.persistableState.flush(); - const storageSlot = worldState.currentStorageValue.get(address.toBigInt())!; - expect(storageSlot.get(slotNumber)).toEqual(value); - - // Tracing - expect(worldState.storageWrites).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slotNumber), - value: value, - }), - ]); - }); + it('Should read-add-set a value in storage (map)', async () => { + const calldata = [storageAddress, value0]; - it('Should read-add-set a value in storage (map)', async () => { - const address = AztecAddress.fromField(new Fr(420)); - const value = new Fr(12345); - const calldata = [address.toField(), value]; + const context = createContext(calldata); + const bytecode = getAvmTestContractBytecode('add_storage_map'); - const context = initContext({ - env: initExecutionEnvironment({ address, calldata, storageAddress: address }), - }); - const bytecode = getAvmTestContractBytecode('add_storage_map'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + const results = await new 
AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); - expect(results.reverted).toBe(false); - // returns the storage slot for modified key - const slotNumber = results.output[0].toBigInt(); - - const worldState = context.persistableState.flush(); - const storageSlot = worldState.currentStorageValue.get(address.toBigInt())!; - expect(storageSlot.get(slotNumber)).toEqual(value); - - // Tracing - expect(worldState.storageReads).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slotNumber), - value: Fr.ZERO, - exists: false, - }), - ]); - expect(worldState.storageWrites).toEqual([ - expect.objectContaining({ - storageAddress: address, - slot: new Fr(slotNumber), - value: value, - }), - ]); - }); + // returns the storage slot for modified key + const mapSlotNumber = results.output[0].toBigInt(); + const mapSlot = new Fr(mapSlotNumber); - it('Should read value in storage (map)', async () => { - const value = new Fr(12345); - const address = AztecAddress.fromField(new Fr(420)); - const calldata = [address.toField()]; + expect(await context.persistableState.peekStorage(storageAddress, mapSlot)).toEqual(value0); - const context = initContext({ - env: initExecutionEnvironment({ calldata, address, storageAddress: address }), + expect(trace.tracePublicStorageRead).toHaveBeenCalledTimes(1); + expect(trace.tracePublicStorageRead).toHaveBeenCalledWith( + storageAddress, + mapSlot, + Fr.ZERO, + /*exists=*/ false, + /*cached=*/ false, + ); + expect(trace.tracePublicStorageWrite).toHaveBeenCalledTimes(1); + expect(trace.tracePublicStorageWrite).toHaveBeenCalledWith(storageAddress, mapSlot, value0); }); - jest - .spyOn(context.persistableState.hostStorage.publicStateDb, 'storageRead') - .mockReturnValue(Promise.resolve(value)); - const bytecode = getAvmTestContractBytecode('read_storage_map'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - // Get contract function artifact - 
expect(results.reverted).toBe(false); - expect(results.output).toEqual([value]); - - // Tracing - const worldState = context.persistableState.flush(); - expect(worldState.storageReads).toEqual([ - expect.objectContaining({ - storageAddress: address, - // slot depends on pedersen hash of key, etc. - value: value, - exists: true, - }), - ]); + it('Should read value in storage (map)', async () => { + const calldata = [storageAddress]; + + const context = createContext(calldata); + mockStorageRead(hostStorage, value0); + const bytecode = getAvmTestContractBytecode('read_storage_map'); + + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([value0]); + + expect(trace.tracePublicStorageRead).toHaveBeenCalledTimes(1); + // slot is the result of a pedersen hash and is therefore not known in the test + expect(trace.tracePublicStorageRead).toHaveBeenCalledWith( + storageAddress, + expect.anything(), + value0, + /*exists=*/ true, + /*cached=*/ false, + ); + }); }); - }); - - describe('Contract', () => { - it(`GETCONTRACTINSTANCE deserializes correctly`, async () => { - const context = initContext(); - const contractInstance = { - address: AztecAddress.random(), - version: 1 as const, - salt: new Fr(0x123), - deployer: AztecAddress.fromBigInt(0x456n), - contractClassId: new Fr(0x789), - initializationHash: new Fr(0x101112), - publicKeysHash: new Fr(0x161718), - }; - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getContractInstance') - .mockReturnValue(Promise.resolve(contractInstance)); - const bytecode = getAvmTestContractBytecode('test_get_contract_instance_raw'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); + describe('Contract Instance Retrieval', () => { + it(`Can getContractInstance`, async () => { + const context = createContext(); + // Contract instance must match noir + const 
contractInstance = { + address: AztecAddress.random(), + version: 1 as const, + salt: new Fr(0x123), + deployer: AztecAddress.fromBigInt(0x456n), + contractClassId: new Fr(0x789), + initializationHash: new Fr(0x101112), + publicKeysHash: new Fr(0x161718), + }; + mockGetContractInstance(hostStorage, contractInstance); + + const bytecode = getAvmTestContractBytecode('test_get_contract_instance_raw'); + + const results = await new AvmSimulator(context).executeBytecode(bytecode); + expect(results.reverted).toBe(false); + + expect(trace.traceGetContractInstance).toHaveBeenCalledTimes(1); + expect(trace.traceGetContractInstance).toHaveBeenCalledWith({ exists: true, ...contractInstance }); + }); }); - }); - - describe('Nested external calls', () => { - it(`Nested call with not enough gas`, async () => { - const gas = [/*l2=*/ 5, /*da=*/ 10000].map(g => new Fr(g)); - const calldata: Fr[] = [new Fr(1), new Fr(2), ...gas]; - const callBytecode = getAvmTestContractBytecode('nested_call_to_add_with_gas'); - const addBytecode = getAvmTestContractBytecode('add_args_return'); - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(addBytecode)); - const results = await new AvmSimulator(context).executeBytecode(callBytecode); + describe('Nested external calls', () => { + const expectTracedNestedCall = ( + environment: AvmExecutionEnvironment, + nestedTrace: PublicSideEffectTraceInterface, + isStaticCall: boolean = false, + ) => { + expect(trace.traceNestedCall).toHaveBeenCalledTimes(1); + expect(trace.traceNestedCall).toHaveBeenCalledWith( + /*nestedCallTrace=*/ nestedTrace, + /*nestedEnvironment=*/ expect.objectContaining({ + sender: environment.address, // sender is top-level call + contractCallDepth: new Fr(1), // top call is depth 0, nested is depth 1 + header: environment.header, // just confirming that nested env looks roughly right + 
globals: environment.globals, // just confirming that nested env looks roughly right + isStaticCall: isStaticCall, + // TODO(7121): can't check calldata like this since it is modified on environment construction + // with AvmContextInputs. These should eventually go away. + //calldata: expect.arrayContaining(environment.calldata), // top-level call forwards args + }), + /*startGasLeft=*/ expect.anything(), + /*endGasLeft=*/ expect.anything(), + /*bytecode=*/ expect.anything(), //decompressBytecodeIfCompressed(addBytecode), + /*avmCallResults=*/ expect.anything(), // we don't have the NESTED call's results to check + /*functionName=*/ expect.anything(), + ); + }; - // TODO: change this once we don't force rethrowing of exceptions. - // Outer frame should not revert, but inner should, so the forwarded return value is 0 - // expect(results.revertReason).toBeUndefined(); - // expect(results.reverted).toBe(false); - expect(results.reverted).toBe(true); - expect(results.revertReason?.message).toEqual('Not enough L2GAS gas left'); - }); + it(`Nested call`, async () => { + const calldata = [value0, value1]; + const context = createContext(calldata); + const callBytecode = getAvmTestContractBytecode('nested_call_to_add'); + const addBytecode = getAvmTestContractBytecode('add_args_return'); + mockGetBytecode(hostStorage, addBytecode); + const nestedTrace = mock(); + mockTraceFork(trace, nestedTrace); - it(`Nested call`, async () => { - const calldata: Fr[] = [new Fr(1), new Fr(2)]; - const callBytecode = getAvmTestContractBytecode('nested_call_to_add'); - const addBytecode = getAvmTestContractBytecode('add_args_return'); - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(addBytecode)); + const results = await new AvmSimulator(context).executeBytecode(callBytecode); + expect(results.reverted).toBe(false); + 
expect(results.output).toEqual([value0.add(value1)]); - const results = await new AvmSimulator(context).executeBytecode(callBytecode); + expectTracedNestedCall(context.environment, nestedTrace); + }); - expect(results.reverted).toBe(false); - expect(results.output).toEqual([new Fr(3)]); - }); + it(`Nested static call`, async () => { + const calldata = [value0, value1]; + const context = createContext(calldata); + const callBytecode = getAvmTestContractBytecode('nested_static_call_to_add'); + const addBytecode = getAvmTestContractBytecode('add_args_return'); + mockGetBytecode(hostStorage, addBytecode); + const nestedTrace = mock(); + mockTraceFork(trace, nestedTrace); - it(`Nested static call`, async () => { - const calldata: Fr[] = [new Fr(1), new Fr(2)]; - const callBytecode = getAvmTestContractBytecode('nested_static_call_to_add'); - const addBytecode = getAvmTestContractBytecode('add_args_return'); - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(addBytecode)); + const results = await new AvmSimulator(context).executeBytecode(callBytecode); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([value0.add(value1)]); - const results = await new AvmSimulator(context).executeBytecode(callBytecode); + expectTracedNestedCall(context.environment, nestedTrace, /*isStaticCall=*/ true); + }); - expect(results.reverted).toBe(false); - expect(results.output).toEqual([/*result=*/ new Fr(3)]); - }); + it(`Nested call with not enough gas (expect failure)`, async () => { + const gas = [/*l2=*/ 5, /*da=*/ 10000].map(g => new Fr(g)); + const calldata: Fr[] = [value0, value1, ...gas]; + const context = createContext(calldata); + const callBytecode = getAvmTestContractBytecode('nested_call_to_add_with_gas'); + const addBytecode = getAvmTestContractBytecode('add_args_return'); + mockGetBytecode(hostStorage, 
addBytecode); + mockTraceFork(trace); + + const results = await new AvmSimulator(context).executeBytecode(callBytecode); + // TODO(7141): change this once we don't force rethrowing of exceptions. + // Outer frame should not revert, but inner should, so the forwarded return value is 0 + // expect(results.revertReason).toBeUndefined(); + // expect(results.reverted).toBe(false); + expect(results.reverted).toBe(true); + expect(results.revertReason?.message).toEqual('Not enough L2GAS gas left'); + + // Nested call should NOT have been made and therefore should not be traced + expect(trace.traceNestedCall).toHaveBeenCalledTimes(0); + }); - it(`Nested static call which modifies storage`, async () => { - const callBytecode = getAvmTestContractBytecode('nested_static_call_to_set_storage'); - const nestedBytecode = getAvmTestContractBytecode('set_storage_single'); - const context = initContext(); - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(nestedBytecode)); + it(`Nested static call which modifies storage (expect failure)`, async () => { + const context = createContext(); + const callBytecode = getAvmTestContractBytecode('nested_static_call_to_set_storage'); + const nestedBytecode = getAvmTestContractBytecode('set_storage_single'); + mockGetBytecode(hostStorage, nestedBytecode); + mockTraceFork(trace); - const results = await new AvmSimulator(context).executeBytecode(callBytecode); + const results = await new AvmSimulator(context).executeBytecode(callBytecode); - expect(results.reverted).toBe(true); // The outer call should revert. - expect(results.revertReason?.message).toEqual( - 'Static call cannot update the state, emit L2->L1 messages or generate logs', - ); - }); + expect(results.reverted).toBe(true); // The outer call should revert. 
+ expect(results.revertReason?.message).toEqual( + 'Static call cannot update the state, emit L2->L1 messages or generate logs', + ); - it(`Nested calls rethrow exceptions`, async () => { - const calldata: Fr[] = [new Fr(1), new Fr(2)]; - const callBytecode = getAvmTestContractBytecode('nested_call_to_add'); - // We actually don't pass the function ADD, but it's ok because the signature is the same. - const nestedBytecode = getAvmTestContractBytecode('assert_same'); - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(nestedBytecode)); + // TODO(7141): external call doesn't recover from nested exception until + // we support recoverability of reverts (here and in kernel) + //expectTracedNestedCall(context.environment, results, nestedTrace, /*isStaticCall=*/true); - const results = await new AvmSimulator(context).executeBytecode(callBytecode); + // Nested call should NOT have been able to write storage + expect(trace.tracePublicStorageWrite).toHaveBeenCalledTimes(0); + }); - expect(results.reverted).toBe(true); // The outer call should revert. - expect(results.revertReason?.message).toEqual('Assertion failed: Values are not equal'); + it(`Nested calls rethrow exceptions`, async () => { + const calldata = [value0, value1]; + const context = createContext(calldata); + const callBytecode = getAvmTestContractBytecode('nested_call_to_add'); + // We actually don't pass the function ADD, but it's ok because the signature is the same. + const nestedBytecode = getAvmTestContractBytecode('assert_same'); + mockGetBytecode(hostStorage, nestedBytecode); + + const results = await new AvmSimulator(context).executeBytecode(callBytecode); + expect(results.reverted).toBe(true); // The outer call should revert. 
+ expect(results.revertReason?.message).toEqual('Assertion failed: Values are not equal'); + }); }); }); - - it('conversions', async () => { - const calldata: Fr[] = [new Fr(0b1011101010100)]; - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - - const bytecode = getAvmTestContractBytecode('to_radix_le'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); - const expectedResults = Buffer.concat('0010101011'.split('').map(c => new Fr(Number(c)).toBuffer())); - const resultBuffer = Buffer.concat(results.output.map(f => f.toBuffer())); - - expect(resultBuffer.equals(expectedResults)).toBe(true); - }); }); function sha256FromMemoryBytes(bytes: Uint8[]): Fr[] { diff --git a/yarn-project/simulator/src/avm/avm_simulator.ts b/yarn-project/simulator/src/avm/avm_simulator.ts index 6d0eb154332..64d13a2ffbe 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.ts @@ -29,10 +29,9 @@ export class AvmSimulator { * Fetch the bytecode and execute it in the current context. 
*/ public async execute(): Promise { - const selector = this.context.environment.temporaryFunctionSelector; - const bytecode = await this.context.persistableState.hostStorage.contractsDb.getBytecode( + const bytecode = await this.context.persistableState.getBytecode( this.context.environment.address, - selector, + this.context.environment.temporaryFunctionSelector, ); // This assumes that we will not be able to send messages to accounts without code diff --git a/yarn-project/simulator/src/avm/fixtures/index.ts b/yarn-project/simulator/src/avm/fixtures/index.ts index b96be7f003c..d7926c28dfe 100644 --- a/yarn-project/simulator/src/avm/fixtures/index.ts +++ b/yarn-project/simulator/src/avm/fixtures/index.ts @@ -4,20 +4,21 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { AvmTestContractArtifact } from '@aztec/noir-contracts.js'; -import { SerializableContractInstance } from '@aztec/types/contracts'; import { strict as assert } from 'assert'; import { mock } from 'jest-mock-extended'; import merge from 'lodash.merge'; import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from '../../index.js'; +import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { AvmContext } from '../avm_context.js'; import { AvmContextInputs, AvmExecutionEnvironment } from '../avm_execution_environment.js'; import { AvmMachineState } from '../avm_machine_state.js'; import { Field, Uint8 } from '../avm_memory_types.js'; import { HostStorage } from '../journal/host_storage.js'; import { AvmPersistableStateManager } from '../journal/journal.js'; -import { type TracedContractInstance } from '../journal/trace_types.js'; +import { NullifierManager } from '../journal/nullifiers.js'; +import { PublicStorage } from '../journal/public_storage.js'; /** * Create a new AVM context with default values. 
@@ -28,7 +29,7 @@ export function initContext(overrides?: { machineState?: AvmMachineState; }): AvmContext { return new AvmContext( - overrides?.persistableState || initMockPersistableStateManager(), + overrides?.persistableState || initPersistableStateManager(), overrides?.env || initExecutionEnvironment(), overrides?.machineState || initMachineState(), ); @@ -47,9 +48,20 @@ export function initHostStorage(overrides?: { ); } -/** Creates an empty state manager with mocked storage. */ -export function initMockPersistableStateManager(): AvmPersistableStateManager { - return new AvmPersistableStateManager(initHostStorage()); +/** Creates an empty state manager with mocked host storage. */ +export function initPersistableStateManager(overrides?: { + hostStorage?: HostStorage; + trace?: PublicSideEffectTraceInterface; + publicStorage?: PublicStorage; + nullifiers?: NullifierManager; +}): AvmPersistableStateManager { + const hostStorage = overrides?.hostStorage || initHostStorage(); + return new AvmPersistableStateManager( + hostStorage, + overrides?.trace || mock(), + overrides?.publicStorage || new PublicStorage(hostStorage.publicStateDb), + overrides?.nullifiers || new NullifierManager(hostStorage.commitmentsDb), + ); } /** @@ -138,14 +150,3 @@ export function getAvmTestContractBytecode(functionName: string): Buffer { ); return artifact.bytecode; } - -export function randomTracedContractInstance(): TracedContractInstance { - const instance = SerializableContractInstance.random(); - const address = AztecAddress.random(); - return { exists: true, ...instance, address }; -} - -export function emptyTracedContractInstance(withAddress?: AztecAddress): TracedContractInstance { - const instance = SerializableContractInstance.empty().withAddress(withAddress ?? 
AztecAddress.zero()); - return { exists: false, ...instance }; -} diff --git a/yarn-project/simulator/src/avm/journal/journal.test.ts b/yarn-project/simulator/src/avm/journal/journal.test.ts index 77b7b3732b6..7d001d3ee6a 100644 --- a/yarn-project/simulator/src/avm/journal/journal.test.ts +++ b/yarn-project/simulator/src/avm/journal/journal.test.ts @@ -1,445 +1,431 @@ -import { UnencryptedL2Log } from '@aztec/circuit-types'; -import { AztecAddress, EthAddress } from '@aztec/circuits.js'; -import { EventSelector } from '@aztec/foundation/abi'; +import { randomContractInstanceWithAddress } from '@aztec/circuit-types'; import { Fr } from '@aztec/foundation/fields'; - -import { type MockProxy, mock } from 'jest-mock-extended'; - -import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from '../../index.js'; -import { emptyTracedContractInstance, randomTracedContractInstance } from '../fixtures/index.js'; -import { HostStorage } from './host_storage.js'; -import { AvmPersistableStateManager, type JournalData } from './journal.js'; +import { SerializableContractInstance } from '@aztec/types/contracts'; + +import { mock } from 'jest-mock-extended'; + +import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; +import { initHostStorage, initPersistableStateManager } from '../fixtures/index.js'; +import { + mockGetContractInstance, + mockL1ToL2MessageExists, + mockNoteHashExists, + mockNullifierExists, + mockStorageRead, +} from '../test_utils.js'; +import { type HostStorage } from './host_storage.js'; +import { type AvmPersistableStateManager } from './journal.js'; describe('journal', () => { - let publicDb: MockProxy; - let contractsDb: MockProxy; - let commitmentsDb: MockProxy; - let journal: AvmPersistableStateManager; + const address = Fr.random(); + const utxo = Fr.random(); + const leafIndex = Fr.random(); - beforeEach(() => { - publicDb = mock(); - commitmentsDb = mock(); - contractsDb = mock(); + let 
hostStorage: HostStorage; + let trace: PublicSideEffectTraceInterface; + let persistableState: AvmPersistableStateManager; - const hostStorage = new HostStorage(publicDb, contractsDb, commitmentsDb); - journal = new AvmPersistableStateManager(hostStorage); + beforeEach(() => { + hostStorage = initHostStorage(); + trace = mock(); + persistableState = initPersistableStateManager({ hostStorage, trace }); }); describe('Public Storage', () => { it('When reading from storage, should check the cache first, and be appended to read/write journal', async () => { // Store a different value in storage vs the cache, and make sure the cache is returned - const contractAddress = new Fr(1); - const key = new Fr(2); + const slot = new Fr(2); const storedValue = new Fr(420); const cachedValue = new Fr(69); - publicDb.storageRead.mockResolvedValue(Promise.resolve(storedValue)); + mockStorageRead(hostStorage, storedValue); // Get the cache first - const cacheMissResult = await journal.readStorage(contractAddress, key); + const cacheMissResult = await persistableState.readStorage(address, slot); expect(cacheMissResult).toEqual(storedValue); // Write to storage - journal.writeStorage(contractAddress, key, cachedValue); + persistableState.writeStorage(address, slot, cachedValue); // Get the storage value - const cachedResult = await journal.readStorage(contractAddress, key); + const cachedResult = await persistableState.readStorage(address, slot); expect(cachedResult).toEqual(cachedValue); + // confirm that peek works + expect(await persistableState.peekStorage(address, slot)).toEqual(cachedResult); // We expect the journal to store the access in [storedVal, cachedVal] - [time0, time1] - const { storageReads, storageWrites }: JournalData = journal.flush(); - expect(storageReads).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - exists: true, - slot: key, - value: storedValue, - }), - expect.objectContaining({ - storageAddress: contractAddress, - exists: true, - 
slot: key, - value: cachedValue, - }), - ]); - expect(storageWrites).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - slot: key, - value: cachedValue, - }), - ]); + expect(trace.tracePublicStorageRead).toHaveBeenCalledTimes(2); + expect(trace.tracePublicStorageRead).toHaveBeenNthCalledWith( + /*nthCall=*/ 1, + address, + slot, + storedValue, + /*exists=*/ true, + /*cached=*/ false, + ); + expect(trace.tracePublicStorageRead).toHaveBeenNthCalledWith( + /*nthCall=*/ 2, + address, + slot, + cachedValue, + /*exists=*/ true, + /*cached=*/ true, + ); }); }); describe('UTXOs & messages', () => { - it('Should maintain commitments', () => { - const utxo = new Fr(1); - const address = new Fr(1234); - journal.writeNoteHash(address, utxo); - - const journalUpdates = journal.flush(); - expect(journalUpdates.newNoteHashes).toEqual([ - expect.objectContaining({ noteHash: utxo, storageAddress: address }), - ]); - }); - it('checkNullifierExists works for missing nullifiers', async () => { - const contractAddress = new Fr(1); - const utxo = new Fr(2); - const exists = await journal.checkNullifierExists(contractAddress, utxo); + it('checkNoteHashExists works for missing note hashes', async () => { + const exists = await persistableState.checkNoteHashExists(address, utxo, leafIndex); expect(exists).toEqual(false); - - const journalUpdates = journal.flush(); - expect(journalUpdates.nullifierChecks).toEqual([expect.objectContaining({ nullifier: utxo, exists: false })]); + expect(trace.traceNoteHashCheck).toHaveBeenCalledTimes(1); + expect(trace.traceNoteHashCheck).toHaveBeenCalledWith(address, utxo, leafIndex, exists); }); - it('checkNullifierExists works for existing nullifiers', async () => { - const contractAddress = new Fr(1); - const utxo = new Fr(2); - const storedLeafIndex = BigInt(42); - commitmentsDb.getNullifierIndex.mockResolvedValue(Promise.resolve(storedLeafIndex)); - const exists = await journal.checkNullifierExists(contractAddress, utxo); + 
it('checkNoteHashExists works for existing note hashes', async () => { + mockNoteHashExists(hostStorage, leafIndex, utxo); + const exists = await persistableState.checkNoteHashExists(address, utxo, leafIndex); expect(exists).toEqual(true); - - const journalUpdates = journal.flush(); - expect(journalUpdates.nullifierChecks).toEqual([expect.objectContaining({ nullifier: utxo, exists: true })]); + expect(trace.traceNoteHashCheck).toHaveBeenCalledTimes(1); + expect(trace.traceNoteHashCheck).toHaveBeenCalledWith(address, utxo, leafIndex, exists); }); - it('Should maintain nullifiers', async () => { - const contractAddress = new Fr(1); - const utxo = new Fr(2); - await journal.writeNullifier(contractAddress, utxo); - - const journalUpdates = journal.flush(); - expect(journalUpdates.newNullifiers).toEqual([ - expect.objectContaining({ storageAddress: contractAddress, nullifier: utxo }), - ]); + + it('writeNoteHash works', () => { + persistableState.writeNoteHash(address, utxo); + expect(trace.traceNewNoteHash).toHaveBeenCalledTimes(1); + expect(trace.traceNewNoteHash).toHaveBeenCalledWith(expect.objectContaining(address), /*noteHash=*/ utxo); }); - it('checkL1ToL2MessageExists works for missing message', async () => { - const msgHash = new Fr(2); - const leafIndex = new Fr(42); - const exists = await journal.checkL1ToL2MessageExists(msgHash, leafIndex); + it('checkNullifierExists works for missing nullifiers', async () => { + const exists = await persistableState.checkNullifierExists(address, utxo); expect(exists).toEqual(false); - - const journalUpdates = journal.flush(); - expect(journalUpdates.l1ToL2MessageChecks).toEqual([ - expect.objectContaining({ leafIndex: leafIndex, msgHash, exists: false }), - ]); + expect(trace.traceNullifierCheck).toHaveBeenCalledTimes(1); + expect(trace.traceNullifierCheck).toHaveBeenCalledWith( + address, + utxo, + /*leafIndex=*/ Fr.ZERO, + exists, + /*isPending=*/ false, + ); }); - it('checkL1ToL2MessageExists works for existing msgHash', 
async () => { - const msgHash = new Fr(2); - const leafIndex = new Fr(42); - commitmentsDb.getL1ToL2LeafValue.mockResolvedValue(msgHash); - const exists = await journal.checkL1ToL2MessageExists(msgHash, leafIndex); + it('checkNullifierExists works for existing nullifiers', async () => { + mockNullifierExists(hostStorage, leafIndex, utxo); + const exists = await persistableState.checkNullifierExists(address, utxo); expect(exists).toEqual(true); + expect(trace.traceNullifierCheck).toHaveBeenCalledTimes(1); + expect(trace.traceNullifierCheck).toHaveBeenCalledWith(address, utxo, leafIndex, exists, /*isPending=*/ false); + }); - const journalUpdates = journal.flush(); - expect(journalUpdates.l1ToL2MessageChecks).toEqual([ - expect.objectContaining({ leafIndex: leafIndex, msgHash, exists: true }), - ]); + it('writeNullifier works', async () => { + await persistableState.writeNullifier(address, utxo); + expect(trace.traceNewNullifier).toHaveBeenCalledWith(expect.objectContaining(address), /*nullifier=*/ utxo); }); - it('Should maintain nullifiers', async () => { - const contractAddress = new Fr(1); - const utxo = new Fr(2); - await journal.writeNullifier(contractAddress, utxo); - - const journalUpdates = journal.flush(); - expect(journalUpdates.newNullifiers).toEqual([ - expect.objectContaining({ storageAddress: contractAddress, nullifier: utxo }), - ]); + + it('checkL1ToL2MessageExists works for missing message', async () => { + const exists = await persistableState.checkL1ToL2MessageExists(address, utxo, leafIndex); + expect(exists).toEqual(false); + expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledTimes(1); + expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledWith(address, utxo, leafIndex, exists); }); - it('Should maintain l1 messages', () => { - const recipient = EthAddress.fromField(new Fr(1)); - const msgHash = new Fr(2); - journal.writeL1Message(recipient, msgHash); - const journalUpdates = journal.flush(); - 
expect(journalUpdates.newL1Messages).toEqual([expect.objectContaining({ recipient, content: msgHash })]); + it('checkL1ToL2MessageExists works for existing message', async () => { + mockL1ToL2MessageExists(hostStorage, leafIndex, utxo); + const exists = await persistableState.checkL1ToL2MessageExists(address, utxo, leafIndex); + expect(exists).toEqual(true); + expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledTimes(1); + expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledWith(address, utxo, leafIndex, exists); }); - describe('Getting contract instances', () => { - it('Should get contract instance', async () => { - const contractAddress = AztecAddress.fromField(new Fr(2)); - const instance = randomTracedContractInstance(); - instance.exists = true; - contractsDb.getContractInstance.mockResolvedValue(Promise.resolve(instance)); - await journal.getContractInstance(contractAddress); - expect(journal.trace.gotContractInstances).toEqual([instance]); - }); - it('Can get undefined contract instance', async () => { - const contractAddress = AztecAddress.fromField(new Fr(2)); - await journal.getContractInstance(contractAddress); - const emptyInstance = emptyTracedContractInstance(AztecAddress.fromField(contractAddress)); - expect(journal.trace.gotContractInstances).toEqual([emptyInstance]); - }); + it('Should maintain l1 messages', () => { + const recipient = new Fr(1); + persistableState.writeL2ToL1Message(recipient, utxo); + expect(trace.traceNewL2ToL1Message).toHaveBeenCalledTimes(1); + expect(trace.traceNewL2ToL1Message).toHaveBeenCalledWith(recipient, utxo); }); }); - it('Should merge two successful journals together', async () => { - // Fundamentally checking that insert ordering of public storage is preserved upon journal merge - // time | journal | op | value - // t0 -> journal0 -> write | 1 - // t1 -> journal1 -> write | 2 - // merge journals - // t2 -> journal0 -> read | 2 - - const contractAddress = new Fr(1); - const aztecContractAddress = 
AztecAddress.fromField(contractAddress); - const key = new Fr(2); - const value = new Fr(1); - const valueT1 = new Fr(2); - const recipient = EthAddress.fromField(new Fr(42)); - const commitment = new Fr(10); - const commitmentT1 = new Fr(20); - const log = { address: 10n, selector: 5, data: [new Fr(5), new Fr(6)] }; - const logT1 = { address: 20n, selector: 8, data: [new Fr(7), new Fr(8)] }; - const index = new Fr(42); - const indexT1 = new Fr(24); - const instance = emptyTracedContractInstance(aztecContractAddress); - - journal.writeStorage(contractAddress, key, value); - await journal.readStorage(contractAddress, key); - journal.writeNoteHash(contractAddress, commitment); - journal.writeLog(new Fr(log.address), new Fr(log.selector), log.data); - journal.writeL1Message(recipient, commitment); - await journal.writeNullifier(contractAddress, commitment); - await journal.checkNullifierExists(contractAddress, commitment); - await journal.checkL1ToL2MessageExists(commitment, index); - await journal.getContractInstance(aztecContractAddress); - - const childJournal = new AvmPersistableStateManager(journal.hostStorage, journal); - childJournal.writeStorage(contractAddress, key, valueT1); - await childJournal.readStorage(contractAddress, key); - childJournal.writeNoteHash(contractAddress, commitmentT1); - childJournal.writeLog(new Fr(logT1.address), new Fr(logT1.selector), logT1.data); - childJournal.writeL1Message(recipient, commitmentT1); - await childJournal.writeNullifier(contractAddress, commitmentT1); - await childJournal.checkNullifierExists(contractAddress, commitmentT1); - await childJournal.checkL1ToL2MessageExists(commitmentT1, indexT1); - await childJournal.getContractInstance(aztecContractAddress); - - journal.acceptNestedCallState(childJournal); - - const result = await journal.readStorage(contractAddress, key); - expect(result).toEqual(valueT1); - - // Check that the storage is merged by reading from the journal - // Check that the UTXOs are merged - const 
journalUpdates: JournalData = journal.flush(); - - // Check storage reads order is preserved upon merge - // We first read value from t0, then value from t1 - expect(journalUpdates.storageReads).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - exists: true, - slot: key, - value: value, - }), - expect.objectContaining({ - storageAddress: contractAddress, - exists: true, - slot: key, - value: valueT1, - }), - // Read a third time to check storage - expect.objectContaining({ - storageAddress: contractAddress, - exists: true, - slot: key, - value: valueT1, - }), - ]); - - // We first write value from t0, then value from t1 - expect(journalUpdates.storageWrites).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - slot: key, - value: value, - }), - expect.objectContaining({ - storageAddress: contractAddress, - slot: key, - value: valueT1, - }), - ]); - - expect(journalUpdates.newNoteHashes).toEqual([ - expect.objectContaining({ noteHash: commitment, storageAddress: contractAddress }), - expect.objectContaining({ noteHash: commitmentT1, storageAddress: contractAddress }), - ]); - expect(journalUpdates.newLogs).toEqual([ - new UnencryptedL2Log( - AztecAddress.fromBigInt(log.address), - new EventSelector(log.selector), - Buffer.concat(log.data.map(f => f.toBuffer())), - ), - new UnencryptedL2Log( - AztecAddress.fromBigInt(logT1.address), - new EventSelector(logT1.selector), - Buffer.concat(logT1.data.map(f => f.toBuffer())), - ), - ]); - expect(journalUpdates.newL1Messages).toEqual([ - expect.objectContaining({ recipient, content: commitment }), - expect.objectContaining({ recipient, content: commitmentT1 }), - ]); - expect(journalUpdates.nullifierChecks).toEqual([ - expect.objectContaining({ nullifier: commitment, exists: true }), - expect.objectContaining({ nullifier: commitmentT1, exists: true }), - ]); - expect(journalUpdates.newNullifiers).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - 
nullifier: commitment, - }), - expect.objectContaining({ - storageAddress: contractAddress, - nullifier: commitmentT1, - }), - ]); - expect(journalUpdates.l1ToL2MessageChecks).toEqual([ - expect.objectContaining({ leafIndex: index, msgHash: commitment, exists: false }), - expect.objectContaining({ leafIndex: indexT1, msgHash: commitmentT1, exists: false }), - ]); - expect(journal.trace.gotContractInstances).toEqual([instance, instance]); - }); + describe('Getting contract instances', () => { + it('Should get contract instance', async () => { + const contractInstance = randomContractInstanceWithAddress(/*(base instance) opts=*/ {}, /*address=*/ address); + mockGetContractInstance(hostStorage, contractInstance); + await persistableState.getContractInstance(address); + expect(trace.traceGetContractInstance).toHaveBeenCalledTimes(1); + expect(trace.traceGetContractInstance).toHaveBeenCalledWith({ exists: true, ...contractInstance }); + }); + it('Can get undefined contract instance', async () => { + const emptyContractInstance = SerializableContractInstance.empty().withAddress(address); + await persistableState.getContractInstance(address); - it('Should merge failed journals together', async () => { - // Checking public storage update journals are preserved upon journal merge, - // But the latest state is not - - // time | journal | op | value - // t0 -> journal0 -> write | 1 - // t1 -> journal1 -> write | 2 - // merge journals - // t2 -> journal0 -> read | 1 - - const contractAddress = new Fr(1); - const aztecContractAddress = AztecAddress.fromField(contractAddress); - const key = new Fr(2); - const value = new Fr(1); - const valueT1 = new Fr(2); - const recipient = EthAddress.fromField(new Fr(42)); - const commitment = new Fr(10); - const commitmentT1 = new Fr(20); - const log = { address: 10n, selector: 5, data: [new Fr(5), new Fr(6)] }; - const logT1 = { address: 20n, selector: 8, data: [new Fr(7), new Fr(8)] }; - const index = new Fr(42); - const indexT1 = new 
Fr(24); - const instance = emptyTracedContractInstance(aztecContractAddress); - - journal.writeStorage(contractAddress, key, value); - await journal.readStorage(contractAddress, key); - journal.writeNoteHash(contractAddress, commitment); - await journal.writeNullifier(contractAddress, commitment); - await journal.checkNullifierExists(contractAddress, commitment); - await journal.checkL1ToL2MessageExists(commitment, index); - journal.writeLog(new Fr(log.address), new Fr(log.selector), log.data); - journal.writeL1Message(recipient, commitment); - await journal.getContractInstance(aztecContractAddress); - - const childJournal = new AvmPersistableStateManager(journal.hostStorage, journal); - childJournal.writeStorage(contractAddress, key, valueT1); - await childJournal.readStorage(contractAddress, key); - childJournal.writeNoteHash(contractAddress, commitmentT1); - await childJournal.writeNullifier(contractAddress, commitmentT1); - await childJournal.checkNullifierExists(contractAddress, commitmentT1); - await journal.checkL1ToL2MessageExists(commitmentT1, indexT1); - childJournal.writeLog(new Fr(logT1.address), new Fr(logT1.selector), logT1.data); - childJournal.writeL1Message(recipient, commitmentT1); - await childJournal.getContractInstance(aztecContractAddress); - - journal.rejectNestedCallState(childJournal); - - // Check that the storage is reverted by reading from the journal - const result = await journal.readStorage(contractAddress, key); - expect(result).toEqual(value); // rather than valueT1 - - const journalUpdates: JournalData = journal.flush(); - - // Reads and writes should be preserved - // Check storage reads order is preserved upon merge - // We first read value from t0, then value from t1 - expect(journalUpdates.storageReads).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - exists: true, - slot: key, - value: value, - }), - expect.objectContaining({ - storageAddress: contractAddress, - exists: true, - slot: key, - value: 
valueT1, - }), - // Read a third time to check storage - expect.objectContaining({ - storageAddress: contractAddress, - exists: true, - slot: key, - value: value, - }), - ]); - - // We first write value from t0, then value from t1 - expect(journalUpdates.storageWrites).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - slot: key, - value: value, - }), - expect.objectContaining({ - storageAddress: contractAddress, - slot: key, - value: valueT1, - }), - ]); - - // Check that the world state _traces_ are merged even on rejection - expect(journalUpdates.newNoteHashes).toEqual([ - expect.objectContaining({ noteHash: commitment, storageAddress: contractAddress }), - expect.objectContaining({ noteHash: commitmentT1, storageAddress: contractAddress }), - ]); - expect(journalUpdates.nullifierChecks).toEqual([ - expect.objectContaining({ nullifier: commitment, exists: true }), - expect.objectContaining({ nullifier: commitmentT1, exists: true }), - ]); - expect(journalUpdates.newNullifiers).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - nullifier: commitment, - }), - expect.objectContaining({ - storageAddress: contractAddress, - nullifier: commitmentT1, - }), - ]); - expect(journalUpdates.l1ToL2MessageChecks).toEqual([ - expect.objectContaining({ leafIndex: index, msgHash: commitment, exists: false }), - expect.objectContaining({ leafIndex: indexT1, msgHash: commitmentT1, exists: false }), - ]); - - // Check that rejected Accrued Substate is absent - expect(journalUpdates.newLogs).toEqual([ - new UnencryptedL2Log( - AztecAddress.fromBigInt(log.address), - new EventSelector(log.selector), - Buffer.concat(log.data.map(f => f.toBuffer())), - ), - ]); - expect(journalUpdates.newL1Messages).toEqual([expect.objectContaining({ recipient, content: commitment })]); - expect(journal.trace.gotContractInstances).toEqual([instance, instance]); + expect(trace.traceGetContractInstance).toHaveBeenCalledTimes(1); + 
expect(trace.traceGetContractInstance).toHaveBeenCalledWith({ exists: false, ...emptyContractInstance }); + }); }); - it('Can fork and merge journals', () => { - const rootJournal = new AvmPersistableStateManager(journal.hostStorage); - const childJournal = rootJournal.fork(); - - expect(() => rootJournal.acceptNestedCallState(childJournal)); - expect(() => rootJournal.rejectNestedCallState(childJournal)); - }); + //it('Should merge two successful journals together', async () => { + // // Fundamentally checking that insert ordering of public storage is preserved upon journal merge + // // time | journal | op | value + // // t0 -> journal0 -> write | 1 + // // t1 -> journal1 -> write | 2 + // // merge journals + // // t2 -> journal0 -> read | 2 + + // const contractAddress = new Fr(1); + // const aztecContractAddress = AztecAddress.fromField(contractAddress); + // const key = new Fr(2); + // const value = new Fr(1); + // const valueT1 = new Fr(2); + // const recipient = EthAddress.fromField(new Fr(42)); + // const commitment = new Fr(10); + // const commitmentT1 = new Fr(20); + // const log = { address: 10n, selector: 5, data: [new Fr(5), new Fr(6)] }; + // const logT1 = { address: 20n, selector: 8, data: [new Fr(7), new Fr(8)] }; + // const index = new Fr(42); + // const indexT1 = new Fr(24); + // const instance = emptyTracedContractInstance(aztecContractAddress); + + // persistableState.writeStorage(contractAddress, key, value); + // await persistableState.readStorage(contractAddress, key); + // persistableState.writeNoteHash(contractAddress, commitment); + // persistableState.writeUnencryptedLog(new Fr(log.address), new Fr(log.selector), log.data); + // persistableState.writeL2ToL1Message(recipient, commitment); + // await persistableState.writeNullifier(contractAddress, commitment); + // await persistableState.checkNullifierExists(contractAddress, commitment); + // await persistableState.checkL1ToL2MessageExists(commitment, index); + // await 
persistableState.getContractInstance(aztecContractAddress); + + // const childJournal = new AvmPersistableStateManager(persistableState.hostStorage, persistableState); + // childJournal.writeStorage(contractAddress, key, valueT1); + // await childJournal.readStorage(contractAddress, key); + // childJournal.writeNoteHash(contractAddress, commitmentT1); + // childJournal.writeUnencryptedLog(new Fr(logT1.address), new Fr(logT1.selector), logT1.data); + // childJournal.writeL2ToL1Message(recipient, commitmentT1); + // await childJournal.writeNullifier(contractAddress, commitmentT1); + // await childJournal.checkNullifierExists(contractAddress, commitmentT1); + // await childJournal.checkL1ToL2MessageExists(commitmentT1, indexT1); + // await childJournal.getContractInstance(aztecContractAddress); + + // persistableState.acceptNestedCallState(childJournal); + + // const result = await persistableState.readStorage(contractAddress, key); + // expect(result).toEqual(valueT1); + + // // Check that the storage is merged by reading from the journal + // // Check that the UTXOs are merged + // const journalUpdates: JournalData = persistableState.getTrace()(); + + // // Check storage reads order is preserved upon merge + // // We first read value from t0, then value from t1 + // expect(journalUpdates.storageReads).toEqual([ + // expect.objectContaining({ + // storageAddress: contractAddress, + // exists: true, + // slot: key, + // value: value, + // }), + // expect.objectContaining({ + // storageAddress: contractAddress, + // exists: true, + // slot: key, + // value: valueT1, + // }), + // // Read a third time to check storage + // expect.objectContaining({ + // storageAddress: contractAddress, + // exists: true, + // slot: key, + // value: valueT1, + // }), + // ]); + + // // We first write value from t0, then value from t1 + // expect(journalUpdates.storageWrites).toEqual([ + // expect.objectContaining({ + // storageAddress: contractAddress, + // slot: key, + // value: value, 
+ // }), + // expect.objectContaining({ + // storageAddress: contractAddress, + // slot: key, + // value: valueT1, + // }), + // ]); + + // expect(journalUpdates.newNoteHashes).toEqual([ + // expect.objectContaining({ noteHash: commitment, storageAddress: contractAddress }), + // expect.objectContaining({ noteHash: commitmentT1, storageAddress: contractAddress }), + // ]); + // expect(journalUpdates.newLogs).toEqual([ + // new UnencryptedL2Log( + // AztecAddress.fromBigInt(log.address), + // new EventSelector(log.selector), + // Buffer.concat(log.data.map(f => f.toBuffer())), + // ), + // new UnencryptedL2Log( + // AztecAddress.fromBigInt(logT1.address), + // new EventSelector(logT1.selector), + // Buffer.concat(logT1.data.map(f => f.toBuffer())), + // ), + // ]); + // expect(journalUpdates.newL1Messages).toEqual([ + // expect.objectContaining({ recipient, content: commitment }), + // expect.objectContaining({ recipient, content: commitmentT1 }), + // ]); + // expect(journalUpdates.nullifierChecks).toEqual([ + // expect.objectContaining({ nullifier: commitment, exists: true }), + // expect.objectContaining({ nullifier: commitmentT1, exists: true }), + // ]); + // expect(journalUpdates.newNullifiers).toEqual([ + // expect.objectContaining({ + // storageAddress: contractAddress, + // nullifier: commitment, + // }), + // expect.objectContaining({ + // storageAddress: contractAddress, + // nullifier: commitmentT1, + // }), + // ]); + // expect(journalUpdates.l1ToL2MessageChecks).toEqual([ + // expect.objectContaining({ leafIndex: index, msgHash: commitment, exists: false }), + // expect.objectContaining({ leafIndex: indexT1, msgHash: commitmentT1, exists: false }), + // ]); + // expect(persistableState.trace.gotContractInstances).toEqual([instance, instance]); + //}); + + //it('Should merge failed journals together', async () => { + // // Checking public storage update journals are preserved upon journal merge, + // // But the latest state is not + + // // time | 
journal | op | value + // // t0 -> journal0 -> write | 1 + // // t1 -> journal1 -> write | 2 + // // merge journals + // // t2 -> journal0 -> read | 1 + + // const contractAddress = new Fr(1); + // const aztecContractAddress = AztecAddress.fromField(contractAddress); + // const key = new Fr(2); + // const value = new Fr(1); + // const valueT1 = new Fr(2); + // const recipient = EthAddress.fromField(new Fr(42)); + // const commitment = new Fr(10); + // const commitmentT1 = new Fr(20); + // const log = { address: 10n, selector: 5, data: [new Fr(5), new Fr(6)] }; + // const logT1 = { address: 20n, selector: 8, data: [new Fr(7), new Fr(8)] }; + // const index = new Fr(42); + // const indexT1 = new Fr(24); + // const instance = emptyTracedContractInstance(aztecContractAddress); + + // persistableState.writeStorage(contractAddress, key, value); + // await persistableState.readStorage(contractAddress, key); + // persistableState.writeNoteHash(contractAddress, commitment); + // await persistableState.writeNullifier(contractAddress, commitment); + // await persistableState.checkNullifierExists(contractAddress, commitment); + // await persistableState.checkL1ToL2MessageExists(commitment, index); + // persistableState.writeUnencryptedLog(new Fr(log.address), new Fr(log.selector), log.data); + // persistableState.writeL2ToL1Message(recipient, commitment); + // await persistableState.getContractInstance(aztecContractAddress); + + // const childJournal = new AvmPersistableStateManager(persistableState.hostStorage, persistableState); + // childJournal.writeStorage(contractAddress, key, valueT1); + // await childJournal.readStorage(contractAddress, key); + // childJournal.writeNoteHash(contractAddress, commitmentT1); + // await childJournal.writeNullifier(contractAddress, commitmentT1); + // await childJournal.checkNullifierExists(contractAddress, commitmentT1); + // await persistableState.checkL1ToL2MessageExists(commitmentT1, indexT1); + // childJournal.writeUnencryptedLog(new 
Fr(logT1.address), new Fr(logT1.selector), logT1.data); + // childJournal.writeL2ToL1Message(recipient, commitmentT1); + // await childJournal.getContractInstance(aztecContractAddress); + + // persistableState.rejectNestedCallState(childJournal); + + // // Check that the storage is reverted by reading from the journal + // const result = await persistableState.readStorage(contractAddress, key); + // expect(result).toEqual(value); // rather than valueT1 + + // const journalUpdates: JournalData = persistableState.getTrace()(); + + // // Reads and writes should be preserved + // // Check storage reads order is preserved upon merge + // // We first read value from t0, then value from t1 + // expect(journalUpdates.storageReads).toEqual([ + // expect.objectContaining({ + // storageAddress: contractAddress, + // exists: true, + // slot: key, + // value: value, + // }), + // expect.objectContaining({ + // storageAddress: contractAddress, + // exists: true, + // slot: key, + // value: valueT1, + // }), + // // Read a third time to check storage + // expect.objectContaining({ + // storageAddress: contractAddress, + // exists: true, + // slot: key, + // value: value, + // }), + // ]); + + // // We first write value from t0, then value from t1 + // expect(journalUpdates.storageWrites).toEqual([ + // expect.objectContaining({ + // storageAddress: contractAddress, + // slot: key, + // value: value, + // }), + // expect.objectContaining({ + // storageAddress: contractAddress, + // slot: key, + // value: valueT1, + // }), + // ]); + + // // Check that the world state _traces_ are merged even on rejection + // expect(journalUpdates.newNoteHashes).toEqual([ + // expect.objectContaining({ noteHash: commitment, storageAddress: contractAddress }), + // expect.objectContaining({ noteHash: commitmentT1, storageAddress: contractAddress }), + // ]); + // expect(journalUpdates.nullifierChecks).toEqual([ + // expect.objectContaining({ nullifier: commitment, exists: true }), + // 
expect.objectContaining({ nullifier: commitmentT1, exists: true }), + // ]); + // expect(journalUpdates.newNullifiers).toEqual([ + // expect.objectContaining({ + // storageAddress: contractAddress, + // nullifier: commitment, + // }), + // expect.objectContaining({ + // storageAddress: contractAddress, + // nullifier: commitmentT1, + // }), + // ]); + // expect(journalUpdates.l1ToL2MessageChecks).toEqual([ + // expect.objectContaining({ leafIndex: index, msgHash: commitment, exists: false }), + // expect.objectContaining({ leafIndex: indexT1, msgHash: commitmentT1, exists: false }), + // ]); + + // // Check that rejected Accrued Substate is absent + // expect(journalUpdates.newLogs).toEqual([ + // new UnencryptedL2Log( + // AztecAddress.fromBigInt(log.address), + // new EventSelector(log.selector), + // Buffer.concat(log.data.map(f => f.toBuffer())), + // ), + // ]); + // expect(journalUpdates.newL1Messages).toEqual([expect.objectContaining({ recipient, content: commitment })]); + // expect(persistableState.trace.gotContractInstances).toEqual([instance, instance]); + //}); + + //it('Can fork and merge journals', () => { + // const rootJournal = new AvmPersistableStateManager(persistableState.hostStorage); + // const childJournal = rootJournal.fork(); + + // expect(() => rootJournal.acceptNestedCallState(childJournal)); + // expect(() => rootJournal.rejectNestedCallState(childJournal)); + //}); }); diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index dd028a63db9..06e6465385f 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ -1,139 +1,69 @@ -// TODO(5818): Rename file and all uses of "journal" -import { UnencryptedL2Log } from '@aztec/circuit-types'; -import { - AztecAddress, - ContractStorageRead, - ContractStorageUpdateRequest, - EthAddress, - L2ToL1Message, - LogHash, - NoteHash, - Nullifier, - ReadRequest, -} from 
'@aztec/circuits.js'; -import { EventSelector } from '@aztec/foundation/abi'; -import { Fr } from '@aztec/foundation/fields'; +import { AztecAddress, type FunctionSelector, type Gas } from '@aztec/circuits.js'; +import { type Fr } from '@aztec/foundation/fields'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { SerializableContractInstance } from '@aztec/types/contracts'; -import { type PublicExecutionResult } from '../../index.js'; +import { type TracedContractInstance } from '../../public/side_effect_trace.js'; +import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; +import { type AvmExecutionEnvironment } from '../avm_execution_environment.js'; +import { type AvmContractCallResults } from '../avm_message_call_result.js'; import { type HostStorage } from './host_storage.js'; -import { Nullifiers } from './nullifiers.js'; +import { NullifierManager } from './nullifiers.js'; import { PublicStorage } from './public_storage.js'; -import { WorldStateAccessTrace } from './trace.js'; -import { - type TracedContractInstance, - type TracedL1toL2MessageCheck, - type TracedNoteHash, - type TracedNoteHashCheck, - type TracedNullifier, - type TracedNullifierCheck, - type TracedPublicStorageRead, - type TracedPublicStorageWrite, - type TracedUnencryptedL2Log, -} from './trace_types.js'; - -// TODO:(5818): do we need this type anymore? 
-/** - * Data held within the journal - */ -export type JournalData = { - storageWrites: TracedPublicStorageWrite[]; - storageReads: TracedPublicStorageRead[]; - - noteHashChecks: TracedNoteHashCheck[]; - newNoteHashes: TracedNoteHash[]; - nullifierChecks: TracedNullifierCheck[]; - newNullifiers: TracedNullifier[]; - l1ToL2MessageChecks: TracedL1toL2MessageCheck[]; - - newL1Messages: L2ToL1Message[]; - newLogs: UnencryptedL2Log[]; - newLogsHashes: TracedUnencryptedL2Log[]; - /** contract address -\> key -\> value */ - currentStorageValue: Map>; - - sideEffectCounter: number; -}; - -// TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit -export type PartialPublicExecutionResult = { - noteHashReadRequests: ReadRequest[]; - nullifierReadRequests: ReadRequest[]; - nullifierNonExistentReadRequests: ReadRequest[]; - l1ToL2MsgReadRequests: ReadRequest[]; - newNoteHashes: NoteHash[]; - newL2ToL1Messages: L2ToL1Message[]; - startSideEffectCounter: number; - newNullifiers: Nullifier[]; - contractStorageReads: ContractStorageRead[]; - contractStorageUpdateRequests: ContractStorageUpdateRequest[]; - unencryptedLogsHashes: LogHash[]; - unencryptedLogs: UnencryptedL2Log[]; - allUnencryptedLogs: UnencryptedL2Log[]; - nestedExecutions: PublicExecutionResult[]; -}; /** * A class to manage persistable AVM state for contract calls. * Maintains a cache of the current world state, - * a trace of all world state accesses, and a list of accrued substate items. + * a trace of all side effects. * - * The simulator should make any world state and accrued substate queries through this object. + * The simulator should make any world state / tree queries through this object. * * Manages merging of successful/reverted child state into current state. 
*/ export class AvmPersistableStateManager { private readonly log: DebugLogger = createDebugLogger('aztec:avm_simulator:state_manager'); - /** Reference to node storage */ - public readonly hostStorage: HostStorage; - - // TODO(5818): make members private once this is not used in transitional_adaptors.ts. - /** World State */ - /** Public storage, including cached writes */ - public publicStorage: PublicStorage; - /** Nullifier set, including cached/recently-emitted nullifiers */ - public nullifiers: Nullifiers; - /** World State Access Trace */ - public trace: WorldStateAccessTrace; + constructor( + /** Reference to node storage */ + private hostStorage: HostStorage, + /** Side effect trace */ + private trace: PublicSideEffectTraceInterface, + /** Public storage, including cached writes */ + public readonly publicStorage: PublicStorage, + /** Nullifier set, including cached/recently-emitted nullifiers */ + private readonly nullifiers: NullifierManager, + ) {} - /** Accrued Substate **/ - public newL1Messages: L2ToL1Message[] = []; - public newLogs: UnencryptedL2Log[] = []; - - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - public transitionalExecutionResult: PartialPublicExecutionResult; - - constructor(hostStorage: HostStorage, parent?: AvmPersistableStateManager) { - this.hostStorage = hostStorage; - this.publicStorage = new PublicStorage(hostStorage.publicStateDb, parent?.publicStorage); - this.nullifiers = new Nullifiers(hostStorage.commitmentsDb, parent?.nullifiers); - this.trace = new WorldStateAccessTrace(parent?.trace); - - this.transitionalExecutionResult = { - noteHashReadRequests: [], - nullifierReadRequests: [], - nullifierNonExistentReadRequests: [], - l1ToL2MsgReadRequests: [], - newNoteHashes: [], - newL2ToL1Messages: [], - startSideEffectCounter: this.trace.accessCounter, - newNullifiers: [], - contractStorageReads: [], - contractStorageUpdateRequests: [], - unencryptedLogsHashes: [], - 
unencryptedLogs: [], - allUnencryptedLogs: [], - nestedExecutions: [], - }; + /** + * Create a new state manager with some preloaded pending siloed nullifiers + */ + public static newWithPendingSiloedNullifiers( + hostStorage: HostStorage, + trace: PublicSideEffectTraceInterface, + pendingSiloedNullifiers: Fr[], + ) { + const parentNullifiers = NullifierManager.newWithPendingSiloedNullifiers( + hostStorage.commitmentsDb, + pendingSiloedNullifiers, + ); + return new AvmPersistableStateManager( + hostStorage, + trace, + /*publicStorage=*/ new PublicStorage(hostStorage.publicStateDb), + /*nullifiers=*/ parentNullifiers.fork(), + ); } /** * Create a new state manager forked from this one */ public fork() { - return new AvmPersistableStateManager(this.hostStorage, this); + return new AvmPersistableStateManager( + this.hostStorage, + this.trace.fork(), + this.publicStorage.fork(), + this.nullifiers.fork(), + ); } /** @@ -147,13 +77,6 @@ export class AvmPersistableStateManager { this.log.debug(`Storage write (address=${storageAddress}, slot=${slot}): value=${value}`); // Cache storage writes for later reference/reads this.publicStorage.write(storageAddress, slot, value); - - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - this.transitionalExecutionResult.contractStorageUpdateRequests.push( - new ContractStorageUpdateRequest(slot, value, this.trace.accessCounter, storageAddress), - ); - - // Trace all storage writes (even reverted ones) this.trace.tracePublicStorageWrite(storageAddress, slot, value); } @@ -169,14 +92,22 @@ export class AvmPersistableStateManager { this.log.debug( `Storage read (address=${storageAddress}, slot=${slot}): value=${value}, exists=${exists}, cached=${cached}`, ); + this.trace.tracePublicStorageRead(storageAddress, slot, value, exists, cached); + return Promise.resolve(value); + } - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - 
this.transitionalExecutionResult.contractStorageReads.push( - new ContractStorageRead(slot, value, this.trace.accessCounter, storageAddress), + /** + * Read from public storage, don't trace the read. + * + * @param storageAddress - the address of the contract whose storage is being read from + * @param slot - the slot in the contract's storage being read from + * @returns the latest value written to slot, or 0 if never written to before + */ + public async peekStorage(storageAddress: Fr, slot: Fr): Promise { + const { value, exists, cached } = await this.publicStorage.read(storageAddress, slot); + this.log.debug( + `Storage peek (address=${storageAddress}, slot=${slot}): value=${value}, exists=${exists}, cached=${cached}`, ); - - // We want to keep track of all performed reads (even reverted ones) - this.trace.tracePublicStorageRead(storageAddress, slot, value, exists, cached); return Promise.resolve(value); } @@ -193,11 +124,7 @@ export class AvmPersistableStateManager { const gotLeafIndex = await this.hostStorage.commitmentsDb.getCommitmentIndex(noteHash); const exists = gotLeafIndex === leafIndex.toBigInt(); this.log.debug(`noteHashes(${storageAddress})@${noteHash} ?? leafIndex: ${leafIndex}, exists: ${exists}.`); - - // TODO: include exists here also - This can for sure come from the trace??? 
- this.transitionalExecutionResult.noteHashReadRequests.push(new ReadRequest(noteHash, this.trace.accessCounter)); - - this.trace.traceNoteHashCheck(storageAddress, noteHash, exists, leafIndex); + this.trace.traceNoteHashCheck(storageAddress, noteHash, leafIndex, exists); return Promise.resolve(exists); } @@ -206,9 +133,6 @@ export class AvmPersistableStateManager { * @param noteHash - the unsiloed note hash to write */ public writeNoteHash(storageAddress: Fr, noteHash: Fr) { - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - this.transitionalExecutionResult.newNoteHashes.push(new NoteHash(noteHash, this.trace.accessCounter)); - this.log.debug(`noteHashes(${storageAddress}) += @${noteHash}.`); this.trace.traceNewNoteHash(storageAddress, noteHash); } @@ -222,19 +146,9 @@ export class AvmPersistableStateManager { public async checkNullifierExists(storageAddress: Fr, nullifier: Fr): Promise { const [exists, isPending, leafIndex] = await this.nullifiers.checkExists(storageAddress, nullifier); this.log.debug( - `nullifiers(${storageAddress})@${nullifier} ?? leafIndex: ${leafIndex}, pending: ${isPending}, exists: ${exists}.`, + `nullifiers(${storageAddress})@${nullifier} ?? 
leafIndex: ${leafIndex}, exists: ${exists}, pending: ${isPending}.`, ); - - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - if (exists) { - this.transitionalExecutionResult.nullifierReadRequests.push(new ReadRequest(nullifier, this.trace.accessCounter)); - } else { - this.transitionalExecutionResult.nullifierNonExistentReadRequests.push( - new ReadRequest(nullifier, this.trace.accessCounter), - ); - } - - this.trace.traceNullifierCheck(storageAddress, nullifier, exists, isPending, leafIndex); + this.trace.traceNullifierCheck(storageAddress, nullifier, leafIndex, exists, isPending); return Promise.resolve(exists); } @@ -244,11 +158,6 @@ export class AvmPersistableStateManager { * @param nullifier - the unsiloed nullifier to write */ public async writeNullifier(storageAddress: Fr, nullifier: Fr) { - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - this.transitionalExecutionResult.newNullifiers.push( - new Nullifier(nullifier, this.trace.accessCounter, /*noteHash=*/ Fr.ZERO), - ); - this.log.debug(`nullifiers(${storageAddress}) += ${nullifier}.`); // Cache pending nullifiers for later access await this.nullifiers.append(storageAddress, nullifier); @@ -262,16 +171,13 @@ export class AvmPersistableStateManager { * @param msgLeafIndex - the message leaf index to use in the check * @returns exists - whether the message exists in the L1 to L2 Messages tree */ - public async checkL1ToL2MessageExists(msgHash: Fr, msgLeafIndex: Fr): Promise { + public async checkL1ToL2MessageExists(contractAddress: Fr, msgHash: Fr, msgLeafIndex: Fr): Promise { const valueAtIndex = await this.hostStorage.commitmentsDb.getL1ToL2LeafValue(msgLeafIndex.toBigInt()); const exists = valueAtIndex?.equals(msgHash) ?? false; this.log.debug( `l1ToL2Messages(@${msgLeafIndex}) ?? 
exists: ${exists}, expected: ${msgHash}, found: ${valueAtIndex}.`, ); - - this.transitionalExecutionResult.l1ToL2MsgReadRequests.push(new ReadRequest(msgHash, this.trace.accessCounter)); - - this.trace.traceL1ToL2MessageCheck(msgHash, msgLeafIndex, exists); + this.trace.traceL1ToL2MessageCheck(contractAddress, msgHash, msgLeafIndex, exists); return Promise.resolve(exists); } @@ -280,40 +186,27 @@ export class AvmPersistableStateManager { * @param recipient - L1 contract address to send the message to. * @param content - Message content. */ - public writeL1Message(recipient: EthAddress | Fr, content: Fr) { + public writeL2ToL1Message(recipient: Fr, content: Fr) { this.log.debug(`L1Messages(${recipient}) += ${content}.`); - const recipientAddress = recipient instanceof EthAddress ? recipient : EthAddress.fromField(recipient); - const message = new L2ToL1Message(recipientAddress, content, 0); - this.newL1Messages.push(message); - - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - this.transitionalExecutionResult.newL2ToL1Messages.push(message); + this.trace.traceNewL2ToL1Message(recipient, content); } - public writeLog(contractAddress: Fr, event: Fr, log: Fr[]) { + /** + * Write an unencrypted log + * @param contractAddress - address of the contract that emitted the log + * @param event - log event selector + * @param log - log contents + */ + public writeUnencryptedLog(contractAddress: Fr, event: Fr, log: Fr[]) { this.log.debug(`UnencryptedL2Log(${contractAddress}) += event ${event} with ${log.length} fields.`); - const ulog = new UnencryptedL2Log( - AztecAddress.fromField(contractAddress), - EventSelector.fromField(event), - Buffer.concat(log.map(f => f.toBuffer())), - ); - const logHash = Fr.fromBuffer(ulog.hash()); - - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - this.transitionalExecutionResult.unencryptedLogs.push(ulog); - 
this.transitionalExecutionResult.allUnencryptedLogs.push(ulog); - // this duplicates exactly what happens in the trace just for the purpose of transitional integration with the kernel - this.transitionalExecutionResult.unencryptedLogsHashes.push( - // TODO(6578): explain magic number 4 here - new LogHash(logHash, this.trace.accessCounter, new Fr(ulog.length + 4)), - ); - // TODO(6206): likely need to track this here and not just in the transitional logic. - - // TODO(6205): why are logs pushed here but logs hashes are traced? - this.newLogs.push(ulog); - this.trace.traceNewLog(logHash); + this.trace.traceUnencryptedLog(contractAddress, event, log); } + /** + * Get a contract instance. + * @param contractAddress - address of the contract instance to retrieve. + * @returns the contract instance with an "exists" flag + */ public async getContractInstance(contractAddress: Fr): Promise { let exists = true; const aztecAddress = AztecAddress.fromField(contractAddress); @@ -322,59 +215,57 @@ export class AvmPersistableStateManager { instance = SerializableContractInstance.empty().withAddress(aztecAddress); exists = false; } + this.log.debug( + `Get Contract instance (address=${contractAddress}): exists=${exists}, instance=${JSON.stringify(instance)}`, + ); const tracedInstance = { ...instance, exists }; this.trace.traceGetContractInstance(tracedInstance); return Promise.resolve(tracedInstance); } /** - * Accept nested world state modifications, merging in its trace and accrued substate + * Accept nested world state modifications */ - public acceptNestedCallState(nestedJournal: AvmPersistableStateManager) { - // Merge Public Storage - this.publicStorage.acceptAndMerge(nestedJournal.publicStorage); - - // Merge World State Access Trace - this.trace.acceptAndMerge(nestedJournal.trace); - - // Accrued Substate - this.newL1Messages.push(...nestedJournal.newL1Messages); - this.newLogs.push(...nestedJournal.newLogs); - - // TRANSITIONAL: This should be removed once the kernel 
handles and entire enqueued call per circuit - this.transitionalExecutionResult.allUnencryptedLogs.push( - ...nestedJournal.transitionalExecutionResult.allUnencryptedLogs, - ); + public acceptNestedCallState(nestedState: AvmPersistableStateManager) { + this.publicStorage.acceptAndMerge(nestedState.publicStorage); + this.nullifiers.acceptAndMerge(nestedState.nullifiers); } /** - * Reject nested world state, merging in its trace, but not accepting any state modifications + * Get a contract's bytecode from the contracts DB */ - public rejectNestedCallState(nestedJournal: AvmPersistableStateManager) { - // Merge World State Access Trace - this.trace.acceptAndMerge(nestedJournal.trace); + public async getBytecode(contractAddress: AztecAddress, selector: FunctionSelector): Promise { + return await this.hostStorage.contractsDb.getBytecode(contractAddress, selector); } - // TODO:(5818): do we need this type anymore? /** - * Access the current state of the journal - * - * @returns a JournalData object + * Accept the nested call's state and trace the nested call */ - public flush(): JournalData { - return { - noteHashChecks: this.trace.noteHashChecks, - newNoteHashes: this.trace.newNoteHashes, - nullifierChecks: this.trace.nullifierChecks, - newNullifiers: this.trace.newNullifiers, - l1ToL2MessageChecks: this.trace.l1ToL2MessageChecks, - newL1Messages: this.newL1Messages, - newLogs: this.newLogs, - newLogsHashes: this.trace.newLogsHashes, - currentStorageValue: this.publicStorage.getCache().cachePerContract, - storageReads: this.trace.publicStorageReads, - storageWrites: this.trace.publicStorageWrites, - sideEffectCounter: this.trace.accessCounter, - }; + public async processNestedCall( + nestedState: AvmPersistableStateManager, + success: boolean, + nestedEnvironment: AvmExecutionEnvironment, + startGasLeft: Gas, + endGasLeft: Gas, + bytecode: Buffer, + avmCallResults: AvmContractCallResults, + ) { + if (success) { + this.acceptNestedCallState(nestedState); + } + const 
functionName = + (await nestedState.hostStorage.contractsDb.getDebugFunctionName( + nestedEnvironment.address, + nestedEnvironment.temporaryFunctionSelector, + )) ?? `${nestedEnvironment.address}:${nestedEnvironment.temporaryFunctionSelector}`; + this.trace.traceNestedCall( + nestedState.trace, + nestedEnvironment, + startGasLeft, + endGasLeft, + bytecode, + avmCallResults, + functionName, + ); } } diff --git a/yarn-project/simulator/src/avm/journal/nullifiers.test.ts b/yarn-project/simulator/src/avm/journal/nullifiers.test.ts index f8cec85bd92..8a215a54228 100644 --- a/yarn-project/simulator/src/avm/journal/nullifiers.test.ts +++ b/yarn-project/simulator/src/avm/journal/nullifiers.test.ts @@ -3,15 +3,15 @@ import { Fr } from '@aztec/foundation/fields'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type CommitmentsDB } from '../../index.js'; -import { Nullifiers } from './nullifiers.js'; +import { NullifierManager } from './nullifiers.js'; describe('avm nullifier caching', () => { let commitmentsDb: MockProxy; - let nullifiers: Nullifiers; + let nullifiers: NullifierManager; beforeEach(() => { commitmentsDb = mock(); - nullifiers = new Nullifiers(commitmentsDb); + nullifiers = new NullifierManager(commitmentsDb); }); describe('Nullifier caching and existence checks', () => { @@ -42,7 +42,7 @@ describe('avm nullifier caching', () => { const nullifier = new Fr(2); const storedLeafIndex = BigInt(420); - commitmentsDb.getNullifierIndex.mockResolvedValue(Promise.resolve(storedLeafIndex)); + commitmentsDb.getNullifierIndex.mockResolvedValue(storedLeafIndex); const [exists, isPending, gotIndex] = await nullifiers.checkExists(contractAddress, nullifier); // exists (in host), not pending, tree index retrieved from host @@ -53,7 +53,7 @@ describe('avm nullifier caching', () => { it('Existence check works on fallback to parent (gets value, exists, is pending)', async () => { const contractAddress = new Fr(1); const nullifier = new Fr(2); - const 
childNullifiers = new Nullifiers(commitmentsDb, nullifiers); + const childNullifiers = nullifiers.fork(); // Write to parent cache await nullifiers.append(contractAddress, nullifier); @@ -67,8 +67,8 @@ describe('avm nullifier caching', () => { it('Existence check works on fallback to grandparent (gets value, exists, is pending)', async () => { const contractAddress = new Fr(1); const nullifier = new Fr(2); - const childNullifiers = new Nullifiers(commitmentsDb, nullifiers); - const grandChildNullifiers = new Nullifiers(commitmentsDb, childNullifiers); + const childNullifiers = nullifiers.fork(); + const grandChildNullifiers = childNullifiers.fork(); // Write to parent cache await nullifiers.append(contractAddress, nullifier); @@ -99,7 +99,7 @@ describe('avm nullifier caching', () => { // Append a nullifier to parent await nullifiers.append(contractAddress, nullifier); - const childNullifiers = new Nullifiers(commitmentsDb, nullifiers); + const childNullifiers = nullifiers.fork(); // Can't append again in child await expect(childNullifiers.append(contractAddress, nullifier)).rejects.toThrow( `Nullifier ${nullifier} at contract ${contractAddress} already exists in parent cache or host.`, @@ -111,7 +111,7 @@ describe('avm nullifier caching', () => { const storedLeafIndex = BigInt(420); // Nullifier exists in host - commitmentsDb.getNullifierIndex.mockResolvedValue(Promise.resolve(storedLeafIndex)); + commitmentsDb.getNullifierIndex.mockResolvedValue(storedLeafIndex); // Can't append to cache await expect(nullifiers.append(contractAddress, nullifier)).rejects.toThrow( `Nullifier ${nullifier} at contract ${contractAddress} already exists in parent cache or host.`, @@ -128,7 +128,7 @@ describe('avm nullifier caching', () => { // Append a nullifier to parent await nullifiers.append(contractAddress, nullifier0); - const childNullifiers = new Nullifiers(commitmentsDb, nullifiers); + const childNullifiers = nullifiers.fork(); // Append a nullifier to child await 
childNullifiers.append(contractAddress, nullifier1); @@ -149,7 +149,7 @@ describe('avm nullifier caching', () => { await nullifiers.append(contractAddress, nullifier); // Create child cache, don't derive from parent so we can concoct a collision on merge - const childNullifiers = new Nullifiers(commitmentsDb); + const childNullifiers = new NullifierManager(commitmentsDb); // Append a nullifier to child await childNullifiers.append(contractAddress, nullifier); diff --git a/yarn-project/simulator/src/avm/journal/nullifiers.ts b/yarn-project/simulator/src/avm/journal/nullifiers.ts index e580c1a885c..a4d23a357e2 100644 --- a/yarn-project/simulator/src/avm/journal/nullifiers.ts +++ b/yarn-project/simulator/src/avm/journal/nullifiers.ts @@ -9,17 +9,29 @@ import type { CommitmentsDB } from '../../index.js'; * Maintains a nullifier cache, and ensures that existence checks fall back to the correct source. * When a contract call completes, its cached nullifier set can be merged into its parent's. */ -export class Nullifiers { - /** Cached nullifiers. */ - public cache: NullifierCache; - +export class NullifierManager { constructor( /** Reference to node storage. Checked on parent cache-miss. */ private readonly hostNullifiers: CommitmentsDB, - /** Parent's nullifiers. Checked on this' cache-miss. */ - private readonly parent?: Nullifiers | undefined, - ) { - this.cache = new NullifierCache(); + /** Cached nullifiers. 
*/ + private readonly cache: NullifierCache = new NullifierCache(), + /** Parent nullifier manager to fall back on */ + private readonly parent?: NullifierManager, + ) {} + + /** + * Create a new nullifiers manager with some preloaded pending siloed nullifiers + */ + public static newWithPendingSiloedNullifiers(hostNullifiers: CommitmentsDB, pendingSiloedNullifiers: Fr[]) { + const cache = new NullifierCache(pendingSiloedNullifiers); + return new NullifierManager(hostNullifiers, cache); + } + + /** + * Create a new nullifiers manager forked from this one + */ + public fork() { + return new NullifierManager(this.hostNullifiers, new NullifierCache(), this); } /** @@ -92,7 +104,7 @@ export class Nullifiers { * * @param incomingNullifiers - the incoming cached nullifiers to merge into this instance's */ - public acceptAndMerge(incomingNullifiers: Nullifiers) { + public acceptAndMerge(incomingNullifiers: NullifierManager) { this.cache.acceptAndMerge(incomingNullifiers.cache); } } @@ -111,6 +123,15 @@ export class NullifierCache { private cachePerContract: Map> = new Map(); private siloedNullifiers: Set = new Set(); + /** + * @parem siloedNullifierFrs: optional list of pending siloed nullifiers to initialize this cache with + */ + constructor(siloedNullifierFrs?: Fr[]) { + if (siloedNullifierFrs !== undefined) { + siloedNullifierFrs.forEach(nullifier => this.siloedNullifiers.add(nullifier.toBigInt())); + } + } + /** * Check whether a nullifier exists in the cache. * @@ -147,10 +168,6 @@ export class NullifierCache { nullifiersForContract.add(nullifier.toBigInt()); } - public appendSiloed(siloedNullifier: Fr) { - this.siloedNullifiers.add(siloedNullifier.toBigInt()); - } - /** * Merge another cache's nullifiers into this instance's. 
* diff --git a/yarn-project/simulator/src/avm/journal/public_storage.test.ts b/yarn-project/simulator/src/avm/journal/public_storage.test.ts index 1d6359caef9..3b20b5cae3b 100644 --- a/yarn-project/simulator/src/avm/journal/public_storage.test.ts +++ b/yarn-project/simulator/src/avm/journal/public_storage.test.ts @@ -44,7 +44,7 @@ describe('avm public storage', () => { const slot = new Fr(2); const storedValue = new Fr(420); // ensure that fallback to host gets a value - publicDb.storageRead.mockResolvedValue(Promise.resolve(storedValue)); + publicDb.storageRead.mockResolvedValue(storedValue); const { exists, value: gotValue, cached } = await publicStorage.read(contractAddress, slot); // it exists in the host, so it must've been written before @@ -90,7 +90,7 @@ describe('avm public storage', () => { const parentValue = new Fr(69); const cachedValue = new Fr(1337); - publicDb.storageRead.mockResolvedValue(Promise.resolve(storedValue)); + publicDb.storageRead.mockResolvedValue(storedValue); const childStorage = new PublicStorage(publicDb, publicStorage); // Cache miss falls back to host diff --git a/yarn-project/simulator/src/avm/journal/public_storage.ts b/yarn-project/simulator/src/avm/journal/public_storage.ts index 6019934c201..4dee472ab24 100644 --- a/yarn-project/simulator/src/avm/journal/public_storage.ts +++ b/yarn-project/simulator/src/avm/journal/public_storage.ts @@ -27,6 +27,13 @@ export class PublicStorage { this.cache = new PublicStorageCache(); } + /** + * Create a new public storage manager forked from this one + */ + public fork() { + return new PublicStorage(this.hostPublicStorage, this); + } + /** * Get the pending storage. 
*/ @@ -71,6 +78,9 @@ export class PublicStorage { // Finally try the host's Aztec state (a trip to the database) if (!value) { value = await this.hostPublicStorage.storageRead(storageAddress, slot); + // TODO(dbanks12): if value retrieved from host storage, we can cache it here + // any future reads to the same slot can read from cache instead of more expensive + // DB access } else { cached = true; } diff --git a/yarn-project/simulator/src/avm/journal/trace.test.ts b/yarn-project/simulator/src/avm/journal/trace.test.ts deleted file mode 100644 index a143ce4e3be..00000000000 --- a/yarn-project/simulator/src/avm/journal/trace.test.ts +++ /dev/null @@ -1,294 +0,0 @@ -import { Fr } from '@aztec/foundation/fields'; - -import { randomTracedContractInstance } from '../fixtures/index.js'; -import { WorldStateAccessTrace } from './trace.js'; -import { type TracedL1toL2MessageCheck, type TracedNullifier, type TracedNullifierCheck } from './trace_types.js'; - -describe('world state access trace', () => { - let trace: WorldStateAccessTrace; - - beforeEach(() => { - trace = new WorldStateAccessTrace(); - }); - - describe('Basic tracing', () => { - it('Should trace note hash checks', () => { - const contractAddress = new Fr(1); - const noteHash = new Fr(2); - const exists = true; - const leafIndex = new Fr(42); - - trace.traceNoteHashCheck(contractAddress, noteHash, exists, leafIndex); - - expect(trace.noteHashChecks).toEqual([ - { - // callPointer: expect.any(Fr), - storageAddress: contractAddress, - noteHash: noteHash, - exists: exists, - counter: Fr.ZERO, // 0th access - // endLifetime: expect.any(Fr), - leafIndex: leafIndex, - }, - ]); - expect(trace.getAccessCounter()).toBe(1); - }); - it('Should trace note hashes', () => { - const contractAddress = new Fr(1); - const utxo = new Fr(2); - - trace.traceNewNoteHash(contractAddress, utxo); - - expect(trace.newNoteHashes).toEqual([ - expect.objectContaining({ storageAddress: contractAddress, noteHash: utxo }), - ]); - 
expect(trace.getAccessCounter()).toEqual(1); - }); - it('Should trace nullifier checks', () => { - const contractAddress = new Fr(1); - const utxo = new Fr(2); - const exists = true; - const isPending = false; - const leafIndex = new Fr(42); - trace.traceNullifierCheck(contractAddress, utxo, exists, isPending, leafIndex); - const expectedCheck: TracedNullifierCheck = { - // callPointer: Fr.ZERO, - storageAddress: contractAddress, - nullifier: utxo, - exists: exists, - counter: Fr.ZERO, // 0th access - // endLifetime: Fr.ZERO, - isPending: isPending, - leafIndex: leafIndex, - }; - expect(trace.nullifierChecks).toEqual([expectedCheck]); - expect(trace.getAccessCounter()).toEqual(1); - }); - it('Should trace nullifiers', () => { - const contractAddress = new Fr(1); - const utxo = new Fr(2); - trace.traceNewNullifier(contractAddress, utxo); - const expectedNullifier: TracedNullifier = { - // callPointer: Fr.ZERO, - storageAddress: contractAddress, - nullifier: utxo, - counter: new Fr(0), - // endLifetime: Fr.ZERO, - }; - expect(trace.newNullifiers).toEqual([expectedNullifier]); - expect(trace.getAccessCounter()).toEqual(1); - }); - it('Should trace L1ToL2 Message checks', () => { - const utxo = new Fr(2); - const exists = true; - const leafIndex = new Fr(42); - trace.traceL1ToL2MessageCheck(utxo, leafIndex, exists); - const expectedCheck: TracedL1toL2MessageCheck = { - leafIndex: leafIndex, - msgHash: utxo, - exists: exists, - counter: new Fr(0), - }; - expect(trace.l1ToL2MessageChecks).toEqual([expectedCheck]); - expect(trace.getAccessCounter()).toEqual(1); - }); - it('Should trace get contract instance', () => { - const instance = randomTracedContractInstance(); - trace.traceGetContractInstance(instance); - expect(trace.gotContractInstances).toEqual([instance]); - expect(trace.getAccessCounter()).toEqual(1); - }); - }); - - it('Access counter should properly count accesses', () => { - const contractAddress = new Fr(1); - const slot = new Fr(2); - const value = new 
Fr(1); - const nullifier = new Fr(20); - const nullifierExists = false; - const nullifierIsPending = false; - const nullifierLeafIndex = Fr.ZERO; - const noteHash = new Fr(10); - const noteHashLeafIndex = new Fr(88); - const noteHashExists = false; - const msgExists = false; - const msgLeafIndex = Fr.ZERO; - const msgHash = new Fr(10); - const instance = randomTracedContractInstance(); - - let counter = 0; - trace.tracePublicStorageWrite(contractAddress, slot, value); - counter++; - trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true, /*cached=*/ true); - counter++; - trace.traceNoteHashCheck(contractAddress, noteHash, noteHashExists, noteHashLeafIndex); - counter++; - trace.traceNewNoteHash(contractAddress, noteHash); - counter++; - trace.traceNullifierCheck(contractAddress, nullifier, nullifierExists, nullifierIsPending, nullifierLeafIndex); - counter++; - trace.traceNewNullifier(contractAddress, nullifier); - counter++; - trace.traceL1ToL2MessageCheck(msgHash, msgLeafIndex, msgExists); - counter++; - trace.tracePublicStorageWrite(contractAddress, slot, value); - counter++; - trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true, /*cached=*/ true); - counter++; - trace.traceNewNoteHash(contractAddress, noteHash); - counter++; - trace.traceNullifierCheck(contractAddress, nullifier, nullifierExists, nullifierIsPending, nullifierLeafIndex); - counter++; - trace.traceNewNullifier(contractAddress, nullifier); - counter++; - trace.traceL1ToL2MessageCheck(msgHash, msgLeafIndex, msgExists); - counter++; - trace.traceGetContractInstance(instance); - counter++; - expect(trace.getAccessCounter()).toEqual(counter); - }); - - it('Should merge two traces together', () => { - const contractAddress = new Fr(1); - const slot = new Fr(2); - const value = new Fr(1); - const valueT1 = new Fr(2); - - const noteHash = new Fr(10); - const noteHashExists = false; - const noteHashLeafIndex = new Fr(88); - const noteHashT1 = new Fr(11); - 
const noteHashExistsT1 = true; - const noteHashLeafIndexT1 = new Fr(7); - - const nullifierExists = false; - const nullifierIsPending = false; - const nullifierLeafIndex = Fr.ZERO; - const nullifier = new Fr(10); - const nullifierT1 = new Fr(20); - const nullifierExistsT1 = true; - const nullifierIsPendingT1 = false; - const nullifierLeafIndexT1 = new Fr(42); - - const msgExists = false; - const msgLeafIndex = Fr.ZERO; - const msgHash = new Fr(10); - const msgHashT1 = new Fr(20); - const msgExistsT1 = true; - const msgLeafIndexT1 = new Fr(42); - - const instance = randomTracedContractInstance(); - const instanceT1 = randomTracedContractInstance(); - - const expectedMessageCheck = { - leafIndex: msgLeafIndex, - msgHash: msgHash, - exists: msgExists, - }; - const expectedMessageCheckT1 = { - leafIndex: msgLeafIndexT1, - msgHash: msgHashT1, - exists: msgExistsT1, - }; - - trace.tracePublicStorageWrite(contractAddress, slot, value); - trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true, /*cached=*/ true); - trace.traceNoteHashCheck(contractAddress, noteHash, noteHashExists, noteHashLeafIndex); - trace.traceNewNoteHash(contractAddress, noteHash); - trace.traceNullifierCheck(contractAddress, nullifier, nullifierExists, nullifierIsPending, nullifierLeafIndex); - trace.traceNewNullifier(contractAddress, nullifier); - trace.traceL1ToL2MessageCheck(msgHash, msgLeafIndex, msgExists); - trace.traceGetContractInstance(instance); - - const childTrace = new WorldStateAccessTrace(trace); - childTrace.tracePublicStorageWrite(contractAddress, slot, valueT1); - childTrace.tracePublicStorageRead(contractAddress, slot, valueT1, /*exists=*/ true, /*cached=*/ true); - childTrace.traceNoteHashCheck(contractAddress, noteHashT1, noteHashExistsT1, noteHashLeafIndexT1); - childTrace.traceNewNoteHash(contractAddress, nullifierT1); - childTrace.traceNullifierCheck( - contractAddress, - nullifierT1, - nullifierExistsT1, - nullifierIsPendingT1, - nullifierLeafIndexT1, - ); 
- childTrace.traceNewNullifier(contractAddress, nullifierT1); - childTrace.traceL1ToL2MessageCheck(msgHashT1, msgLeafIndexT1, msgExistsT1); - childTrace.traceGetContractInstance(instanceT1); - - const childCounterBeforeMerge = childTrace.getAccessCounter(); - trace.acceptAndMerge(childTrace); - expect(trace.getAccessCounter()).toEqual(childCounterBeforeMerge); - - expect(trace.publicStorageReads).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - slot: slot, - value: value, - exists: true, - cached: true, - }), - expect.objectContaining({ - storageAddress: contractAddress, - slot: slot, - value: valueT1, - exists: true, - cached: true, - }), - ]); - expect(trace.publicStorageWrites).toEqual([ - expect.objectContaining({ storageAddress: contractAddress, slot: slot, value: value }), - expect.objectContaining({ storageAddress: contractAddress, slot: slot, value: valueT1 }), - ]); - expect(trace.newNoteHashes).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - noteHash: nullifier, - }), - expect.objectContaining({ - storageAddress: contractAddress, - noteHash: nullifierT1, - }), - ]); - expect(trace.newNullifiers).toEqual([ - expect.objectContaining({ - storageAddress: contractAddress, - nullifier: nullifier, - }), - expect.objectContaining({ - storageAddress: contractAddress, - nullifier: nullifierT1, - }), - ]); - expect(trace.nullifierChecks).toEqual([ - expect.objectContaining({ - nullifier: nullifier, - exists: nullifierExists, - isPending: nullifierIsPending, - leafIndex: nullifierLeafIndex, - }), - expect.objectContaining({ - nullifier: nullifierT1, - exists: nullifierExistsT1, - isPending: nullifierIsPendingT1, - leafIndex: nullifierLeafIndexT1, - }), - ]); - expect(trace.noteHashChecks).toEqual([ - expect.objectContaining({ noteHash: noteHash, exists: noteHashExists, leafIndex: noteHashLeafIndex }), - expect.objectContaining({ noteHash: noteHashT1, exists: noteHashExistsT1, leafIndex: noteHashLeafIndexT1 }), - 
]); - expect( - trace.l1ToL2MessageChecks.map(c => ({ - leafIndex: c.leafIndex, - msgHash: c.msgHash, - exists: c.exists, - })), - ).toEqual([expectedMessageCheck, expectedMessageCheckT1]); - expect(trace.l1ToL2MessageChecks).toEqual([ - expect.objectContaining({ leafIndex: msgLeafIndex, msgHash: msgHash, exists: msgExists }), - expect.objectContaining({ leafIndex: msgLeafIndexT1, msgHash: msgHashT1, exists: msgExistsT1 }), - ]); - expect(trace.gotContractInstances).toEqual([instance, instanceT1]); - }); -}); diff --git a/yarn-project/simulator/src/avm/journal/trace.ts b/yarn-project/simulator/src/avm/journal/trace.ts deleted file mode 100644 index 608f738ccc3..00000000000 --- a/yarn-project/simulator/src/avm/journal/trace.ts +++ /dev/null @@ -1,181 +0,0 @@ -import { Fr } from '@aztec/foundation/fields'; - -import { - type TracedContractInstance, - type TracedL1toL2MessageCheck, - type TracedNoteHash, - type TracedNoteHashCheck, - type TracedNullifier, - type TracedNullifierCheck, - type TracedPublicStorageRead, - type TracedPublicStorageWrite, - type TracedUnencryptedL2Log, -} from './trace_types.js'; - -export class WorldStateAccessTrace { - public accessCounter: number; - - public publicStorageReads: TracedPublicStorageRead[] = []; - public publicStorageWrites: TracedPublicStorageWrite[] = []; - - public noteHashChecks: TracedNoteHashCheck[] = []; - public newNoteHashes: TracedNoteHash[] = []; - public nullifierChecks: TracedNullifierCheck[] = []; - public newNullifiers: TracedNullifier[] = []; - public l1ToL2MessageChecks: TracedL1toL2MessageCheck[] = []; - public newLogsHashes: TracedUnencryptedL2Log[] = []; - public gotContractInstances: TracedContractInstance[] = []; - - //public contractCalls: TracedContractCall[] = []; - //public archiveChecks: TracedArchiveLeafCheck[] = []; - - constructor(parentTrace?: WorldStateAccessTrace) { - this.accessCounter = parentTrace ? 
parentTrace.accessCounter : 0; - // TODO(4805): consider tracking the parent's trace vector lengths so we can enforce limits - } - - public getAccessCounter() { - return this.accessCounter; - } - - public tracePublicStorageRead(storageAddress: Fr, slot: Fr, value: Fr, exists: boolean, cached: boolean) { - // TODO(4805): check if some threshold is reached for max storage reads - // (need access to parent length, or trace needs to be initialized with parent's contents) - const traced: TracedPublicStorageRead = { - // callPointer: Fr.ZERO, - storageAddress, - slot, - value, - exists, - cached, - counter: new Fr(this.accessCounter), - // endLifetime: Fr.ZERO, - }; - this.publicStorageReads.push(traced); - this.incrementAccessCounter(); - } - - public tracePublicStorageWrite(storageAddress: Fr, slot: Fr, value: Fr) { - // TODO(4805): check if some threshold is reached for max storage writes - // (need access to parent length, or trace needs to be initialized with parent's contents) - const traced: TracedPublicStorageWrite = { - // callPointer: Fr.ZERO, - storageAddress, - slot, - value, - counter: new Fr(this.accessCounter), - // endLifetime: Fr.ZERO, - }; - this.publicStorageWrites.push(traced); - this.incrementAccessCounter(); - } - - public traceNoteHashCheck(storageAddress: Fr, noteHash: Fr, exists: boolean, leafIndex: Fr) { - const traced: TracedNoteHashCheck = { - // callPointer: Fr.ZERO, - storageAddress, - noteHash, - exists, - counter: new Fr(this.accessCounter), - // endLifetime: Fr.ZERO, - leafIndex, - }; - this.noteHashChecks.push(traced); - this.incrementAccessCounter(); - } - - public traceNewNoteHash(storageAddress: Fr, noteHash: Fr) { - // TODO(4805): check if some threshold is reached for max new note hash - const traced: TracedNoteHash = { - // callPointer: Fr.ZERO, - storageAddress, - noteHash, - counter: new Fr(this.accessCounter), - // endLifetime: Fr.ZERO, - }; - this.newNoteHashes.push(traced); - this.incrementAccessCounter(); - } - - public 
traceNullifierCheck(storageAddress: Fr, nullifier: Fr, exists: boolean, isPending: boolean, leafIndex: Fr) { - // TODO(4805): check if some threshold is reached for max new nullifier - const traced: TracedNullifierCheck = { - // callPointer: Fr.ZERO, - storageAddress, - nullifier, - exists, - counter: new Fr(this.accessCounter), - // endLifetime: Fr.ZERO, - isPending, - leafIndex, - }; - this.nullifierChecks.push(traced); - this.incrementAccessCounter(); - } - - public traceNewNullifier(storageAddress: Fr, nullifier: Fr) { - // TODO(4805): check if some threshold is reached for max new nullifier - const tracedNullifier: TracedNullifier = { - // callPointer: Fr.ZERO, - storageAddress, - nullifier, - counter: new Fr(this.accessCounter), - // endLifetime: Fr.ZERO, - }; - this.newNullifiers.push(tracedNullifier); - this.incrementAccessCounter(); - } - - public traceL1ToL2MessageCheck(msgHash: Fr, msgLeafIndex: Fr, exists: boolean) { - // TODO(4805): check if some threshold is reached for max message reads - const traced: TracedL1toL2MessageCheck = { - //callPointer: Fr.ZERO, // FIXME - leafIndex: msgLeafIndex, - msgHash: msgHash, - exists: exists, - counter: new Fr(this.accessCounter), - //endLifetime: Fr.ZERO, // FIXME - }; - this.l1ToL2MessageChecks.push(traced); - this.incrementAccessCounter(); - } - - public traceNewLog(logHash: Fr) { - const traced: TracedUnencryptedL2Log = { - logHash, - counter: new Fr(this.accessCounter), - }; - this.newLogsHashes.push(traced); - this.incrementAccessCounter(); - } - - public traceGetContractInstance(instance: TracedContractInstance) { - this.gotContractInstances.push(instance); - this.incrementAccessCounter(); - } - - private incrementAccessCounter() { - this.accessCounter++; - } - - /** - * Merges another trace into this one - * - * @param incomingTrace - the incoming trace to merge into this instance - */ - public acceptAndMerge(incomingTrace: WorldStateAccessTrace) { - // Merge storage read and write journals - 
this.publicStorageReads.push(...incomingTrace.publicStorageReads); - this.publicStorageWrites.push(...incomingTrace.publicStorageWrites); - // Merge new note hashes and nullifiers - this.noteHashChecks.push(...incomingTrace.noteHashChecks); - this.newNoteHashes.push(...incomingTrace.newNoteHashes); - this.nullifierChecks.push(...incomingTrace.nullifierChecks); - this.newNullifiers.push(...incomingTrace.newNullifiers); - this.l1ToL2MessageChecks.push(...incomingTrace.l1ToL2MessageChecks); - this.newLogsHashes.push(...incomingTrace.newLogsHashes); - this.gotContractInstances.push(...incomingTrace.gotContractInstances); - // it is assumed that the incoming trace was initialized with this as parent, so accept counter - this.accessCounter = incomingTrace.accessCounter; - } -} diff --git a/yarn-project/simulator/src/avm/journal/trace_types.ts b/yarn-project/simulator/src/avm/journal/trace_types.ts deleted file mode 100644 index db57e53998b..00000000000 --- a/yarn-project/simulator/src/avm/journal/trace_types.ts +++ /dev/null @@ -1,91 +0,0 @@ -import { type Fr } from '@aztec/foundation/fields'; -import { type ContractInstanceWithAddress } from '@aztec/types/contracts'; - -//export type TracedContractCall = { -// callPointer: Fr; -// address: Fr; -// storageAddress: Fr; -// endLifetime: Fr; -//}; - -export type TracedPublicStorageRead = { - // callPointer: Fr; - storageAddress: Fr; - exists: boolean; - cached: boolean; - slot: Fr; - value: Fr; - counter: Fr; - // endLifetime: Fr; -}; - -export type TracedPublicStorageWrite = { - // callPointer: Fr; - storageAddress: Fr; - slot: Fr; - value: Fr; - counter: Fr; - // endLifetime: Fr; -}; - -export type TracedNoteHashCheck = { - // callPointer: Fr; - storageAddress: Fr; - leafIndex: Fr; - noteHash: Fr; - exists: boolean; - counter: Fr; - // endLifetime: Fr; -}; - -export type TracedNoteHash = { - // callPointer: Fr; - storageAddress: Fr; - noteHash: Fr; - counter: Fr; - // endLifetime: Fr; -}; - -export type 
TracedNullifierCheck = { - // callPointer: Fr; - storageAddress: Fr; - nullifier: Fr; - exists: boolean; - counter: Fr; - // endLifetime: Fr; - // the fields below are relevant only to the public kernel - // and are therefore omitted from VM inputs - isPending: boolean; - leafIndex: Fr; -}; - -export type TracedNullifier = { - // callPointer: Fr; - storageAddress: Fr; - nullifier: Fr; - counter: Fr; - // endLifetime: Fr; -}; - -export type TracedL1toL2MessageCheck = { - //callPointer: Fr; - leafIndex: Fr; - msgHash: Fr; - exists: boolean; - counter: Fr; - //endLifetime: Fr; -}; - -export type TracedUnencryptedL2Log = { - //callPointer: Fr; - logHash: Fr; - counter: Fr; - //endLifetime: Fr; -}; - -//export type TracedArchiveLeafCheck = { -// leafIndex: Fr; -// leaf: Fr; -//}; - -export type TracedContractInstance = { exists: boolean } & ContractInstanceWithAddress; diff --git a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts index 5f4ac1eae0d..9f71a34a6f6 100644 --- a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts @@ -1,15 +1,20 @@ -import { UnencryptedL2Log } from '@aztec/circuit-types'; -import { EthAddress, Fr } from '@aztec/circuits.js'; -import { EventSelector } from '@aztec/foundation/abi'; +import { Fr } from '@aztec/circuits.js'; import { mock } from 'jest-mock-extended'; -import { type CommitmentsDB } from '../../index.js'; +import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { type AvmContext } from '../avm_context.js'; import { Field, Uint8, Uint32 } from '../avm_memory_types.js'; import { InstructionExecutionError, StaticCallAlterationError } from '../errors.js'; -import { initContext, initExecutionEnvironment, initHostStorage } from '../fixtures/index.js'; -import { AvmPersistableStateManager } from '../journal/journal.js'; +import { 
+ initContext, + initExecutionEnvironment, + initHostStorage, + initPersistableStateManager, +} from '../fixtures/index.js'; +import { type HostStorage } from '../journal/host_storage.js'; +import { type AvmPersistableStateManager } from '../journal/journal.js'; +import { mockL1ToL2MessageExists, mockNoteHashExists, mockNullifierExists } from '../test_utils.js'; import { EmitNoteHash, EmitNullifier, @@ -21,10 +26,27 @@ import { } from './accrued_substate.js'; describe('Accrued Substate', () => { + let hostStorage: HostStorage; + let trace: PublicSideEffectTraceInterface; + let persistableState: AvmPersistableStateManager; let context: AvmContext; + const address = new Fr(1); + const storageAddress = new Fr(2); + const sender = new Fr(42); + const value0 = new Fr(69); // noteHash or nullifier... + const value0Offset = 100; + const value1 = new Fr(420); + const value1Offset = 200; + const leafIndex = new Fr(7); + const leafIndexOffset = 1; + const existsOffset = 2; + beforeEach(() => { - context = initContext(); + hostStorage = initHostStorage(); + trace = mock(); + persistableState = initPersistableStateManager({ hostStorage, trace }); + context = initContext({ persistableState, env: initExecutionEnvironment({ address, storageAddress, sender }) }); }); describe('NoteHashExists', () => { @@ -47,82 +69,43 @@ describe('Accrued Substate', () => { expect(inst.serialize()).toEqual(buf); }); - it('Should correctly return false when noteHash does not exist', async () => { - const noteHash = new Field(69n); - const noteHashOffset = 0; - const leafIndex = new Field(7n); - const leafIndexOffset = 1; - const existsOffset = 2; - - // mock host storage this so that persistable state's getCommitmentIndex returns UNDEFINED - const commitmentsDb = mock(); - commitmentsDb.getCommitmentIndex.mockResolvedValue(Promise.resolve(undefined)); - const hostStorage = initHostStorage({ commitmentsDb }); - context = initContext({ persistableState: new AvmPersistableStateManager(hostStorage) }); 
- - context.machineState.memory.set(noteHashOffset, noteHash); - context.machineState.memory.set(leafIndexOffset, leafIndex); - await new NoteHashExists(/*indirect=*/ 0, noteHashOffset, leafIndexOffset, existsOffset).execute(context); - - const exists = context.machineState.memory.getAs(existsOffset); - expect(exists).toEqual(new Uint8(0)); - - const journalState = context.persistableState.flush(); - expect(journalState.noteHashChecks).toEqual([ - expect.objectContaining({ exists: false, leafIndex: leafIndex.toFr(), noteHash: noteHash.toFr() }), - ]); - }); - - it('Should correctly return false when note hash exists at a different leaf index', async () => { - const noteHash = new Field(69n); - const noteHashOffset = 0; - const leafIndex = new Field(7n); - const storedLeafIndex = 88n; - const leafIndexOffset = 1; - const existsOffset = 2; - - const commitmentsDb = mock(); - commitmentsDb.getCommitmentIndex.mockResolvedValue(Promise.resolve(storedLeafIndex)); - const hostStorage = initHostStorage({ commitmentsDb }); - context = initContext({ persistableState: new AvmPersistableStateManager(hostStorage) }); - - context.machineState.memory.set(noteHashOffset, noteHash); - context.machineState.memory.set(leafIndexOffset, leafIndex); - await new NoteHashExists(/*indirect=*/ 0, noteHashOffset, leafIndexOffset, existsOffset).execute(context); - - const exists = context.machineState.memory.getAs(existsOffset); - expect(exists).toEqual(new Uint8(0)); - - const journalState = context.persistableState.flush(); - expect(journalState.noteHashChecks).toEqual([ - expect.objectContaining({ exists: false, leafIndex: leafIndex.toFr(), noteHash: noteHash.toFr() }), - ]); - }); - - it('Should correctly return true when note hash exists at the given leaf index', async () => { - const noteHash = new Field(69n); - const noteHashOffset = 0; - const leafIndex = new Field(7n); - const storedLeafIndex = 7n; - const leafIndexOffset = 1; - const existsOffset = 2; - - const commitmentsDb = 
mock(); - commitmentsDb.getCommitmentIndex.mockResolvedValue(Promise.resolve(storedLeafIndex)); - const hostStorage = initHostStorage({ commitmentsDb }); - context = initContext({ persistableState: new AvmPersistableStateManager(hostStorage) }); - - context.machineState.memory.set(noteHashOffset, noteHash); - context.machineState.memory.set(leafIndexOffset, leafIndex); - await new NoteHashExists(/*indirect=*/ 0, noteHashOffset, leafIndexOffset, existsOffset).execute(context); - - const exists = context.machineState.memory.getAs(existsOffset); - expect(exists).toEqual(new Uint8(1)); - - const journalState = context.persistableState.flush(); - expect(journalState.noteHashChecks).toEqual([ - expect.objectContaining({ exists: true, leafIndex: leafIndex.toFr(), noteHash: noteHash.toFr() }), - ]); + // Will check existence at leafIndex, but nothing may be found there and/or something may be found at mockAtLeafIndex + describe.each([ + [/*mockAtLeafIndex=*/ undefined], // doesn't exist at all + [/*mockAtLeafIndex=*/ leafIndex], // should be found! + [/*mockAtLeafIndex=*/ leafIndex.add(Fr.ONE)], // won't be found! (checking leafIndex+1, but it exists at leafIndex) + ])('Note hash checks', (mockAtLeafIndex?: Fr) => { + const expectFound = mockAtLeafIndex !== undefined && mockAtLeafIndex.equals(leafIndex); + const existsElsewhere = mockAtLeafIndex !== undefined && !mockAtLeafIndex.equals(leafIndex); + const existsStr = expectFound ? 'DOES exist' : 'does NOT exist'; + const foundAtStr = existsElsewhere + ? 
`at leafIndex=${mockAtLeafIndex.toNumber()} (exists at leafIndex=${leafIndex.toNumber()})` + : ''; + it(`Should return ${expectFound} (and be traced) when noteHash ${existsStr} ${foundAtStr}`, async () => { + if (mockAtLeafIndex !== undefined) { + mockNoteHashExists(hostStorage, mockAtLeafIndex, value0); + } + + context.machineState.memory.set(value0Offset, new Field(value0)); // noteHash + context.machineState.memory.set(leafIndexOffset, new Field(leafIndex)); + await new NoteHashExists( + /*indirect=*/ 0, + /*noteHashOffset=*/ value0Offset, + leafIndexOffset, + existsOffset, + ).execute(context); + + const gotExists = context.machineState.memory.getAs(existsOffset); + expect(gotExists).toEqual(new Uint8(expectFound ? 1 : 0)); + + expect(trace.traceNoteHashCheck).toHaveBeenCalledTimes(1); + expect(trace.traceNoteHashCheck).toHaveBeenCalledWith( + storageAddress, + /*noteHash=*/ value0, + leafIndex, + /*exists=*/ expectFound, + ); + }); }); }); @@ -140,18 +123,13 @@ describe('Accrued Substate', () => { }); it('Should append a new note hash correctly', async () => { - const value = new Field(69n); - context.machineState.memory.set(0, value); - - await new EmitNoteHash(/*indirect=*/ 0, /*offset=*/ 0).execute(context); - - const journalState = context.persistableState.flush(); - expect(journalState.newNoteHashes).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress, - noteHash: value.toFr(), - }), - ]); + context.machineState.memory.set(value0Offset, new Field(value0)); + await new EmitNoteHash(/*indirect=*/ 0, /*offset=*/ value0Offset).execute(context); + expect(trace.traceNewNoteHash).toHaveBeenCalledTimes(1); + expect(trace.traceNewNoteHash).toHaveBeenCalledWith( + expect.objectContaining(storageAddress), + /*noteHash=*/ value0, + ); }); }); @@ -175,57 +153,39 @@ describe('Accrued Substate', () => { expect(inst.serialize()).toEqual(buf); }); - it('Should correctly show false when nullifier does not exist', async () => { - const 
value = new Field(69n); - const nullifierOffset = 0; - const addressOffset = 1; - const existsOffset = 2; - - // mock host storage this so that persistable state's checkNullifierExists returns UNDEFINED - const commitmentsDb = mock(); - commitmentsDb.getNullifierIndex.mockResolvedValue(Promise.resolve(undefined)); - const hostStorage = initHostStorage({ commitmentsDb }); - context = initContext({ persistableState: new AvmPersistableStateManager(hostStorage) }); - const address = new Field(context.environment.storageAddress.toField()); - - context.machineState.memory.set(nullifierOffset, value); - context.machineState.memory.set(addressOffset, address); - await new NullifierExists(/*indirect=*/ 0, nullifierOffset, addressOffset, existsOffset).execute(context); - - const exists = context.machineState.memory.getAs(existsOffset); - expect(exists).toEqual(new Uint8(0)); - - const journalState = context.persistableState.flush(); - expect(journalState.nullifierChecks).toEqual([ - expect.objectContaining({ nullifier: value.toFr(), storageAddress: address.toFr(), exists: false }), - ]); - }); - - it('Should correctly show true when nullifier exists', async () => { - const value = new Field(69n); - const nullifierOffset = 0; - const addressOffset = 1; - const existsOffset = 2; - const storedLeafIndex = BigInt(42); - - // mock host storage this so that persistable state's checkNullifierExists returns true - const commitmentsDb = mock(); - commitmentsDb.getNullifierIndex.mockResolvedValue(Promise.resolve(storedLeafIndex)); - const hostStorage = initHostStorage({ commitmentsDb }); - context = initContext({ persistableState: new AvmPersistableStateManager(hostStorage) }); - const address = new Field(context.environment.storageAddress.toField()); - - context.machineState.memory.set(nullifierOffset, value); - context.machineState.memory.set(addressOffset, address); - await new NullifierExists(/*indirect=*/ 0, nullifierOffset, addressOffset, existsOffset).execute(context); - - 
const exists = context.machineState.memory.getAs(existsOffset); - expect(exists).toEqual(new Uint8(1)); - - const journalState = context.persistableState.flush(); - expect(journalState.nullifierChecks).toEqual([ - expect.objectContaining({ nullifier: value.toFr(), storageAddress: address.toFr(), exists: true }), - ]); + describe.each([[/*exists=*/ false], [/*exists=*/ true]])('Nullifier checks', (exists: boolean) => { + const existsStr = exists ? 'DOES exist' : 'does NOT exist'; + it(`Should return ${exists} (and be traced) when noteHash ${existsStr}`, async () => { + const storageAddressOffset = 1; + + if (exists) { + mockNullifierExists(hostStorage, leafIndex, value0); + } + + context.machineState.memory.set(value0Offset, new Field(value0)); // nullifier + context.machineState.memory.set(storageAddressOffset, new Field(storageAddress)); + await new NullifierExists( + /*indirect=*/ 0, + /*nullifierOffset=*/ value0Offset, + storageAddressOffset, + existsOffset, + ).execute(context); + + const gotExists = context.machineState.memory.getAs(existsOffset); + expect(gotExists).toEqual(new Uint8(exists ? 1 : 0)); + + expect(trace.traceNullifierCheck).toHaveBeenCalledTimes(1); + const isPending = false; + // leafIndex is returned from DB call for nullifiers, so it is absent on DB miss + const tracedLeafIndex = exists && !isPending ? 
leafIndex : Fr.ZERO; + expect(trace.traceNullifierCheck).toHaveBeenCalledWith( + storageAddress, + value0, + tracedLeafIndex, + exists, + isPending, + ); + }); }); }); @@ -243,52 +203,39 @@ describe('Accrued Substate', () => { }); it('Should append a new nullifier correctly', async () => { - const value = new Field(69n); - context.machineState.memory.set(0, value); - - await new EmitNullifier(/*indirect=*/ 0, /*offset=*/ 0).execute(context); - - const journalState = context.persistableState.flush(); - expect(journalState.newNullifiers).toEqual([ - expect.objectContaining({ - storageAddress: context.environment.storageAddress.toField(), - nullifier: value.toFr(), - }), - ]); + context.machineState.memory.set(value0Offset, new Field(value0)); + await new EmitNullifier(/*indirect=*/ 0, /*offset=*/ value0Offset).execute(context); + expect(trace.traceNewNullifier).toHaveBeenCalledTimes(1); + expect(trace.traceNewNullifier).toHaveBeenCalledWith( + expect.objectContaining(storageAddress), + /*nullifier=*/ value0, + ); }); it('Nullifier collision reverts (same nullifier emitted twice)', async () => { - const value = new Field(69n); - context.machineState.memory.set(0, value); - - await new EmitNullifier(/*indirect=*/ 0, /*offset=*/ 0).execute(context); - await expect(new EmitNullifier(/*indirect=*/ 0, /*offset=*/ 0).execute(context)).rejects.toThrow( + context.machineState.memory.set(value0Offset, new Field(value0)); + await new EmitNullifier(/*indirect=*/ 0, /*offset=*/ value0Offset).execute(context); + await expect(new EmitNullifier(/*indirect=*/ 0, /*offset=*/ value0Offset).execute(context)).rejects.toThrow( new InstructionExecutionError( - `Attempted to emit duplicate nullifier ${value.toFr()} (storage address: ${ - context.environment.storageAddress - }).`, + `Attempted to emit duplicate nullifier ${value0} (storage address: ${storageAddress}).`, ), ); + expect(trace.traceNewNullifier).toHaveBeenCalledTimes(1); + expect(trace.traceNewNullifier).toHaveBeenCalledWith( + 
expect.objectContaining(storageAddress), + /*nullifier=*/ value0, + ); }); it('Nullifier collision reverts (nullifier exists in host state)', async () => { - const value = new Field(69n); - const storedLeafIndex = BigInt(42); - - // Mock the nullifiers db to return a stored leaf index - const commitmentsDb = mock(); - commitmentsDb.getNullifierIndex.mockResolvedValue(Promise.resolve(storedLeafIndex)); - const hostStorage = initHostStorage({ commitmentsDb }); - context = initContext({ persistableState: new AvmPersistableStateManager(hostStorage) }); - - context.machineState.memory.set(0, value); - await expect(new EmitNullifier(/*indirect=*/ 0, /*offset=*/ 0).execute(context)).rejects.toThrow( + mockNullifierExists(hostStorage, leafIndex); // db will say that nullifier already exists + context.machineState.memory.set(value0Offset, new Field(value0)); + await expect(new EmitNullifier(/*indirect=*/ 0, /*offset=*/ value0Offset).execute(context)).rejects.toThrow( new InstructionExecutionError( - `Attempted to emit duplicate nullifier ${value.toFr()} (storage address: ${ - context.environment.storageAddress - }).`, + `Attempted to emit duplicate nullifier ${value0} (storage address: ${storageAddress}).`, ), ); + expect(trace.traceNewNullifier).toHaveBeenCalledTimes(0); // the only attempt should fail before tracing }); }); @@ -312,77 +259,44 @@ describe('Accrued Substate', () => { expect(inst.serialize()).toEqual(buf); }); - it('Should correctly show false when L1ToL2 message does not exist', async () => { - const msgHash = new Field(69n); - const leafIndex = new Field(42n); - const msgHashOffset = 0; - const msgLeafIndexOffset = 1; - const existsOffset = 2; - - context.machineState.memory.set(msgHashOffset, msgHash); - context.machineState.memory.set(msgLeafIndexOffset, leafIndex); - await new L1ToL2MessageExists(/*indirect=*/ 0, msgHashOffset, msgLeafIndexOffset, existsOffset).execute(context); - - // never created, doesn't exist! 
- const exists = context.machineState.memory.getAs(existsOffset); - expect(exists).toEqual(new Uint8(0)); - - const journalState = context.persistableState.flush(); - expect(journalState.l1ToL2MessageChecks).toEqual([ - expect.objectContaining({ leafIndex: leafIndex.toFr(), msgHash: msgHash.toFr(), exists: false }), - ]); - }); - - it('Should correctly show true when L1ToL2 message exists', async () => { - const msgHash = new Field(69n); - const leafIndex = new Field(42n); - const msgHashOffset = 0; - const msgLeafIndexOffset = 1; - const existsOffset = 2; - - // mock commitments db to show message exists - const commitmentsDb = mock(); - commitmentsDb.getL1ToL2LeafValue.mockResolvedValue(msgHash.toFr()); - const hostStorage = initHostStorage({ commitmentsDb }); - context = initContext({ persistableState: new AvmPersistableStateManager(hostStorage) }); - - context.machineState.memory.set(msgHashOffset, msgHash); - context.machineState.memory.set(msgLeafIndexOffset, leafIndex); - await new L1ToL2MessageExists(/*indirect=*/ 0, msgHashOffset, msgLeafIndexOffset, existsOffset).execute(context); - - const exists = context.machineState.memory.getAs(existsOffset); - expect(exists).toEqual(new Uint8(1)); - - const journalState = context.persistableState.flush(); - expect(journalState.l1ToL2MessageChecks).toEqual([ - expect.objectContaining({ leafIndex: leafIndex.toFr(), msgHash: msgHash.toFr(), exists: true }), - ]); - }); - - it('Should correctly show false when another L1ToL2 message exists at that index', async () => { - const msgHash = new Field(69n); - const leafIndex = new Field(42n); - const msgHashOffset = 0; - const msgLeafIndexOffset = 1; - const existsOffset = 2; - - const commitmentsDb = mock(); - commitmentsDb.getL1ToL2LeafValue.mockResolvedValue(Fr.ZERO); - const hostStorage = initHostStorage({ commitmentsDb }); - context = initContext({ persistableState: new AvmPersistableStateManager(hostStorage) }); - - context.machineState.memory.set(msgHashOffset, 
msgHash); - context.machineState.memory.set(msgLeafIndexOffset, leafIndex); - await new L1ToL2MessageExists(/*indirect=*/ 0, msgHashOffset, msgLeafIndexOffset, existsOffset).execute(context); - - // never created, doesn't exist! - const exists = context.machineState.memory.getAs(existsOffset); - expect(exists).toEqual(new Uint8(0)); - - const journalState = context.persistableState.flush(); - expect(journalState.l1ToL2MessageChecks).toEqual([ - expect.objectContaining({ leafIndex: leafIndex.toFr(), msgHash: msgHash.toFr(), exists: false }), - ]); + // Will check existence at leafIndex, but nothing may be found there and/or something may be found at mockAtLeafIndex + describe.each([ + [/*mockAtLeafIndex=*/ undefined], // doesn't exist at all + [/*mockAtLeafIndex=*/ leafIndex], // should be found! + [/*mockAtLeafIndex=*/ leafIndex.add(Fr.ONE)], // won't be found! (checking leafIndex+1, but it exists at leafIndex) + ])('L1ToL2 message checks', (mockAtLeafIndex?: Fr) => { + const expectFound = mockAtLeafIndex !== undefined && mockAtLeafIndex.equals(leafIndex); + const existsElsewhere = mockAtLeafIndex !== undefined && !mockAtLeafIndex.equals(leafIndex); + const existsStr = expectFound ? 'DOES exist' : 'does NOT exist'; + const foundAtStr = existsElsewhere + ? 
`at leafIndex=${mockAtLeafIndex.toNumber()} (exists at leafIndex=${leafIndex.toNumber()})` + : ''; + + it(`Should return ${expectFound} (and be traced) when noteHash ${existsStr} ${foundAtStr}`, async () => { + if (mockAtLeafIndex !== undefined) { + mockL1ToL2MessageExists(hostStorage, mockAtLeafIndex, value0, /*valueAtOtherIndices=*/ value1); + } + + context.machineState.memory.set(value0Offset, new Field(value0)); // noteHash + context.machineState.memory.set(leafIndexOffset, new Field(leafIndex)); + await new L1ToL2MessageExists( + /*indirect=*/ 0, + /*msgHashOffset=*/ value0Offset, + leafIndexOffset, + existsOffset, + ).execute(context); + + const gotExists = context.machineState.memory.getAs(existsOffset); + expect(gotExists).toEqual(new Uint8(expectFound ? 1 : 0)); + + expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledTimes(1); + expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledWith( + address, + /*noteHash=*/ value0, + leafIndex, + /*exists=*/ expectFound, + ); + }); }); }); @@ -408,12 +322,15 @@ describe('Accrued Substate', () => { it('Should append unencrypted logs correctly', async () => { const startOffset = 0; - const eventSelector = 5; + const eventSelector = new Fr(5); const eventSelectorOffset = 10; const logSizeOffset = 20; - const values = [new Field(69n), new Field(420n), new Field(Field.MODULUS - 1n)]; - context.machineState.memory.setSlice(startOffset, values); + const values = [new Fr(69n), new Fr(420n), new Fr(Fr.MODULUS - 1n)]; + context.machineState.memory.setSlice( + startOffset, + values.map(f => new Field(f)), + ); context.machineState.memory.set(eventSelectorOffset, new Field(eventSelector)); context.machineState.memory.set(logSizeOffset, new Uint32(values.length)); @@ -424,11 +341,8 @@ describe('Accrued Substate', () => { logSizeOffset, ).execute(context); - const journalState = context.persistableState.flush(); - const expectedLog = Buffer.concat(values.map(v => v.toFr().toBuffer())); - expect(journalState.newLogs).toEqual([ - 
new UnencryptedL2Log(context.environment.address, new EventSelector(eventSelector), expectedLog), - ]); + expect(trace.traceUnencryptedLog).toHaveBeenCalledTimes(1); + expect(trace.traceUnencryptedLog).toHaveBeenCalledWith(address, eventSelector, values); }); }); @@ -450,25 +364,18 @@ describe('Accrued Substate', () => { expect(inst.serialize()).toEqual(buf); }); - it('Should append l2 to l1 messages correctly', async () => { - const recipientOffset = 0; - const recipient = new Fr(42); - const contentOffset = 1; - const content = new Fr(69); - - context.machineState.memory.set(recipientOffset, new Field(recipient)); - context.machineState.memory.set(contentOffset, new Field(content)); - + it('Should append l2 to l1 message correctly', async () => { + // recipient: value0 + // content: value1 + context.machineState.memory.set(value0Offset, new Field(value0)); + context.machineState.memory.set(value1Offset, new Field(value1)); await new SendL2ToL1Message( /*indirect=*/ 0, - /*recipientOffset=*/ recipientOffset, - /*contentOffset=*/ contentOffset, + /*recipientOffset=*/ value0Offset, + /*contentOffset=*/ value1Offset, ).execute(context); - - const journalState = context.persistableState.flush(); - expect(journalState.newL1Messages).toEqual([ - expect.objectContaining({ recipient: EthAddress.fromField(recipient), content }), - ]); + expect(trace.traceNewL2ToL1Message).toHaveBeenCalledTimes(1); + expect(trace.traceNewL2ToL1Message).toHaveBeenCalledWith(/*recipient=*/ value0, /*content=*/ value1); }); }); diff --git a/yarn-project/simulator/src/avm/opcodes/accrued_substate.ts b/yarn-project/simulator/src/avm/opcodes/accrued_substate.ts index c227710208f..97a21cf1440 100644 --- a/yarn-project/simulator/src/avm/opcodes/accrued_substate.ts +++ b/yarn-project/simulator/src/avm/opcodes/accrued_substate.ts @@ -201,7 +201,11 @@ export class L1ToL2MessageExists extends Instruction { const msgHash = memory.get(msgHashOffset).toFr(); const msgLeafIndex = 
memory.get(msgLeafIndexOffset).toFr(); - const exists = await context.persistableState.checkL1ToL2MessageExists(msgHash, msgLeafIndex); + const exists = await context.persistableState.checkL1ToL2MessageExists( + context.environment.address, + msgHash, + msgLeafIndex, + ); memory.set(existsOffset, exists ? new Uint8(1) : new Uint8(0)); memory.assert(memoryOperations); @@ -252,7 +256,7 @@ export class EmitUnencryptedLog extends Instruction { const memoryOperations = { reads: 2 + logSize, indirect: this.indirect }; context.machineState.consumeGas(this.gasCost(memoryOperations)); const log = memory.getSlice(logOffset, logSize).map(f => f.toFr()); - context.persistableState.writeLog(contractAddress, event, log); + context.persistableState.writeUnencryptedLog(contractAddress, event, log); memory.assert(memoryOperations); context.machineState.incrementPc(); @@ -285,7 +289,7 @@ export class SendL2ToL1Message extends Instruction { const recipient = memory.get(recipientOffset).toFr(); const content = memory.get(contentOffset).toFr(); - context.persistableState.writeL1Message(recipient, content); + context.persistableState.writeL2ToL1Message(recipient, content); memory.assert(memoryOperations); context.machineState.incrementPc(); diff --git a/yarn-project/simulator/src/avm/opcodes/contract.test.ts b/yarn-project/simulator/src/avm/opcodes/contract.test.ts index 105d9ef579c..ced3a000d64 100644 --- a/yarn-project/simulator/src/avm/opcodes/contract.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/contract.test.ts @@ -1,21 +1,31 @@ -import { AztecAddress, Fr } from '@aztec/circuits.js'; -import { type ContractInstanceWithAddress } from '@aztec/types/contracts'; +import { randomContractInstanceWithAddress } from '@aztec/circuit-types'; +import { AztecAddress } from '@aztec/circuits.js'; +import { SerializableContractInstance } from '@aztec/types/contracts'; import { mock } from 'jest-mock-extended'; -import { type PublicContractsDB } from '../../public/db_interfaces.js'; 
+import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { type AvmContext } from '../avm_context.js'; import { Field } from '../avm_memory_types.js'; -import { initContext, initHostStorage } from '../fixtures/index.js'; -import { AvmPersistableStateManager } from '../journal/journal.js'; +import { initContext, initHostStorage, initPersistableStateManager } from '../fixtures/index.js'; +import { type HostStorage } from '../journal/host_storage.js'; +import { type AvmPersistableStateManager } from '../journal/journal.js'; +import { mockGetContractInstance } from '../test_utils.js'; import { GetContractInstance } from './contract.js'; describe('Contract opcodes', () => { - let context: AvmContext; const address = AztecAddress.random(); - beforeEach(async () => { - context = initContext(); + let hostStorage: HostStorage; + let trace: PublicSideEffectTraceInterface; + let persistableState: AvmPersistableStateManager; + let context: AvmContext; + + beforeEach(() => { + hostStorage = initHostStorage(); + trace = mock(); + persistableState = initPersistableStateManager({ hostStorage, trace }); + context = initContext({ persistableState }); }); describe('GETCONTRACTINSTANCE', () => { @@ -37,22 +47,10 @@ describe('Contract opcodes', () => { }); it('should copy contract instance to memory if found', async () => { - context.machineState.memory.set(0, new Field(address.toField())); - - const contractInstance = { - address: address, - version: 1 as const, - salt: new Fr(20), - contractClassId: new Fr(30), - initializationHash: new Fr(40), - publicKeysHash: new Fr(50), - deployer: AztecAddress.random(), - } as ContractInstanceWithAddress; - - const contractsDb = mock(); - contractsDb.getContractInstance.mockResolvedValue(Promise.resolve(contractInstance)); - context.persistableState = new AvmPersistableStateManager(initHostStorage({ contractsDb })); + const contractInstance = randomContractInstanceWithAddress(/*(base instance) 
opts=*/ {}, /*address=*/ address); + mockGetContractInstance(hostStorage, contractInstance); + context.machineState.memory.set(0, new Field(address.toField())); await new GetContractInstance(/*indirect=*/ 0, /*addressOffset=*/ 0, /*dstOffset=*/ 1).execute(context); const actual = context.machineState.memory.getSlice(1, 6); @@ -64,9 +62,13 @@ describe('Contract opcodes', () => { new Field(contractInstance.initializationHash), new Field(contractInstance.publicKeysHash), ]); + + expect(trace.traceGetContractInstance).toHaveBeenCalledTimes(1); + expect(trace.traceGetContractInstance).toHaveBeenCalledWith({ exists: true, ...contractInstance }); }); it('should return zeroes if not found', async () => { + const emptyContractInstance = SerializableContractInstance.empty().withAddress(address); context.machineState.memory.set(0, new Field(address.toField())); await new GetContractInstance(/*indirect=*/ 0, /*addressOffset=*/ 0, /*dstOffset=*/ 1).execute(context); @@ -80,6 +82,9 @@ describe('Contract opcodes', () => { new Field(0), new Field(0), ]); + + expect(trace.traceGetContractInstance).toHaveBeenCalledTimes(1); + expect(trace.traceGetContractInstance).toHaveBeenCalledWith({ exists: false, ...emptyContractInstance }); }); }); }); diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts index 6dd086bc78d..19da62cc3a1 100644 --- a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts @@ -1,16 +1,16 @@ import { Fr } from '@aztec/foundation/fields'; -import { jest } from '@jest/globals'; import { mock } from 'jest-mock-extended'; -import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from '../../index.js'; +import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { markBytecodeAsAvm } from '../../public/transitional_adaptors.js'; import { type 
AvmContext } from '../avm_context.js'; import { Field, Uint8, Uint32 } from '../avm_memory_types.js'; -import { adjustCalldataIndex, initContext } from '../fixtures/index.js'; -import { HostStorage } from '../journal/host_storage.js'; -import { AvmPersistableStateManager } from '../journal/journal.js'; +import { adjustCalldataIndex, initContext, initHostStorage, initPersistableStateManager } from '../fixtures/index.js'; +import { type HostStorage } from '../journal/host_storage.js'; +import { type AvmPersistableStateManager } from '../journal/journal.js'; import { encodeToBytecode } from '../serialization/bytecode_serialization.js'; +import { mockGetBytecode, mockTraceFork } from '../test_utils.js'; import { L2GasLeft } from './context_getters.js'; import { Call, Return, Revert, StaticCall } from './external_calls.js'; import { type Instruction } from './instruction.js'; @@ -19,14 +19,16 @@ import { SStore } from './storage.js'; describe('External Calls', () => { let context: AvmContext; + let hostStorage: HostStorage; + let trace: PublicSideEffectTraceInterface; + let persistableState: AvmPersistableStateManager; beforeEach(() => { - const contractsDb = mock(); - const commitmentsDb = mock(); - const publicStateDb = mock(); - const hostStorage = new HostStorage(publicStateDb, contractsDb, commitmentsDb); - const journal = new AvmPersistableStateManager(hostStorage); - context = initContext({ persistableState: journal }); + hostStorage = initHostStorage(); + trace = mock(); + persistableState = initPersistableStateManager({ hostStorage, trace }); + context = initContext({ persistableState: persistableState }); + mockTraceFork(trace); // make sure trace.fork() works on nested call }); describe('Call', () => { @@ -66,11 +68,16 @@ describe('External Calls', () => { const addrOffset = 2; const addr = new Fr(123456n); const argsOffset = 3; - const args = [new Field(1n), new Field(2n), new Field(3n)]; + const valueToStore = new Fr(42); + const valueOffset = 0; // 0th 
entry in calldata to nested call + const slot = new Fr(100); + const slotOffset = 1; // 1st entry in calldata to nested call + const args = [new Field(valueToStore), new Field(slot), new Field(3n)]; const argsSize = args.length; const argsSizeOffset = 20; const retOffset = 7; const retSize = 2; + const expectedRetValue = args.slice(0, retSize); const successOffset = 6; // const otherContextInstructionsL2GasCost = 780; // Includes the cost of the call itself @@ -82,10 +89,11 @@ describe('External Calls', () => { /*copySize=*/ argsSize, /*dstOffset=*/ 0, ), - new SStore(/*indirect=*/ 0, /*srcOffset=*/ 0, /*size=*/ 1, /*slotOffset=*/ 0), + new SStore(/*indirect=*/ 0, /*srcOffset=*/ valueOffset, /*size=*/ 1, /*slotOffset=*/ slotOffset), new Return(/*indirect=*/ 0, /*retOffset=*/ 0, /*size=*/ 2), ]), ); + mockGetBytecode(hostStorage, otherContextInstructionsBytecode); const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; @@ -94,9 +102,6 @@ describe('External Calls', () => { context.machineState.memory.set(2, new Field(addr)); context.machineState.memory.set(argsSizeOffset, new Uint32(argsSize)); context.machineState.memory.setSlice(3, args); - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(otherContextInstructionsBytecode)); const instruction = new Call( /*indirect=*/ 0, @@ -115,18 +120,10 @@ describe('External Calls', () => { expect(successValue).toEqual(new Uint8(1n)); const retValue = context.machineState.memory.getSlice(retOffset, retSize); - expect(retValue).toEqual([new Field(1n), new Field(2n)]); + expect(retValue).toEqual(expectedRetValue); // Check that the storage call has been merged into the parent journal - const { currentStorageValue } = context.persistableState.flush(); - expect(currentStorageValue.size).toEqual(1); - - const nestedContractWrites = currentStorageValue.get(addr.toBigInt()); - expect(nestedContractWrites).toBeDefined(); - - const slotNumber = 1n; 
- const expectedStoredValue = new Fr(1n); - expect(nestedContractWrites!.get(slotNumber)).toEqual(expectedStoredValue); + expect(await context.persistableState.peekStorage(addr, slot)).toEqual(valueToStore); expect(context.machineState.l2GasLeft).toBeLessThan(initialL2Gas); expect(context.machineState.daGasLeft).toEqual(initialDaGas); @@ -150,6 +147,7 @@ describe('External Calls', () => { new Return(/*indirect=*/ 0, /*retOffset=*/ 0, /*size=*/ 1), ]), ); + mockGetBytecode(hostStorage, otherContextInstructionsBytecode); const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; @@ -157,9 +155,6 @@ describe('External Calls', () => { context.machineState.memory.set(1, new Field(daGas)); context.machineState.memory.set(2, new Field(addr)); context.machineState.memory.set(argsSizeOffset, new Uint32(argsSize)); - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(otherContextInstructionsBytecode)); const instruction = new Call( /*indirect=*/ 0, @@ -239,10 +234,7 @@ describe('External Calls', () => { ]; const otherContextInstructionsBytecode = markBytecodeAsAvm(encodeToBytecode(otherContextInstructions)); - - jest - .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') - .mockReturnValue(Promise.resolve(otherContextInstructionsBytecode)); + mockGetBytecode(hostStorage, otherContextInstructionsBytecode); const instruction = new StaticCall( /*indirect=*/ 0, diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.ts index 20f72557b3c..3830d4db0e9 100644 --- a/yarn-project/simulator/src/avm/opcodes/external_calls.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.ts @@ -1,7 +1,6 @@ import { FunctionSelector, Gas } from '@aztec/circuits.js'; import { padArrayEnd } from '@aztec/foundation/collection'; -import { convertAvmResultsToPxResult, createPublicExecution } from 
'../../public/transitional_adaptors.js'; import type { AvmContext } from '../avm_context.js'; import { gasLeftToGas } from '../avm_gas.js'; import { Field, TypeTag, Uint8 } from '../avm_memory_types.js'; @@ -24,7 +23,6 @@ abstract class ExternalCall extends Instruction { OperandType.UINT32, OperandType.UINT32, OperandType.UINT32, - /* temporary function selector */ OperandType.UINT32, ]; @@ -37,8 +35,8 @@ abstract class ExternalCall extends Instruction { private retOffset: number, private retSize: number, private successOffset: number, - // Function selector is temporary since eventually public contract bytecode will be one blob - // containing all functions, and function selector will become an application-level mechanism + // NOTE: Function selector is likely temporary since eventually public contract bytecode will be one + // blob containing all functions, and function selector will become an application-level mechanism // (e.g. first few bytes of calldata + compiler-generated jump table) private functionSelectorOffset: number, ) { @@ -81,7 +79,6 @@ abstract class ExternalCall extends Instruction { const allocatedGas = { l2Gas: allocatedL2Gas, daGas: allocatedDaGas }; context.machineState.consumeGas(allocatedGas); - // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit const nestedContext = context.createNestedContractCallContext( callAddress.toFr(), calldata, @@ -89,38 +86,9 @@ abstract class ExternalCall extends Instruction { callType, FunctionSelector.fromField(functionSelector), ); - const startSideEffectCounter = nestedContext.persistableState.trace.accessCounter; - const oldStyleExecution = createPublicExecution(startSideEffectCounter, nestedContext.environment, calldata); const simulator = new AvmSimulator(nestedContext); const nestedCallResults: AvmContractCallResults = await simulator.execute(); - const functionName = - (await nestedContext.persistableState.hostStorage.contractsDb.getDebugFunctionName( - 
nestedContext.environment.address, - nestedContext.environment.temporaryFunctionSelector, - )) ?? `${nestedContext.environment.address}:${nestedContext.environment.temporaryFunctionSelector}`; - const pxResults = convertAvmResultsToPxResult( - nestedCallResults, - startSideEffectCounter, - oldStyleExecution, - Gas.from(allocatedGas), - nestedContext, - simulator.getBytecode(), - functionName, - ); - // store the old PublicExecutionResult object to maintain a recursive data structure for the old kernel - context.persistableState.transitionalExecutionResult.nestedExecutions.push(pxResults); - // END TRANSITIONAL - - // const nestedContext = context.createNestedContractCallContext( - // callAddress.toFr(), - // calldata, - // allocatedGas, - // this.type, - // FunctionSelector.fromField(functionSelector), - // ); - // const nestedCallResults: AvmContractCallResults = await new AvmSimulator(nestedContext).execute(); - const success = !nestedCallResults.reverted; // TRANSITIONAL: We rethrow here so that the MESSAGE gets propagated. @@ -149,12 +117,16 @@ abstract class ExternalCall extends Instruction { // Refund unused gas context.machineState.refundGas(gasLeftToGas(nestedContext.machineState)); - // TODO: Should we merge the changes from a nested call in the case of a STATIC call? 
- if (success) { - context.persistableState.acceptNestedCallState(nestedContext.persistableState); - } else { - context.persistableState.rejectNestedCallState(nestedContext.persistableState); - } + // Accept the nested call's state and trace the nested call + await context.persistableState.processNestedCall( + /*nestedState=*/ nestedContext.persistableState, + /*success=*/ success, + /*nestedEnvironment=*/ nestedContext.environment, + /*startGasLeft=*/ Gas.from(allocatedGas), + /*endGasLeft=*/ Gas.from(nestedContext.machineState.gasLeft), + /*bytecode=*/ simulator.getBytecode()!, + /*avmCallResults=*/ nestedCallResults, + ); memory.assert(memoryOperations); context.machineState.incrementPc(); diff --git a/yarn-project/simulator/src/avm/opcodes/storage.test.ts b/yarn-project/simulator/src/avm/opcodes/storage.test.ts index 2bd18ebc197..7ddaa9cb5bb 100644 --- a/yarn-project/simulator/src/avm/opcodes/storage.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/storage.test.ts @@ -12,13 +12,13 @@ import { SLoad, SStore } from './storage.js'; describe('Storage Instructions', () => { let context: AvmContext; - let journal: MockProxy; + let persistableState: MockProxy; const address = AztecAddress.random(); beforeEach(async () => { - journal = mock(); + persistableState = mock(); context = initContext({ - persistableState: journal, + persistableState: persistableState, env: initExecutionEnvironment({ address, storageAddress: address }), }); }); @@ -52,12 +52,12 @@ describe('Storage Instructions', () => { await new SStore(/*indirect=*/ 0, /*srcOffset=*/ 1, /*size=*/ 1, /*slotOffset=*/ 0).execute(context); - expect(journal.writeStorage).toHaveBeenCalledWith(address, new Fr(a.toBigInt()), new Fr(b.toBigInt())); + expect(persistableState.writeStorage).toHaveBeenCalledWith(address, new Fr(a.toBigInt()), new Fr(b.toBigInt())); }); it('Should not be able to write to storage in a static call', async () => { context = initContext({ - persistableState: journal, + persistableState: 
persistableState, env: initExecutionEnvironment({ address, storageAddress: address, isStaticCall: true }), }); @@ -96,7 +96,7 @@ describe('Storage Instructions', () => { it('Sload should Read into storage', async () => { // Mock response const expectedResult = new Fr(1n); - journal.readStorage.mockReturnValueOnce(Promise.resolve(expectedResult)); + persistableState.readStorage.mockResolvedValueOnce(expectedResult); const a = new Field(1n); const b = new Field(2n); @@ -106,7 +106,7 @@ describe('Storage Instructions', () => { await new SLoad(/*indirect=*/ 0, /*slotOffset=*/ 0, /*size=*/ 1, /*dstOffset=*/ 1).execute(context); - expect(journal.readStorage).toHaveBeenCalledWith(address, new Fr(a.toBigInt())); + expect(persistableState.readStorage).toHaveBeenCalledWith(address, new Fr(a.toBigInt())); const actual = context.machineState.memory.get(1); expect(actual).toEqual(new Field(expectedResult)); diff --git a/yarn-project/simulator/src/avm/test_utils.ts b/yarn-project/simulator/src/avm/test_utils.ts new file mode 100644 index 00000000000..ce65116d5b8 --- /dev/null +++ b/yarn-project/simulator/src/avm/test_utils.ts @@ -0,0 +1,53 @@ +import { Fr } from '@aztec/circuits.js'; +import { type ContractInstanceWithAddress } from '@aztec/types/contracts'; + +import { type jest } from '@jest/globals'; +import { mock } from 'jest-mock-extended'; + +import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from '../public/db_interfaces.js'; +import { type PublicSideEffectTraceInterface } from '../public/side_effect_trace_interface.js'; +import { type HostStorage } from './journal/host_storage.js'; + +export function mockGetBytecode(hs: HostStorage, bytecode: Buffer) { + (hs as jest.Mocked).contractsDb.getBytecode.mockResolvedValue(bytecode); +} + +export function mockTraceFork(trace: PublicSideEffectTraceInterface, nestedTrace?: PublicSideEffectTraceInterface) { + (trace as jest.Mocked).fork.mockReturnValue( + nestedTrace ?? 
mock(), + ); +} + +export function mockStorageRead(hs: HostStorage, value: Fr) { + (hs.publicStateDb as jest.Mocked).storageRead.mockResolvedValue(value); +} + +export function mockStorageReadWithMap(hs: HostStorage, mockedStorage: Map) { + (hs.publicStateDb as jest.Mocked).storageRead.mockImplementation((_address, slot) => + Promise.resolve(mockedStorage.get(slot.toBigInt()) ?? Fr.ZERO), + ); +} + +export function mockNoteHashExists(hs: HostStorage, leafIndex: Fr, _value?: Fr) { + (hs.commitmentsDb as jest.Mocked).getCommitmentIndex.mockResolvedValue(leafIndex.toBigInt()); +} + +export function mockNullifierExists(hs: HostStorage, leafIndex: Fr, _value?: Fr) { + (hs.commitmentsDb as jest.Mocked).getNullifierIndex.mockResolvedValue(leafIndex.toBigInt()); +} + +export function mockL1ToL2MessageExists(hs: HostStorage, leafIndex: Fr, value: Fr, valueAtOtherIndices?: Fr) { + (hs.commitmentsDb as jest.Mocked).getL1ToL2LeafValue.mockImplementation((index: bigint) => { + if (index == leafIndex.toBigInt()) { + return Promise.resolve(value); + } else { + // any indices other than mockAtLeafIndex will return a different value + // (or undefined if no value is specified for other indices) + return Promise.resolve(valueAtOtherIndices!); + } + }); +} + +export function mockGetContractInstance(hs: HostStorage, contractInstance: ContractInstanceWithAddress) { + (hs.contractsDb as jest.Mocked).getContractInstance.mockResolvedValue(contractInstance); +} diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index ab66af72af1..7da0e48928d 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -22,7 +22,13 @@ import { } from '@aztec/circuits.js'; import { Aes128 } from '@aztec/circuits.js/barretenberg'; import { computeUniqueNoteHash, siloNoteHash } from '@aztec/circuits.js/hash'; -import { type FunctionAbi, 
type FunctionArtifact, countArgumentsSize } from '@aztec/foundation/abi'; +import { + EventSelector, + type FunctionAbi, + type FunctionArtifact, + type NoteSelector, + countArgumentsSize, +} from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { pedersenHash } from '@aztec/foundation/crypto'; import { Fr, GrumpkinScalar, type Point } from '@aztec/foundation/fields'; @@ -288,7 +294,7 @@ export class ClientExecutionContext extends ViewDataOracle { */ public override notifyCreatedNote( storageSlot: Fr, - noteTypeId: Fr, + noteTypeId: NoteSelector, noteItems: Fr[], innerNoteHash: Fr, counter: number, @@ -382,7 +388,7 @@ export class ClientExecutionContext extends ViewDataOracle { preimage: Fr[], ) { const event = new Event(preimage); - const l1EventPayload = new L1EventPayload(event, contractAddress, randomness, eventTypeId); + const l1EventPayload = new L1EventPayload(event, contractAddress, randomness, EventSelector.fromField(eventTypeId)); const taggedEvent = new TaggedLog(l1EventPayload); const ephSk = GrumpkinScalar.random(); @@ -404,7 +410,7 @@ export class ClientExecutionContext extends ViewDataOracle { public override computeEncryptedNoteLog( contractAddress: AztecAddress, storageSlot: Fr, - noteTypeId: Fr, + noteTypeId: NoteSelector, ovKeys: KeyValidationRequest, ivpkM: Point, preimage: Fr[], diff --git a/yarn-project/simulator/src/client/execution_result.ts b/yarn-project/simulator/src/client/execution_result.ts index 0328e04ede3..518b3ea7ed1 100644 --- a/yarn-project/simulator/src/client/execution_result.ts +++ b/yarn-project/simulator/src/client/execution_result.ts @@ -8,6 +8,7 @@ import { type UnencryptedL2Log, } from '@aztec/circuit-types'; import { type IsEmpty, type PrivateCallStackItem, PublicCallRequest, sortByCounter } from '@aztec/circuits.js'; +import { type NoteSelector } from '@aztec/foundation/abi'; import { type Fr } from '@aztec/foundation/fields'; import { type ACVMField } from 
'../acvm/index.js'; @@ -21,7 +22,7 @@ export interface NoteAndSlot { /** The storage slot of the note. */ storageSlot: Fr; /** The note type identifier. */ - noteTypeId: Fr; + noteTypeId: NoteSelector; } export class CountedLog implements IsEmpty { diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 5946f7c0528..c46153495fb 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -33,7 +33,13 @@ import { } from '@aztec/circuits.js'; import { computeNoteHashNonce, computeSecretHash, computeVarArgsHash } from '@aztec/circuits.js/hash'; import { makeHeader } from '@aztec/circuits.js/testing'; -import { type FunctionArtifact, FunctionSelector, encodeArguments, getFunctionArtifact } from '@aztec/foundation/abi'; +import { + type FunctionArtifact, + FunctionSelector, + type NoteSelector, + encodeArguments, + getFunctionArtifact, +} from '@aztec/foundation/abi'; import { asyncMap } from '@aztec/foundation/async-map'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { times } from '@aztec/foundation/collection'; @@ -326,7 +332,7 @@ describe('Private Execution test suite', () => { const mockFirstNullifier = new Fr(1111); let currentNoteIndex = 0n; - const buildNote = (amount: bigint, ownerNpkMHash: Fr, storageSlot: Fr, noteTypeId: Fr) => { + const buildNote = (amount: bigint, ownerNpkMHash: Fr, storageSlot: Fr, noteTypeId: NoteSelector) => { // WARNING: this is not actually how nonces are computed! // For the purpose of this test we use a mocked firstNullifier and and a random number // to compute the nonce. 
Proper nonces are only enforced later by the kernel/later circuits @@ -847,7 +853,6 @@ describe('Private Execution test suite', () => { const secret = new Fr(1n); const secretHash = computeSecretHash(secret); const note = new Note([secretHash]); - // @todo @LHerskind (#6001) Need to investigate why this was working with `new Fr(5)` as the `example_set = 2` should have caused a failure. const storageSlot = TestContractArtifact.storageLayout['example_set'].slot; oracle.getNotes.mockResolvedValue([ { diff --git a/yarn-project/simulator/src/client/simulator.ts b/yarn-project/simulator/src/client/simulator.ts index 0be77660a6a..e72c4ef7e97 100644 --- a/yarn-project/simulator/src/client/simulator.ts +++ b/yarn-project/simulator/src/client/simulator.ts @@ -5,6 +5,7 @@ import { type FunctionArtifact, FunctionSelector, FunctionType, + type NoteSelector, encodeArguments, } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; @@ -140,7 +141,7 @@ export class AcirSimulator { contractAddress: AztecAddress, nonce: Fr, storageSlot: Fr, - noteTypeId: Fr, + noteTypeId: NoteSelector, computeNullifier: boolean, note: Note, ) { @@ -210,7 +211,12 @@ export class AcirSimulator { * @param note - The note. * @returns The note hash. 
*/ - public async computeInnerNoteHash(contractAddress: AztecAddress, storageSlot: Fr, noteTypeId: Fr, note: Note) { + public async computeInnerNoteHash( + contractAddress: AztecAddress, + storageSlot: Fr, + noteTypeId: NoteSelector, + note: Note, + ) { const { innerNoteHash } = await this.computeNoteHashAndOptionallyANullifier( contractAddress, Fr.ZERO, diff --git a/yarn-project/simulator/src/client/unconstrained_execution.test.ts b/yarn-project/simulator/src/client/unconstrained_execution.test.ts index 0f97b50ec42..fbca7486e34 100644 --- a/yarn-project/simulator/src/client/unconstrained_execution.test.ts +++ b/yarn-project/simulator/src/client/unconstrained_execution.test.ts @@ -20,6 +20,8 @@ describe('Unconstrained Execution test suite', () => { node = mock(); node.getBlockNumber.mockResolvedValue(42); + node.getChainId.mockResolvedValue(1); + node.getVersion.mockResolvedValue(1); acirSimulator = new AcirSimulator(oracle, node); }); diff --git a/yarn-project/simulator/src/client/view_data_oracle.ts b/yarn-project/simulator/src/client/view_data_oracle.ts index f70e8db09b8..fd1710205dc 100644 --- a/yarn-project/simulator/src/client/view_data_oracle.ts +++ b/yarn-project/simulator/src/client/view_data_oracle.ts @@ -42,6 +42,14 @@ export class ViewDataOracle extends TypedOracle { return Promise.resolve(this.contractAddress); } + public override getChainId(): Promise { + return Promise.resolve(this.aztecNode.getChainId().then(id => new Fr(id))); + } + + public override getVersion(): Promise { + return Promise.resolve(this.aztecNode.getVersion().then(v => new Fr(v))); + } + /** * Retrieve keys associated with a specific master public key and app address. * @param pkMHash - The master public key hash. 
diff --git a/yarn-project/simulator/src/mocks/fixtures.ts b/yarn-project/simulator/src/mocks/fixtures.ts index 9c51ebbc184..7bbd49b1f7a 100644 --- a/yarn-project/simulator/src/mocks/fixtures.ts +++ b/yarn-project/simulator/src/mocks/fixtures.ts @@ -143,7 +143,7 @@ export class PublicExecutionResultBuilder { endGasLeft: Gas.test(), transactionFee: Fr.ZERO, calldata: [], - avmHints: AvmExecutionHints.empty(), + avmCircuitHints: AvmExecutionHints.empty(), functionName: 'unknown', ...overrides, }; diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index 1b2833e5875..fecc49988d2 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -266,13 +266,15 @@ export abstract class AbstractPhaseManager { const isExecutionRequest = !isPublicExecutionResult(current); const result = isExecutionRequest ? await this.publicExecutor.simulate( - current, + /*executionRequest=*/ current, this.globalVariables, /*availableGas=*/ this.getAvailableGas(tx, kernelPublicOutput), tx.data.constants.txContext, /*pendingNullifiers=*/ this.getSiloedPendingNullifiers(kernelPublicOutput), transactionFee, /*startSideEffectCounter=*/ AbstractPhaseManager.getMaxSideEffectCounter(kernelPublicOutput) + 1, + // NOTE: startSideEffectCounter is not the same as the executionRequest's sideEffectCounter + // (which counts the request itself) ) : current; @@ -320,7 +322,7 @@ export abstract class AbstractPhaseManager { calldata: result.calldata, bytecode: result.bytecode!, inputs: privateInputs, - avmHints: result.avmHints, + avmHints: result.avmCircuitHints, }; provingInformationList.push(publicProvingInformation); diff --git a/yarn-project/simulator/src/public/app_logic_phase_manager.ts b/yarn-project/simulator/src/public/app_logic_phase_manager.ts index bf25c580cdb..de2628b6bea 100644 --- 
a/yarn-project/simulator/src/public/app_logic_phase_manager.ts +++ b/yarn-project/simulator/src/public/app_logic_phase_manager.ts @@ -47,6 +47,7 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { // if so, this is removing contracts deployed in private setup await this.publicContractsDB.removeNewContracts(tx); await this.publicStateDB.rollbackToCheckpoint(); + tx.filterRevertedLogs(kernelOutput); } else { tx.unencryptedLogs.addFunctionLogs(newUnencryptedLogs); // TODO(#6470): we should be adding contracts deployed in those logs to the publicContractsDB diff --git a/yarn-project/simulator/src/public/execution.ts b/yarn-project/simulator/src/public/execution.ts index 2d28731f621..e5ca2cecd53 100644 --- a/yarn-project/simulator/src/public/execution.ts +++ b/yarn-project/simulator/src/public/execution.ts @@ -20,16 +20,37 @@ import { type Gas } from '../avm/avm_gas.js'; export interface PublicExecutionResult { /** The execution that triggered this result. */ execution: PublicExecution; + + /** The side effect counter at the start of the function call. */ + startSideEffectCounter: Fr; + /** The side effect counter after executing this function call */ + endSideEffectCounter: Fr; + /** How much gas was available for this public execution. */ + startGasLeft: Gas; + /** How much gas was left after this public execution. */ + endGasLeft: Gas; + /** Transaction fee set for this tx. */ + transactionFee: Fr; + + /** Bytecode used for this execution. */ + bytecode?: Buffer; + /** Calldata used for this execution. */ + calldata: Fr[]; /** The return values of the function. */ returnValues: Fr[]; + /** Whether the execution reverted. */ + reverted: boolean; + /** The revert reason if the execution reverted. */ + revertReason?: SimulationError; + + /** The contract storage reads performed by the function. */ + contractStorageReads: ContractStorageRead[]; + /** The contract storage update requests performed by the function. 
*/ + contractStorageUpdateRequests: ContractStorageUpdateRequest[]; /** The new note hashes to be inserted into the note hashes tree. */ newNoteHashes: NoteHash[]; /** The new l2 to l1 messages generated in this call. */ newL2ToL1Messages: L2ToL1Message[]; - /** The side effect counter at the start of the function call. */ - startSideEffectCounter: Fr; - /** The side effect counter after executing this function call */ - endSideEffectCounter: Fr; /** The new nullifiers to be inserted into the nullifier tree. */ newNullifiers: Nullifier[]; /** The note hash read requests emitted in this call. */ @@ -40,12 +61,6 @@ export interface PublicExecutionResult { nullifierNonExistentReadRequests: ReadRequest[]; /** L1 to L2 message read requests emitted in this call. */ l1ToL2MsgReadRequests: ReadRequest[]; - /** The contract storage reads performed by the function. */ - contractStorageReads: ContractStorageRead[]; - /** The contract storage update requests performed by the function. */ - contractStorageUpdateRequests: ContractStorageUpdateRequest[]; - /** The results of nested calls. */ - nestedExecutions: this[]; /** * The hashed logs with side effect counter. * Note: required as we don't track the counter anywhere else. @@ -61,22 +76,15 @@ export interface PublicExecutionResult { * Useful for maintaining correct ordering in ts. */ allUnencryptedLogs: UnencryptedFunctionL2Logs; - /** Whether the execution reverted. */ - reverted: boolean; - /** The revert reason if the execution reverted. */ - revertReason?: SimulationError; - /** How much gas was available for this public execution. */ - startGasLeft: Gas; - /** How much gas was left after this public execution. */ - endGasLeft: Gas; - /** Transaction fee set for this tx. */ - transactionFee: Fr; - /** Bytecode used for this execution. */ - bytecode?: Buffer; - /** Calldata used for this execution. */ - calldata: Fr[]; + + // TODO(dbanks12): add contract instance read requests + + /** The results of nested calls. 
*/ + nestedExecutions: this[]; + /** Hints for proving AVM execution. */ - avmHints: AvmExecutionHints; + avmCircuitHints: AvmExecutionHints; + /** The name of the function that was executed. Only used for logging. */ functionName: string; } diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index 45885d23de6..8486fb8d80e 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -1,5 +1,5 @@ import { type AvmSimulationStats } from '@aztec/circuit-types/stats'; -import { Fr, type Gas, type GlobalVariables, type Header, type Nullifier, type TxContext } from '@aztec/circuits.js'; +import { Fr, Gas, type GlobalVariables, type Header, type Nullifier, type TxContext } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; @@ -10,7 +10,8 @@ import { HostStorage } from '../avm/journal/host_storage.js'; import { AvmPersistableStateManager } from '../avm/journal/index.js'; import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from './db_interfaces.js'; import { type PublicExecution, type PublicExecutionResult, checkValidStaticCall } from './execution.js'; -import { convertAvmResultsToPxResult, createAvmExecutionEnvironment } from './transitional_adaptors.js'; +import { PublicSideEffectTrace } from './side_effect_trace.js'; +import { createAvmExecutionEnvironment } from './transitional_adaptors.js'; /** * Handles execution of public functions. @@ -27,54 +28,57 @@ export class PublicExecutor { /** * Executes a public execution request. - * @param execution - The execution to run. + * @param executionRequest - The execution to run. * @param globalVariables - The global variables to use. - * @returns The result of the run plus all nested runs. + * @param availableGas - The gas available at the start of this enqueued call. + * @param txContext - Transaction context. 
+ * @param pendingSiloedNullifiers - The pending nullifier set from earlier parts of this TX. + * @param transactionFee - Fee offered for this TX. + * @param startSideEffectCounter - The counter of the first side-effect generated by this simulation. + * @returns The result of execution, including the results of all nested calls. */ public async simulate( - execution: PublicExecution, + executionRequest: PublicExecution, globalVariables: GlobalVariables, availableGas: Gas, txContext: TxContext, - pendingNullifiers: Nullifier[], + pendingSiloedNullifiers: Nullifier[], transactionFee: Fr = Fr.ZERO, startSideEffectCounter: number = 0, ): Promise { - const address = execution.contractAddress; - const selector = execution.functionSelector; - const startGas = availableGas; + const address = executionRequest.contractAddress; + const selector = executionRequest.functionSelector; const fnName = (await this.contractsDb.getDebugFunctionName(address, selector)) ?? `${address}:${selector}`; PublicExecutor.log.verbose(`[AVM] Executing public external function ${fnName}.`); const timer = new Timer(); - // Temporary code to construct the AVM context - // These data structures will permeate across the simulator when the public executor is phased out const hostStorage = new HostStorage(this.stateDb, this.contractsDb, this.commitmentsDb); + const trace = new PublicSideEffectTrace(startSideEffectCounter); + const avmPersistableState = AvmPersistableStateManager.newWithPendingSiloedNullifiers( + hostStorage, + trace, + pendingSiloedNullifiers.map(n => n.value), + ); - const worldStateJournal = new AvmPersistableStateManager(hostStorage); - for (const nullifier of pendingNullifiers) { - worldStateJournal.nullifiers.cache.appendSiloed(nullifier.value); - } - worldStateJournal.trace.accessCounter = startSideEffectCounter; - - const executionEnv = createAvmExecutionEnvironment( - execution, + const avmExecutionEnv = createAvmExecutionEnvironment( + executionRequest, this.header, 
globalVariables, txContext.gasSettings, transactionFee, ); - const machineState = new AvmMachineState(startGas); - const avmContext = new AvmContext(worldStateJournal, executionEnv, machineState); + const avmMachineState = new AvmMachineState(availableGas); + const avmContext = new AvmContext(avmPersistableState, avmExecutionEnv, avmMachineState); const simulator = new AvmSimulator(avmContext); const avmResult = await simulator.execute(); - const bytecode = simulator.getBytecode(); + const bytecode = simulator.getBytecode()!; // Commit the journals state to the DBs since this is a top-level execution. // Observe that this will write all the state changes to the DBs, not only the latest for each slot. // However, the underlying DB keep a cache and will only write the latest state to disk. + // TODO(dbanks12): this should be unnecessary here or should be exposed by state manager await avmContext.persistableState.publicStorage.commitToDB(); PublicExecutor.log.verbose( @@ -89,28 +93,30 @@ export class PublicExecutor { } satisfies AvmSimulationStats, ); - const executionResult = convertAvmResultsToPxResult( - avmResult, - startSideEffectCounter, - execution, - startGas, - avmContext, + const publicExecutionResult = trace.toPublicExecutionResult( + avmExecutionEnv, + /*startGasLeft=*/ availableGas, + /*endGasLeft=*/ Gas.from(avmContext.machineState.gasLeft), bytecode, + avmResult, fnName, + /*requestSideEffectCounter=*/ executionRequest.callContext.sideEffectCounter, + // NOTE: startSideEffectCounter is not the same as the executionRequest's sideEffectCounter + // (which counts the request itself) ); // TODO(https://github.com/AztecProtocol/aztec-packages/issues/5818): is this really needed? // should already be handled in simulation. 
- if (execution.callContext.isStaticCall) { + if (executionRequest.callContext.isStaticCall) { checkValidStaticCall( - executionResult.newNoteHashes, - executionResult.newNullifiers, - executionResult.contractStorageUpdateRequests, - executionResult.newL2ToL1Messages, - executionResult.unencryptedLogs, + publicExecutionResult.newNoteHashes, + publicExecutionResult.newNullifiers, + publicExecutionResult.contractStorageUpdateRequests, + publicExecutionResult.newL2ToL1Messages, + publicExecutionResult.unencryptedLogs, ); } - return executionResult; + return publicExecutionResult; } } diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index 5040854a83a..362fd788e1d 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -45,6 +45,7 @@ import { WASMSimulator, computeFeePayerBalanceLeafSlot, } from '@aztec/simulator'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type MerkleTreeOperations, type TreeInfo } from '@aztec/world-state'; import { jest } from '@jest/globals'; @@ -95,6 +96,7 @@ describe('public_processor', () => { Header.empty(), publicContractsDB, publicWorldStateDB, + new NoopTelemetryClient(), ); }); @@ -219,6 +221,7 @@ describe('public_processor', () => { header, publicContractsDB, publicWorldStateDB, + new NoopTelemetryClient(), ); }); @@ -230,6 +233,7 @@ describe('public_processor', () => { it('runs a tx with enqueued public calls', async function () { const tx = mockTxWithPartialState({ + hasLogs: true, numberOfRevertiblePublicCallRequests: 2, publicTeardownCallRequest: PublicCallRequest.empty(), }); @@ -253,6 +257,10 @@ describe('public_processor', () => { expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + // we keep the logs + 
expect(processed[0].encryptedLogs.getTotalLogCount()).toBe(6); + expect(processed[0].unencryptedLogs.getTotalLogCount()).toBe(2); + expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]); }); @@ -346,7 +354,7 @@ describe('public_processor', () => { expect(prover.addNewTx).toHaveBeenCalledTimes(0); }); - it('rolls back app logic db updates on failed public execution, but persists setup/teardown', async function () { + it('rolls back app logic db updates on failed public execution, but persists setup', async function () { const baseContractAddressSeed = 0x200; const baseContractAddress = makeAztecAddress(baseContractAddressSeed); const publicCallRequests: PublicCallRequest[] = [ @@ -360,6 +368,7 @@ describe('public_processor', () => { const teardown = publicCallRequests.pop()!; // Remove the last call request to test that the processor can handle this const tx = mockTxWithPartialState({ + hasLogs: true, numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, publicCallRequests, @@ -469,8 +478,10 @@ describe('public_processor', () => { expect(txEffect.publicDataWrites[4]).toEqual( new PublicDataWrite(computePublicDataTreeLeafSlot(baseContractAddress, contractSlotC), fr(0x201)), ); - expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(0); - expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(0); + + // we keep the non-revertible logs + expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(3); + expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(1); expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]); }); @@ -589,6 +600,7 @@ describe('public_processor', () => { const teardown = publicCallRequests.pop()!; const tx = mockTxWithPartialState({ + hasLogs: true, numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, publicCallRequests, @@ -689,8 +701,10 @@ describe('public_processor', () => { expect(txEffect.publicDataWrites[1]).toEqual( new 
PublicDataWrite(computePublicDataTreeLeafSlot(baseContractAddress, contractSlotA), fr(0x102)), ); - expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(0); - expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(0); + + // we keep the non-revertible logs + expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(3); + expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(1); expect(processed[0].data.revertCode).toEqual(RevertCode.TEARDOWN_REVERTED); @@ -711,6 +725,7 @@ describe('public_processor', () => { const teardown = publicCallRequests.pop()!; const tx = mockTxWithPartialState({ + hasLogs: true, numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, publicCallRequests, @@ -812,8 +827,10 @@ describe('public_processor', () => { expect(txEffect.publicDataWrites[1]).toEqual( new PublicDataWrite(computePublicDataTreeLeafSlot(baseContractAddress, contractSlotA), fr(0x102)), ); - expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(0); - expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(0); + + // we keep the non-revertible logs + expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(3); + expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(1); expect(processed[0].data.revertCode).toEqual(RevertCode.BOTH_REVERTED); diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index f5f5bd3749c..fa15414db15 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -30,6 +30,7 @@ import { computeFeePayerBalanceLeafSlot, computeFeePayerBalanceStorageSlot, } from '@aztec/simulator'; +import { Attributes, type TelemetryClient, type Tracer, trackSpan } from '@aztec/telemetry-client'; import { type ContractDataSource } from '@aztec/types/contracts'; import { type MerkleTreeOperations } from '@aztec/world-state'; @@ -47,6 +48,7 @@ export class PublicProcessorFactory { private merkleTree: 
MerkleTreeOperations, private contractDataSource: ContractDataSource, private simulator: SimulationProvider, + private telemetryClient: TelemetryClient, ) {} /** @@ -74,6 +76,7 @@ export class PublicProcessorFactory { historicalHeader, publicContractsDB, worldStatePublicDB, + this.telemetryClient, ); } } @@ -83,6 +86,7 @@ export class PublicProcessorFactory { * any public function calls in them. Txs with private calls only are unaffected. */ export class PublicProcessor { + public readonly tracer: Tracer; constructor( protected db: MerkleTreeOperations, protected publicExecutor: PublicExecutor, @@ -91,9 +95,11 @@ export class PublicProcessor { protected historicalHeader: Header, protected publicContractsDB: ContractsDataSourcePublicDB, protected publicStateDB: PublicStateDB, - + telemetryClient: TelemetryClient, private log = createDebugLogger('aztec:sequencer:public-processor'), - ) {} + ) { + this.tracer = telemetryClient.getTracer('PublicProcessor'); + } /** * Run each tx through the public circuit and the public kernel circuit if needed. 
@@ -208,6 +214,9 @@ export class PublicProcessor { return finalPublicDataUpdateRequests; } + @trackSpan('PublicProcessor.processTxWithPublicCalls', tx => ({ + [Attributes.TX_HASH]: tx.getTxHash().toString(), + })) private async processTxWithPublicCalls(tx: Tx): Promise<[ProcessedTx, NestedProcessReturnValues[]]> { let returnValues: NestedProcessReturnValues[] = []; const publicProvingRequests: PublicProvingRequest[] = []; diff --git a/yarn-project/simulator/src/public/side_effect_trace.test.ts b/yarn-project/simulator/src/public/side_effect_trace.test.ts new file mode 100644 index 00000000000..fbfb42b2e5f --- /dev/null +++ b/yarn-project/simulator/src/public/side_effect_trace.test.ts @@ -0,0 +1,284 @@ +import { UnencryptedL2Log } from '@aztec/circuit-types'; +import { AztecAddress, EthAddress, Gas, L2ToL1Message } from '@aztec/circuits.js'; +import { EventSelector } from '@aztec/foundation/abi'; +import { Fr } from '@aztec/foundation/fields'; +import { SerializableContractInstance } from '@aztec/types/contracts'; + +import { randomBytes, randomInt } from 'crypto'; + +import { Selector } from '../../../foundation/src/abi/selector.js'; +import { AvmContractCallResults } from '../avm/avm_message_call_result.js'; +import { initExecutionEnvironment } from '../avm/fixtures/index.js'; +import { PublicSideEffectTrace, type TracedContractInstance } from './side_effect_trace.js'; + +function randomTracedContractInstance(): TracedContractInstance { + const instance = SerializableContractInstance.random(); + const address = AztecAddress.random(); + return { exists: true, ...instance, address }; +} + +describe('Side Effect Trace', () => { + const address = Fr.random(); + const utxo = Fr.random(); + const leafIndex = Fr.random(); + const slot = Fr.random(); + const value = Fr.random(); + const recipient = Fr.random(); + const content = Fr.random(); + const event = new Fr(randomBytes(Selector.SIZE).readUint32BE()); + const log = [Fr.random(), Fr.random(), Fr.random()]; + + const 
startGasLeft = Gas.fromFields([new Fr(randomInt(10000)), new Fr(randomInt(10000))]); + const endGasLeft = Gas.fromFields([new Fr(randomInt(10000)), new Fr(randomInt(10000))]); + const transactionFee = Fr.random(); + const calldata = [Fr.random(), Fr.random(), Fr.random(), Fr.random()]; + const bytecode = randomBytes(100); + const returnValues = [Fr.random(), Fr.random()]; + + const avmEnvironment = initExecutionEnvironment({ + address, + calldata, + transactionFee, + }); + const reverted = false; + const avmCallResults = new AvmContractCallResults(reverted, returnValues); + + let startCounter: number; + let startCounterFr: Fr; + let startCounterPlus1: number; + let trace: PublicSideEffectTrace; + + beforeEach(() => { + startCounter = randomInt(/*max=*/ 1000000); + startCounterFr = new Fr(startCounter); + startCounterPlus1 = startCounter + 1; + trace = new PublicSideEffectTrace(startCounter); + }); + + const toPxResult = (trc: PublicSideEffectTrace) => { + return trc.toPublicExecutionResult(avmEnvironment, startGasLeft, endGasLeft, bytecode, avmCallResults); + }; + + it('Should trace storage reads', () => { + const exists = true; + const cached = false; + trace.tracePublicStorageRead(address, slot, value, exists, cached); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.contractStorageReads).toEqual([ + { + storageSlot: slot, + currentValue: value, + counter: startCounter, + contractAddress: AztecAddress.fromField(address), + //exists: exists, + //cached: cached, + }, + ]); + expect(pxResult.avmCircuitHints.storageValues.items).toEqual([{ key: startCounterFr, value: value }]); + }); + + it('Should trace storage writes', () => { + trace.tracePublicStorageWrite(address, slot, value); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.contractStorageUpdateRequests).toEqual([ + { + storageSlot: slot, + newValue: value, + counter: startCounter, + 
contractAddress: AztecAddress.fromField(address), + }, + ]); + }); + + it('Should trace note hash checks', () => { + const exists = true; + trace.traceNoteHashCheck(address, utxo, leafIndex, exists); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.noteHashReadRequests).toEqual([ + { + //storageAddress: contractAddress, + value: utxo, + //exists: exists, + counter: startCounter, + //leafIndex: leafIndex, + }, + ]); + expect(pxResult.avmCircuitHints.noteHashExists.items).toEqual([{ key: startCounterFr, value: new Fr(exists) }]); + }); + + it('Should trace note hashes', () => { + trace.traceNewNoteHash(address, utxo); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.newNoteHashes).toEqual([ + { + //storageAddress: contractAddress, + value: utxo, + counter: startCounter, + }, + ]); + }); + + it('Should trace nullifier checks', () => { + const exists = true; + const isPending = false; + trace.traceNullifierCheck(address, utxo, leafIndex, exists, isPending); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.nullifierReadRequests).toEqual([ + { + value: utxo, + counter: startCounter, + }, + ]); + expect(pxResult.nullifierNonExistentReadRequests).toEqual([]); + expect(pxResult.avmCircuitHints.nullifierExists.items).toEqual([{ key: startCounterFr, value: new Fr(exists) }]); + }); + + it('Should trace non-existent nullifier checks', () => { + const exists = false; + const isPending = false; + trace.traceNullifierCheck(address, utxo, leafIndex, exists, isPending); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.nullifierReadRequests).toEqual([]); + expect(pxResult.nullifierNonExistentReadRequests).toEqual([ + { + value: utxo, + counter: startCounter, + }, + ]); + expect(pxResult.avmCircuitHints.nullifierExists.items).toEqual([{ 
key: startCounterFr, value: new Fr(exists) }]); + }); + + it('Should trace nullifiers', () => { + trace.traceNewNullifier(address, utxo); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.newNullifiers).toEqual([ + { + value: utxo, + counter: startCounter, + noteHash: Fr.ZERO, + }, + ]); + }); + + it('Should trace L1ToL2 Message checks', () => { + const exists = true; + trace.traceL1ToL2MessageCheck(address, utxo, leafIndex, exists); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.l1ToL2MsgReadRequests).toEqual([ + { + value: utxo, + counter: startCounter, + }, + ]); + expect(pxResult.avmCircuitHints.l1ToL2MessageExists.items).toEqual([ + { + key: startCounterFr, + value: new Fr(exists), + }, + ]); + }); + + it('Should trace new L2ToL1 messages', () => { + trace.traceNewL2ToL1Message(recipient, content); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + expect(pxResult.newL2ToL1Messages).toEqual([ + new L2ToL1Message(EthAddress.fromField(recipient), content, startCounter), + ]); + }); + + it('Should trace new unencrypted logs', () => { + trace.traceUnencryptedLog(address, event, log); + expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + const expectLog = new UnencryptedL2Log( + AztecAddress.fromField(address), + EventSelector.fromField(event), + Buffer.concat(log.map(f => f.toBuffer())), + ); + expect(pxResult.unencryptedLogs.logs).toEqual([expectLog]); + expect(pxResult.allUnencryptedLogs.logs).toEqual([expectLog]); + expect(pxResult.unencryptedLogsHashes).toEqual([ + expect.objectContaining({ + counter: startCounter, + }), + ]); + }); + + it('Should trace get contract instance', () => { + const instance = randomTracedContractInstance(); + const { version: _, ...instanceWithoutVersion } = instance; + trace.traceGetContractInstance(instance); + 
expect(trace.getCounter()).toBe(startCounterPlus1); + + const pxResult = toPxResult(trace); + // TODO(dbanks12): process contract instance read requests in public kernel + //expect(pxResult.gotContractInstances).toEqual([instance]); + expect(pxResult.avmCircuitHints.contractInstances.items).toEqual([ + { + // hint omits "version" and has "exists" as an Fr + ...instanceWithoutVersion, + exists: new Fr(instance.exists), + }, + ]); + }); + + it('Should trace nested calls', () => { + const existsDefault = true; + const cached = false; + const isPending = false; + + const nestedTrace = new PublicSideEffectTrace(startCounter); + let testCounter = startCounter; + nestedTrace.tracePublicStorageRead(address, slot, value, existsDefault, cached); + testCounter++; + nestedTrace.tracePublicStorageWrite(address, slot, value); + testCounter++; + nestedTrace.traceNoteHashCheck(address, utxo, leafIndex, existsDefault); + testCounter++; + nestedTrace.traceNewNoteHash(address, utxo); + testCounter++; + nestedTrace.traceNullifierCheck(address, utxo, leafIndex, /*exists=*/ true, isPending); + testCounter++; + nestedTrace.traceNullifierCheck(address, utxo, leafIndex, /*exists=*/ false, isPending); + testCounter++; + nestedTrace.traceNewNullifier(address, utxo); + testCounter++; + nestedTrace.traceL1ToL2MessageCheck(address, utxo, leafIndex, existsDefault); + testCounter++; + nestedTrace.traceNewL2ToL1Message(recipient, content); + testCounter++; + nestedTrace.traceUnencryptedLog(address, event, log); + testCounter++; + + trace.traceNestedCall(nestedTrace, avmEnvironment, startGasLeft, endGasLeft, bytecode, avmCallResults); + // parent trace adopts nested call's counter + expect(trace.getCounter()).toBe(testCounter); + + // get parent trace as result + const parentPxResult = toPxResult(trace); + const childPxResult = toPxResult(nestedTrace); + expect(parentPxResult.nestedExecutions).toEqual([childPxResult]); + + // parent absorb's child's unencryptedLogs into all* + 
expect(parentPxResult.allUnencryptedLogs).toEqual(childPxResult.allUnencryptedLogs); + }); +}); diff --git a/yarn-project/simulator/src/public/side_effect_trace.ts b/yarn-project/simulator/src/public/side_effect_trace.ts new file mode 100644 index 00000000000..64e32718a59 --- /dev/null +++ b/yarn-project/simulator/src/public/side_effect_trace.ts @@ -0,0 +1,323 @@ +import { UnencryptedFunctionL2Logs, UnencryptedL2Log } from '@aztec/circuit-types'; +import { + AvmContractInstanceHint, + AvmExecutionHints, + AvmExternalCallHint, + AvmKeyValueHint, + AztecAddress, + CallContext, + ContractStorageRead, + ContractStorageUpdateRequest, + EthAddress, + Gas, + L2ToL1Message, + LogHash, + NoteHash, + Nullifier, + ReadRequest, +} from '@aztec/circuits.js'; +import { EventSelector } from '@aztec/foundation/abi'; +import { Fr } from '@aztec/foundation/fields'; +import { type ContractInstanceWithAddress } from '@aztec/types/contracts'; + +import { type AvmExecutionEnvironment } from '../avm/avm_execution_environment.js'; +import { type AvmContractCallResults } from '../avm/avm_message_call_result.js'; +import { createSimulationError } from '../common/errors.js'; +import { type PublicExecution, type PublicExecutionResult } from './execution.js'; +import { type PublicSideEffectTraceInterface } from './side_effect_trace_interface.js'; + +export type TracedContractInstance = { exists: boolean } & ContractInstanceWithAddress; + +export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { + /** The side effect counter increments with every call to the trace. 
*/ + private sideEffectCounter: number; // kept as number until finalized for efficiency + + private contractStorageReads: ContractStorageRead[] = []; + private contractStorageUpdateRequests: ContractStorageUpdateRequest[] = []; + + private noteHashReadRequests: ReadRequest[] = []; + private newNoteHashes: NoteHash[] = []; + + private nullifierReadRequests: ReadRequest[] = []; + private nullifierNonExistentReadRequests: ReadRequest[] = []; + private newNullifiers: Nullifier[] = []; + + private l1ToL2MsgReadRequests: ReadRequest[] = []; + private newL2ToL1Messages: L2ToL1Message[] = []; + + private unencryptedLogs: UnencryptedL2Log[] = []; + private allUnencryptedLogs: UnencryptedL2Log[] = []; + private unencryptedLogsHashes: LogHash[] = []; + + private gotContractInstances: ContractInstanceWithAddress[] = []; + + private nestedExecutions: PublicExecutionResult[] = []; + + private avmCircuitHints: AvmExecutionHints; + + constructor( + /** The counter of this trace's first side effect. */ + public readonly startSideEffectCounter: number = 0, + ) { + this.sideEffectCounter = startSideEffectCounter; + this.avmCircuitHints = AvmExecutionHints.empty(); + } + + public fork() { + return new PublicSideEffectTrace(this.sideEffectCounter); + } + + public getCounter() { + return this.sideEffectCounter; + } + + private incrementSideEffectCounter() { + this.sideEffectCounter++; + } + + public tracePublicStorageRead(storageAddress: Fr, slot: Fr, value: Fr, _exists: boolean, _cached: boolean) { + // TODO(4805): check if some threshold is reached for max storage reads + // (need access to parent length, or trace needs to be initialized with parent's contents) + // NOTE: exists and cached are unused for now but may be used for optimizations or kernel hints later + this.contractStorageReads.push( + new ContractStorageRead(slot, value, this.sideEffectCounter, AztecAddress.fromField(storageAddress)), + ); + this.avmCircuitHints.storageValues.items.push( + new AvmKeyValueHint(/*key=*/ 
new Fr(this.sideEffectCounter), /*value=*/ value), + ); + this.incrementSideEffectCounter(); + } + + public tracePublicStorageWrite(storageAddress: Fr, slot: Fr, value: Fr) { + // TODO(4805): check if some threshold is reached for max storage writes + // (need access to parent length, or trace needs to be initialized with parent's contents) + this.contractStorageUpdateRequests.push( + new ContractStorageUpdateRequest(slot, value, this.sideEffectCounter, storageAddress), + ); + this.incrementSideEffectCounter(); + } + + public traceNoteHashCheck(_storageAddress: Fr, noteHash: Fr, _leafIndex: Fr, exists: boolean) { + // TODO(4805): check if some threshold is reached for max note hash checks + // NOTE: storageAddress is unused but will be important when an AVM circuit processes an entire enqueued call + // TODO(dbanks12): leafIndex is unused for now but later must be used by kernel to constrain that the kernel + // is in fact checking the leaf indicated by the user + this.noteHashReadRequests.push(new ReadRequest(noteHash, this.sideEffectCounter)); + this.avmCircuitHints.noteHashExists.items.push( + new AvmKeyValueHint(/*key=*/ new Fr(this.sideEffectCounter), /*value=*/ new Fr(exists ? 1 : 0)), + ); + this.incrementSideEffectCounter(); + } + + public traceNewNoteHash(_storageAddress: Fr, noteHash: Fr) { + // TODO(4805): check if some threshold is reached for max new note hash + // NOTE: storageAddress is unused but will be important when an AVM circuit processes an entire enqueued call + // TODO(dbanks12): non-existent note hashes should emit a read request of the note hash that actually + // IS there, and the AVM circuit should accept THAT noteHash as a hint. The circuit will then compare + // the noteHash against the one provided by the user code to determine what to return to the user (exists or not), + // and will then propagate the actually-present noteHash to its public inputs. 
+ this.newNoteHashes.push(new NoteHash(noteHash, this.sideEffectCounter)); + this.incrementSideEffectCounter(); + } + + public traceNullifierCheck(_storageAddress: Fr, nullifier: Fr, _leafIndex: Fr, exists: boolean, _isPending: boolean) { + // TODO(4805): check if some threshold is reached for max new nullifier + // NOTE: storageAddress is unused but will be important when an AVM circuit processes an entire enqueued call + // NOTE: isPending and leafIndex are unused for now but may be used for optimizations or kernel hints later + const readRequest = new ReadRequest(nullifier, this.sideEffectCounter); + if (exists) { + this.nullifierReadRequests.push(readRequest); + } else { + this.nullifierNonExistentReadRequests.push(readRequest); + } + this.avmCircuitHints.nullifierExists.items.push( + new AvmKeyValueHint(/*key=*/ new Fr(this.sideEffectCounter), /*value=*/ new Fr(exists ? 1 : 0)), + ); + this.incrementSideEffectCounter(); + } + + public traceNewNullifier(_storageAddress: Fr, nullifier: Fr) { + // TODO(4805): check if some threshold is reached for max new nullifier + // NOTE: storageAddress is unused but will be important when an AVM circuit processes an entire enqueued call + this.newNullifiers.push(new Nullifier(nullifier, this.sideEffectCounter, /*noteHash=*/ Fr.ZERO)); + this.incrementSideEffectCounter(); + } + + public traceL1ToL2MessageCheck(_contractAddress: Fr, msgHash: Fr, _msgLeafIndex: Fr, exists: boolean) { + // TODO(4805): check if some threshold is reached for max message reads + // NOTE: contractAddress is unused but will be important when an AVM circuit processes an entire enqueued call + // TODO(dbanks12): leafIndex is unused for now but later must be used by kernel to constrain that the kernel + // is in fact checking the leaf indicated by the user + this.l1ToL2MsgReadRequests.push(new ReadRequest(msgHash, this.sideEffectCounter)); + this.avmCircuitHints.l1ToL2MessageExists.items.push( + new AvmKeyValueHint(/*key=*/ new 
Fr(this.sideEffectCounter), /*value=*/ new Fr(exists ? 1 : 0)), + ); + this.incrementSideEffectCounter(); + } + + public traceNewL2ToL1Message(recipient: Fr, content: Fr) { + // TODO(4805): check if some threshold is reached for max messages + const recipientAddress = EthAddress.fromField(recipient); + this.newL2ToL1Messages.push(new L2ToL1Message(recipientAddress, content, this.sideEffectCounter)); + this.incrementSideEffectCounter(); + } + + public traceUnencryptedLog(contractAddress: Fr, event: Fr, log: Fr[]) { + // TODO(4805): check if some threshold is reached for max logs + const ulog = new UnencryptedL2Log( + AztecAddress.fromField(contractAddress), + EventSelector.fromField(event), + Buffer.concat(log.map(f => f.toBuffer())), + ); + const basicLogHash = Fr.fromBuffer(ulog.hash()); + this.unencryptedLogs.push(ulog); + this.allUnencryptedLogs.push(ulog); + // TODO(6578): explain magic number 4 here + this.unencryptedLogsHashes.push(new LogHash(basicLogHash, this.sideEffectCounter, new Fr(ulog.length + 4))); + this.incrementSideEffectCounter(); + } + + public traceGetContractInstance(instance: TracedContractInstance) { + // TODO(4805): check if some threshold is reached for max contract instance retrievals + this.gotContractInstances.push(instance); + this.avmCircuitHints.contractInstances.items.push( + new AvmContractInstanceHint( + instance.address, + new Fr(instance.exists ? 1 : 0), + instance.salt, + instance.deployer, + instance.contractClassId, + instance.initializationHash, + instance.publicKeysHash, + ), + ); + this.incrementSideEffectCounter(); + } + + /** + * Trace a nested call. + * Accept some results from a finished nested call's trace into this one. + */ + public traceNestedCall( + /** The trace of the nested call. */ + nestedCallTrace: PublicSideEffectTrace, + /** The execution environment of the nested call. */ + nestedEnvironment: AvmExecutionEnvironment, + /** How much gas was available for this public execution. 
*/ + startGasLeft: Gas, + /** How much gas was left after this public execution. */ + endGasLeft: Gas, + /** Bytecode used for this execution. */ + bytecode: Buffer, + /** The call's results */ + avmCallResults: AvmContractCallResults, + /** Function name for logging */ + functionName: string = 'unknown', + ) { + const result = nestedCallTrace.toPublicExecutionResult( + nestedEnvironment, + startGasLeft, + endGasLeft, + bytecode, + avmCallResults, + functionName, + ); + this.sideEffectCounter = result.endSideEffectCounter.toNumber(); + // when a nested call returns, caller accepts its updated counter + this.allUnencryptedLogs.push(...result.allUnencryptedLogs.logs); + // NOTE: eventually if the AVM circuit processes an entire enqueued call, + // this function will accept all of the nested's side effects into this instance + this.nestedExecutions.push(result); + + const gasUsed = new Gas( + result.startGasLeft.daGas - result.endGasLeft.daGas, + result.startGasLeft.l2Gas - result.endGasLeft.l2Gas, + ); + this.avmCircuitHints.externalCalls.items.push( + new AvmExternalCallHint(/*success=*/ new Fr(result.reverted ? 0 : 1), result.returnValues, gasUsed), + ); + } + + /** + * Convert this trace to a PublicExecutionResult for use externally to the simulator. + */ + public toPublicExecutionResult( + /** The execution environment of the nested call. */ + avmEnvironment: AvmExecutionEnvironment, + /** How much gas was available for this public execution. */ + startGasLeft: Gas, + /** How much gas was left after this public execution. */ + endGasLeft: Gas, + /** Bytecode used for this execution. 
*/ + bytecode: Buffer, + /** The call's results */ + avmCallResults: AvmContractCallResults, + /** Function name for logging */ + functionName: string = 'unknown', + /** The side effect counter of the execution request itself */ + requestSideEffectCounter: number = this.startSideEffectCounter, + ): PublicExecutionResult { + return { + execution: createPublicExecutionRequest(requestSideEffectCounter, avmEnvironment), + + startSideEffectCounter: new Fr(this.startSideEffectCounter), + endSideEffectCounter: new Fr(this.sideEffectCounter), + startGasLeft, + endGasLeft, + transactionFee: avmEnvironment.transactionFee, + + bytecode, + calldata: avmEnvironment.calldata, + returnValues: avmCallResults.output, + reverted: avmCallResults.reverted, + revertReason: avmCallResults.revertReason ? createSimulationError(avmCallResults.revertReason) : undefined, + + contractStorageReads: this.contractStorageReads, + contractStorageUpdateRequests: this.contractStorageUpdateRequests, + noteHashReadRequests: this.noteHashReadRequests, + newNoteHashes: this.newNoteHashes, + nullifierReadRequests: this.nullifierReadRequests, + nullifierNonExistentReadRequests: this.nullifierNonExistentReadRequests, + newNullifiers: this.newNullifiers, + l1ToL2MsgReadRequests: this.l1ToL2MsgReadRequests, + newL2ToL1Messages: this.newL2ToL1Messages, + // correct the type on these now that they are finalized (lists won't grow) + unencryptedLogs: new UnencryptedFunctionL2Logs(this.unencryptedLogs), + allUnencryptedLogs: new UnencryptedFunctionL2Logs(this.allUnencryptedLogs), + unencryptedLogsHashes: this.unencryptedLogsHashes, + // TODO(dbanks12): process contract instance read requests in public kernel + //gotContractInstances: this.gotContractInstances, + + nestedExecutions: this.nestedExecutions, + + avmCircuitHints: this.avmCircuitHints, + + functionName, + }; + } +} + +/** + * Helper function to create a public execution request from an AVM execution environment + */ +function 
createPublicExecutionRequest( + requestSideEffectCounter: number, + avmEnvironment: AvmExecutionEnvironment, +): PublicExecution { + const callContext = CallContext.from({ + msgSender: avmEnvironment.sender, + storageContractAddress: avmEnvironment.storageAddress, + functionSelector: avmEnvironment.temporaryFunctionSelector, + isDelegateCall: avmEnvironment.isDelegateCall, + isStaticCall: avmEnvironment.isStaticCall, + sideEffectCounter: requestSideEffectCounter, + }); + const execution: PublicExecution = { + contractAddress: avmEnvironment.address, + functionSelector: avmEnvironment.temporaryFunctionSelector, + callContext, + // execution request does not contain AvmContextInputs prefix + args: avmEnvironment.getCalldataWithoutPrefix(), + }; + return execution; +} diff --git a/yarn-project/simulator/src/public/side_effect_trace_interface.ts b/yarn-project/simulator/src/public/side_effect_trace_interface.ts new file mode 100644 index 00000000000..60dd0b1107d --- /dev/null +++ b/yarn-project/simulator/src/public/side_effect_trace_interface.ts @@ -0,0 +1,41 @@ +import { type Gas } from '@aztec/circuits.js'; +import { type Fr } from '@aztec/foundation/fields'; + +import { type AvmExecutionEnvironment } from '../avm/avm_execution_environment.js'; +import { type AvmContractCallResults } from '../avm/avm_message_call_result.js'; +import { type TracedContractInstance } from './side_effect_trace.js'; + +export interface PublicSideEffectTraceInterface { + fork(): PublicSideEffectTraceInterface; + getCounter(): number; + tracePublicStorageRead(storageAddress: Fr, slot: Fr, value: Fr, exists: boolean, cached: boolean): void; + tracePublicStorageWrite(storageAddress: Fr, slot: Fr, value: Fr): void; + traceNoteHashCheck(storageAddress: Fr, noteHash: Fr, leafIndex: Fr, exists: boolean): void; + traceNewNoteHash(storageAddress: Fr, noteHash: Fr): void; + traceNullifierCheck(storageAddress: Fr, nullifier: Fr, leafIndex: Fr, exists: boolean, isPending: boolean): void; + 
traceNewNullifier(storageAddress: Fr, nullifier: Fr): void; + traceL1ToL2MessageCheck(contractAddress: Fr, msgHash: Fr, msgLeafIndex: Fr, exists: boolean): void; + // TODO(dbanks12): should new message accept contract address as arg? + traceNewL2ToL1Message(recipient: Fr, content: Fr): void; + traceUnencryptedLog(contractAddress: Fr, event: Fr, log: Fr[]): void; + // TODO(dbanks12): odd that getContractInstance is a one-off in that it accepts an entire object instead of components + traceGetContractInstance(instance: TracedContractInstance): void; + traceNestedCall( + /** The trace of the nested call. */ + nestedCallTrace: PublicSideEffectTraceInterface, + /** The execution environment of the nested call. */ + nestedEnvironment: AvmExecutionEnvironment, + /** How much gas was available for this public execution. */ + // TODO(dbanks12): consider moving to AvmExecutionEnvironment + startGasLeft: Gas, + /** How much gas was left after this public execution. */ + // TODO(dbanks12): consider moving to AvmContractCallResults + endGasLeft: Gas, + /** Bytecode used for this execution. */ + bytecode: Buffer, + /** The call's results */ + avmCallResults: AvmContractCallResults, + /** Function name */ + functionName: string, + ): void; +} diff --git a/yarn-project/simulator/src/public/teardown_phase_manager.ts b/yarn-project/simulator/src/public/teardown_phase_manager.ts index bd1eafbcba9..14eb475746a 100644 --- a/yarn-project/simulator/src/public/teardown_phase_manager.ts +++ b/yarn-project/simulator/src/public/teardown_phase_manager.ts @@ -44,6 +44,7 @@ export class TeardownPhaseManager extends AbstractPhaseManager { ); if (revertReason) { await this.publicStateDB.rollbackToCheckpoint(); + tx.filterRevertedLogs(kernelOutput); } else { // TODO(#6464): Should we allow emitting contracts in the public teardown phase? 
// if so, we should insert them here diff --git a/yarn-project/simulator/src/public/transitional_adaptors.ts b/yarn-project/simulator/src/public/transitional_adaptors.ts index 36d0f2ade12..9cea3c78075 100644 --- a/yarn-project/simulator/src/public/transitional_adaptors.ts +++ b/yarn-project/simulator/src/public/transitional_adaptors.ts @@ -1,29 +1,13 @@ // All code in this file needs to die once the public executor is phased out in favor of the AVM. -import { UnencryptedFunctionL2Logs } from '@aztec/circuit-types'; -import { - AvmContractInstanceHint, - AvmExecutionHints, - AvmExternalCallHint, - AvmKeyValueHint, - CallContext, - Gas, - type GasSettings, - type GlobalVariables, - type Header, -} from '@aztec/circuits.js'; +import { type GasSettings, type GlobalVariables, type Header } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import { promisify } from 'util'; import { gunzip } from 'zlib'; -import { type AvmContext } from '../avm/avm_context.js'; import { AvmExecutionEnvironment } from '../avm/avm_execution_environment.js'; -import { type AvmContractCallResults } from '../avm/avm_message_call_result.js'; -import { type PartialPublicExecutionResult } from '../avm/journal/journal.js'; -import { type WorldStateAccessTrace } from '../avm/journal/trace.js'; import { Mov } from '../avm/opcodes/memory.js'; -import { createSimulationError } from '../common/errors.js'; -import { type PublicExecution, type PublicExecutionResult } from './execution.js'; +import { type PublicExecution } from './execution.js'; /** * Convert a PublicExecution(Environment) object to an AvmExecutionEnvironment @@ -57,90 +41,6 @@ export function createAvmExecutionEnvironment( ); } -export function createPublicExecution( - startSideEffectCounter: number, - avmEnvironment: AvmExecutionEnvironment, - calldata: Fr[], -): PublicExecution { - const callContext = CallContext.from({ - msgSender: avmEnvironment.sender, - storageContractAddress: avmEnvironment.storageAddress, 
- functionSelector: avmEnvironment.temporaryFunctionSelector, - isDelegateCall: avmEnvironment.isDelegateCall, - isStaticCall: avmEnvironment.isStaticCall, - sideEffectCounter: startSideEffectCounter, - }); - const execution: PublicExecution = { - contractAddress: avmEnvironment.address, - callContext, - args: calldata, - functionSelector: avmEnvironment.temporaryFunctionSelector, - }; - return execution; -} - -function computeHints(trace: WorldStateAccessTrace, executionResult: PartialPublicExecutionResult): AvmExecutionHints { - return new AvmExecutionHints( - trace.publicStorageReads.map(read => new AvmKeyValueHint(read.counter, read.value)), - trace.noteHashChecks.map(check => new AvmKeyValueHint(check.counter, new Fr(check.exists ? 1 : 0))), - trace.nullifierChecks.map(check => new AvmKeyValueHint(check.counter, new Fr(check.exists ? 1 : 0))), - trace.l1ToL2MessageChecks.map(check => new AvmKeyValueHint(check.counter, new Fr(check.exists ? 1 : 0))), - executionResult.nestedExecutions.map(nested => { - const gasUsed = new Gas( - nested.startGasLeft.daGas - nested.endGasLeft.daGas, - nested.startGasLeft.l2Gas - nested.endGasLeft.l2Gas, - ); - return new AvmExternalCallHint(/*success=*/ new Fr(nested.reverted ? 0 : 1), nested.returnValues, gasUsed); - }), - trace.gotContractInstances.map( - instance => - new AvmContractInstanceHint( - instance.address, - new Fr(instance.exists ? 
1 : 0), - instance.salt, - instance.deployer, - instance.contractClassId, - instance.initializationHash, - instance.publicKeysHash, - ), - ), - ); -} - -export function convertAvmResultsToPxResult( - avmResult: AvmContractCallResults, - startSideEffectCounter: number, - fromPx: PublicExecution, - startGas: Gas, - endAvmContext: AvmContext, - bytecode: Buffer | undefined, - functionName: string, -): PublicExecutionResult { - const endPersistableState = endAvmContext.persistableState; - const endMachineState = endAvmContext.machineState; - - return { - ...endPersistableState.transitionalExecutionResult, // includes nestedExecutions - functionName: functionName, - execution: fromPx, - returnValues: avmResult.output, - startSideEffectCounter: new Fr(startSideEffectCounter), - endSideEffectCounter: new Fr(endPersistableState.trace.accessCounter), - unencryptedLogs: new UnencryptedFunctionL2Logs(endPersistableState.transitionalExecutionResult.unencryptedLogs), - allUnencryptedLogs: new UnencryptedFunctionL2Logs( - endPersistableState.transitionalExecutionResult.allUnencryptedLogs, - ), - reverted: avmResult.reverted, - revertReason: avmResult.revertReason ? 
createSimulationError(avmResult.revertReason) : undefined, - startGasLeft: startGas, - endGasLeft: endMachineState.gasLeft, - transactionFee: endAvmContext.environment.transactionFee, - bytecode: bytecode, - calldata: endAvmContext.environment.calldata, - avmHints: computeHints(endPersistableState.trace, endPersistableState.transitionalExecutionResult), - }; -} - const AVM_MAGIC_SUFFIX = Buffer.from([ Mov.opcode, // opcode 0x00, // indirect diff --git a/yarn-project/simulator/tsconfig.json b/yarn-project/simulator/tsconfig.json index effb5a7151c..60a3f7e62de 100644 --- a/yarn-project/simulator/tsconfig.json +++ b/yarn-project/simulator/tsconfig.json @@ -21,6 +21,9 @@ { "path": "../protocol-contracts" }, + { + "path": "../telemetry-client" + }, { "path": "../types" }, diff --git a/yarn-project/telemetry-client/.eslintrc.cjs b/yarn-project/telemetry-client/.eslintrc.cjs new file mode 100644 index 00000000000..e659927475c --- /dev/null +++ b/yarn-project/telemetry-client/.eslintrc.cjs @@ -0,0 +1 @@ +module.exports = require('@aztec/foundation/eslint'); diff --git a/yarn-project/telemetry-client/package.json b/yarn-project/telemetry-client/package.json new file mode 100644 index 00000000000..d937716d35f --- /dev/null +++ b/yarn-project/telemetry-client/package.json @@ -0,0 +1,77 @@ +{ + "name": "@aztec/telemetry-client", + "inherits": [ + "../package.common.json" + ], + "type": "module", + "exports": { + ".": "./dest/index.js", + "./start": "./dest/start.js", + "./noop": "./dest/noop.js" + }, + "scripts": { + "build": "yarn clean && tsc -b", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + }, + "engines": { + "node": ">=18" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + 
"dependencies": { + "@aztec/foundation": "workspace:^", + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/exporter-metrics-otlp-http": "^0.52.0", + "@opentelemetry/exporter-trace-otlp-http": "^0.52.0", + "@opentelemetry/host-metrics": "^0.35.2", + "@opentelemetry/resources": "^1.25.0", + "@opentelemetry/sdk-metrics": "^1.25.0", + "@opentelemetry/sdk-trace-node": "^1.25.0", + "@opentelemetry/semantic-conventions": "^1.25.0" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "jest": "^29.5.0", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "jest": { + "extensionsToTreatAsEsm": [ + ".ts" + ], + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } + ] + }, + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ], + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src" + } +} diff --git a/yarn-project/telemetry-client/src/attributes.ts b/yarn-project/telemetry-client/src/attributes.ts new file mode 100644 index 00000000000..d4df0253436 --- /dev/null +++ b/yarn-project/telemetry-client/src/attributes.ts @@ -0,0 +1,49 @@ +/** + * @overview This file contains the custom attributes used in telemetry events. + * Attribute names exist in a global namespace, alongside metric names. Use this file to ensure that attribute names are unique. + * + * To define a new attribute follow these steps: + * 1. Make sure it's not a semantic attribute that's already been defined by {@link @opentelemetry/semantic-conventions | OpenTelemetry} (e.g. `service.name`) + * 2. Come up with a unique name for it so that it doesn't clash with other attributes or metrics. + * 3. Prefix the attribute name with `aztec` to make it clear that it's a custom attribute. + * 4. Add a description of what the attribute represents and examples of what it might contain. + * 5. 
Start using it. + * + * @note Attributes and metric names exist in a hierarchy of namespaces. If a name has been used as a namespace, then it can not be used as a name for an attribute or metric. + * @example If `aztec.circuit.name` has been defined as an attribute then `aztec.circuit` alone can not be re-used for a metric or attribute because it is already a namespace. + * @see {@link https://opentelemetry.io/docs/specs/semconv/general/attribute-naming/} + */ + +/** + * The name of the protocol circuit being run (e.g. public-kernel-setup or base-rollup) + * @see {@link @aztec/circuit-types/stats:CircuitName} + */ +export const PROTOCOL_CIRCUIT_NAME = 'aztec.circuit.protocol_circuit_name'; + +/** + * The type of protocol circuit being run: server or client + */ +export const PROTOCOL_CIRCUIT_TYPE = 'aztec.circuit.protocol_circuit_type'; + +/** + * For an app circuit, the contract:function being run (e.g. Token:transfer) + */ +export const APP_CIRCUIT_NAME = 'aztec.circuit.app_circuit_name'; + +/** + * The type of app circuit being run: server or client + */ +export const APP_CIRCUIT_TYPE = 'aztec.circuit.app_circuit_type'; + +/** The block number */ +export const BLOCK_NUMBER = 'aztec.block.number'; +/** The parent's block number */ +export const BLOCK_PARENT = 'aztec.block.parent'; +/** How many txs are being processed to build this block */ +export const BLOCK_CANDIDATE_TXS_COUNT = 'aztec.block.candidate_txs_count'; +/** How many actual txs were included in this block */ +export const BLOCK_TXS_COUNT = 'aztec.block.txs_count'; +/** The block size (power of 2) */ +export const BLOCK_SIZE = 'aztec.block.size'; +/** The tx hash */ +export const TX_HASH = 'aztec.tx.hash'; diff --git a/yarn-project/telemetry-client/src/index.ts b/yarn-project/telemetry-client/src/index.ts new file mode 100644 index 00000000000..f84f46bf75c --- /dev/null +++ b/yarn-project/telemetry-client/src/index.ts @@ -0,0 +1 @@ +export * from './telemetry.js'; diff --git 
a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts new file mode 100644 index 00000000000..e5487ef41b3 --- /dev/null +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -0,0 +1,30 @@ +/** + * @file Metric names used in Aztec. + * Metric names must be unique and not clash with {@link attributes.ts | Attribute names}. + * Prefix metric names with `aztec` and use dots `.` to separate namespaces. + * + * @see {@link https://opentelemetry.io/docs/specs/semconv/general/metrics/ | OpenTelemetry Metrics} for naming conventions. + */ + +/** How long it takes to simulate a circuit */ +export const CIRCUIT_SIMULATION_DURATION = 'aztec.circuit.simulation.duration'; +export const CIRCUIT_SIMULATION_INPUT_SIZE = 'aztec.circuit.simulation.input_size'; +export const CIRCUIT_SIMULATION_OUTPUT_SIZE = 'aztec.circuit.simulation.output_size'; + +export const CIRCUIT_WITNESS_GEN_DURATION = 'aztec.circuit.witness_generation.duration'; +export const CIRCUIT_WITNESS_GEN_INPUT_SIZE = 'aztec.circuit.witness_generation.input_size'; +export const CIRCUIT_WITNESS_GEN_OUTPUT_SIZE = 'aztec.circuit.witness_generation.output_size'; + +export const CIRCUIT_PROVING_DURATION = 'aztec.circuit.proving.duration'; +export const CIRCUIT_PROVING_INPUT_SIZE = 'aztec.circuit.proving.input_size'; +export const CIRCUIT_PROVING_PROOF_SIZE = 'aztec.circuit.proving.proof_size'; + +export const CIRCUIT_PUBLIC_INPUTS_COUNT = 'aztec.circuit.public_inputs_count'; +export const CIRCUIT_GATE_COUNT = 'aztec.circuit.gate_count'; +export const CIRCUIT_SIZE = 'aztec.circuit.size'; + +export const MEMPOOL_TX_COUNT = 'aztec.mempool.tx_count'; +export const MEMPOOL_TX_SIZE = 'aztec.mempool.tx_size'; + +export const ARCHIVER_BLOCK_HEIGHT = 'aztec.archiver.block_height'; +export const ARCHIVER_BLOCK_SIZE = 'aztec.archiver.block_size'; diff --git a/yarn-project/telemetry-client/src/noop.ts b/yarn-project/telemetry-client/src/noop.ts new file mode 100644 index 00000000000..e4ab8162ffb 
--- /dev/null +++ b/yarn-project/telemetry-client/src/noop.ts @@ -0,0 +1,83 @@ +import { type Meter, type Span, type SpanContext, type Tracer, createNoopMeter } from '@opentelemetry/api'; + +import { type TelemetryClient } from './telemetry.js'; + +export class NoopTelemetryClient implements TelemetryClient { + getMeter(): Meter { + return createNoopMeter(); + } + + getTracer(): Tracer { + return new NoopTracer(); + } + + stop(): Promise { + return Promise.resolve(); + } +} + +// @opentelemetry/api internally uses NoopTracer and NoopSpan but they're not exported +// make our own versions +// https://github.com/open-telemetry/opentelemetry-js/issues/4518#issuecomment-2179405444 +class NoopTracer implements Tracer { + startSpan(): Span { + return new NoopSpan(); + } + + startActiveSpan any>(_name: string, ...args: (unknown | F)[]): ReturnType { + // there are three different signatures for startActiveSpan, grab the function, we don't care about the rest + const fn = args.find(arg => typeof arg === 'function') as F; + return fn(new NoopSpan()); + } +} + +class NoopSpan implements Span { + private recording: boolean = true; + addEvent(): this { + return this; + } + + addLink(): this { + return this; + } + + addLinks(): this { + return this; + } + + end(): void { + this.recording = false; + } + + isRecording(): boolean { + return this.recording; + } + + recordException(): void { + return; + } + + setAttribute(): this { + return this; + } + + setAttributes(): this { + return this; + } + + setStatus(): this { + return this; + } + + spanContext(): SpanContext { + return { + spanId: '', + traceId: '', + traceFlags: 0, + }; + } + + updateName(): this { + return this; + } +} diff --git a/yarn-project/telemetry-client/src/otel.ts b/yarn-project/telemetry-client/src/otel.ts new file mode 100644 index 00000000000..6ed1ce01218 --- /dev/null +++ b/yarn-project/telemetry-client/src/otel.ts @@ -0,0 +1,71 @@ +import { type Meter, type Tracer, type TracerProvider } from 
'@opentelemetry/api'; +import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-http'; +import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'; +import { HostMetrics } from '@opentelemetry/host-metrics'; +import { Resource } from '@opentelemetry/resources'; +import { MeterProvider, PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics'; +import { BatchSpanProcessor, NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; +import { SEMRESATTRS_SERVICE_NAME, SEMRESATTRS_SERVICE_VERSION } from '@opentelemetry/semantic-conventions'; + +import { type TelemetryClient } from './telemetry.js'; + +export class OpenTelemetryClient implements TelemetryClient { + hostMetrics: HostMetrics | undefined; + protected constructor( + private resource: Resource, + private meterProvider: MeterProvider, + private traceProvider: TracerProvider, + ) {} + + getMeter(name: string): Meter { + return this.meterProvider.getMeter(name, this.resource.attributes[SEMRESATTRS_SERVICE_VERSION] as string); + } + + getTracer(name: string): Tracer { + return this.traceProvider.getTracer(name, this.resource.attributes[SEMRESATTRS_SERVICE_VERSION] as string); + } + + public start() { + this.hostMetrics = new HostMetrics({ + name: this.resource.attributes[SEMRESATTRS_SERVICE_NAME] as string, + meterProvider: this.meterProvider, + }); + + this.hostMetrics.start(); + } + + public async stop() { + await Promise.all([this.meterProvider.shutdown()]); + } + + public static createAndStart(name: string, version: string, collectorBaseUrl: URL): OpenTelemetryClient { + const resource = new Resource({ + [SEMRESATTRS_SERVICE_NAME]: name, + [SEMRESATTRS_SERVICE_VERSION]: version, + }); + + const tracerProvider = new NodeTracerProvider({ + resource, + }); + tracerProvider.addSpanProcessor( + new BatchSpanProcessor(new OTLPTraceExporter({ url: new URL('/v1/traces', collectorBaseUrl).href })), + ); + tracerProvider.register(); + + const meterProvider = new 
MeterProvider({ + resource, + readers: [ + new PeriodicExportingMetricReader({ + exporter: new OTLPMetricExporter({ + url: new URL('/v1/metrics', collectorBaseUrl).href, + }), + }), + ], + }); + + const service = new OpenTelemetryClient(resource, meterProvider, tracerProvider); + service.start(); + + return service; + } +} diff --git a/yarn-project/telemetry-client/src/start.ts b/yarn-project/telemetry-client/src/start.ts new file mode 100644 index 00000000000..f83baa83400 --- /dev/null +++ b/yarn-project/telemetry-client/src/start.ts @@ -0,0 +1,27 @@ +import { NoopTelemetryClient } from './noop.js'; +import { OpenTelemetryClient } from './otel.js'; +import { type TelemetryClient } from './telemetry.js'; + +export interface TelemetryClientConfig { + collectorBaseUrl?: URL; + serviceName: string; + serviceVersion: string; +} + +export function createAndStartTelemetryClient(config: TelemetryClientConfig): TelemetryClient { + if (config.collectorBaseUrl) { + return OpenTelemetryClient.createAndStart(config.serviceName, config.serviceVersion, config.collectorBaseUrl); + } else { + return new NoopTelemetryClient(); + } +} + +export function getConfigEnvVars(): TelemetryClientConfig { + const { TEL_COLLECTOR_BASE_URL, TEL_SERVICE_NAME = 'aztec', TEL_SERVICE_VERSION = '0.0.0' } = process.env; + + return { + collectorBaseUrl: TEL_COLLECTOR_BASE_URL ? 
new URL(TEL_COLLECTOR_BASE_URL) : undefined, + serviceName: TEL_SERVICE_NAME, + serviceVersion: TEL_SERVICE_VERSION, + }; +} diff --git a/yarn-project/telemetry-client/src/telemetry.ts b/yarn-project/telemetry-client/src/telemetry.ts new file mode 100644 index 00000000000..bf56bf51af5 --- /dev/null +++ b/yarn-project/telemetry-client/src/telemetry.ts @@ -0,0 +1,180 @@ +import { + type AttributeValue, + type MetricOptions, + type Gauge as OtelGauge, + type Histogram as OtelHistogram, + type UpDownCounter as OtelUpDownCounter, + type Span, + SpanStatusCode, + Tracer, +} from '@opentelemetry/api'; + +import * as Attributes from './attributes.js'; +import * as Metrics from './metrics.js'; + +export { ValueType, Span } from '@opentelemetry/api'; + +type ValuesOf = T extends Record ? U : never; + +/** Global registry of attributes */ +type Attributes = Partial, AttributeValue>>; +export { Attributes }; + +/** Global registry of metrics */ +type Metrics = (typeof Metrics)[keyof typeof Metrics]; +export { Metrics }; + +export type Gauge = OtelGauge; +export type Histogram = OtelHistogram; +export type UpDownCounter = OtelUpDownCounter; + +export { Tracer }; + +// INTERNAL NOTE: this interface is the same as opentelemetry's Meter, but with proper types +/** + * A meter that provides instruments for recording metrics. + */ +export interface Meter { + /** + * Creates a new gauge instrument. A gauge is a metric that represents a single numerical value that can arbitrarily go up and down. + * @param name - The name of the gauge + * @param options - The options for the gauge + */ + createGauge(name: Metrics, options?: MetricOptions): Gauge; + + /** + * Creates a new histogram instrument. A histogram is a metric that samples observations (usually things like request durations or response sizes) and counts them in configurable buckets. 
+ * @param name - The name of the histogram + * @param options - The options for the histogram + */ + createHistogram(name: Metrics, options?: MetricOptions): Histogram; + + /** + * Creates a new counter instrument. A counter can go up or down with a delta from the previous value. + * @param name - The name of the counter + * @param options - The options for the counter + */ + createUpDownCounter(name: Metrics, options?: MetricOptions): UpDownCounter; +} + +/** + * A telemetry client that provides meters for recording metrics. + */ +export interface TelemetryClient { + /** + * Creates a new meter + * @param name - The name of the meter. + */ + getMeter(name: string): Meter; + + /** + * Creates a new tracer + * @param name - The name of the tracer. + */ + getTracer(name: string): Tracer; + + /** + * Stops the telemetry client. + */ + stop(): Promise; +} + +/** Objects that adhere to this interface can use @trackSpan */ +export interface Traceable { + tracer: Tracer; +} + +type SpanDecorator any> = ( + originalMethod: F, + context: ClassMethodDecoratorContext, +) => F; + +/** + * Starts a new span whenever the decorated method is called. + * @param spanName - The name of the span to create. Can be a string or a function that returns a string. + * @param attributes - Initial attributes to set on the span. If a function is provided, it will be called with the arguments of the method. + * @param extraAttributes - Extra attributes to set on the span after the method is called. Will be called with the return value of the method. Note: if the function throws then this will not be called. + * @returns A decorator that wraps the method in a span. 
+ * + * @privateRemarks + * This code looks complex but it's not that difficult: + * - decorators are functions that _replace_ a method with a different implementation + * - normal decorators can't take function arguments, but if we write a function that returns a decorator, we can pass arguments to that function + * + * The trackSpan function takes a span's name and some attributes and builds a decorator that wraps a method in a span with the given name and props + * The decorator can currently only be applied to methods on classes that have a `tracer` property. The compiler will enforce this. + */ +export function trackSpan any>( + spanName: string | ((this: T, ...args: Parameters) => string), + attributes?: Attributes | ((this: T, ...args: Parameters) => Attributes), + extraAttributes?: (this: T, returnValue: Awaited>) => Attributes, +): SpanDecorator { + // the return value of trackSpan is a decorator + return (originalMethod: F, _context: ClassMethodDecoratorContext) => { + // the return value of the decorator replaces the original method + // in this wrapper method we start a span, call the original method, and then end the span + return function replacementMethod(this: T, ...args: Parameters): Promise>> { + const name = typeof spanName === 'function' ? spanName.call(this, ...args) : spanName; + const currentAttrs = typeof attributes === 'function' ? attributes.call(this, ...args) : attributes; + + // run originalMethod wrapped in an active span + // "active" means the span will be alive for the duration of the function execution + // and if any other spans are started during the execution of originalMethod, they will be children of this span + // behind the scenes this uses AsyncLocalStorage https://nodejs.org/dist/latest-v18.x/docs/api/async_context.html + return this.tracer.startActiveSpan(name, async (span: Span) => { + span.setAttributes(currentAttrs ?? 
{}); + + try { + const res = await originalMethod.call(this, ...args); + const extraAttrs = extraAttributes?.call(this, res); + span.setAttributes(extraAttrs ?? {}); + return res; + } catch (err) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: String(err), + }); + throw err; + } finally { + span.end(); + } + }); + } as F; + }; +} + +/** + * Runs an event callback in a span. The span is started immediately and completes once the callback finishes running. + * The span will have two events added: 'callbackStart' and 'callbackEnd' to mark the start and end of the callback. + * + * @param tracer - The tracer instance to use + * @param spanName - The name of the span to create + * @param attributes - Initial attributes to set on the span + * @param callback - The callback to wrap in a span + * + * @returns - A new function that wraps the callback in a span + */ +export function wrapCallbackInSpan any>( + tracer: Tracer, + spanName: string, + attributes: Attributes, + callback: F, +): F { + const span = tracer.startSpan(spanName, { attributes }); + return (async (...args: Parameters) => { + try { + span.addEvent('callbackStart'); + const res = await callback(...args); + return res; + } catch (err) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: String(err), + }); + throw err; + } finally { + span.addEvent('callbackEnd'); + span.end(); + } + }) as F; +} diff --git a/yarn-project/telemetry-client/tsconfig.json b/yarn-project/telemetry-client/tsconfig.json new file mode 100644 index 00000000000..63f8ab3e9f7 --- /dev/null +++ b/yarn-project/telemetry-client/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "references": [ + { + "path": "../foundation" + } + ], + "include": ["src"] +} diff --git a/yarn-project/txe/package.json b/yarn-project/txe/package.json index 5658cd3b454..f77a47b7f62 100644 --- a/yarn-project/txe/package.json +++ 
b/yarn-project/txe/package.json @@ -18,7 +18,8 @@ "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", - "start": "DEBUG='aztec:*' && node ./dest/bin/index.js" + "dev": "DEBUG='aztec:*' && node ./dest/bin/index.js", + "start": "node ./dest/bin/index.js" }, "inherits": [ "../package.common.json" @@ -32,7 +33,15 @@ "workerThreads": true, "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/txe/src/bin/index.ts b/yarn-project/txe/src/bin/index.ts index 14934762159..f46cd541c95 100644 --- a/yarn-project/txe/src/bin/index.ts +++ b/yarn-project/txe/src/bin/index.ts @@ -1,4 +1,5 @@ #!/usr/bin/env -S node --no-warnings +import { Fr } from '@aztec/foundation/fields'; import { JsonRpcServer } from '@aztec/foundation/json-rpc/server'; import { type Logger, createDebugLogger } from '@aztec/foundation/log'; @@ -32,25 +33,20 @@ class TXEDispatcher { function: functionName, inputs, }: TXEForeignCallInput): Promise { - this.logger.debug( - `Calling ${functionName} with inputs: ${JSON.stringify(inputs, null, 2)} on session ${sessionId}`, - ); + this.logger.debug(`Calling ${functionName} on session ${sessionId}`); if (!TXESessions.has(sessionId) && functionName != 'reset') { - this.logger.debug(`Creating new session ${sessionId}`); + this.logger.info(`Creating new session ${sessionId}`); TXESessions.set(sessionId, await TXEService.init(logger)); } if (functionName === 'reset') { TXESessions.delete(sessionId) && - this.logger.debug(`Called reset on session ${sessionId}, yeeting it out of existence`); + this.logger.info(`Called reset on session ${sessionId}, yeeting it out of existence`); return toForeignCallResult([]); } else { const 
txeService = TXESessions.get(sessionId); const response = await (txeService as any)[functionName](...inputs); - this.logger.debug( - `${sessionId}:${functionName}(${JSON.stringify(inputs, null, 2)}) -> ${JSON.stringify(response, null, 2)}`, - ); return response; } } @@ -63,10 +59,11 @@ class TXEDispatcher { * @returns A running http server. */ export function startTXEHttpServer(dispatcher: TXEDispatcher, port: string | number): http.Server { - const txeServer = new JsonRpcServer(dispatcher, {}, {}, ['init']); + const txeServer = new JsonRpcServer(dispatcher, { Fr }, {}, ['init']); const app = txeServer.getApp(); const httpServer = http.createServer(app.callback()); + httpServer.timeout = 1e3 * 60 * 5; // 5 minutes httpServer.listen(port); return httpServer; diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index 09115f3ebab..567bf0a656d 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -1,4 +1,5 @@ import { + AuthWitness, L1NotePayload, MerkleTreeId, Note, @@ -11,9 +12,11 @@ import { } from '@aztec/circuit-types'; import { type CircuitWitnessGenerationStats } from '@aztec/circuit-types/stats'; import { - type CompleteAddress, + CallContext, FunctionData, - type Header, + Gas, + GlobalVariables, + Header, type KeyValidationRequest, NULLIFIER_SUBTREE_HEIGHT, type NULLIFIER_TREE_HEIGHT, @@ -23,16 +26,23 @@ import { PrivateCallStackItem, PrivateCircuitPublicInputs, PrivateContextInputs, - type PublicCallRequest, + PublicCallRequest, PublicDataTreeLeaf, type PublicDataTreeLeafPreimage, + TxContext, computeContractClassId, deriveKeys, getContractClassFromArtifact, } from '@aztec/circuits.js'; -import { Aes128 } from '@aztec/circuits.js/barretenberg'; +import { Aes128, Schnorr } from '@aztec/circuits.js/barretenberg'; import { computePublicDataTreeLeafSlot, siloNoteHash, siloNullifier } from '@aztec/circuits.js/hash'; -import { type ContractArtifact, type FunctionAbi, 
FunctionSelector, countArgumentsSize } from '@aztec/foundation/abi'; +import { + type ContractArtifact, + type FunctionAbi, + FunctionSelector, + type NoteSelector, + countArgumentsSize, +} from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr, GrumpkinScalar, type Point } from '@aztec/foundation/fields'; import { type Logger, applyStringFormatting } from '@aztec/foundation/log'; @@ -40,13 +50,16 @@ import { Timer } from '@aztec/foundation/timer'; import { type KeyStore } from '@aztec/key-store'; import { ContractDataOracle } from '@aztec/pxe'; import { + ContractsDataSourcePublicDB, ExecutionError, type ExecutionNoteCache, type MessageLoadOracleInputs, type NoteData, Oracle, type PackedValuesCache, + PublicExecutor, type TypedOracle, + WorldStateDB, acvm, createSimulationError, extractCallStack, @@ -58,15 +71,21 @@ import { type ContractInstance, type ContractInstanceWithAddress } from '@aztec/ import { MerkleTreeSnapshotOperationsFacade, type MerkleTrees } from '@aztec/world-state'; import { type TXEDatabase } from '../util/txe_database.js'; +import { TXEPublicContractDataSource } from '../util/txe_public_contract_data_source.js'; +import { TXEPublicStateDB } from '../util/txe_public_state_db.js'; export class TXE implements TypedOracle { private blockNumber = 0; private sideEffectsCounter = 0; private contractAddress: AztecAddress; private msgSender: AztecAddress; + private functionSelector = FunctionSelector.fromField(new Fr(0)); private contractDataOracle: ContractDataOracle; + private version: Fr = Fr.ONE; + private chainId: Fr = Fr.ONE; + constructor( private logger: Logger, private trees: MerkleTrees, @@ -82,6 +101,14 @@ export class TXE implements TypedOracle { // Utils + getChainId() { + return Promise.resolve(this.chainId); + } + + getVersion() { + return Promise.resolve(this.version); + } + getMsgSender() { return this.msgSender; } @@ -90,6 +117,10 @@ export class TXE implements TypedOracle { 
this.msgSender = msgSender; } + setFunctionSelector(functionSelector: FunctionSelector) { + this.functionSelector = functionSelector; + } + getSideEffectsCounter() { return this.sideEffectsCounter; } @@ -110,6 +141,10 @@ export class TXE implements TypedOracle { return this.trees; } + getContractDataOracle() { + return this.contractDataOracle; + } + getTXEDatabase() { return this.txeDatabase; } @@ -127,16 +162,24 @@ export class TXE implements TypedOracle { await this.txeDatabase.addContractArtifact(computeContractClassId(contractClass), artifact); } - async getPrivateContextInputs(blockNumber: number, sideEffectsCounter = this.sideEffectsCounter) { + async getPrivateContextInputs( + blockNumber: number, + sideEffectsCounter = this.sideEffectsCounter, + isStaticCall = false, + isDelegateCall = false, + ) { const trees = this.getTrees(); - const stateReference = await trees.getStateReference(true); + const stateReference = await trees.getStateReference(false); const inputs = PrivateContextInputs.empty(); inputs.historicalHeader.globalVariables.blockNumber = new Fr(blockNumber); inputs.historicalHeader.state = stateReference; inputs.callContext.msgSender = this.msgSender; inputs.callContext.storageContractAddress = this.contractAddress; inputs.callContext.sideEffectCounter = sideEffectsCounter; + inputs.callContext.isStaticCall = isStaticCall; + inputs.callContext.isDelegateCall = isDelegateCall; inputs.startSideEffectCounter = sideEffectsCounter; + inputs.callContext.functionSelector = this.functionSelector; return inputs; } @@ -177,13 +220,35 @@ export class TXE implements TypedOracle { return deriveKeys(secret); } + async addAuthWitness(address: AztecAddress, messageHash: Fr) { + const account = this.txeDatabase.getAccount(address); + const privateKey = await this.keyStore.getMasterSecretKey(account.publicKeys.masterIncomingViewingPublicKey); + const schnorr = new Schnorr(); + const signature = schnorr.constructSignature(messageHash.toBuffer(), 
privateKey).toBuffer(); + const authWitness = new AuthWitness(messageHash, [...signature]); + return this.txeDatabase.addAuthWitness(authWitness.requestHash, authWitness.witness); + } + + async addNullifiers(contractAddress: AztecAddress, nullifiers: Fr[]) { + const db = this.trees.asLatest(); + const siloedNullifiers = nullifiers.map(nullifier => siloNullifier(contractAddress, nullifier).toBuffer()); + + await db.batchInsert(MerkleTreeId.NULLIFIER_TREE, siloedNullifiers, NULLIFIER_SUBTREE_HEIGHT); + } + + async addNoteHashes(contractAddress: AztecAddress, innerNoteHashes: Fr[]) { + const db = this.trees.asLatest(); + const siloedNoteHashes = innerNoteHashes.map(innerNoteHash => siloNoteHash(contractAddress, innerNoteHash)); + await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, siloedNoteHashes); + } + // TypedOracle - getBlockNumber(): Promise { + getBlockNumber() { return Promise.resolve(this.blockNumber); } - getContractAddress(): Promise { + getContractAddress() { return Promise.resolve(this.contractAddress); } @@ -191,15 +256,15 @@ export class TXE implements TypedOracle { return Fr.random(); } - packArgumentsArray(args: Fr[]): Promise { + packArgumentsArray(args: Fr[]) { return Promise.resolve(this.packedValuesCache.pack(args)); } - packReturns(returns: Fr[]): Promise { + packReturns(returns: Fr[]) { return Promise.resolve(this.packedValuesCache.pack(returns)); } - unpackReturns(returnsHash: Fr): Promise { + unpackReturns(returnsHash: Fr) { return Promise.resolve(this.packedValuesCache.unpack(returnsHash)); } @@ -208,11 +273,11 @@ export class TXE implements TypedOracle { } async getContractInstance(address: AztecAddress): Promise { - const contractInstance = await this.txeDatabase.getContractInstance(address); + const contractInstance = await this.contractDataOracle.getContractInstance(address); if (!contractInstance) { throw new Error(`Contract instance not found for address ${address}`); } - return Promise.resolve(contractInstance); + return 
contractInstance; } getMembershipWitness(_blockNumber: number, _treeId: MerkleTreeId, _leafValue: Fr): Promise { @@ -279,12 +344,12 @@ export class TXE implements TypedOracle { throw new Error('Method not implemented.'); } - getCompleteAddress(account: AztecAddress): Promise { + getCompleteAddress(account: AztecAddress) { return Promise.resolve(this.txeDatabase.getAccount(account)); } - getAuthWitness(_messageHash: Fr): Promise { - throw new Error('Method not implemented.'); + getAuthWitness(messageHash: Fr) { + return this.txeDatabase.getAuthWitness(messageHash); } popCapsule(): Promise { @@ -333,7 +398,7 @@ export class TXE implements TypedOracle { return Promise.resolve(notes); } - async notifyCreatedNote(storageSlot: Fr, noteTypeId: Fr, noteItems: Fr[], innerNoteHash: Fr, counter: number) { + notifyCreatedNote(storageSlot: Fr, noteTypeId: NoteSelector, noteItems: Fr[], innerNoteHash: Fr, counter: number) { const note = new Note(noteItems); this.noteCache.addNewNote( { @@ -346,16 +411,11 @@ export class TXE implements TypedOracle { }, counter, ); - const db = this.trees.asLatest(); - const noteHash = siloNoteHash(this.contractAddress, innerNoteHash); - await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, [noteHash]); + return Promise.resolve(); } - async notifyNullifiedNote(innerNullifier: Fr, innerNoteHash: Fr, _counter: number) { + notifyNullifiedNote(innerNullifier: Fr, innerNoteHash: Fr, _counter: number) { this.noteCache.nullifyNote(this.contractAddress, innerNullifier, innerNoteHash); - const db = this.trees.asLatest(); - const siloedNullifier = siloNullifier(this.contractAddress, innerNullifier); - await db.batchInsert(MerkleTreeId.NULLIFIER_TREE, [siloedNullifier.toBuffer()], NULLIFIER_SUBTREE_HEIGHT); return Promise.resolve(); } @@ -425,7 +485,7 @@ export class TXE implements TypedOracle { computeEncryptedNoteLog( contractAddress: AztecAddress, storageSlot: Fr, - noteTypeId: Fr, + noteTypeId: NoteSelector, ovKeys: KeyValidationRequest, ivpkM: Point, 
preimage: Fr[], @@ -442,7 +502,7 @@ export class TXE implements TypedOracle { } emitUnencryptedLog(_log: UnencryptedL2Log, _counter: number): void { - throw new Error('Method not implemented.'); + return; } emitContractClassUnencryptedLog(_log: UnencryptedL2Log, _counter: number): Fr { @@ -454,69 +514,97 @@ export class TXE implements TypedOracle { functionSelector: FunctionSelector, argsHash: Fr, sideEffectCounter: number, - _isStaticCall: boolean, - _isDelegateCall: boolean, + isStaticCall: boolean, + isDelegateCall: boolean, ): Promise { - this.logger.debug( - `Calling private function ${targetContractAddress}:${functionSelector} from ${this.contractAddress}`, + this.logger.verbose( + `Executing external function ${targetContractAddress}:${functionSelector}(${await this.getDebugFunctionName( + targetContractAddress, + functionSelector, + )}) isStaticCall=${isStaticCall} isDelegateCall=${isDelegateCall}`, ); + // Store and modify env const currentContractAddress = AztecAddress.fromField(this.contractAddress); const currentMessageSender = AztecAddress.fromField(this.msgSender); + const currentFunctionSelector = FunctionSelector.fromField(this.functionSelector.toField()); this.setMsgSender(this.contractAddress); this.setContractAddress(targetContractAddress); + this.setFunctionSelector(functionSelector); const artifact = await this.contractDataOracle.getFunctionArtifact(targetContractAddress, functionSelector); const acir = artifact.bytecode; - const initialWitness = await this.getInitialWitness(artifact, argsHash, sideEffectCounter); + const initialWitness = await this.getInitialWitness( + artifact, + argsHash, + sideEffectCounter, + isStaticCall, + isDelegateCall, + ); const acvmCallback = new Oracle(this); const timer = new Timer(); - const acirExecutionResult = await acvm(acir, initialWitness, acvmCallback).catch((err: Error) => { - const execError = new ExecutionError( - err.message, - { - contractAddress: targetContractAddress, - functionSelector, - }, - 
extractCallStack(err, artifact.debug), - { cause: err }, + try { + const acirExecutionResult = await acvm(acir, initialWitness, acvmCallback).catch((err: Error) => { + const execError = new ExecutionError( + err.message, + { + contractAddress: targetContractAddress, + functionSelector, + }, + extractCallStack(err, artifact.debug), + { cause: err }, + ); + this.logger.debug(`Error executing private function ${targetContractAddress}:${functionSelector}`); + throw createSimulationError(execError); + }); + const duration = timer.ms(); + const returnWitness = witnessMapToFields(acirExecutionResult.returnWitness); + const publicInputs = PrivateCircuitPublicInputs.fromFields(returnWitness); + + const initialWitnessSize = witnessMapToFields(initialWitness).length * Fr.SIZE_IN_BYTES; + this.logger.debug(`Ran external function ${targetContractAddress.toString()}:${functionSelector}`, { + circuitName: 'app-circuit', + duration, + eventName: 'circuit-witness-generation', + inputSize: initialWitnessSize, + outputSize: publicInputs.toBuffer().length, + appCircuitName: 'noname', + } satisfies CircuitWitnessGenerationStats); + + const callStackItem = new PrivateCallStackItem( + targetContractAddress, + new FunctionData(functionSelector, true), + publicInputs, ); - this.logger.debug( - `Error executing private function ${targetContractAddress}:${functionSelector}\n${createSimulationError( - execError, - )}`, + // Apply side effects + this.sideEffectsCounter = publicInputs.endSideEffectCounter.toNumber(); + + await this.addNullifiers( + targetContractAddress, + publicInputs.newNullifiers.filter(nullifier => !nullifier.isEmpty()).map(nullifier => nullifier.value), ); - throw execError; - }); - const duration = timer.ms(); - const returnWitness = witnessMapToFields(acirExecutionResult.returnWitness); - const publicInputs = PrivateCircuitPublicInputs.fromFields(returnWitness); - - const initialWitnessSize = witnessMapToFields(initialWitness).length * Fr.SIZE_IN_BYTES; - 
this.logger.debug(`Ran external function ${targetContractAddress.toString()}:${functionSelector}`, { - circuitName: 'app-circuit', - duration, - eventName: 'circuit-witness-generation', - inputSize: initialWitnessSize, - outputSize: publicInputs.toBuffer().length, - appCircuitName: 'noname', - } satisfies CircuitWitnessGenerationStats); - - const callStackItem = new PrivateCallStackItem( - targetContractAddress, - new FunctionData(functionSelector, true), - publicInputs, - ); - // Apply side effects - this.sideEffectsCounter += publicInputs.endSideEffectCounter.toNumber(); - this.setContractAddress(currentContractAddress); - this.setMsgSender(currentMessageSender); - return callStackItem; + await this.addNoteHashes( + targetContractAddress, + publicInputs.newNoteHashes.filter(noteHash => !noteHash.isEmpty()).map(noteHash => noteHash.value), + ); + + return callStackItem; + } finally { + this.setContractAddress(currentContractAddress); + this.setMsgSender(currentMessageSender); + this.setFunctionSelector(currentFunctionSelector); + } } - async getInitialWitness(abi: FunctionAbi, argsHash: Fr, sideEffectCounter: number) { + async getInitialWitness( + abi: FunctionAbi, + argsHash: Fr, + sideEffectCounter: number, + isStaticCall: boolean, + isDelegateCall: boolean, + ) { const argumentsSize = countArgumentsSize(abi); const args = this.packedValuesCache.unpack(argsHash); @@ -525,33 +613,220 @@ export class TXE implements TypedOracle { throw new Error('Invalid arguments size'); } - const privateContextInputs = await this.getPrivateContextInputs(this.blockNumber - 1, sideEffectCounter); + const privateContextInputs = await this.getPrivateContextInputs( + this.blockNumber - 1, + sideEffectCounter, + isStaticCall, + isDelegateCall, + ); const fields = [...privateContextInputs.toFields(), ...args]; return toACVMWitness(0, fields); } - callPublicFunction( - _targetContractAddress: AztecAddress, - _functionSelector: FunctionSelector, - _argsHash: Fr, - _sideEffectCounter: 
number, - _isStaticCall: boolean, - _isDelegateCall: boolean, + public async getDebugFunctionName(address: AztecAddress, selector: FunctionSelector): Promise { + const instance = await this.contractDataOracle.getContractInstance(address); + if (!instance) { + return undefined; + } + const artifact = await this.contractDataOracle.getContractArtifact(instance!.contractClassId); + if (!artifact) { + return undefined; + } + + const f = artifact.functions.find(f => + FunctionSelector.fromNameAndParameters(f.name, f.parameters).equals(selector), + ); + if (!f) { + return undefined; + } + + return `${artifact.name}:${f.name}`; + } + + async executePublicFunction( + targetContractAddress: AztecAddress, + functionSelector: FunctionSelector, + args: Fr[], + callContext: CallContext, + ) { + const header = Header.empty(); + header.state = await this.trees.getStateReference(true); + header.globalVariables.blockNumber = new Fr(await this.getBlockNumber()); + header.state.partial.nullifierTree.root = Fr.fromBuffer( + (await this.trees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE, true)).root, + ); + header.state.partial.noteHashTree.root = Fr.fromBuffer( + (await this.trees.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE, true)).root, + ); + header.state.partial.publicDataTree.root = Fr.fromBuffer( + (await this.trees.getTreeInfo(MerkleTreeId.PUBLIC_DATA_TREE, true)).root, + ); + header.state.l1ToL2MessageTree.root = Fr.fromBuffer( + (await this.trees.getTreeInfo(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, true)).root, + ); + const executor = new PublicExecutor( + new TXEPublicStateDB(this), + new ContractsDataSourcePublicDB(new TXEPublicContractDataSource(this)), + new WorldStateDB(this.trees.asLatest()), + header, + ); + const execution = { + contractAddress: targetContractAddress, + functionSelector, + args, + callContext, + }; + + return executor.simulate( + execution, + GlobalVariables.empty(), + Gas.test(), + TxContext.empty(), + /* pendingNullifiers */ [], + /* transactionFee */ Fr.ZERO, + 
callContext.sideEffectCounter, + ); + } + + async avmOpcodeCall( + targetContractAddress: AztecAddress, + functionSelector: FunctionSelector, + args: Fr[], + isStaticCall: boolean, + isDelegateCall: boolean, + ) { + // Store and modify env + const currentContractAddress = AztecAddress.fromField(this.contractAddress); + const currentMessageSender = AztecAddress.fromField(this.msgSender); + const currentFunctionSelector = FunctionSelector.fromField(this.functionSelector.toField()); + this.setMsgSender(this.contractAddress); + this.setContractAddress(targetContractAddress); + this.setFunctionSelector(functionSelector); + + const callContext = CallContext.empty(); + callContext.msgSender = this.msgSender; + callContext.functionSelector = this.functionSelector; + callContext.sideEffectCounter = this.sideEffectsCounter; + callContext.storageContractAddress = targetContractAddress; + callContext.isStaticCall = isStaticCall; + callContext.isDelegateCall = isDelegateCall; + + const executionResult = await this.executePublicFunction( + targetContractAddress, + functionSelector, + args, + callContext, + ); + + // Apply side effects + if (!executionResult.reverted) { + this.sideEffectsCounter += executionResult.endSideEffectCounter.toNumber(); + } + this.setContractAddress(currentContractAddress); + this.setMsgSender(currentMessageSender); + this.setFunctionSelector(currentFunctionSelector); + + return executionResult; + } + + async callPublicFunction( + targetContractAddress: AztecAddress, + functionSelector: FunctionSelector, + argsHash: Fr, + sideEffectCounter: number, + isStaticCall: boolean, + isDelegateCall: boolean, ): Promise { - throw new Error('Method not implemented.'); + // Store and modify env + const currentContractAddress = AztecAddress.fromField(this.contractAddress); + const currentMessageSender = AztecAddress.fromField(this.msgSender); + const currentFunctionSelector = FunctionSelector.fromField(this.functionSelector.toField()); + 
this.setMsgSender(this.contractAddress); + this.setContractAddress(targetContractAddress); + this.setFunctionSelector(functionSelector); + + const callContext = CallContext.empty(); + callContext.msgSender = this.msgSender; + callContext.functionSelector = this.functionSelector; + callContext.sideEffectCounter = sideEffectCounter; + callContext.storageContractAddress = targetContractAddress; + callContext.isStaticCall = isStaticCall; + callContext.isDelegateCall = isDelegateCall; + + const args = this.packedValuesCache.unpack(argsHash); + + const executionResult = await this.executePublicFunction( + targetContractAddress, + functionSelector, + args, + callContext, + ); + + // Apply side effects + this.sideEffectsCounter = executionResult.endSideEffectCounter.toNumber(); + this.setContractAddress(currentContractAddress); + this.setMsgSender(currentMessageSender); + this.setFunctionSelector(currentFunctionSelector); + + return executionResult.returnValues; } - enqueuePublicFunctionCall( - _targetContractAddress: AztecAddress, - _functionSelector: FunctionSelector, - _argsHash: Fr, - _sideEffectCounter: number, - _isStaticCall: boolean, - _isDelegateCall: boolean, + async enqueuePublicFunctionCall( + targetContractAddress: AztecAddress, + functionSelector: FunctionSelector, + argsHash: Fr, + sideEffectCounter: number, + isStaticCall: boolean, + isDelegateCall: boolean, ): Promise { - throw new Error('Method not implemented.'); + // Store and modify env + const currentContractAddress = AztecAddress.fromField(this.contractAddress); + const currentMessageSender = AztecAddress.fromField(this.msgSender); + const currentFunctionSelector = FunctionSelector.fromField(this.functionSelector.toField()); + this.setMsgSender(this.contractAddress); + this.setContractAddress(targetContractAddress); + this.setFunctionSelector(functionSelector); + + const callContext = CallContext.empty(); + callContext.msgSender = this.msgSender; + callContext.functionSelector = 
this.functionSelector; + callContext.sideEffectCounter = sideEffectCounter; + callContext.storageContractAddress = targetContractAddress; + callContext.isStaticCall = isStaticCall; + callContext.isDelegateCall = isDelegateCall; + + const args = this.packedValuesCache.unpack(argsHash); + + const executionResult = await this.executePublicFunction( + targetContractAddress, + functionSelector, + args, + callContext, + ); + + // Apply side effects + this.sideEffectsCounter += executionResult.endSideEffectCounter.toNumber(); + this.setContractAddress(currentContractAddress); + this.setMsgSender(currentMessageSender); + this.setFunctionSelector(currentFunctionSelector); + + const parentCallContext = CallContext.empty(); + parentCallContext.msgSender = currentMessageSender; + parentCallContext.functionSelector = currentFunctionSelector; + parentCallContext.sideEffectCounter = sideEffectCounter; + parentCallContext.storageContractAddress = currentContractAddress; + parentCallContext.isStaticCall = isStaticCall; + parentCallContext.isDelegateCall = isDelegateCall; + + return PublicCallRequest.from({ + parentCallContext, + contractAddress: targetContractAddress, + functionSelector, + callContext, + args, + }); } setPublicTeardownFunctionCall( diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts index 6b53c0de1eb..de25cc81299 100644 --- a/yarn-project/txe/src/txe_service/txe_service.ts +++ b/yarn-project/txe/src/txe_service/txe_service.ts @@ -10,10 +10,10 @@ import { getContractInstanceFromDeployParams, } from '@aztec/circuits.js'; import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; +import { NoteSelector } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { type Logger } from '@aztec/foundation/log'; import { KeyStore } from '@aztec/key-store'; -import { type AztecKVStore } from '@aztec/kv-store'; import { openTmpStore } from 
'@aztec/kv-store/utils'; import { ExecutionNoteCache, PackedValuesCache, type TypedOracle } from '@aztec/simulator'; import { MerkleTrees } from '@aztec/world-state'; @@ -28,10 +28,11 @@ import { toForeignCallResult, toSingle, } from '../util/encoding.js'; +import { ExpectedFailureError } from '../util/expected_failure_error.js'; import { TXEDatabase } from '../util/txe_database.js'; export class TXEService { - constructor(private logger: Logger, private typedOracle: TypedOracle, private store: AztecKVStore) {} + constructor(private logger: Logger, private typedOracle: TypedOracle) {} static async init(logger: Logger) { const store = openTmpStore(true); @@ -42,8 +43,8 @@ export class TXEService { const txeDatabase = new TXEDatabase(store); logger.info(`TXE service initialized`); const txe = new TXE(logger, trees, packedValuesCache, noteCache, keyStore, txeDatabase); - const service = new TXEService(logger, txe, store); - await service.timeTravel(toSingle(new Fr(1n))); + const service = new TXEService(logger, txe); + await service.advanceBlocksBy(toSingle(new Fr(1n))); return service; } @@ -59,31 +60,20 @@ export class TXEService { return toForeignCallResult(inputs.toFields().map(toSingle)); } - async timeTravel(blocks: ForeignCallSingle) { + async advanceBlocksBy(blocks: ForeignCallSingle) { const nBlocks = fromSingle(blocks).toNumber(); - this.logger.info(`time traveling ${nBlocks} blocks`); + this.logger.debug(`time traveling ${nBlocks} blocks`); const trees = (this.typedOracle as TXE).getTrees(); + const header = Header.empty(); + const l2Block = L2Block.empty(); + header.state = await trees.getStateReference(true); + const blockNumber = await this.typedOracle.getBlockNumber(); + header.globalVariables.blockNumber = new Fr(blockNumber); + l2Block.archive.root = Fr.fromBuffer((await trees.getTreeInfo(MerkleTreeId.ARCHIVE, true)).root); + l2Block.header = header; for (let i = 0; i < nBlocks; i++) { - const header = Header.empty(); - const l2Block = 
L2Block.empty(); - header.state = await trees.getStateReference(true); const blockNumber = await this.typedOracle.getBlockNumber(); - header.globalVariables.blockNumber = new Fr(blockNumber); - header.state.partial.nullifierTree.root = Fr.fromBuffer( - (await trees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE, true)).root, - ); - header.state.partial.noteHashTree.root = Fr.fromBuffer( - (await trees.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE, true)).root, - ); - header.state.partial.publicDataTree.root = Fr.fromBuffer( - (await trees.getTreeInfo(MerkleTreeId.PUBLIC_DATA_TREE, true)).root, - ); - header.state.l1ToL2MessageTree.root = Fr.fromBuffer( - (await trees.getTreeInfo(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, true)).root, - ); - l2Block.archive.root = Fr.fromBuffer((await trees.getTreeInfo(MerkleTreeId.ARCHIVE, true)).root); - l2Block.header = header; await trees.handleL2BlockAndMessages(l2Block, []); (this.typedOracle as TXE).setBlockNumber(blockNumber + 1); } @@ -115,7 +105,10 @@ export class TXEService { .map(char => String.fromCharCode(char.toNumber())) .join(''); const decodedArgs = fromArray(args); - this.logger.debug(`Deploy ${pathStr} with ${initializerStr} and ${decodedArgs}`); + const publicKeysHashFr = fromSingle(publicKeysHash); + this.logger.debug( + `Deploy ${pathStr} with initializer ${initializerStr}(${decodedArgs}) and public keys hash ${publicKeysHashFr}`, + ); const contractModule = await import(pathStr); // Hacky way of getting the class, the name of the Artifact is always longer const contractClass = contractModule[Object.keys(contractModule).sort((a, b) => a.length - b.length)[0]]; @@ -123,7 +116,7 @@ export class TXEService { constructorArgs: decodedArgs, skipArgsDecoding: true, salt: Fr.ONE, - publicKeysHash: fromSingle(publicKeysHash), + publicKeysHash: publicKeysHashFr, constructorArtifact: initializerStr ? 
initializerStr : undefined, deployer: AztecAddress.ZERO, }); @@ -131,7 +124,15 @@ export class TXEService { this.logger.debug(`Deployed ${contractClass.artifact.name} at ${instance.address}`); await (this.typedOracle as TXE).addContractInstance(instance); await (this.typedOracle as TXE).addContractArtifact(contractClass.artifact); - return toForeignCallResult([toSingle(instance.address)]); + return toForeignCallResult([ + toArray([ + instance.salt, + instance.deployer, + instance.contractClassId, + instance.initializationHash, + instance.publicKeysHash, + ]), + ]); } async directStorageWrite( @@ -175,6 +176,7 @@ export class TXEService { const completeAddress = await keyStore.addAccount(fromSingle(secret), fromSingle(partialAddress)); const accountStore = (this.typedOracle as TXE).getTXEDatabase(); await accountStore.setAccount(completeAddress.address, completeAddress); + this.logger.debug(`Created account ${completeAddress.address}`); return toForeignCallResult([ toSingle(completeAddress.address), ...completeAddress.publicKeys.toFields().map(toSingle), @@ -196,6 +198,59 @@ export class TXEService { return toForeignCallResult([toSingle(new Fr(counter))]); } + async addAuthWitness(address: ForeignCallSingle, messageHash: ForeignCallSingle) { + await (this.typedOracle as TXE).addAuthWitness(fromSingle(address), fromSingle(messageHash)); + return toForeignCallResult([]); + } + + async assertPublicCallFails( + address: ForeignCallSingle, + functionSelector: ForeignCallSingle, + _length: ForeignCallSingle, + args: ForeignCallArray, + ) { + const parsedAddress = fromSingle(address); + const parsedSelector = FunctionSelector.fromField(fromSingle(functionSelector)); + const result = await (this.typedOracle as TXE).avmOpcodeCall( + parsedAddress, + parsedSelector, + fromArray(args), + false, + false, + ); + if (!result.reverted) { + throw new ExpectedFailureError('Public call did not revert'); + } + + return toForeignCallResult([]); + } + + async assertPrivateCallFails( + 
targetContractAddress: ForeignCallSingle, + functionSelector: ForeignCallSingle, + argsHash: ForeignCallSingle, + sideEffectCounter: ForeignCallSingle, + isStaticCall: ForeignCallSingle, + isDelegateCall: ForeignCallSingle, + ) { + try { + await this.typedOracle.callPrivateFunction( + fromSingle(targetContractAddress), + FunctionSelector.fromField(fromSingle(functionSelector)), + fromSingle(argsHash), + fromSingle(sideEffectCounter).toNumber(), + fromSingle(isStaticCall).toBool(), + fromSingle(isDelegateCall).toBool(), + ); + throw new ExpectedFailureError('Private call did not fail'); + } catch (e) { + if (e instanceof ExpectedFailureError) { + throw e; + } + } + return toForeignCallResult([]); + } + // PXE oracles getRandomField() { @@ -356,7 +411,7 @@ export class TXEService { ) { this.typedOracle.notifyCreatedNote( fromSingle(storageSlot), - fromSingle(noteTypeId), + NoteSelector.fromField(fromSingle(noteTypeId)), fromArray(note), fromSingle(innerNoteHash), fromSingle(counter).toNumber(), @@ -433,10 +488,27 @@ export class TXEService { return toForeignCallResult([toSingle(new Fr(exists))]); } + async avmOpcodeCall( + _gas: ForeignCallArray, + address: ForeignCallSingle, + _length: ForeignCallSingle, + args: ForeignCallArray, + functionSelector: ForeignCallSingle, + ) { + const result = await (this.typedOracle as TXE).avmOpcodeCall( + fromSingle(address), + FunctionSelector.fromField(fromSingle(functionSelector)), + fromArray(args), + false, + false, + ); + + return toForeignCallResult([toArray(result.returnValues), toSingle(new Fr(1))]); + } + async getPublicKeysAndPartialAddress(address: ForeignCallSingle) { const parsedAddress = AztecAddress.fromField(fromSingle(address)); const { publicKeys, partialAddress } = await this.typedOracle.getCompleteAddress(parsedAddress); - return toForeignCallResult([toArray([...publicKeys.toFields(), partialAddress])]); } @@ -462,7 +534,7 @@ export class TXEService { const encLog = this.typedOracle.computeEncryptedNoteLog( 
AztecAddress.fromString(fromSingle(contractAddress).toString()), Fr.fromString(fromSingle(storageSlot).toString()), - Fr.fromString(fromSingle(noteTypeId).toString()), + NoteSelector.fromField(Fr.fromString(fromSingle(noteTypeId).toString())), ovKeys, ivpkM, fromArray(preimage), @@ -519,4 +591,56 @@ export class TXEService { } return toForeignCallResult([toArray(witness.toFields())]); } + + async getAuthWitness(messageHash: ForeignCallSingle) { + const parsedMessageHash = fromSingle(messageHash); + const authWitness = await this.typedOracle.getAuthWitness(parsedMessageHash); + if (!authWitness) { + throw new Error(`Auth witness not found for message hash ${parsedMessageHash}.`); + } + return toForeignCallResult([toArray(authWitness)]); + } + + async enqueuePublicFunctionCall( + targetContractAddress: ForeignCallSingle, + functionSelector: ForeignCallSingle, + argsHash: ForeignCallSingle, + sideEffectCounter: ForeignCallSingle, + isStaticCall: ForeignCallSingle, + isDelegateCall: ForeignCallSingle, + ) { + const publicCallRequest = await this.typedOracle.enqueuePublicFunctionCall( + fromSingle(targetContractAddress), + FunctionSelector.fromField(fromSingle(functionSelector)), + fromSingle(argsHash), + fromSingle(sideEffectCounter).toNumber(), + fromSingle(isStaticCall).toBool(), + fromSingle(isDelegateCall).toBool(), + ); + const fields = [ + publicCallRequest.contractAddress.toField(), + publicCallRequest.functionSelector.toField(), + ...publicCallRequest.callContext.toFields(), + publicCallRequest.getArgsHash(), + ]; + return toForeignCallResult([toArray(fields)]); + } + + async getChainId() { + return toForeignCallResult([toSingle(await this.typedOracle.getChainId())]); + } + + async getVersion() { + return toForeignCallResult([toSingle(await this.typedOracle.getVersion())]); + } + + async addNullifiers(contractAddress: ForeignCallSingle, _length: ForeignCallSingle, nullifiers: ForeignCallArray) { + await (this.typedOracle as 
TXE).addNullifiers(fromSingle(contractAddress), fromArray(nullifiers)); + return toForeignCallResult([]); + } + + async addNoteHashes(contractAddress: ForeignCallSingle, _length: ForeignCallSingle, noteHashes: ForeignCallArray) { + await (this.typedOracle as TXE).addNoteHashes(fromSingle(contractAddress), fromArray(noteHashes)); + return toForeignCallResult([]); + } } diff --git a/yarn-project/txe/src/util/expected_failure_error.ts b/yarn-project/txe/src/util/expected_failure_error.ts new file mode 100644 index 00000000000..8f97a3ae2bf --- /dev/null +++ b/yarn-project/txe/src/util/expected_failure_error.ts @@ -0,0 +1,5 @@ +export class ExpectedFailureError extends Error { + constructor(message: string) { + super(message); + } +} diff --git a/yarn-project/txe/src/util/txe_public_contract_data_source.ts b/yarn-project/txe/src/util/txe_public_contract_data_source.ts new file mode 100644 index 00000000000..64f410f9595 --- /dev/null +++ b/yarn-project/txe/src/util/txe_public_contract_data_source.ts @@ -0,0 +1,63 @@ +import { type AztecAddress, Fr, type FunctionSelector, unpackBytecode } from '@aztec/circuits.js'; +import { type ContractArtifact } from '@aztec/foundation/abi'; +import { PrivateFunctionsTree } from '@aztec/pxe'; +import { + type ContractClassPublic, + type ContractDataSource, + type ContractInstanceWithAddress, + type PublicFunction, +} from '@aztec/types/contracts'; + +import { type TXE } from '../oracle/txe_oracle.js'; + +export class TXEPublicContractDataSource implements ContractDataSource { + constructor(private txeOracle: TXE) {} + + async getPublicFunction(address: AztecAddress, selector: FunctionSelector): Promise { + const bytecode = await this.txeOracle.getContractDataOracle().getBytecode(address, selector); + if (!bytecode) { + return undefined; + } + return { bytecode, selector }; + } + + getBlockNumber(): Promise { + return this.txeOracle.getBlockNumber(); + } + + async getContractClass(id: Fr): Promise { + const contractClass = await 
this.txeOracle.getContractDataOracle().getContractClass(id); + const artifact = await this.txeOracle.getContractDataOracle().getContractArtifact(id); + const tree = new PrivateFunctionsTree(artifact); + const privateFunctionsRoot = tree.getFunctionTreeRoot(); + + return { + id, + artifactHash: contractClass!.artifactHash, + packedBytecode: contractClass!.packedBytecode, + publicFunctions: unpackBytecode(contractClass!.packedBytecode), + privateFunctionsRoot: new Fr(privateFunctionsRoot!.root), + version: contractClass!.version, + privateFunctions: [], + unconstrainedFunctions: [], + }; + } + + async getContract(address: AztecAddress): Promise { + const instance = await this.txeOracle.getContractDataOracle().getContractInstance(address); + return { ...instance, address }; + } + + getContractClassIds(): Promise { + throw new Error('Method not implemented.'); + } + + async getContractArtifact(address: AztecAddress): Promise { + const instance = await this.txeOracle.getContractDataOracle().getContractInstance(address); + return this.txeOracle.getContractDataOracle().getContractArtifact(instance.contractClassId); + } + + addContractArtifact(address: AztecAddress, contract: ContractArtifact): Promise { + return this.txeOracle.addContractArtifact(contract); + } +} diff --git a/yarn-project/txe/src/util/txe_public_state_db.ts b/yarn-project/txe/src/util/txe_public_state_db.ts new file mode 100644 index 00000000000..62bdbaf7e5b --- /dev/null +++ b/yarn-project/txe/src/util/txe_public_state_db.ts @@ -0,0 +1,57 @@ +import { MerkleTreeId } from '@aztec/circuit-types'; +import { + type AztecAddress, + Fr, + PUBLIC_DATA_SUBTREE_HEIGHT, + PublicDataTreeLeaf, + type PublicDataTreeLeafPreimage, +} from '@aztec/circuits.js'; +import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; +import { type PublicStateDB } from '@aztec/simulator'; + +import { type TXE } from '../oracle/txe_oracle.js'; + +export class TXEPublicStateDB implements PublicStateDB { + 
constructor(private txeOracle: TXE) {} + + async storageRead(contract: AztecAddress, slot: Fr): Promise { + const db = this.txeOracle.getTrees().asLatest(); + const leafSlot = computePublicDataTreeLeafSlot(contract, slot).toBigInt(); + + const lowLeafResult = await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot); + + let value = Fr.ZERO; + if (lowLeafResult && lowLeafResult.alreadyPresent) { + const preimage = (await db.getLeafPreimage( + MerkleTreeId.PUBLIC_DATA_TREE, + lowLeafResult.index, + )) as PublicDataTreeLeafPreimage; + value = preimage.value; + } + return value; + } + + async storageWrite(contract: AztecAddress, slot: Fr, newValue: Fr): Promise { + const db = this.txeOracle.getTrees().asLatest(); + + await db.batchInsert( + MerkleTreeId.PUBLIC_DATA_TREE, + [new PublicDataTreeLeaf(computePublicDataTreeLeafSlot(contract, slot), newValue).toBuffer()], + PUBLIC_DATA_SUBTREE_HEIGHT, + ); + return newValue.toBigInt(); + } + + checkpoint(): Promise { + return Promise.resolve(); + } + rollbackToCheckpoint(): Promise { + throw new Error('Cannot rollback'); + } + commit(): Promise { + return Promise.resolve(); + } + rollbackToCommit(): Promise { + throw new Error('Cannot rollback'); + } +} diff --git a/yarn-project/types/package.json b/yarn-project/types/package.json index 4abfe1f9a65..b750c105ca8 100644 --- a/yarn-project/types/package.json +++ b/yarn-project/types/package.json @@ -34,7 +34,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/types/src/abi/contract_artifact.ts b/yarn-project/types/src/abi/contract_artifact.ts index 9fff2b21b6a..91b0eb30ba8 100644 --- a/yarn-project/types/src/abi/contract_artifact.ts +++ b/yarn-project/types/src/abi/contract_artifact.ts @@ -8,6 +8,7 @@ import { type FunctionArtifact, FunctionType, type IntegerValue, + NoteSelector, type 
StructValue, type TypedStructFieldValue, } from '@aztec/foundation/abi'; @@ -57,6 +58,9 @@ export function contractArtifactFromBuffer(buffer: Buffer): ContractArtifact { if (key === 'bytecode' && typeof value === 'string') { return Buffer.from(value, 'base64'); } + if (typeof value === 'object' && value !== null && value.type === 'NoteSelector') { + return new NoteSelector(Number(value.value)); + } if (typeof value === 'object' && value !== null && value.type === 'Fr') { return new Fr(BigInt(value.value)); } @@ -252,7 +256,8 @@ function getNoteTypes(input: NoirCompiledContract) { return notes.reduce((acc: Record, note) => { const name = note.fields[1].value as string; - const id = new Fr(BigInt(note.fields[0].value)); + // Note id is encoded as a hex string + const id = NoteSelector.fromField(Fr.fromString(note.fields[0].value)); acc[name] = { id, typ: name, diff --git a/yarn-project/world-state/package.json b/yarn-project/world-state/package.json index 5e3dd632a20..2f2b53a1cdf 100644 --- a/yarn-project/world-state/package.json +++ b/yarn-project/world-state/package.json @@ -29,7 +29,15 @@ "rootDir": "./src", "transform": { "^.+\\.tsx?$": [ - "@swc/jest" + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } ] }, "extensionsToTreatAsEsm": [ diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index cc328c7abd1..1b9ed2231ac 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -57,6 +57,7 @@ __metadata: "@aztec/l1-artifacts": "workspace:^" "@aztec/noir-contracts.js": "workspace:^" "@aztec/protocol-contracts": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 "@types/debug": ^4.1.7 @@ -119,6 +120,7 @@ __metadata: "@aztec/prover-client": "workspace:^" "@aztec/sequencer-client": "workspace:^" "@aztec/simulator": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@aztec/types": "workspace:^" "@aztec/world-state": "workspace:^" 
"@jest/globals": ^29.5.0 @@ -158,7 +160,6 @@ __metadata: ts-loader: ^9.4.4 ts-node: ^10.9.1 tslib: ^2.4.0 - tty-browserify: ^0.0.1 typescript: ^5.0.4 util: ^0.12.5 webpack: ^5.88.2 @@ -207,6 +208,7 @@ __metadata: "@aztec/protocol-contracts": "workspace:^" "@aztec/prover-client": "workspace:^" "@aztec/pxe": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@jest/globals": ^29.5.0 "@types/jest": ^29.5.0 "@types/koa": ^2.13.6 @@ -234,6 +236,7 @@ __metadata: "@aztec/foundation": "workspace:^" "@aztec/noir-protocol-circuits-types": "workspace:^" "@aztec/simulator": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@jest/globals": ^29.5.0 "@noir-lang/noirc_abi": "portal:../../noir/packages/noirc_abi" "@noir-lang/types": "portal:../../noir/packages/types" @@ -423,6 +426,7 @@ __metadata: "@aztec/pxe": "workspace:^" "@aztec/sequencer-client": "workspace:^" "@aztec/simulator": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@aztec/types": "workspace:^" "@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 @@ -464,7 +468,6 @@ __metadata: ts-loader: ^9.4.4 ts-node: ^10.9.1 tslib: ^2.4.0 - tty-browserify: ^0.0.1 typescript: ^5.0.4 util: ^0.12.5 viem: ^2.7.15 @@ -708,6 +711,7 @@ __metadata: "@aztec/circuits.js": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/kv-store": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@chainsafe/discv5": 9.0.0 "@chainsafe/enr": 3.0.0 "@chainsafe/libp2p-gossipsub": 13.0.0 @@ -776,6 +780,7 @@ __metadata: "@aztec/kv-store": "workspace:^" "@aztec/noir-protocol-circuits-types": "workspace:^" "@aztec/simulator": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 "@noir-lang/types": "portal:../../noir/packages/types" @@ -872,6 +877,7 @@ __metadata: "@aztec/p2p": "workspace:^" "@aztec/protocol-contracts": "workspace:^" "@aztec/simulator": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@aztec/types": "workspace:^" 
"@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 @@ -911,6 +917,7 @@ __metadata: "@aztec/noir-contracts.js": "workspace:^" "@aztec/noir-protocol-circuits-types": "workspace:^" "@aztec/protocol-contracts": "workspace:^" + "@aztec/telemetry-client": "workspace:^" "@aztec/types": "workspace:^" "@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 @@ -933,6 +940,27 @@ __metadata: languageName: unknown linkType: soft +"@aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": + version: 0.0.0-use.local + resolution: "@aztec/telemetry-client@workspace:telemetry-client" + dependencies: + "@aztec/foundation": "workspace:^" + "@jest/globals": ^29.5.0 + "@opentelemetry/api": ^1.9.0 + "@opentelemetry/exporter-metrics-otlp-http": ^0.52.0 + "@opentelemetry/exporter-trace-otlp-http": ^0.52.0 + "@opentelemetry/host-metrics": ^0.35.2 + "@opentelemetry/resources": ^1.25.0 + "@opentelemetry/sdk-metrics": ^1.25.0 + "@opentelemetry/sdk-trace-node": ^1.25.0 + "@opentelemetry/semantic-conventions": ^1.25.0 + "@types/jest": ^29.5.0 + jest: ^29.5.0 + ts-node: ^10.9.1 + typescript: ^5.0.4 + languageName: unknown + linkType: soft + "@aztec/txe@workspace:txe": version: 0.0.0-use.local resolution: "@aztec/txe@workspace:txe" @@ -3027,6 +3055,209 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/api-logs@npm:0.52.0": + version: 0.52.0 + resolution: "@opentelemetry/api-logs@npm:0.52.0" + dependencies: + "@opentelemetry/api": ^1.0.0 + checksum: 502f60fd3a4b08fb7e54eaf22d0415e34dcbc9995696945eff8a4a12910e933149900cc470fb476b9411b4bbb98f8b598e3f4d4a37137698fcf0a7ea6ab240d6 + languageName: node + linkType: hard + +"@opentelemetry/api@npm:^1.0.0, @opentelemetry/api@npm:^1.9.0": + version: 1.9.0 + resolution: "@opentelemetry/api@npm:1.9.0" + checksum: 9e88e59d53ced668f3daaecfd721071c5b85a67dd386f1c6f051d1be54375d850016c881f656ffbe9a03bedae85f7e89c2f2b635313f9c9b195ad033cdc31020 + languageName: node + linkType: hard + 
+"@opentelemetry/context-async-hooks@npm:1.25.0": + version: 1.25.0 + resolution: "@opentelemetry/context-async-hooks@npm:1.25.0" + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: f50f6ef621b6cfaa1d0919e4470b7c8326371beaf6be9a635c6f3221677bf9f5429a81a29b5518a41d3c002e35d4a89cb748ae61f650d61aa2ae3cbe123c0301 + languageName: node + linkType: hard + +"@opentelemetry/core@npm:1.25.0": + version: 1.25.0 + resolution: "@opentelemetry/core@npm:1.25.0" + dependencies: + "@opentelemetry/semantic-conventions": 1.25.0 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: 46a851081e95ff1b9e3f8b518d064fd25c342522f11f0a082a9692bbfbcd947ed6602372f370fab48f8cbc8ebd7358dfa094e6d31bd26f4696b9bde418296045 + languageName: node + linkType: hard + +"@opentelemetry/exporter-metrics-otlp-http@npm:^0.52.0": + version: 0.52.0 + resolution: "@opentelemetry/exporter-metrics-otlp-http@npm:0.52.0" + dependencies: + "@opentelemetry/core": 1.25.0 + "@opentelemetry/otlp-exporter-base": 0.52.0 + "@opentelemetry/otlp-transformer": 0.52.0 + "@opentelemetry/resources": 1.25.0 + "@opentelemetry/sdk-metrics": 1.25.0 + peerDependencies: + "@opentelemetry/api": ^1.3.0 + checksum: 8438733189879e3162ab4a374d7f22a4f9655257cbcde156f1041954cbc86bfab7299e696df49187684f1c219a76b263e6489c411b7008b81a05d5b0e7dcd92d + languageName: node + linkType: hard + +"@opentelemetry/exporter-trace-otlp-http@npm:^0.52.0": + version: 0.52.0 + resolution: "@opentelemetry/exporter-trace-otlp-http@npm:0.52.0" + dependencies: + "@opentelemetry/core": 1.25.0 + "@opentelemetry/otlp-exporter-base": 0.52.0 + "@opentelemetry/otlp-transformer": 0.52.0 + "@opentelemetry/resources": 1.25.0 + "@opentelemetry/sdk-trace-base": 1.25.0 + peerDependencies: + "@opentelemetry/api": ^1.0.0 + checksum: bed18523289c579b8108b1c3fcb2b74361bed2d7f3016270feb080a047fa422fc9dfb0678ff1b726cb1e0fa9413cead5824e7f97d1d781467aa983a87fe1ee93 + languageName: node + linkType: hard + 
+"@opentelemetry/host-metrics@npm:^0.35.2": + version: 0.35.2 + resolution: "@opentelemetry/host-metrics@npm:0.35.2" + dependencies: + "@opentelemetry/sdk-metrics": ^1.8.0 + systeminformation: 5.22.9 + peerDependencies: + "@opentelemetry/api": ^1.3.0 + checksum: 541df2585f9cbf8b6606f6782a2d351383f7a5b0a92b92ad4011ac46adac513474463d0c2474d6902d9d6d3b633be67c60ea0716ea2de277cebc1cb2538fa7a4 + languageName: node + linkType: hard + +"@opentelemetry/otlp-exporter-base@npm:0.52.0": + version: 0.52.0 + resolution: "@opentelemetry/otlp-exporter-base@npm:0.52.0" + dependencies: + "@opentelemetry/core": 1.25.0 + "@opentelemetry/otlp-transformer": 0.52.0 + peerDependencies: + "@opentelemetry/api": ^1.0.0 + checksum: 5230ba86d274f4d05fa2820a21e8278d796a299299e2af96150085c871427fe5ef4c6fa4954cdc1b8cdd0a87d5d6677ca0e547cc51253968572a6ede51f63ea2 + languageName: node + linkType: hard + +"@opentelemetry/otlp-transformer@npm:0.52.0": + version: 0.52.0 + resolution: "@opentelemetry/otlp-transformer@npm:0.52.0" + dependencies: + "@opentelemetry/api-logs": 0.52.0 + "@opentelemetry/core": 1.25.0 + "@opentelemetry/resources": 1.25.0 + "@opentelemetry/sdk-logs": 0.52.0 + "@opentelemetry/sdk-metrics": 1.25.0 + "@opentelemetry/sdk-trace-base": 1.25.0 + protobufjs: ^7.3.0 + peerDependencies: + "@opentelemetry/api": ">=1.3.0 <1.10.0" + checksum: 5f75f41a710e5e536faecdec7b1687352e450d185d12613bbcbb206570d96ca2833db15e1d7945cb27040a04c017135b07df2f607ccf9ca9a061f86ad87e8c35 + languageName: node + linkType: hard + +"@opentelemetry/propagator-b3@npm:1.25.0": + version: 1.25.0 + resolution: "@opentelemetry/propagator-b3@npm:1.25.0" + dependencies: + "@opentelemetry/core": 1.25.0 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: 5e8a0feec400ebb20644ee217f904ec8894ccad49b753e80c5e131a4f3390504ca3fd17de58ff546313dedc6498dbd198ff83acc3d8084a205e1d901cfc0bb2d + languageName: node + linkType: hard + +"@opentelemetry/propagator-jaeger@npm:1.25.0": + version: 1.25.0 + resolution: 
"@opentelemetry/propagator-jaeger@npm:1.25.0" + dependencies: + "@opentelemetry/core": 1.25.0 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: c652b4285e254041654a5153649f822b8e2eaa526b67e0a8c56c4eb173d9d0d0efa41ffed3f7dcdd1c2c2b85365cd05e001ee145e8701e4af9d7eef79488ca18 + languageName: node + linkType: hard + +"@opentelemetry/resources@npm:1.25.0, @opentelemetry/resources@npm:^1.25.0": + version: 1.25.0 + resolution: "@opentelemetry/resources@npm:1.25.0" + dependencies: + "@opentelemetry/core": 1.25.0 + "@opentelemetry/semantic-conventions": 1.25.0 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: 6b9e59b7fc70944b418a1ae61396ec82d80869b2918bc664e3bd6d302ddc217e2e8fc5e37bcbd04bac46234f2057a005fa2a657caa1288a5c4ab7b697b0665cb + languageName: node + linkType: hard + +"@opentelemetry/sdk-logs@npm:0.52.0": + version: 0.52.0 + resolution: "@opentelemetry/sdk-logs@npm:0.52.0" + dependencies: + "@opentelemetry/api-logs": 0.52.0 + "@opentelemetry/core": 1.25.0 + "@opentelemetry/resources": 1.25.0 + peerDependencies: + "@opentelemetry/api": ">=1.4.0 <1.10.0" + checksum: 7bf7aed40a168866d76e2260237f6cec9c82acaebcc02a3597985b2be644e4aebf69e0f57739e7fd7cc8e75ecd0bdc98b0429ea985d7de6064148477ffd6432e + languageName: node + linkType: hard + +"@opentelemetry/sdk-metrics@npm:1.25.0, @opentelemetry/sdk-metrics@npm:^1.25.0, @opentelemetry/sdk-metrics@npm:^1.8.0": + version: 1.25.0 + resolution: "@opentelemetry/sdk-metrics@npm:1.25.0" + dependencies: + "@opentelemetry/core": 1.25.0 + "@opentelemetry/resources": 1.25.0 + lodash.merge: ^4.6.2 + peerDependencies: + "@opentelemetry/api": ">=1.3.0 <1.10.0" + checksum: dcb3e80bb41f937db77cb2a91574e2e434875b1740fdcff657d4223ce40002039dac915640a981deada86d53961607150b52fe32497b19c6a17dfd5fb9ed3f05 + languageName: node + linkType: hard + +"@opentelemetry/sdk-trace-base@npm:1.25.0": + version: 1.25.0 + resolution: "@opentelemetry/sdk-trace-base@npm:1.25.0" + dependencies: + 
"@opentelemetry/core": 1.25.0 + "@opentelemetry/resources": 1.25.0 + "@opentelemetry/semantic-conventions": 1.25.0 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: 4c0ce40dbe9dcf5e5f79c60c44ffadb6806f1a8cf45c13d901ea6a2345f6cf26a83a1dad4358859fcf941e01f8bd8654f907f88137d5051e023211f8d645e959 + languageName: node + linkType: hard + +"@opentelemetry/sdk-trace-node@npm:^1.25.0": + version: 1.25.0 + resolution: "@opentelemetry/sdk-trace-node@npm:1.25.0" + dependencies: + "@opentelemetry/context-async-hooks": 1.25.0 + "@opentelemetry/core": 1.25.0 + "@opentelemetry/propagator-b3": 1.25.0 + "@opentelemetry/propagator-jaeger": 1.25.0 + "@opentelemetry/sdk-trace-base": 1.25.0 + semver: ^7.5.2 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: 22a0a61a6c092841ef4438f914edd259d3025078cc9331aaac340c624c2963aa6fdc4970ade5a0e6647c64e92e893ebde0b8ecdd021abac5358ea3c814a5c01c + languageName: node + linkType: hard + +"@opentelemetry/semantic-conventions@npm:1.25.0, @opentelemetry/semantic-conventions@npm:^1.25.0": + version: 1.25.0 + resolution: "@opentelemetry/semantic-conventions@npm:1.25.0" + checksum: 8c9d36f57f0d3d1d4945effe626894ffea860b4be4d5257666ee28b90843ce22694c5b01f9b25ed47a08043958b7e89a65b7ae8e4128f5ed72dcdfe71ac7a19a + languageName: node + linkType: hard + "@pkgjs/parseargs@npm:^0.11.0": version: 0.11.0 resolution: "@pkgjs/parseargs@npm:0.11.0" @@ -3034,6 +3265,79 @@ __metadata: languageName: node linkType: hard +"@protobufjs/aspromise@npm:^1.1.1, @protobufjs/aspromise@npm:^1.1.2": + version: 1.1.2 + resolution: "@protobufjs/aspromise@npm:1.1.2" + checksum: 011fe7ef0826b0fd1a95935a033a3c0fd08483903e1aa8f8b4e0704e3233406abb9ee25350ec0c20bbecb2aad8da0dcea58b392bbd77d6690736f02c143865d2 + languageName: node + linkType: hard + +"@protobufjs/base64@npm:^1.1.2": + version: 1.1.2 + resolution: "@protobufjs/base64@npm:1.1.2" + checksum: 
67173ac34de1e242c55da52c2f5bdc65505d82453893f9b51dc74af9fe4c065cf4a657a4538e91b0d4a1a1e0a0642215e31894c31650ff6e3831471061e1ee9e + languageName: node + linkType: hard + +"@protobufjs/codegen@npm:^2.0.4": + version: 2.0.4 + resolution: "@protobufjs/codegen@npm:2.0.4" + checksum: 59240c850b1d3d0b56d8f8098dd04787dcaec5c5bd8de186fa548de86b86076e1c50e80144b90335e705a044edf5bc8b0998548474c2a10a98c7e004a1547e4b + languageName: node + linkType: hard + +"@protobufjs/eventemitter@npm:^1.1.0": + version: 1.1.0 + resolution: "@protobufjs/eventemitter@npm:1.1.0" + checksum: 0369163a3d226851682f855f81413cbf166cd98f131edb94a0f67f79e75342d86e89df9d7a1df08ac28be2bc77e0a7f0200526bb6c2a407abbfee1f0262d5fd7 + languageName: node + linkType: hard + +"@protobufjs/fetch@npm:^1.1.0": + version: 1.1.0 + resolution: "@protobufjs/fetch@npm:1.1.0" + dependencies: + "@protobufjs/aspromise": ^1.1.1 + "@protobufjs/inquire": ^1.1.0 + checksum: 3fce7e09eb3f1171dd55a192066450f65324fd5f7cc01a431df01bb00d0a895e6bfb5b0c5561ce157ee1d886349c90703d10a4e11a1a256418ff591b969b3477 + languageName: node + linkType: hard + +"@protobufjs/float@npm:^1.0.2": + version: 1.0.2 + resolution: "@protobufjs/float@npm:1.0.2" + checksum: 5781e1241270b8bd1591d324ca9e3a3128d2f768077a446187a049e36505e91bc4156ed5ac3159c3ce3d2ba3743dbc757b051b2d723eea9cd367bfd54ab29b2f + languageName: node + linkType: hard + +"@protobufjs/inquire@npm:^1.1.0": + version: 1.1.0 + resolution: "@protobufjs/inquire@npm:1.1.0" + checksum: ca06f02eaf65ca36fb7498fc3492b7fc087bfcc85c702bac5b86fad34b692bdce4990e0ef444c1e2aea8c034227bd1f0484be02810d5d7e931c55445555646f4 + languageName: node + linkType: hard + +"@protobufjs/path@npm:^1.1.2": + version: 1.1.2 + resolution: "@protobufjs/path@npm:1.1.2" + checksum: 856eeb532b16a7aac071cacde5c5620df800db4c80cee6dbc56380524736205aae21e5ae47739114bf669ab5e8ba0e767a282ad894f3b5e124197cb9224445ee + languageName: node + linkType: hard + +"@protobufjs/pool@npm:^1.1.0": + version: 1.1.0 + resolution: 
"@protobufjs/pool@npm:1.1.0" + checksum: d6a34fbbd24f729e2a10ee915b74e1d77d52214de626b921b2d77288bd8f2386808da2315080f2905761527cceffe7ec34c7647bd21a5ae41a25e8212ff79451 + languageName: node + linkType: hard + +"@protobufjs/utf8@npm:^1.1.0": + version: 1.1.0 + resolution: "@protobufjs/utf8@npm:1.1.0" + checksum: f9bf3163d13aaa3b6f5e6fbf37a116e094ea021c0e1f2a7ccd0e12a29e2ce08dafba4e8b36e13f8ed7397e1591610ce880ed1289af4d66cf4ace8a36a9557278 + languageName: node + linkType: hard + "@puppeteer/browsers@npm:2.2.3": version: 2.2.3 resolution: "@puppeteer/browsers@npm:2.2.3" @@ -3922,6 +4226,15 @@ __metadata: languageName: node linkType: hard +"@types/node@npm:>=13.7.0": + version: 20.14.2 + resolution: "@types/node@npm:20.14.2" + dependencies: + undici-types: ~5.26.4 + checksum: 265362479b8f3b50fcd1e3f9e9af6121feb01a478dff0335ae67cccc3babfe45d0f12209d3d350595eebd7e67471762697b877c380513f8e5d27a238fa50c805 + languageName: node + linkType: hard + "@types/node@npm:^18.14.6, @types/node@npm:^18.15.11, @types/node@npm:^18.15.3, @types/node@npm:^18.7.23": version: 18.19.33 resolution: "@types/node@npm:18.19.33" @@ -10271,6 +10584,13 @@ __metadata: languageName: node linkType: hard +"long@npm:^5.0.0": + version: 5.2.3 + resolution: "long@npm:5.2.3" + checksum: 885ede7c3de4facccbd2cacc6168bae3a02c3e836159ea4252c87b6e34d40af819824b2d4edce330bfb5c4d6e8ce3ec5864bdcf9473fa1f53a4f8225860e5897 + languageName: node + linkType: hard + "lru-cache@npm:^10.0.1, lru-cache@npm:^10.1.0, lru-cache@npm:^10.2.0": version: 10.2.2 resolution: "lru-cache@npm:10.2.2" @@ -11794,6 +12114,26 @@ __metadata: languageName: node linkType: hard +"protobufjs@npm:^7.3.0": + version: 7.3.2 + resolution: "protobufjs@npm:7.3.2" + dependencies: + "@protobufjs/aspromise": ^1.1.2 + "@protobufjs/base64": ^1.1.2 + "@protobufjs/codegen": ^2.0.4 + "@protobufjs/eventemitter": ^1.1.0 + "@protobufjs/fetch": ^1.1.0 + "@protobufjs/float": ^1.0.2 + "@protobufjs/inquire": ^1.1.0 + "@protobufjs/path": ^1.1.2 + 
"@protobufjs/pool": ^1.1.0 + "@protobufjs/utf8": ^1.1.0 + "@types/node": ">=13.7.0" + long: ^5.0.0 + checksum: cfb2a744787f26ee7c82f3e7c4b72cfc000e9bb4c07828ed78eb414db0ea97a340c0cc3264d0e88606592f847b12c0351411f10e9af255b7ba864eec44d7705f + languageName: node + linkType: hard + "protons-runtime@npm:5.4.0, protons-runtime@npm:^5.0.0, protons-runtime@npm:^5.4.0": version: 5.4.0 resolution: "protons-runtime@npm:5.4.0" @@ -12486,6 +12826,15 @@ __metadata: languageName: node linkType: hard +"semver@npm:^7.5.2": + version: 7.6.2 + resolution: "semver@npm:7.6.2" + bin: + semver: bin/semver.js + checksum: 40f6a95101e8d854357a644da1b8dd9d93ce786d5c6a77227bc69dbb17bea83d0d1d1d7c4cd5920a6df909f48e8bd8a5909869535007f90278289f2451d0292d + languageName: node + linkType: hard + "serialize-javascript@npm:^6.0.1": version: 6.0.2 resolution: "serialize-javascript@npm:6.0.2" @@ -13200,6 +13549,16 @@ __metadata: languageName: node linkType: hard +"systeminformation@npm:5.22.9": + version: 5.22.9 + resolution: "systeminformation@npm:5.22.9" + bin: + systeminformation: lib/cli.js + checksum: c605e568395041e57483722b38802928bc6122e347f9e1c6a9588b30297e28c19ffb425be0306fcd6e4f14cd443fa0bbbb407e69ef15d891f6776946718b26bb + conditions: (os=darwin | os=linux | os=win32 | os=freebsd | os=openbsd | os=netbsd | os=sunos | os=android) + languageName: node + linkType: hard + "table-layout@npm:^1.0.2": version: 1.0.2 resolution: "table-layout@npm:1.0.2" @@ -13608,13 +13967,6 @@ __metadata: languageName: node linkType: hard -"tty-browserify@npm:^0.0.1": - version: 0.0.1 - resolution: "tty-browserify@npm:0.0.1" - checksum: 93b745d43fa5a7d2b948fa23be8d313576d1d884b48acd957c07710bac1c0d8ac34c0556ad4c57c73d36e11741763ef66b3fb4fb97b06b7e4d525315a3cd45f5 - languageName: node - linkType: hard - "type-check@npm:^0.4.0, type-check@npm:~0.4.0": version: 0.4.0 resolution: "type-check@npm:0.4.0" From c19aae17635b27aa4fe9abeac6010b2614fe71aa Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 26 Jun 2024 14:07:54 
+0000 Subject: [PATCH 14/21] undo formatting --- docs/docs/migration_notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index a67a72ab615..776ee4ac9ef 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -1668,4 +1668,4 @@ Now, just remove the `src` folder,: ```rust easy_private_token_contract = {git = "https://github.com/AztecProtocol/aztec-packages/", tag ="v0.17.0", directory = "noir-projects/noir-contracts/contracts/easy_private_token_contract"} -``` +``` \ No newline at end of file From 38bc061a58e0b6b842b934c9f0359bb1bf491f73 Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 26 Jun 2024 14:08:44 +0000 Subject: [PATCH 15/21] undo formatting again --- docs/docs/migration_notes.md | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index 776ee4ac9ef..e9e1abb4d16 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -7,9 +7,7 @@ keywords: [sandbox, aztec, notes, migration, updating, upgrading] Aztec is in full-speed development. Literally every version breaks compatibility with the previous ones. This page attempts to target errors and difficulties you might encounter when upgrading, and how to resolve them. ## 0.44.0 - ### [Aztec.nr] Autogenerate Serialize methods for events - ```diff #[aztec(event)] struct WithdrawalProcessed { @@ -25,11 +23,10 @@ struct WithdrawalProcessed { ``` ### [Aztec.nr] rename `encode_and_encrypt_with_keys` to `encode_and_encrypt_note_with_keys` - -````diff +```diff contract XYZ { - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_with_keys; -+ use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys; ++ use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys; .... 
- numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); @@ -89,7 +86,7 @@ These changes were done because having the note hash exposed allowed us to not h + (note_hash_for_nullify, nullifier) + } + } -```` +``` ### [Aztec.nr] `note_getter` returns `BoundedVec` @@ -646,7 +643,7 @@ This change was made to communicate that we do not constrain the value in circui Historically it have been possible to "view" `unconstrained` functions to simulate them and get the return values, but not for `public` nor `private` functions. This has lead to a lot of bad code where we have the same function implemented thrice, once in `private`, once in `public` and once in `unconstrained`. It is not possible to call `simulate` on any call to get the return values! -However, beware that it currently always returns a Field array of size 4 for private and public. +However, beware that it currently always returns a Field array of size 4 for private and public. This will change to become similar to the return values of the `unconstrained` functions with proper return types. 
```diff From a04678ae3ac126cc6ac9a5bb62907d84510fe42a Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 26 Jun 2024 14:11:08 +0000 Subject: [PATCH 16/21] cancel-in-progress --- .github/workflows/devnet-deploys.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index 80fa8aae0c2..1120fef4628 100644 --- a/.github/workflows/devnet-deploys.yml +++ b/.github/workflows/devnet-deploys.yml @@ -3,6 +3,10 @@ on: push: branches: [devnet] +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + env: DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} GIT_COMMIT: ${{ github.sha }} From cf32b29f46ed08f2cae84b445fa8345143523677 Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 26 Jun 2024 14:16:48 +0000 Subject: [PATCH 17/21] fix ecs task def name --- yarn-project/p2p-bootstrap/terraform/main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 3595f7a102b..7a0e55d9c73 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -104,7 +104,7 @@ resource "aws_ecs_task_definition" "p2p-bootstrap" { container_definitions = < Date: Wed, 26 Jun 2024 14:44:47 +0000 Subject: [PATCH 18/21] fix aws_ecs_service name ref --- yarn-project/p2p-bootstrap/terraform/main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 7a0e55d9c73..85fa61ad536 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -167,7 +167,7 @@ DEFINITIONS resource "aws_ecs_service" "p2p-bootstrap" { count = local.bootnode_count - name = "${var.DEPLOY_TAG}-p2p-bootstrap-${count.index + 1}" + name = "${var.DEPLOY_TAG}-p2p-bootstrap-node-${count.index + 1}" cluster = 
data.terraform_remote_state.setup_iac.outputs.ecs_cluster_id launch_type = "FARGATE" desired_count = 1 From b400d3b360175bd293b36dcafc4800022189c2f3 Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 26 Jun 2024 15:03:25 +0000 Subject: [PATCH 19/21] fix container names --- yarn-project/p2p-bootstrap/terraform/main.tf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 85fa61ad536..31b76cb33cd 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -185,13 +185,13 @@ resource "aws_ecs_service" "p2p-bootstrap" { service_registries { registry_arn = aws_service_discovery_service.p2p-bootstrap[count.index].arn - container_name = "${var.DEPLOY_TAG}-p2p-bootstrap-${count.index + 1}" + container_name = "${var.DEPLOY_TAG}-p2p-bootstrap-node-${count.index + 1}" container_port = 80 } load_balancer { target_group_arn = aws_lb_target_group.p2p-bootstrap-target-group-udp[count.index].id - container_name = "${var.DEPLOY_TAG}-p2p-bootstrap-${count.index + 1}" + container_name = "${var.DEPLOY_TAG}-p2p-bootstrap-node-${count.index + 1}" container_port = var.BOOTNODE_LISTEN_PORT + count.index } From bf8c93967ee44d4f620b462d0318c36c35c287ec Mon Sep 17 00:00:00 2001 From: spypsy Date: Fri, 28 Jun 2024 11:33:45 +0000 Subject: [PATCH 20/21] PR fixes --- yarn-project/aztec/terraform/node/variables.tf | 5 ----- yarn-project/aztec/terraform/prover/variables.tf | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/yarn-project/aztec/terraform/node/variables.tf b/yarn-project/aztec/terraform/node/variables.tf index f761cfee2e7..36cb280645f 100644 --- a/yarn-project/aztec/terraform/node/variables.tf +++ b/yarn-project/aztec/terraform/node/variables.tf @@ -72,8 +72,3 @@ variable "PROVING_ENABLED" { type = bool default = true } - -variable "AGENTS_PER_SEQUENCER" { - type = string - default = 4 -} diff --git 
a/yarn-project/aztec/terraform/prover/variables.tf b/yarn-project/aztec/terraform/prover/variables.tf index 79ae2219a09..e6c6865d26d 100644 --- a/yarn-project/aztec/terraform/prover/variables.tf +++ b/yarn-project/aztec/terraform/prover/variables.tf @@ -4,7 +4,7 @@ variable "DEPLOY_TAG" { variable "AGENTS_PER_SEQUENCER" { type = string - default = 4 + default = 1 } variable "PROVING_ENABLED" { From 543f660517b211d77dd560b5563e161224390acc Mon Sep 17 00:00:00 2001 From: spypsy Date: Fri, 28 Jun 2024 11:45:17 +0000 Subject: [PATCH 21/21] remove ref from main.tf --- yarn-project/aztec/terraform/node/main.tf | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index a1a52ffeb41..d446d334c89 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -57,9 +57,7 @@ locals { publisher_private_keys = [var.SEQ_1_PUBLISHER_PRIVATE_KEY, var.SEQ_2_PUBLISHER_PRIVATE_KEY] node_p2p_private_keys = [var.NODE_1_PRIVATE_KEY, var.NODE_2_PRIVATE_KEY] node_count = length(local.publisher_private_keys) - #node_count = 1 - data_dir = "/usr/src/yarn-project/aztec/data" - agents_per_sequencer = var.AGENTS_PER_SEQUENCER + data_dir = "/usr/src/yarn-project/aztec/data" } output "node_count" {