chore(ci): Re-enable certain bb solidity ACIR tests (#5065)
After some of the recursion cleanup in #4221, it isn't clear why
`double_verify_proof` is failing the solidity verifier.

`double_verify_proof`'s proof was itself being used as a recursive proof,
verified inside of `double_verify_nested_proof`. I have renamed this
test to `double_verify_proof_recursive` to make clear that its proof is meant
to be used as input to another circuit.

I have also included a new `double_verify_proof` test that accepts
two non-nested proofs and uses the Keccak prover, which is the behavior
we previously expected from `double_verify_proof`. I also brought back the
`barretenberg-acir-tests-bb-sol` job for a few tests.
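
The naming now tracks whether a circuit carries Noir's `#[recursive]` attribute (see the `main.nr` diffs below). A minimal sketch of the distinction — the trivial `assert` body and the file paths are illustrative only, not the real circuits, which appear in the diffs further down:

// double_verify_proof_recursive/src/main.nr (hypothetical path): the attribute tells the
// backend to emit a SNARK-recursion-friendly proof, suitable as input to another circuit.
#[recursive]
fn main(x: Field, y: pub Field) {
    assert(x == y);
}

// double_verify_proof/src/main.nr (hypothetical path): no attribute, so the proof is
// produced with the Keccak-transcript prover that the solidity verifier expects.
fn main(x: Field, y: pub Field) {
    assert(x == y);
}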

---------

Co-authored-by: Maddiaa <[email protected]>
vezenovm and Maddiaa0 authored Mar 8, 2024
1 parent f3d9f9b commit 58e1ff4
Showing 8 changed files with 72 additions and 10 deletions.
26 changes: 21 additions & 5 deletions .circleci/config.yml
@@ -380,6 +380,18 @@ jobs:
           command: cond_spot_run_build barretenberg-acir-tests-bb 32
           aztec_manifest_key: barretenberg-acir-tests-bb
 
+  barretenberg-acir-tests-bb-sol:
+    docker:
+      - image: aztecprotocol/alpine-build-image
+    resource_class: small
+    steps:
+      - *checkout
+      - *setup_env
+      - run:
+          name: "Build and test"
+          command: cond_spot_run_build barretenberg-acir-tests-bb-sol 32
+          aztec_manifest_key: barretenberg-acir-tests-bb-sol
+
   bb-js:
     machine:
       image: default
@@ -1217,6 +1229,12 @@ defaults: &defaults
     - slack/notify:
         event: fail
         branch_pattern: "master"
 
+bb_acir_tests: &bb_acir_tests
+  requires:
+    - barretenberg-x86_64-linux-clang-assert
+    - noir-compile-acir-tests
+  <<: *defaults
+
 defaults_yarn_project_pre_join: &defaults_yarn_project_pre_join
   requires:
@@ -1293,11 +1311,8 @@ workflows:
       - barretenberg-stdlib-tests: *bb_test
       - barretenberg-stdlib-recursion-ultra-tests: *bb_test
       - barretenberg-join-split-tests: *bb_test
-      - barretenberg-acir-tests-bb:
-          requires:
-            - barretenberg-x86_64-linux-clang-assert
-            - noir-compile-acir-tests
-          <<: *defaults
+      - barretenberg-acir-tests-bb: *bb_acir_tests
+      - barretenberg-acir-tests-bb-sol: *bb_acir_tests
       - barretenberg-docs: *defaults
       - bb-js:
           requires:
@@ -1433,6 +1448,7 @@ workflows:
             - barretenberg-stdlib-recursion-ultra-tests
             - barretenberg-join-split-tests
             - barretenberg-acir-tests-bb
+            - barretenberg-acir-tests-bb-sol
             - barretenberg-docs
             - build-docs
             - mainnet-fork
10 changes: 8 additions & 2 deletions barretenberg/acir_tests/Dockerfile.bb.sol
@@ -10,6 +10,12 @@ COPY --from=noir-acir-tests /usr/src/noir/noir-repo/test_programs /usr/src/noir/
 COPY --from=ghcr.io/foundry-rs/foundry:latest /usr/local/bin/anvil /usr/local/bin/anvil
 WORKDIR /usr/src/barretenberg/acir_tests
 COPY . .
-# Run every acir test through a solidity verifier.
+# Run the relevant acir tests through a solidity verifier.
+# This includes the basic `assert_statement` test that contains a single public input
+# and the recursive aggregation circuits which use the Keccak based prover.
+#
+# NOTE: When circuits are marked `recursive` it means the backend will use a prover that
+# produces SNARK recursion friendly proofs, while the solidity verifier expects proofs
+# whose transcript uses Keccak hashing.
 RUN (cd sol-test && yarn)
-RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh
+RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof
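
The same flow can be exercised outside of CI. A rough sketch, assuming `bb`, the compiled ACIR test programs, and `anvil` are already in place as the image arranges above, and run from the repository's `barretenberg/acir_tests` directory (an assumption mirroring the WORKDIR above):

# Run only the solidity-verifier tests selected in the Dockerfile, in parallel.
PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof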
2 changes: 1 addition & 1 deletion barretenberg/acir_tests/README.md
@@ -59,4 +59,4 @@ To generate a new input you can run the script again. To generate a new file und
 
 You can then copy these inputs over to your working branch in Noir and regenerate the witness for `double_verify_proof`. You can then change the branch in `run_acir_tests.sh` to this Noir working branch as well and `double_verify_proof` should pass.
 
-The same process should then be repeated, but now `double_verify_proof` will be the circuit for which we will be generating recursive inputs using `gen_inner_proof_inputs.sh`. The recursive artifacts should then be supplied as inputs to `double_verify_nested_proof`.
+The same process should then be repeated, but now `double_verify_proof_recursive` will be the circuit for which we will be generating recursive inputs using `gen_inner_proof_inputs.sh`. The recursive artifacts should then be supplied as inputs to `double_verify_nested_proof`.
Nargo.toml (double_verify_proof)
@@ -2,4 +2,6 @@
 name = "double_verify_proof"
 type = "bin"
 authors = [""]
-[dependencies]
+compiler_version = ">=0.24.0"
+
+[dependencies]
main.nr (double_verify_proof)
@@ -1,6 +1,5 @@
 use dep::std;
 
-#[recursive]
 fn main(
     verification_key: [Field; 114],
     // This is the proof without public inputs attached.
Nargo.toml (double_verify_proof_recursive)
@@ -0,0 +1,5 @@
[package]
name = "double_verify_proof_recursive"
type = "bin"
authors = [""]
[dependencies]

Large diffs are not rendered by default.

main.nr (double_verify_proof_recursive)
@@ -0,0 +1,29 @@
use dep::std;

#[recursive]
fn main(
    verification_key: [Field; 114],
    // This is the proof without public inputs attached.
    //
    // This means: the size of this does not change with the number of public inputs.
    proof: [Field; 93],
    public_inputs: pub [Field; 1],
    // This is currently not public. It is fine given that the vk is a part of the circuit definition.
    // I believe we want to eventually make it public too though.
    key_hash: Field,
    proof_b: [Field; 93]
) {
    std::verify_proof(
        verification_key.as_slice(),
        proof.as_slice(),
        public_inputs.as_slice(),
        key_hash
    );

    std::verify_proof(
        verification_key.as_slice(),
        proof_b.as_slice(),
        public_inputs.as_slice(),
        key_hash
    );
}
