From f6646039df429a91f1b7ac60e58de2a173fd6aef Mon Sep 17 00:00:00 2001 From: sirasistant Date: Fri, 9 Feb 2024 14:32:18 +0000 Subject: [PATCH 1/6] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir/.gitrepo b/noir/.gitrepo index f725a01b89b..3081b81b8d0 100644 --- a/noir/.gitrepo +++ b/noir/.gitrepo @@ -7,6 +7,6 @@ remote = https://github.com/noir-lang/noir branch = aztec-packages commit = f1b91511124df89bbe9e059b87536901bdf0d6f3 - parent = f5be1f17beeded5ab1ac59331bd520787b3778a1 + parent = 2082fedfb03d4882a269881f51c5337263bc539b method = merge cmdver = 0.4.6 From c03137bc137961c8edf91b89cbd135df714d5038 Mon Sep 17 00:00:00 2001 From: sirasistant Date: Fri, 9 Feb 2024 14:51:33 +0000 Subject: [PATCH 2/6] git subrepo pull (merge) noir subrepo: subdir: "noir" merged: "5700263e0" upstream: origin: "https://github.com/noir-lang/noir" branch: "aztec-packages" commit: "2521150ed" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
--- noir/.github/ISSUE_TEMPLATE/bug_report.yml | 63 +- .../ISSUE_TEMPLATE/feature_request.yml | 31 +- .../actions/install-playwright/action.yml | 2 +- ...on-test.sh => integration-test-browser.sh} | 3 +- noir/.github/scripts/integration-test-node.sh | 5 + noir/.github/scripts/noir-js-test.sh | 4 +- noir/.github/workflows/docker-test-flow.yml | 59 +- noir/.github/workflows/docs-pr.yml | 12 +- noir/.github/workflows/gates_report.yml | 4 +- .../.github/workflows/publish-es-packages.yml | 14 +- noir/.github/workflows/publish-nargo.yml | 4 +- noir/.github/workflows/test-js-packages.yml | 112 +++- noir/.gitrepo | 2 +- noir/Cargo.lock | 88 +-- noir/Cargo.toml | 3 + noir/acvm-repo/acir/codegen/acir.cpp | 60 +- noir/acvm-repo/acir/src/circuit/directives.rs | 15 +- noir/acvm-repo/acir/src/circuit/mod.rs | 4 + noir/acvm-repo/acir/src/circuit/opcodes.rs | 15 +- noir/acvm-repo/acir_field/src/generic_ark.rs | 10 + .../acvm/src/compiler/transformers/mod.rs | 5 - noir/acvm-repo/acvm/src/pwg/brillig.rs | 4 + noir/acvm-repo/acvm/src/pwg/directives/mod.rs | 37 -- .../acvm/src/pwg/directives/sorting.rs | 396 ------------ .../bn254_blackbox_solver/Cargo.toml | 5 +- noir/acvm-repo/brillig/src/foreign_call.rs | 2 +- noir/acvm-repo/brillig_vm/src/lib.rs | 10 + noir/compiler/integration-tests/package.json | 4 +- noir/compiler/noirc_driver/Cargo.toml | 1 + noir/compiler/noirc_driver/src/lib.rs | 50 +- noir/compiler/noirc_errors/Cargo.toml | 1 + noir/compiler/noirc_errors/src/debug_info.rs | 26 +- .../src/brillig/brillig_gen/brillig_block.rs | 53 +- .../brillig_gen/brillig_block_variables.rs | 8 +- .../src/brillig/brillig_ir/debug_show.rs | 5 +- noir/compiler/noirc_evaluator/src/errors.rs | 18 +- noir/compiler/noirc_evaluator/src/ssa.rs | 29 +- .../src/ssa/acir_gen/acir_ir.rs | 1 - .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 147 ++--- .../ssa/acir_gen/acir_ir/generated_acir.rs | 38 +- .../src/ssa/acir_gen/acir_ir/sort.rs | 88 --- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 196 +++--- 
.../src/ssa/function_builder/mod.rs | 109 +--- .../noirc_evaluator/src/ssa/ir/instruction.rs | 61 +- .../src/ssa/ir/instruction/binary.rs | 34 +- .../src/ssa/ir/instruction/call.rs | 27 +- .../src/ssa/ir/instruction/constrain.rs | 31 +- .../noirc_evaluator/src/ssa/ir/printer.rs | 40 +- .../noirc_evaluator/src/ssa/ir/types.rs | 2 +- .../src/ssa/opt/bubble_up_constrains.rs | 8 +- .../src/ssa/opt/constant_folding.rs | 126 +++- .../src/ssa/opt/defunctionalize.rs | 35 +- .../src/ssa/opt/flatten_cfg.rs | 117 ++-- .../ssa/opt/flatten_cfg/capacity_tracker.rs | 150 +++++ .../src/ssa/opt/flatten_cfg/value_merger.rs | 115 +--- .../noirc_evaluator/src/ssa/opt/mod.rs | 1 + .../src/ssa/opt/remove_bit_shifts.rs | 285 ++++++++ .../src/ssa/ssa_gen/context.rs | 51 +- .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 105 ++- noir/compiler/noirc_frontend/Cargo.toml | 1 + .../noirc_frontend/src/ast/statement.rs | 2 +- noir/compiler/noirc_frontend/src/debug/mod.rs | 607 ++++++++++++++++++ .../src/hir/def_collector/dc_crate.rs | 50 +- .../src/hir/def_collector/dc_mod.rs | 19 +- .../src/hir/def_map/module_data.rs | 4 +- .../src/hir/def_map/module_def.rs | 18 +- noir/compiler/noirc_frontend/src/hir/mod.rs | 5 + .../src/hir/resolution/errors.rs | 9 + .../src/hir/resolution/globals.rs | 15 +- .../src/hir/resolution/resolver.rs | 140 +++- .../src/hir/resolution/structs.rs | 6 +- .../src/hir/type_check/errors.rs | 10 +- .../noirc_frontend/src/hir/type_check/expr.rs | 50 +- .../noirc_frontend/src/hir/type_check/mod.rs | 10 +- .../noirc_frontend/src/hir/type_check/stmt.rs | 24 +- .../noirc_frontend/src/hir_def/function.rs | 7 +- .../noirc_frontend/src/hir_def/stmt.rs | 16 +- .../noirc_frontend/src/hir_def/types.rs | 2 +- noir/compiler/noirc_frontend/src/lib.rs | 3 +- .../src/monomorphization/ast.rs | 14 +- .../src/monomorphization/debug.rs | 190 ++++++ .../src/monomorphization/debug_types.rs | 137 ++++ .../src/monomorphization/mod.rs | 71 +- .../noirc_frontend/src/node_interner.rs | 243 ++++++- 
.../noirc_frontend/src/parser/errors.rs | 9 + .../noirc_frontend/src/parser/parser.rs | 88 ++- noir/compiler/noirc_frontend/src/tests.rs | 24 +- noir/compiler/noirc_printable_type/src/lib.rs | 8 +- noir/compiler/wasm/README.md | 25 +- noir/compiler/wasm/src/index.cts | 30 + noir/compiler/wasm/src/index.mts | 30 + noir/compiler/wasm/src/noir/debug.ts | 5 +- .../noir/file-manager/nodejs-file-manager.ts | 5 +- noir/cspell.json | 2 +- noir/deny.toml | 11 +- .../installation/other_install_methods.md | 120 +++- noir/docs/docs/how_to/how-to-recursion.md | 24 +- noir/docs/docs/noir/concepts/assert.md | 20 +- .../docs/noir/concepts/data_types/fields.md | 9 + .../docs/noir/concepts/data_types/integers.md | 51 +- noir/docs/docs/noir/concepts/globals.md | 72 +++ noir/docs/docs/noir/concepts/mutability.md | 82 ++- noir/docs/docs/noir/concepts/unconstrained.md | 2 +- noir/docs/docs/noir/standard_library/bn254.md | 46 ++ .../docs/noir/standard_library/options.md | 4 + noir/docs/docs/reference/nargo_commands.md | 253 -------- noir/docs/docusaurus.config.ts | 33 +- noir/docs/package.json | 2 +- noir/docs/scripts/codegen_nargo_reference.sh | 33 + .../how_to/solidity_verifier.md | 1 + noir/noir_stdlib/src/array.nr | 55 +- noir/noir_stdlib/src/cmp.nr | 26 - noir/noir_stdlib/src/convert.nr | 9 - noir/noir_stdlib/src/default.nr | 2 - noir/noir_stdlib/src/field/bn254.nr | 11 + noir/noir_stdlib/src/ops.nr | 20 - noir/noir_stdlib/src/option.nr | 6 + noir/noirc_macros/Cargo.toml | 14 + noir/noirc_macros/src/lib.rs | 61 ++ .../nargo_compile_noir_js_assert_lt.sh | 4 - .../assert_msg_runtime/Nargo.toml | 7 + .../assert_msg_runtime/Prover.toml | 2 + .../assert_msg_runtime/src/main.nr | 7 + .../brillig_assert_msg_runtime/Nargo.toml | 7 + .../brillig_assert_msg_runtime/Prover.toml | 1 + .../brillig_assert_msg_runtime/src/main.nr | 10 + .../brillig_mut_ref_from_acir/Nargo.toml | 7 + .../brillig_mut_ref_from_acir/src/main.nr | 8 + .../brillig_slice_to_acir/Nargo.toml | 7 + 
.../brillig_slice_to_acir/src/main.nr | 14 + .../brillig_vec_to_acir/Nargo.toml | 7 + .../brillig_vec_to_acir/src/main.nr | 14 + .../compile_failure/option_expect/Nargo.toml | 7 + .../compile_failure/option_expect/src/main.nr | 8 + .../option_expect_bad_input/Nargo.toml | 7 + .../option_expect_bad_input/src/main.nr | 6 + .../brillig_cast/src/main.nr | 26 +- .../brillig_modulo/src/main.nr | 8 +- .../comptime_sort/src/main.nr | 7 - .../literal_not_simplification/Nargo.toml | 7 + .../literal_not_simplification/src/main.nr | 8 + .../method_call_regression/Nargo.toml | 2 +- .../compile_success_empty/option/src/main.nr | 4 +- .../trait_static_methods/Nargo.toml | 2 +- .../execution_success/5_over/src/main.nr | 4 +- .../execution_success/bit_not/Nargo.toml | 7 + .../execution_success/bit_not/Prover.toml | 1 + .../execution_success/bit_not/src/main.nr | 8 + .../bit_shifts_comptime/src/main.nr | 2 +- .../bit_shifts_runtime/src/main.nr | 1 + .../brillig_assert/src/main.nr | 4 +- .../brillig_bit_shifts_runtime}/Nargo.toml | 3 +- .../brillig_bit_shifts_runtime/Prover.toml | 2 + .../brillig_bit_shifts_runtime/src/main.nr | 20 + .../brillig_cow_assign/Nargo.toml | 7 + .../brillig_cow_assign/Prover.toml | 2 + .../brillig_cow_assign/src/main.nr | 23 + .../src/main.nr | 8 +- .../execution_success/debug_logs/src/main.nr | 7 + .../global_consts/src/main.nr | 6 +- .../missing_closure_env/Nargo.toml | 7 + .../missing_closure_env/Prover.toml | 1 + .../missing_closure_env/src/main.nr | 16 + .../execution_success/regression/src/main.nr | 43 +- .../regression_4202/Nargo.toml | 7 + .../regression_4202/Prover.toml | 1 + .../regression_4202/src/main.nr | 14 + .../execution_success/u128/src/main.nr | 2 +- .../tooling/backend_interface/src/download.rs | 8 +- noir/tooling/bb_abstraction_leaks/build.rs | 2 +- noir/tooling/debugger/Cargo.toml | 3 +- noir/tooling/debugger/build.rs | 14 +- noir/tooling/debugger/ignored-tests.txt | 20 + noir/tooling/debugger/src/context.rs | 134 +++- 
noir/tooling/debugger/src/dap.rs | 195 ++++-- noir/tooling/debugger/src/foreign_calls.rs | 138 ++++ noir/tooling/debugger/src/lib.rs | 1 + noir/tooling/debugger/src/repl.rs | 133 +++- .../debugger/src/source_code_printer.rs | 12 +- noir/tooling/debugger/tests/debug.rs | 2 +- noir/tooling/lsp/src/requests/test_run.rs | 2 +- noir/tooling/nargo/Cargo.toml | 1 - noir/tooling/nargo/src/artifacts/debug.rs | 7 +- .../tooling/nargo/src/artifacts/debug_vars.rs | 117 ++++ noir/tooling/nargo/src/artifacts/mod.rs | 1 + noir/tooling/nargo/src/ops/compile.rs | 26 +- noir/tooling/nargo/src/ops/execute.rs | 27 +- noir/tooling/nargo/src/ops/foreign_calls.rs | 120 +++- noir/tooling/nargo/src/ops/mod.rs | 8 +- noir/tooling/nargo/src/ops/test.rs | 2 +- noir/tooling/nargo_cli/Cargo.toml | 4 + noir/tooling/nargo_cli/src/cli/dap_cmd.rs | 21 +- noir/tooling/nargo_cli/src/cli/debug_cmd.rs | 47 +- noir/tooling/nargo_cli/src/cli/export_cmd.rs | 2 +- noir/tooling/nargo_cli/src/cli/mod.rs | 10 + noir/tooling/nargo_cli/src/cli/test_cmd.rs | 2 +- noir/tooling/nargo_fmt/src/items.rs | 117 ++++ noir/tooling/nargo_fmt/src/lib.rs | 1 + noir/tooling/nargo_fmt/src/rewrite.rs | 2 + noir/tooling/nargo_fmt/src/rewrite/array.rs | 7 +- noir/tooling/nargo_fmt/src/rewrite/imports.rs | 115 ++++ noir/tooling/nargo_fmt/src/utils.rs | 122 +--- noir/tooling/nargo_fmt/src/visitor.rs | 2 +- noir/tooling/nargo_fmt/src/visitor/expr.rs | 20 +- noir/tooling/nargo_fmt/src/visitor/item.rs | 11 +- noir/tooling/nargo_fmt/src/visitor/stmt.rs | 11 +- .../nargo_fmt/tests/expected/assert.nr | 4 + .../nargo_fmt/tests/expected/contract.nr | 10 +- noir/tooling/nargo_fmt/tests/input/assert.nr | 7 + noir/tooling/noir_js/.gitignore | 2 - noir/tooling/noir_js/package.json | 3 +- .../noir_js/scripts/compile_test_programs.sh | 5 + .../tooling/noir_js/src/witness_generation.ts | 6 + .../tooling/noir_js/test/node/execute.test.ts | 15 + .../assert_lt/src/main.nr | 4 + .../assert_lt/target/assert_lt.json | 1 - 
.../assert_msg_runtime/Nargo.toml | 7 + .../assert_msg_runtime/src/main.nr | 6 + .../noir_js_backend_barretenberg/package.json | 2 +- .../noir_js_backend_barretenberg/src/index.ts | 3 +- noir/yarn.lock | 10 +- 221 files changed, 5399 insertions(+), 2313 deletions(-) rename noir/.github/scripts/{integration-test.sh => integration-test-browser.sh} (52%) create mode 100755 noir/.github/scripts/integration-test-node.sh delete mode 100644 noir/acvm-repo/acvm/src/pwg/directives/sorting.rs delete mode 100644 noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/sort.rs create mode 100644 noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs create mode 100644 noir/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs create mode 100644 noir/compiler/noirc_frontend/src/debug/mod.rs create mode 100644 noir/compiler/noirc_frontend/src/monomorphization/debug.rs create mode 100644 noir/compiler/noirc_frontend/src/monomorphization/debug_types.rs create mode 100644 noir/docs/docs/noir/concepts/globals.md create mode 100644 noir/docs/docs/noir/standard_library/bn254.md delete mode 100644 noir/docs/docs/reference/nargo_commands.md create mode 100755 noir/docs/scripts/codegen_nargo_reference.sh create mode 100644 noir/noirc_macros/Cargo.toml create mode 100644 noir/noirc_macros/src/lib.rs delete mode 100755 noir/scripts/nargo_compile_noir_js_assert_lt.sh create mode 100644 noir/test_programs/compile_failure/assert_msg_runtime/Nargo.toml create mode 100644 noir/test_programs/compile_failure/assert_msg_runtime/Prover.toml create mode 100644 noir/test_programs/compile_failure/assert_msg_runtime/src/main.nr create mode 100644 noir/test_programs/compile_failure/brillig_assert_msg_runtime/Nargo.toml create mode 100644 noir/test_programs/compile_failure/brillig_assert_msg_runtime/Prover.toml create mode 100644 noir/test_programs/compile_failure/brillig_assert_msg_runtime/src/main.nr create mode 100644 
noir/test_programs/compile_failure/brillig_mut_ref_from_acir/Nargo.toml create mode 100644 noir/test_programs/compile_failure/brillig_mut_ref_from_acir/src/main.nr create mode 100644 noir/test_programs/compile_failure/brillig_slice_to_acir/Nargo.toml create mode 100644 noir/test_programs/compile_failure/brillig_slice_to_acir/src/main.nr create mode 100644 noir/test_programs/compile_failure/brillig_vec_to_acir/Nargo.toml create mode 100644 noir/test_programs/compile_failure/brillig_vec_to_acir/src/main.nr create mode 100644 noir/test_programs/compile_failure/option_expect/Nargo.toml create mode 100644 noir/test_programs/compile_failure/option_expect/src/main.nr create mode 100644 noir/test_programs/compile_failure/option_expect_bad_input/Nargo.toml create mode 100644 noir/test_programs/compile_failure/option_expect_bad_input/src/main.nr delete mode 100644 noir/test_programs/compile_success_empty/comptime_sort/src/main.nr create mode 100644 noir/test_programs/compile_success_empty/literal_not_simplification/Nargo.toml create mode 100644 noir/test_programs/compile_success_empty/literal_not_simplification/src/main.nr create mode 100644 noir/test_programs/execution_success/bit_not/Nargo.toml create mode 100644 noir/test_programs/execution_success/bit_not/Prover.toml create mode 100644 noir/test_programs/execution_success/bit_not/src/main.nr rename noir/test_programs/{compile_success_empty/comptime_sort => execution_success/brillig_bit_shifts_runtime}/Nargo.toml (58%) create mode 100644 noir/test_programs/execution_success/brillig_bit_shifts_runtime/Prover.toml create mode 100644 noir/test_programs/execution_success/brillig_bit_shifts_runtime/src/main.nr create mode 100644 noir/test_programs/execution_success/brillig_cow_assign/Nargo.toml create mode 100644 noir/test_programs/execution_success/brillig_cow_assign/Prover.toml create mode 100644 noir/test_programs/execution_success/brillig_cow_assign/src/main.nr create mode 100644 
noir/test_programs/execution_success/missing_closure_env/Nargo.toml create mode 100644 noir/test_programs/execution_success/missing_closure_env/Prover.toml create mode 100644 noir/test_programs/execution_success/missing_closure_env/src/main.nr create mode 100644 noir/test_programs/execution_success/regression_4202/Nargo.toml create mode 100644 noir/test_programs/execution_success/regression_4202/Prover.toml create mode 100644 noir/test_programs/execution_success/regression_4202/src/main.nr create mode 100644 noir/tooling/debugger/ignored-tests.txt create mode 100644 noir/tooling/debugger/src/foreign_calls.rs create mode 100644 noir/tooling/nargo/src/artifacts/debug_vars.rs create mode 100644 noir/tooling/nargo_fmt/src/items.rs create mode 100644 noir/tooling/nargo_fmt/src/rewrite/imports.rs create mode 100644 noir/tooling/nargo_fmt/tests/expected/assert.nr create mode 100644 noir/tooling/nargo_fmt/tests/input/assert.nr create mode 100755 noir/tooling/noir_js/scripts/compile_test_programs.sh delete mode 100644 noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/assert_lt.json create mode 100644 noir/tooling/noir_js/test/noir_compiled_examples/assert_msg_runtime/Nargo.toml create mode 100644 noir/tooling/noir_js/test/noir_compiled_examples/assert_msg_runtime/src/main.nr diff --git a/noir/.github/ISSUE_TEMPLATE/bug_report.yml b/noir/.github/ISSUE_TEMPLATE/bug_report.yml index 112da342e10..71207793e53 100644 --- a/noir/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/noir/.github/ISSUE_TEMPLATE/bug_report.yml @@ -6,7 +6,7 @@ body: attributes: value: | # Description - Thanks for taking the time to create the Issue, and welcome to the Noirot family! + Thanks for taking the time to create the Issue and welcome to the Noir community! - type: textarea id: aim attributes: @@ -38,45 +38,69 @@ body: 2. 3. 4. + - type: dropdown + id: impact + attributes: + label: Project Impact + description: How does this affect a project you or others are working on? 
+ options: + - "Nice-to-have" + - "Blocker" + - type: textarea + id: impact_context + attributes: + label: Impact Context + description: If a nice-to-have / blocker, supplement how does this Issue affect the project. + - type: dropdown + id: workaround + attributes: + label: Workaround + description: Is there a workaround for this Issue? + options: + - "Yes" + - type: textarea + id: workaround_description + attributes: + label: Workaround Description + description: If yes, supplement how could the Issue be worked around. + - type: textarea + id: additional + attributes: + label: Additional Context + description: Supplement further information if applicable. - type: markdown attributes: value: | # Environment - Specify your versions of Noir releases used. + Specify your version of Noir tooling used. - type: markdown attributes: value: | - ## Using Nargo? + ## Nargo (CLI) - type: dropdown id: nargo-install attributes: label: Installation Method description: How did you install Nargo? - multiple: false options: - - Binary + - Binary (`noirup` default) - Compiled from source - type: input id: nargo-version attributes: label: Nargo Version - description: What is the output of the `nargo --version` command? - placeholder: "nargo 0.6.0 (git version hash: 0181813203a9e3e46c6d8c3169ad5d25971d4282, is dirty: false)" + description: Output of running `nargo --version` + placeholder: "nargo version = 0.23.0 noirc version = 0.23.0+5be9f9d7e2f39ca228df10e5a530474af0331704 (git version hash: 5be9f9d7e2f39ca228df10e5a530474af0331704, is dirty: false)" - type: markdown attributes: value: | - ## Using TypeScript? - Please await for our new set of packages. - You can find our target release timeframe on the [Noir Roadmap](https://github.com/orgs/noir-lang/projects/1/views/16). 
- - type: markdown - attributes: - value: | - # Misc - - type: textarea - id: additional + ## NoirJS (JavaScript) + - type: input + id: noirjs-version attributes: - label: Additional Context - description: Supplement further information if applicable. + label: NoirJS Version + description: Version number of `noir_js` in `package.json` + placeholder: "0.23.0" - type: markdown attributes: value: | @@ -87,11 +111,8 @@ body: label: Would you like to submit a PR for this Issue? description: Fellow contributors are happy to provide support where applicable. options: - - "No" - "Maybe" - "Yes" - validations: - required: true - type: textarea id: pr_support attributes: diff --git a/noir/.github/ISSUE_TEMPLATE/feature_request.yml b/noir/.github/ISSUE_TEMPLATE/feature_request.yml index 979ac75811e..abbfe392454 100644 --- a/noir/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/noir/.github/ISSUE_TEMPLATE/feature_request.yml @@ -6,7 +6,7 @@ body: attributes: value: | ## Description - Thanks for taking the time to create the Issue, and welcome to the Noirot family! + Thanks for taking the time to create the Issue and welcome to the Noir community! - type: textarea id: problem attributes: @@ -21,11 +21,31 @@ body: description: Describe how you think it should work. Supply pseudocode / step-by-step examples if applicable. validations: required: true + - type: dropdown + id: impact + attributes: + label: Project Impact + description: How does this affect a project you or others are working on? + options: + - "Nice-to-have" + - "Blocker" - type: textarea - id: alternatives + id: impact_context attributes: - label: Alternatives Considered - description: Describe less-happy cases you have considered, if any. + label: Impact Context + description: If a nice-to-have / blocker, supplement how does this Issue affect the project. + - type: dropdown + id: workaround + attributes: + label: Workaround + description: Is there a workaround for this Issue? 
+ options: + - "Yes" + - type: textarea + id: workaround_description + attributes: + label: Workaround Description + description: If yes, supplement how could the Issue be worked around. - type: textarea id: additional attributes: @@ -42,11 +62,8 @@ body: description: Fellow contributors are happy to provide support where applicable. multiple: false options: - - "No" - "Maybe" - "Yes" - validations: - required: true - type: textarea id: pr-support attributes: diff --git a/noir/.github/actions/install-playwright/action.yml b/noir/.github/actions/install-playwright/action.yml index ac412a7dd4a..9579e2dd32d 100644 --- a/noir/.github/actions/install-playwright/action.yml +++ b/noir/.github/actions/install-playwright/action.yml @@ -9,7 +9,7 @@ runs: run: echo "PLAYWRIGHT_VERSION=$(yarn workspace @noir-lang/noirc_abi info @web/test-runner-playwright --json | jq .children.Version | tr -d '"')" >> $GITHUB_ENV - name: Cache playwright binaries - uses: actions/cache@v3 + uses: actions/cache@v4 id: playwright-cache with: path: | diff --git a/noir/.github/scripts/integration-test.sh b/noir/.github/scripts/integration-test-browser.sh similarity index 52% rename from noir/.github/scripts/integration-test.sh rename to noir/.github/scripts/integration-test-browser.sh index 4e1b52cedf9..2ace2723a20 100755 --- a/noir/.github/scripts/integration-test.sh +++ b/noir/.github/scripts/integration-test-browser.sh @@ -1,6 +1,5 @@ #!/bin/bash set -eu -apt-get install libc++-dev -y npx playwright install && npx playwright install-deps -yarn workspace integration-tests test \ No newline at end of file +yarn workspace integration-tests test diff --git a/noir/.github/scripts/integration-test-node.sh b/noir/.github/scripts/integration-test-node.sh new file mode 100755 index 00000000000..7260ca4bb0f --- /dev/null +++ b/noir/.github/scripts/integration-test-node.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +apt-get install libc++-dev -y +yarn workspace integration-tests test diff --git 
a/noir/.github/scripts/noir-js-test.sh b/noir/.github/scripts/noir-js-test.sh index b5fe34038fe..72458d8de6a 100755 --- a/noir/.github/scripts/noir-js-test.sh +++ b/noir/.github/scripts/noir-js-test.sh @@ -1,6 +1,4 @@ #!/bin/bash set -eu -./scripts/nargo_compile_noir_js_assert_lt.sh -rm -rf /usr/src/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/debug_assert_lt.json -yarn workspace @noir-lang/noir_js test \ No newline at end of file +yarn workspace @noir-lang/noir_js test diff --git a/noir/.github/workflows/docker-test-flow.yml b/noir/.github/workflows/docker-test-flow.yml index 4b4a2ac2add..f9f4815dd3d 100644 --- a/noir/.github/workflows/docker-test-flow.yml +++ b/noir/.github/workflows/docker-test-flow.yml @@ -720,7 +720,63 @@ jobs: - name: Test working-directory: /usr/src/noir run: | - ./.github/scripts/integration-test.sh + ./.github/scripts/integration-test-node.sh + + test-integration-browser: + name: Integration test browser + runs-on: ubuntu-latest + needs: [ + build-base-js, + build-noir-wasm, + build-noirc-abi, + build-acvm_js, + build-noir-js-types, + build-noir_js, + build-barretenberg-backend + ] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noir wasm + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: 
Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/integration-test-browser.sh tests-end: name: End @@ -733,6 +789,7 @@ jobs: - test-noir-wasm - test-noir-wasm-browser - test-integration + - test-integration-browser - test-noir_codegen - test-acvm_js - test-acvm_js-browser diff --git a/noir/.github/workflows/docs-pr.yml b/noir/.github/workflows/docs-pr.yml index 87bec37c438..3cdca177a20 100644 --- a/noir/.github/workflows/docs-pr.yml +++ b/noir/.github/workflows/docs-pr.yml @@ -53,6 +53,14 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.71.1 + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + save-if: false - name: Setup toolchain uses: dtolnay/rust-toolchain@1.71.1 @@ -80,7 +88,7 @@ jobs: yarn workspaces foreach -Rpt --from docs run build - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: docs path: ./docs/build/ @@ -98,7 +106,7 @@ jobs: uses: actions/checkout@v4 - name: Download built docs - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: docs path: ./docs/build diff --git a/noir/.github/workflows/gates_report.yml b/noir/.github/workflows/gates_report.yml index 8e3ef768828..39416e628a9 100644 --- a/noir/.github/workflows/gates_report.yml +++ b/noir/.github/workflows/gates_report.yml @@ -36,7 +36,7 @@ jobs: 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: nargo path: ./dist/* @@ -53,7 +53,7 @@ jobs: - uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: nargo 
path: ./nargo diff --git a/noir/.github/workflows/publish-es-packages.yml b/noir/.github/workflows/publish-es-packages.yml index 2c825ffd45f..181df8f85ad 100644 --- a/noir/.github/workflows/publish-es-packages.yml +++ b/noir/.github/workflows/publish-es-packages.yml @@ -34,7 +34,7 @@ jobs: run: | nix build -L .#noirc_abi_wasm - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: noirc_abi_wasm path: | @@ -59,7 +59,7 @@ jobs: save-if: false - name: Download noirc_abi_wasm package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm @@ -71,7 +71,7 @@ jobs: run: yarn workspace @noir-lang/noir_wasm build - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: noir_wasm path: | @@ -98,7 +98,7 @@ jobs: run: | nix build -L .#acvm_js - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: acvm_js path: | @@ -114,17 +114,17 @@ jobs: with: ref: ${{ inputs.noir-ref }} - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: acvm_js path: acvm-repo/acvm_js - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: noir_wasm path: compiler/wasm - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: tooling/noirc_abi_wasm diff --git a/noir/.github/workflows/publish-nargo.yml b/noir/.github/workflows/publish-nargo.yml index fc089008657..085ab013e4e 100644 --- a/noir/.github/workflows/publish-nargo.yml +++ b/noir/.github/workflows/publish-nargo.yml @@ -67,7 +67,7 @@ jobs: 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-${{ matrix.target }}.tar.gz - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: nargo-${{ matrix.target }} path: ./dist/* @@ -145,7 +145,7 @@ jobs: 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-${{ 
matrix.target }}.tar.gz - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: nargo-${{ matrix.target }} path: ./dist/* diff --git a/noir/.github/workflows/test-js-packages.yml b/noir/.github/workflows/test-js-packages.yml index addc9ce3d83..7ebe5ced36b 100644 --- a/noir/.github/workflows/test-js-packages.yml +++ b/noir/.github/workflows/test-js-packages.yml @@ -40,7 +40,7 @@ jobs: 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: nargo path: ./dist/* @@ -65,7 +65,7 @@ jobs: save-if: ${{ github.event_name != 'merge_group' }} - name: Download noirc_abi_wasm package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm @@ -77,7 +77,7 @@ jobs: run: yarn workspace @noir-lang/noir_wasm build - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: noir_wasm path: | @@ -108,7 +108,7 @@ jobs: run: echo "UPLOAD_PATH=$(readlink -f result/acvm_js)" >> $GITHUB_ENV - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: acvm-js path: ${{ env.UPLOAD_PATH }} @@ -120,7 +120,7 @@ jobs: steps: - name: Checkout sources - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Nix uses: ./.github/actions/nix @@ -137,7 +137,7 @@ jobs: run: echo "UPLOAD_PATH=$(readlink -f ./result/noirc_abi_wasm)" >> $GITHUB_ENV - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: noirc_abi_wasm path: ${{ env.UPLOAD_PATH }} @@ -154,7 +154,7 @@ jobs: uses: actions/checkout@v4 - name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: acvm-js path: ./acvm-repo/acvm_js @@ -176,7 +176,7 @@ jobs: uses: actions/checkout@v4 - 
name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: acvm-js path: ./acvm-repo/acvm_js @@ -200,10 +200,10 @@ jobs: steps: - name: Checkout sources - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download wasm package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm @@ -231,7 +231,7 @@ jobs: uses: actions/checkout@v4 - name: Download wasm package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm @@ -247,7 +247,7 @@ jobs: yarn workspace @noir-lang/backend_barretenberg test test-noir-js: - needs: [build-acvm-js, build-noirc-abi] + needs: [build-nargo, build-acvm-js, build-noirc-abi] name: Noir JS runs-on: ubuntu-latest timeout-minutes: 30 @@ -256,18 +256,32 @@ jobs: - name: Checkout uses: actions/checkout@v4 + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + - name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: acvm-js path: ./acvm-repo/acvm_js - name: Download wasm package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + - name: Install Yarn dependencies uses: ./.github/actions/setup @@ -293,7 +307,7 @@ jobs: uses: actions/checkout@v4 - name: Download wasm package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noir_wasm path: ./compiler/wasm @@ -302,7 +316,7 @@ jobs: uses: ./.github/actions/setup - name: Download nargo binary - uses: 
actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: nargo path: ./nargo @@ -337,19 +351,19 @@ jobs: uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: nargo path: ./nargo - name: Download acvm_js package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: acvm-js path: ./acvm-repo/acvm_js - name: Download noirc_abi package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm @@ -374,8 +388,8 @@ jobs: - name: Run noir_codegen tests run: yarn workspace @noir-lang/noir_codegen test - test-integration: - name: Integration Tests + test-integration-node: + name: Integration Tests (Node) runs-on: ubuntu-latest needs: [build-acvm-js, build-noir-wasm, build-nargo, build-noirc-abi] timeout-minutes: 30 @@ -385,25 +399,25 @@ jobs: uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: nargo path: ./nargo - name: Download acvm_js package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: acvm-js path: ./acvm-repo/acvm_js - name: Download noir_wasm package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noir_wasm path: ./compiler/wasm - name: Download noirc_abi package artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm @@ -419,6 +433,48 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Setup `integration-tests` + run: | + # Note the lack of spaces between package names. 
+ PACKAGES_TO_BUILD="@noir-lang/types,@noir-lang/backend_barretenberg,@noir-lang/noir_js" + yarn workspaces foreach -vtp --from "{$PACKAGES_TO_BUILD}" run build + + - name: Run `integration-tests` + working-directory: ./compiler/integration-tests + run: | + yarn test:node + + test-integration-browser: + name: Integration Tests (Browser) + runs-on: ubuntu-latest + needs: [build-acvm-js, build-noir-wasm, build-nargo, build-noirc-abi] + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download acvm_js package artifact + uses: actions/download-artifact@v4 + with: + name: acvm-js + path: ./acvm-repo/acvm_js + + - name: Download noir_wasm package artifact + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: ./compiler/wasm + + - name: Download noirc_abi package artifact + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: ./tooling/noirc_abi_wasm + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + - name: Install Playwright uses: ./.github/actions/install-playwright @@ -429,8 +485,9 @@ jobs: yarn workspaces foreach -vtp --from "{$PACKAGES_TO_BUILD}" run build - name: Run `integration-tests` + working-directory: ./compiler/integration-tests run: | - yarn test:integration + yarn test:browser # This is a job which depends on all test jobs and reports the overall status. # This allows us to add/remove test jobs without having to update the required workflows. 
@@ -447,7 +504,8 @@ jobs: - test-noir-js - test-noir-wasm - test-noir-codegen - - test-integration + - test-integration-node + - test-integration-browser steps: - name: Report overall success diff --git a/noir/.gitrepo b/noir/.gitrepo index 3081b81b8d0..57dc3712652 100644 --- a/noir/.gitrepo +++ b/noir/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/noir-lang/noir branch = aztec-packages - commit = f1b91511124df89bbe9e059b87536901bdf0d6f3 + commit = 2521150edad6d5ac774f133b5fee02a3ef255bae parent = 2082fedfb03d4882a269881f51c5337263bc539b method = merge cmdver = 0.4.6 diff --git a/noir/Cargo.lock b/noir/Cargo.lock index 291ac30336c..7b912c139ea 100644 --- a/noir/Cargo.lock +++ b/noir/Cargo.lock @@ -589,8 +589,8 @@ dependencies = [ "ark-ff", "flate2", "getrandom 0.2.10", - "grumpkin", "js-sys", + "noir_grumpkin", "num-bigint", "pkg-config", "reqwest", @@ -826,6 +826,14 @@ dependencies = [ "clap_derive", ] +[[package]] +name = "clap-markdown" +version = "0.1.3" +source = "git+https://github.com/noir-lang/clap-markdown?rev=450d759532c88f0dba70891ceecdbc9ff8f25d2b#450d759532c88f0dba70891ceecdbc9ff8f25d2b" +dependencies = [ + "clap", +] + [[package]] name = "clap_builder" version = "4.4.7" @@ -1672,6 +1680,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "flate2" version = "1.0.28" @@ -1942,17 +1956,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "grumpkin" -version = "0.1.0" -source = "git+https://github.com/noir-lang/grumpkin?rev=56d99799381f79e42148aaef0de2b0cf9a4b9a5d#56d99799381f79e42148aaef0de2b0cf9a4b9a5d" -dependencies = [ - "ark-bn254", - "ark-ec", - "ark-ff", - "ark-std", -] - [[package]] name = "h2" version = "0.3.24" @@ -2678,7 +2681,6 @@ dependencies = [ "rayon", "rustc_version", "serde", - "serial_test", "tempfile", "thiserror", 
"tracing", @@ -2696,6 +2698,7 @@ dependencies = [ "bn254_blackbox_solver", "build-data", "clap", + "clap-markdown", "color-eyre", "const_format", "criterion", @@ -2834,6 +2837,7 @@ dependencies = [ "nargo", "noirc_driver", "noirc_errors", + "noirc_frontend", "noirc_printable_type", "owo-colors", "rexpect", @@ -2843,6 +2847,18 @@ dependencies = [ "thiserror", ] +[[package]] +name = "noir_grumpkin" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7d49a4b14b13c0dc730b05780b385828ab88f4148daaad7db080ecdce07350" +dependencies = [ + "ark-bn254", + "ark-ec", + "ark-ff", + "ark-std", +] + [[package]] name = "noir_lsp" version = "0.23.0" @@ -2941,6 +2957,7 @@ dependencies = [ "noirc_errors", "noirc_evaluator", "noirc_frontend", + "noirc_macros", "rust-embed", "serde", "tracing", @@ -2957,6 +2974,7 @@ dependencies = [ "codespan-reporting", "flate2", "fm", + "noirc_printable_type", "serde", "serde_json", "serde_with", @@ -2990,6 +3008,7 @@ dependencies = [ "iter-extended", "noirc_errors", "noirc_printable_type", + "petgraph", "regex", "rustc-hash", "serde", @@ -3003,6 +3022,14 @@ dependencies = [ "tracing", ] +[[package]] +name = "noirc_macros" +version = "0.23.0" +dependencies = [ + "iter-extended", + "noirc_frontend", +] + [[package]] name = "noirc_printable_type" version = "0.23.0" @@ -3193,6 +3220,16 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +[[package]] +name = "petgraph" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" +dependencies = [ + "fixedbitset", + "indexmap 2.0.0", +] + [[package]] name = "phf" version = "0.10.1" @@ -4242,31 +4279,6 @@ dependencies = [ "syn 2.0.32", ] -[[package]] -name = "serial_test" -version = "2.0.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" -dependencies = [ - "dashmap", - "futures 0.3.28", - "lazy_static", - "log", - "parking_lot 0.12.1", - "serial_test_derive", -] - -[[package]] -name = "serial_test_derive" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.32", -] - [[package]] name = "sha2" version = "0.10.7" diff --git a/noir/Cargo.toml b/noir/Cargo.toml index 5dfff3dbb5d..2ba97e906b1 100644 --- a/noir/Cargo.toml +++ b/noir/Cargo.toml @@ -1,7 +1,10 @@ [workspace] members = [ + # Macros crates for metaprogramming "aztec_macros", + "noirc_macros", + # Compiler crates "compiler/noirc_evaluator", "compiler/noirc_frontend", "compiler/noirc_errors", diff --git a/noir/acvm-repo/acir/codegen/acir.cpp b/noir/acvm-repo/acir/codegen/acir.cpp index 3ce63ecfa94..0fc84d47a0f 100644 --- a/noir/acvm-repo/acir/codegen/acir.cpp +++ b/noir/acvm-repo/acir/codegen/acir.cpp @@ -983,18 +983,7 @@ namespace Circuit { static ToLeRadix bincodeDeserialize(std::vector); }; - struct PermutationSort { - std::vector> inputs; - uint32_t tuple; - std::vector bits; - std::vector sort_by; - - friend bool operator==(const PermutationSort&, const PermutationSort&); - std::vector bincodeSerialize() const; - static PermutationSort bincodeDeserialize(std::vector); - }; - - std::variant value; + std::variant value; friend bool operator==(const Directive&, const Directive&); std::vector bincodeSerialize() const; @@ -5200,53 +5189,6 @@ Circuit::Directive::ToLeRadix serde::Deserializable Directive::PermutationSort::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline Directive::PermutationSort 
Directive::PermutationSort::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Circuit - -template <> -template -void serde::Serializable::serialize(const Circuit::Directive::PermutationSort &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.inputs, serializer); - serde::Serializable::serialize(obj.tuple, serializer); - serde::Serializable::serialize(obj.bits, serializer); - serde::Serializable::serialize(obj.sort_by, serializer); -} - -template <> -template -Circuit::Directive::PermutationSort serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::Directive::PermutationSort obj; - obj.inputs = serde::Deserializable::deserialize(deserializer); - obj.tuple = serde::Deserializable::deserialize(deserializer); - obj.bits = serde::Deserializable::deserialize(deserializer); - obj.sort_by = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Circuit { inline bool operator==(const Expression &lhs, const Expression &rhs) { diff --git a/noir/acvm-repo/acir/src/circuit/directives.rs b/noir/acvm-repo/acir/src/circuit/directives.rs index 2486f4cfb83..099d0634399 100644 --- a/noir/acvm-repo/acir/src/circuit/directives.rs +++ b/noir/acvm-repo/acir/src/circuit/directives.rs @@ -7,18 +7,5 @@ use serde::{Deserialize, Serialize}; /// In the future, this can be replaced with asm non-determinism blocks pub enum Directive { //decomposition of a: a=\sum b[i]*radix^i where b is an array of witnesses < radix in little endian form - ToLeRadix { - a: Expression, - b: Vec, - radix: u32, - }, - - // Sort directive, using a sorting network - // This directive is used to generate the values of the control bits for the sorting network such that its 
outputs are properly sorted according to sort_by - PermutationSort { - inputs: Vec>, // Array of tuples to sort - tuple: u32, // tuple size; if 1 then inputs is a single array [a0,a1,..], if 2 then inputs=[(a0,b0),..] is [a0,b0,a1,b1,..], etc.. - bits: Vec, // control bits of the network which permutes the inputs into its sorted version - sort_by: Vec, // specify primary index to sort by, then the secondary,... For instance, if tuple is 2 and sort_by is [1,0], then a=[(a0,b0),..] is sorted by bi and then ai. - }, + ToLeRadix { a: Expression, b: Vec, radix: u32 }, } diff --git a/noir/acvm-repo/acir/src/circuit/mod.rs b/noir/acvm-repo/acir/src/circuit/mod.rs index 9cbacdc2ab0..7e6cbf23803 100644 --- a/noir/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/acvm-repo/acir/src/circuit/mod.rs @@ -56,6 +56,10 @@ pub struct Circuit { // Note: This should be a BTreeMap, but serde-reflect is creating invalid // c++ code at the moment when it is, due to OpcodeLocation needing a comparison // implementation which is never generated. + // + // TODO: These are only used for constraints that are explicitly created during code generation (such as index out of bounds on slices) + // TODO: We should move towards having all the checks being evaluated in the same manner + // TODO: as runtime assert messages specified by the user. This will also be a breaking change as the `Circuit` structure will change. pub assert_messages: Vec<(OpcodeLocation, String)>, /// States whether the backend should use a SNARK recursion friendly prover. 
diff --git a/noir/acvm-repo/acir/src/circuit/opcodes.rs b/noir/acvm-repo/acir/src/circuit/opcodes.rs index 5aab9d4d472..f725ba8c32a 100644 --- a/noir/acvm-repo/acir/src/circuit/opcodes.rs +++ b/noir/acvm-repo/acir/src/circuit/opcodes.rs @@ -8,6 +8,7 @@ mod memory_operation; pub use black_box_function_call::{BlackBoxFuncCall, FunctionInput}; pub use memory_operation::{BlockId, MemOp}; +#[allow(clippy::large_enum_variant)] #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum Opcode { AssertZero(Expression), @@ -59,20 +60,6 @@ impl std::fmt::Display for Opcode { b.last().unwrap().witness_index(), ) } - Opcode::Directive(Directive::PermutationSort { inputs: a, tuple, bits, sort_by }) => { - write!(f, "DIR::PERMUTATIONSORT ")?; - write!( - f, - "(permutation size: {} {}-tuples, sort_by: {:#?}, bits: [_{}..._{}]))", - a.len(), - tuple, - sort_by, - // (Note): the bits do not have contiguous index but there are too many for display - bits.first().unwrap().witness_index(), - bits.last().unwrap().witness_index(), - ) - } - Opcode::Brillig(brillig) => { write!(f, "BRILLIG: ")?; writeln!(f, "inputs: {:?}", brillig.inputs)?; diff --git a/noir/acvm-repo/acir_field/src/generic_ark.rs b/noir/acvm-repo/acir_field/src/generic_ark.rs index 542e291982b..dc54d271beb 100644 --- a/noir/acvm-repo/acir_field/src/generic_ark.rs +++ b/noir/acvm-repo/acir_field/src/generic_ark.rs @@ -175,6 +175,10 @@ impl FieldElement { self == &Self::one() } + pub fn is_negative(&self) -> bool { + self.neg().num_bits() < self.num_bits() + } + pub fn pow(&self, exponent: &Self) -> Self { FieldElement(self.0.pow(exponent.0.into_bigint())) } @@ -240,6 +244,12 @@ impl FieldElement { self.fits_in_u128().then(|| self.to_u128()) } + pub fn to_i128(self) -> i128 { + let is_negative = self.is_negative(); + let bytes = if is_negative { self.neg() } else { self }.to_be_bytes(); + i128::from_be_bytes(bytes[16..32].try_into().unwrap()) * if is_negative { -1 } else { 1 } + } + pub fn try_to_u64(&self) -> 
Option { (self.num_bits() <= 64).then(|| self.to_u128() as u64) } diff --git a/noir/acvm-repo/acvm/src/compiler/transformers/mod.rs b/noir/acvm-repo/acvm/src/compiler/transformers/mod.rs index 4be2eb7029e..214243d9360 100644 --- a/noir/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/noir/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -111,11 +111,6 @@ pub(super) fn transform_internal( transformer.mark_solvable(*witness); } } - Directive::PermutationSort { bits, .. } => { - for witness in bits { - transformer.mark_solvable(*witness); - } - } } new_acir_opcode_positions.push(acir_opcode_positions[index]); transformed_opcodes.push(opcode); diff --git a/noir/acvm-repo/acvm/src/pwg/brillig.rs b/noir/acvm-repo/acvm/src/pwg/brillig.rs index c5a98aaf01c..b0fb7469fd9 100644 --- a/noir/acvm-repo/acvm/src/pwg/brillig.rs +++ b/noir/acvm-repo/acvm/src/pwg/brillig.rs @@ -130,6 +130,10 @@ impl<'b, B: BlackBoxFunctionSolver> BrilligSolver<'b, B> { self.vm.write_memory_at(ptr, value); } + pub fn get_call_stack(&self) -> Vec { + self.vm.get_call_stack() + } + pub(super) fn solve(&mut self) -> Result { let status = self.vm.process_opcodes(); self.handle_vm_status(status) diff --git a/noir/acvm-repo/acvm/src/pwg/directives/mod.rs b/noir/acvm-repo/acvm/src/pwg/directives/mod.rs index 4605168d98b..07226c85b27 100644 --- a/noir/acvm-repo/acvm/src/pwg/directives/mod.rs +++ b/noir/acvm-repo/acvm/src/pwg/directives/mod.rs @@ -1,5 +1,3 @@ -use std::cmp::Ordering; - use acir::{circuit::directives::Directive, native_types::WitnessMap, FieldElement}; use num_bigint::BigUint; @@ -7,8 +5,6 @@ use crate::OpcodeResolutionError; use super::{get_value, insert_value, ErrorLocation}; -mod sorting; - /// Attempts to solve the [`Directive`] opcode `directive`. /// If successful, `initial_witness` will be mutated to contain the new witness assignment. 
/// @@ -48,38 +44,5 @@ pub(super) fn solve_directives( Ok(()) } - Directive::PermutationSort { inputs: a, tuple, bits, sort_by } => { - let mut val_a = Vec::new(); - let mut base = Vec::new(); - for (i, element) in a.iter().enumerate() { - assert_eq!(element.len(), *tuple as usize); - let mut element_val = Vec::with_capacity(*tuple as usize + 1); - for e in element { - element_val.push(get_value(e, initial_witness)?); - } - let field_i = FieldElement::from(i as i128); - element_val.push(field_i); - base.push(field_i); - val_a.push(element_val); - } - val_a.sort_by(|a, b| { - for i in sort_by { - let int_a = BigUint::from_bytes_be(&a[*i as usize].to_be_bytes()); - let int_b = BigUint::from_bytes_be(&b[*i as usize].to_be_bytes()); - let cmp = int_a.cmp(&int_b); - if cmp != Ordering::Equal { - return cmp; - } - } - Ordering::Equal - }); - let b = val_a.iter().map(|a| *a.last().unwrap()).collect(); - let control = sorting::route(base, b); - for (w, value) in bits.iter().zip(control) { - let value = if value { FieldElement::one() } else { FieldElement::zero() }; - insert_value(w, value, initial_witness)?; - } - Ok(()) - } } } diff --git a/noir/acvm-repo/acvm/src/pwg/directives/sorting.rs b/noir/acvm-repo/acvm/src/pwg/directives/sorting.rs deleted file mode 100644 index 2749e88b023..00000000000 --- a/noir/acvm-repo/acvm/src/pwg/directives/sorting.rs +++ /dev/null @@ -1,396 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use acir::FieldElement; - -// A sorting network is a graph of connected switches -// It is defined recursively so here we only keep track of the outer layer of switches -struct SortingNetwork { - n: usize, // size of the network - x_inputs: Vec, // inputs of the network - y_inputs: Vec, // outputs of the network - x_values: BTreeMap, // map for matching a y value with a x value - y_values: BTreeMap, // map for matching a x value with a y value - inner_x: Vec, // positions after the switch_x - inner_y: Vec, // positions after the sub-networks, and 
before the switch_y - switch_x: Vec, // outer switches for the inputs - switch_y: Vec, // outer switches for the outputs - free: BTreeSet, // outer switches available for looping -} - -impl SortingNetwork { - fn new(n: usize) -> SortingNetwork { - let free_len = (n - 1) / 2; - let mut free = BTreeSet::new(); - for i in 0..free_len { - free.insert(i); - } - SortingNetwork { - n, - x_inputs: Vec::with_capacity(n), - y_inputs: Vec::with_capacity(n), - x_values: BTreeMap::new(), - y_values: BTreeMap::new(), - inner_x: Vec::with_capacity(n), - inner_y: Vec::with_capacity(n), - switch_x: Vec::with_capacity(n / 2), - switch_y: Vec::with_capacity(free_len), - free, - } - } - - fn init(&mut self, inputs: Vec, outputs: Vec) { - let n = self.n; - assert_eq!(inputs.len(), outputs.len()); - assert_eq!(inputs.len(), n); - - self.x_inputs = inputs; - self.y_inputs = outputs; - for i in 0..self.n { - self.x_values.insert(self.x_inputs[i], i); - self.y_values.insert(self.y_inputs[i], i); - } - self.switch_x = vec![false; n / 2]; - self.switch_y = vec![false; (n - 1) / 2]; - self.inner_x = vec![FieldElement::zero(); n]; - self.inner_y = vec![FieldElement::zero(); n]; - - //Route the single wires so we do not need to handle this case later on - self.inner_y[n - 1] = self.y_inputs[n - 1]; - if n % 2 == 0 { - self.inner_y[n / 2 - 1] = self.y_inputs[n - 2]; - } else { - self.inner_x[n - 1] = self.x_inputs[n - 1]; - } - } - - //route a wire from outputs to its value in the inputs - fn route_out_wire(&mut self, y: usize, sub: bool) -> usize { - // sub <- y - if self.is_single_y(y) { - assert!(sub); - } else { - let port = y % 2 != 0; - let s1 = sub ^ port; - let inner = self.compute_inner(y, s1); - self.configure_y(y, s1, inner); - } - // x <- sub - let x = self.x_values.remove(&self.y_inputs[y]).unwrap(); - if !self.is_single_x(x) { - let port2 = x % 2 != 0; - let s2 = sub ^ port2; - let inner = self.compute_inner(x, s2); - self.configure_x(x, s2, inner); - } - x - } - - //route a wire 
from inputs to its value in the outputs - fn route_in_wire(&mut self, x: usize, sub: bool) -> usize { - // x -> sub - assert!(!self.is_single_x(x)); - let port = x % 2 != 0; - let s1 = sub ^ port; - let inner = self.compute_inner(x, s1); - self.configure_x(x, s1, inner); - - // sub -> y - let y = self.y_values.remove(&self.x_inputs[x]).unwrap(); - if !self.is_single_y(y) { - let port = y % 2 != 0; - let s2 = sub ^ port; - let inner = self.compute_inner(y, s2); - self.configure_y(y, s2, inner); - } - y - } - - //update the computed switch and inner values for an input wire - fn configure_x(&mut self, x: usize, switch: bool, inner: usize) { - self.inner_x[inner] = self.x_inputs[x]; - self.switch_x[x / 2] = switch; - } - - //update the computed switch and inner values for an output wire - fn configure_y(&mut self, y: usize, switch: bool, inner: usize) { - self.inner_y[inner] = self.y_inputs[y]; - self.switch_y[y / 2] = switch; - } - - // returns the other wire belonging to the same switch - fn sibling(index: usize) -> usize { - index + 1 - 2 * (index % 2) - } - - // returns a free switch - fn take(&mut self) -> Option { - self.free.first().copied() - } - - fn is_single_x(&self, a: usize) -> bool { - let n = self.x_inputs.len(); - n % 2 == 1 && a == n - 1 - } - - fn is_single_y(&mut self, a: usize) -> bool { - let n = self.x_inputs.len(); - a >= n - 2 + n % 2 - } - - // compute the inner position of idx through its switch - fn compute_inner(&self, idx: usize, switch: bool) -> usize { - if switch ^ (idx % 2 == 1) { - idx / 2 + self.n / 2 - } else { - idx / 2 - } - } - - fn new_start(&mut self) -> (Option, usize) { - let next = self.take(); - if let Some(switch) = next { - (next, 2 * switch) - } else { - (None, 0) - } - } -} - -// Computes the control bits of the sorting network which transform inputs into outputs -// implementation is based on https://www.mdpi.com/2227-7080/10/1/16 -pub(super) fn route(inputs: Vec, outputs: Vec) -> Vec { - assert_eq!(inputs.len(), 
outputs.len()); - match inputs.len() { - 0 => Vec::new(), - 1 => { - assert_eq!(inputs[0], outputs[0]); - Vec::new() - } - 2 => { - if inputs[0] == outputs[0] { - assert_eq!(inputs[1], outputs[1]); - vec![false] - } else { - assert_eq!(inputs[1], outputs[0]); - assert_eq!(inputs[0], outputs[1]); - vec![true] - } - } - _ => { - let n = inputs.len(); - - let mut result; - let n1 = n / 2; - let in_sub1; - let out_sub1; - let in_sub2; - let out_sub2; - - // process the outer layer in a code block so that the intermediate data is cleared before recursion - { - let mut network = SortingNetwork::new(n); - network.init(inputs, outputs); - - //We start with the last single wire - let mut out_idx = n - 1; - let mut start_sub = true; //it is connected to the lower inner network - let mut switch = None; - let mut start = None; - - while !network.free.is_empty() { - // the processed switch is no more available - if let Some(free_switch) = switch { - network.free.remove(&free_switch); - } - - // connect the output wire to its matching input - let in_idx = network.route_out_wire(out_idx, start_sub); - if network.is_single_x(in_idx) { - start_sub = !start_sub; //We need to restart, but did not complete the loop so we switch the sub network - (start, out_idx) = network.new_start(); - switch = start; - continue; - } - - // loop from the sibling - let next = SortingNetwork::sibling(in_idx); - // connect the input wire to its matching output, using the other sub-network - out_idx = network.route_in_wire(next, !start_sub); - switch = Some(out_idx / 2); - if start == switch || network.is_single_y(out_idx) { - //loop is complete, need a fresh start - (start, out_idx) = network.new_start(); - switch = start; - } else { - // we loop back from the sibling - out_idx = SortingNetwork::sibling(out_idx); - } - } - //All the wires are connected, we can now route the sub-networks - result = network.switch_x; - result.extend(network.switch_y); - in_sub1 = network.inner_x[0..n1].to_vec(); - in_sub2 
= network.inner_x[n1..].to_vec(); - out_sub1 = network.inner_y[0..n1].to_vec(); - out_sub2 = network.inner_y[n1..].to_vec(); - } - let s1 = route(in_sub1, out_sub1); - result.extend(s1); - let s2 = route(in_sub2, out_sub2); - result.extend(s2); - result - } - } -} - -#[cfg(test)] -mod tests { - // Silence `unused_crate_dependencies` warning - use paste as _; - use proptest as _; - - use super::route; - use acir::FieldElement; - use rand::prelude::*; - - fn execute_network(config: Vec, inputs: Vec) -> Vec { - let n = inputs.len(); - if n == 1 { - return inputs; - } - let mut in1 = Vec::new(); - let mut in2 = Vec::new(); - //layer 1: - for i in 0..n / 2 { - if config[i] { - in1.push(inputs[2 * i + 1]); - in2.push(inputs[2 * i]); - } else { - in1.push(inputs[2 * i]); - in2.push(inputs[2 * i + 1]); - } - } - if n % 2 == 1 { - in2.push(*inputs.last().unwrap()); - } - let n2 = n / 2 + (n - 1) / 2; - let n3 = n2 + switch_nb(n / 2); - let mut result = Vec::new(); - let out1 = execute_network(config[n2..n3].to_vec(), in1); - let out2 = execute_network(config[n3..].to_vec(), in2); - //last layer: - for i in 0..(n - 1) / 2 { - if config[n / 2 + i] { - result.push(out2[i]); - result.push(out1[i]); - } else { - result.push(out1[i]); - result.push(out2[i]); - } - } - if n % 2 == 0 { - result.push(*out1.last().unwrap()); - result.push(*out2.last().unwrap()); - } else { - result.push(*out2.last().unwrap()); - } - result - } - - // returns the number of switches in the network - fn switch_nb(n: usize) -> usize { - let mut s = 0; - for i in 0..n { - s += f64::from((i + 1) as u32).log2().ceil() as usize; - } - s - } - - #[test] - fn test_route() { - //basic tests - let a = vec![ - FieldElement::from(1_i128), - FieldElement::from(2_i128), - FieldElement::from(3_i128), - ]; - let b = vec![ - FieldElement::from(1_i128), - FieldElement::from(2_i128), - FieldElement::from(3_i128), - ]; - let c = route(a, b); - assert_eq!(c, vec![false, false, false]); - - let a = vec![ - 
FieldElement::from(1_i128), - FieldElement::from(2_i128), - FieldElement::from(3_i128), - ]; - let b = vec![ - FieldElement::from(1_i128), - FieldElement::from(3_i128), - FieldElement::from(2_i128), - ]; - let c = route(a, b); - assert_eq!(c, vec![false, false, true]); - - let a = vec![ - FieldElement::from(1_i128), - FieldElement::from(2_i128), - FieldElement::from(3_i128), - ]; - let b = vec![ - FieldElement::from(3_i128), - FieldElement::from(2_i128), - FieldElement::from(1_i128), - ]; - let c = route(a, b); - assert_eq!(c, vec![true, true, true]); - - let a = vec![ - FieldElement::from(0_i128), - FieldElement::from(1_i128), - FieldElement::from(2_i128), - FieldElement::from(3_i128), - ]; - let b = vec![ - FieldElement::from(2_i128), - FieldElement::from(3_i128), - FieldElement::from(0_i128), - FieldElement::from(1_i128), - ]; - let c = route(a, b); - assert_eq!(c, vec![false, true, true, true, true]); - - let a = vec![ - FieldElement::from(0_i128), - FieldElement::from(1_i128), - FieldElement::from(2_i128), - FieldElement::from(3_i128), - FieldElement::from(4_i128), - ]; - let b = vec![ - FieldElement::from(0_i128), - FieldElement::from(3_i128), - FieldElement::from(4_i128), - FieldElement::from(2_i128), - FieldElement::from(1_i128), - ]; - let c = route(a, b); - assert_eq!(c, vec![false, false, false, true, false, true, false, true]); - - // random tests - for i in 2..50 { - let mut a = vec![FieldElement::zero()]; - for j in 0..i - 1 { - a.push(a[j] + FieldElement::one()); - } - let mut rng = rand::thread_rng(); - let mut b = a.clone(); - b.shuffle(&mut rng); - let c = route(a.clone(), b.clone()); - assert_eq!(b, execute_network(c, a)); - } - } -} diff --git a/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml index a73aded231f..ef80e2c1c0f 100644 --- a/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -23,8 +23,9 @@ rust-embed = { version = "6.6.0", features 
= [ "include-exclude", ] } -# BN254 fixed base scalar multiplication solver -grumpkin = { git = "https://github.com/noir-lang/grumpkin", rev = "56d99799381f79e42148aaef0de2b0cf9a4b9a5d", features = ["std"] } +grumpkin = { version = "0.1.0", package = "noir_grumpkin", features = [ + "std", +] } # BN254 fixed base scalar multiplication solver ark-ec = { version = "^0.4.0", default-features = false } ark-ff = { version = "^0.4.0", default-features = false } num-bigint.workspace = true diff --git a/noir/acvm-repo/brillig/src/foreign_call.rs b/noir/acvm-repo/brillig/src/foreign_call.rs index 1359d7d604d..3f124a9a0a7 100644 --- a/noir/acvm-repo/brillig/src/foreign_call.rs +++ b/noir/acvm-repo/brillig/src/foreign_call.rs @@ -37,7 +37,7 @@ impl ForeignCallParam { } /// Represents the full output of a [foreign call][crate::Opcode::ForeignCall]. -#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Default)] pub struct ForeignCallResult { /// Resolved output values of the foreign call. pub values: Vec, diff --git a/noir/acvm-repo/brillig_vm/src/lib.rs b/noir/acvm-repo/brillig_vm/src/lib.rs index 081ecd33cb6..13accbeacb3 100644 --- a/noir/acvm-repo/brillig_vm/src/lib.rs +++ b/noir/acvm-repo/brillig_vm/src/lib.rs @@ -167,6 +167,16 @@ impl<'a, B: BlackBoxFunctionSolver> VM<'a, B> { self.memory.write(MemoryAddress(ptr), value); } + /// Returns the VM's current call stack, including the actual program + /// counter in the last position of the returned vector. + pub fn get_call_stack(&self) -> Vec { + self.call_stack + .iter() + .map(|program_counter| program_counter.to_usize()) + .chain(std::iter::once(self.program_counter)) + .collect() + } + /// Process a single opcode and modify the program counter. 
pub fn process_opcode(&mut self) -> VMStatus { let opcode = &self.bytecode[self.program_counter]; diff --git a/noir/compiler/integration-tests/package.json b/noir/compiler/integration-tests/package.json index c4e424df480..a89e37dc64f 100644 --- a/noir/compiler/integration-tests/package.json +++ b/noir/compiler/integration-tests/package.json @@ -5,8 +5,8 @@ "private": true, "scripts": { "build": "echo Integration Test build step", - "test": "bash ./scripts/codegen-verifiers.sh && yarn test:browser && yarn test:node", - "test:node": "hardhat test test/node/**/*", + "test": "yarn test:browser && yarn test:node", + "test:node": "bash ./scripts/codegen-verifiers.sh && hardhat test test/node/**/*", "test:browser": "web-test-runner", "test:integration:browser": "web-test-runner test/browser/**/*.test.ts", "test:integration:browser:watch": "web-test-runner test/browser/**/*.test.ts --watch", diff --git a/noir/compiler/noirc_driver/Cargo.toml b/noir/compiler/noirc_driver/Cargo.toml index eb9650e8aec..d9b240101d8 100644 --- a/noir/compiler/noirc_driver/Cargo.toml +++ b/noir/compiler/noirc_driver/Cargo.toml @@ -25,3 +25,4 @@ rust-embed.workspace = true tracing.workspace = true aztec_macros = { path = "../../aztec_macros" } +noirc_macros = { path = "../../noirc_macros" } diff --git a/noir/compiler/noirc_driver/src/lib.rs b/noir/compiler/noirc_driver/src/lib.rs index cded514f28c..8b0fc5dc97a 100644 --- a/noir/compiler/noirc_driver/src/lib.rs +++ b/noir/compiler/noirc_driver/src/lib.rs @@ -11,11 +11,12 @@ use noirc_abi::{AbiParameter, AbiType, ContractEvent}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; use noirc_evaluator::create_circuit; use noirc_evaluator::errors::RuntimeError; +use noirc_frontend::debug::build_debug_crate_file; use noirc_frontend::graph::{CrateId, CrateName}; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; use noirc_frontend::macros_api::MacroProcessor; -use 
noirc_frontend::monomorphization::monomorphize; +use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug}; use noirc_frontend::node_interner::FuncId; use std::path::Path; use tracing::info; @@ -33,6 +34,7 @@ pub use debug::DebugFile; pub use program::CompiledProgram; const STD_CRATE_NAME: &str = "std"; +const DEBUG_CRATE_NAME: &str = "__debug"; pub const GIT_COMMIT: &str = env!("GIT_COMMIT"); pub const GIT_DIRTY: &str = env!("GIT_DIRTY"); @@ -76,13 +78,21 @@ pub struct CompileOptions { #[arg(long, hide = true)] pub only_acir: bool, - /// Disables the builtin macros being used in the compiler + /// Disables the builtin Aztec macros being used in the compiler #[arg(long, hide = true)] pub disable_macros: bool, /// Outputs the monomorphized IR to stdout for debugging #[arg(long, hide = true)] pub show_monomorphized: bool, + + /// Insert debug symbols to inspect variables + #[arg(long, hide = true)] + pub instrument_debug: bool, + + /// Force Brillig output (for step debugging) + #[arg(long, hide = true)] + pub force_brillig: bool, } fn parse_expression_width(input: &str) -> Result { @@ -115,6 +125,7 @@ pub fn file_manager_with_stdlib(root: &Path) -> FileManager { let mut file_manager = FileManager::new(root); add_stdlib_source_to_file_manager(&mut file_manager); + add_debug_source_to_file_manager(&mut file_manager); file_manager } @@ -131,6 +142,15 @@ fn add_stdlib_source_to_file_manager(file_manager: &mut FileManager) { } } +/// Adds the source code of the debug crate needed to support instrumentation to +/// track variables values +fn add_debug_source_to_file_manager(file_manager: &mut FileManager) { + // Adds the synthetic debug module for instrumentation into the file manager + let path_to_debug_lib_file = Path::new(DEBUG_CRATE_NAME).join("lib.nr"); + file_manager + .add_file_with_source_canonical_path(&path_to_debug_lib_file, build_debug_crate_file()); +} + /// Adds the file from the file system at `Path` to the crate graph as a root file /// /// 
Note: This methods adds the stdlib as a dependency to the crate. @@ -152,6 +172,12 @@ pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { root_crate_id } +pub fn link_to_debug_crate(context: &mut Context, root_crate_id: CrateId) { + let path_to_debug_lib_file = Path::new(DEBUG_CRATE_NAME).join("lib.nr"); + let debug_crate_id = prepare_dependency(context, &path_to_debug_lib_file); + add_dep(context, root_crate_id, debug_crate_id, DEBUG_CRATE_NAME.parse().unwrap()); +} + // Adds the file from the file system at `Path` to the crate graph pub fn prepare_dependency(context: &mut Context, file_name: &Path) -> CrateId { let root_file_id = context @@ -193,9 +219,12 @@ pub fn check_crate( disable_macros: bool, ) -> CompilationResult<()> { let macros: Vec<&dyn MacroProcessor> = if disable_macros { - vec![] + vec![&noirc_macros::AssertMessageMacro as &dyn MacroProcessor] } else { - vec![&aztec_macros::AztecMacro as &dyn MacroProcessor] + vec![ + &aztec_macros::AztecMacro as &dyn MacroProcessor, + &noirc_macros::AssertMessageMacro as &dyn MacroProcessor, + ] }; let mut errors = vec![]; @@ -246,6 +275,7 @@ pub fn compile_main( let compiled_program = compile_no_check(context, options, main, cached_program, options.force_compile) .map_err(FileDiagnostic::from)?; + let compilation_warnings = vecmap(compiled_program.warnings.clone(), FileDiagnostic::from); if options.deny_warnings && !compilation_warnings.is_empty() { return Err(compilation_warnings); @@ -325,7 +355,7 @@ fn has_errors(errors: &[FileDiagnostic], deny_warnings: bool) -> bool { /// Compile all of the functions associated with a Noir contract. fn compile_contract_inner( - context: &Context, + context: &mut Context, contract: Contract, options: &CompileOptions, ) -> Result { @@ -401,13 +431,17 @@ fn compile_contract_inner( /// This function assumes [`check_crate`] is called beforehand. 
#[tracing::instrument(level = "trace", skip_all, fields(function_name = context.function_name(&main_function)))] pub fn compile_no_check( - context: &Context, + context: &mut Context, options: &CompileOptions, main_function: FuncId, cached_program: Option, force_compile: bool, ) -> Result { - let program = monomorphize(main_function, &context.def_interner); + let program = if options.instrument_debug { + monomorphize_debug(main_function, &mut context.def_interner, &context.debug_instrumenter) + } else { + monomorphize(main_function, &mut context.def_interner) + }; let hash = fxhash::hash64(&program); let hashes_match = cached_program.as_ref().map_or(false, |program| program.hash == hash); @@ -426,7 +460,7 @@ pub fn compile_no_check( } let visibility = program.return_visibility; let (circuit, debug, input_witnesses, return_witnesses, warnings) = - create_circuit(program, options.show_ssa, options.show_brillig)?; + create_circuit(program, options.show_ssa, options.show_brillig, options.force_brillig)?; let abi = abi_gen::gen_abi(context, &main_function, input_witnesses, return_witnesses, visibility); diff --git a/noir/compiler/noirc_errors/Cargo.toml b/noir/compiler/noirc_errors/Cargo.toml index 935137ba2fc..da18399971e 100644 --- a/noir/compiler/noirc_errors/Cargo.toml +++ b/noir/compiler/noirc_errors/Cargo.toml @@ -13,6 +13,7 @@ codespan-reporting.workspace = true codespan.workspace = true fm.workspace = true chumsky.workspace = true +noirc_printable_type.workspace = true serde.workspace = true serde_with = "3.2.0" tracing.workspace = true diff --git a/noir/compiler/noirc_errors/src/debug_info.rs b/noir/compiler/noirc_errors/src/debug_info.rs index ffca8fbf2e1..25722aac57f 100644 --- a/noir/compiler/noirc_errors/src/debug_info.rs +++ b/noir/compiler/noirc_errors/src/debug_info.rs @@ -16,10 +16,26 @@ use std::io::Write; use std::mem; use crate::Location; +use noirc_printable_type::PrintableType; use serde::{ de::Error as DeserializationError, ser::Error as 
SerializationError, Deserialize, Serialize, }; +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, PartialOrd, Ord, Deserialize, Serialize)] +pub struct DebugVarId(pub u32); + +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, PartialOrd, Ord, Deserialize, Serialize)] +pub struct DebugTypeId(pub u32); + +#[derive(Debug, Clone, Hash, Deserialize, Serialize)] +pub struct DebugVariable { + pub name: String, + pub debug_type_id: DebugTypeId, +} + +pub type DebugVariables = BTreeMap; +pub type DebugTypes = BTreeMap; + #[serde_as] #[derive(Default, Debug, Clone, Deserialize, Serialize)] pub struct DebugInfo { @@ -28,6 +44,8 @@ pub struct DebugInfo { /// that they should be serialized to/from strings. #[serde_as(as = "BTreeMap")] pub locations: BTreeMap>, + pub variables: DebugVariables, + pub types: DebugTypes, } /// Holds OpCodes Counts for Acir and Brillig Opcodes @@ -39,8 +57,12 @@ pub struct OpCodesCount { } impl DebugInfo { - pub fn new(locations: BTreeMap>) -> Self { - DebugInfo { locations } + pub fn new( + locations: BTreeMap>, + variables: DebugVariables, + types: DebugTypes, + ) -> Self { + Self { locations, variables, types } } /// Updates the locations map when the [`Circuit`][acvm::acir::circuit::Circuit] is modified. 
diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index e0630655253..65b593b77f5 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -5,6 +5,7 @@ use crate::brillig::brillig_ir::{ BrilligBinaryOp, BrilligContext, BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, }; use crate::ssa::ir::dfg::CallStack; +use crate::ssa::ir::instruction::ConstrainError; use crate::ssa::ir::{ basic_block::{BasicBlock, BasicBlockId}, dfg::DataFlowGraph, @@ -257,7 +258,30 @@ impl<'block> BrilligBlock<'block> { condition, ); - self.brillig_context.constrain_instruction(condition, assert_message.clone()); + let assert_message = if let Some(error) = assert_message { + match error.as_ref() { + ConstrainError::Static(string) => Some(string.clone()), + ConstrainError::Dynamic(call_instruction) => { + let Instruction::Call { func, arguments } = call_instruction else { + unreachable!("expected a call instruction") + }; + + let Value::Function(func_id) = &dfg[*func] else { + unreachable!("expected a function value") + }; + + self.convert_ssa_function_call(*func_id, arguments, dfg, &[]); + + // Dynamic assert messages are handled in the generated function call. + // We then don't need to attach one to the constrain instruction. 
+ None + } + } + } else { + None + }; + + self.brillig_context.constrain_instruction(condition, assert_message); self.brillig_context.deallocate_register(condition); } Instruction::Allocate => { @@ -368,7 +392,8 @@ impl<'block> BrilligBlock<'block> { } } Value::Function(func_id) => { - self.convert_ssa_function_call(*func_id, arguments, dfg, instruction_id); + let result_ids = dfg.instruction_results(instruction_id); + self.convert_ssa_function_call(*func_id, arguments, dfg, result_ids); } Value::Intrinsic(Intrinsic::BlackBox(bb_func)) => { // Slices are represented as a tuple of (length, slice contents). @@ -640,7 +665,7 @@ impl<'block> BrilligBlock<'block> { func_id: FunctionId, arguments: &[ValueId], dfg: &DataFlowGraph, - instruction_id: InstructionId, + result_ids: &[ValueId], ) { // Convert the arguments to registers casting those to the types of the receiving function let argument_registers: Vec = arguments @@ -648,8 +673,6 @@ impl<'block> BrilligBlock<'block> { .flat_map(|argument_id| self.convert_ssa_value(*argument_id, dfg).extract_registers()) .collect(); - let result_ids = dfg.instruction_results(instruction_id); - // Create label for the function that will be called let label_of_function_to_call = FunctionContext::function_id_to_function_label(func_id); @@ -1233,7 +1256,23 @@ impl<'block> BrilligBlock<'block> { new_variable } } - Value::Function(_) | Value::Intrinsic(_) | Value::ForeignFunction(_) => { + Value::Function(_) => { + // For the debugger instrumentation we want to allow passing + // around values representing function pointers, even though + // there is no interaction with the function possible given that + // value. 
+ let new_variable = + self.variables.allocate_constant(self.brillig_context, value_id, dfg); + let register_index = new_variable.extract_register(); + + self.brillig_context.const_instruction( + register_index, + value_id.to_usize().into(), + 32, + ); + new_variable + } + Value::Intrinsic(_) | Value::ForeignFunction(_) => { todo!("ICE: Cannot convert value {value:?}") } } @@ -1419,6 +1458,8 @@ pub(crate) fn convert_ssa_binary_op_to_brillig_binary_op( BinaryOp::And => BinaryIntOp::And, BinaryOp::Or => BinaryIntOp::Or, BinaryOp::Xor => BinaryIntOp::Xor, + BinaryOp::Shl => BinaryIntOp::Shl, + BinaryOp::Shr => BinaryIntOp::Shr, }; BrilligBinaryOp::Integer { op: operation, bit_size } diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index 49d40ca3697..cbb3049a904 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -173,7 +173,10 @@ pub(crate) fn allocate_value( let typ = dfg.type_of_value(value_id); match typ { - Type::Numeric(_) | Type::Reference(_) => { + Type::Numeric(_) | Type::Reference(_) | Type::Function => { + // NB. 
function references are converted to a constant when + // translating from SSA to Brillig (to allow for debugger + // instrumentation to work properly) let register = brillig_context.allocate_register(); BrilligVariable::Simple(register) } @@ -199,8 +202,5 @@ pub(crate) fn allocate_value( rc: rc_register, }) } - Type::Function => { - unreachable!("ICE: Function values should have been removed from the SSA") - } } } diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 6ee2e0c0b9f..dd57f0c4426 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -74,9 +74,8 @@ impl DebugToString for BinaryIntOp { BinaryIntOp::And => "&&".into(), BinaryIntOp::Or => "||".into(), BinaryIntOp::Xor => "^".into(), - BinaryIntOp::Shl | BinaryIntOp::Shr => { - unreachable!("bit shift should have been replaced") - } + BinaryIntOp::Shl => "<<".into(), + BinaryIntOp::Shr => ">>".into(), } } } diff --git a/noir/compiler/noirc_evaluator/src/errors.rs b/noir/compiler/noirc_evaluator/src/errors.rs index 73b6e671bd5..ed94adac28e 100644 --- a/noir/compiler/noirc_evaluator/src/errors.rs +++ b/noir/compiler/noirc_evaluator/src/errors.rs @@ -46,6 +46,8 @@ pub enum RuntimeError { NestedSlice { call_stack: CallStack }, #[error("Big Integer modulus do no match")] BigIntModulus { call_stack: CallStack }, + #[error("Slices cannot be returned from an unconstrained runtime to a constrained runtime")] + UnconstrainedSliceReturnToConstrained { call_stack: CallStack }, } // We avoid showing the actual lhs and rhs since most of the time they are just 0 @@ -135,7 +137,8 @@ impl RuntimeError { | RuntimeError::IntegerOutOfBounds { call_stack, .. } | RuntimeError::UnsupportedIntegerSize { call_stack, .. } | RuntimeError::NestedSlice { call_stack, .. } - | RuntimeError::BigIntModulus { call_stack, .. 
} => call_stack, + | RuntimeError::BigIntModulus { call_stack, .. } + | RuntimeError::UnconstrainedSliceReturnToConstrained { call_stack } => call_stack, } } } @@ -160,10 +163,21 @@ impl RuntimeError { noirc_errors::Span::inclusive(0, 0) ) } + RuntimeError::UnknownLoopBound { .. } => { + let primary_message = self.to_string(); + let location = + self.call_stack().back().expect("Expected RuntimeError to have a location"); + + Diagnostic::simple_error( + primary_message, + "If attempting to fetch the length of a slice, try converting to an array. Slices only use dynamic lengths.".to_string(), + location.span, + ) + } _ => { let message = self.to_string(); let location = - self.call_stack().back().expect("Expected RuntimeError to have a location"); + self.call_stack().back().unwrap_or_else(|| panic!("Expected RuntimeError to have a location. Error message: {message}")); Diagnostic::simple_error(message, String::new(), location.span) } diff --git a/noir/compiler/noirc_evaluator/src/ssa.rs b/noir/compiler/noirc_evaluator/src/ssa.rs index e1a2e0d3564..d19c4467235 100644 --- a/noir/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/compiler/noirc_evaluator/src/ssa.rs @@ -40,12 +40,13 @@ pub(crate) fn optimize_into_acir( program: Program, print_ssa_passes: bool, print_brillig_trace: bool, + force_brillig_output: bool, ) -> Result { let abi_distinctness = program.return_distinctness; let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); let ssa_gen_span_guard = ssa_gen_span.enter(); - let ssa = SsaBuilder::new(program, print_ssa_passes)? + let ssa = SsaBuilder::new(program, print_ssa_passes, force_brillig_output)? .run_pass(Ssa::defunctionalize, "After Defunctionalization:") .run_pass(Ssa::inline_functions, "After Inlining:") // Run mem2reg with the CFG separated into blocks @@ -59,9 +60,14 @@ pub(crate) fn optimize_into_acir( // and this pass is missed, slice merging will fail inside of flattening. 
.run_pass(Ssa::mem2reg, "After Mem2Reg:") .run_pass(Ssa::flatten_cfg, "After Flattening:") + .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts:") // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores .run_pass(Ssa::mem2reg, "After Mem2Reg:") .run_pass(Ssa::fold_constants, "After Constant Folding:") + .run_pass( + Ssa::fold_constants_using_constraints, + "After Constant Folding With Constraint Info:", + ) .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:") .finish(); @@ -83,11 +89,18 @@ pub fn create_circuit( program: Program, enable_ssa_logging: bool, enable_brillig_logging: bool, + force_brillig_output: bool, ) -> Result<(Circuit, DebugInfo, Vec, Vec, Vec), RuntimeError> { + let debug_variables = program.debug_variables.clone(); + let debug_types = program.debug_types.clone(); let func_sig = program.main_function_signature.clone(); let recursive = program.recursive; - let mut generated_acir = - optimize_into_acir(program, enable_ssa_logging, enable_brillig_logging)?; + let mut generated_acir = optimize_into_acir( + program, + enable_ssa_logging, + enable_brillig_logging, + force_brillig_output, + )?; let opcodes = generated_acir.take_opcodes(); let current_witness_index = generated_acir.current_witness_index().0; let GeneratedAcir { @@ -122,7 +135,7 @@ pub fn create_circuit( .map(|(index, locations)| (index, locations.into_iter().collect())) .collect(); - let mut debug_info = DebugInfo::new(locations); + let mut debug_info = DebugInfo::new(locations, debug_variables, debug_types); // Perform any ACIR-level optimizations let (optimized_circuit, transformation_map) = acvm::compiler::optimize(circuit); @@ -172,8 +185,12 @@ struct SsaBuilder { } impl SsaBuilder { - fn new(program: Program, print_ssa_passes: bool) -> Result { - let ssa = ssa_gen::generate_ssa(program)?; + fn new( + program: Program, + print_ssa_passes: bool, + force_brillig_runtime: bool, + ) -> Result { + let ssa = 
ssa_gen::generate_ssa(program, force_brillig_runtime)?; Ok(SsaBuilder { print_ssa_passes, ssa }.print("Initial SSA:")) } diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir.rs index 1ddbae0f339..090d5bb0a83 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir.rs @@ -1,4 +1,3 @@ pub(crate) mod acir_variable; pub(crate) mod big_int; pub(crate) mod generated_acir; -pub(crate) mod sort; diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 94e62e76746..912447721cd 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -319,6 +319,7 @@ impl AcirContext { vec![AcirValue::Var(var, AcirType::field())], vec![AcirType::field()], true, + false, )?; let inverted_var = Self::expect_one_var(results); @@ -631,18 +632,22 @@ impl AcirContext { bit_size: u32, predicate: AcirVar, ) -> Result<(AcirVar, AcirVar), RuntimeError> { - // lhs = rhs * q + r - // - // If predicate is zero, `q_witness` and `r_witness` will be 0 let zero = self.add_constant(FieldElement::zero()); - if self.var_to_expression(predicate)?.is_zero() { - return Ok((zero, zero)); - } + let one = self.add_constant(FieldElement::one()); + + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + let predicate_expr = self.var_to_expression(predicate)?; + + match (lhs_expr.to_const(), rhs_expr.to_const(), predicate_expr.to_const()) { + // If predicate is zero, `quotient_var` and `remainder_var` will be 0. 
+ (_, _, Some(predicate_const)) if predicate_const.is_zero() => { + return Ok((zero, zero)); + } - match (self.var_to_expression(lhs)?.to_const(), self.var_to_expression(rhs)?.to_const()) { // If `lhs` and `rhs` are known constants then we can calculate the result at compile time. // `rhs` must be non-zero. - (Some(lhs_const), Some(rhs_const)) if rhs_const != FieldElement::zero() => { + (Some(lhs_const), Some(rhs_const), _) if rhs_const != FieldElement::zero() => { let quotient = lhs_const.to_u128() / rhs_const.to_u128(); let remainder = lhs_const.to_u128() - quotient * rhs_const.to_u128(); @@ -652,36 +657,29 @@ impl AcirContext { } // If `rhs` is one then the division is a noop. - (_, Some(rhs_const)) if rhs_const == FieldElement::one() => { + (_, Some(rhs_const), _) if rhs_const == FieldElement::one() => { return Ok((lhs, zero)); } - _ => (), - } - - // Check that we the rhs is not zero. - // Otherwise, when executing the brillig quotient we may attempt to divide by zero, causing a VM panic. - // - // When the predicate is 0, the equation always passes. - // When the predicate is 1, the rhs must not be 0. - let one = self.add_constant(FieldElement::one()); + // After this point, we cannot perform the division at compile-time. + // + // We need to check that the rhs is not zero, otherwise when executing the brillig quotient, + // we may attempt to divide by zero and cause a VM panic. + // + // When the predicate is 0, the division always succeeds (as it is skipped). + // When the predicate is 1, the rhs must not be 0. - let rhs_expr = self.var_to_expression(rhs)?; - let rhs_is_nonzero_const = rhs_expr.is_const() && !rhs_expr.is_zero(); - if !rhs_is_nonzero_const { - match self.var_to_expression(predicate)?.to_const() { - Some(predicate) if predicate.is_one() => { - // If the predicate is known to be active, we simply assert that an inverse must exist. - // This implies that `rhs != 0`. 
- let _inverse = self.inv_var(rhs, one)?; - } + // If the predicate is known to be active, we simply assert that an inverse must exist. + // This implies that `rhs != 0`. + (_, _, Some(predicate_const)) if predicate_const.is_one() => { + let _inverse = self.inv_var(rhs, one)?; + } - _ => { - // Otherwise we must handle both potential cases. - let rhs_is_zero = self.eq_var(rhs, zero)?; - let rhs_is_not_zero = self.mul_var(rhs_is_zero, predicate)?; - self.assert_eq_var(rhs_is_not_zero, zero, None)?; - } + // Otherwise we must handle both potential cases. + _ => { + let rhs_is_zero = self.eq_var(rhs, zero)?; + let rhs_is_zero_and_predicate_active = self.mul_var(rhs_is_zero, predicate)?; + self.assert_eq_var(rhs_is_zero_and_predicate_active, zero, None)?; } } @@ -689,7 +687,7 @@ impl AcirContext { let mut max_q_bits = bit_size; let mut max_rhs_bits = bit_size; // when rhs is constant, we can better estimate the maximum bit sizes - if let Some(rhs_const) = self.var_to_expression(rhs)?.to_const() { + if let Some(rhs_const) = rhs_expr.to_const() { max_rhs_bits = rhs_const.num_bits(); if max_rhs_bits != 0 { if max_rhs_bits > bit_size { @@ -699,18 +697,6 @@ impl AcirContext { } } - // Avoids overflow: 'q*b+r < 2^max_q_bits*2^max_rhs_bits' - let mut avoid_overflow = false; - if max_q_bits + max_rhs_bits >= FieldElement::max_num_bits() - 1 { - // q*b+r can overflow; we avoid this when b is constant - if self.var_to_expression(rhs)?.is_const() { - avoid_overflow = true; - } else { - // we do not support unbounded division - unreachable!("overflow in unbounded division"); - } - } - let [q_value, r_value]: [AcirValue; 2] = self .brillig( predicate, @@ -721,6 +707,7 @@ impl AcirContext { ], vec![AcirType::unsigned(max_q_bits), AcirType::unsigned(max_rhs_bits)], true, + false, )? 
.try_into() .expect("quotient only returns two values"); @@ -761,7 +748,19 @@ impl AcirContext { let lhs_constraint = self.mul_var(lhs, predicate)?; self.assert_eq_var(lhs_constraint, rhs_constraint, None)?; - if let Some(rhs_const) = self.var_to_expression(rhs)?.to_const() { + // Avoids overflow: 'q*b+r < 2^max_q_bits*2^max_rhs_bits' + let mut avoid_overflow = false; + if max_q_bits + max_rhs_bits >= FieldElement::max_num_bits() - 1 { + // q*b+r can overflow; we avoid this when b is constant + if rhs_expr.is_const() { + avoid_overflow = true; + } else { + // we do not support unbounded division + unreachable!("overflow in unbounded division"); + } + } + + if let Some(rhs_const) = rhs_expr.to_const() { if avoid_overflow { // we compute q0 = p/rhs let rhs_big = BigUint::from_bytes_be(&rhs_const.to_be_bytes()); @@ -1442,6 +1441,7 @@ impl AcirContext { inputs: Vec, outputs: Vec, attempt_execution: bool, + unsafe_return_values: bool, ) -> Result, RuntimeError> { let b_inputs = try_vecmap(inputs, |i| -> Result<_, InternalError> { match i { @@ -1514,10 +1514,13 @@ impl AcirContext { Ok(()) } - for output_var in &outputs_var { - range_constraint_value(self, output_var)?; + // This is a hack to ensure that if we're compiling a brillig entrypoint function then + // we don't also add a number of range constraints. 
+ if !unsafe_return_values { + for output_var in &outputs_var { + range_constraint_value(self, output_var)?; + } } - Ok(outputs_var) } @@ -1634,50 +1637,6 @@ impl AcirContext { AcirValue::Array(array_values) } - /// Generate output variables that are constrained to be the sorted inputs - /// The outputs are the sorted inputs iff - /// outputs are sorted and - /// outputs are a permutation of the inputs - pub(crate) fn sort( - &mut self, - inputs: Vec, - bit_size: u32, - predicate: AcirVar, - ) -> Result, RuntimeError> { - let len = inputs.len(); - // Convert the inputs into expressions - let inputs_expr = try_vecmap(inputs, |input| self.var_to_expression(input))?; - // Generate output witnesses - let outputs_witness = vecmap(0..len, |_| self.acir_ir.next_witness_index()); - let output_expr = - vecmap(&outputs_witness, |witness_index| Expression::from(*witness_index)); - let outputs_var = vecmap(&outputs_witness, |witness_index| { - self.add_data(AcirVarData::Witness(*witness_index)) - }); - - // Enforce the outputs to be a permutation of the inputs - self.acir_ir.permutation(&inputs_expr, &output_expr)?; - - // Enforce the outputs to be sorted - for i in 0..(outputs_var.len() - 1) { - self.less_than_constrain(outputs_var[i], outputs_var[i + 1], bit_size, predicate)?; - } - - Ok(outputs_var) - } - - /// Constrain lhs to be less than rhs - fn less_than_constrain( - &mut self, - lhs: AcirVar, - rhs: AcirVar, - bit_size: u32, - predicate: AcirVar, - ) -> Result<(), RuntimeError> { - let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size)?; - self.maybe_eq_predicate(lhs_less_than_rhs, predicate) - } - /// Returns a Variable that is constrained to be the result of reading /// from the memory `block_id` at the given `index`. 
pub(crate) fn read_from_memory( diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 4d88a449e1c..1d05e998b13 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -74,6 +74,10 @@ impl GeneratedAcir { } } + pub(crate) fn opcodes(&self) -> &[AcirOpcode] { + &self.opcodes + } + pub(crate) fn take_opcodes(&mut self) -> Vec { std::mem::take(&mut self.opcodes) } @@ -563,40 +567,6 @@ impl GeneratedAcir { } } - /// Generate gates and control bits witnesses which ensure that out_expr is a permutation of in_expr - /// Add the control bits of the sorting network used to generate the constrains - /// into the PermutationSort directive for solving in ACVM. - /// The directive is solving the control bits so that the outputs are sorted in increasing order. - /// - /// n.b. A sorting network is a predetermined set of switches, - /// the control bits indicate the configuration of each switch: false for pass-through and true for cross-over - pub(crate) fn permutation( - &mut self, - in_expr: &[Expression], - out_expr: &[Expression], - ) -> Result<(), RuntimeError> { - let mut bits_len = 0; - for i in 0..in_expr.len() { - bits_len += ((i + 1) as f32).log2().ceil() as u32; - } - - let bits = vecmap(0..bits_len, |_| self.next_witness_index()); - let inputs = in_expr.iter().map(|a| vec![a.clone()]).collect(); - self.push_opcode(AcirOpcode::Directive(Directive::PermutationSort { - inputs, - tuple: 1, - bits: bits.clone(), - sort_by: vec![0], - })); - let (_, b) = self.permutation_layer(in_expr, &bits, false)?; - - // Constrain the network output to out_expr - for (b, o) in b.iter().zip(out_expr) { - self.push_opcode(AcirOpcode::AssertZero(b - o)); - } - Ok(()) - } - pub(crate) fn last_acir_opcode_location(&self) -> OpcodeLocation { 
OpcodeLocation::Acir(self.opcodes.len() - 1) } diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/sort.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/sort.rs deleted file mode 100644 index 52640d32337..00000000000 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/sort.rs +++ /dev/null @@ -1,88 +0,0 @@ -use crate::errors::InternalError; - -use super::generated_acir::GeneratedAcir; -use acvm::acir::native_types::{Expression, Witness}; - -impl GeneratedAcir { - // Generates gates for a sorting network - // returns witness corresponding to the network configuration and the expressions corresponding to the network output - // in_expr: inputs of the sorting network - // if generate_witness is false, it uses the witness provided in bits instead of generating them - // in both cases it returns the witness of the network configuration - // if generate_witness is true, bits is ignored - pub(crate) fn permutation_layer( - &mut self, - in_expr: &[Expression], - bits: &[Witness], - generate_witness: bool, - ) -> Result<(Vec, Vec), InternalError> { - let n = in_expr.len(); - if n == 1 { - return Ok((Vec::new(), in_expr.to_vec())); - } - let n1 = n / 2; - - // witness for the input switches - let mut conf = iter_extended::vecmap(0..n1, |i| { - if generate_witness { - self.next_witness_index() - } else { - bits[i] - } - }); - - // compute expressions after the input switches - // If inputs are a1,a2, and the switch value is c, then we compute expressions b1,b2 where - // b1 = a1+q, b2 = a2-q, q = c(a2-a1) - let mut in_sub1 = Vec::new(); - let mut in_sub2 = Vec::new(); - for i in 0..n1 { - //q = c*(a2-a1); - let intermediate = self.mul_with_witness( - &Expression::from(conf[i]), - &(&in_expr[2 * i + 1] - &in_expr[2 * i]), - ); - //b1=a1+q - in_sub1.push(&intermediate + &in_expr[2 * i]); - //b2=a2-q - in_sub2.push(&in_expr[2 * i + 1] - &intermediate); - } - if n % 2 == 1 { - in_sub2.push(match in_expr.last() { - Some(in_expr) => 
in_expr.clone(), - None => { - return Err(InternalError::EmptyArray { call_stack: self.call_stack.clone() }) - } - }); - } - let mut out_expr = Vec::new(); - // compute results for the sub networks - let bits1 = if generate_witness { bits } else { &bits[n1 + (n - 1) / 2..] }; - let (w1, b1) = self.permutation_layer(&in_sub1, bits1, generate_witness)?; - let bits2 = if generate_witness { bits } else { &bits[n1 + (n - 1) / 2 + w1.len()..] }; - let (w2, b2) = self.permutation_layer(&in_sub2, bits2, generate_witness)?; - // apply the output switches - for i in 0..(n - 1) / 2 { - let c = if generate_witness { self.next_witness_index() } else { bits[n1 + i] }; - conf.push(c); - let intermediate = self.mul_with_witness(&Expression::from(c), &(&b2[i] - &b1[i])); - out_expr.push(&intermediate + &b1[i]); - out_expr.push(&b2[i] - &intermediate); - } - if n % 2 == 0 { - out_expr.push(match b1.last() { - Some(b1) => b1.clone(), - None => { - return Err(InternalError::EmptyArray { call_stack: self.call_stack.clone() }) - } - }); - } - out_expr.push(match b2.last() { - Some(b2) => b2.clone(), - None => return Err(InternalError::EmptyArray { call_stack: self.call_stack.clone() }), - }); - conf.extend(w1); - conf.extend(w2); - Ok((conf, out_expr)) - } -} diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 120c5bf25df..9603377a107 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -7,6 +7,7 @@ use std::fmt::Debug; use self::acir_ir::acir_variable::{AcirContext, AcirType, AcirVar}; use super::function_builder::data_bus::DataBus; use super::ir::dfg::CallStack; +use super::ir::instruction::ConstrainError; use super::{ ir::{ dfg::DataFlowGraph, @@ -269,6 +270,7 @@ impl Context { inputs, outputs, false, + true, )?; let output_vars: Vec<_> = output_values .iter() @@ -279,7 +281,16 @@ impl Context { for acir_var in output_vars { 
self.acir_context.return_var(acir_var)?; } - Ok(self.acir_context.finish(witness_inputs, Vec::new())) + + let generated_acir = self.acir_context.finish(witness_inputs, Vec::new()); + + assert_eq!( + generated_acir.opcodes().len(), + 1, + "Unconstrained programs should only generate a single opcode but multiple were emitted" + ); + + Ok(generated_acir) } /// Adds and binds `AcirVar`s for each numeric block parameter or block parameter array element. @@ -413,15 +424,94 @@ impl Context { let lhs = self.convert_numeric_value(*lhs, dfg)?; let rhs = self.convert_numeric_value(*rhs, dfg)?; - self.acir_context.assert_eq_var(lhs, rhs, assert_message.clone())?; + let assert_message = if let Some(error) = assert_message { + match error.as_ref() { + ConstrainError::Static(string) => Some(string.clone()), + ConstrainError::Dynamic(call_instruction) => { + self.convert_ssa_call(call_instruction, dfg, ssa, brillig, &[])?; + None + } + } + } else { + None + }; + + self.acir_context.assert_eq_var(lhs, rhs, assert_message)?; } Instruction::Cast(value_id, _) => { let acir_var = self.convert_numeric_value(*value_id, dfg)?; self.define_result_var(dfg, instruction_id, acir_var); } - Instruction::Call { func, arguments } => { + Instruction::Call { .. 
} => { let result_ids = dfg.instruction_results(instruction_id); - match &dfg[*func] { + warnings.extend(self.convert_ssa_call( + instruction, + dfg, + ssa, + brillig, + result_ids, + )?); + } + Instruction::Not(value_id) => { + let (acir_var, typ) = match self.convert_value(*value_id, dfg) { + AcirValue::Var(acir_var, typ) => (acir_var, typ), + _ => unreachable!("NOT is only applied to numerics"), + }; + let result_acir_var = self.acir_context.not_var(acir_var, typ)?; + self.define_result_var(dfg, instruction_id, result_acir_var); + } + Instruction::Truncate { value, bit_size, max_bit_size } => { + let result_acir_var = + self.convert_ssa_truncate(*value, *bit_size, *max_bit_size, dfg)?; + self.define_result_var(dfg, instruction_id, result_acir_var); + } + Instruction::EnableSideEffects { condition } => { + let acir_var = self.convert_numeric_value(*condition, dfg)?; + self.current_side_effects_enabled_var = acir_var; + } + Instruction::ArrayGet { .. } | Instruction::ArraySet { .. } => { + self.handle_array_operation(instruction_id, dfg, last_array_uses)?; + } + Instruction::Allocate => { + unreachable!("Expected all allocate instructions to be removed before acir_gen") + } + Instruction::Store { .. } => { + unreachable!("Expected all store instructions to be removed before acir_gen") + } + Instruction::Load { .. } => { + unreachable!("Expected all load instructions to be removed before acir_gen") + } + Instruction::IncrementRc { .. } => { + // Do nothing. 
Only Brillig needs to worry about reference counted arrays + } + Instruction::RangeCheck { value, max_bit_size, assert_message } => { + let acir_var = self.convert_numeric_value(*value, dfg)?; + self.acir_context.range_constrain_var( + acir_var, + &NumericType::Unsigned { bit_size: *max_bit_size }, + assert_message.clone(), + )?; + } + } + + self.acir_context.set_call_stack(CallStack::new()); + Ok(warnings) + } + + fn convert_ssa_call( + &mut self, + instruction: &Instruction, + dfg: &DataFlowGraph, + ssa: &Ssa, + brillig: &Brillig, + result_ids: &[ValueId], + ) -> Result, RuntimeError> { + let mut warnings = Vec::new(); + + match instruction { + Instruction::Call { func, arguments } => { + let function_value = &dfg[*func]; + match function_value { Value::Function(id) => { let func = &ssa.functions[id]; match func.runtime() { @@ -429,13 +519,21 @@ impl Context { "expected an intrinsic/brillig call, but found {func:?}. All ACIR methods should be inlined" ), RuntimeType::Brillig => { + // Check that we are not attempting to return a slice from + // an unconstrained runtime to a constrained runtime + for result_id in result_ids { + if dfg.type_of_value(*result_id).contains_slice_element() { + return Err(RuntimeError::UnconstrainedSliceReturnToConstrained { call_stack: self.acir_context.get_call_stack() }) + } + } + let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); let code = self.gen_brillig_for(func, brillig)?; let outputs: Vec = vecmap(result_ids, |result_id| dfg.type_of_value(*result_id).into()); - let output_values = self.acir_context.brillig(self.current_side_effects_enabled_var, code, inputs, outputs, true)?; + let output_values = self.acir_context.brillig(self.current_side_effects_enabled_var, code, inputs, outputs, true, false)?; // Compiler sanity check assert_eq!(result_ids.len(), output_values.len(), "ICE: The number of Brillig output values should match the result ids in SSA"); @@ -495,51 +593,11 @@ impl Context { 
Value::ForeignFunction(_) => unreachable!( "All `oracle` methods should be wrapped in an unconstrained fn" ), - _ => unreachable!("expected calling a function"), + _ => unreachable!("expected calling a function but got {function_value:?}"), } } - Instruction::Not(value_id) => { - let (acir_var, typ) = match self.convert_value(*value_id, dfg) { - AcirValue::Var(acir_var, typ) => (acir_var, typ), - _ => unreachable!("NOT is only applied to numerics"), - }; - let result_acir_var = self.acir_context.not_var(acir_var, typ)?; - self.define_result_var(dfg, instruction_id, result_acir_var); - } - Instruction::Truncate { value, bit_size, max_bit_size } => { - let result_acir_var = - self.convert_ssa_truncate(*value, *bit_size, *max_bit_size, dfg)?; - self.define_result_var(dfg, instruction_id, result_acir_var); - } - Instruction::EnableSideEffects { condition } => { - let acir_var = self.convert_numeric_value(*condition, dfg)?; - self.current_side_effects_enabled_var = acir_var; - } - Instruction::ArrayGet { .. } | Instruction::ArraySet { .. } => { - self.handle_array_operation(instruction_id, dfg, last_array_uses)?; - } - Instruction::Allocate => { - unreachable!("Expected all allocate instructions to be removed before acir_gen") - } - Instruction::Store { .. } => { - unreachable!("Expected all store instructions to be removed before acir_gen") - } - Instruction::Load { .. } => { - unreachable!("Expected all load instructions to be removed before acir_gen") - } - Instruction::IncrementRc { .. } => { - // Do nothing. 
Only Brillig needs to worry about reference counted arrays - } - Instruction::RangeCheck { value, max_bit_size, assert_message } => { - let acir_var = self.convert_numeric_value(*value, dfg)?; - self.acir_context.range_constrain_var( - acir_var, - &NumericType::Unsigned { bit_size: *max_bit_size }, - assert_message.clone(), - )?; - } + _ => unreachable!("expected calling a call instruction"), } - self.acir_context.set_call_stack(CallStack::new()); Ok(warnings) } @@ -621,11 +679,11 @@ impl Context { instruction: InstructionId, dfg: &DataFlowGraph, index: ValueId, - array: ValueId, + array_id: ValueId, store_value: Option, ) -> Result { let index_const = dfg.get_numeric_constant(index); - let value_type = dfg.type_of_value(array); + let value_type = dfg.type_of_value(array_id); // Compiler sanity checks assert!( !value_type.is_nested_slice(), @@ -635,7 +693,7 @@ impl Context { unreachable!("ICE: expected array or slice type"); }; - match self.convert_value(array, dfg) { + match self.convert_value(array_id, dfg) { AcirValue::Var(acir_var, _) => { return Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), @@ -1328,7 +1386,13 @@ impl Context { AcirValue::Array(elements.collect()) } Value::Intrinsic(..) => todo!(), - Value::Function(..) => unreachable!("ICE: All functions should have been inlined"), + Value::Function(function_id) => { + // This conversion is for debugging support only, to allow the + // debugging instrumentation code to work. Taking the reference + // of a function in ACIR is useless. + let id = self.acir_context.add_constant(function_id.to_usize()); + AcirValue::Var(id, AcirType::field()) + } Value::ForeignFunction(_) => unimplemented!( "Oracle calls directly in constrained functions are not yet available." 
), @@ -1419,6 +1483,9 @@ impl Context { bit_count, self.current_side_effects_enabled_var, ), + BinaryOp::Shl | BinaryOp::Shr => unreachable!( + "ICE - bit shift operators do not exist in ACIR and should have been replaced" + ), } } @@ -1563,33 +1630,6 @@ impl Context { self.acir_context.bit_decompose(endian, field, bit_size, result_type) } - Intrinsic::Sort => { - let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); - // We flatten the inputs and retrieve the bit_size of the elements - let mut input_vars = Vec::new(); - let mut bit_size = 0; - for input in inputs { - for (var, typ) in input.flatten() { - input_vars.push(var); - if bit_size == 0 { - bit_size = typ.bit_size(); - } else { - assert_eq!( - bit_size, - typ.bit_size(), - "cannot sort element of different bit size" - ); - } - } - } - // Generate the sorted output variables - let out_vars = self - .acir_context - .sort(input_vars, bit_size, self.current_side_effects_enabled_var) - .expect("Could not sort"); - - Ok(self.convert_vars_to_values(out_vars, dfg, result_ids)) - } Intrinsic::ArrayLen => { let len = match self.convert_value(arguments[0], dfg) { AcirValue::Var(_, _) => unreachable!("Non-array passed to array.len() method"), diff --git a/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 44be423be10..7ad9a4b4031 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -18,8 +18,7 @@ use super::{ basic_block::BasicBlock, dfg::{CallStack, InsertInstructionResult}, function::RuntimeType, - instruction::{Endian, InstructionId, Intrinsic}, - types::NumericType, + instruction::{ConstrainError, InstructionId, Intrinsic}, }, ssa_gen::Ssa, }; @@ -250,7 +249,7 @@ impl FunctionBuilder { &mut self, lhs: ValueId, rhs: ValueId, - assert_message: Option, + assert_message: Option>, ) { 
self.insert_instruction(Instruction::Constrain(lhs, rhs, assert_message), None); } @@ -279,108 +278,6 @@ impl FunctionBuilder { self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)).results() } - /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs - /// and truncate the result to bit_size - pub(crate) fn insert_wrapping_shift_left( - &mut self, - lhs: ValueId, - rhs: ValueId, - bit_size: u32, - ) -> ValueId { - let base = self.field_constant(FieldElement::from(2_u128)); - let typ = self.current_function.dfg.type_of_value(lhs); - let (max_bit, pow) = - if let Some(rhs_constant) = self.current_function.dfg.get_numeric_constant(rhs) { - // Happy case is that we know precisely by how many bits the the integer will - // increase: lhs_bit_size + rhs - let bit_shift_size = rhs_constant.to_u128() as u32; - - let (rhs_bit_size_pow_2, overflows) = 2_u128.overflowing_pow(bit_shift_size); - if overflows { - assert!(bit_size < 128, "ICE - shift left with big integers are not supported"); - if bit_size < 128 { - let zero = self.numeric_constant(FieldElement::zero(), typ); - return InsertInstructionResult::SimplifiedTo(zero).first(); - } - } - let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ); - - let max_lhs_bits = self.current_function.dfg.get_value_max_num_bits(lhs); - - (max_lhs_bits + bit_shift_size, pow) - } else { - // we use a predicate to nullify the result in case of overflow - let bit_size_var = - self.numeric_constant(FieldElement::from(bit_size as u128), typ.clone()); - let overflow = self.insert_binary(rhs, BinaryOp::Lt, bit_size_var); - let predicate = self.insert_cast(overflow, typ.clone()); - // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value - let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); - let pow = self.pow(base, rhs_unsigned); - let pow = self.insert_cast(pow, typ); - (FieldElement::max_num_bits(), 
self.insert_binary(predicate, BinaryOp::Mul, pow)) - }; - - if max_bit <= bit_size { - self.insert_binary(lhs, BinaryOp::Mul, pow) - } else { - let result = self.insert_binary(lhs, BinaryOp::Mul, pow); - self.insert_truncate(result, bit_size, max_bit) - } - } - - /// Insert ssa instructions which computes lhs >> rhs by doing lhs/2^rhs - pub(crate) fn insert_shift_right( - &mut self, - lhs: ValueId, - rhs: ValueId, - bit_size: u32, - ) -> ValueId { - let base = self.field_constant(FieldElement::from(2_u128)); - // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value - let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); - let pow = self.pow(base, rhs_unsigned); - self.insert_binary(lhs, BinaryOp::Div, pow) - } - - /// Computes lhs^rhs via square&multiply, using the bits decomposition of rhs - /// Pseudo-code of the computation: - /// let mut r = 1; - /// let rhs_bits = to_bits(rhs); - /// for i in 1 .. bit_size + 1 { - /// let r_squared = r * r; - /// let b = rhs_bits[bit_size - i]; - /// r = (r_squared * lhs * b) + (1 - b) * r_squared; - /// } - pub(crate) fn pow(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { - let typ = self.current_function.dfg.type_of_value(rhs); - if let Type::Numeric(NumericType::Unsigned { bit_size }) = typ { - let to_bits = self.import_intrinsic_id(Intrinsic::ToBits(Endian::Little)); - let length = self.field_constant(FieldElement::from(bit_size as i128)); - let result_types = - vec![Type::field(), Type::Array(Rc::new(vec![Type::bool()]), bit_size as usize)]; - let rhs_bits = self.insert_call(to_bits, vec![rhs, length], result_types); - let rhs_bits = rhs_bits[1]; - let one = self.field_constant(FieldElement::one()); - let mut r = one; - for i in 1..bit_size + 1 { - let r_squared = self.insert_binary(r, BinaryOp::Mul, r); - let a = self.insert_binary(r_squared, BinaryOp::Mul, lhs); - let idx = self.field_constant(FieldElement::from((bit_size - i) as i128)); - let b = 
self.insert_array_get(rhs_bits, idx, Type::bool()); - let not_b = self.insert_not(b); - let b = self.insert_cast(b, Type::field()); - let not_b = self.insert_cast(not_b, Type::field()); - let r1 = self.insert_binary(a, BinaryOp::Mul, b); - let r2 = self.insert_binary(r_squared, BinaryOp::Mul, not_b); - r = self.insert_binary(r1, BinaryOp::Add, r2); - } - r - } else { - unreachable!("Value must be unsigned in power operation"); - } - } - /// Insert an instruction to extract an element from an array pub(crate) fn insert_array_get( &mut self, @@ -487,9 +384,9 @@ impl FunctionBuilder { } } Type::Array(..) | Type::Slice(..) => { - self.insert_instruction(Instruction::IncrementRc { value }, None); // If there are nested arrays or slices, we wait until ArrayGet // is issued to increment the count of that array. + self.insert_instruction(Instruction::IncrementRc { value }, None); } } } diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 331a02a6974..0b6c7074e45 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -36,7 +36,6 @@ pub(crate) type InstructionId = Id; /// of this is println. #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub(crate) enum Intrinsic { - Sort, ArrayLen, AssertConstant, SlicePushBack, @@ -57,7 +56,6 @@ pub(crate) enum Intrinsic { impl std::fmt::Display for Intrinsic { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Intrinsic::Sort => write!(f, "arraysort"), Intrinsic::ArrayLen => write!(f, "array_len"), Intrinsic::AssertConstant => write!(f, "assert_constant"), Intrinsic::SlicePushBack => write!(f, "slice_push_back"), @@ -90,8 +88,7 @@ impl Intrinsic { // These apply a constraint that the input must fit into a specified number of limbs. 
Intrinsic::ToBits(_) | Intrinsic::ToRadix(_) => true, - Intrinsic::Sort - | Intrinsic::ArrayLen + Intrinsic::ArrayLen | Intrinsic::SlicePushBack | Intrinsic::SlicePushFront | Intrinsic::SlicePopBack @@ -111,7 +108,6 @@ impl Intrinsic { /// If there is no such intrinsic by that name, None is returned. pub(crate) fn lookup(name: &str) -> Option { match name { - "arraysort" => Some(Intrinsic::Sort), "array_len" => Some(Intrinsic::ArrayLen), "assert_constant" => Some(Intrinsic::AssertConstant), "apply_range_constraint" => Some(Intrinsic::ApplyRangeConstraint), @@ -157,7 +153,7 @@ pub(crate) enum Instruction { Truncate { value: ValueId, bit_size: u32, max_bit_size: u32 }, /// Constrains two values to be equal to one another. - Constrain(ValueId, ValueId, Option), + Constrain(ValueId, ValueId, Option>), /// Range constrain `value` to `max_bit_size` RangeCheck { value: ValueId, max_bit_size: u32, assert_message: Option }, @@ -326,7 +322,17 @@ impl Instruction { max_bit_size: *max_bit_size, }, Instruction::Constrain(lhs, rhs, assert_message) => { - Instruction::Constrain(f(*lhs), f(*rhs), assert_message.clone()) + // Must map the `lhs` and `rhs` first as the value `f` is moved with the closure + let lhs = f(*lhs); + let rhs = f(*rhs); + let assert_message = assert_message.as_ref().map(|error| match error.as_ref() { + ConstrainError::Dynamic(call_instr) => { + let new_instr = call_instr.map_values(f); + Box::new(ConstrainError::Dynamic(new_instr)) + } + _ => error.clone(), + }); + Instruction::Constrain(lhs, rhs, assert_message) } Instruction::Call { func, arguments } => Instruction::Call { func: f(*func), @@ -376,9 +382,14 @@ impl Instruction { | Instruction::Load { address: value } => { f(*value); } - Instruction::Constrain(lhs, rhs, _) => { + Instruction::Constrain(lhs, rhs, assert_error) => { f(*lhs); f(*rhs); + if let Some(error) = assert_error.as_ref() { + if let ConstrainError::Dynamic(call_instr) = error.as_ref() { + call_instr.for_each_value(f); + } + } } 
Instruction::Store { address, value } => { @@ -425,9 +436,10 @@ impl Instruction { // Limit optimizing ! on constants to only booleans. If we tried it on fields, // there is no Not on FieldElement, so we'd need to convert between u128. This // would be incorrect however since the extra bits on the field would not be flipped. - Value::NumericConstant { constant, typ } if *typ == Type::bool() => { - let value = constant.is_zero() as u128; - SimplifiedTo(dfg.make_constant(value.into(), Type::bool())) + Value::NumericConstant { constant, typ } if typ.is_unsigned() => { + // As we're casting to a `u128`, we need to clear out any upper bits that the NOT fills. + let value = !constant.to_u128() % (1 << typ.bit_size()); + SimplifiedTo(dfg.make_constant(value.into(), typ.clone())) } Value::Instruction { instruction, .. } => { // !!v => v @@ -441,7 +453,7 @@ impl Instruction { } } Instruction::Constrain(lhs, rhs, msg) => { - let constraints = decompose_constrain(*lhs, *rhs, msg.clone(), dfg); + let constraints = decompose_constrain(*lhs, *rhs, msg, dfg); if constraints.is_empty() { Remove } else { @@ -475,6 +487,9 @@ impl Instruction { None } Instruction::Truncate { value, bit_size, max_bit_size } => { + if bit_size == max_bit_size { + return SimplifiedTo(*value); + } if let Some((numeric_constant, typ)) = dfg.get_numeric_constant_with_type(*value) { let integer_modulus = 2_u128.pow(*bit_size); let truncated = numeric_constant.to_u128() % integer_modulus; @@ -551,6 +566,28 @@ impl Instruction { } } +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub(crate) enum ConstrainError { + // These are errors which have been hardcoded during SSA gen + Static(String), + // These are errors which come from runtime expressions specified by a Noir program + // We store an `Instruction` as we want this Instruction to be atomic in SSA with + // a constrain instruction, and leave codegen of this instruction to lower level passes. 
+ Dynamic(Instruction), +} + +impl From for ConstrainError { + fn from(value: String) -> Self { + ConstrainError::Static(value) + } +} + +impl From for Box { + fn from(value: String) -> Self { + Box::new(value.into()) + } +} + /// The possible return values for Instruction::return_types pub(crate) enum InstructionResultType { /// The result type of this instruction matches that of this operand diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs index 1cb32d94148..552be9420d9 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs @@ -38,6 +38,10 @@ pub(crate) enum BinaryOp { Or, /// Bitwise xor (^) Xor, + /// Bitshift left (<<) + Shl, + /// Bitshift right (>>) + Shr, } impl std::fmt::Display for BinaryOp { @@ -53,6 +57,8 @@ impl std::fmt::Display for BinaryOp { BinaryOp::And => write!(f, "and"), BinaryOp::Or => write!(f, "or"), BinaryOp::Xor => write!(f, "xor"), + BinaryOp::Shl => write!(f, "shl"), + BinaryOp::Shr => write!(f, "shr"), } } } @@ -215,7 +221,27 @@ impl Binary { return SimplifyResult::SimplifiedTo(zero); } } - } + BinaryOp::Shl => return SimplifyResult::None, + BinaryOp::Shr => { + // Bit shifts by constants can be treated as divisions. + if let Some(rhs_const) = rhs { + if rhs_const >= FieldElement::from(operand_type.bit_size() as u128) { + // Shifting by the full width of the operand type, any `lhs` goes to zero. + let zero = dfg.make_constant(FieldElement::zero(), operand_type); + return SimplifyResult::SimplifiedTo(zero); + } + + // `two_pow_rhs` is limited to be at most `2 ^ {operand_bitsize - 1}` so it fits in `operand_type`. 
+ let two_pow_rhs = FieldElement::from(2u128).pow(&rhs_const); + let two_pow_rhs = dfg.make_constant(two_pow_rhs, operand_type); + return SimplifyResult::SimplifiedToInstruction(Instruction::binary( + BinaryOp::Div, + self.lhs, + two_pow_rhs, + )); + } + } + }; SimplifyResult::None } } @@ -314,6 +340,8 @@ impl BinaryOp { BinaryOp::And => None, BinaryOp::Or => None, BinaryOp::Xor => None, + BinaryOp::Shl => None, + BinaryOp::Shr => None, } } @@ -329,6 +357,8 @@ impl BinaryOp { BinaryOp::Xor => |x, y| Some(x ^ y), BinaryOp::Eq => |x, y| Some((x == y) as u128), BinaryOp::Lt => |x, y| Some((x < y) as u128), + BinaryOp::Shl => |x, y| Some(x << y), + BinaryOp::Shr => |x, y| Some(x >> y), } } @@ -344,6 +374,8 @@ impl BinaryOp { BinaryOp::Xor => |x, y| Some(x ^ y), BinaryOp::Eq => |x, y| Some((x == y) as i128), BinaryOp::Lt => |x, y| Some((x < y) as i128), + BinaryOp::Shl => |x, y| Some(x << y), + BinaryOp::Shr => |x, y| Some(x >> y), } } } diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 64f81e05f77..4217a3d4710 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -1,3 +1,4 @@ +use fxhash::FxHashMap as HashMap; use std::{collections::VecDeque, rc::Rc}; use acvm::{acir::BlackBoxFunc, BlackBoxResolutionError, FieldElement}; @@ -238,7 +239,6 @@ pub(super) fn simplify_call( } } Intrinsic::BlackBox(bb_func) => simplify_black_box_func(bb_func, arguments, dfg), - Intrinsic::Sort => simplify_sort(dfg, arguments), Intrinsic::AsField => { let instruction = Instruction::Cast( arguments[0], @@ -319,6 +319,8 @@ fn simplify_slice_push_back( for elem in &arguments[2..] 
{ slice.push_back(*elem); } + let slice_size = slice.len(); + let element_size = element_type.element_size(); let new_slice = dfg.make_array(slice, element_type); let set_last_slice_value_instr = @@ -327,7 +329,11 @@ fn simplify_slice_push_back( .insert_instruction_and_results(set_last_slice_value_instr, block, None, call_stack) .first(); - let mut value_merger = ValueMerger::new(dfg, block, None, None); + let mut slice_sizes = HashMap::default(); + slice_sizes.insert(set_last_slice_value, slice_size / element_size); + slice_sizes.insert(new_slice, slice_size / element_size); + + let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes); let new_slice = value_merger.merge_values( len_not_equals_capacity, len_equals_capacity, @@ -584,20 +590,3 @@ fn simplify_signature( _ => SimplifyResult::None, } } - -fn simplify_sort(dfg: &mut DataFlowGraph, arguments: &[ValueId]) -> SimplifyResult { - match dfg.get_array_constant(arguments[0]) { - Some((input, _)) => { - let inputs: Option> = - input.iter().map(|id| dfg.get_numeric_constant(*id)).collect(); - - let Some(mut sorted_inputs) = inputs else { return SimplifyResult::None }; - sorted_inputs.sort_unstable(); - - let (_, element_type) = dfg.get_numeric_constant_with_type(input[0]).unwrap(); - let result_array = make_constant_array(dfg, sorted_inputs, element_type); - SimplifyResult::SimplifiedTo(result_array) - } - _ => SimplifyResult::None, - } -} diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs index 7fb0970c834..b4198e2cfec 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs @@ -1,13 +1,13 @@ use acvm::FieldElement; -use super::{Binary, BinaryOp, DataFlowGraph, Instruction, Type, Value, ValueId}; +use super::{Binary, BinaryOp, ConstrainError, DataFlowGraph, Instruction, Type, Value, ValueId}; /// Try to decompose 
this constrain instruction. This constraint will be broken down such that it instead constrains /// all the values which are used to compute the values which were being constrained. pub(super) fn decompose_constrain( lhs: ValueId, rhs: ValueId, - msg: Option, + msg: &Option>, dfg: &mut DataFlowGraph, ) -> Vec { let lhs = dfg.resolve(lhs); @@ -39,7 +39,7 @@ pub(super) fn decompose_constrain( // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it // will likely be removed through dead instruction elimination. - vec![Instruction::Constrain(lhs, rhs, msg)] + vec![Instruction::Constrain(lhs, rhs, msg.clone())] } Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Mul }) @@ -64,7 +64,7 @@ pub(super) fn decompose_constrain( let one = dfg.make_constant(one, Type::bool()); [ - decompose_constrain(lhs, one, msg.clone(), dfg), + decompose_constrain(lhs, one, msg, dfg), decompose_constrain(rhs, one, msg, dfg), ] .concat() @@ -92,7 +92,7 @@ pub(super) fn decompose_constrain( let zero = dfg.make_constant(zero, dfg.type_of_value(lhs)); [ - decompose_constrain(lhs, zero, msg.clone(), dfg), + decompose_constrain(lhs, zero, msg, dfg), decompose_constrain(rhs, zero, msg, dfg), ] .concat() @@ -116,11 +116,28 @@ pub(super) fn decompose_constrain( decompose_constrain(value, reversed_constant, msg, dfg) } - _ => vec![Instruction::Constrain(lhs, rhs, msg)], + _ => vec![Instruction::Constrain(lhs, rhs, msg.clone())], } } - _ => vec![Instruction::Constrain(lhs, rhs, msg)], + ( + Value::Instruction { instruction: instruction_lhs, .. }, + Value::Instruction { instruction: instruction_rhs, .. }, + ) => { + match (&dfg[*instruction_lhs], &dfg[*instruction_rhs]) { + // Casting two values just to enforce an equality on them. + // + // This is equivalent to enforcing equality on the original values. 
+ (Instruction::Cast(original_lhs, _), Instruction::Cast(original_rhs, _)) + if dfg.type_of_value(*original_lhs) == dfg.type_of_value(*original_rhs) => + { + vec![Instruction::Constrain(*original_lhs, *original_rhs, msg.clone())] + } + + _ => vec![Instruction::Constrain(lhs, rhs, msg.clone())], + } + } + _ => vec![Instruction::Constrain(lhs, rhs, msg.clone())], } } } diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/printer.rs index 2899b987c1d..9bd43fab1ff 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -9,7 +9,7 @@ use iter_extended::vecmap; use super::{ basic_block::BasicBlockId, function::Function, - instruction::{Instruction, InstructionId, TerminatorInstruction}, + instruction::{ConstrainError, Instruction, InstructionId, TerminatorInstruction}, value::ValueId, }; @@ -133,9 +133,17 @@ pub(crate) fn display_instruction( write!(f, "{} = ", value_list(function, results))?; } + display_instruction_inner(function, &function.dfg[instruction], f) +} + +fn display_instruction_inner( + function: &Function, + instruction: &Instruction, + f: &mut Formatter, +) -> Result { let show = |id| value(function, id); - match &function.dfg[instruction] { + match instruction { Instruction::Binary(binary) => { writeln!(f, "{} {}, {}", binary.operator, show(binary.lhs), show(binary.rhs)) } @@ -145,10 +153,15 @@ pub(crate) fn display_instruction( let value = show(*value); writeln!(f, "truncate {value} to {bit_size} bits, max_bit_size: {max_bit_size}",) } - Instruction::Constrain(lhs, rhs, message) => match message { - Some(message) => writeln!(f, "constrain {} == {} '{message}'", show(*lhs), show(*rhs)), - None => writeln!(f, "constrain {} == {}", show(*lhs), show(*rhs)), - }, + Instruction::Constrain(lhs, rhs, error) => { + write!(f, "constrain {} == {}", show(*lhs), show(*rhs))?; + if let Some(error) = error { + write!(f, " ")?; + 
display_constrain_error(function, error, f) + } else { + writeln!(f) + } + } Instruction::Call { func, arguments } => { writeln!(f, "call {}({})", show(*func), value_list(function, arguments)) } @@ -180,3 +193,18 @@ pub(crate) fn display_instruction( } } } + +fn display_constrain_error( + function: &Function, + error: &ConstrainError, + f: &mut Formatter, +) -> Result { + match error { + ConstrainError::Static(assert_message_string) => { + writeln!(f, "{assert_message_string:?}") + } + ConstrainError::Dynamic(assert_message_call) => { + display_instruction_inner(function, assert_message_call, f) + } + } +} diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs index f412def1e76..8dc9e67db79 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -120,7 +120,7 @@ impl Type { } Type::Slice(_) => true, Type::Numeric(_) => false, - Type::Reference(_) => false, + Type::Reference(element) => element.contains_slice_element(), Type::Function => false, } } diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs index 8a903cbd87b..b737c51d145 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs @@ -109,11 +109,11 @@ mod test { let v1 = builder.insert_binary(v0, BinaryOp::Add, one); let v2 = builder.insert_binary(v1, BinaryOp::Add, one); - builder.insert_constrain(v0, one, Some("With message".to_string())); + builder.insert_constrain(v0, one, Some("With message".to_string().into())); builder.insert_constrain(v2, three, None); builder.insert_constrain(v0, one, None); builder.insert_constrain(v1, two, None); - builder.insert_constrain(v1, two, Some("With message".to_string())); + builder.insert_constrain(v1, two, Some("With message".to_string().into())); 
builder.terminate_with_return(vec![]); let ssa = builder.finish(); @@ -137,11 +137,11 @@ mod test { assert_eq!(block.instructions().len(), 7); let expected_instructions = vec![ - Instruction::Constrain(v0, one, Some("With message".to_string())), + Instruction::Constrain(v0, one, Some("With message".to_string().into())), Instruction::Constrain(v0, one, None), Instruction::Binary(Binary { lhs: v0, rhs: one, operator: BinaryOp::Add }), Instruction::Constrain(v1, two, None), - Instruction::Constrain(v1, two, Some("With message".to_string())), + Instruction::Constrain(v1, two, Some("With message".to_string().into())), Instruction::Binary(Binary { lhs: v1, rhs: one, operator: BinaryOp::Add }), Instruction::Constrain(v2, three, None), ]; diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index addaee3ba8d..06ae4bf5202 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -22,6 +22,7 @@ //! different blocks are merged, i.e. after the [`flatten_cfg`][super::flatten_cfg] pass. use std::collections::HashSet; +use acvm::FieldElement; use iter_extended::vecmap; use crate::ssa::{ @@ -30,7 +31,8 @@ use crate::ssa::{ dfg::{DataFlowGraph, InsertInstructionResult}, function::Function, instruction::{Instruction, InstructionId}, - value::ValueId, + types::Type, + value::{Value, ValueId}, }, ssa_gen::Ssa, }; @@ -43,7 +45,20 @@ impl Ssa { #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn fold_constants(mut self) -> Ssa { for function in self.functions.values_mut() { - constant_fold(function); + constant_fold(function, false); + } + self + } + + /// Performs constant folding on each instruction. + /// + /// Also uses constraint information to inform more optimizations. + /// + /// See [`constant_folding`][self] module for more information. 
+ #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn fold_constants_using_constraints(mut self) -> Ssa { + for function in self.functions.values_mut() { + constant_fold(function, true); } self } @@ -51,8 +66,8 @@ impl Ssa { /// The structure of this pass is simple: /// Go through each block and re-insert all instructions. -fn constant_fold(function: &mut Function) { - let mut context = Context::default(); +fn constant_fold(function: &mut Function, use_constraint_info: bool) { + let mut context = Context { use_constraint_info, ..Default::default() }; context.block_queue.push(function.entry_block()); while let Some(block) = context.block_queue.pop() { @@ -67,6 +82,7 @@ fn constant_fold(function: &mut Function) { #[derive(Default)] struct Context { + use_constraint_info: bool, /// Maps pre-folded ValueIds to the new ValueIds obtained by re-inserting the instruction. visited_blocks: HashSet, block_queue: Vec, @@ -79,24 +95,43 @@ impl Context { // Cache of instructions without any side-effects along with their outputs. let mut cached_instruction_results: HashMap> = HashMap::default(); + // Contains sets of values which are constrained to be equivalent to each other. + // + // The mapping's structure is `side_effects_enabled_var => (constrained_value => simplified_value)`. + // + // We partition the maps of constrained values according to the side-effects flag at the point + // at which the values are constrained. This prevents constraints which are only sometimes enforced + // being used to modify the rest of the program. 
+ let mut constraint_simplification_mappings: HashMap> = + HashMap::default(); + let mut side_effects_enabled_var = + function.dfg.make_constant(FieldElement::one(), Type::bool()); + for instruction_id in instructions { - Self::fold_constants_into_instruction( + self.fold_constants_into_instruction( &mut function.dfg, block, instruction_id, &mut cached_instruction_results, + &mut constraint_simplification_mappings, + &mut side_effects_enabled_var, ); } self.block_queue.extend(function.dfg[block].successors()); } fn fold_constants_into_instruction( + &self, dfg: &mut DataFlowGraph, block: BasicBlockId, id: InstructionId, instruction_result_cache: &mut HashMap>, + constraint_simplification_mappings: &mut HashMap>, + side_effects_enabled_var: &mut ValueId, ) { - let instruction = Self::resolve_instruction(id, dfg); + let constraint_simplification_mapping = + constraint_simplification_mappings.entry(*side_effects_enabled_var).or_default(); + let instruction = Self::resolve_instruction(id, dfg, constraint_simplification_mapping); let old_results = dfg.instruction_results(id).to_vec(); // If a copy of this instruction exists earlier in the block, then reuse the previous results. @@ -110,15 +145,49 @@ impl Context { Self::replace_result_ids(dfg, &old_results, &new_results); - Self::cache_instruction(instruction, new_results, dfg, instruction_result_cache); + self.cache_instruction( + instruction.clone(), + new_results, + dfg, + instruction_result_cache, + constraint_simplification_mapping, + ); + + // If we just inserted an `Instruction::EnableSideEffects`, we need to update `side_effects_enabled_var` + // so that we use the correct set of constrained values in future. + if let Instruction::EnableSideEffects { condition } = instruction { + *side_effects_enabled_var = condition; + }; } /// Fetches an [`Instruction`] by its [`InstructionId`] and fully resolves its inputs. 
- fn resolve_instruction(instruction_id: InstructionId, dfg: &DataFlowGraph) -> Instruction { + fn resolve_instruction( + instruction_id: InstructionId, + dfg: &DataFlowGraph, + constraint_simplification_mapping: &HashMap, + ) -> Instruction { let instruction = dfg[instruction_id].clone(); + // Alternate between resolving `value_id` in the `dfg` and checking to see if the resolved value + // has been constrained to be equal to some simpler value in the current block. + // + // This allows us to reach a stable final `ValueId` for each instruction input as we add more + // constraints to the cache. + fn resolve_cache( + dfg: &DataFlowGraph, + cache: &HashMap, + value_id: ValueId, + ) -> ValueId { + let resolved_id = dfg.resolve(value_id); + match cache.get(&resolved_id) { + Some(cached_value) => resolve_cache(dfg, cache, *cached_value), + None => resolved_id, + } + } + // Resolve any inputs to ensure that we're comparing like-for-like instructions. - instruction.map_values(|value_id| dfg.resolve(value_id)) + instruction + .map_values(|value_id| resolve_cache(dfg, constraint_simplification_mapping, value_id)) } /// Pushes a new [`Instruction`] into the [`DataFlowGraph`] which applies any optimizations @@ -152,11 +221,42 @@ impl Context { } fn cache_instruction( + &self, instruction: Instruction, instruction_results: Vec, dfg: &DataFlowGraph, instruction_result_cache: &mut HashMap>, + constraint_simplification_mapping: &mut HashMap, ) { + if self.use_constraint_info { + // If the instruction was a constraint, then create a link between the two `ValueId`s + // to map from the more complex to the simpler value. + if let Instruction::Constrain(lhs, rhs, _) = instruction { + // These `ValueId`s should be fully resolved now. + match (&dfg[lhs], &dfg[rhs]) { + // Ignore trivial constraints + (Value::NumericConstant { .. }, Value::NumericConstant { .. }) => (), + + // Prefer replacing with constants where possible. + (Value::NumericConstant { .. 
}, _) => { + constraint_simplification_mapping.insert(rhs, lhs); + } + (_, Value::NumericConstant { .. }) => { + constraint_simplification_mapping.insert(lhs, rhs); + } + // Otherwise prefer block parameters over instruction results. + // This is as block parameters are more likely to be a single witness rather than a full expression. + (Value::Param { .. }, Value::Instruction { .. }) => { + constraint_simplification_mapping.insert(rhs, lhs); + } + (Value::Instruction { .. }, Value::Param { .. }) => { + constraint_simplification_mapping.insert(lhs, rhs); + } + (_, _) => (), + } + } + } + // If the instruction doesn't have side-effects, cache the results so we can reuse them if // the same instruction appears again later in the block. if instruction.is_pure(dfg) { @@ -336,9 +436,9 @@ mod test { // // fn main f0 { // b0(v0: u16, Field 255: Field): - // v5 = div v0, Field 255 - // v6 = truncate v5 to 8 bits, max_bit_size: 16 - // return v6 + // v6 = div v0, Field 255 + // v7 = truncate v6 to 8 bits, max_bit_size: 16 + // return v7 // } main.dfg.set_value_from_id(v1, constant); @@ -354,7 +454,7 @@ mod test { ); assert_eq!( &main.dfg[instructions[1]], - &Instruction::Truncate { value: ValueId::test_new(5), bit_size: 8, max_bit_size: 16 } + &Instruction::Truncate { value: ValueId::test_new(6), bit_size: 8, max_bit_size: 16 } ); } diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs index b7f154397a6..1f09a132132 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs @@ -14,7 +14,7 @@ use crate::ssa::{ ir::{ basic_block::BasicBlockId, function::{Function, FunctionId, RuntimeType, Signature}, - instruction::{BinaryOp, Instruction}, + instruction::{BinaryOp, ConstrainError, Instruction}, types::{NumericType, Type}, value::{Value, ValueId}, }, @@ -86,9 +86,22 @@ impl DefunctionalizationContext { let 
instruction = func.dfg[instruction_id].clone(); let mut replacement_instruction = None; // Operate on call instructions - let (target_func_id, mut arguments) = match instruction { + let (target_func_id, arguments) = match &instruction { Instruction::Call { func: target_func_id, arguments } => { - (target_func_id, arguments) + (*target_func_id, arguments) + } + // Constrain instruction potentially hold a call instruction themselves + // thus we need to account for them. + Instruction::Constrain(_, _, Some(constrain_error)) => { + if let ConstrainError::Dynamic(Instruction::Call { + func: target_func_id, + arguments, + }) = constrain_error.as_ref() + { + (*target_func_id, arguments) + } else { + continue; + } } _ => continue, }; @@ -96,6 +109,7 @@ impl DefunctionalizationContext { match func.dfg[target_func_id] { // If the target is a function used as value Value::Param { .. } | Value::Instruction { .. } => { + let mut arguments = arguments.clone(); let results = func.dfg.instruction_results(instruction_id); let signature = Signature { params: vecmap(&arguments, |param| func.dfg.type_of_value(*param)), @@ -120,7 +134,20 @@ impl DefunctionalizationContext { } _ => {} } - if let Some(new_instruction) = replacement_instruction { + if let Some(mut new_instruction) = replacement_instruction { + if let Instruction::Constrain(lhs, rhs, constrain_error_call) = instruction { + let new_error_call = if let Some(error) = constrain_error_call { + match error.as_ref() { + ConstrainError::Dynamic(_) => { + Some(Box::new(ConstrainError::Dynamic(new_instruction))) + } + _ => None, + } + } else { + None + }; + new_instruction = Instruction::Constrain(lhs, rhs, new_error_call); + } func.dfg[instruction_id] = new_instruction; } } diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 1059994b9be..943a57c1bc0 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ 
b/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -152,8 +152,10 @@ use crate::ssa::{ }; mod branch_analysis; +mod capacity_tracker; pub(crate) mod value_merger; +use capacity_tracker::SliceCapacityTracker; use value_merger::ValueMerger; impl Ssa { @@ -184,17 +186,6 @@ struct Context<'f> { /// between inlining of branches. store_values: HashMap, - /// Maps an address to the old and new value of the element at that address - /// The difference between this map and store_values is that this stores - /// the old and new value of an element from the outer block whose jmpif - /// terminator is being flattened. - /// - /// This map persists throughout the flattening process, where addresses - /// are overwritten as new stores are found. This overwriting is the desired behavior, - /// as we want the most update to date value to be stored at a given address as - /// we walk through blocks to flatten. - outer_block_stores: HashMap, - /// Stores all allocations local to the current branch. /// Since these branches are local to the current branch (ie. only defined within one branch of /// an if expression), they should not be merged with their previous value or stored value in @@ -209,6 +200,10 @@ struct Context<'f> { /// condition. If we are under multiple conditions (a nested if), the topmost condition is /// the most recent condition combined with all previous conditions via `And` instructions. conditions: Vec<(BasicBlockId, ValueId)>, + + /// Maps SSA array values with a slice type to their size. + /// This is maintained by appropriate calls to the `SliceCapacityTracker` and is used by the `ValueMerger`. 
+ slice_sizes: HashMap, } pub(crate) struct Store { @@ -239,7 +234,7 @@ fn flatten_function_cfg(function: &mut Function) { local_allocations: HashSet::new(), branch_ends, conditions: Vec::new(), - outer_block_stores: HashMap::default(), + slice_sizes: HashMap::default(), }; context.flatten(); } @@ -262,21 +257,18 @@ impl<'f> Context<'f> { /// Returns the last block to be inlined. This is either the return block of the function or, /// if self.conditions is not empty, the end block of the most recent condition. fn handle_terminator(&mut self, block: BasicBlockId) -> BasicBlockId { - if let TerminatorInstruction::JmpIf { .. } = - self.inserter.function.dfg[block].unwrap_terminator() - { - // Find stores in the outer block and insert into the `outer_block_stores` map. - // Not using this map can lead to issues when attempting to merge slices. - // When inlining a branch end, only the then branch and the else branch are checked for stores. - // However, there are cases where we want to load a value that comes from the outer block - // that we are handling the terminator for here. 
- let instructions = self.inserter.function.dfg[block].instructions().to_vec(); - for instruction in instructions { - let (instruction, _) = self.inserter.map_instruction(instruction); - if let Instruction::Store { address, value } = instruction { - self.outer_block_stores.insert(address, value); - } - } + // As we recursively flatten inner blocks, we need to track the slice information + // for the outer block before we start recursively inlining + let outer_block_instructions = self.inserter.function.dfg[block].instructions(); + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + for instruction in outer_block_instructions { + let results = self.inserter.function.dfg.instruction_results(*instruction); + let instruction = &self.inserter.function.dfg[*instruction]; + capacity_tracker.collect_slice_information( + instruction, + &mut self.slice_sizes, + results.to_vec(), + ); } match self.inserter.function.dfg[block].unwrap_terminator() { @@ -494,12 +486,16 @@ impl<'f> Context<'f> { }); let block = self.inserter.function.entry_block(); - let mut value_merger = ValueMerger::new( - &mut self.inserter.function.dfg, - block, - Some(&self.store_values), - Some(&self.outer_block_stores), - ); + + // Make sure we have tracked the slice capacities of any block arguments + let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + for (then_arg, else_arg) in args.iter() { + capacity_tracker.compute_slice_capacity(*then_arg, &mut self.slice_sizes); + capacity_tracker.compute_slice_capacity(*else_arg, &mut self.slice_sizes); + } + + let mut value_merger = + ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes); // Cannot include this in the previous vecmap since it requires exclusive access to self let args = vecmap(args, |(then_arg, else_arg)| { @@ -538,18 +534,22 @@ impl<'f> Context<'f> { } } + // Most slice information is collected when instructions are inlined. 
+ // We need to collect information on slice values here as we may possibly merge stores + // before any inlining occurs. + let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + for (then_case, else_case, _) in new_map.values() { + capacity_tracker.compute_slice_capacity(*then_case, &mut self.slice_sizes); + capacity_tracker.compute_slice_capacity(*else_case, &mut self.slice_sizes); + } + let then_condition = then_branch.condition; let else_condition = else_branch.condition; let block = self.inserter.function.entry_block(); - let mut value_merger = ValueMerger::new( - &mut self.inserter.function.dfg, - block, - Some(&self.store_values), - Some(&self.outer_block_stores), - ); - + let mut value_merger = + ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes); // Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does let mut new_values = HashMap::default(); for (address, (then_case, else_case, _)) in &new_map { @@ -571,6 +571,16 @@ impl<'f> Context<'f> { .insert(address, Store { old_value: *old_value, new_value: value }); } } + + // Collect any potential slice information on the stores we are merging + for (address, (_, _, _)) in &new_map { + let value = new_values[address]; + let address = *address; + let instruction = Instruction::Store { address, value }; + + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + capacity_tracker.collect_slice_information(&instruction, &mut self.slice_sizes, vec![]); + } } fn remember_store(&mut self, address: ValueId, new_value: ValueId) { @@ -579,8 +589,18 @@ impl<'f> Context<'f> { store_value.new_value = new_value; } else { let load = Instruction::Load { address }; + let load_type = Some(vec![self.inserter.function.dfg.type_of_value(new_value)]); - let old_value = self.insert_instruction_with_typevars(load, load_type).first(); + let old_value = + self.insert_instruction_with_typevars(load.clone(), 
load_type).first(); + + // Need this or else we will be missing a the previous value of a slice that we wish to merge + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + capacity_tracker.collect_slice_information( + &load, + &mut self.slice_sizes, + vec![old_value], + ); self.store_values.insert(address, Store { old_value, new_value }); } @@ -602,8 +622,15 @@ impl<'f> Context<'f> { // unnecessary, when removing it actually causes an aliasing/mutability error. let instructions = self.inserter.function.dfg[destination].instructions().to_vec(); - for instruction in instructions { - self.push_instruction(instruction); + for instruction in instructions.iter() { + let results = self.push_instruction(*instruction); + let (instruction, _) = self.inserter.map_instruction(*instruction); + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + capacity_tracker.collect_slice_information( + &instruction, + &mut self.slice_sizes, + results, + ); } self.handle_terminator(destination) @@ -615,7 +642,7 @@ impl<'f> Context<'f> { /// As a result, the instruction that will be pushed will actually be a new instruction /// with a different InstructionId from the original. The results of the given instruction /// will also be mapped to the results of the new instruction. 
- fn push_instruction(&mut self, id: InstructionId) { + fn push_instruction(&mut self, id: InstructionId) -> Vec { let (instruction, call_stack) = self.inserter.map_instruction(id); let instruction = self.handle_instruction_side_effects(instruction, call_stack.clone()); let is_allocate = matches!(instruction, Instruction::Allocate); @@ -628,6 +655,8 @@ impl<'f> Context<'f> { if is_allocate { self.local_allocations.insert(results.first()); } + + results.results().into_owned() } /// If we are currently in a branch, we need to modify constrain instructions diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs new file mode 100644 index 00000000000..7cd0fe3084e --- /dev/null +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -0,0 +1,150 @@ +use crate::ssa::ir::{ + dfg::DataFlowGraph, + instruction::{Instruction, Intrinsic}, + types::Type, + value::{Value, ValueId}, +}; + +use fxhash::FxHashMap as HashMap; + +pub(crate) struct SliceCapacityTracker<'a> { + dfg: &'a DataFlowGraph, +} + +impl<'a> SliceCapacityTracker<'a> { + pub(crate) fn new(dfg: &'a DataFlowGraph) -> Self { + SliceCapacityTracker { dfg } + } + + /// Determine how the slice sizes map needs to be updated according to the provided instruction. + pub(crate) fn collect_slice_information( + &mut self, + instruction: &Instruction, + slice_sizes: &mut HashMap, + results: Vec, + ) { + match instruction { + Instruction::ArrayGet { array, .. } => { + let array_typ = self.dfg.type_of_value(*array); + let array_value = &self.dfg[*array]; + if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() + { + // Initial insertion into the slice sizes map + // Any other insertions should only occur if the value is already + // a part of the map. + self.compute_slice_capacity(*array, slice_sizes); + } + } + Instruction::ArraySet { array, value, .. 
} => { + let array_typ = self.dfg.type_of_value(*array); + let array_value = &self.dfg[*array]; + if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() + { + // Initial insertion into the slice sizes map + // Any other insertions should only occur if the value is already + // a part of the map. + self.compute_slice_capacity(*array, slice_sizes); + } + + let value_typ = self.dfg.type_of_value(*value); + // Compiler sanity check + assert!(!value_typ.contains_slice_element(), "ICE: Nested slices are not allowed and should not have reached the flattening pass of SSA"); + + if let Some(capacity) = slice_sizes.get(array) { + slice_sizes.insert(results[0], *capacity); + } + } + Instruction::Call { func, arguments } => { + let func = &self.dfg[*func]; + if let Value::Intrinsic(intrinsic) = func { + let (argument_index, result_index) = match intrinsic { + Intrinsic::SlicePushBack + | Intrinsic::SlicePushFront + | Intrinsic::SlicePopBack + | Intrinsic::SliceInsert + | Intrinsic::SliceRemove => (1, 1), + // `pop_front` returns the popped element, and then the respective slice. + // This means in the case of a slice with structs, the result index of the popped slice + // will change depending on the number of elements in the struct. + // For example, a slice with four elements will look as such in SSA: + // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) + // where v7 is the slice length and v8 is the popped slice itself. + Intrinsic::SlicePopFront => (1, results.len() - 1), + _ => return, + }; + let slice_contents = arguments[argument_index]; + match intrinsic { + Intrinsic::SlicePushBack + | Intrinsic::SlicePushFront + | Intrinsic::SliceInsert => { + for arg in &arguments[(argument_index + 1)..] 
{ + let element_typ = self.dfg.type_of_value(*arg); + if element_typ.contains_slice_element() { + self.compute_slice_capacity(*arg, slice_sizes); + } + } + if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { + let new_capacity = *contents_capacity + 1; + slice_sizes.insert(results[result_index], new_capacity); + } + } + Intrinsic::SlicePopBack + | Intrinsic::SliceRemove + | Intrinsic::SlicePopFront => { + // We do not decrement the size on intrinsics that could remove values from a slice. + // This is because we could potentially go back to the smaller slice and not fill in dummies. + // This pass should be tracking the potential max that a slice ***could be*** + if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { + let new_capacity = *contents_capacity - 1; + slice_sizes.insert(results[result_index], new_capacity); + } + } + _ => {} + } + } + } + Instruction::Store { address, value } => { + let value_typ = self.dfg.type_of_value(*value); + if value_typ.contains_slice_element() { + self.compute_slice_capacity(*value, slice_sizes); + + let value_capacity = slice_sizes.get(value).unwrap_or_else(|| { + panic!("ICE: should have slice capacity set for value {value} being stored at {address}") + }); + + slice_sizes.insert(*address, *value_capacity); + } + } + Instruction::Load { address } => { + let load_typ = self.dfg.type_of_value(*address); + if load_typ.contains_slice_element() { + let result = results[0]; + + let address_capacity = slice_sizes.get(address).unwrap_or_else(|| { + panic!("ICE: should have slice capacity set at address {address} being loaded into {result}") + }); + + slice_sizes.insert(result, *address_capacity); + } + } + _ => {} + } + } + + /// Computes the starting capacity of a slice which is still a `Value::Array` + pub(crate) fn compute_slice_capacity( + &self, + array_id: ValueId, + slice_sizes: &mut HashMap, + ) { + if let Value::Array { array, typ } = &self.dfg[array_id] { + // Compiler sanity check + 
assert!(!typ.is_nested_slice(), "ICE: Nested slices are not allowed and should not have reached the flattening pass of SSA"); + if let Type::Slice(_) = typ { + let element_size = typ.element_size(); + let len = array.len() / element_size; + slice_sizes.insert(array_id, len); + } + } + } +} diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 446560f45f1..6b923a2e42d 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -4,35 +4,26 @@ use fxhash::FxHashMap as HashMap; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::{CallStack, DataFlowGraph}, - instruction::{BinaryOp, Instruction, Intrinsic}, + instruction::{BinaryOp, Instruction}, types::Type, - value::{Value, ValueId}, + value::ValueId, }; -use crate::ssa::opt::flatten_cfg::Store; - pub(crate) struct ValueMerger<'a> { dfg: &'a mut DataFlowGraph, block: BasicBlockId, - store_values: Option<&'a HashMap>, - outer_block_stores: Option<&'a HashMap>, - slice_sizes: HashMap, + // Maps SSA array values with a slice type to their size. + // This must be computed before merging values. + slice_sizes: &'a mut HashMap, } impl<'a> ValueMerger<'a> { pub(crate) fn new( dfg: &'a mut DataFlowGraph, block: BasicBlockId, - store_values: Option<&'a HashMap>, - outer_block_stores: Option<&'a HashMap>, + slice_sizes: &'a mut HashMap, ) -> Self { - ValueMerger { - dfg, - block, - store_values, - outer_block_stores, - slice_sizes: HashMap::default(), - } + ValueMerger { dfg, block, slice_sizes } } /// Merge two values a and b from separate basic blocks to a single value. 
@@ -184,11 +175,13 @@ impl<'a> ValueMerger<'a> { _ => panic!("Expected slice type"), }; - let then_len = self.get_slice_length(then_value_id); - self.slice_sizes.insert(then_value_id, then_len); + let then_len = *self.slice_sizes.get(&then_value_id).unwrap_or_else(|| { + panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size"); + }); - let else_len = self.get_slice_length(else_value_id); - self.slice_sizes.insert(else_value_id, else_len); + let else_len = *self.slice_sizes.get(&else_value_id).unwrap_or_else(|| { + panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); + }); let len = then_len.max(else_len); @@ -218,8 +211,10 @@ impl<'a> ValueMerger<'a> { } }; - let then_element = get_element(then_value_id, typevars.clone(), then_len); - let else_element = get_element(else_value_id, typevars, else_len); + let then_element = + get_element(then_value_id, typevars.clone(), then_len * element_types.len()); + let else_element = + get_element(else_value_id, typevars, else_len * element_types.len()); merged.push_back(self.merge_values( then_condition, @@ -233,82 +228,6 @@ impl<'a> ValueMerger<'a> { self.dfg.make_array(merged, typ) } - fn get_slice_length(&mut self, value_id: ValueId) -> usize { - let value = &self.dfg[value_id]; - match value { - Value::Array { array, .. } => array.len(), - Value::Instruction { instruction: instruction_id, .. } => { - let instruction = &self.dfg[*instruction_id]; - match instruction { - // TODO(#3188): A slice can be the result of an ArrayGet when it is the - // fetched from a slice of slices or as a struct field. - // However, we need to incorporate nested slice support in flattening - // in order for this to be valid - // Instruction::ArrayGet { array, .. } => {} - Instruction::ArraySet { array, .. 
} => { - let array = *array; - let len = self.get_slice_length(array); - self.slice_sizes.insert(array, len); - len - } - Instruction::Load { address } => { - let outer_block_stores = self.outer_block_stores.expect("ICE: A map of previous stores is required in order to resolve a slice load"); - let store_values = self.store_values.expect("ICE: A map of previous stores is required in order to resolve a slice load"); - let store_value = outer_block_stores - .get(address) - .expect("ICE: load in merger should have store from outer block"); - - if let Some(len) = self.slice_sizes.get(store_value) { - return *len; - } - - let store_value = if let Some(store) = store_values.get(address) { - if let Some(len) = self.slice_sizes.get(&store.new_value) { - return *len; - } - - store.new_value - } else { - *store_value - }; - - self.get_slice_length(store_value) - } - Instruction::Call { func, arguments } => { - let slice_contents = arguments[1]; - let func = &self.dfg[*func]; - match func { - Value::Intrinsic(intrinsic) => match intrinsic { - Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SliceInsert => { - // `get_slice_length` needs to be called here as it is borrows self as mutable - let initial_len = self.get_slice_length(slice_contents); - self.slice_sizes.insert(slice_contents, initial_len); - initial_len + 1 - } - Intrinsic::SlicePopBack - | Intrinsic::SlicePopFront - | Intrinsic::SliceRemove => { - // `get_slice_length` needs to be called here as it is borrows self as mutable - let initial_len = self.get_slice_length(slice_contents); - self.slice_sizes.insert(slice_contents, initial_len); - initial_len - 1 - } - _ => { - unreachable!("ICE: Intrinsic not supported, got {intrinsic:?}") - } - }, - _ => unreachable!("ICE: Expected intrinsic value but got {func:?}"), - } - } - _ => unreachable!("ICE: Got unexpected instruction: {instruction:?}"), - } - } - _ => unreachable!("ICE: Got unexpected value when resolving slice length {value:?}"), - } - } - 
/// Construct a dummy value to be attached to the smaller of two slices being merged. /// We need to make sure we follow the internal element type structure of the slice type /// even for dummy data to ensure that we do not have errors later in the compiler, diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 71725422a7a..a315695f7db 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -12,5 +12,6 @@ mod die; pub(crate) mod flatten_cfg; mod inlining; mod mem2reg; +mod remove_bit_shifts; mod simplify_cfg; mod unrolling; diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs new file mode 100644 index 00000000000..a71a42d5757 --- /dev/null +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs @@ -0,0 +1,285 @@ +use std::{borrow::Cow, rc::Rc}; + +use acvm::FieldElement; + +use crate::ssa::{ + ir::{ + basic_block::BasicBlockId, + dfg::{CallStack, InsertInstructionResult}, + function::{Function, RuntimeType}, + instruction::{Binary, BinaryOp, Endian, Instruction, InstructionId, Intrinsic}, + types::{NumericType, Type}, + value::ValueId, + }, + ssa_gen::Ssa, +}; + +impl Ssa { + /// Performs constant folding on each instruction. + /// + /// See [`constant_folding`][self] module for more information. + #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn remove_bit_shifts(mut self) -> Ssa { + remove_bit_shifts(self.main_mut()); + self + } +} + +/// The structure of this pass is simple: +/// Go through each block and re-insert all instructions. 
+fn remove_bit_shifts(function: &mut Function) { + if let RuntimeType::Brillig = function.runtime() { + return; + } + + let block = function.entry_block(); + let mut context = + Context { function, new_instructions: Vec::new(), block, call_stack: CallStack::default() }; + + context.remove_bit_shifts(); +} + +struct Context<'f> { + function: &'f mut Function, + new_instructions: Vec, + + block: BasicBlockId, + call_stack: CallStack, +} + +impl Context<'_> { + fn remove_bit_shifts(&mut self) { + let instructions = self.function.dfg[self.block].take_instructions(); + + for instruction_id in instructions { + match self.function.dfg[instruction_id] { + Instruction::Binary(Binary { lhs, rhs, operator }) + if matches!(operator, BinaryOp::Shl | BinaryOp::Shr) => + { + self.call_stack = self.function.dfg.get_call_stack(instruction_id).clone(); + let old_result = + *self.function.dfg.instruction_results(instruction_id).first().unwrap(); + + let bit_size = match self.function.dfg.type_of_value(lhs) { + Type::Numeric(NumericType::Signed { bit_size }) + | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, + _ => unreachable!("ICE: right-shift attempted on non-integer"), + }; + let new_result = if operator == BinaryOp::Shl { + self.insert_wrapping_shift_left(lhs, rhs, bit_size) + } else { + self.insert_shift_right(lhs, rhs, bit_size) + }; + + self.function.dfg.set_value_from_id(old_result, new_result); + } + _ => { + self.new_instructions.push(instruction_id); + } + }; + } + + *self.function.dfg[self.block].instructions_mut() = + std::mem::take(&mut self.new_instructions); + } + + /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs + /// and truncate the result to bit_size + pub(crate) fn insert_wrapping_shift_left( + &mut self, + lhs: ValueId, + rhs: ValueId, + bit_size: u32, + ) -> ValueId { + let base = self.field_constant(FieldElement::from(2_u128)); + let typ = self.function.dfg.type_of_value(lhs); + let (max_bit, pow) = if let 
Some(rhs_constant) = self.function.dfg.get_numeric_constant(rhs) + { + // Happy case is that we know precisely by how many bits the the integer will + // increase: lhs_bit_size + rhs + let bit_shift_size = rhs_constant.to_u128() as u32; + + let (rhs_bit_size_pow_2, overflows) = 2_u128.overflowing_pow(bit_shift_size); + if overflows { + assert!(bit_size < 128, "ICE - shift left with big integers are not supported"); + if bit_size < 128 { + let zero = self.numeric_constant(FieldElement::zero(), typ); + return InsertInstructionResult::SimplifiedTo(zero).first(); + } + } + let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ); + + let max_lhs_bits = self.function.dfg.get_value_max_num_bits(lhs); + + (max_lhs_bits + bit_shift_size, pow) + } else { + // we use a predicate to nullify the result in case of overflow + let bit_size_var = + self.numeric_constant(FieldElement::from(bit_size as u128), typ.clone()); + let overflow = self.insert_binary(rhs, BinaryOp::Lt, bit_size_var); + let predicate = self.insert_cast(overflow, typ.clone()); + // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value + let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let pow = self.pow(base, rhs_unsigned); + let pow = self.insert_cast(pow, typ); + (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) + }; + + if max_bit <= bit_size { + self.insert_binary(lhs, BinaryOp::Mul, pow) + } else { + let result = self.insert_binary(lhs, BinaryOp::Mul, pow); + self.insert_truncate(result, bit_size, max_bit) + } + } + + /// Insert ssa instructions which computes lhs >> rhs by doing lhs/2^rhs + pub(crate) fn insert_shift_right( + &mut self, + lhs: ValueId, + rhs: ValueId, + bit_size: u32, + ) -> ValueId { + let lhs_typ = self.function.dfg.type_of_value(lhs); + let base = self.field_constant(FieldElement::from(2_u128)); + // we can safely cast to unsigned because overflow_checks prevent bit-shift 
with a negative value + let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let pow = self.pow(base, rhs_unsigned); + // We need at least one more bit for the case where rhs == bit_size + let div_type = Type::unsigned(bit_size + 1); + let casted_lhs = self.insert_cast(lhs, div_type.clone()); + let casted_pow = self.insert_cast(pow, div_type); + let div_result = self.insert_binary(casted_lhs, BinaryOp::Div, casted_pow); + // We have to cast back to the original type + self.insert_cast(div_result, lhs_typ) + } + + /// Computes lhs^rhs via square&multiply, using the bits decomposition of rhs + /// Pseudo-code of the computation: + /// let mut r = 1; + /// let rhs_bits = to_bits(rhs); + /// for i in 1 .. bit_size + 1 { + /// let r_squared = r * r; + /// let b = rhs_bits[bit_size - i]; + /// r = (r_squared * lhs * b) + (1 - b) * r_squared; + /// } + fn pow(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { + let typ = self.function.dfg.type_of_value(rhs); + if let Type::Numeric(NumericType::Unsigned { bit_size }) = typ { + let to_bits = self.function.dfg.import_intrinsic(Intrinsic::ToBits(Endian::Little)); + let length = self.field_constant(FieldElement::from(bit_size as i128)); + let result_types = + vec![Type::field(), Type::Array(Rc::new(vec![Type::bool()]), bit_size as usize)]; + let rhs_bits = self.insert_call(to_bits, vec![rhs, length], result_types); + + let rhs_bits = rhs_bits[1]; + let one = self.field_constant(FieldElement::one()); + let mut r = one; + for i in 1..bit_size + 1 { + let r_squared = self.insert_binary(r, BinaryOp::Mul, r); + let a = self.insert_binary(r_squared, BinaryOp::Mul, lhs); + let idx = self.field_constant(FieldElement::from((bit_size - i) as i128)); + let b = self.insert_array_get(rhs_bits, idx, Type::bool()); + let not_b = self.insert_not(b); + let b = self.insert_cast(b, Type::field()); + let not_b = self.insert_cast(not_b, Type::field()); + let r1 = self.insert_binary(a, BinaryOp::Mul, b); + let r2 = 
self.insert_binary(r_squared, BinaryOp::Mul, not_b); + r = self.insert_binary(r1, BinaryOp::Add, r2); + } + r + } else { + unreachable!("Value must be unsigned in power operation"); + } + } + + pub(crate) fn field_constant(&mut self, constant: FieldElement) -> ValueId { + self.function.dfg.make_constant(constant, Type::field()) + } + + /// Insert a numeric constant into the current function + pub(crate) fn numeric_constant( + &mut self, + value: impl Into, + typ: Type, + ) -> ValueId { + self.function.dfg.make_constant(value.into(), typ) + } + + /// Insert a binary instruction at the end of the current block. + /// Returns the result of the binary instruction. + pub(crate) fn insert_binary( + &mut self, + lhs: ValueId, + operator: BinaryOp, + rhs: ValueId, + ) -> ValueId { + let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); + self.insert_instruction(instruction, None).first() + } + + /// Insert a not instruction at the end of the current block. + /// Returns the result of the instruction. + pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { + self.insert_instruction(Instruction::Not(rhs), None).first() + } + + /// Insert a truncate instruction at the end of the current block. + /// Returns the result of the truncate instruction. + pub(crate) fn insert_truncate( + &mut self, + value: ValueId, + bit_size: u32, + max_bit_size: u32, + ) -> ValueId { + self.insert_instruction(Instruction::Truncate { value, bit_size, max_bit_size }, None) + .first() + } + + /// Insert a cast instruction at the end of the current block. + /// Returns the result of the cast instruction. + pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { + self.insert_instruction(Instruction::Cast(value, typ), None).first() + } + + /// Insert a call instruction at the end of the current block and return + /// the results of the call. 
+ pub(crate) fn insert_call( + &mut self, + func: ValueId, + arguments: Vec, + result_types: Vec, + ) -> Cow<[ValueId]> { + self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)).results() + } + + /// Insert an instruction to extract an element from an array + pub(crate) fn insert_array_get( + &mut self, + array: ValueId, + index: ValueId, + element_type: Type, + ) -> ValueId { + let element_type = Some(vec![element_type]); + self.insert_instruction(Instruction::ArrayGet { array, index }, element_type).first() + } + + pub(crate) fn insert_instruction( + &mut self, + instruction: Instruction, + ctrl_typevars: Option>, + ) -> InsertInstructionResult { + let result = self.function.dfg.insert_instruction_and_results( + instruction, + self.block, + ctrl_typevars, + self.call_stack.clone(), + ); + + if let InsertInstructionResult::Results(instruction_id, _) = result { + self.new_instructions.push(instruction_id); + } + + result + } +} diff --git a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 0e155776545..2219d0264ab 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -435,7 +435,7 @@ impl<'a> FunctionContext<'a> { self.builder.set_location(location).insert_constrain( sign, one, - Some("attempt to bit-shift with overflow".to_string()), + Some("attempt to bit-shift with overflow".to_owned().into()), ); } @@ -446,7 +446,7 @@ impl<'a> FunctionContext<'a> { self.builder.set_location(location).insert_constrain( overflow, one, - Some("attempt to bit-shift with overflow".to_owned()), + Some("attempt to bit-shift with overflow".to_owned().into()), ); self.builder.insert_truncate(result, bit_size, bit_size + 1) } @@ -498,8 +498,11 @@ impl<'a> FunctionContext<'a> { let sign_diff = self.builder.insert_binary(result_sign, BinaryOp::Eq, lhs_sign); let sign_diff_with_predicate = 
self.builder.insert_binary(sign_diff, BinaryOp::Mul, same_sign); - let overflow_check = - Instruction::Constrain(sign_diff_with_predicate, same_sign, Some(message)); + let overflow_check = Instruction::Constrain( + sign_diff_with_predicate, + same_sign, + Some(message.into()), + ); self.builder.set_location(location).insert_instruction(overflow_check, None); } BinaryOpKind::Multiply => { @@ -509,11 +512,10 @@ impl<'a> FunctionContext<'a> { let rhs_abs = self.absolute_value_helper(rhs, rhs_sign, bit_size); let product_field = self.builder.insert_binary(lhs_abs, BinaryOp::Mul, rhs_abs); // It must not already overflow the bit_size - let message = "attempt to multiply with overflow".to_string(); self.builder.set_location(location).insert_range_check( product_field, bit_size, - Some(message.clone()), + Some("attempt to multiply with overflow".to_string()), ); let product = self.builder.insert_cast(product_field, Type::unsigned(bit_size)); @@ -530,7 +532,7 @@ impl<'a> FunctionContext<'a> { self.builder.set_location(location).insert_constrain( product_overflow_check, one, - Some(message), + Some(message.into()), ); } _ => unreachable!("operator {} should not overflow", operator), @@ -550,22 +552,6 @@ impl<'a> FunctionContext<'a> { ) -> Values { let result_type = self.builder.type_of_value(lhs); let mut result = match operator { - BinaryOpKind::ShiftLeft => { - let bit_size = match result_type { - Type::Numeric(NumericType::Signed { bit_size }) - | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, - _ => unreachable!("ICE: left-shift attempted on non-integer"), - }; - self.builder.insert_wrapping_shift_left(lhs, rhs, bit_size) - } - BinaryOpKind::ShiftRight => { - let bit_size = match result_type { - Type::Numeric(NumericType::Signed { bit_size }) - | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, - _ => unreachable!("ICE: right-shift attempted on non-integer"), - }; - self.builder.insert_shift_right(lhs, rhs, bit_size) - } BinaryOpKind::Equal 
| BinaryOpKind::NotEqual if matches!(result_type, Type::Array(..)) => { @@ -738,12 +724,17 @@ impl<'a> FunctionContext<'a> { /// Create a const offset of an address for an array load or store pub(super) fn make_offset(&mut self, mut address: ValueId, offset: u128) -> ValueId { if offset != 0 { - let offset = self.builder.field_constant(offset); + let offset = self.builder.numeric_constant(offset, self.builder.type_of_value(address)); address = self.builder.insert_binary(address, BinaryOp::Add, offset); } address } + /// Array indexes are u64s. This function casts values used as indexes to u64. + pub(super) fn make_array_index(&mut self, index: ValueId) -> ValueId { + self.builder.insert_cast(index, Type::unsigned(64)) + } + /// Define a local variable to be some Values that can later be retrieved /// by calling self.lookup(id) pub(super) fn define(&mut self, id: LocalId, value: Values) { @@ -987,16 +978,19 @@ impl<'a> FunctionContext<'a> { index: ValueId, location: Location, ) -> ValueId { - let element_size = self.builder.field_constant(self.element_size(array)); + let index = self.make_array_index(index); + let element_size = + self.builder.numeric_constant(self.element_size(array), Type::unsigned(64)); // The actual base index is the user's index * the array element type's size let mut index = self.builder.set_location(location).insert_binary(index, BinaryOp::Mul, element_size); - let one = self.builder.field_constant(FieldElement::one()); + let one = self.builder.numeric_constant(FieldElement::one(), Type::unsigned(64)); new_value.for_each(|value| { let value = value.eval(self); array = self.builder.insert_array_set(array, index, value); + self.builder.increment_array_reference_count(array); index = self.builder.insert_binary(index, BinaryOp::Add, one); }); array @@ -1132,9 +1126,8 @@ fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { BinaryOpKind::And => BinaryOp::And, BinaryOpKind::Or => BinaryOp::Or, BinaryOpKind::Xor => BinaryOp::Xor, - 
BinaryOpKind::ShiftRight | BinaryOpKind::ShiftLeft => unreachable!( - "ICE - bit shift operators do not exist in SSA and should have been replaced" - ), + BinaryOpKind::ShiftLeft => BinaryOp::Shl, + BinaryOpKind::ShiftRight => BinaryOp::Shr, } } diff --git a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 9d9635fed34..3780477cf71 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -13,11 +13,8 @@ use noirc_frontend::{ }; use crate::{ - errors::RuntimeError, - ssa::{ - function_builder::data_bus::DataBusBuilder, - ir::{instruction::Intrinsic, types::NumericType}, - }, + errors::{InternalError, RuntimeError}, + ssa::{function_builder::data_bus::DataBusBuilder, ir::instruction::Intrinsic}, }; use self::{ @@ -29,7 +26,7 @@ use super::{ function_builder::data_bus::DataBus, ir::{ function::RuntimeType, - instruction::{BinaryOp, TerminatorInstruction}, + instruction::{BinaryOp, ConstrainError, Instruction, TerminatorInstruction}, types::Type, value::ValueId, }, @@ -38,7 +35,10 @@ use super::{ /// Generates SSA for the given monomorphized program. /// /// This function will generate the SSA but does not perform any optimizations on it. 
-pub(crate) fn generate_ssa(program: Program) -> Result { +pub(crate) fn generate_ssa( + program: Program, + force_brillig_runtime: bool, +) -> Result { // see which parameter has call_data/return_data attribute let is_databus = DataBusBuilder::is_databus(&program.main_function_signature); @@ -56,7 +56,11 @@ pub(crate) fn generate_ssa(program: Program) -> Result { let mut function_context = FunctionContext::new( main.name.clone(), &main.parameters, - if main.unconstrained { RuntimeType::Brillig } else { RuntimeType::Acir }, + if force_brillig_runtime || main.unconstrained { + RuntimeType::Brillig + } else { + RuntimeType::Acir + }, &context, ); @@ -141,7 +145,7 @@ impl<'a> FunctionContext<'a> { Expression::Call(call) => self.codegen_call(call), Expression::Let(let_expr) => self.codegen_let(let_expr), Expression::Constrain(expr, location, assert_message) => { - self.codegen_constrain(expr, *location, assert_message.clone()) + self.codegen_constrain(expr, *location, assert_message) } Expression::Assign(assign) => self.codegen_assign(assign), Expression::Semi(semi) => self.codegen_semi(semi), @@ -383,8 +387,9 @@ impl<'a> FunctionContext<'a> { length: Option, ) -> Result { // base_index = index * type_size + let index = self.make_array_index(index); let type_size = Self::convert_type(element_type).size_of_type(); - let type_size = self.builder.field_constant(type_size as u128); + let type_size = self.builder.numeric_constant(type_size as u128, Type::unsigned(64)); let base_index = self.builder.set_location(location).insert_binary(index, BinaryOp::Mul, type_size); @@ -421,27 +426,18 @@ impl<'a> FunctionContext<'a> { index: super::ir::value::ValueId, length: Option, ) { - let array_len = length.expect("ICE: a length must be supplied for indexing slices"); - // Check the type of the index value for valid comparisons - let array_len = match self.builder.type_of_value(index) { - Type::Numeric(numeric_type) => match numeric_type { - // If the index itself is an integer, keep 
the array length as a Field - NumericType::Unsigned { .. } | NumericType::Signed { .. } => array_len, - // If the index and the array length are both Fields we will not be able to perform a less than comparison on them. - // Thus, we cast the array length to a u64 before performing the less than comparison - NumericType::NativeField => self - .builder - .insert_cast(array_len, Type::Numeric(NumericType::Unsigned { bit_size: 64 })), - }, - _ => unreachable!("ICE: array index must be a numeric type"), - }; + let index = self.make_array_index(index); + // We convert the length as an array index type for comparison + let array_len = self + .make_array_index(length.expect("ICE: a length must be supplied for indexing slices")); let is_offset_out_of_bounds = self.builder.insert_binary(index, BinaryOp::Lt, array_len); let true_const = self.builder.numeric_constant(true, Type::bool()); + self.builder.insert_constrain( is_offset_out_of_bounds, true_const, - Some("Index out of bounds".to_owned()), + Some(Box::new("Index out of bounds".to_owned().into())), ); } @@ -665,19 +661,74 @@ impl<'a> FunctionContext<'a> { &mut self, expr: &Expression, location: Location, - assert_message: Option, + assert_message: &Option>, ) -> Result { let expr = self.codegen_non_tuple_expression(expr)?; let true_literal = self.builder.numeric_constant(true, Type::bool()); - self.builder.set_location(location).insert_constrain(expr, true_literal, assert_message); + + // Set the location here for any errors that may occur when we codegen the assert message + self.builder.set_location(location); + + let assert_message = self.codegen_constrain_error(assert_message)?; + + self.builder.insert_constrain(expr, true_literal, assert_message); Ok(Self::unit_value()) } + // This method does not necessary codegen the full assert message expression, thus it does not + // return a `Values` object. Instead we check the internals of the expression to make sure + // we have an `Expression::Call` as expected. 
An `Instruction::Call` is then constructed but not + // inserted to the SSA as we want that instruction to be atomic in SSA with a constrain instruction. + fn codegen_constrain_error( + &mut self, + assert_message: &Option>, + ) -> Result>, RuntimeError> { + let Some(assert_message_expr) = assert_message else { + return Ok(None) + }; + + if let ast::Expression::Literal(ast::Literal::Str(assert_message)) = + assert_message_expr.as_ref() + { + return Ok(Some(Box::new(ConstrainError::Static(assert_message.to_string())))); + } + + let ast::Expression::Call(call) = assert_message_expr.as_ref() else { + return Err(InternalError::Unexpected { + expected: "Expected a call expression".to_owned(), + found: "Instead found {expr:?}".to_owned(), + call_stack: self.builder.get_call_stack(), + } + .into()); + }; + + let func = self.codegen_non_tuple_expression(&call.func)?; + let mut arguments = Vec::with_capacity(call.arguments.len()); + + for argument in &call.arguments { + let mut values = self.codegen_expression(argument)?.into_value_list(self); + arguments.append(&mut values); + } + + // If an array is passed as an argument we increase its reference count + for argument in &arguments { + self.builder.increment_array_reference_count(*argument); + } + + let instr = Instruction::Call { func, arguments }; + Ok(Some(Box::new(ConstrainError::Dynamic(instr)))) + } + fn codegen_assign(&mut self, assign: &ast::Assign) -> Result { let lhs = self.extract_current_value(&assign.lvalue)?; let rhs = self.codegen_expression(&assign.expression)?; + rhs.clone().for_each(|value| { + let value = value.eval(self); + self.builder.increment_array_reference_count(value); + }); + self.assign_new_value(lhs, rhs); Ok(Self::unit_value()) } diff --git a/noir/compiler/noirc_frontend/Cargo.toml b/noir/compiler/noirc_frontend/Cargo.toml index 80d767f7f2c..a3a8d460572 100644 --- a/noir/compiler/noirc_frontend/Cargo.toml +++ b/noir/compiler/noirc_frontend/Cargo.toml @@ -23,6 +23,7 @@ rustc-hash = "1.1.0" 
small-ord-set = "0.1.3" regex = "1.9.1" tracing.workspace = true +petgraph = "0.6" [dev-dependencies] strum = "0.24" diff --git a/noir/compiler/noirc_frontend/src/ast/statement.rs b/noir/compiler/noirc_frontend/src/ast/statement.rs index 73b1f68778d..a2c29a8c52e 100644 --- a/noir/compiler/noirc_frontend/src/ast/statement.rs +++ b/noir/compiler/noirc_frontend/src/ast/statement.rs @@ -416,7 +416,7 @@ pub enum LValue { } #[derive(Debug, PartialEq, Eq, Clone)] -pub struct ConstrainStatement(pub Expression, pub Option, pub ConstrainKind); +pub struct ConstrainStatement(pub Expression, pub Option, pub ConstrainKind); #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum ConstrainKind { diff --git a/noir/compiler/noirc_frontend/src/debug/mod.rs b/noir/compiler/noirc_frontend/src/debug/mod.rs new file mode 100644 index 00000000000..9f182d2baa2 --- /dev/null +++ b/noir/compiler/noirc_frontend/src/debug/mod.rs @@ -0,0 +1,607 @@ +use crate::parser::{parse_program, ParsedModule}; +use crate::{ + ast, + ast::{Path, PathKind}, + parser::{Item, ItemKind}, +}; +use noirc_errors::{Span, Spanned}; +use std::collections::HashMap; +use std::collections::VecDeque; + +const MAX_MEMBER_ASSIGN_DEPTH: usize = 8; + +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct SourceVarId(pub u32); + +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct SourceFieldId(pub u32); + +/// This structure is used to collect information about variables to track +/// for debugging during the instrumentation injection phase. 
+#[derive(Debug, Clone)] +pub struct DebugInstrumenter { + // all collected variable names while instrumenting the source for variable tracking + pub variables: HashMap, + + // all field names referenced when assigning to a member of a variable + pub field_names: HashMap, + + next_var_id: u32, + next_field_name_id: u32, + + // last seen variable names and their IDs grouped by scope + scope: Vec>, +} + +impl Default for DebugInstrumenter { + fn default() -> Self { + Self { + variables: HashMap::default(), + field_names: HashMap::default(), + scope: vec![], + next_var_id: 0, + next_field_name_id: 1, + } + } +} + +impl DebugInstrumenter { + pub fn instrument_module(&mut self, module: &mut ParsedModule) { + module.items.iter_mut().for_each(|item| { + if let Item { kind: ItemKind::Function(f), .. } = item { + self.walk_fn(&mut f.def); + } + }); + // this part absolutely must happen after ast traversal above + // so that oracle functions don't get wrapped, resulting in infinite recursion: + self.insert_state_set_oracle(module, 8); + } + + fn insert_var(&mut self, var_name: &str) -> SourceVarId { + let var_id = SourceVarId(self.next_var_id); + self.next_var_id += 1; + self.variables.insert(var_id, var_name.to_string()); + self.scope.last_mut().unwrap().insert(var_name.to_string(), var_id); + var_id + } + + fn lookup_var(&self, var_name: &str) -> Option { + self.scope.iter().rev().find_map(|vars| vars.get(var_name).copied()) + } + + fn insert_field_name(&mut self, field_name: &str) -> SourceFieldId { + let field_name_id = SourceFieldId(self.next_field_name_id); + self.next_field_name_id += 1; + self.field_names.insert(field_name_id, field_name.to_string()); + field_name_id + } + + fn walk_fn(&mut self, func: &mut ast::FunctionDefinition) { + self.scope.push(HashMap::default()); + + let set_fn_params = func + .parameters + .iter() + .flat_map(|param| { + pattern_vars(¶m.pattern) + .iter() + .map(|(id, _is_mut)| { + let var_id = self.insert_var(&id.0.contents); + 
build_assign_var_stmt(var_id, id_expr(id)) + }) + .collect::>() + }) + .collect(); + + self.walk_scope(&mut func.body.0, func.span); + + // prepend fn params: + func.body.0 = vec![set_fn_params, func.body.0.clone()].concat(); + } + + // Modify a vector of statements in-place, adding instrumentation for sets and drops. + // This function will consume a scope level. + fn walk_scope(&mut self, statements: &mut Vec, span: Span) { + statements.iter_mut().for_each(|stmt| self.walk_statement(stmt)); + + // extract and save the return value from the scope if there is one + let ret_stmt = statements.pop(); + let has_ret_expr = match ret_stmt { + None => false, + Some(ast::Statement { kind: ast::StatementKind::Expression(ret_expr), .. }) => { + let save_ret_expr = ast::Statement { + kind: ast::StatementKind::Let(ast::LetStatement { + pattern: ast::Pattern::Identifier(ident("__debug_expr", ret_expr.span)), + r#type: ast::UnresolvedType::unspecified(), + expression: ret_expr.clone(), + }), + span: ret_expr.span, + }; + statements.push(save_ret_expr); + true + } + Some(ret_stmt) => { + // not an expression, so leave it untouched + statements.push(ret_stmt); + false + } + }; + + let span = Span::empty(span.end()); + + // drop scope variables + let scope_vars = self.scope.pop().unwrap_or(HashMap::default()); + let drop_vars_stmts = scope_vars.values().map(|var_id| build_drop_var_stmt(*var_id, span)); + statements.extend(drop_vars_stmts); + + // return the saved value in __debug_expr, or unit otherwise + let last_stmt = if has_ret_expr { + ast::Statement { + kind: ast::StatementKind::Expression(ast::Expression { + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![ident("__debug_expr", span)], + kind: PathKind::Plain, + span, + }), + span, + }), + span, + } + } else { + ast::Statement { + kind: ast::StatementKind::Expression(ast::Expression { + kind: ast::ExpressionKind::Literal(ast::Literal::Unit), + span, + }), + span, + } + }; + statements.push(last_stmt); + } + + 
fn walk_let_statement(&mut self, let_stmt: &ast::LetStatement, span: &Span) -> ast::Statement { + // rewrites let statements written like this: + // let (((a,b,c),D { d }),e,f) = x; + // + // into statements like this: + // + // let (a,b,c,d,e,f,g) = { + // let (((a,b,c),D { d }),e,f) = x; + // wrap(1, a); + // wrap(2, b); + // ... + // wrap(6, f); + // (a,b,c,d,e,f,g) + // }; + + // a.b.c[3].x[i*4+1].z + + let vars = pattern_vars(&let_stmt.pattern); + let vars_pattern: Vec = vars + .iter() + .map(|(id, is_mut)| { + if *is_mut { + ast::Pattern::Mutable(Box::new(ast::Pattern::Identifier(id.clone())), id.span()) + } else { + ast::Pattern::Identifier(id.clone()) + } + }) + .collect(); + let vars_exprs: Vec = vars.iter().map(|(id, _)| id_expr(id)).collect(); + + let mut block_stmts = + vec![ast::Statement { kind: ast::StatementKind::Let(let_stmt.clone()), span: *span }]; + block_stmts.extend(vars.iter().map(|(id, _)| { + let var_id = self.insert_var(&id.0.contents); + build_assign_var_stmt(var_id, id_expr(id)) + })); + block_stmts.push(ast::Statement { + kind: ast::StatementKind::Expression(ast::Expression { + kind: ast::ExpressionKind::Tuple(vars_exprs), + span: let_stmt.pattern.span(), + }), + span: let_stmt.pattern.span(), + }); + + ast::Statement { + kind: ast::StatementKind::Let(ast::LetStatement { + pattern: ast::Pattern::Tuple(vars_pattern, let_stmt.pattern.span()), + r#type: ast::UnresolvedType::unspecified(), + expression: ast::Expression { + kind: ast::ExpressionKind::Block(ast::BlockExpression(block_stmts)), + span: let_stmt.expression.span, + }, + }), + span: *span, + } + } + + fn walk_assign_statement( + &mut self, + assign_stmt: &ast::AssignStatement, + span: &Span, + ) -> ast::Statement { + // X = Y becomes: + // X = { + // let __debug_expr = Y; + // + // __debug_var_assign(17, __debug_expr); + // // or: + // __debug_member_assign_{arity}(17, __debug_expr, _v0, _v1..., _v{arity}); + // + // __debug_expr + // }; + + let let_kind = 
ast::StatementKind::Let(ast::LetStatement { + pattern: ast::Pattern::Identifier(ident("__debug_expr", assign_stmt.expression.span)), + r#type: ast::UnresolvedType::unspecified(), + expression: assign_stmt.expression.clone(), + }); + let expression_span = assign_stmt.expression.span; + let new_assign_stmt = match &assign_stmt.lvalue { + ast::LValue::Ident(id) => { + let var_id = self + .lookup_var(&id.0.contents) + .unwrap_or_else(|| panic!("var lookup failed for var_name={}", &id.0.contents)); + build_assign_var_stmt(var_id, id_expr(&ident("__debug_expr", id.span()))) + } + ast::LValue::Dereference(_lv) => { + // TODO: this is a dummy statement for now, but we should + // somehow track the derefence and update the pointed to + // variable + ast::Statement { + kind: ast::StatementKind::Expression(uint_expr(0, *span)), + span: *span, + } + } + _ => { + let mut indexes = vec![]; + let mut cursor = &assign_stmt.lvalue; + let var_id; + loop { + match cursor { + ast::LValue::Ident(id) => { + var_id = self.lookup_var(&id.0.contents).unwrap_or_else(|| { + panic!("var lookup failed for var_name={}", &id.0.contents) + }); + break; + } + ast::LValue::MemberAccess { object, field_name } => { + cursor = object; + let field_name_id = self.insert_field_name(&field_name.0.contents); + indexes.push(sint_expr(-(field_name_id.0 as i128), expression_span)); + } + ast::LValue::Index { index, array } => { + cursor = array; + indexes.push(index.clone()); + } + ast::LValue::Dereference(_ref) => { + unimplemented![] + } + } + } + build_assign_member_stmt( + var_id, + &indexes, + &id_expr(&ident("__debug_expr", expression_span)), + ) + } + }; + let ret_kind = + ast::StatementKind::Expression(id_expr(&ident("__debug_expr", expression_span))); + + ast::Statement { + kind: ast::StatementKind::Assign(ast::AssignStatement { + lvalue: assign_stmt.lvalue.clone(), + expression: ast::Expression { + kind: ast::ExpressionKind::Block(ast::BlockExpression(vec![ + ast::Statement { kind: let_kind, span: 
expression_span }, + new_assign_stmt, + ast::Statement { kind: ret_kind, span: expression_span }, + ])), + span: expression_span, + }, + }), + span: *span, + } + } + + fn walk_expr(&mut self, expr: &mut ast::Expression) { + match &mut expr.kind { + ast::ExpressionKind::Block(ast::BlockExpression(ref mut statements)) => { + self.scope.push(HashMap::default()); + self.walk_scope(statements, expr.span); + } + ast::ExpressionKind::Prefix(prefix_expr) => { + self.walk_expr(&mut prefix_expr.rhs); + } + ast::ExpressionKind::Index(index_expr) => { + self.walk_expr(&mut index_expr.collection); + self.walk_expr(&mut index_expr.index); + } + ast::ExpressionKind::Call(call_expr) => { + // TODO: push a stack frame or something here? + self.walk_expr(&mut call_expr.func); + call_expr.arguments.iter_mut().for_each(|ref mut expr| { + self.walk_expr(expr); + }); + } + ast::ExpressionKind::MethodCall(mc_expr) => { + // TODO: also push a stack frame here + self.walk_expr(&mut mc_expr.object); + mc_expr.arguments.iter_mut().for_each(|ref mut expr| { + self.walk_expr(expr); + }); + } + ast::ExpressionKind::Constructor(c_expr) => { + c_expr.fields.iter_mut().for_each(|(_id, ref mut expr)| { + self.walk_expr(expr); + }); + } + ast::ExpressionKind::MemberAccess(ma_expr) => { + self.walk_expr(&mut ma_expr.lhs); + } + ast::ExpressionKind::Cast(cast_expr) => { + self.walk_expr(&mut cast_expr.lhs); + } + ast::ExpressionKind::Infix(infix_expr) => { + self.walk_expr(&mut infix_expr.lhs); + self.walk_expr(&mut infix_expr.rhs); + } + ast::ExpressionKind::If(if_expr) => { + self.walk_expr(&mut if_expr.condition); + self.walk_expr(&mut if_expr.consequence); + if let Some(ref mut alt) = if_expr.alternative { + self.walk_expr(alt); + } + } + ast::ExpressionKind::Tuple(exprs) => { + exprs.iter_mut().for_each(|ref mut expr| { + self.walk_expr(expr); + }); + } + ast::ExpressionKind::Lambda(lambda) => { + self.walk_expr(&mut lambda.body); + } + ast::ExpressionKind::Parenthesized(expr) => { + 
self.walk_expr(expr); + } + _ => {} + } + } + + fn walk_for(&mut self, for_stmt: &mut ast::ForLoopStatement) { + let var_name = &for_stmt.identifier.0.contents; + let var_id = self.insert_var(var_name); + + let set_stmt = build_assign_var_stmt(var_id, id_expr(&for_stmt.identifier)); + let drop_stmt = build_drop_var_stmt(var_id, Span::empty(for_stmt.span.end())); + + self.walk_expr(&mut for_stmt.block); + for_stmt.block = ast::Expression { + kind: ast::ExpressionKind::Block(ast::BlockExpression(vec![ + set_stmt, + ast::Statement { + kind: ast::StatementKind::Semi(for_stmt.block.clone()), + span: for_stmt.block.span, + }, + drop_stmt, + ])), + span: for_stmt.span, + }; + } + + fn walk_statement(&mut self, stmt: &mut ast::Statement) { + match &mut stmt.kind { + ast::StatementKind::Let(let_stmt) => { + *stmt = self.walk_let_statement(let_stmt, &stmt.span); + } + ast::StatementKind::Assign(assign_stmt) => { + *stmt = self.walk_assign_statement(assign_stmt, &stmt.span); + } + ast::StatementKind::Expression(expr) => { + self.walk_expr(expr); + } + ast::StatementKind::Semi(expr) => { + self.walk_expr(expr); + } + ast::StatementKind::For(ref mut for_stmt) => { + self.walk_for(for_stmt); + } + _ => {} // Constrain, Error + } + } + + fn insert_state_set_oracle(&self, module: &mut ParsedModule, n: u32) { + let member_assigns = (1..=n) + .map(|i| format!["__debug_member_assign_{i}"]) + .collect::>() + .join(",\n"); + let (program, errors) = parse_program(&format!( + r#" + use dep::__debug::{{ + __debug_var_assign, + __debug_var_drop, + __debug_dereference_assign, + {member_assigns}, + }};"# + )); + if !errors.is_empty() { + panic!("errors parsing internal oracle definitions: {errors:?}") + } + module.items.extend(program.items); + } +} + +pub fn build_debug_crate_file() -> String { + [ + r#" + #[oracle(__debug_var_assign)] + unconstrained fn __debug_var_assign_oracle(_var_id: u32, _value: T) {} + unconstrained fn __debug_var_assign_inner(var_id: u32, value: T) { + 
__debug_var_assign_oracle(var_id, value); + } + pub fn __debug_var_assign(var_id: u32, value: T) { + __debug_var_assign_inner(var_id, value); + } + + #[oracle(__debug_var_drop)] + unconstrained fn __debug_var_drop_oracle(_var_id: u32) {} + unconstrained fn __debug_var_drop_inner(var_id: u32) { + __debug_var_drop_oracle(var_id); + } + pub fn __debug_var_drop(var_id: u32) { + __debug_var_drop_inner(var_id); + } + + #[oracle(__debug_dereference_assign)] + unconstrained fn __debug_dereference_assign_oracle(_var_id: u32, _value: T) {} + unconstrained fn __debug_dereference_assign_inner(var_id: u32, value: T) { + __debug_dereference_assign_oracle(var_id, value); + } + pub fn __debug_dereference_assign(var_id: u32, value: T) { + __debug_dereference_assign_inner(var_id, value); + } + "# + .to_string(), + (1..=MAX_MEMBER_ASSIGN_DEPTH) + .map(|n| { + let var_sig = + (0..n).map(|i| format!["_v{i}: Field"]).collect::>().join(", "); + let vars = (0..n).map(|i| format!["_v{i}"]).collect::>().join(", "); + format!( + r#" + #[oracle(__debug_member_assign_{n})] + unconstrained fn __debug_oracle_member_assign_{n}( + _var_id: u32, _value: T, {var_sig} + ) {{}} + unconstrained fn __debug_inner_member_assign_{n}( + var_id: u32, value: T, {var_sig} + ) {{ + __debug_oracle_member_assign_{n}(var_id, value, {vars}); + }} + pub fn __debug_member_assign_{n}(var_id: u32, value: T, {var_sig}) {{ + __debug_inner_member_assign_{n}(var_id, value, {vars}); + }} + + "# + ) + }) + .collect::>() + .join("\n"), + ] + .join("\n") +} + +fn build_assign_var_stmt(var_id: SourceVarId, expr: ast::Expression) -> ast::Statement { + let span = expr.span; + let kind = ast::ExpressionKind::Call(Box::new(ast::CallExpression { + func: Box::new(ast::Expression { + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![ident("__debug_var_assign", span)], + kind: PathKind::Plain, + span, + }), + span, + }), + arguments: vec![uint_expr(var_id.0 as u128, span), expr], + })); + ast::Statement { kind: 
ast::StatementKind::Semi(ast::Expression { kind, span }), span } +} + +fn build_drop_var_stmt(var_id: SourceVarId, span: Span) -> ast::Statement { + let kind = ast::ExpressionKind::Call(Box::new(ast::CallExpression { + func: Box::new(ast::Expression { + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![ident("__debug_var_drop", span)], + kind: PathKind::Plain, + span, + }), + span, + }), + arguments: vec![uint_expr(var_id.0 as u128, span)], + })); + ast::Statement { kind: ast::StatementKind::Semi(ast::Expression { kind, span }), span } +} + +fn build_assign_member_stmt( + var_id: SourceVarId, + indexes: &[ast::Expression], + expr: &ast::Expression, +) -> ast::Statement { + let arity = indexes.len(); + if arity > MAX_MEMBER_ASSIGN_DEPTH { + unreachable!("Assignment to member exceeds maximum depth for debugging"); + } + let span = expr.span; + let kind = ast::ExpressionKind::Call(Box::new(ast::CallExpression { + func: Box::new(ast::Expression { + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![ident(&format!["__debug_member_assign_{arity}"], span)], + kind: PathKind::Plain, + span, + }), + span, + }), + arguments: [ + vec![uint_expr(var_id.0 as u128, span)], + vec![expr.clone()], + indexes.iter().rev().cloned().collect(), + ] + .concat(), + })); + ast::Statement { kind: ast::StatementKind::Semi(ast::Expression { kind, span }), span } +} + +fn pattern_vars(pattern: &ast::Pattern) -> Vec<(ast::Ident, bool)> { + let mut vars = vec![]; + let mut stack = VecDeque::from([(pattern, false)]); + while stack.front().is_some() { + let (pattern, is_mut) = stack.pop_front().unwrap(); + match pattern { + ast::Pattern::Identifier(id) => { + vars.push((id.clone(), is_mut)); + } + ast::Pattern::Mutable(pattern, _) => { + stack.push_back((pattern, true)); + } + ast::Pattern::Tuple(patterns, _) => { + stack.extend(patterns.iter().map(|pattern| (pattern, false))); + } + ast::Pattern::Struct(_, pids, _) => { + stack.extend(pids.iter().map(|(_, pattern)| 
(pattern, is_mut))); + vars.extend(pids.iter().map(|(id, _)| (id.clone(), false))); + } + } + } + vars +} + +fn ident(s: &str, span: Span) -> ast::Ident { + ast::Ident(Spanned::from(span, s.to_string())) +} + +fn id_expr(id: &ast::Ident) -> ast::Expression { + ast::Expression { + kind: ast::ExpressionKind::Variable(Path { + segments: vec![id.clone()], + kind: PathKind::Plain, + span: id.span(), + }), + span: id.span(), + } +} + +fn uint_expr(x: u128, span: Span) -> ast::Expression { + ast::Expression { + kind: ast::ExpressionKind::Literal(ast::Literal::Integer(x.into(), false)), + span, + } +} + +fn sint_expr(x: i128, span: Span) -> ast::Expression { + ast::Expression { + kind: ast::ExpressionKind::Literal(ast::Literal::Integer(x.abs().into(), x < 0)), + span, + } +} diff --git a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index f7441750fc8..0d1dd1b4337 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -15,7 +15,7 @@ use crate::hir::type_check::{type_check_func, TypeCheckError, TypeChecker}; use crate::hir::Context; use crate::macros_api::{MacroError, MacroProcessor}; -use crate::node_interner::{FuncId, NodeInterner, StmtId, StructId, TraitId, TypeAliasId}; +use crate::node_interner::{FuncId, GlobalId, NodeInterner, StructId, TraitId, TypeAliasId}; use crate::parser::{ParserError, SortedModule}; use crate::{ @@ -109,7 +109,7 @@ pub struct UnresolvedTypeAlias { pub struct UnresolvedGlobal { pub file_id: FileId, pub module_id: LocalModuleId, - pub stmt_id: StmtId, + pub global_id: GlobalId, pub stmt_def: LetStatement, } @@ -317,9 +317,10 @@ impl DefCollector { // Must resolve structs before we resolve globals. 
errors.extend(resolve_structs(context, def_collector.collected_types, crate_id)); - // We must wait to resolve non-integer globals until after we resolve structs since structs + // We must wait to resolve non-integer globals until after we resolve structs since struct // globals will need to reference the struct type they're initialized to to ensure they are valid. resolved_globals.extend(resolve_globals(context, other_globals, crate_id)); + errors.extend(resolved_globals.errors); // Bind trait impls to their trait. Collect trait functions, that have a // default implementation, which hasn't been overridden. @@ -338,31 +339,31 @@ impl DefCollector { // over trait methods if there are name conflicts. errors.extend(collect_impls(context, crate_id, &def_collector.collected_impls)); - // Lower each function in the crate. This is now possible since imports have been resolved - let file_func_ids = resolve_free_functions( + // Resolve each function in the crate. This is now possible since imports have been resolved + let mut functions = Vec::new(); + functions.extend(resolve_free_functions( &mut context.def_interner, crate_id, &context.def_maps, def_collector.collected_functions, None, &mut errors, - ); + )); - let file_method_ids = resolve_impls( + functions.extend(resolve_impls( &mut context.def_interner, crate_id, &context.def_maps, def_collector.collected_impls, &mut errors, - ); - let file_trait_impls_ids = resolve_trait_impls( + )); + + functions.extend(resolve_trait_impls( context, def_collector.collected_traits_impls, crate_id, &mut errors, - ); - - errors.extend(resolved_globals.errors); + )); for macro_processor in macro_processors { macro_processor.process_typed_ast(&crate_id, context).unwrap_or_else( @@ -371,12 +372,11 @@ impl DefCollector { }, ); } - errors.extend(type_check_globals(&mut context.def_interner, resolved_globals.globals)); - // Type check all of the functions in the crate - errors.extend(type_check_functions(&mut context.def_interner, 
file_func_ids)); - errors.extend(type_check_functions(&mut context.def_interner, file_method_ids)); - errors.extend(type_check_functions(&mut context.def_interner, file_trait_impls_ids)); + errors.extend(context.def_interner.check_for_dependency_cycles()); + + errors.extend(type_check_globals(&mut context.def_interner, resolved_globals.globals)); + errors.extend(type_check_functions(&mut context.def_interner, functions)); errors } } @@ -436,15 +436,15 @@ fn filter_literal_globals( fn type_check_globals( interner: &mut NodeInterner, - global_ids: Vec<(FileId, StmtId)>, + global_ids: Vec<(FileId, GlobalId)>, ) -> Vec<(CompilationError, fm::FileId)> { global_ids - .iter() - .flat_map(|(file_id, stmt_id)| { - TypeChecker::check_global(stmt_id, interner) + .into_iter() + .flat_map(|(file_id, global_id)| { + TypeChecker::check_global(global_id, interner) .iter() .cloned() - .map(|e| (e.into(), *file_id)) + .map(|e| (e.into(), file_id)) .collect::>() }) .collect() @@ -455,12 +455,12 @@ fn type_check_functions( file_func_ids: Vec<(FileId, FuncId)>, ) -> Vec<(CompilationError, fm::FileId)> { file_func_ids - .iter() + .into_iter() .flat_map(|(file, func)| { - type_check_func(interner, *func) + type_check_func(interner, func) .iter() .cloned() - .map(|e| (e.into(), *file)) + .map(|e| (e.into(), file)) .collect::>() }) .collect() diff --git a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 3cd60c33b8b..e63f7f4c413 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -89,13 +89,12 @@ impl<'a> ModCollector<'a> { for global in globals { let name = global.pattern.name_ident().clone(); - // First create dummy function in the DefInterner - // So that we can get a StmtId - let stmt_id = context.def_interner.push_empty_global(); + let global_id = + context.def_interner.push_empty_global(name.clone(), 
self.module_id, self.file_id); // Add the statement to the scope so its path can be looked up later - let result = - self.def_collector.def_map.modules[self.module_id.0].declare_global(name, stmt_id); + let result = self.def_collector.def_map.modules[self.module_id.0] + .declare_global(name, global_id); if let Err((first_def, second_def)) = result { let err = DefCollectorErrorKind::Duplicate { @@ -109,7 +108,7 @@ impl<'a> ModCollector<'a> { self.def_collector.collected_globals.push(UnresolvedGlobal { file_id: self.file_id, module_id: self.module_id, - stmt_id, + global_id, stmt_def: global, }); } @@ -440,11 +439,15 @@ impl<'a> ModCollector<'a> { } } TraitItem::Constant { name, .. } => { - let stmt_id = context.def_interner.push_empty_global(); + let global_id = context.def_interner.push_empty_global( + name.clone(), + trait_id.0.local_id, + self.file_id, + ); if let Err((first_def, second_def)) = self.def_collector.def_map.modules [trait_id.0.local_id.0] - .declare_global(name.clone(), stmt_id) + .declare_global(name.clone(), global_id) { let error = DefCollectorErrorKind::Duplicate { typ: DuplicateType::TraitAssociatedConst, diff --git a/noir/compiler/noirc_frontend/src/hir/def_map/module_data.rs b/noir/compiler/noirc_frontend/src/hir/def_map/module_data.rs index fbb5e5cf741..309618dd011 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_map/module_data.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_map/module_data.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; use noirc_errors::Location; use crate::{ - node_interner::{FuncId, StmtId, StructId, TraitId, TypeAliasId}, + node_interner::{FuncId, GlobalId, StructId, TraitId, TypeAliasId}, Ident, }; @@ -76,7 +76,7 @@ impl ModuleData { self.definitions.remove_definition(name); } - pub fn declare_global(&mut self, name: Ident, id: StmtId) -> Result<(), (Ident, Ident)> { + pub fn declare_global(&mut self, name: Ident, id: GlobalId) -> Result<(), (Ident, Ident)> { self.declare(name, id.into(), None) } diff --git 
a/noir/compiler/noirc_frontend/src/hir/def_map/module_def.rs b/noir/compiler/noirc_frontend/src/hir/def_map/module_def.rs index 3e5629639fa..54d092f9515 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_map/module_def.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_map/module_def.rs @@ -1,4 +1,4 @@ -use crate::node_interner::{FuncId, StmtId, StructId, TraitId, TypeAliasId}; +use crate::node_interner::{FuncId, GlobalId, StructId, TraitId, TypeAliasId}; use super::ModuleId; @@ -10,7 +10,7 @@ pub enum ModuleDefId { TypeId(StructId), TypeAliasId(TypeAliasId), TraitId(TraitId), - GlobalId(StmtId), + GlobalId(GlobalId), } impl ModuleDefId { @@ -42,9 +42,9 @@ impl ModuleDefId { } } - pub fn as_global(&self) -> Option { + pub fn as_global(&self) -> Option { match self { - ModuleDefId::GlobalId(stmt_id) => Some(*stmt_id), + ModuleDefId::GlobalId(global_id) => Some(*global_id), _ => None, } } @@ -88,9 +88,9 @@ impl From for ModuleDefId { } } -impl From for ModuleDefId { - fn from(stmt_id: StmtId) -> Self { - ModuleDefId::GlobalId(stmt_id) +impl From for ModuleDefId { + fn from(global_id: GlobalId) -> Self { + ModuleDefId::GlobalId(global_id) } } @@ -162,13 +162,13 @@ impl TryFromModuleDefId for TraitId { } } -impl TryFromModuleDefId for StmtId { +impl TryFromModuleDefId for GlobalId { fn try_from(id: ModuleDefId) -> Option { id.as_global() } fn dummy_id() -> Self { - StmtId::dummy_id() + GlobalId::dummy_id() } fn description() -> String { diff --git a/noir/compiler/noirc_frontend/src/hir/mod.rs b/noir/compiler/noirc_frontend/src/hir/mod.rs index 2124b5281f4..4d3800f1a50 100644 --- a/noir/compiler/noirc_frontend/src/hir/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/mod.rs @@ -4,6 +4,7 @@ pub mod resolution; pub mod scope; pub mod type_check; +use crate::debug::DebugInstrumenter; use crate::graph::{CrateGraph, CrateId}; use crate::hir_def::function::FuncMeta; use crate::node_interner::{FuncId, NodeInterner, StructId}; @@ -31,6 +32,8 @@ pub struct 
Context<'file_manager, 'parsed_files> { // is read-only however, once it has been passed to the Context. pub file_manager: Cow<'file_manager, FileManager>, + pub debug_instrumenter: DebugInstrumenter, + /// A map of each file that already has been visited from a prior `mod foo;` declaration. /// This is used to issue an error if a second `mod foo;` is declared to the same file. pub visited_files: BTreeMap, @@ -56,6 +59,7 @@ impl Context<'_, '_> { visited_files: BTreeMap::new(), crate_graph: CrateGraph::default(), file_manager: Cow::Owned(file_manager), + debug_instrumenter: DebugInstrumenter::default(), parsed_files: Cow::Owned(parsed_files), } } @@ -70,6 +74,7 @@ impl Context<'_, '_> { visited_files: BTreeMap::new(), crate_graph: CrateGraph::default(), file_manager: Cow::Borrowed(file_manager), + debug_instrumenter: DebugInstrumenter::default(), parsed_files: Cow::Borrowed(parsed_files), } } diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/compiler/noirc_frontend/src/hir/resolution/errors.rs index 0752838c82e..8e782ccdef4 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -88,6 +88,8 @@ pub enum ResolverError { MisplacedRecursiveAttribute { ident: Ident }, #[error("Usage of the `#[foreign]` or `#[builtin]` function attributes are not allowed outside of the Noir standard library")] LowLevelFunctionOutsideOfStdlib { ident: Ident }, + #[error("Dependency cycle found, '{item}' recursively depends on itself: {cycle} ")] + DependencyCycle { span: Span, item: String, cycle: String }, } impl ResolverError { @@ -332,6 +334,13 @@ impl From for Diagnostic { "Usage of the `#[foreign]` or `#[builtin]` function attributes are not allowed outside of the Noir standard library".into(), ident.span(), ), + ResolverError::DependencyCycle { span, item, cycle } => { + Diagnostic::simple_error( + "Dependency cycle found".into(), + format!("'{item}' recursively depends on 
itself: {cycle}"), + span, + ) + }, } } } diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/globals.rs b/noir/compiler/noirc_frontend/src/hir/resolution/globals.rs index b5aec212dbf..9fb31271727 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/globals.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/globals.rs @@ -6,13 +6,13 @@ use crate::{ def_map::ModuleId, Context, }, - node_interner::StmtId, + node_interner::GlobalId, }; use fm::FileId; use iter_extended::vecmap; pub(crate) struct ResolvedGlobals { - pub(crate) globals: Vec<(FileId, StmtId)>, + pub(crate) globals: Vec<(FileId, GlobalId)>, pub(crate) errors: Vec<(CompilationError, FileId)>, } @@ -40,16 +40,13 @@ pub(crate) fn resolve_globals( global.file_id, ); - let name = global.stmt_def.pattern.name_ident().clone(); - - let hir_stmt = resolver.resolve_global_let(global.stmt_def); + let hir_stmt = resolver.resolve_global_let(global.stmt_def, global.global_id); errors.extend(take_errors(global.file_id, resolver)); - context.def_interner.update_global(global.stmt_id, hir_stmt); - - context.def_interner.push_global(global.stmt_id, name, global.module_id); + let statement_id = context.def_interner.get_global(global.global_id).let_statement; + context.def_interner.replace_statement(statement_id, hir_stmt); - (global.file_id, global.stmt_id) + (global.file_id, global.global_id) }); ResolvedGlobals { globals, errors } } diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs index f025f817b09..cf1018a4927 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -28,8 +28,8 @@ use crate::graph::CrateId; use crate::hir::def_map::{LocalModuleId, ModuleDefId, TryFromModuleDefId, MAIN_FUNCTION}; use crate::hir_def::stmt::{HirAssignStatement, HirForStatement, HirLValue, HirPattern}; use crate::node_interner::{ - DefinitionId, 
DefinitionKind, ExprId, FuncId, NodeInterner, StmtId, StructId, TraitId, - TraitImplId, TraitMethodId, + DefinitionId, DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, NodeInterner, StmtId, + StructId, TraitId, TraitImplId, TraitMethodId, }; use crate::{ hir::{def_map::CrateDefMap, resolution::path_resolver::PathResolver}, @@ -93,6 +93,10 @@ pub struct Resolver<'a> { /// to the corresponding trait impl ID. current_trait_impl: Option, + /// The current dependency item we're resolving. + /// Used to link items to their dependencies in the dependency graph + current_item: Option, + /// True if the current module is a contract. /// This is usually determined by self.path_resolver.module_id(), but it can /// be overridden for impls. Impls are an odd case since the methods within resolve @@ -148,6 +152,7 @@ impl<'a> Resolver<'a> { errors: Vec::new(), lambda_stack: Vec::new(), current_trait_impl: None, + current_item: None, file, in_contract, } @@ -184,6 +189,7 @@ impl<'a> Resolver<'a> { func_id: FuncId, ) -> (HirFunction, FuncMeta, Vec) { self.scopes.start_function(); + self.current_item = Some(DependencyId::Function(func_id)); // Check whether the function has globals in the local module and add them to the scope self.resolve_local_globals(); @@ -338,21 +344,22 @@ impl<'a> Resolver<'a> { // This check is necessary to maintain the same definition ids in the interner. Currently, each function uses a new resolver that has its own ScopeForest and thus global scope. // We must first check whether an existing definition ID has been inserted as otherwise there will be multiple definitions for the same global statement. // This leads to an error in evaluation where the wrong definition ID is selected when evaluating a statement using the global. The check below prevents this error. 
- let mut stmt_id = None; + let mut global_id = None; let global = self.interner.get_all_globals(); - for (global_stmt_id, global_info) in global { + for global_info in global { if global_info.ident == name && global_info.local_id == self.path_resolver.local_module_id() { - stmt_id = Some(global_stmt_id); + global_id = Some(global_info.id); } } - let (ident, resolver_meta) = if let Some(id) = stmt_id { - let hir_let_stmt = self.interner.let_statement(&id); - let ident = hir_let_stmt.ident(); + let (ident, resolver_meta) = if let Some(id) = global_id { + let global = self.interner.get_global(id); + let hir_ident = HirIdent::non_trait_method(global.definition_id, global.location); + let ident = hir_ident.clone(); let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; - (hir_let_stmt.ident(), resolver_meta) + (hir_ident, resolver_meta) } else { let location = Location::new(name.span(), self.file); let id = @@ -599,6 +606,11 @@ impl<'a> Resolver<'a> { struct_type.borrow().to_string() }); + if let Some(current_item) = self.current_item { + let dependency_id = struct_type.borrow().id; + self.interner.add_type_dependency(current_item, dependency_id); + } + Type::Struct(struct_type, args) } None => Type::Error, @@ -651,7 +663,10 @@ impl<'a> Resolver<'a> { // If we cannot find a local generic of the same name, try to look up a global match self.path_resolver.resolve(self.def_maps, path.clone()) { Ok(ModuleDefId::GlobalId(id)) => { - Some(Type::Constant(self.eval_global_as_array_length(id))) + if let Some(current_item) = self.current_item { + self.interner.add_global_dependency(current_item, id); + } + Some(Type::Constant(self.eval_global_as_array_length(id, path))) } _ => None, } @@ -830,23 +845,27 @@ impl<'a> Resolver<'a> { pub fn resolve_struct_fields( mut self, unresolved: NoirStruct, + struct_id: StructId, ) -> (Generics, Vec<(Ident, Type)>, Vec) { let generics = self.add_generics(&unresolved.generics); // Check whether the struct 
definition has globals in the local module and add them to the scope self.resolve_local_globals(); + self.current_item = Some(DependencyId::Struct(struct_id)); let fields = vecmap(unresolved.fields, |(ident, typ)| (ident, self.resolve_type(typ))); (generics, fields, self.errors) } fn resolve_local_globals(&mut self) { - for (stmt_id, global_info) in self.interner.get_all_globals() { - if global_info.local_id == self.path_resolver.local_module_id() { - let global_stmt = self.interner.let_statement(&stmt_id); - let definition = DefinitionKind::Global(global_stmt.expression); - self.add_global_variable_decl(global_info.ident, definition); + let globals = vecmap(self.interner.get_all_globals(), |global| { + (global.id, global.local_id, global.ident.clone()) + }); + for (id, local_module_id, name) in globals { + if local_module_id == self.path_resolver.local_module_id() { + let definition = DefinitionKind::Global(id); + self.add_global_variable_decl(name, definition); } } } @@ -1116,9 +1135,15 @@ impl<'a> Resolver<'a> { } } - pub fn resolve_global_let(&mut self, let_stmt: crate::LetStatement) -> HirStatement { + pub fn resolve_global_let( + &mut self, + let_stmt: crate::LetStatement, + global_id: GlobalId, + ) -> HirStatement { + self.current_item = Some(DependencyId::Global(global_id)); let expression = self.resolve_expression(let_stmt.expression); - let definition = DefinitionKind::Global(expression); + let global_id = self.interner.next_global_id(); + let definition = DefinitionKind::Global(global_id); HirStatement::Let(HirLetStatement { pattern: self.resolve_pattern(let_stmt.pattern, definition), @@ -1139,9 +1164,16 @@ impl<'a> Resolver<'a> { }) } StatementKind::Constrain(constrain_stmt) => { + let span = constrain_stmt.0.span; + let assert_msg_call_expr_id = + self.resolve_assert_message(constrain_stmt.1, span, constrain_stmt.0.clone()); let expr_id = self.resolve_expression(constrain_stmt.0); - let assert_message = constrain_stmt.1; - 
HirStatement::Constrain(HirConstrainStatement(expr_id, self.file, assert_message)) + + HirStatement::Constrain(HirConstrainStatement( + expr_id, + self.file, + assert_msg_call_expr_id, + )) } StatementKind::Expression(expr) => { HirStatement::Expression(self.resolve_expression(expr)) @@ -1190,6 +1222,44 @@ impl<'a> Resolver<'a> { } } + fn resolve_assert_message( + &mut self, + assert_message_expr: Option, + span: Span, + condition: Expression, + ) -> Option { + let assert_message_expr = assert_message_expr?; + + if matches!( + assert_message_expr, + Expression { kind: ExpressionKind::Literal(Literal::Str(..)), .. } + ) { + return Some(self.resolve_expression(assert_message_expr)); + } + + let is_in_stdlib = self.path_resolver.module_id().krate.is_stdlib(); + let assert_msg_call_path = if is_in_stdlib { + ExpressionKind::Variable(Path { + segments: vec![Ident::from("resolve_assert_message")], + kind: PathKind::Crate, + span, + }) + } else { + ExpressionKind::Variable(Path { + segments: vec![Ident::from("std"), Ident::from("resolve_assert_message")], + kind: PathKind::Dep, + span, + }) + }; + let assert_msg_call_args = vec![assert_message_expr.clone(), condition]; + let assert_msg_call_expr = Expression::call( + Expression { kind: assert_msg_call_path, span }, + assert_msg_call_args, + span, + ); + Some(self.resolve_expression(assert_msg_call_expr)) + } + pub fn intern_stmt(&mut self, stmt: StatementKind) -> StmtId { let hir_stmt = self.resolve_stmt(stmt); self.interner.push_stmt(hir_stmt) @@ -1352,6 +1422,10 @@ impl<'a> Resolver<'a> { if hir_ident.id != DefinitionId::dummy_id() { match self.interner.definition(hir_ident.id).kind { DefinitionKind::Function(id) => { + if let Some(current_item) = self.current_item { + self.interner.add_function_dependency(current_item, id); + } + if self.interner.function_visibility(id) != FunctionVisibility::Public { @@ -1363,7 +1437,11 @@ impl<'a> Resolver<'a> { ); } } - DefinitionKind::Global(_) => {} + 
DefinitionKind::Global(global_id) => { + if let Some(current_item) = self.current_item { + self.interner.add_global_dependency(current_item, global_id); + } + } DefinitionKind::GenericType(_) => { // Initialize numeric generics to a polymorphic integer type in case // they're used in expressions. We must do this here since the type @@ -1541,13 +1619,15 @@ impl<'a> Resolver<'a> { } let pattern = self.resolve_pattern_mutable(*pattern, Some(span), definition); - HirPattern::Mutable(Box::new(pattern), span) + let location = Location::new(span, self.file); + HirPattern::Mutable(Box::new(pattern), location) } Pattern::Tuple(fields, span) => { let fields = vecmap(fields, |field| { self.resolve_pattern_mutable(field, mutable, definition.clone()) }); - HirPattern::Tuple(fields, span) + let location = Location::new(span, self.file); + HirPattern::Tuple(fields, location) } Pattern::Struct(name, fields, span) => { let error_identifier = |this: &mut Self| { @@ -1576,7 +1656,8 @@ impl<'a> Resolver<'a> { let fields = self.resolve_constructor_fields(typ, fields, span, resolve_field); let typ = Type::Struct(struct_type, generics); - HirPattern::Struct(typ, fields, span) + let location = Location::new(span, self.file); + HirPattern::Struct(typ, fields, location) } } } @@ -1656,8 +1737,8 @@ impl<'a> Resolver<'a> { } if let Some(global) = TryFromModuleDefId::try_from(id) { - let let_stmt = self.interner.let_statement(&global); - return Ok(let_stmt.ident().id); + let global = self.interner.get_global(global); + return Ok(global.definition_id); } let expected = "global variable".into(); @@ -1839,10 +1920,11 @@ impl<'a> Resolver<'a> { self.interner.push_expr(hir_block) } - fn eval_global_as_array_length(&mut self, global: StmtId) -> u64 { - let stmt = match self.interner.statement(&global) { - HirStatement::Let(let_expr) => let_expr, - _ => return 0, + fn eval_global_as_array_length(&mut self, global: GlobalId, path: &Path) -> u64 { + let Some(stmt) = 
self.interner.get_global_let_statement(global) else { + let path = path.clone(); + self.push_err(ResolverError::NoSuchNumericTypeVariable { path }); + return 0; }; let length = stmt.expression; diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/structs.rs b/noir/compiler/noirc_frontend/src/hir/resolution/structs.rs index cf3e3436c88..ed7aa86e718 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/structs.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/structs.rs @@ -32,7 +32,8 @@ pub(crate) fn resolve_structs( // Each struct should already be present in the NodeInterner after def collection. for (type_id, typ) in structs { let file_id = typ.file_id; - let (generics, fields, resolver_errors) = resolve_struct_fields(context, crate_id, typ); + let (generics, fields, resolver_errors) = + resolve_struct_fields(context, crate_id, type_id, typ); errors.extend(vecmap(resolver_errors, |err| (err.into(), file_id))); context.def_interner.update_struct(type_id, |struct_def| { struct_def.set_fields(fields); @@ -67,6 +68,7 @@ pub(crate) fn resolve_structs( fn resolve_struct_fields( context: &mut Context, krate: CrateId, + type_id: StructId, unresolved: UnresolvedStruct, ) -> (Generics, Vec<(Ident, Type)>, Vec) { let path_resolver = @@ -74,7 +76,7 @@ fn resolve_struct_fields( let file_id = unresolved.file_id; let (generics, fields, errors) = Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file_id) - .resolve_struct_fields(unresolved.struct_def); + .resolve_struct_fields(unresolved.struct_def, type_id); (generics, fields, errors) } diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/compiler/noirc_frontend/src/hir/type_check/errors.rs index 267dbd6b5be..3967d7642f7 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -115,6 +115,12 @@ pub enum TypeCheckError { NoMatchingImplFound { constraints: Vec<(Type, String)>, 
span: Span }, #[error("Constraint for `{typ}: {trait_name}` is not needed, another matching impl is already in scope")] UnneededTraitConstraint { trait_name: String, typ: Type, span: Span }, + #[error( + "Cannot pass a mutable reference from a constrained runtime to an unconstrained runtime" + )] + ConstrainedReferenceToUnconstrained { span: Span }, + #[error("Slices cannot be returned from an unconstrained runtime to a constrained runtime")] + UnconstrainedSliceReturnToConstrained { span: Span }, } impl TypeCheckError { @@ -202,7 +208,9 @@ impl From for Diagnostic { | TypeCheckError::AmbiguousBitWidth { span, .. } | TypeCheckError::IntegerAndFieldBinaryOperation { span } | TypeCheckError::OverflowingAssignment { span, .. } - | TypeCheckError::FieldModulo { span } => { + | TypeCheckError::FieldModulo { span } + | TypeCheckError::ConstrainedReferenceToUnconstrained { span } + | TypeCheckError::UnconstrainedSliceReturnToConstrained { span } => { Diagnostic::simple_error(error.to_string(), String::new(), span) } TypeCheckError::PublicReturnType { typ, span } => Diagnostic::simple_error( diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs index 5885707a9b7..998abeedcec 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -36,6 +36,18 @@ impl<'interner> TypeChecker<'interner> { } } + fn is_unconstrained_call(&self, expr: &ExprId) -> bool { + if let HirExpression::Ident(expr::HirIdent { id, .. }) = self.interner.expression(expr) { + if let Some(DefinitionKind::Function(func_id)) = + self.interner.try_definition(id).map(|def| &def.kind) + { + let modifiers = self.interner.function_modifiers(func_id); + return modifiers.is_unconstrained; + } + } + false + } + /// Infers a type for a given expression, and return this type. 
/// As a side-effect, this function will also remember this type in the NodeInterner /// for the given expr_id key. @@ -139,6 +151,15 @@ impl<'interner> TypeChecker<'interner> { } HirExpression::Index(index_expr) => self.check_index_expression(expr_id, index_expr), HirExpression::Call(call_expr) => { + // Need to setup these flags here as `self` is borrowed mutably to type check the rest of the call expression + // These flags are later used to type check calls to unconstrained functions from constrained functions + let current_func = self + .current_function + .expect("Can only have call expression inside of a function body"); + let func_mod = self.interner.function_modifiers(¤t_func); + let is_current_func_constrained = !func_mod.is_unconstrained; + let is_unconstrained_call = self.is_unconstrained_call(&call_expr.func); + self.check_if_deprecated(&call_expr.func); let function = self.check_expression(&call_expr.func); @@ -147,8 +168,35 @@ impl<'interner> TypeChecker<'interner> { let typ = self.check_expression(arg); (typ, *arg, self.interner.expr_span(arg)) }); + + // Check that we are not passing a mutable reference from a constrained runtime to an unconstrained runtime + for (typ, _, _) in args.iter() { + if is_current_func_constrained + && is_unconstrained_call + && matches!(&typ, Type::MutableReference(_)) + { + self.errors.push(TypeCheckError::ConstrainedReferenceToUnconstrained { + span: self.interner.expr_span(expr_id), + }); + return Type::Error; + } + } + let span = self.interner.expr_span(expr_id); - self.bind_function_type(function, args, span) + let return_type = self.bind_function_type(function, args, span); + + // Check that we are not passing a slice from an unconstrained runtime to a constrained runtime + if is_current_func_constrained + && is_unconstrained_call + && return_type.contains_slice() + { + self.errors.push(TypeCheckError::UnconstrainedSliceReturnToConstrained { + span: self.interner.expr_span(expr_id), + }); + return Type::Error; + } 
+ + return_type } HirExpression::MethodCall(mut method_call) => { let mut object_type = self.check_expression(&method_call.object).follow_bindings(); diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs b/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs index 3c2a970ee84..8952ba83586 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -15,7 +15,7 @@ pub use errors::TypeCheckError; use crate::{ hir_def::{expr::HirExpression, stmt::HirStatement, traits::TraitConstraint}, - node_interner::{ExprId, FuncId, NodeInterner, StmtId}, + node_interner::{ExprId, FuncId, GlobalId, NodeInterner}, Type, }; @@ -193,7 +193,10 @@ impl<'interner> TypeChecker<'interner> { (body_type, std::mem::take(&mut self.delayed_type_checks)) } - pub fn check_global(id: &StmtId, interner: &'interner mut NodeInterner) -> Vec { + pub fn check_global( + id: GlobalId, + interner: &'interner mut NodeInterner, + ) -> Vec { let mut this = Self { delayed_type_checks: Vec::new(), interner, @@ -201,7 +204,8 @@ impl<'interner> TypeChecker<'interner> { trait_constraints: Vec::new(), current_function: None, }; - this.check_statement(id); + let statement = this.interner.get_global(id).let_statement; + this.check_statement(&statement); this.errors } diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/noir/compiler/noirc_frontend/src/hir/type_check/stmt.rs index fd8ae62d34e..1bd6c16277b 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -8,6 +8,7 @@ use crate::hir_def::stmt::{ }; use crate::hir_def::types::Type; use crate::node_interner::{DefinitionId, ExprId, StmtId}; +use crate::UnaryOp; use super::errors::{Source, TypeCheckError}; use super::TypeChecker; @@ -92,7 +93,7 @@ impl<'interner> TypeChecker<'interner> { match pattern { HirPattern::Identifier(ident) => self.interner.push_definition_type(ident.id, typ), 
HirPattern::Mutable(pattern, _) => self.bind_pattern(pattern, typ), - HirPattern::Tuple(fields, span) => match typ { + HirPattern::Tuple(fields, location) => match typ { Type::Tuple(field_types) if field_types.len() == fields.len() => { for (field, field_type) in fields.iter().zip(field_types) { self.bind_pattern(field, field_type); @@ -106,16 +107,16 @@ impl<'interner> TypeChecker<'interner> { self.errors.push(TypeCheckError::TypeMismatchWithSource { expected, actual: other, - span: *span, + span: location.span, source: Source::Assignment, }); } }, - HirPattern::Struct(struct_type, fields, span) => { + HirPattern::Struct(struct_type, fields, location) => { self.unify(struct_type, &typ, || TypeCheckError::TypeMismatchWithSource { expected: struct_type.clone(), actual: typ.clone(), - span: *span, + span: location.span, source: Source::Assignment, }); @@ -303,6 +304,9 @@ impl<'interner> TypeChecker<'interner> { let expr_type = self.check_expression(&stmt.0); let expr_span = self.interner.expr_span(&stmt.0); + // Must type check the assertion message expression so that we instantiate bindings + stmt.2.map(|assert_msg_expr| self.check_expression(&assert_msg_expr)); + self.unify(&expr_type, &Type::Bool, || TypeCheckError::TypeMismatch { expr_typ: expr_type.to_string(), expected_typ: Type::Bool.to_string(), @@ -358,9 +362,15 @@ impl<'interner> TypeChecker<'interner> { }; }; } - HirExpression::Prefix(_) => self - .errors - .push(TypeCheckError::InvalidUnaryOp { kind: annotated_type.to_string(), span }), + HirExpression::Prefix(expr) => { + self.lint_overflowing_uint(&expr.rhs, annotated_type); + if matches!(expr.operator, UnaryOp::Minus) { + self.errors.push(TypeCheckError::InvalidUnaryOp { + kind: "annotated_type".to_string(), + span, + }); + } + } HirExpression::Infix(expr) => { self.lint_overflowing_uint(&expr.lhs, annotated_type); self.lint_overflowing_uint(&expr.rhs, annotated_type); diff --git a/noir/compiler/noirc_frontend/src/hir_def/function.rs 
b/noir/compiler/noirc_frontend/src/hir_def/function.rs index 78f44696b72..d3ab2a9393b 100644 --- a/noir/compiler/noirc_frontend/src/hir_def/function.rs +++ b/noir/compiler/noirc_frontend/src/hir_def/function.rs @@ -43,12 +43,7 @@ pub struct Parameters(pub Vec); impl Parameters { pub fn span(&self) -> Span { assert!(!self.is_empty()); - let mut spans = vecmap(&self.0, |param| match ¶m.0 { - HirPattern::Identifier(ident) => ident.location.span, - HirPattern::Mutable(_, span) => *span, - HirPattern::Tuple(_, span) => *span, - HirPattern::Struct(_, _, span) => *span, - }); + let mut spans = vecmap(&self.0, |param| param.0.span()); let merged_span = spans.pop().unwrap(); for span in spans { diff --git a/noir/compiler/noirc_frontend/src/hir_def/stmt.rs b/noir/compiler/noirc_frontend/src/hir_def/stmt.rs index 34c9302c251..b910be1fdda 100644 --- a/noir/compiler/noirc_frontend/src/hir_def/stmt.rs +++ b/noir/compiler/noirc_frontend/src/hir_def/stmt.rs @@ -2,7 +2,7 @@ use super::expr::HirIdent; use crate::node_interner::ExprId; use crate::{Ident, Type}; use fm::FileId; -use noirc_errors::Span; +use noirc_errors::{Location, Span}; /// A HirStatement is the result of performing name resolution on /// the Statement AST node. Unlike the AST node, any nested nodes @@ -55,14 +55,14 @@ pub struct HirAssignStatement { /// originates from. This is used later in the SSA pass to issue /// an error if a constrain is found to be always false. 
#[derive(Debug, Clone)] -pub struct HirConstrainStatement(pub ExprId, pub FileId, pub Option); +pub struct HirConstrainStatement(pub ExprId, pub FileId, pub Option); #[derive(Debug, Clone, Hash)] pub enum HirPattern { Identifier(HirIdent), - Mutable(Box, Span), - Tuple(Vec, Span), - Struct(Type, Vec<(Ident, HirPattern)>, Span), + Mutable(Box, Location), + Tuple(Vec, Location), + Struct(Type, Vec<(Ident, HirPattern)>, Location), } impl HirPattern { @@ -92,9 +92,9 @@ impl HirPattern { pub fn span(&self) -> Span { match self { HirPattern::Identifier(ident) => ident.location.span, - HirPattern::Mutable(_, span) - | HirPattern::Tuple(_, span) - | HirPattern::Struct(_, _, span) => *span, + HirPattern::Mutable(_, location) + | HirPattern::Tuple(_, location) + | HirPattern::Struct(_, _, location) => location.span, } } } diff --git a/noir/compiler/noirc_frontend/src/hir_def/types.rs b/noir/compiler/noirc_frontend/src/hir_def/types.rs index 0ba4cb2da65..00e24de279b 100644 --- a/noir/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/compiler/noirc_frontend/src/hir_def/types.rs @@ -157,7 +157,7 @@ impl Type { } } - fn contains_slice(&self) -> bool { + pub(crate) fn contains_slice(&self) -> bool { match self { Type::Array(size, _) => matches!(size.as_ref(), Type::NotConstant), Type::Struct(struct_typ, generics) => { diff --git a/noir/compiler/noirc_frontend/src/lib.rs b/noir/compiler/noirc_frontend/src/lib.rs index b6d4c568334..eb00a61adf6 100644 --- a/noir/compiler/noirc_frontend/src/lib.rs +++ b/noir/compiler/noirc_frontend/src/lib.rs @@ -11,6 +11,7 @@ #![warn(clippy::semicolon_if_nothing_returned)] pub mod ast; +pub mod debug; pub mod graph; pub mod lexer; pub mod monomorphization; @@ -48,7 +49,7 @@ pub mod macros_api { pub use crate::hir_def::expr::{HirExpression, HirLiteral}; pub use crate::hir_def::stmt::HirStatement; pub use crate::node_interner::{NodeInterner, StructId}; - pub use crate::parser::SortedModule; + pub use crate::parser::{parse_program, SortedModule}; 
pub use crate::token::SecondaryAttribute; pub use crate::hir::def_map::ModuleDefId; diff --git a/noir/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/compiler/noirc_frontend/src/monomorphization/ast.rs index 515d9710882..73e7ef372ab 100644 --- a/noir/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -1,6 +1,9 @@ use acvm::FieldElement; use iter_extended::vecmap; -use noirc_errors::Location; +use noirc_errors::{ + debug_info::{DebugTypes, DebugVariables}, + Location, +}; use crate::{ hir_def::function::FunctionSignature, BinaryOpKind, Distinctness, Signedness, Visibility, @@ -31,7 +34,7 @@ pub enum Expression { ExtractTupleField(Box, usize), Call(Call), Let(Let), - Constrain(Box, Location, Option), + Constrain(Box, Location, Option>), Assign(Assign), Semi(Box), } @@ -248,9 +251,12 @@ pub struct Program { pub return_visibility: Visibility, /// Indicates to a backend whether a SNARK-friendly prover should be used. 
pub recursive: bool, + pub debug_variables: DebugVariables, + pub debug_types: DebugTypes, } impl Program { + #[allow(clippy::too_many_arguments)] pub fn new( functions: Vec, main_function_signature: FunctionSignature, @@ -258,6 +264,8 @@ impl Program { return_location: Option, return_visibility: Visibility, recursive: bool, + debug_variables: DebugVariables, + debug_types: DebugTypes, ) -> Program { Program { functions, @@ -266,6 +274,8 @@ impl Program { return_location, return_visibility, recursive, + debug_variables, + debug_types, } } diff --git a/noir/compiler/noirc_frontend/src/monomorphization/debug.rs b/noir/compiler/noirc_frontend/src/monomorphization/debug.rs new file mode 100644 index 00000000000..d36816e3d37 --- /dev/null +++ b/noir/compiler/noirc_frontend/src/monomorphization/debug.rs @@ -0,0 +1,190 @@ +use iter_extended::vecmap; +use noirc_errors::debug_info::DebugVarId; +use noirc_errors::Location; +use noirc_printable_type::PrintableType; + +use crate::debug::{SourceFieldId, SourceVarId}; +use crate::hir_def::expr::*; +use crate::node_interner::ExprId; + +use super::ast::{Expression, Ident}; +use super::Monomorphizer; + +const DEBUG_MEMBER_ASSIGN_PREFIX: &str = "__debug_member_assign_"; +const DEBUG_VAR_ID_ARG_SLOT: usize = 0; +const DEBUG_VALUE_ARG_SLOT: usize = 1; +const DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT: usize = 2; + +impl From for SourceVarId { + fn from(var_id: u128) -> Self { + Self(var_id as u32) + } +} + +impl From for SourceFieldId { + fn from(field_id: u128) -> Self { + Self(field_id as u32) + } +} + +impl<'interner> Monomorphizer<'interner> { + /// Patch instrumentation calls inserted for debugging. This will record + /// tracked variables and their types, and assign them an ID to use at + /// runtime. This ID is different from the source ID assigned at + /// instrumentation time because at that point we haven't fully resolved the + /// types for generic functions. 
So a single generic function may be + /// instantiated multiple times with its tracked variables being of + /// different types for each instance at runtime. + pub(super) fn patch_debug_instrumentation_call( + &mut self, + call: &HirCallExpression, + arguments: &mut [Expression], + ) { + let original_func = Box::new(self.expr(call.func)); + if let Expression::Ident(Ident { name, .. }) = original_func.as_ref() { + if name == "__debug_var_assign" { + self.patch_debug_var_assign(call, arguments); + } else if name == "__debug_var_drop" { + self.patch_debug_var_drop(call, arguments); + } else if let Some(arity) = name.strip_prefix(DEBUG_MEMBER_ASSIGN_PREFIX) { + let arity = arity.parse::().expect("failed to parse member assign arity"); + self.patch_debug_member_assign(call, arguments, arity); + } + } + } + + /// Update instrumentation code inserted on variable assignment. We need to + /// register the variable instance, its type and replace the source_var_id + /// with the ID of the registration. Multiple registrations of the same + /// variable are possible if using generic functions, hence the temporary ID + /// created when injecting the instrumentation code can map to multiple IDs + /// at runtime. 
+ fn patch_debug_var_assign(&mut self, call: &HirCallExpression, arguments: &mut [Expression]) { + let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); + let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); + let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { + unreachable!("Missing source_var_id in __debug_var_assign call"); + }; + + // instantiate tracked variable for the value type and associate it with + // the ID used by the injected instrumentation code + let var_type = self.interner.id_type(call.arguments[DEBUG_VALUE_ARG_SLOT]); + let source_var_id = source_var_id.to_u128().into(); + // then update the ID used for tracking at runtime + let var_id = self.debug_type_tracker.insert_var(source_var_id, var_type); + let interned_var_id = self.intern_var_id(var_id, &call.location); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + } + + /// Update instrumentation code for a variable being dropped out of scope. + /// Given the source_var_id we search for the last assigned debug var_id and + /// replace it instead. + fn patch_debug_var_drop(&mut self, call: &HirCallExpression, arguments: &mut [Expression]) { + let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); + let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); + let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { + unreachable!("Missing source_var_id in __debug_var_drop call"); + }; + // update variable ID for tracked drops (ie. 
when the var goes out of scope) + let source_var_id = source_var_id.to_u128().into(); + let var_id = self + .debug_type_tracker + .get_var_id(source_var_id) + .unwrap_or_else(|| unreachable!("failed to find debug variable")); + let interned_var_id = self.intern_var_id(var_id, &call.location); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + } + + /// Update instrumentation code inserted when assigning to a member of an + /// existing variable. Same as above for replacing the source_var_id, but also + /// we need to resolve the path and the type of the member being assigned. + /// For this last part, we need to resolve the mapping from field names in + /// structs to positions in the runtime tuple, since all structs are + /// replaced by tuples during compilation. + fn patch_debug_member_assign( + &mut self, + call: &HirCallExpression, + arguments: &mut [Expression], + arity: usize, + ) { + let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); + let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); + let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { + unreachable!("Missing source_var_id in __debug_member_assign call"); + }; + // update variable member assignments + let source_var_id = source_var_id.to_u128().into(); + + let var_type = self + .debug_type_tracker + .get_type(source_var_id) + .unwrap_or_else(|| panic!("type not found for {source_var_id:?}")) + .clone(); + let mut cursor_type = &var_type; + for i in 0..arity { + if let Some(HirExpression::Literal(HirLiteral::Integer(fe_i, i_neg))) = + hir_arguments.get(DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT + i) + { + let index = fe_i.to_i128().unsigned_abs(); + if *i_neg { + // We use negative indices at instrumentation time to indicate + // and reference member accesses by name which cannot be + // resolved until we have a type. 
This strategy is also used + // for tuple member access because syntactically they are + // the same as named field accesses. + let field_index = self + .debug_type_tracker + .resolve_field_index(index.into(), cursor_type) + .unwrap_or_else(|| { + unreachable!("failed to resolve {i}-th member indirection on type {cursor_type:?}") + }); + + cursor_type = element_type_at_index(cursor_type, field_index); + let index_id = self.interner.push_expr(HirExpression::Literal( + HirLiteral::Integer(field_index.into(), false), + )); + self.interner.push_expr_type(&index_id, crate::Type::FieldElement); + self.interner.push_expr_location( + index_id, + call.location.span, + call.location.file, + ); + arguments[DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT + i] = self.expr(index_id); + } else { + // array/string element using constant index + cursor_type = element_type_at_index(cursor_type, index as usize); + } + } else { + // array element using non-constant index + cursor_type = element_type_at_index(cursor_type, 0); + } + } + + let var_id = self + .debug_type_tracker + .get_var_id(source_var_id) + .unwrap_or_else(|| unreachable!("failed to find debug variable")); + let interned_var_id = self.intern_var_id(var_id, &call.location); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + } + + fn intern_var_id(&mut self, var_id: DebugVarId, location: &Location) -> ExprId { + let var_id_literal = HirLiteral::Integer((var_id.0 as u128).into(), false); + let expr_id = self.interner.push_expr(HirExpression::Literal(var_id_literal)); + self.interner.push_expr_type(&expr_id, crate::Type::FieldElement); + self.interner.push_expr_location(expr_id, location.span, location.file); + expr_id + } +} + +fn element_type_at_index(ptype: &PrintableType, i: usize) -> &PrintableType { + match ptype { + PrintableType::Array { length: _length, typ } => typ.as_ref(), + PrintableType::Tuple { types } => &types[i], + PrintableType::Struct { name: _name, fields } => &fields[i].1, + PrintableType::String { 
length: _length } => &PrintableType::UnsignedInteger { width: 8 }, + _ => { + panic!["expected type with sub-fields, found terminal type"] + } + } +} diff --git a/noir/compiler/noirc_frontend/src/monomorphization/debug_types.rs b/noir/compiler/noirc_frontend/src/monomorphization/debug_types.rs new file mode 100644 index 00000000000..fea073d394f --- /dev/null +++ b/noir/compiler/noirc_frontend/src/monomorphization/debug_types.rs @@ -0,0 +1,137 @@ +use crate::{ + debug::{DebugInstrumenter, SourceFieldId, SourceVarId}, + hir_def::types::Type, +}; +use noirc_errors::debug_info::{ + DebugTypeId, DebugTypes, DebugVarId, DebugVariable, DebugVariables, +}; +use noirc_printable_type::PrintableType; +use std::collections::HashMap; + +/// We keep a collection of the debug variables and their types in this +/// structure. The source_var_id refers to the ID given by the debug +/// instrumenter. This variable does not have a type yet and hence it +/// can be instantiated for multiple types if it's in the context of a generic +/// variable. The var_id refers to the ID of the instantiated variable which +/// will have a valid type. +#[derive(Debug, Clone, Default)] +pub struct DebugTypeTracker { + // Variable names collected during instrumentation injection + source_variables: HashMap, + + // Field names used for member access collected during instrumentation injection + source_field_names: HashMap, + + // Current instances of tracked variables from the ID given during + // instrumentation. The tracked var_id will change for each source_var_id + // when compiling generic functions. 
+ source_to_debug_vars: HashMap, + + // All instances of tracked variables + variables: HashMap, + + // Types of tracked variables + types: HashMap, + types_reverse: HashMap, + + next_var_id: u32, + next_type_id: u32, +} + +impl DebugTypeTracker { + pub fn build_from_debug_instrumenter(instrumenter: &DebugInstrumenter) -> Self { + DebugTypeTracker { + source_variables: instrumenter.variables.clone(), + source_field_names: instrumenter.field_names.clone(), + ..DebugTypeTracker::default() + } + } + + pub fn extract_vars_and_types(&self) -> (DebugVariables, DebugTypes) { + let debug_variables = self + .variables + .clone() + .into_iter() + .map(|(var_id, (source_var_id, type_id))| { + ( + var_id, + DebugVariable { + name: self.source_variables.get(&source_var_id).cloned().unwrap_or_else( + || { + unreachable!( + "failed to retrieve variable name for {source_var_id:?}" + ); + }, + ), + debug_type_id: type_id, + }, + ) + }) + .collect(); + let debug_types = self.types.clone().into_iter().collect(); + + (debug_variables, debug_types) + } + + pub fn resolve_field_index( + &self, + field_id: SourceFieldId, + cursor_type: &PrintableType, + ) -> Option { + self.source_field_names + .get(&field_id) + .and_then(|field_name| get_field(cursor_type, field_name)) + } + + pub fn insert_var(&mut self, source_var_id: SourceVarId, var_type: Type) -> DebugVarId { + if !self.source_variables.contains_key(&source_var_id) { + unreachable!("cannot find source debug variable {source_var_id:?}"); + } + + let ptype: PrintableType = var_type.follow_bindings().into(); + let type_id = self.types_reverse.get(&ptype).copied().unwrap_or_else(|| { + let type_id = DebugTypeId(self.next_type_id); + self.next_type_id += 1; + self.types_reverse.insert(ptype.clone(), type_id); + self.types.insert(type_id, ptype); + type_id + }); + // check if we need to instantiate the var with a new type + let existing_var_id = self.source_to_debug_vars.get(&source_var_id).and_then(|var_id| { + let (_, existing_type_id) 
= self.variables.get(var_id).unwrap(); + (*existing_type_id == type_id).then_some(var_id) + }); + if let Some(var_id) = existing_var_id { + *var_id + } else { + let var_id = DebugVarId(self.next_var_id); + self.next_var_id += 1; + self.variables.insert(var_id, (source_var_id, type_id)); + self.source_to_debug_vars.insert(source_var_id, var_id); + var_id + } + } + + pub fn get_var_id(&self, source_var_id: SourceVarId) -> Option { + self.source_to_debug_vars.get(&source_var_id).copied() + } + + pub fn get_type(&self, source_var_id: SourceVarId) -> Option<&PrintableType> { + self.source_to_debug_vars + .get(&source_var_id) + .and_then(|var_id| self.variables.get(var_id)) + .and_then(|(_, type_id)| self.types.get(type_id)) + } +} + +fn get_field(ptype: &PrintableType, field_name: &str) -> Option { + match ptype { + PrintableType::Struct { fields, .. } => { + fields.iter().position(|(name, _)| name == field_name) + } + PrintableType::Tuple { .. } | PrintableType::Array { .. } => { + field_name.parse::().ok() + } + _ => None, + } +} diff --git a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs index 0334e01af5d..21c095eb877 100644 --- a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -18,9 +18,10 @@ use std::{ }; use crate::{ + debug::DebugInstrumenter, hir_def::{ expr::*, - function::{FunctionSignature, Parameters}, + function::{FuncMeta, FunctionSignature, Parameters}, stmt::{HirAssignStatement, HirLValue, HirLetStatement, HirPattern, HirStatement}, types, }, @@ -31,8 +32,11 @@ use crate::{ }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; +use self::debug_types::DebugTypeTracker; pub mod ast; +mod debug; +pub mod debug_types; pub mod printer; struct LambdaContext { @@ -67,7 +71,7 @@ struct Monomorphizer<'interner> { finished_functions: BTreeMap, /// Used to reference existing definitions in the HIR - interner: 
&'interner NodeInterner, + interner: &'interner mut NodeInterner, lambda_envs_stack: Vec, @@ -77,6 +81,8 @@ struct Monomorphizer<'interner> { is_range_loop: bool, return_location: Option, + + debug_type_tracker: DebugTypeTracker, } type HirType = crate::Type; @@ -93,8 +99,17 @@ type HirType = crate::Type; /// this function. Typically, this is the function named "main" in the source project, /// but it can also be, for example, an arbitrary test function for running `nargo test`. #[tracing::instrument(level = "trace", skip(main, interner))] -pub fn monomorphize(main: node_interner::FuncId, interner: &NodeInterner) -> Program { - let mut monomorphizer = Monomorphizer::new(interner); +pub fn monomorphize(main: node_interner::FuncId, interner: &mut NodeInterner) -> Program { + monomorphize_debug(main, interner, &DebugInstrumenter::default()) +} + +pub fn monomorphize_debug( + main: node_interner::FuncId, + interner: &mut NodeInterner, + debug_instrumenter: &DebugInstrumenter, +) -> Program { + let debug_type_tracker = DebugTypeTracker::build_from_debug_instrumenter(debug_instrumenter); + let mut monomorphizer = Monomorphizer::new(interner, debug_type_tracker); let function_sig = monomorphizer.compile_main(main); while !monomorphizer.queue.is_empty() { @@ -109,20 +124,24 @@ pub fn monomorphize(main: node_interner::FuncId, interner: &NodeInterner) -> Pro } let functions = vecmap(monomorphizer.finished_functions, |(_, f)| f); - let meta = interner.function_meta(&main); + let FuncMeta { return_distinctness, return_visibility, kind, .. 
} = + monomorphizer.interner.function_meta(&main); + let (debug_variables, debug_types) = monomorphizer.debug_type_tracker.extract_vars_and_types(); Program::new( functions, function_sig, - meta.return_distinctness, + *return_distinctness, monomorphizer.return_location, - meta.return_visibility, - meta.kind == FunctionKind::Recursive, + *return_visibility, + *kind == FunctionKind::Recursive, + debug_variables, + debug_types, ) } impl<'interner> Monomorphizer<'interner> { - fn new(interner: &'interner NodeInterner) -> Self { + fn new(interner: &'interner mut NodeInterner, debug_type_tracker: DebugTypeTracker) -> Self { Monomorphizer { globals: HashMap::new(), locals: HashMap::new(), @@ -134,6 +153,7 @@ impl<'interner> Monomorphizer<'interner> { lambda_envs_stack: Vec::new(), is_range_loop: false, return_location: None, + debug_type_tracker, } } @@ -234,7 +254,7 @@ impl<'interner> Monomorphizer<'interner> { the_trait.self_type_typevar.force_bind(self_type); } - let meta = self.interner.function_meta(&f); + let meta = self.interner.function_meta(&f).clone(); let modifiers = self.interner.function_modifiers(&f); let name = self.interner.function_name(&f).to_owned(); @@ -244,14 +264,13 @@ impl<'interner> Monomorphizer<'interner> { Type::TraitAsType(..) 
=> &body_return_type, _ => meta.return_type(), }); - - let parameters = self.parameters(&meta.parameters); - - let body = self.expr(body_expr_id); let unconstrained = modifiers.is_unconstrained || matches!(modifiers.contract_function_type, Some(ContractFunctionType::Open)); + let parameters = self.parameters(&meta.parameters); + let body = self.expr(body_expr_id); let function = ast::Function { id, name, parameters, body, return_type, unconstrained }; + self.push_function(id, function); } @@ -370,7 +389,6 @@ impl<'interner> Monomorphizer<'interner> { let rhs = self.expr(infix.rhs); let operator = infix.operator.kind; let location = self.interner.expr_location(&expr); - if self.interner.get_selected_impl_for_expression(expr).is_some() { // If an impl was selected for this infix operator, replace it // with a method call to the appropriate trait impl method. @@ -483,7 +501,9 @@ impl<'interner> Monomorphizer<'interner> { HirStatement::Constrain(constrain) => { let expr = self.expr(constrain.0); let location = self.interner.expr_location(&constrain.0); - ast::Expression::Constrain(Box::new(expr), location, constrain.2) + let assert_message = + constrain.2.map(|assert_msg_expr| Box::new(self.expr(assert_msg_expr))); + ast::Expression::Constrain(Box::new(expr), location, assert_message) } HirStatement::Assign(assign) => self.assign(assign), HirStatement::For(for_loop) => { @@ -710,7 +730,12 @@ impl<'interner> Monomorphizer<'interner> { ident_expression } } - DefinitionKind::Global(expr_id) => self.expr(*expr_id), + DefinitionKind::Global(global_id) => { + let Some(let_) = self.interner.get_global_let_statement(*global_id) else { + unreachable!("Globals should have a corresponding let statement by monomorphization") + }; + self.expr(let_.expression) + } DefinitionKind::Local(_) => self.lookup_captured_expr(ident.id).unwrap_or_else(|| { let ident = self.local_ident(&ident).unwrap(); ast::Expression::Ident(ident) @@ -921,6 +946,9 @@ impl<'interner> Monomorphizer<'interner> 
{ let original_func = Box::new(self.expr(call.func)); let mut arguments = vecmap(&call.arguments, |id| self.expr(*id)); let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); + + self.patch_debug_instrumentation_call(&call, &mut arguments); + let return_type = self.interner.id_type(id); let return_type = self.convert_type(&return_type); @@ -933,6 +961,9 @@ impl<'interner> Monomorphizer<'interner> { // The first argument to the `print` oracle is a bool, indicating a newline to be inserted at the end of the input // The second argument is expected to always be an ident self.append_printable_type_info(&hir_arguments[1], &mut arguments); + } else if name.as_str() == "assert_message" { + // The first argument to the `assert_message` oracle is the expression passed as a message to an `assert` or `assert_eq` statement + self.append_printable_type_info(&hir_arguments[0], &mut arguments); } } } @@ -1028,7 +1059,7 @@ impl<'interner> Monomorphizer<'interner> { // The caller needs information as to whether it is handling a format string or a single type arguments.push(ast::Expression::Literal(ast::Literal::Bool(is_fmt_str))); } - _ => unreachable!("logging expr {:?} is not supported", arguments[0]), + _ => unreachable!("logging expr {:?} is not supported", hir_argument), } } @@ -1037,10 +1068,10 @@ impl<'interner> Monomorphizer<'interner> { // since they cannot be passed from ACIR into Brillig if let HirType::Array(size, _) = typ { if let HirType::NotConstant = **size { - unreachable!("println does not support slices. Convert the slice to an array before passing it to println"); + unreachable!("println and format strings do not support slices. 
Convert the slice to an array before passing it to println"); } } else if matches!(typ, HirType::MutableReference(_)) { - unreachable!("println does not support mutable references."); + unreachable!("println and format strings do not support mutable references."); } let printable_type: PrintableType = typ.into(); diff --git a/noir/compiler/noirc_frontend/src/node_interner.rs b/noir/compiler/noirc_frontend/src/node_interner.rs index 11ef12ef83e..0051c1b4f5f 100644 --- a/noir/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/compiler/noirc_frontend/src/node_interner.rs @@ -1,3 +1,4 @@ +use std::borrow::Cow; use std::collections::HashMap; use std::ops::Deref; @@ -5,12 +6,17 @@ use arena::{Arena, Index}; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{Location, Span, Spanned}; +use petgraph::algo::tarjan_scc; +use petgraph::prelude::DiGraph; +use petgraph::prelude::NodeIndex as PetGraphIndex; use crate::ast::Ident; use crate::graph::CrateId; +use crate::hir::def_collector::dc_crate::CompilationError; use crate::hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait, UnresolvedTypeAlias}; use crate::hir::def_map::{LocalModuleId, ModuleId}; +use crate::hir::resolution::errors::ResolverError; use crate::hir_def::stmt::HirLetStatement; use crate::hir_def::traits::TraitImpl; use crate::hir_def::traits::{Trait, TraitConstraint}; @@ -42,6 +48,7 @@ type StructAttributes = Vec; pub struct NodeInterner { pub(crate) nodes: Arena, pub(crate) func_meta: HashMap, + function_definition_ids: HashMap, // For a given function ID, this gives the function's modifiers which includes @@ -52,6 +59,14 @@ pub struct NodeInterner { // Contains the source module each function was defined in function_modules: HashMap, + /// This graph tracks dependencies between different global definitions. + /// This is used to ensure the absence of dependency cycles for globals and types. 
+ dependency_graph: DiGraph, + + /// To keep track of where each DependencyId is in `dependency_graph`, we need + /// this separate graph to map between the ids and indices. + dependency_graph_indices: HashMap, + // Map each `Index` to it's own location pub(crate) id_to_location: HashMap, @@ -126,7 +141,9 @@ pub struct NodeInterner { /// checking. field_indices: HashMap, - globals: HashMap, // NOTE: currently only used for checking repeat globals and restricting their scope to a module + // Maps GlobalId -> GlobalInfo + // NOTE: currently only used for checking repeat globals and restricting their scope to a module + globals: Vec, next_type_variable_id: std::cell::Cell, @@ -152,6 +169,24 @@ pub struct NodeInterner { pub(crate) type_ref_locations: Vec<(Type, Location)>, } +/// A dependency in the dependency graph may be a type or a definition. +/// Types can depend on definitions too. E.g. `Foo` depends on `COUNT` in: +/// +/// ```struct +/// global COUNT = 3; +/// +/// struct Foo { +/// array: [Field; COUNT], +/// } +/// ``` +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum DependencyId { + Struct(StructId), + Global(GlobalId), + Function(FuncId), + Alias(TypeAliasId), +} + /// A trait implementation is either a normal implementation that is present in the source /// program via an `impl` block, or it is assumed to exist from a `where` clause or similar. 
#[derive(Debug, Clone)] @@ -247,6 +282,17 @@ impl From for Index { } } +/// An ID for a global value +#[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)] +pub struct GlobalId(usize); + +impl GlobalId { + // Dummy id for error reporting + pub fn dummy_id() -> Self { + GlobalId(std::usize::MAX) + } +} + #[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)] pub struct StmtId(Index); @@ -397,7 +443,7 @@ impl DefinitionInfo { pub enum DefinitionKind { Function(FuncId), - Global(ExprId), + Global(GlobalId), /// Locals may be defined in let statements or parameters, /// in which case they will not have an associated ExprId @@ -418,7 +464,7 @@ impl DefinitionKind { pub fn get_rhs(&self) -> Option { match self { DefinitionKind::Function(_) => None, - DefinitionKind::Global(id) => Some(*id), + DefinitionKind::Global(_) => None, DefinitionKind::Local(id) => *id, DefinitionKind::GenericType(_) => None, } @@ -427,8 +473,12 @@ impl DefinitionKind { #[derive(Debug, Clone)] pub struct GlobalInfo { + pub id: GlobalId, + pub definition_id: DefinitionId, pub ident: Ident, pub local_id: LocalModuleId, + pub location: Location, + pub let_statement: StmtId, } impl Default for NodeInterner { @@ -440,6 +490,8 @@ impl Default for NodeInterner { function_modifiers: HashMap::new(), function_modules: HashMap::new(), func_id_to_trait: HashMap::new(), + dependency_graph: petgraph::graph::DiGraph::new(), + dependency_graph_indices: HashMap::new(), id_to_location: HashMap::new(), definitions: vec![], id_to_type: HashMap::new(), @@ -455,7 +507,7 @@ impl Default for NodeInterner { instantiation_bindings: HashMap::new(), field_indices: HashMap::new(), next_type_variable_id: std::cell::Cell::new(0), - globals: HashMap::new(), + globals: Vec::new(), struct_methods: HashMap::new(), primitive_methods: HashMap::new(), type_alias_ref: Vec::new(), @@ -617,26 +669,41 @@ impl NodeInterner { self.type_ref_locations.push((typ, location)); } - pub fn push_global(&mut self, stmt_id: StmtId, ident: Ident, local_id: 
LocalModuleId) { - self.globals.insert(stmt_id, GlobalInfo { ident, local_id }); + fn push_global( + &mut self, + ident: Ident, + local_id: LocalModuleId, + let_statement: StmtId, + file: FileId, + ) -> GlobalId { + let id = GlobalId(self.globals.len()); + let location = Location::new(ident.span(), file); + let name = ident.to_string(); + let definition_id = self.push_definition(name, false, DefinitionKind::Global(id), location); + self.globals.push(GlobalInfo { + id, + definition_id, + ident, + local_id, + let_statement, + location, + }); + id } - /// Intern an empty global stmt. Used for collecting globals - pub fn push_empty_global(&mut self) -> StmtId { - self.push_stmt(HirStatement::Error) + pub fn next_global_id(&mut self) -> GlobalId { + GlobalId(self.globals.len()) } - pub fn update_global(&mut self, stmt_id: StmtId, hir_stmt: HirStatement) { - let def = - self.nodes.get_mut(stmt_id.0).expect("ice: all function ids should have definitions"); - - let stmt = match def { - Node::Statement(stmt) => stmt, - _ => { - panic!("ice: all global ids should correspond to a statement in the interner") - } - }; - *stmt = hir_stmt; + /// Intern an empty global. Used for collecting globals before they're defined + pub fn push_empty_global( + &mut self, + name: Ident, + local_id: LocalModuleId, + file: FileId, + ) -> GlobalId { + let statement = self.push_stmt(HirStatement::Error); + self.push_global(name, local_id, statement, file) } /// Intern an empty function. 
@@ -816,19 +883,19 @@ impl NodeInterner { } } - /// Returns the interned let statement corresponding to `stmt_id` - pub fn let_statement(&self, stmt_id: &StmtId) -> HirLetStatement { - let def = - self.nodes.get(stmt_id.0).expect("ice: all statement ids should have definitions"); + /// Try to get the `HirLetStatement` which defines a given global value + pub fn get_global_let_statement(&self, global: GlobalId) -> Option { + let global = self.get_global(global); + let def = self.nodes.get(global.let_statement.0)?; match def { - Node::Statement(hir_stmt) => { - match hir_stmt { - HirStatement::Let(let_stmt) => let_stmt.clone(), - _ => panic!("ice: all let statement ids should correspond to a let statement in the interner"), + Node::Statement(hir_stmt) => match hir_stmt { + HirStatement::Let(let_stmt) => Some(let_stmt.clone()), + _ => { + panic!("ice: all globals should correspond to a let statement in the interner") } }, - _ => panic!("ice: all statement ids should correspond to a statement in the interner"), + _ => panic!("ice: all globals should correspond to a statement in the interner"), } } @@ -894,12 +961,17 @@ impl NodeInterner { &self.type_aliases[id.0] } - pub fn get_global(&self, stmt_id: &StmtId) -> Option { - self.globals.get(stmt_id).cloned() + pub fn get_global(&self, global_id: GlobalId) -> &GlobalInfo { + &self.globals[global_id.0] + } + + pub fn get_global_definition(&self, global_id: GlobalId) -> &DefinitionInfo { + let global = self.get_global(global_id); + self.definition(global.definition_id) } - pub fn get_all_globals(&self) -> HashMap { - self.globals.clone() + pub fn get_all_globals(&self) -> &[GlobalInfo] { + &self.globals } /// Returns the type of an item stored in the Interner or Error if it was not found. 
@@ -935,6 +1007,12 @@ impl NodeInterner { *old = Node::Expression(new); } + /// Replaces the HirStatement at the given StmtId with a new HirStatement + pub fn replace_statement(&mut self, stmt_id: StmtId, hir_stmt: HirStatement) { + let old = self.nodes.get_mut(stmt_id.0).unwrap(); + *old = Node::Statement(hir_stmt); + } + pub fn next_type_variable_id(&self) -> TypeVariableId { let id = self.next_type_variable_id.get(); self.next_type_variable_id.set(id + 1); @@ -1446,6 +1524,105 @@ impl NodeInterner { pub(crate) fn ordering_type(&self) -> Type { self.ordering_type.clone().expect("Expected ordering_type to be set in the NodeInterner") } + + /// Register that `dependent` depends on `dependency`. + /// This is usually because `dependent` refers to `dependency` in one of its struct fields. + pub fn add_type_dependency(&mut self, dependent: DependencyId, dependency: StructId) { + self.add_dependency(dependent, DependencyId::Struct(dependency)); + } + + pub fn add_global_dependency(&mut self, dependent: DependencyId, dependency: GlobalId) { + self.add_dependency(dependent, DependencyId::Global(dependency)); + } + + pub fn add_function_dependency(&mut self, dependent: DependencyId, dependency: FuncId) { + self.add_dependency(dependent, DependencyId::Function(dependency)); + } + + fn add_dependency(&mut self, dependent: DependencyId, dependency: DependencyId) { + let dependent_index = self.get_or_insert_dependency(dependent); + let dependency_index = self.get_or_insert_dependency(dependency); + self.dependency_graph.update_edge(dependent_index, dependency_index, ()); + } + + fn get_or_insert_dependency(&mut self, id: DependencyId) -> PetGraphIndex { + if let Some(index) = self.dependency_graph_indices.get(&id) { + return *index; + } + + let index = self.dependency_graph.add_node(id); + self.dependency_graph_indices.insert(id, index); + index + } + + pub(crate) fn check_for_dependency_cycles(&self) -> Vec<(CompilationError, FileId)> { + let strongly_connected_components = 
tarjan_scc(&self.dependency_graph); + let mut errors = Vec::new(); + + let mut push_error = |item: String, scc: &[_], i, location: Location| { + let cycle = self.get_cycle_error_string(scc, i); + let span = location.span; + let error = ResolverError::DependencyCycle { item, cycle, span }; + errors.push((error.into(), location.file)); + }; + + for scc in strongly_connected_components { + if scc.len() > 1 { + // If a SCC contains a type, type alias, or global, it must be the only element in the SCC + for (i, index) in scc.iter().enumerate() { + match self.dependency_graph[*index] { + DependencyId::Struct(struct_id) => { + let struct_type = self.get_struct(struct_id); + let struct_type = struct_type.borrow(); + push_error(struct_type.name.to_string(), &scc, i, struct_type.location); + break; + } + DependencyId::Global(global_id) => { + let global = self.get_global(global_id); + let name = global.ident.to_string(); + push_error(name, &scc, i, global.location); + break; + } + DependencyId::Alias(alias_id) => { + let alias = self.get_type_alias(alias_id); + push_error(alias.name.to_string(), &scc, i, alias.location); + break; + } + // Mutually recursive functions are allowed + DependencyId::Function(_) => (), + } + } + } + } + + errors + } + + /// Build up a string starting from the given item containing each item in the dependency + /// cycle. The final result will resemble `foo -> bar -> baz -> foo`, always going back to the + /// element at the given start index. 
+ fn get_cycle_error_string(&self, scc: &[PetGraphIndex], start_index: usize) -> String { + let index_to_string = |index: PetGraphIndex| match self.dependency_graph[index] { + DependencyId::Struct(id) => Cow::Owned(self.get_struct(id).borrow().name.to_string()), + DependencyId::Function(id) => Cow::Borrowed(self.function_name(&id)), + DependencyId::Alias(id) => { + Cow::Borrowed(self.get_type_alias(id).name.0.contents.as_ref()) + } + DependencyId::Global(id) => { + Cow::Borrowed(self.get_global(id).ident.0.contents.as_ref()) + } + }; + + let mut cycle = index_to_string(scc[start_index]).to_string(); + + // Reversing the dependencies here matches the order users would expect for the error message + for i in (0..scc.len()).rev() { + cycle += " -> "; + cycle += &index_to_string(scc[(start_index + i) % scc.len()]); + } + + cycle + } } impl Methods { diff --git a/noir/compiler/noirc_frontend/src/parser/errors.rs b/noir/compiler/noirc_frontend/src/parser/errors.rs index 5c869ff4719..9158c68db72 100644 --- a/noir/compiler/noirc_frontend/src/parser/errors.rs +++ b/noir/compiler/noirc_frontend/src/parser/errors.rs @@ -40,6 +40,8 @@ pub enum ParserErrorReason { NoFunctionAttributesAllowedOnStruct, #[error("Assert statements can only accept string literals")] AssertMessageNotString, + #[error("Integer bit size {0} won't be supported")] + DeprecatedBitSize(u32), #[error("{0}")] Lexer(LexerErrorKind), } @@ -130,6 +132,8 @@ impl std::fmt::Display for ParserError { } } +pub(crate) static ALLOWED_INTEGER_BIT_SIZES: &[u32] = &[1, 8, 32, 64]; + impl From for Diagnostic { fn from(error: ParserError) -> Diagnostic { match error.reason { @@ -145,6 +149,11 @@ impl From for Diagnostic { "The 'comptime' keyword has been deprecated. 
It can be removed without affecting your program".into(), error.span, ), + ParserErrorReason::DeprecatedBitSize(bit_size) => Diagnostic::simple_warning( + format!("Use of deprecated bit size {}", bit_size), + format!("Bit sizes for integers will be restricted to {}", ALLOWED_INTEGER_BIT_SIZES.iter().map(|n| n.to_string()).collect::>().join(", ")), + error.span, + ), ParserErrorReason::ExperimentalFeature(_) => Diagnostic::simple_warning( reason.to_string(), "".into(), diff --git a/noir/compiler/noirc_frontend/src/parser/parser.rs b/noir/compiler/noirc_frontend/src/parser/parser.rs index f82ce95c718..507d58ad8f1 100644 --- a/noir/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/compiler/noirc_frontend/src/parser/parser.rs @@ -23,6 +23,7 @@ //! prevent other parsers from being tried afterward since there is no longer an error. Thus, they should //! be limited to cases like the above `fn` example where it is clear we shouldn't back out of the //! current parser to try alternative parsers in a `choice` expression. 
+use super::errors::ALLOWED_INTEGER_BIT_SIZES; use super::{ foldl_with_span, labels::ParsingRuleLabel, parameter_name_recovery, parameter_recovery, parenthesized, then_commit, then_commit_ignore, top_level_statement_recovery, ExprParser, @@ -35,7 +36,7 @@ use crate::ast::{ }; use crate::lexer::Lexer; use crate::parser::{force, ignore_then_commit, statement_recovery}; -use crate::token::{Attribute, Attributes, Keyword, SecondaryAttribute, Token, TokenKind}; +use crate::token::{Attribute, Attributes, IntType, Keyword, SecondaryAttribute, Token, TokenKind}; use crate::{ BinaryOp, BinaryOpKind, BlockExpression, ConstrainKind, ConstrainStatement, Distinctness, ForLoopStatement, ForRange, FunctionDefinition, FunctionReturnType, FunctionVisibility, Ident, @@ -132,7 +133,7 @@ fn global_declaration() -> impl NoirParser { ); let p = then_commit(p, optional_type_annotation()); let p = then_commit_ignore(p, just(Token::Assign)); - let p = then_commit(p, literal_or_collection(expression()).map_with_span(Expression::new)); + let p = then_commit(p, expression()); p.map(LetStatement::new_let).map(TopLevelStatement::Global) } @@ -832,23 +833,10 @@ where ignore_then_commit(keyword(Keyword::Assert), parenthesized(argument_parser)) .labelled(ParsingRuleLabel::Statement) - .validate(|expressions, span, emit| { + .validate(|expressions, span, _| { let condition = expressions.get(0).unwrap_or(&Expression::error(span)).clone(); - let mut message_str = None; - - if let Some(message) = expressions.get(1) { - if let ExpressionKind::Literal(Literal::Str(message)) = &message.kind { - message_str = Some(message.clone()); - } else { - emit(ParserError::with_reason(ParserErrorReason::AssertMessageNotString, span)); - } - } - - StatementKind::Constrain(ConstrainStatement( - condition, - message_str, - ConstrainKind::Assert, - )) + let message = expressions.get(1).cloned(); + StatementKind::Constrain(ConstrainStatement(condition, message, ConstrainKind::Assert)) }) } @@ -861,7 +849,7 @@ where 
ignore_then_commit(keyword(Keyword::AssertEq), parenthesized(argument_parser)) .labelled(ParsingRuleLabel::Statement) - .validate(|exprs: Vec, span, emit| { + .validate(|exprs: Vec, span, _| { let predicate = Expression::new( ExpressionKind::Infix(Box::new(InfixExpression { lhs: exprs.get(0).unwrap_or(&Expression::error(span)).clone(), @@ -870,18 +858,10 @@ where })), span, ); - let mut message_str = None; - - if let Some(message) = exprs.get(2) { - if let ExpressionKind::Literal(Literal::Str(message)) = &message.kind { - message_str = Some(message.clone()); - } else { - emit(ParserError::with_reason(ParserErrorReason::AssertMessageNotString, span)); - } - } + let message = exprs.get(2).cloned(); StatementKind::Constrain(ConstrainStatement( predicate, - message_str, + message, ConstrainKind::AssertEq, )) }) @@ -1113,6 +1093,18 @@ fn int_type() -> impl NoirParser { Err(ParserError::expected_label(ParsingRuleLabel::IntegerType, unexpected, span)) } })) + .validate(|int_type, span, emit| { + let bit_size = match int_type.1 { + IntType::Signed(bit_size) | IntType::Unsigned(bit_size) => bit_size, + }; + if !ALLOWED_INTEGER_BIT_SIZES.contains(&bit_size) { + emit(ParserError::with_reason( + ParserErrorReason::DeprecatedBitSize(bit_size), + span, + )); + } + int_type + }) .map_with_span(|(_, token), span| UnresolvedTypeData::from_int_token(token).with_span(span)) } @@ -1684,24 +1676,6 @@ fn literal() -> impl NoirParser { }) } -fn literal_with_sign() -> impl NoirParser { - choice(( - literal(), - just(Token::Minus).then(literal()).map(|(_, exp)| match exp { - ExpressionKind::Literal(Literal::Integer(value, sign)) => { - ExpressionKind::Literal(Literal::Integer(value, !sign)) - } - _ => unreachable!(), - }), - )) -} - -fn literal_or_collection<'a>( - expr_parser: impl ExprParser + 'a, -) -> impl NoirParser + 'a { - choice((literal_with_sign(), constructor(expr_parser.clone()), array_expr(expr_parser))) -} - #[cfg(test)] mod test { use noirc_errors::CustomDiagnostic; @@ 
-2092,7 +2066,13 @@ mod test { match parse_with(assertion(expression()), "assert(x == y, \"assertion message\")").unwrap() { StatementKind::Constrain(ConstrainStatement(_, message, _)) => { - assert_eq!(message, Some("assertion message".to_owned())); + let message = message.unwrap(); + match message.kind { + ExpressionKind::Literal(Literal::Str(message_string)) => { + assert_eq!(message_string, "assertion message".to_owned()); + } + _ => unreachable!(), + } } _ => unreachable!(), } @@ -2116,7 +2096,13 @@ mod test { .unwrap() { StatementKind::Constrain(ConstrainStatement(_, message, _)) => { - assert_eq!(message, Some("assertion message".to_owned())); + let message = message.unwrap(); + match message.kind { + ExpressionKind::Literal(Literal::Str(message_string)) => { + assert_eq!(message_string, "assertion message".to_owned()); + } + _ => unreachable!(), + } } _ => unreachable!(), } @@ -2483,7 +2469,7 @@ mod test { Case { source: "assert(x == x, x)", expect: "constrain (plain::x == plain::x)", - errors: 1, + errors: 0, }, Case { source: "assert_eq(x,)", expect: "constrain (Error == Error)", errors: 1 }, Case { @@ -2494,7 +2480,7 @@ mod test { Case { source: "assert_eq(x, x, x)", expect: "constrain (plain::x == plain::x)", - errors: 1, + errors: 0, }, ]; diff --git a/noir/compiler/noirc_frontend/src/tests.rs b/noir/compiler/noirc_frontend/src/tests.rs index a4246a9fe7d..1deff446d7e 100644 --- a/noir/compiler/noirc_frontend/src/tests.rs +++ b/noir/compiler/noirc_frontend/src/tests.rs @@ -1128,9 +1128,9 @@ mod test { } fn check_rewrite(src: &str, expected: &str) { - let (_program, context, _errors) = get_program(src); + let (_program, mut context, _errors) = get_program(src); let main_func_id = context.def_interner.find_function("main").unwrap(); - let program = monomorphize(main_func_id, &context.def_interner); + let program = monomorphize(main_func_id, &mut context.def_interner); assert!(format!("{}", program) == expected); } @@ -1164,4 +1164,24 @@ fn lambda$f1(mut 
env$l1: (Field)) -> Field { "#; check_rewrite(src, expected_rewrite); } + + #[test] + fn deny_cyclic_structs() { + let src = r#" + struct Foo { bar: Bar } + struct Bar { foo: Foo } + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); + } + + #[test] + fn deny_cyclic_globals() { + let src = r#" + global A = B; + global B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); + } } diff --git a/noir/compiler/noirc_printable_type/src/lib.rs b/noir/compiler/noirc_printable_type/src/lib.rs index 18f2fe0a873..24f4f275a14 100644 --- a/noir/compiler/noirc_printable_type/src/lib.rs +++ b/noir/compiler/noirc_printable_type/src/lib.rs @@ -6,7 +6,7 @@ use regex::{Captures, Regex}; use serde::{Deserialize, Serialize}; use thiserror::Error; -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] #[serde(tag = "kind", rename_all = "lowercase")] pub enum PrintableType { Field, @@ -50,6 +50,7 @@ pub enum PrintableValue { String(String), Vec(Vec), Struct(BTreeMap), + Other, } /// In order to display a `PrintableValue` we need a `PrintableType` to accurately @@ -69,6 +70,9 @@ pub enum ForeignCallError { #[error("Failed calling external resolver. {0}")] ExternalResolverError(#[from] jsonrpc::Error), + + #[error("Assert message resolved after an unsatisified constrain. 
{0}")] + ResolvedAssertMessage(String), } impl TryFrom<&[ForeignCallParam]> for PrintableValueDisplay { @@ -293,7 +297,7 @@ fn format_field_string(field: FieldElement) -> String { } /// Assumes that `field_iterator` contains enough [FieldElement] in order to decode the [PrintableType] -fn decode_value( +pub fn decode_value( field_iterator: &mut impl Iterator, typ: &PrintableType, ) -> PrintableValue { diff --git a/noir/compiler/wasm/README.md b/noir/compiler/wasm/README.md index 0b2d92b0815..52f7e83e19e 100644 --- a/noir/compiler/wasm/README.md +++ b/noir/compiler/wasm/README.md @@ -1,9 +1,32 @@ # Noir Lang WASM JavaScript Package -This JavaScript package enables users to compile a Noir program, i.e. generating its artifacts. +This JavaScript package enables users to compile a Noir program, i.e. generating its artifacts, both in Node.JS environments and the browser. The package also handles dependency management like how Nargo (Noir's CLI tool) operates, but the package is used just for compilation, not proving, verifying and simulating functions. 
+## Usage + +```typescript +// Node.js + +import { compile, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager(myProjectPath); +const myCompiledCode = await compile(fm); +``` + +```typescript +// Browser + +import { compile, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager('/'); +for (const path of files) { + await fm.writeFile(path, await getFileAsStream(path)); +} +const myCompiledCode = await compile(fm); +``` + ## Building from source Outside of the [noir repo](https://github.com/noir-lang/noir), this package can be built using the command below: diff --git a/noir/compiler/wasm/src/index.cts b/noir/compiler/wasm/src/index.cts index 14687e615df..7c707e662d8 100644 --- a/noir/compiler/wasm/src/index.cts +++ b/noir/compiler/wasm/src/index.cts @@ -5,6 +5,36 @@ import { LogData, LogFn } from './utils'; import { CompilationResult } from './types/noir_artifact'; import { inflateDebugSymbols } from './noir/debug'; +/** + * Compiles a Noir project + * + * @param fileManager - The file manager to use + * @param projectPath - The path to the project inside the file manager. Defaults to the root of the file manager + * @param logFn - A logging function. If not provided, console.log will be used + * @param debugLogFn - A debug logging function. 
If not provided, logFn will be used + * + * @example + * ```typescript + * // Node.js + * + * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager(myProjectPath); + * const myCompiledCode = await compile(fm); + * ``` + * + * ```typescript + * // Browser + * + * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager('/'); + * for (const path of files) { + * await fm.writeFile(path, await getFileAsStream(path)); + * } + * const myCompiledCode = await compile(fm); + * ``` + */ async function compile( fileManager: FileManager, projectPath?: string, diff --git a/noir/compiler/wasm/src/index.mts b/noir/compiler/wasm/src/index.mts index 8774a7857ef..d4ed0beccfc 100644 --- a/noir/compiler/wasm/src/index.mts +++ b/noir/compiler/wasm/src/index.mts @@ -5,6 +5,36 @@ import { LogData, LogFn } from './utils'; import { CompilationResult } from './types/noir_artifact'; import { inflateDebugSymbols } from './noir/debug'; +/** + * Compiles a Noir project + * + * @param fileManager - The file manager to use + * @param projectPath - The path to the project inside the file manager. Defaults to the root of the file manager + * @param logFn - A logging function. If not provided, console.log will be used + * @param debugLogFn - A debug logging function. 
If not provided, logFn will be used + * + * @example + * ```typescript + * // Node.js + * + * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager(myProjectPath); + * const myCompiledCode = await compile(fm); + * ``` + * + * ```typescript + * // Browser + * + * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager('/'); + * for (const path of files) { + * await fm.writeFile(path, await getFileAsStream(path)); + * } + * const myCompiledCode = await compile(fm); + * ``` + */ async function compile( fileManager: FileManager, projectPath?: string, diff --git a/noir/compiler/wasm/src/noir/debug.ts b/noir/compiler/wasm/src/noir/debug.ts index 7a65f4b68c2..1a4ccfe95ec 100644 --- a/noir/compiler/wasm/src/noir/debug.ts +++ b/noir/compiler/wasm/src/noir/debug.ts @@ -1,6 +1,9 @@ import { inflate } from 'pako'; -/** Decompresses and decodes the debug symbols */ +/** + * Decompresses and decodes the debug symbols + * @param debugSymbols - The base64 encoded debug symbols + */ export function inflateDebugSymbols(debugSymbols: string) { return JSON.parse(inflate(Buffer.from(debugSymbols, 'base64'), { to: 'string', raw: true })); } diff --git a/noir/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts b/noir/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts index 1a8250f49cc..195eea8a70d 100644 --- a/noir/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts +++ b/noir/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts @@ -18,8 +18,9 @@ export async function readdirRecursive(dir: string): Promise { } /** - * Creates a new FileManager instance based on nodejs fs - * @param dataDir - where to store files + * Creates a new FileManager instance based on fs in node and memfs in the browser (via webpack alias) + * + * @param dataDir - root of the file system */ export function createNodejsFileManager(dataDir: string): FileManager { return new FileManager( diff 
--git a/noir/cspell.json b/noir/cspell.json index 12b1e3f63d3..34424647616 100644 --- a/noir/cspell.json +++ b/noir/cspell.json @@ -13,7 +13,6 @@ "arithmetization", "arity", "arkworks", - "arraysort", "barebones", "barretenberg", "bincode", @@ -157,6 +156,7 @@ "subshell", "subtyping", "swcurve", + "Taiko", "tecurve", "tempdir", "tempfile", diff --git a/noir/deny.toml b/noir/deny.toml index 5edce08fb70..a3e506984c9 100644 --- a/noir/deny.toml +++ b/noir/deny.toml @@ -57,7 +57,7 @@ allow = [ # bitmaps 2.1.0, generational-arena 0.2.9,im 15.1.0 "MPL-2.0", # Boost Software License - "BSL-1.0" + "BSL-1.0", ] # Allow 1 or more licenses on a per-crate basis, so that particular licenses @@ -93,7 +93,12 @@ unknown-registry = "warn" # Lint level for what to happen when a crate from a git repository that is not # in the allow list is encountered unknown-git = "deny" + +# DON'T YOU DARE ADD ANYTHING TO THIS IF YOU WANT TO PUBLISH ANYTHING NOIR RELATED TO CRATES.IO +# +# crates.io rejects git dependencies so anything depending on these is unpublishable and you'll ruin my day +# when I find out. allow-git = [ - "https://github.com/noir-lang/grumpkin", - "https://github.com/jfecher/chumsky" + "https://github.com/jfecher/chumsky", + "https://github.com/noir-lang/clap-markdown", ] diff --git a/noir/docs/docs/getting_started/installation/other_install_methods.md b/noir/docs/docs/getting_started/installation/other_install_methods.md index a532f83750e..489f1eda802 100644 --- a/noir/docs/docs/getting_started/installation/other_install_methods.md +++ b/noir/docs/docs/getting_started/installation/other_install_methods.md @@ -1,38 +1,102 @@ --- title: Alternative Install Methods -description: - There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. 
This guide explains other methods that don't rely on noirup, such as compiling from source, installing from binaries, and using WSL for windows +description: There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains other methods that don't rely on noirup, such as compiling from source, installing from binaries, and using WSL for windows keywords: [ - Installation - Nargo - Noirup - Binaries - Compiling from Source - WSL for Windows - macOS - Linux - Nix - Direnv - Shell & editor experience - Building and testing - Uninstalling Nargo - Noir vs code extension -] + Installation + Nargo + Noirup + Binaries + Compiling from Source + WSL for Windows + macOS + Linux + Nix + Direnv + Shell & editor experience + Building and testing + Uninstalling Nargo + Noir vs code extension, + ] sidebar_position: 1 --- +## Encouraged Installation Method: Noirup -## Installation +Noirup is the endorsed method for installing Nargo, streamlining the process of fetching binaries or compiling from source. It supports a range of options to cater to your specific needs, from nightly builds and specific versions to compiling from various sources. -The most common method of installing Nargo is through [Noirup](./index.md) +### Installing Noirup + +First, ensure you have `noirup` installed: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +### Fetching Binaries + +With `noirup`, you can easily switch between different Nargo versions, including nightly builds: + +- **Nightly Version**: Install the latest nightly build. + + ```sh + noirup --version nightly + ``` + +- **Specific Version**: Install a specific version of Nargo. + ```sh + noirup --version + ``` + +### Compiling from Source + +`noirup` also enables compiling Nargo from various sources: + +- **From a Specific Branch**: Install from the latest commit on a branch. 
+ + ```sh + noirup --branch + ``` + +- **From a Fork**: Install from the main branch of a fork. + + ```sh + noirup --repo + ``` + +- **From a Specific Branch in a Fork**: Install from a specific branch in a fork. + + ```sh + noirup --repo --branch + ``` + +- **From a Specific Pull Request**: Install from a specific PR. + + ```sh + noirup --pr + ``` + +- **From a Specific Commit**: Install from a specific commit. + + ```sh + noirup -C + ``` + +- **From Local Source**: Compile and install from a local directory. + ```sh + noirup --path ./path/to/local/source + ``` + +## Alternate Installation Methods (No Longer Recommended) + +While the following methods are available, they are no longer recommended. We advise using noirup for a more efficient and flexible installation experience. However, there are other methods for installing Nargo: -- [Binaries](#binaries) -- [Compiling from Source](#compile-from-source) -- [WSL for Windows](#wsl-for-windows) +- [Binaries](#option-1-installing-from-binaries) +- [Compiling from Source](#option-2-compile-from-source) +- [WSL for Windows](#option-3-wsl-for-windows) -### Binaries +### Option 1: Installing from Binaries See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous platform specific binaries. @@ -81,7 +145,7 @@ Check if the installation was successful by running `nargo --version`. You shoul > **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from > Finder. Close the new terminal popped up and `nargo` should now be accessible. -### Option 3: Compile from Source +### Option 2: Compile from Source Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. 
It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). @@ -161,19 +225,19 @@ If you have hesitations with using direnv, you can launch a subshell with `nix d Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! -### Option 4: WSL (for Windows) +### Option 3: WSL (for Windows) The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. -step 2: Follow the [Noirup instructions](./index.md). +step 2: Follow the [Noirup instructions](#encouraged-installation-method-noirup). ## Uninstalling Nargo ### Noirup -If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. +If you installed Nargo with `noirup` or through directly downloading binaries, you can uninstall Nargo by removing the files in `~/.nargo`, `~/nargo`, and `~/noir_cache`. This ensures that all installed binaries, configurations, and cache related to Nargo are fully removed from your system. ```bash rm -r ~/.nargo @@ -183,7 +247,7 @@ rm -r ~/noir_cache ### Nix -If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. +If you installed Nargo with Nix or compiled it from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. 
```bash rm ~/.nix-profile/bin/nargo diff --git a/noir/docs/docs/how_to/how-to-recursion.md b/noir/docs/docs/how_to/how-to-recursion.md index f34647a99d5..4c45bb87ae2 100644 --- a/noir/docs/docs/how_to/how-to-recursion.md +++ b/noir/docs/docs/how_to/how-to-recursion.md @@ -42,9 +42,9 @@ In short: ::: -In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume these two: +In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume the following: -- `main`: a circuit of type `assert(x != y)` +- `main`: a circuit of type `assert(x != y)`, where `main` is marked with a `#[recursive]` attribute. This attribute states that the backend should generate proofs that are friendly for verification within another circuit. - `recursive`: a circuit that verifies `main` For a full example on how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide. @@ -77,7 +77,7 @@ const { witness } = noir.execute(input) With this witness, you are now able to generate the intermediate proof for the main circuit: ```js -const { proof, publicInputs } = await backend.generateIntermediateProof(witness) +const { proof, publicInputs } = await backend.generateProof(witness) ``` :::warning @@ -95,13 +95,13 @@ With this in mind, it becomes clear that our intermediate proof is the one *mean Optionally, you are able to verify the intermediate proof: ```js -const verified = await backend.verifyIntermediateProof({ proof, publicInputs }) +const verified = await backend.verifyProof({ proof, publicInputs }) ``` -This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. 
For that, we need to generate the intermediate artifacts: +This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. For that, we need to generate recursive proof artifacts that will be passed to the circuit that is verifying the proof we just generated. Instead of passing the proof and verification key as a byte array, we pass them as fields which makes it cheaper to verify in a circuit: ```js -const { proofAsFields, vkAsFields, vkHash } = await backend.generateIntermediateProofArtifacts( { publicInputs, proof }, publicInputsCount) +const { proofAsFields, vkAsFields, vkHash } = await backend.generateRecursiveProofArtifacts( { publicInputs, proof }, publicInputsCount) ``` This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away. @@ -135,8 +135,8 @@ const recursiveInputs = { } const { witness, returnValue } = noir.execute(recursiveInputs) // we're executing the recursive circuit now! -const { proof, publicInputs } = backend.generateFinalProof(witness) -const verified = backend.verifyFinalProof({ proof, publicInputs }) +const { proof, publicInputs } = backend.generateProof(witness) +const verified = backend.verifyProof({ proof, publicInputs }) ``` You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested of using them this way! 
@@ -165,15 +165,15 @@ This allows you to neatly call exactly the method you want without conflicting n ```js // Alice runs this 👇 const { witness: mainWitness } = await noir_programs.main.execute(input) -const proof = await backends.main.generateIntermediateProof(mainWitness) +const proof = await backends.main.generateProof(mainWitness) // Bob runs this 👇 -const verified = await backends.main.verifyIntermediateProof(proof) -const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateIntermediateProofArtifacts( +const verified = await backends.main.verifyProof(proof) +const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateRecursiveProofArtifacts( proof, numPublicInputs, ); -const recursiveProof = await noir_programs.recursive.generateFinalProof(recursiveInputs) +const recursiveProof = await noir_programs.recursive.generateProof(recursiveInputs) ``` ::: diff --git a/noir/docs/docs/noir/concepts/assert.md b/noir/docs/docs/noir/concepts/assert.md index c5f9aff139c..bcff613a695 100644 --- a/noir/docs/docs/noir/concepts/assert.md +++ b/noir/docs/docs/noir/concepts/assert.md @@ -18,10 +18,28 @@ fn main(x : Field, y : Field) { } ``` +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. + You can optionally provide a message to be logged when the assertion fails: ```rust assert(x == y, "x and y are not equal"); ``` -> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. +Aside from string literals, the optional message can be a format string or any other type supported as input for Noir's [print](../standard_library/logging.md) functions.
This feature lets you incorporate runtime variables into your failed assertion logs: + +```rust +assert(x == y, f"Expected x == y, but got {x} == {y}"); +``` + +Using a variable as an assertion message directly: + +```rust +struct myStruct { + myField: Field +} + +let s = myStruct { myField: y }; +assert(s.myField == x, s); +``` + diff --git a/noir/docs/docs/noir/concepts/data_types/fields.md b/noir/docs/docs/noir/concepts/data_types/fields.md index 7870c98c858..99b4aa63549 100644 --- a/noir/docs/docs/noir/concepts/data_types/fields.md +++ b/noir/docs/docs/noir/concepts/data_types/fields.md @@ -181,3 +181,12 @@ Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} ```rust fn sgn0(self) -> u1 ``` + + +### lt + +Returns true if the field is less than the other field + +```rust +pub fn lt(self, another: Field) -> bool +``` diff --git a/noir/docs/docs/noir/concepts/data_types/integers.md b/noir/docs/docs/noir/concepts/data_types/integers.md index 7d1e83cf4e9..30135d76e4a 100644 --- a/noir/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/docs/docs/noir/concepts/data_types/integers.md @@ -51,6 +51,55 @@ If you are using the default proving backend with Noir, both even (e.g. _u2_, _i ::: + +## 128 bits Unsigned Integers + +The built-in structure `U128` allows you to use 128-bit unsigned integers almost like a native integer type. However, there are some differences to keep in mind: +- You cannot cast between a native integer and `U128` +- There is a higher performance cost when using `U128`, compared to a native type. + +Conversion between unsigned integer types and U128 are done through the use of `from_integer` and `to_integer` functions. + +```rust +fn main() { + let x = U128::from_integer(23); + let y = U128::from_hex("0x7"); + let z = x + y; + assert(z.to_integer() == 30); +} +``` + +`U128` is implemented with two 64 bits limbs, representing the low and high bits, which explains the performance cost. 
You should expect `U128` to be twice as costly for addition and four times as costly for multiplication. +You can construct a U128 from its limbs: +```rust +fn main(x: u64, y: u64) { + let z = U128::from_u64s_be(x,y); + assert(z.hi == x as Field); + assert(z.lo == y as Field); +} +``` + +Note that the limbs are stored as Field elements in order to avoid unnecessary conversions. +Apart from this, most operations will work as usual: + +```rust +fn main(x: U128, y: U128) { + // multiplication + let c = x * y; + // addition and subtraction + let c = c - x + y; + // division + let c = x / y; + // bit operation; + let c = x & y | y; + // bit shift + let c = x << y; + // comparisons; + let c = x < y; + let c = x == y; +} +``` + ## Overflows Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove: @@ -108,6 +157,6 @@ Example of how it is used: use dep::std; fn main(x: u8, y: u8) -> pub u8 { - std::wrapping_add(x + y) + std::wrapping_add(x, y) } ``` diff --git a/noir/docs/docs/noir/concepts/globals.md b/noir/docs/docs/noir/concepts/globals.md new file mode 100644 index 00000000000..063a3d89248 --- /dev/null +++ b/noir/docs/docs/noir/concepts/globals.md @@ -0,0 +1,72 @@ +--- +title: Global Variables +description: + Learn about global variables in Noir. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, globals, global variables, constants] +sidebar_position: 8 +--- + +## Globals + + +Noir supports global variables. The global's type can be inferred by the compiler entirely: + +```rust +global N = 5; // Same as `global N: Field = 5` + +global TUPLE = (3, 2); + +fn main() { + assert(N == 5); + assert(N == TUPLE.0 + TUPLE.1); +} +``` + +:::info + +Globals can be defined as any expression, so long as they don't depend on themselves - otherwise there would be a dependency cycle!
For example: + +```rust +global T = foo(T); // dependency error +``` + +::: + + +If they are initialized to a literal integer, globals can be used to specify an array's length: + +```rust +global N: Field = 2; + +fn main(y : [Field; N]) { + assert(y[0] == y[1]) +} +``` + +A global from another module can be imported or referenced externally like any other name: + +```rust +global N = 20; + +fn main() { + assert(my_submodule::N != N); +} + +mod my_submodule { + global N: Field = 10; +} +``` + +When a global is used, Noir replaces the name with its definition on each occurrence. +This means globals defined using function calls will repeat the call each time they're used: + +```rust +global RESULT = foo(); + +fn foo() -> [Field; 100] { ... } +``` + +This is usually fine since Noir will generally optimize any function call that does not +refer to a program input into a constant. It should be kept in mind however, if the called +function performs side-effects like `println`, as these will still occur on each use. diff --git a/noir/docs/docs/noir/concepts/mutability.md b/noir/docs/docs/noir/concepts/mutability.md index 9cc10429cb4..fdeef6a87c5 100644 --- a/noir/docs/docs/noir/concepts/mutability.md +++ b/noir/docs/docs/noir/concepts/mutability.md @@ -1,9 +1,9 @@ --- title: Mutability description: - Learn about mutable variables, constants, and globals in Noir programming language. Discover how + Learn about mutable variables in Noir. Discover how to declare, modify, and use them in your programs. -keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] +keywords: [noir programming language, mutability in noir, mutable variables] sidebar_position: 8 --- @@ -49,45 +49,73 @@ fn helper(mut x: i32) { } ``` -## Comptime Values +## Non-local mutability -:::warning +Non-local mutability can be achieved through the mutable reference type `&mut T`: -The 'comptime' keyword was removed in version 0.10. 
The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. - -::: +```rust +fn set_to_zero(x: &mut Field) { + *x = 0; +} -## Globals +fn main() { + let mut y = 42; + set_to_zero(&mut y); + assert(*y == 0); +} +``` -Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array -annotations for function parameters and can be imported from submodules. +When creating a mutable reference, the original variable being referred to (`y` in this +example) must also be mutable. Since mutable references are a reference type, they must +be explicitly dereferenced via `*` to retrieve the underlying value. Note that this yields +a copy of the value, so mutating this copy will not change the original value behind the +reference: ```rust -global N: Field = 5; // Same as `global N: Field = 5` +fn main() { + let mut x = 1; + let x_ref = &mut x; + + let mut y = *x_ref; + let y_ref = &mut y; -fn main(x : Field, y : [Field; N]) { - let res = x * N; + x = 2; + *x_ref = 3; - assert(res == y[0]); + y = 4; + *y_ref = 5; - let res2 = x * my_submodule::N; - assert(res != res2); + assert(x == 3); + assert(*x_ref == 3); + assert(y == 5); + assert(*y_ref == 5); } +``` -mod my_submodule { - use dep::std; +Note that types in Noir are actually deeply immutable so the copy that occurs when +dereferencing is only a conceptual copy - no additional constraints will occur. - global N: Field = 10; +Mutable references can also be stored within structs. Note that there is also +no lifetime parameter on these unlike rust. This is because the allocated memory +always lasts the entire program - as if it were an array of one element. 
- fn my_helper() -> Field { - let x = N; - x +```rust +struct Foo { + x: &mut Field +} + +impl Foo { + fn incr(mut self) { + *self.x += 1; } } -``` -## Why only local mutability? +fn main() { + let foo = Foo { x: &mut 0 }; + foo.incr(); + assert(*foo.x == 1); +} +``` -Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting -without applying additional overhead to the user. Modeling a mutable reference is not as -straightforward as on conventional architectures and would incur some possibly unexpected overhead. +In general, you should avoid non-local & shared mutability unless it is needed. Sticking +to only local mutability will improve readability and potentially improve compiler optimizations as well. diff --git a/noir/docs/docs/noir/concepts/unconstrained.md b/noir/docs/docs/noir/concepts/unconstrained.md index 6b3424f7993..89d12c1c971 100644 --- a/noir/docs/docs/noir/concepts/unconstrained.md +++ b/noir/docs/docs/noir/concepts/unconstrained.md @@ -40,7 +40,7 @@ Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 Backend circuit size: 3619 ``` -A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the XOR against 0xff. This saves us ~480 gates in total. +A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the AND against 0xff. This saves us ~480 gates in total. 
```rust fn main(num: u72) -> pub [u8; 8] { diff --git a/noir/docs/docs/noir/standard_library/bn254.md b/noir/docs/docs/noir/standard_library/bn254.md new file mode 100644 index 00000000000..3294f005dbb --- /dev/null +++ b/noir/docs/docs/noir/standard_library/bn254.md @@ -0,0 +1,46 @@ +--- +title: Bn254 Field Library +--- + +Noir provides a module in the standard library with some optimized functions for bn254 Fr in `std::field::bn254`. + +## decompose + +```rust +fn decompose(x: Field) -> (Field, Field) {} +``` + +Decomposes a single field into two fields, low and high. The low field contains the lower 16 bytes of the input field and the high field contains the upper 16 bytes of the input field. Both field results are range checked to 128 bits. + + +## assert_gt + +```rust +fn assert_gt(a: Field, b: Field) {} +``` + +Asserts that a > b. This will generate fewer constraints than using `assert(gt(a, b))`. + +## assert_lt + +```rust +fn assert_lt(a: Field, b: Field) {} +``` + +Asserts that a < b. This will generate fewer constraints than using `assert(lt(a, b))`. + +## gt + +```rust +fn gt(a: Field, b: Field) -> bool {} +``` + +Returns true if a > b. + +## lt + +```rust +fn lt(a: Field, b: Field) -> bool {} +``` + +Returns true if a < b. \ No newline at end of file diff --git a/noir/docs/docs/noir/standard_library/options.md b/noir/docs/docs/noir/standard_library/options.md index 970c9cfbf11..a1bd4e1de5f 100644 --- a/noir/docs/docs/noir/standard_library/options.md +++ b/noir/docs/docs/noir/standard_library/options.md @@ -56,6 +56,10 @@ Returns the wrapped value if `self.is_some()`. Otherwise, returns the given defa Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value. +### expect + +Asserts `self.is_some()` with a provided custom message and returns the contained `Some` value. The custom message is expected to be a format string. + ### map If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`.
diff --git a/noir/docs/docs/reference/nargo_commands.md b/noir/docs/docs/reference/nargo_commands.md deleted file mode 100644 index fc2671b2bfc..00000000000 --- a/noir/docs/docs/reference/nargo_commands.md +++ /dev/null @@ -1,253 +0,0 @@ ---- -title: Nargo -description: - Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, - generate Solidity verifier smart contract and compile into JSON file containing ACIR - representation and ABI of circuit. -keywords: - [ - Nargo, - Noir CLI, - Noir Prover, - Noir Verifier, - generate Solidity verifier, - compile JSON file, - ACIR representation, - ABI of circuit, - TypeScript, - ] -sidebar_position: 0 ---- - -## General options - -| Option | Description | -| -------------------- | -------------------------------------------------- | -| `--show-ssa` | Emit debug information for the intermediate SSA IR | -| `--deny-warnings` | Quit execution when warnings are emitted | -| `--silence-warnings` | Suppress warnings | -| `-h, --help` | Print help | - -## `nargo help [subcommand]` - -Prints the list of available commands or specific information of a subcommand. - -_Arguments_ - -| Argument | Description | -| -------------- | -------------------------------------------- | -| `` | The subcommand whose help message to display | - -## `nargo backend` - -Installs and selects custom backends used to generate and verify proofs. 
- -### Commands - -| Command | Description | -| ----------- | --------------------------------------------------------- | -| `current` | Prints the name of the currently active backend | -| `ls` | Prints the list of currently installed backends | -| `use` | Select the backend to use | -| `install` | Install a new backend from a URL | -| `uninstall` | Uninstalls a backend | -| `help` | Print this message or the help of the given subcommand(s) | - -### Options - -| Option | Description | -| ------------ | ----------- | -| `-h, --help` | Print help | - -## `nargo check` - -Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output -values of the Noir program respectively. - -### Options - -| Option | Description | -| --------------------- | ------------------------------------- | -| `--package ` | The name of the package to check | -| `--workspace` | Check all packages in the workspace | -| `--print-acir` | Display the ACIR for compiled circuit | -| `--deny-warnings` | Treat all warnings as errors | -| `--silence-warnings` | Suppress warnings | -| `-h, --help` | Print help | - -### `nargo codegen-verifier` - -Generate a Solidity verifier smart contract for the program. - -### Options - -| Option | Description | -| --------------------- | ------------------------------------- | -| `--package ` | The name of the package to codegen | -| `--workspace` | Codegen all packages in the workspace | -| `--print-acir` | Display the ACIR for compiled circuit | -| `--deny-warnings` | Treat all warnings as errors | -| `--silence-warnings` | Suppress warnings | -| `-h, --help` | Print help | - -## `nargo compile` - -Compile the program into a JSON build artifact file containing the ACIR representation and the ABI -of the circuit. This build artifact can then be used to generate and verify proofs. - -You can also use "build" as an alias for compile (e.g. `nargo build`). 
- -### Options - -| Option | Description | -| --------------------- | ------------------------------------------------------------ | -| `--package ` | The name of the package to compile | -| `--workspace` | Compile all packages in the workspace | -| `--print-acir` | Display the ACIR for compiled circuit | -| `--deny-warnings` | Treat all warnings as errors | -| `--silence-warnings` | Suppress warnings | -| `-h, --help` | Print help | - -## `nargo new ` - -Creates a new Noir project in a new folder. - -**Arguments** - -| Argument | Description | -| -------- | -------------------------------- | -| `` | The path to save the new project | - -### Options - -| Option | Description | -| --------------- | ----------------------------------------------------- | -| `--name ` | Name of the package [default: package directory name] | -| `--lib` | Use a library template | -| `--bin` | Use a binary template [default] | -| `--contract` | Use a contract template | -| `-h, --help` | Print help | - -## `nargo init` - -Creates a new Noir project in the current directory. - -### Options - -| Option | Description | -| --------------- | ----------------------------------------------------- | -| `--name ` | Name of the package [default: current directory name] | -| `--lib` | Use a library template | -| `--bin` | Use a binary template [default] | -| `--contract` | Use a contract template | -| `-h, --help` | Print help | - -## `nargo execute [WITNESS_NAME]` - -Runs the Noir program and prints its return value. 
- -**Arguments** - -| Argument | Description | -| ---------------- | ----------------------------------------- | -| `[WITNESS_NAME]` | Write the execution witness to named file | - -### Options - -| Option | Description | -| --------------------------------- | ------------------------------------------------------------------------------------ | -| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | -| `--package ` | The name of the package to execute | -| `--workspace` | Execute all packages in the workspace | -| `--print-acir` | Display the ACIR for compiled circuit | -| `--deny-warnings` | Treat all warnings as errors | -| `--silence-warnings` | Suppress warnings | -| `--oracle-resolver` | JSON RPC url to solve oracle calls | -| `-h, --help` | Print help | - -_Usage_ - -The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which -must be filled in. - -To save the witness to file, run the command with a value for the `WITNESS_NAME` argument. A -`.tr` file will then be saved in the `./target` folder. - -## `nargo prove` - -Creates a proof for the program. 
- -### Options - -| Option | Description | -| ------------------------------------- | ---------------------------------------------------------------------------------------- | -| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | -| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | -| `--verify` | Verify proof after proving | -| `--package ` | The name of the package to prove | -| `--workspace` | Prove all packages in the workspace | -| `--print-acir` | Display the ACIR for compiled circuit | -| `--deny-warnings` | Treat all warnings as errors | -| `--silence-warnings` | Suppress warnings | -| `--oracle-resolver` | JSON RPC url to solve oracle calls | -| `-h, --help` | Print help | - -## `nargo verify` - -Given a proof and a program, verify whether the proof is valid. - -### Options - -| Option | Description | -| ------------------------------------- | ---------------------------------------------------------------------------------------- | -| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | -| `--package ` | The name of the package to verify | -| `--workspace` | Verify all packages in the workspace | -| `--print-acir` | Display the ACIR for compiled circuit | -| `--deny-warnings` | Treat all warnings as errors | -| `--silence-warnings` | Suppress warnings | -| `-h, --help` | Print help | - -## `nargo test [TEST_NAME]` - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. To print `println` statements in tests, use the `--show-output` flag. - -Takes an optional `--exact` flag which allows you to select tests based on an exact name. - -See an example on the [testing page](../getting_started/tooling/testing.md). 
- -### Options - -| Option | Description | -| --------------------- | -------------------------------------- | -| `--show-output` | Display output of `println` statements | -| `--exact` | Only run tests that match exactly | -| `--package ` | The name of the package to test | -| `--workspace` | Test all packages in the workspace | -| `--print-acir` | Display the ACIR for compiled circuit | -| `--deny-warnings` | Treat all warnings as errors | -| `--silence-warnings` | Suppress warnings | -| `--oracle-resolver` | JSON RPC url to solve oracle calls | -| `-h, --help` | Print help | - -## `nargo info` - -Prints a table containing the information of the package. - -Currently the table provide - -1. The number of ACIR opcodes -2. The final number gates in the circuit used by a backend - -If the file contains a contract the table will provide the -above information about each function of the contract. - -## `nargo lsp` - -Start a long-running Language Server process that communicates over stdin/stdout. -Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). - -## `nargo fmt` - -Automatically formats your Noir source code based on the default formatting settings. 
diff --git a/noir/docs/docusaurus.config.ts b/noir/docs/docusaurus.config.ts index d1d344ba635..e041d0a32a4 100644 --- a/noir/docs/docusaurus.config.ts +++ b/noir/docs/docusaurus.config.ts @@ -26,7 +26,7 @@ export default { '@docusaurus/preset-classic', { docs: { - path: "processed-docs", + path: 'processed-docs', sidebarPath: './sidebars.js', routeBasePath: '/docs', remarkPlugins: [math], @@ -210,6 +210,37 @@ export default { membersWithOwnFile: ['Interface', 'Class', 'TypeAlias'], }, ], + [ + 'docusaurus-plugin-typedoc', + { + id: 'noir_wasm', + entryPoints: ['../compiler/wasm/src/index.cts'], + tsconfig: '../compiler/wasm/tsconfig.json', + entryPointStrategy: 'resolve', + out: 'processed-docs/reference/NoirJS/noir_wasm', + plugin: ['typedoc-plugin-markdown'], + name: 'noir_wasm', + disableSources: true, + excludePrivate: true, + skipErrorChecking: true, + sidebar: { + filteredIds: ['reference/noir_wasm/index'], + }, + readme: 'none', + hidePageHeader: true, + hideBreadcrumbs: true, + hideInPageTOC: true, + useCodeBlocks: true, + typeDeclarationFormat: 'table', + propertiesFormat: 'table', + parametersFormat: 'table', + enumMembersFormat: 'table', + indexFormat: 'table', + outputFileStrategy: 'members', + memberPageTitle: '{name}', + membersWithOwnFile: ['Function', 'TypeAlias'], + }, + ], ], markdown: { format: 'detect', diff --git a/noir/docs/package.json b/noir/docs/package.json index 71b624ff565..c2a36357b05 100644 --- a/noir/docs/package.json +++ b/noir/docs/package.json @@ -3,7 +3,7 @@ "version": "0.0.0", "private": true, "scripts": { - "preprocess": "yarn node ./scripts/preprocess/index.js", + "preprocess": "./scripts/codegen_nargo_reference.sh && yarn node ./scripts/preprocess/index.js", "start": "yarn preprocess && docusaurus start", "build": "yarn preprocess && yarn version::stables && docusaurus build", "version::stables": "ts-node ./scripts/setStable.ts", diff --git a/noir/docs/scripts/codegen_nargo_reference.sh 
b/noir/docs/scripts/codegen_nargo_reference.sh new file mode 100755 index 00000000000..4ff7d43d142 --- /dev/null +++ b/noir/docs/scripts/codegen_nargo_reference.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +set -eu + +cd $(dirname "$0")/.. + +REFERENCE_DIR="./processed-docs/reference" +NARGO_REFERENCE="$REFERENCE_DIR/nargo_commands.md" +rm -f $NARGO_REFERENCE +mkdir -p $REFERENCE_DIR + +echo "--- +title: Nargo +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +sidebar_position: 0 +--- +" > $NARGO_REFERENCE + +cargo run -F codegen-docs -- info >> $NARGO_REFERENCE diff --git a/noir/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md b/noir/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md index 8022b0e5f20..6aaad542ee0 100644 --- a/noir/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md +++ b/noir/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md @@ -120,6 +120,7 @@ You can currently deploy the Solidity verifier contracts to most EVM compatible - Polygon PoS - Scroll - Celo +- Taiko Other EVM chains should work, but have not been tested directly by our team. If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. 
diff --git a/noir/noir_stdlib/src/array.nr b/noir/noir_stdlib/src/array.nr index 87cf4167dac..995af6c4c6f 100644 --- a/noir/noir_stdlib/src/array.nr +++ b/noir/noir_stdlib/src/array.nr @@ -1,3 +1,4 @@ +use crate::cmp::{Ord}; // TODO: Once we fully move to the new SSA pass this module can be removed and replaced // by the methods in the `slice` module @@ -5,23 +6,53 @@ impl [T; N] { #[builtin(array_len)] pub fn len(self) -> Field {} - #[builtin(arraysort)] - pub fn sort(self) -> Self {} + pub fn sort(self) -> Self where T: Ord { + self.sort_via(|a, b| a <= b) + } + + pub fn sort_via(self, ordering: fn[Env](T, T) -> bool) -> Self { + let sorted_index = self.get_sorting_index(ordering); + let mut result = self; + // Ensure the indexes are correct + for i in 0..N { + let pos = find_index(sorted_index, i); + assert(sorted_index[pos] == i); + } + // Sort the array using the indexes + for i in 0..N { + result[i] = self[sorted_index[i]]; + } + // Ensure the array is sorted + for i in 0..N-1 { + assert(ordering(result[i], result[i+1])); + } + + result + } - // Sort with a custom sorting function. - pub fn sort_via(mut a: Self, ordering: fn[Env](T, T) -> bool) -> Self { - for i in 1 .. a.len() { + /// Returns the index of the elements in the array that would sort it, using the provided custom sorting function. + unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [Field; N] { + let mut result = [0;N]; + let mut a = self; + for i in 0..N { + result[i] = i; + } + for i in 1 .. N { for j in 0..i { if ordering(a[i], a[j]) { let old_a_j = a[j]; a[j] = a[i]; a[i] = old_a_j; + let old_j = result[j]; + result[j] = result[i]; + result[i] = old_j; } } } - a + result } + // Converts an array into a slice. 
pub fn as_slice(self) -> [T] { let mut slice = []; @@ -83,3 +114,15 @@ impl [T; N] { ret } } + +// helper function used to look up the position of a value in an array of Field +// Note that function returns 0 if the value is not found +unconstrained fn find_index(a: [Field;N], find: Field) -> Field { + let mut result = 0; + for i in 0..a.len() { + if a[i] == find { + result = i; + } + } + result +} \ No newline at end of file diff --git a/noir/noir_stdlib/src/cmp.nr b/noir/noir_stdlib/src/cmp.nr index b3de3e2658e..38316e5d6a8 100644 --- a/noir/noir_stdlib/src/cmp.nr +++ b/noir/noir_stdlib/src/cmp.nr @@ -8,12 +8,10 @@ impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } impl Eq for u1 { fn eq(self, other: u1) -> bool { self == other } } impl Eq for u8 { fn eq(self, other: u8) -> bool { self == other } } -impl Eq for u16 { fn eq(self, other: u16) -> bool { self == other } } impl Eq for u32 { fn eq(self, other: u32) -> bool { self == other } } impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } impl Eq for i8 { fn eq(self, other: i8) -> bool { self == other } } -impl Eq for i16 { fn eq(self, other: i16) -> bool { self == other } } impl Eq for i32 { fn eq(self, other: i32) -> bool { self == other } } impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } @@ -111,18 +109,6 @@ impl Ord for u8 { } } -impl Ord for u16 { - fn cmp(self, other: u16) -> Ordering { - if self < other { - Ordering::less() - } else if self > other { - Ordering::greater() - } else { - Ordering::equal() - } - } -} - impl Ord for u32 { fn cmp(self, other: u32) -> Ordering { if self < other { @@ -159,18 +145,6 @@ impl Ord for i8 { } } -impl Ord for i16 { - fn cmp(self, other: i16) -> Ordering { - if self < other { - Ordering::less() - } else if self > other { - Ordering::greater() - } else { - Ordering::equal() - } - } -} - impl Ord for i32 { fn cmp(self, other: i32) -> Ordering { if self < other { diff --git a/noir/noir_stdlib/src/convert.nr 
b/noir/noir_stdlib/src/convert.nr index 814f63f1cde..00ac0a0fd8c 100644 --- a/noir/noir_stdlib/src/convert.nr +++ b/noir/noir_stdlib/src/convert.nr @@ -24,37 +24,28 @@ impl Into for U where T: From { // docs:start:from-impls // Unsigned integers -impl From for u16 { fn from(value: u8) -> u16 { value as u16 } } impl From for u32 { fn from(value: u8) -> u32 { value as u32 } } -impl From for u32 { fn from(value: u16) -> u32 { value as u32 } } impl From for u64 { fn from(value: u8) -> u64 { value as u64 } } -impl From for u64 { fn from(value: u16) -> u64 { value as u64 } } impl From for u64 { fn from(value: u32) -> u64 { value as u64 } } impl From for Field { fn from(value: u8) -> Field { value as Field } } -impl From for Field { fn from(value: u16) -> Field { value as Field } } impl From for Field { fn from(value: u32) -> Field { value as Field } } impl From for Field { fn from(value: u64) -> Field { value as Field } } // Signed integers -impl From for i16 { fn from(value: i8) -> i16 { value as i16 } } impl From for i32 { fn from(value: i8) -> i32 { value as i32 } } -impl From for i32 { fn from(value: i16) -> i32 { value as i32 } } impl From for i64 { fn from(value: i8) -> i64 { value as i64 } } -impl From for i64 { fn from(value: i16) -> i64 { value as i64 } } impl From for i64 { fn from(value: i32) -> i64 { value as i64 } } // Booleans impl From for u8 { fn from(value: bool) -> u8 { value as u8 } } -impl From for u16 { fn from(value: bool) -> u16 { value as u16 } } impl From for u32 { fn from(value: bool) -> u32 { value as u32 } } impl From for u64 { fn from(value: bool) -> u64 { value as u64 } } impl From for i8 { fn from(value: bool) -> i8 { value as i8 } } -impl From for i16 { fn from(value: bool) -> i16 { value as i16 } } impl From for i32 { fn from(value: bool) -> i32 { value as i32 } } impl From for i64 { fn from(value: bool) -> i64 { value as i64 } } impl From for Field { fn from(value: bool) -> Field { value as Field } } diff --git 
a/noir/noir_stdlib/src/default.nr b/noir/noir_stdlib/src/default.nr index ba6412a834f..32c4f3f3b48 100644 --- a/noir/noir_stdlib/src/default.nr +++ b/noir/noir_stdlib/src/default.nr @@ -7,12 +7,10 @@ trait Default { impl Default for Field { fn default() -> Field { 0 } } impl Default for u8 { fn default() -> u8 { 0 } } -impl Default for u16 { fn default() -> u16 { 0 } } impl Default for u32 { fn default() -> u32 { 0 } } impl Default for u64 { fn default() -> u64 { 0 } } impl Default for i8 { fn default() -> i8 { 0 } } -impl Default for i16 { fn default() -> i16 { 0 } } impl Default for i32 { fn default() -> i32 { 0 } } impl Default for i64 { fn default() -> i64 { 0 } } diff --git a/noir/noir_stdlib/src/field/bn254.nr b/noir/noir_stdlib/src/field/bn254.nr index f6e23f8db0c..9e1445fd3ba 100644 --- a/noir/noir_stdlib/src/field/bn254.nr +++ b/noir/noir_stdlib/src/field/bn254.nr @@ -1,7 +1,10 @@ +// The low and high decomposition of the field modulus global PLO: Field = 53438638232309528389504892708671455233; global PHI: Field = 64323764613183177041862057485226039389; + global TWO_POW_128: Field = 0x100000000000000000000000000000000; +/// A hint for decomposing a single field into two 16 byte fields. unconstrained fn decompose_unsafe(x: Field) -> (Field, Field) { let x_bytes = x.to_le_bytes(32); @@ -18,14 +21,20 @@ unconstrained fn decompose_unsafe(x: Field) -> (Field, Field) { (low, high) } +/// Decompose a single field into two 16 byte fields. 
pub fn decompose(x: Field) -> (Field, Field) { + // Take hints of the decomposition let (xlo, xhi) = decompose_unsafe(x); let borrow = lt_unsafe(PLO, xlo, 16); + // Range check the limbs xlo.assert_max_bit_size(128); xhi.assert_max_bit_size(128); + // Check that the decomposition is correct assert_eq(x, xlo + TWO_POW_128 * xhi); + + // Check that (xlo < plo && xhi <= phi) || (xlo >= plo && xhi < phi) let rlo = PLO - xlo + (borrow as Field) * TWO_POW_128; let rhi = PHI - xhi - (borrow as Field); @@ -59,11 +68,13 @@ unconstrained fn lte_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { } pub fn assert_gt(a: Field, b: Field) { + // Decompose a and b let (alo, ahi) = decompose(a); let (blo, bhi) = decompose(b); let borrow = lte_unsafe(alo, blo, 16); + // Assert that (alo > blo && ahi >= bhi) || (alo <= blo && ahi > bhi) let rlo = alo - blo - 1 + (borrow as Field) * TWO_POW_128; let rhi = ahi - bhi - (borrow as Field); diff --git a/noir/noir_stdlib/src/ops.nr b/noir/noir_stdlib/src/ops.nr index 50386290b8e..e561265629e 100644 --- a/noir/noir_stdlib/src/ops.nr +++ b/noir/noir_stdlib/src/ops.nr @@ -7,12 +7,10 @@ trait Add { impl Add for Field { fn add(self, other: Field) -> Field { self + other } } impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } -impl Add for u16 { fn add(self, other: u16) -> u16 { self + other } } impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } impl Add for u64 { fn add(self, other: u64) -> u64 { self + other } } impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } -impl Add for i16 { fn add(self, other: i16) -> i16 { self + other } } impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } @@ -25,12 +23,10 @@ trait Sub { impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } -impl Sub for u16 { fn sub(self, other: u16) -> u16 { 
self - other } } impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } impl Sub for u64 { fn sub(self, other: u64) -> u64 { self - other } } impl Sub for i8 { fn sub(self, other: i8) -> i8 { self - other } } -impl Sub for i16 { fn sub(self, other: i16) -> i16 { self - other } } impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } @@ -43,12 +39,10 @@ trait Mul { impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } -impl Mul for u16 { fn mul(self, other: u16) -> u16 { self * other } } impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } impl Mul for u64 { fn mul(self, other: u64) -> u64 { self * other } } impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } -impl Mul for i16 { fn mul(self, other: i16) -> i16 { self * other } } impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } impl Mul for i64 { fn mul(self, other: i64) -> i64 { self * other } } @@ -61,12 +55,10 @@ trait Div { impl Div for Field { fn div(self, other: Field) -> Field { self / other } } impl Div for u8 { fn div(self, other: u8) -> u8 { self / other } } -impl Div for u16 { fn div(self, other: u16) -> u16 { self / other } } impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } -impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } @@ -77,12 +69,10 @@ trait Rem{ // docs:end:rem-trait impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } -impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } impl Rem for u32 { fn rem(self, other: u32) 
-> u32 { self % other } } impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } -impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } @@ -95,12 +85,10 @@ trait BitOr { impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } -impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } -impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } @@ -113,12 +101,10 @@ trait BitAnd { impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } -impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { self & other } } impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } -impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } @@ -131,12 +117,10 @@ trait BitXor { impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } -impl BitXor for 
u16 { fn bitxor(self, other: u16) -> u16 { self ^ other } } impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } -impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } @@ -147,13 +131,11 @@ trait Shl { // docs:end:shl-trait impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } -impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } // Bit shifting is not currently supported for signed integer types // impl Shl for i8 { fn shl(self, other: i8) -> i8 { self << other } } -// impl Shl for i16 { fn shl(self, other: i16) -> i16 { self << other } } // impl Shl for i32 { fn shl(self, other: i32) -> i32 { self << other } } // impl Shl for i64 { fn shl(self, other: i64) -> i64 { self << other } } @@ -164,12 +146,10 @@ trait Shr { // docs:end:shr-trait impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } -impl Shr for u16 { fn shr(self, other: u16) -> u16 { self >> other } } impl Shr for u32 { fn shr(self, other: u32) -> u32 { self >> other } } impl Shr for u64 { fn shr(self, other: u64) -> u64 { self >> other } } // Bit shifting is not currently supported for signed integer types // impl Shr for i8 { fn shr(self, other: i8) -> i8 { self >> other } } -// impl Shr for i16 { fn shr(self, other: i16) -> i16 { self >> other } } // impl Shr for i32 { fn shr(self, other: i32) -> i32 { self >> other } } // impl Shr for i64 { fn shr(self, other: i64) -> i64 { self >> other } } diff --git a/noir/noir_stdlib/src/option.nr b/noir/noir_stdlib/src/option.nr index 
137d57f33db..cab95731d05 100644 --- a/noir/noir_stdlib/src/option.nr +++ b/noir/noir_stdlib/src/option.nr @@ -56,6 +56,12 @@ impl Option { } } + /// Asserts `self.is_some()` with a provided custom message and returns the contained `Some` value + fn expect(self, message: fmtstr) -> T { + assert(self.is_some(), message); + self._value + } + /// If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. pub fn map(self, f: fn[Env](T) -> U) -> Option { if self._is_some { diff --git a/noir/noirc_macros/Cargo.toml b/noir/noirc_macros/Cargo.toml new file mode 100644 index 00000000000..699e6b01cae --- /dev/null +++ b/noir/noirc_macros/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "noirc_macros" +version.workspace = true +authors.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +repository.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +noirc_frontend.workspace = true +iter-extended.workspace = true \ No newline at end of file diff --git a/noir/noirc_macros/src/lib.rs b/noir/noirc_macros/src/lib.rs new file mode 100644 index 00000000000..4337214d69f --- /dev/null +++ b/noir/noirc_macros/src/lib.rs @@ -0,0 +1,61 @@ +use noirc_frontend::macros_api::parse_program; +use noirc_frontend::macros_api::HirContext; +use noirc_frontend::macros_api::SortedModule; +use noirc_frontend::macros_api::{CrateId, FileId}; +use noirc_frontend::macros_api::{MacroError, MacroProcessor}; + +pub struct AssertMessageMacro; + +impl MacroProcessor for AssertMessageMacro { + fn process_untyped_ast( + &self, + ast: SortedModule, + crate_id: &CrateId, + _context: &HirContext, + ) -> Result { + transform(ast, crate_id) + } + + // This macro does not need to process any information after name resolution + fn process_typed_ast( + &self, + _crate_id: &CrateId, + _context: &mut HirContext, + ) -> Result<(), (MacroError, FileId)> { + Ok(()) + } +} + 
+fn transform(ast: SortedModule, crate_id: &CrateId) -> Result { + let ast = add_resolve_assert_message_funcs(ast, crate_id)?; + + Ok(ast) +} + +fn add_resolve_assert_message_funcs( + mut ast: SortedModule, + crate_id: &CrateId, +) -> Result { + if !crate_id.is_stdlib() { + return Ok(ast); + } + let assert_message_oracles = " + #[oracle(assert_message)] + unconstrained fn assert_message_oracle(_input: T) {} + unconstrained pub fn resolve_assert_message(input: T, condition: bool) { + if !condition { + assert_message_oracle(input); + } + }"; + + let (assert_msg_funcs_ast, errors) = parse_program(assert_message_oracles); + assert_eq!(errors.len(), 0, "Failed to parse Noir macro code. This is either a bug in the compiler or the Noir macro code"); + + let assert_msg_funcs_ast = assert_msg_funcs_ast.into_sorted(); + + for func in assert_msg_funcs_ast.functions { + ast.functions.push(func) + } + + Ok(ast) +} diff --git a/noir/scripts/nargo_compile_noir_js_assert_lt.sh b/noir/scripts/nargo_compile_noir_js_assert_lt.sh deleted file mode 100755 index 636ae59b996..00000000000 --- a/noir/scripts/nargo_compile_noir_js_assert_lt.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -cd ./tooling/noir_js/test/noir_compiled_examples/assert_lt -nargo compile \ No newline at end of file diff --git a/noir/test_programs/compile_failure/assert_msg_runtime/Nargo.toml b/noir/test_programs/compile_failure/assert_msg_runtime/Nargo.toml new file mode 100644 index 00000000000..765f632ff74 --- /dev/null +++ b/noir/test_programs/compile_failure/assert_msg_runtime/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_msg_runtime" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/compile_failure/assert_msg_runtime/Prover.toml b/noir/test_programs/compile_failure/assert_msg_runtime/Prover.toml new file mode 100644 index 00000000000..f28f2f8cc48 --- /dev/null +++ 
b/noir/test_programs/compile_failure/assert_msg_runtime/Prover.toml @@ -0,0 +1,2 @@ +x = "5" +y = "10" diff --git a/noir/test_programs/compile_failure/assert_msg_runtime/src/main.nr b/noir/test_programs/compile_failure/assert_msg_runtime/src/main.nr new file mode 100644 index 00000000000..bec3082550a --- /dev/null +++ b/noir/test_programs/compile_failure/assert_msg_runtime/src/main.nr @@ -0,0 +1,7 @@ +fn main(x: Field, y: pub Field) { + assert(x != y, f"Expected x != y, but got both equal {x}"); + assert(x != y); + let z = x + y; + assert(z != y, f"Expected z != y, but got both equal {z}"); + assert_eq(x, y, f"Expected x == y, but x is {x} and y is {y}"); +} \ No newline at end of file diff --git a/noir/test_programs/compile_failure/brillig_assert_msg_runtime/Nargo.toml b/noir/test_programs/compile_failure/brillig_assert_msg_runtime/Nargo.toml new file mode 100644 index 00000000000..00f97b7273a --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_assert_msg_runtime/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_assert_msg_runtime" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/compile_failure/brillig_assert_msg_runtime/Prover.toml b/noir/test_programs/compile_failure/brillig_assert_msg_runtime/Prover.toml new file mode 100644 index 00000000000..0e5dfd5638d --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_assert_msg_runtime/Prover.toml @@ -0,0 +1 @@ +x = "5" diff --git a/noir/test_programs/compile_failure/brillig_assert_msg_runtime/src/main.nr b/noir/test_programs/compile_failure/brillig_assert_msg_runtime/src/main.nr new file mode 100644 index 00000000000..428b2006363 --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_assert_msg_runtime/src/main.nr @@ -0,0 +1,10 @@ +fn main(x: Field) { + assert(1 == conditional(x)); +} + +unconstrained fn conditional(x: Field) -> Field { + let z = x as u8 + 20; + assert_eq(z, 25, f"Expected 25 but 
got {z}"); + assert(x == 10, f"Expected x to equal 10, but got {x}"); + 1 +} \ No newline at end of file diff --git a/noir/test_programs/compile_failure/brillig_mut_ref_from_acir/Nargo.toml b/noir/test_programs/compile_failure/brillig_mut_ref_from_acir/Nargo.toml new file mode 100644 index 00000000000..a20ee09714c --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_mut_ref_from_acir/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_mut_ref_from_acir" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/compile_failure/brillig_mut_ref_from_acir/src/main.nr b/noir/test_programs/compile_failure/brillig_mut_ref_from_acir/src/main.nr new file mode 100644 index 00000000000..cf3279cac0d --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_mut_ref_from_acir/src/main.nr @@ -0,0 +1,8 @@ +unconstrained fn mut_ref_identity(value: &mut Field) -> Field { + *value +} + +fn main(mut x: Field, y: pub Field) { + let returned_x = mut_ref_identity(&mut x); + assert(returned_x == x); +} \ No newline at end of file diff --git a/noir/test_programs/compile_failure/brillig_slice_to_acir/Nargo.toml b/noir/test_programs/compile_failure/brillig_slice_to_acir/Nargo.toml new file mode 100644 index 00000000000..c3e51561cc7 --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_slice_to_acir/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_slice_to_acir" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/compile_failure/brillig_slice_to_acir/src/main.nr b/noir/test_programs/compile_failure/brillig_slice_to_acir/src/main.nr new file mode 100644 index 00000000000..dcf23aac5f5 --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_slice_to_acir/src/main.nr @@ -0,0 +1,14 @@ +global DEPTH: Field = 40000; + +fn main(x: [u32; DEPTH], y: u32) { + let mut new_x = []; + new_x = clear(x, 
y); +} + +unconstrained fn clear(x: [u32; DEPTH], y: u32) -> [u32] { + let mut a = []; + for i in 0..y { + a = a.push_back(x[i]); + } + a +} diff --git a/noir/test_programs/compile_failure/brillig_vec_to_acir/Nargo.toml b/noir/test_programs/compile_failure/brillig_vec_to_acir/Nargo.toml new file mode 100644 index 00000000000..c09fc417b55 --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_vec_to_acir/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_vec_to_acir" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/compile_failure/brillig_vec_to_acir/src/main.nr b/noir/test_programs/compile_failure/brillig_vec_to_acir/src/main.nr new file mode 100644 index 00000000000..8f872f1b903 --- /dev/null +++ b/noir/test_programs/compile_failure/brillig_vec_to_acir/src/main.nr @@ -0,0 +1,14 @@ +global DEPTH: Field = 40000; + +fn main(x: [u32; DEPTH], y: u32) { + let mut new_x = Vec::new(); + new_x = clear(x, y); +} + +unconstrained fn clear(x: [u32; DEPTH], y: u32) -> Vec { + let mut a = Vec::new(); + for i in 0..y { + a.push(x[i]); + } + a +} diff --git a/noir/test_programs/compile_failure/option_expect/Nargo.toml b/noir/test_programs/compile_failure/option_expect/Nargo.toml new file mode 100644 index 00000000000..1ee1215ff71 --- /dev/null +++ b/noir/test_programs/compile_failure/option_expect/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "option_expect" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/compile_failure/option_expect/src/main.nr b/noir/test_programs/compile_failure/option_expect/src/main.nr new file mode 100644 index 00000000000..439ce4f386e --- /dev/null +++ b/noir/test_programs/compile_failure/option_expect/src/main.nr @@ -0,0 +1,8 @@ +fn main() { + let inner_value = 3; + let none = Option::none(); + let some = Option::some(inner_value); + + assert(some.expect(f"Should 
have the value {inner_value}") == 3); + assert(none.expect(f"Should have the value {inner_value}") == 3); +} diff --git a/noir/test_programs/compile_failure/option_expect_bad_input/Nargo.toml b/noir/test_programs/compile_failure/option_expect_bad_input/Nargo.toml new file mode 100644 index 00000000000..0555681e188 --- /dev/null +++ b/noir/test_programs/compile_failure/option_expect_bad_input/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "option_expect_bad_input" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/compile_failure/option_expect_bad_input/src/main.nr b/noir/test_programs/compile_failure/option_expect_bad_input/src/main.nr new file mode 100644 index 00000000000..cc93e767975 --- /dev/null +++ b/noir/test_programs/compile_failure/option_expect_bad_input/src/main.nr @@ -0,0 +1,6 @@ +fn main() { + let inner_value = 3; + let some = Option::some(inner_value); + + assert(some.expect("Should have the value {inner_value}") == 3); +} diff --git a/noir/test_programs/compile_success_empty/brillig_cast/src/main.nr b/noir/test_programs/compile_success_empty/brillig_cast/src/main.nr index 3ba29b52982..ecb832468ba 100644 --- a/noir/test_programs/compile_success_empty/brillig_cast/src/main.nr +++ b/noir/test_programs/compile_success_empty/brillig_cast/src/main.nr @@ -17,33 +17,25 @@ unconstrained fn bool_casts() { unconstrained fn field_casts() { assert(5 as u8 as Field == 5); - assert(16 as u4 as Field == 0); + assert(256 as u8 as Field == 0); } unconstrained fn uint_casts() { - let x: u32 = 100; - assert(x as u2 == 0); - assert(x as u4 == 4); - assert(x as u6 == 36); - assert(x as u8 == 100); - assert(x as u64 == 100); - assert(x as u126 == 100); + let x: u32 = 300; + assert(x as u8 == 44); + assert(x as u32 == 300); + assert(x as u64 == 300); } unconstrained fn int_casts() { - let x: i32 = 100; - assert(x as i2 == 0); - assert(x as i4 == 4); - assert(x as i6 == -28 as i6); - 
assert(x as i8 == 100); - assert(x as i8 == 100); - assert(x as i8 == 100); + let x: i32 = 456; + assert(x as i8 == -56 as i8); + assert(x as i64 == 456); } unconstrained fn mixed_casts() { assert(100 as u32 as i32 as u32 == 100); - assert(13 as u4 as i2 as u32 == 1); - assert(15 as u4 as i2 as u32 == 3); + assert(257 as u8 as u32 == 1); assert(1 as u8 as bool == true); assert(true as i8 == 1); } diff --git a/noir/test_programs/compile_success_empty/brillig_modulo/src/main.nr b/noir/test_programs/compile_success_empty/brillig_modulo/src/main.nr index ed0353b101a..195ed31fb08 100644 --- a/noir/test_programs/compile_success_empty/brillig_modulo/src/main.nr +++ b/noir/test_programs/compile_success_empty/brillig_modulo/src/main.nr @@ -7,9 +7,9 @@ fn main() { assert(signed_modulo(5, 3) == 2); assert(signed_modulo(2, 3) == 2); - let minus_two: i4 = -2; // 14 - let minus_three: i4 = -3; // 13 - let minus_five: i4 = -5; // 11 + let minus_two: i8 = -2; // 254 + let minus_three: i8 = -3; // 253 + let minus_five: i8 = -5; // 251 // (5 / -3) * -3 + 2 = -1 * -3 + 2 = 3 + 2 = 5 assert(signed_modulo(5, minus_three) == 2); // (-5 / 3) * 3 - 2 = -1 * 3 - 2 = -3 - 2 = -5 @@ -22,6 +22,6 @@ unconstrained fn modulo(x: u32, y: u32) -> u32 { x % y } -unconstrained fn signed_modulo(x: i4, y: i4) -> i4 { +unconstrained fn signed_modulo(x: i8, y: i8) -> i8 { x % y } diff --git a/noir/test_programs/compile_success_empty/comptime_sort/src/main.nr b/noir/test_programs/compile_success_empty/comptime_sort/src/main.nr deleted file mode 100644 index a24a6ebaba6..00000000000 --- a/noir/test_programs/compile_success_empty/comptime_sort/src/main.nr +++ /dev/null @@ -1,7 +0,0 @@ -fn main() { - let unsorted: [u8; 3] = [3, 1, 2]; - let sorted = unsorted.sort(); - assert(sorted[0] == 1); - assert(sorted[1] == 2); - assert(sorted[2] == 3); -} diff --git a/noir/test_programs/compile_success_empty/literal_not_simplification/Nargo.toml 
b/noir/test_programs/compile_success_empty/literal_not_simplification/Nargo.toml new file mode 100644 index 00000000000..63d73ed3c0a --- /dev/null +++ b/noir/test_programs/compile_success_empty/literal_not_simplification/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "literal_not_simplification" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] diff --git a/noir/test_programs/compile_success_empty/literal_not_simplification/src/main.nr b/noir/test_programs/compile_success_empty/literal_not_simplification/src/main.nr new file mode 100644 index 00000000000..33198a326c9 --- /dev/null +++ b/noir/test_programs/compile_success_empty/literal_not_simplification/src/main.nr @@ -0,0 +1,8 @@ +fn main() { + let four: u8 = 4; + let not_four: u8 = !four; + + let five: u8 = 5; + let not_five: u8 = !five; + assert(not_four != not_five); +} diff --git a/noir/test_programs/compile_success_empty/method_call_regression/Nargo.toml b/noir/test_programs/compile_success_empty/method_call_regression/Nargo.toml index 92c9b942008..09f95590aad 100644 --- a/noir/test_programs/compile_success_empty/method_call_regression/Nargo.toml +++ b/noir/test_programs/compile_success_empty/method_call_regression/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "short" +name = "method_call_regression" type = "bin" authors = [""] compiler_version = ">=0.19.4" diff --git a/noir/test_programs/compile_success_empty/option/src/main.nr b/noir/test_programs/compile_success_empty/option/src/main.nr index 1f879bd375f..989c8f65bf4 100644 --- a/noir/test_programs/compile_success_empty/option/src/main.nr +++ b/noir/test_programs/compile_success_empty/option/src/main.nr @@ -1,5 +1,3 @@ -use dep::std::option::Option; - fn main() { let ten = 10; // giving this a name, to ensure that the Option functions work with closures let none = Option::none(); @@ -22,6 +20,8 @@ fn main() { assert(some.map(|x| x * 2).unwrap() == 6); assert(some.map(|x| x * ten).unwrap() == 30); + assert(some.expect(f"Should 
have a value") == 3); + assert(none.map_or(0, |x| x * 2) == 0); assert(some.map_or(0, |x| x * 2) == 6); assert(none.map_or(0, |x| x * ten) == 0); diff --git a/noir/test_programs/compile_success_empty/trait_static_methods/Nargo.toml b/noir/test_programs/compile_success_empty/trait_static_methods/Nargo.toml index 71c541ccd4f..ea30031b9a5 100644 --- a/noir/test_programs/compile_success_empty/trait_static_methods/Nargo.toml +++ b/noir/test_programs/compile_success_empty/trait_static_methods/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "trait_self" +name = "trait_static_methods" type = "bin" authors = [""] diff --git a/noir/test_programs/execution_success/5_over/src/main.nr b/noir/test_programs/execution_success/5_over/src/main.nr index f24ff06cb2a..313d580a8d1 100644 --- a/noir/test_programs/execution_success/5_over/src/main.nr +++ b/noir/test_programs/execution_success/5_over/src/main.nr @@ -5,6 +5,6 @@ fn main(mut x: u32, y: u32) { x = std::wrapping_mul(x,x); assert(y == x); - let c: u3 = 2; - assert(c > x as u3); + let c: u1 = 0; + assert(x as u1 > c); } diff --git a/noir/test_programs/execution_success/bit_not/Nargo.toml b/noir/test_programs/execution_success/bit_not/Nargo.toml new file mode 100644 index 00000000000..e89a338595b --- /dev/null +++ b/noir/test_programs/execution_success/bit_not/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "bit_not" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] diff --git a/noir/test_programs/execution_success/bit_not/Prover.toml b/noir/test_programs/execution_success/bit_not/Prover.toml new file mode 100644 index 00000000000..b4bcbcec177 --- /dev/null +++ b/noir/test_programs/execution_success/bit_not/Prover.toml @@ -0,0 +1 @@ +four_as_u32 = 4 diff --git a/noir/test_programs/execution_success/bit_not/src/main.nr b/noir/test_programs/execution_success/bit_not/src/main.nr new file mode 100644 index 00000000000..30b78d330ce --- /dev/null +++ b/noir/test_programs/execution_success/bit_not/src/main.nr @@ 
-0,0 +1,8 @@ +fn main(four_as_u32: u32) { + let four_as_u8: u8 = 4; + let not_four_as_u8: u8 = !four_as_u8; + assert_eq(not_four_as_u8, 251); + + let not_four_as_u32: u32 = !four_as_u32; + assert_eq(not_four_as_u32, 4294967291); +} diff --git a/noir/test_programs/execution_success/bit_shifts_comptime/src/main.nr b/noir/test_programs/execution_success/bit_shifts_comptime/src/main.nr index 9bb1028173d..9184b5bd5e6 100644 --- a/noir/test_programs/execution_success/bit_shifts_comptime/src/main.nr +++ b/noir/test_programs/execution_success/bit_shifts_comptime/src/main.nr @@ -14,7 +14,7 @@ fn main(x: u64) { //regression for 3481 assert(x << 63 == 0); - assert_eq((1 as u56) << (32 as u56), 0x0100000000); + assert_eq((1 as u64) << (32 as u64), 0x0100000000); } fn regression_2250() { diff --git a/noir/test_programs/execution_success/bit_shifts_runtime/src/main.nr b/noir/test_programs/execution_success/bit_shifts_runtime/src/main.nr index 33d68765598..28b3ef656c1 100644 --- a/noir/test_programs/execution_success/bit_shifts_runtime/src/main.nr +++ b/noir/test_programs/execution_success/bit_shifts_runtime/src/main.nr @@ -16,4 +16,5 @@ fn main(x: u64, y: u64) { assert(a << 7 == -128); assert(a << -a == -2); + assert(x >> x == 0); } diff --git a/noir/test_programs/execution_success/brillig_assert/src/main.nr b/noir/test_programs/execution_success/brillig_assert/src/main.nr index 91e4cebd9d3..16fe7b29061 100644 --- a/noir/test_programs/execution_success/brillig_assert/src/main.nr +++ b/noir/test_programs/execution_success/brillig_assert/src/main.nr @@ -6,7 +6,7 @@ fn main(x: Field) { } unconstrained fn conditional(x: bool) -> Field { - assert(x, "x is false"); - assert_eq(x, true, "x is false"); + assert(x, f"Expected x to be false but got {x}"); + assert_eq(x, true, f"Expected x to be false but got {x}"); 1 } diff --git a/noir/test_programs/compile_success_empty/comptime_sort/Nargo.toml b/noir/test_programs/execution_success/brillig_bit_shifts_runtime/Nargo.toml similarity index 
58% rename from noir/test_programs/compile_success_empty/comptime_sort/Nargo.toml rename to noir/test_programs/execution_success/brillig_bit_shifts_runtime/Nargo.toml index 7d215a22496..ed8200d8a95 100644 --- a/noir/test_programs/compile_success_empty/comptime_sort/Nargo.toml +++ b/noir/test_programs/execution_success/brillig_bit_shifts_runtime/Nargo.toml @@ -1,5 +1,6 @@ [package] -name = "comptime_sort" +name = "brillig_bit_shifts_runtime" type = "bin" authors = [""] + [dependencies] diff --git a/noir/test_programs/execution_success/brillig_bit_shifts_runtime/Prover.toml b/noir/test_programs/execution_success/brillig_bit_shifts_runtime/Prover.toml new file mode 100644 index 00000000000..98d8630792e --- /dev/null +++ b/noir/test_programs/execution_success/brillig_bit_shifts_runtime/Prover.toml @@ -0,0 +1,2 @@ +x = 64 +y = 1 \ No newline at end of file diff --git a/noir/test_programs/execution_success/brillig_bit_shifts_runtime/src/main.nr b/noir/test_programs/execution_success/brillig_bit_shifts_runtime/src/main.nr new file mode 100644 index 00000000000..f22166b5993 --- /dev/null +++ b/noir/test_programs/execution_success/brillig_bit_shifts_runtime/src/main.nr @@ -0,0 +1,20 @@ +unconstrained fn main(x: u64, y: u64) { + // runtime shifts on compile-time known values + assert(64 << y == 128); + assert(64 >> y == 32); + // runtime shifts on runtime values + assert(x << y == 128); + assert(x >> y == 32); + + // Bit-shift with signed integers + let mut a :i8 = y as i8; + let mut b: i8 = x as i8; + assert(b << 1 == -128); + assert(b >> 2 == 16); + assert(b >> a == 32); + a = -a; + assert(a << 7 == -128); + assert(a << -a == -2); + + assert(x >> x == 0); +} diff --git a/noir/test_programs/execution_success/brillig_cow_assign/Nargo.toml b/noir/test_programs/execution_success/brillig_cow_assign/Nargo.toml new file mode 100644 index 00000000000..a878566a372 --- /dev/null +++ b/noir/test_programs/execution_success/brillig_cow_assign/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name 
= "brillig_cow_assign" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] diff --git a/noir/test_programs/execution_success/brillig_cow_assign/Prover.toml b/noir/test_programs/execution_success/brillig_cow_assign/Prover.toml new file mode 100644 index 00000000000..882c73b83f8 --- /dev/null +++ b/noir/test_programs/execution_success/brillig_cow_assign/Prover.toml @@ -0,0 +1,2 @@ +items_to_update = 10 +index = 6 diff --git a/noir/test_programs/execution_success/brillig_cow_assign/src/main.nr b/noir/test_programs/execution_success/brillig_cow_assign/src/main.nr new file mode 100644 index 00000000000..e5c3e2bd2f5 --- /dev/null +++ b/noir/test_programs/execution_success/brillig_cow_assign/src/main.nr @@ -0,0 +1,23 @@ +global N = 10; + +unconstrained fn main() { + let mut arr = [0; N]; + let mut mid_change = arr; + + for i in 0..N { + if i == N / 2 { + mid_change = arr; + } + arr[i] = 27; + } + + // Expect: + // arr = [27, 27, 27, 27, 27, 27, 27, 27, 27, 27] + // mid_change = [27, 27, 27, 27, 27, 0, 0, 0, 0, 0] + + let modified_i = N / 2 + 1; + assert_eq(arr[modified_i], 27); + + // Fail here! 
+ assert(mid_change[modified_i] != 27); +} diff --git a/noir/test_programs/execution_success/conditional_regression_underflow/src/main.nr b/noir/test_programs/execution_success/conditional_regression_underflow/src/main.nr index a101af32505..aaf3754a20f 100644 --- a/noir/test_programs/execution_success/conditional_regression_underflow/src/main.nr +++ b/noir/test_programs/execution_success/conditional_regression_underflow/src/main.nr @@ -1,12 +1,12 @@ // Regression test for https://github.com/noir-lang/noir/issues/3493 -fn main(x: u4) { +fn main(x: u8) { if x == 10 { - x + 15; + x + 255; } if x == 9 { - x << 3; + x << 7; } - if x == 8 { + if x == 128 { x * 3; } if x == 7 { diff --git a/noir/test_programs/execution_success/debug_logs/src/main.nr b/noir/test_programs/execution_success/debug_logs/src/main.nr index cbce6f15286..49e0041594a 100644 --- a/noir/test_programs/execution_success/debug_logs/src/main.nr +++ b/noir/test_programs/execution_success/debug_logs/src/main.nr @@ -3,8 +3,15 @@ use dep::std; fn main(x: Field, y: pub Field) { let string = "i: {i}, j: {j}"; std::println(string); + + // TODO: fmtstr cannot be printed + // let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; + // let fmt_fmt_str = f"fmtstr: {fmt_str}, i: {x}"; + // std::println(fmt_fmt_str); + // A `fmtstr` lets you easily perform string interpolation. 
let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; + let fmt_str = string_identity(fmt_str); std::println(fmt_str); diff --git a/noir/test_programs/execution_success/global_consts/src/main.nr b/noir/test_programs/execution_success/global_consts/src/main.nr index 70c7a745a22..4b22940b3d1 100644 --- a/noir/test_programs/execution_success/global_consts/src/main.nr +++ b/noir/test_programs/execution_success/global_consts/src/main.nr @@ -5,7 +5,10 @@ global M: Field = 32; global L: Field = 10; // Unused globals currently allowed global N: Field = 5; global T_LEN = 2; // Type inference is allowed on globals -//global N: Field = 5; // Uncomment to see duplicate globals error + +// Globals can reference other globals +global DERIVED = M + L; + struct Dummy { x: [Field; N], y: [Field; foo::MAGIC_NUMBER] @@ -70,6 +73,7 @@ fn main( foo::from_foo(d); baz::from_baz(c); + assert(DERIVED == M + L); } fn multiplyByM(x: Field) -> Field { diff --git a/noir/test_programs/execution_success/missing_closure_env/Nargo.toml b/noir/test_programs/execution_success/missing_closure_env/Nargo.toml new file mode 100644 index 00000000000..284e61b1144 --- /dev/null +++ b/noir/test_programs/execution_success/missing_closure_env/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "missing_closure_env" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/execution_success/missing_closure_env/Prover.toml b/noir/test_programs/execution_success/missing_closure_env/Prover.toml new file mode 100644 index 00000000000..2d76abaa89f --- /dev/null +++ b/noir/test_programs/execution_success/missing_closure_env/Prover.toml @@ -0,0 +1 @@ +x = 42 diff --git a/noir/test_programs/execution_success/missing_closure_env/src/main.nr b/noir/test_programs/execution_success/missing_closure_env/src/main.nr new file mode 100644 index 00000000000..0bc99b0671c --- /dev/null +++ 
b/noir/test_programs/execution_success/missing_closure_env/src/main.nr @@ -0,0 +1,16 @@ +fn main(x: Field) { + let x1 = &mut 42; + let set_x1 = |y| { *x1 = y; }; + + assert(*x1 == 42); + set_x1(44); + assert(*x1 == 44); + set_x1(*x1); + assert(*x1 == 44); + assert(x == 42); +} + +#[test] +fn test_main() { + main(42); +} diff --git a/noir/test_programs/execution_success/regression/src/main.nr b/noir/test_programs/execution_success/regression/src/main.nr index 08112d4c616..c70e2e75fa8 100644 --- a/noir/test_programs/execution_success/regression/src/main.nr +++ b/noir/test_programs/execution_success/regression/src/main.nr @@ -1,29 +1,49 @@ global NIBBLE_LENGTH: Field = 16; -fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Field) { +struct U4 { + inner: u8, +} + +impl U4 { + fn zero() -> U4 { + U4 { inner: 0 } + } + + fn from_u8(x: u8) -> U4 { + U4 { inner: x % 16 } + } +} + +impl Eq for U4 { + fn eq(self, other: Self) -> bool { + self.inner == other.inner + } +} + +fn compact_decode(input: [u8; N], length: Field) -> ([U4; NIBBLE_LENGTH], Field) { assert(2 * input.len() as u64 <= NIBBLE_LENGTH as u64); assert(length as u64 <= input.len() as u64); - let mut nibble = [0 as u4; NIBBLE_LENGTH]; + let mut nibble = [U4::zero(); NIBBLE_LENGTH]; - let first_nibble = (input[0] >> 4) as u4; - let parity = first_nibble as u1; + let first_nibble = U4::from_u8(input[0] >> 4); + let parity = first_nibble.inner as u1; if parity == 1 { - nibble[0] = (input[0] & 0x0f) as u4; + nibble[0] = U4::from_u8(input[0] & 0x0f); for i in 1..input.len() { if i as u64 < length as u64 { let x = input[i]; - nibble[2*i - 1] = (x >> 4) as u4; - nibble[2*i] = (x & 0x0f) as u4; + nibble[2*i - 1] = U4::from_u8(x >> 4); + nibble[2*i] = U4::from_u8(x & 0x0f); } } } else { for i in 0..2 { if (i as u64) < length as u64 - 1 { let x = input[i + 1]; - nibble[2*i] = (x >> 4) as u4; - nibble[2*i + 1] = (x & 0x0f) as u4; + nibble[2*i] = U4::from_u8(x >> 4); + nibble[2*i + 1] = U4::from_u8(x 
& 0x0f); } } } @@ -78,7 +98,10 @@ fn main(x: [u8; 5], z: Field) { //Issue 1144 let (nib, len) = compact_decode(x, z); assert(len == 5); - assert([nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]); + assert( + [nib[0], nib[1], nib[2], nib[3], nib[4]] + == [U4::from_u8(15), U4::from_u8(1), U4::from_u8(12), U4::from_u8(11), U4::from_u8(8)] + ); // Issue 1169 let val1 = [ 0xb8, 0x8f, 0x61, 0xe6, 0xfb, 0xda, 0x83, 0xfb, 0xff, 0xfa, 0xbe, 0x36, 0x41, 0x12, 0x13, diff --git a/noir/test_programs/execution_success/regression_4202/Nargo.toml b/noir/test_programs/execution_success/regression_4202/Nargo.toml new file mode 100644 index 00000000000..acfba12dd4f --- /dev/null +++ b/noir/test_programs/execution_success/regression_4202/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_4202" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/execution_success/regression_4202/Prover.toml b/noir/test_programs/execution_success/regression_4202/Prover.toml new file mode 100644 index 00000000000..e9319802dfd --- /dev/null +++ b/noir/test_programs/execution_success/regression_4202/Prover.toml @@ -0,0 +1 @@ +input = [1, 2, 3, 4] diff --git a/noir/test_programs/execution_success/regression_4202/src/main.nr b/noir/test_programs/execution_success/regression_4202/src/main.nr new file mode 100644 index 00000000000..37d2ee4578d --- /dev/null +++ b/noir/test_programs/execution_success/regression_4202/src/main.nr @@ -0,0 +1,14 @@ +fn main(input: [u32; 4]) { + let mut slice1: [u32] = [1, 2, 3, 4]; + if slice1[0] == 3 { + slice1[1] = 4; + } + + if slice1[1] == 5 { + slice1[3] = 6; + } + + for i in 0..4 { + assert(slice1[i] == input[i]); + } +} diff --git a/noir/test_programs/execution_success/u128/src/main.nr b/noir/test_programs/execution_success/u128/src/main.nr index 4c734f3a8f9..dc586408795 100644 --- a/noir/test_programs/execution_success/u128/src/main.nr +++ 
b/noir/test_programs/execution_success/u128/src/main.nr @@ -39,6 +39,6 @@ fn main(mut x: u32, y: u32, z: u32, big_int: U128, hexa: str<7>) { assert(shift >> small_int == small_int); assert(shift >> U128::from_integer(127) == U128::from_integer(0)); assert(shift << U128::from_integer(127) == U128::from_integer(0)); - + assert(U128::from_integer(3).to_integer() == 3); } diff --git a/noir/tooling/backend_interface/src/download.rs b/noir/tooling/backend_interface/src/download.rs index 27aab7ef351..60ecb14e642 100644 --- a/noir/tooling/backend_interface/src/download.rs +++ b/noir/tooling/backend_interface/src/download.rs @@ -17,8 +17,12 @@ pub fn download_backend(backend_url: &str, destination_path: &Path) -> std::io:: use tempfile::tempdir; // Download sources - let compressed_file: Cursor> = download_binary_from_url(backend_url) - .map_err(|_| std::io::Error::from(ErrorKind::Other))?; + let compressed_file: Cursor> = download_binary_from_url(backend_url).map_err(|_| { + std::io::Error::new( + ErrorKind::Other, + format!("Could not download backend from install url: {backend_url}"), + ) + })?; // Unpack the tarball let gz_decoder = GzDecoder::new(compressed_file); diff --git a/noir/tooling/bb_abstraction_leaks/build.rs b/noir/tooling/bb_abstraction_leaks/build.rs index 6197f52cb4b..24603186c87 100644 --- a/noir/tooling/bb_abstraction_leaks/build.rs +++ b/noir/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.21.0"; +const VERSION: &str = "0.23.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/noir/tooling/debugger/Cargo.toml b/noir/tooling/debugger/Cargo.toml index 4d240c61f90..785eacf9463 100644 --- a/noir/tooling/debugger/Cargo.toml +++ b/noir/tooling/debugger/Cargo.toml @@ -11,11 +11,12 @@ build-data.workspace = true [dependencies] acvm.workspace = true +fm.workspace = true 
nargo.workspace = true +noirc_frontend.workspace = true noirc_printable_type.workspace = true noirc_errors.workspace = true noirc_driver.workspace = true -fm.workspace = true thiserror.workspace = true codespan-reporting.workspace = true dap.workspace = true diff --git a/noir/tooling/debugger/build.rs b/noir/tooling/debugger/build.rs index cedeebcae86..26a8bc64b0e 100644 --- a/noir/tooling/debugger/build.rs +++ b/noir/tooling/debugger/build.rs @@ -1,3 +1,4 @@ +use std::collections::HashSet; use std::fs::File; use std::io::Write; use std::path::{Path, PathBuf}; @@ -6,9 +7,6 @@ use std::{env, fs}; const GIT_COMMIT: &&str = &"GIT_COMMIT"; fn main() { - // Rebuild if the tests have changed - println!("cargo:rerun-if-changed=tests"); - // Only use build_data if the environment variable isn't set // The environment variable is always set when working via Nix if std::env::var(GIT_COMMIT).is_err() { @@ -29,6 +27,11 @@ fn main() { }; let test_dir = root_dir.join("test_programs"); + // Rebuild if the tests have changed + println!("cargo:rerun-if-changed=tests"); + println!("cargo:rerun-if-changed=ignored-tests.txt"); + println!("cargo:rerun-if-changed={}", test_dir.as_os_str().to_str().unwrap()); + generate_debugger_tests(&mut test_file, &test_dir); } @@ -38,10 +41,13 @@ fn generate_debugger_tests(test_file: &mut File, test_data_dir: &Path) { let test_case_dirs = fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + let ignored_tests_contents = fs::read_to_string("ignored-tests.txt").unwrap(); + let ignored_tests = ignored_tests_contents.lines().collect::>(); for test_dir in test_case_dirs { let test_name = test_dir.file_name().into_string().expect("Directory can't be converted to string"); + let ignored = ignored_tests.contains(test_name.as_str()); if test_name.contains('-') { panic!( "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" @@ -53,11 +59,13 @@ fn generate_debugger_tests(test_file: &mut File, test_data_dir: &Path) { test_file, r#" #[test] +{ignored} fn debug_{test_name}() {{ debugger_execution_success("{test_dir}"); }} "#, test_dir = test_dir.display(), + ignored = if ignored { "#[ignore]" } else { "" }, ) .expect("Could not write templated test file."); } diff --git a/noir/tooling/debugger/ignored-tests.txt b/noir/tooling/debugger/ignored-tests.txt new file mode 100644 index 00000000000..3fb443ebb72 --- /dev/null +++ b/noir/tooling/debugger/ignored-tests.txt @@ -0,0 +1,20 @@ +array_sort +assign_ex +bit_shifts_comptime +brillig_cow +brillig_nested_arrays +brillig_references +brillig_to_bytes_integration +debug_logs +double_verify_proof +modulus +nested_array_dynamic +nested_array_in_slice +nested_arrays_from_brillig +references +scalar_mul +signed_comparison +simple_2d_array +to_bytes_integration +bigint +brillig_slices \ No newline at end of file diff --git a/noir/tooling/debugger/src/context.rs b/noir/tooling/debugger/src/context.rs index 9c12794d5dd..5ab2c63c365 100644 --- a/noir/tooling/debugger/src/context.rs +++ b/noir/tooling/debugger/src/context.rs @@ -1,5 +1,7 @@ +use crate::foreign_calls::DebugForeignCallExecutor; use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; use acvm::acir::native_types::{Witness, WitnessMap}; +use acvm::brillig_vm::brillig::ForeignCallResult; use acvm::brillig_vm::brillig::Value; use acvm::pwg::{ ACVMStatus, BrilligSolver, BrilligSolverStatus, ForeignCallWaitInfo, StepResult, ACVM, @@ -8,8 +10,8 @@ use acvm::{BlackBoxFunctionSolver, FieldElement}; use nargo::artifacts::debug::DebugArtifact; use nargo::errors::{ExecutionError, Location}; -use nargo::ops::ForeignCallExecutor; use nargo::NargoError; +use noirc_printable_type::{PrintableType, PrintableValue}; use std::collections::{hash_set::Iter, HashSet}; @@ -24,7 +26,7 @@ pub(super) enum DebugCommandResult { pub(super) struct DebugContext<'a, B: 
BlackBoxFunctionSolver> { acvm: ACVM<'a, B>, brillig_solver: Option>, - foreign_call_executor: Box, + foreign_call_executor: Box, debug_artifact: &'a DebugArtifact, breakpoints: HashSet, } @@ -35,7 +37,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { circuit: &'a Circuit, debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, - foreign_call_executor: Box, + foreign_call_executor: Box, ) -> Self { Self { acvm: ACVM::new(blackbox_solver, &circuit.opcodes, initial_witness), @@ -76,15 +78,82 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } + pub(super) fn get_call_stack(&self) -> Vec { + let instruction_pointer = self.acvm.instruction_pointer(); + if instruction_pointer >= self.get_opcodes().len() { + vec![] + } else if let Some(ref solver) = self.brillig_solver { + solver + .get_call_stack() + .iter() + .map(|program_counter| OpcodeLocation::Brillig { + acir_index: instruction_pointer, + brillig_index: *program_counter, + }) + .collect() + } else { + vec![OpcodeLocation::Acir(instruction_pointer)] + } + } + + pub(super) fn is_source_location_in_debug_module(&self, location: &Location) -> bool { + self.debug_artifact + .file_map + .get(&location.file) + .map(|file| file.path.starts_with("__debug/")) + .unwrap_or(false) + } + /// Returns the callstack in source code locations for the currently /// executing opcode. This can be `None` if the execution finished (and /// `get_current_opcode_location()` returns `None`) or if the opcode is not /// mapped to a specific source location in the debug artifact (which can - /// happen for certain opcodes inserted synthetically by the compiler) + /// happen for certain opcodes inserted synthetically by the compiler). + /// This function also filters source locations that are determined to be in + /// the internal debug module. 
pub(super) fn get_current_source_location(&self) -> Option> { self.get_current_opcode_location() .as_ref() - .and_then(|location| self.debug_artifact.debug_symbols[0].opcode_location(location)) + .map(|opcode_location| self.get_source_location_for_opcode_location(opcode_location)) + .filter(|v: &Vec| !v.is_empty()) + } + + /// Returns the (possible) stack of source locations corresponding to the + /// given opcode location. Due to compiler inlining it's possible for this + /// function to return multiple source locations. An empty vector means that + /// the given opcode location cannot be mapped back to a source location + /// (eg. it may be pure debug instrumentation code or other synthetically + /// produced opcode by the compiler) + pub(super) fn get_source_location_for_opcode_location( + &self, + opcode_location: &OpcodeLocation, + ) -> Vec { + self.debug_artifact.debug_symbols[0] + .opcode_location(opcode_location) + .map(|source_locations| { + source_locations + .into_iter() + .filter(|source_location| { + !self.is_source_location_in_debug_module(source_location) + }) + .collect() + }) + .unwrap_or(vec![]) + } + + /// Returns the current call stack with expanded source locations. In + /// general, the matching between opcode location and source location is 1 + /// to 1, but due to the compiler inlining functions a single opcode + /// location may expand to multiple source locations. 
+ pub(super) fn get_source_call_stack(&self) -> Vec<(OpcodeLocation, Location)> { + self.get_call_stack() + .iter() + .flat_map(|opcode_location| { + self.get_source_location_for_opcode_location(opcode_location) + .into_iter() + .map(|source_location| (*opcode_location, source_location)) + }) + .collect() } fn get_opcodes_sizes(&self) -> Vec { @@ -224,6 +293,9 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { let foreign_call_result = self.foreign_call_executor.execute(&foreign_call); match foreign_call_result { Ok(foreign_call_result) => { + let foreign_call_result = foreign_call_result + .get_brillig_output() + .unwrap_or(ForeignCallResult::default()); if let Some(mut solver) = self.brillig_solver.take() { solver.resolve_pending_foreign_call(foreign_call_result); self.brillig_solver = Some(solver); @@ -323,7 +395,8 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } - pub(super) fn next(&mut self) -> DebugCommandResult { + /// Steps debugging execution until the next source location + pub(super) fn next_into(&mut self) -> DebugCommandResult { let start_location = self.get_current_source_location(); loop { let result = self.step_into_opcode(); @@ -337,6 +410,38 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } + /// Steps debugging execution until the next source location at the same (or + /// less) call stack depth (eg. don't dive into function calls) + pub(super) fn next_over(&mut self) -> DebugCommandResult { + let start_call_stack = self.get_source_call_stack(); + loop { + let result = self.next_into(); + if !matches!(result, DebugCommandResult::Ok) { + return result; + } + let new_call_stack = self.get_source_call_stack(); + if new_call_stack.len() <= start_call_stack.len() { + return DebugCommandResult::Ok; + } + } + } + + /// Steps debugging execution until the next source location with a smaller + /// call stack depth (eg. 
returning from the current function) + pub(super) fn next_out(&mut self) -> DebugCommandResult { + let start_call_stack = self.get_source_call_stack(); + loop { + let result = self.next_into(); + if !matches!(result, DebugCommandResult::Ok) { + return result; + } + let new_call_stack = self.get_source_call_stack(); + if new_call_stack.len() < start_call_stack.len() { + return DebugCommandResult::Ok; + } + } + } + pub(super) fn cont(&mut self) -> DebugCommandResult { loop { let result = self.step_into_opcode(); @@ -362,6 +467,10 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } + pub(super) fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)> { + return self.foreign_call_executor.get_variables(); + } + fn breakpoint_reached(&self) -> bool { if let Some(location) = self.get_current_opcode_location() { self.breakpoints.contains(&location) @@ -422,6 +531,7 @@ mod tests { use super::*; use crate::context::{DebugCommandResult, DebugContext}; + use crate::foreign_calls::DefaultDebugForeignCallExecutor; use acvm::{ acir::{ circuit::{ @@ -435,7 +545,7 @@ mod tests { BinaryFieldOp, HeapValueType, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray, }, }; - use nargo::{artifacts::debug::DebugArtifact, ops::DefaultForeignCallExecutor}; + use nargo::artifacts::debug::DebugArtifact; use std::collections::BTreeMap; #[test] @@ -483,12 +593,14 @@ mod tests { let initial_witness = BTreeMap::from([(Witness(1), fe_1)]).into(); + let foreign_call_executor = + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); let mut context = DebugContext::new( &StubbedBlackBoxSolver, circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true, None)), + foreign_call_executor, ); assert_eq!(context.get_current_opcode_location(), Some(OpcodeLocation::Acir(0))); @@ -588,12 +700,14 @@ mod tests { let initial_witness = BTreeMap::from([(Witness(1), fe_1), (Witness(2), fe_1)]).into(); + let foreign_call_executor = 
+ Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); let mut context = DebugContext::new( &StubbedBlackBoxSolver, circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true, None)), + foreign_call_executor, ); // set breakpoint @@ -650,7 +764,7 @@ mod tests { &circuit, &debug_artifact, WitnessMap::new(), - Box::new(DefaultForeignCallExecutor::new(true, None)), + Box::new(DefaultDebugForeignCallExecutor::new(true)), ); assert_eq!(context.offset_opcode_location(&None, 0), (None, 0)); diff --git a/noir/tooling/debugger/src/dap.rs b/noir/tooling/debugger/src/dap.rs index 803f9f108db..dd9a30d50da 100644 --- a/noir/tooling/debugger/src/dap.rs +++ b/noir/tooling/debugger/src/dap.rs @@ -9,6 +9,7 @@ use codespan_reporting::files::{Files, SimpleFile}; use crate::context::DebugCommandResult; use crate::context::DebugContext; +use crate::foreign_calls::DefaultDebugForeignCallExecutor; use dap::errors::ServerError; use dap::events::StoppedEventBody; @@ -17,15 +18,14 @@ use dap::requests::{Command, Request, SetBreakpointsArguments}; use dap::responses::{ ContinueResponse, DisassembleResponse, ResponseBody, ScopesResponse, SetBreakpointsResponse, SetExceptionBreakpointsResponse, SetInstructionBreakpointsResponse, StackTraceResponse, - ThreadsResponse, + ThreadsResponse, VariablesResponse, }; use dap::server::Server; use dap::types::{ - Breakpoint, DisassembledInstruction, Source, StackFrame, SteppingGranularity, - StoppedEventReason, Thread, + Breakpoint, DisassembledInstruction, Scope, Source, StackFrame, SteppingGranularity, + StoppedEventReason, Thread, Variable, }; use nargo::artifacts::debug::DebugArtifact; -use nargo::ops::DefaultForeignCallExecutor; use fm::FileId; use noirc_driver::CompiledProgram; @@ -41,6 +41,22 @@ pub struct DapSession<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> { source_breakpoints: BTreeMap>, } +enum ScopeReferences { + Locals = 1, + WitnessMap = 2, + InvalidScope = 0, +} + +impl From 
for ScopeReferences { + fn from(value: i64) -> Self { + match value { + 1 => Self::Locals, + 2 => Self::WitnessMap, + _ => Self::InvalidScope, + } + } +} + // BTreeMap impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { @@ -57,7 +73,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true, None)), + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)), ); Self { server, @@ -125,14 +141,14 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { } pub fn run_loop(&mut self) -> Result<(), ServerError> { - self.running = true; + self.running = self.context.get_current_opcode_location().is_some(); - if matches!(self.context.get_current_source_location(), None) { + if self.running && matches!(self.context.get_current_source_location(), None) { // TODO: remove this? This is to ensure that the tool has a proper // source location to show when first starting the debugger, but // maybe the default behavior should be to start executing until the // first breakpoint set. 
- _ = self.context.next(); + _ = self.context.next_into(); } self.server.send_event(Event::Initialized)?; @@ -176,7 +192,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); match granularity { SteppingGranularity::Instruction => self.handle_step(req)?, - _ => self.handle_next(req)?, + _ => self.handle_next_into(req)?, } } Command::StepOut(ref args) => { @@ -184,7 +200,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); match granularity { SteppingGranularity::Instruction => self.handle_step(req)?, - _ => self.handle_next(req)?, + _ => self.handle_next_out(req)?, } } Command::Next(ref args) => { @@ -192,18 +208,17 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); match granularity { SteppingGranularity::Instruction => self.handle_step(req)?, - _ => self.handle_next(req)?, + _ => self.handle_next_over(req)?, } } Command::Continue(_) => { self.handle_continue(req)?; } Command::Scopes(_) => { - // FIXME: this needs a proper implementation when we can - // show the parameters and variables - self.server.respond( - req.success(ResponseBody::Scopes(ScopesResponse { scopes: vec![] })), - )?; + self.handle_scopes(req)?; + } + Command::Variables(ref _args) => { + self.handle_variables(req)?; } _ => { eprintln!("ERROR: unhandled command: {:?}", req.command); @@ -213,37 +228,38 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { Ok(()) } + fn build_stack_trace(&self) -> Vec { + self.context + .get_source_call_stack() + .iter() + .enumerate() + .map(|(index, (opcode_location, source_location))| { + let line_number = + self.debug_artifact.location_line_number(*source_location).unwrap(); + let column_number = + 
self.debug_artifact.location_column_number(*source_location).unwrap(); + StackFrame { + id: index as i64, + name: format!("frame #{index}"), + source: Some(Source { + path: self.debug_artifact.file_map[&source_location.file] + .path + .to_str() + .map(String::from), + ..Source::default() + }), + line: line_number as i64, + column: column_number as i64, + instruction_pointer_reference: Some(opcode_location.to_string()), + ..StackFrame::default() + } + }) + .rev() + .collect() + } + fn handle_stack_trace(&mut self, req: Request) -> Result<(), ServerError> { - let opcode_location = self.context.get_current_opcode_location(); - let source_location = self.context.get_current_source_location(); - let frames = match source_location { - None => vec![], - Some(locations) => locations - .iter() - .enumerate() - .map(|(index, location)| { - let line_number = self.debug_artifact.location_line_number(*location).unwrap(); - let column_number = - self.debug_artifact.location_column_number(*location).unwrap(); - let ip_reference = opcode_location.map(|location| location.to_string()); - StackFrame { - id: index as i64, - name: format!("frame #{index}"), - source: Some(Source { - path: self.debug_artifact.file_map[&location.file] - .path - .to_str() - .map(String::from), - ..Source::default() - }), - line: line_number as i64, - column: column_number as i64, - instruction_pointer_reference: ip_reference, - ..StackFrame::default() - } - }) - .collect(), - }; + let frames = self.build_stack_trace(); let total_frames = Some(frames.len() as i64); self.server.respond(req.success(ResponseBody::StackTrace(StackTraceResponse { stack_frames: frames, @@ -315,9 +331,23 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { self.handle_execution_result(result) } - fn handle_next(&mut self, req: Request) -> Result<(), ServerError> { - let result = self.context.next(); - eprintln!("INFO: stepped by statement with result {result:?}"); + fn handle_next_into(&mut self, 
req: Request) -> Result<(), ServerError> { + let result = self.context.next_into(); + eprintln!("INFO: stepped into by statement with result {result:?}"); + self.server.respond(req.ack()?)?; + self.handle_execution_result(result) + } + + fn handle_next_out(&mut self, req: Request) -> Result<(), ServerError> { + let result = self.context.next_out(); + eprintln!("INFO: stepped out by statement with result {result:?}"); + self.server.respond(req.ack()?)?; + self.handle_execution_result(result) + } + + fn handle_next_over(&mut self, req: Request) -> Result<(), ServerError> { + let result = self.context.next_over(); + eprintln!("INFO: stepped over by statement with result {result:?}"); self.server.respond(req.ack()?)?; self.handle_execution_result(result) } @@ -548,6 +578,73 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { )?; Ok(()) } + + fn handle_scopes(&mut self, req: Request) -> Result<(), ServerError> { + self.server.respond(req.success(ResponseBody::Scopes(ScopesResponse { + scopes: vec![ + Scope { + name: String::from("Locals"), + variables_reference: ScopeReferences::Locals as i64, + ..Scope::default() + }, + Scope { + name: String::from("Witness Map"), + variables_reference: ScopeReferences::WitnessMap as i64, + ..Scope::default() + }, + ], + })))?; + Ok(()) + } + + fn build_local_variables(&self) -> Vec { + let mut variables: Vec<_> = self + .context + .get_variables() + .iter() + .map(|(name, value, _var_type)| Variable { + name: String::from(*name), + value: format!("{:?}", *value), + ..Variable::default() + }) + .collect(); + variables.sort_by(|a, b| a.name.partial_cmp(&b.name).unwrap()); + variables + } + + fn build_witness_map(&self) -> Vec { + self.context + .get_witness_map() + .clone() + .into_iter() + .map(|(witness, value)| Variable { + name: format!("_{}", witness.witness_index()), + value: format!("{value:?}"), + ..Variable::default() + }) + .collect() + } + + fn handle_variables(&mut self, req: Request) -> 
Result<(), ServerError> { + let Command::Variables(ref args) = req.command else { + unreachable!("handle_variables called on a different request"); + }; + let scope: ScopeReferences = args.variables_reference.into(); + let variables: Vec<_> = match scope { + ScopeReferences::Locals => self.build_local_variables(), + ScopeReferences::WitnessMap => self.build_witness_map(), + _ => { + eprintln!( + "handle_variables with an unknown variables_reference {}", + args.variables_reference + ); + vec![] + } + }; + self.server + .respond(req.success(ResponseBody::Variables(VariablesResponse { variables })))?; + Ok(()) + } } pub fn run_session( diff --git a/noir/tooling/debugger/src/foreign_calls.rs b/noir/tooling/debugger/src/foreign_calls.rs new file mode 100644 index 00000000000..01676adfef3 --- /dev/null +++ b/noir/tooling/debugger/src/foreign_calls.rs @@ -0,0 +1,138 @@ +use acvm::{ + acir::brillig::{ForeignCallParam, ForeignCallResult, Value}, + pwg::ForeignCallWaitInfo, +}; +use nargo::{ + artifacts::debug::{DebugArtifact, DebugVars}, + ops::{DefaultForeignCallExecutor, ForeignCallExecutor, NargoForeignCallResult}, +}; +use noirc_errors::debug_info::DebugVarId; +use noirc_printable_type::{ForeignCallError, PrintableType, PrintableValue}; + +pub(crate) enum DebugForeignCall { + VarAssign, + VarDrop, + MemberAssign(u32), + DerefAssign, +} + +impl DebugForeignCall { + pub(crate) fn lookup(op_name: &str) -> Option { + let member_pre = "__debug_member_assign_"; + if let Some(op_suffix) = op_name.strip_prefix(member_pre) { + let arity = + op_suffix.parse::().expect("failed to parse debug_member_assign arity"); + return Some(DebugForeignCall::MemberAssign(arity)); + } + match op_name { + "__debug_var_assign" => Some(DebugForeignCall::VarAssign), + "__debug_var_drop" => Some(DebugForeignCall::VarDrop), + "__debug_deref_assign" => Some(DebugForeignCall::DerefAssign), + _ => None, + } + } +} + +pub trait DebugForeignCallExecutor: ForeignCallExecutor { + fn get_variables(&self) -> 
Vec<(&str, &PrintableValue, &PrintableType)>; +} + +pub struct DefaultDebugForeignCallExecutor { + executor: DefaultForeignCallExecutor, + pub debug_vars: DebugVars, +} + +impl DefaultDebugForeignCallExecutor { + pub fn new(show_output: bool) -> Self { + Self { + executor: DefaultForeignCallExecutor::new(show_output, None), + debug_vars: DebugVars::default(), + } + } + + pub fn from_artifact(show_output: bool, artifact: &DebugArtifact) -> Self { + let mut ex = Self::new(show_output); + ex.load_artifact(artifact); + ex + } + + pub fn load_artifact(&mut self, artifact: &DebugArtifact) { + artifact.debug_symbols.iter().for_each(|info| { + self.debug_vars.insert_variables(&info.variables); + self.debug_vars.insert_types(&info.types); + }); + } +} + +impl DebugForeignCallExecutor for DefaultDebugForeignCallExecutor { + fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)> { + self.debug_vars.get_variables() + } +} + +fn debug_var_id(value: &Value) -> DebugVarId { + DebugVarId(value.to_u128() as u32) +} + +impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result { + let foreign_call_name = foreign_call.function.as_str(); + match DebugForeignCall::lookup(foreign_call_name) { + Some(DebugForeignCall::VarAssign) => { + let fcp_var_id = &foreign_call.inputs[0]; + if let ForeignCallParam::Single(var_id_value) = fcp_var_id { + let var_id = debug_var_id(var_id_value); + let values: Vec = + foreign_call.inputs[1..].iter().flat_map(|x| x.values()).collect(); + self.debug_vars.assign_var(var_id, &values); + } + Ok(ForeignCallResult::default().into()) + } + Some(DebugForeignCall::VarDrop) => { + let fcp_var_id = &foreign_call.inputs[0]; + if let ForeignCallParam::Single(var_id_value) = fcp_var_id { + let var_id = debug_var_id(var_id_value); + self.debug_vars.drop_var(var_id); + } + Ok(ForeignCallResult::default().into()) + } + Some(DebugForeignCall::MemberAssign(arity)) => { + if 
let Some(ForeignCallParam::Single(var_id_value)) = foreign_call.inputs.get(0) { + let arity = arity as usize; + let var_id = debug_var_id(var_id_value); + let n = foreign_call.inputs.len(); + let indexes: Vec = foreign_call.inputs[(n - arity)..n] + .iter() + .map(|fcp_v| { + if let ForeignCallParam::Single(v) = fcp_v { + v.to_u128() as u32 + } else { + panic!("expected ForeignCallParam::Single(v)"); + } + }) + .collect(); + let values: Vec = (0..n - 1 - arity) + .flat_map(|i| { + foreign_call.inputs.get(1 + i).map(|fci| fci.values()).unwrap_or(vec![]) + }) + .collect(); + self.debug_vars.assign_field(var_id, indexes, &values); + } + Ok(ForeignCallResult::default().into()) + } + Some(DebugForeignCall::DerefAssign) => { + let fcp_var_id = &foreign_call.inputs[0]; + let fcp_value = &foreign_call.inputs[1]; + if let ForeignCallParam::Single(var_id_value) = fcp_var_id { + let var_id = debug_var_id(var_id_value); + self.debug_vars.assign_deref(var_id, &fcp_value.values()); + } + Ok(ForeignCallResult::default().into()) + } + None => self.executor.execute(foreign_call), + } + } +} diff --git a/noir/tooling/debugger/src/lib.rs b/noir/tooling/debugger/src/lib.rs index 21834e44f93..35014f9a8c8 100644 --- a/noir/tooling/debugger/src/lib.rs +++ b/noir/tooling/debugger/src/lib.rs @@ -1,5 +1,6 @@ mod context; mod dap; +mod foreign_calls; mod repl; mod source_code_printer; diff --git a/noir/tooling/debugger/src/repl.rs b/noir/tooling/debugger/src/repl.rs index 92224ab785a..8441dbde9be 100644 --- a/noir/tooling/debugger/src/repl.rs +++ b/noir/tooling/debugger/src/repl.rs @@ -4,9 +4,11 @@ use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; use acvm::acir::native_types::{Witness, WitnessMap}; use acvm::{BlackBoxFunctionSolver, FieldElement}; -use nargo::{artifacts::debug::DebugArtifact, ops::DefaultForeignCallExecutor, NargoError}; +use crate::foreign_calls::DefaultDebugForeignCallExecutor; +use nargo::{artifacts::debug::DebugArtifact, NargoError}; use easy_repl::{command, 
CommandStatus, Repl}; +use noirc_printable_type::PrintableValueDisplay; use std::cell::RefCell; use crate::source_code_printer::print_source_code_location; @@ -27,21 +29,22 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, ) -> Self { + let foreign_call_executor = + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); let context = DebugContext::new( blackbox_solver, circuit, debug_artifact, initial_witness.clone(), - Box::new(DefaultForeignCallExecutor::new(true, None)), + foreign_call_executor, ); - Self { - context, - blackbox_solver, - circuit, - debug_artifact, - initial_witness, - last_result: DebugCommandResult::Ok, - } + let last_result = if context.get_current_opcode_location().is_none() { + // handle circuit with no opcodes + DebugCommandResult::Done + } else { + DebugCommandResult::Ok + }; + Self { context, blackbox_solver, circuit, debug_artifact, initial_witness, last_result } } pub fn show_current_vm_status(&self) { @@ -73,10 +76,45 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { ); } } + let locations = self.context.get_source_location_for_opcode_location(&location); + print_source_code_location(self.debug_artifact, &locations); + } + } + } - print_source_code_location(self.debug_artifact, &location); + fn show_stack_frame(&self, index: usize, location: &OpcodeLocation) { + let opcodes = self.context.get_opcodes(); + match location { + OpcodeLocation::Acir(instruction_pointer) => { + println!( + "Frame #{index}, opcode {}: {}", + instruction_pointer, opcodes[*instruction_pointer] + ) + } + OpcodeLocation::Brillig { acir_index, brillig_index } => { + let Opcode::Brillig(ref brillig) = opcodes[*acir_index] else { + unreachable!("Brillig location does not contain a Brillig block"); + }; + println!( + "Frame #{index}, opcode {}.{}: {:?}", + acir_index, brillig_index, brillig.bytecode[*brillig_index] + ); } } + let locations = 
self.context.get_source_location_for_opcode_location(location); + print_source_code_location(self.debug_artifact, &locations); + } + + pub fn show_current_call_stack(&self) { + let call_stack = self.context.get_call_stack(); + if call_stack.is_empty() { + println!("Finished execution. Call stack empty."); + return; + } + + for (i, frame_location) in call_stack.iter().enumerate() { + self.show_stack_frame(i, frame_location); + } } fn display_opcodes(&self) { @@ -193,9 +231,23 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } } - fn next(&mut self) { + fn next_into(&mut self) { + if self.validate_in_progress() { + let result = self.context.next_into(); + self.handle_debug_command_result(result); + } + } + + fn next_over(&mut self) { if self.validate_in_progress() { - let result = self.context.next(); + let result = self.context.next_over(); + self.handle_debug_command_result(result); + } + } + + fn next_out(&mut self) { + if self.validate_in_progress() { + let result = self.context.next_out(); self.handle_debug_command_result(result); } } @@ -211,12 +263,14 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { fn restart_session(&mut self) { let breakpoints: Vec = self.context.iterate_breakpoints().copied().collect(); + let foreign_call_executor = + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, self.debug_artifact)); self.context = DebugContext::new( self.blackbox_solver, self.circuit, self.debug_artifact, self.initial_witness.clone(), - Box::new(DefaultForeignCallExecutor::new(true, None)), + foreign_call_executor, ); for opcode_location in breakpoints { self.context.add_breakpoint(opcode_location); @@ -282,6 +336,15 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { self.context.write_brillig_memory(index, field_value); } + pub fn show_vars(&self) { + let vars = self.context.get_variables(); + for (var_name, value, var_type) in vars.iter() { + let printable_value = + PrintableValueDisplay::Plain((*value).clone(), 
(*var_type).clone()); + println!("{var_name}:{var_type:?} = {}", printable_value); + } + } + fn is_solved(&self) -> bool { self.context.is_solved() } @@ -329,11 +392,31 @@ pub fn run( command! { "step until a new source location is reached", () => || { - ref_context.borrow_mut().next(); + ref_context.borrow_mut().next_into(); Ok(CommandStatus::Done) } }, ) + .add( + "over", + command! { + "step until a new source location is reached without diving into function calls", + () => || { + ref_context.borrow_mut().next_over(); + Ok(CommandStatus::Done) + } + } + ) + .add( + "out", + command! { + "step until a new source location is reached and the current stack frame is finished", + () => || { + ref_context.borrow_mut().next_out(); + Ok(CommandStatus::Done) + } + } + ) .add( "continue", command! { @@ -434,6 +517,26 @@ pub fn run( } }, ) + .add( + "stacktrace", + command! { + "display the current stack trace", + () => || { + ref_context.borrow().show_current_call_stack(); + Ok(CommandStatus::Done) + } + }, + ) + .add( + "vars", + command! { + "show variable values available at this point in execution", + () => || { + ref_context.borrow_mut().show_vars(); + Ok(CommandStatus::Done) + } + }, + ) .build() .expect("Failed to initialize debugger repl"); diff --git a/noir/tooling/debugger/src/source_code_printer.rs b/noir/tooling/debugger/src/source_code_printer.rs index 1707f9066b7..b5ffdb12d01 100644 --- a/noir/tooling/debugger/src/source_code_printer.rs +++ b/noir/tooling/debugger/src/source_code_printer.rs @@ -1,4 +1,3 @@ -use acvm::acir::circuit::OpcodeLocation; use codespan_reporting::files::Files; use nargo::artifacts::debug::DebugArtifact; use noirc_errors::Location; @@ -31,13 +30,7 @@ struct LocationPrintContext { // Given a DebugArtifact and an OpcodeLocation, prints all the source code // locations the OpcodeLocation maps to, with some surrounding context and // visual aids to highlight the location itself. 
-pub(crate) fn print_source_code_location( - debug_artifact: &DebugArtifact, - location: &OpcodeLocation, -) { - let locations = debug_artifact.debug_symbols[0].opcode_location(location); - let Some(locations) = locations else { return; }; - +pub(crate) fn print_source_code_location(debug_artifact: &DebugArtifact, locations: &[Location]) { let locations = locations.iter(); for loc in locations { @@ -276,7 +269,8 @@ mod tests { let mut opcode_locations = BTreeMap::>::new(); opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); - let debug_symbols = vec![DebugInfo::new(opcode_locations)]; + let debug_symbols = + vec![DebugInfo::new(opcode_locations, BTreeMap::default(), BTreeMap::default())]; let debug_artifact = DebugArtifact::new(debug_symbols, &fm); let location_rendered: Vec<_> = render_location(&debug_artifact, &loc).collect(); diff --git a/noir/tooling/debugger/tests/debug.rs b/noir/tooling/debugger/tests/debug.rs index e8b17b8a7af..82872ce2739 100644 --- a/noir/tooling/debugger/tests/debug.rs +++ b/noir/tooling/debugger/tests/debug.rs @@ -27,7 +27,7 @@ mod tests { // Start debugger and test that it loads for the given program. 
dbg_session .execute( - &format!("{} debug --program-dir {}", nargo_bin, test_program_dir), + &format!("{} debug --program-dir {} --force-brillig", nargo_bin, test_program_dir), ".*\\Starting debugger.*", ) .expect("Could not start debugger"); diff --git a/noir/tooling/lsp/src/requests/test_run.rs b/noir/tooling/lsp/src/requests/test_run.rs index 135090d7ed9..0b88d814265 100644 --- a/noir/tooling/lsp/src/requests/test_run.rs +++ b/noir/tooling/lsp/src/requests/test_run.rs @@ -83,7 +83,7 @@ fn on_test_run_request_inner( let test_result = run_test( &state.solver, - &context, + &mut context, test_function, false, None, diff --git a/noir/tooling/nargo/Cargo.toml b/noir/tooling/nargo/Cargo.toml index cd97980b9e0..efd38a182e0 100644 --- a/noir/tooling/nargo/Cargo.toml +++ b/noir/tooling/nargo/Cargo.toml @@ -36,4 +36,3 @@ jsonrpc-http-server = "18.0" jsonrpc-core-client = "18.0" jsonrpc-derive = "18.0" jsonrpc-core = "18.0" -serial_test = "2.0" diff --git a/noir/tooling/nargo/src/artifacts/debug.rs b/noir/tooling/nargo/src/artifacts/debug.rs index 2e2d98f279e..a249ecb03ad 100644 --- a/noir/tooling/nargo/src/artifacts/debug.rs +++ b/noir/tooling/nargo/src/artifacts/debug.rs @@ -8,6 +8,7 @@ use std::{ ops::Range, }; +pub use super::debug_vars::DebugVars; use fm::{FileId, FileManager, PathString}; /// A Debug Artifact stores, for a given program, the debug info for every function @@ -86,7 +87,8 @@ impl DebugArtifact { let line_index = self.line_index(location.file, location_start)?; let line_span = self.line_range(location.file, line_index)?; - let line_length = line_span.end - (line_span.start + 1); + let line_length = + if line_span.end > line_span.start { line_span.end - (line_span.start + 1) } else { 0 }; let start_in_line = location_start - line_span.start; // The location might continue beyond the line, @@ -229,7 +231,8 @@ mod tests { let mut opcode_locations = BTreeMap::>::new(); opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); - let debug_symbols = 
vec![DebugInfo::new(opcode_locations)]; + let debug_symbols = + vec![DebugInfo::new(opcode_locations, BTreeMap::default(), BTreeMap::default())]; let debug_artifact = DebugArtifact::new(debug_symbols, &fm); let location_in_line = debug_artifact.location_in_line(loc).expect("Expected a range"); diff --git a/noir/tooling/nargo/src/artifacts/debug_vars.rs b/noir/tooling/nargo/src/artifacts/debug_vars.rs new file mode 100644 index 00000000000..b5559ca53c8 --- /dev/null +++ b/noir/tooling/nargo/src/artifacts/debug_vars.rs @@ -0,0 +1,117 @@ +use acvm::brillig_vm::brillig::Value; +use noirc_errors::debug_info::{ + DebugTypeId, DebugTypes, DebugVarId, DebugVariable, DebugVariables, +}; +use noirc_printable_type::{decode_value, PrintableType, PrintableValue}; +use std::collections::{HashMap, HashSet}; + +#[derive(Debug, Default, Clone)] +pub struct DebugVars { + variables: HashMap, + types: HashMap, + active: HashSet, + values: HashMap, +} + +impl DebugVars { + pub fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)> { + self.active + .iter() + .filter_map(|var_id| { + self.variables + .get(var_id) + .and_then(|debug_var| { + let Some(value) = self.values.get(var_id) else { return None; }; + let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { return None; }; + Some((debug_var.name.as_str(), value, ptype)) + }) + }) + .collect() + } + + pub fn insert_variables(&mut self, vars: &DebugVariables) { + self.variables.extend(vars.clone().into_iter()); + } + + pub fn insert_types(&mut self, types: &DebugTypes) { + self.types.extend(types.clone().into_iter()); + } + + pub fn assign_var(&mut self, var_id: DebugVarId, values: &[Value]) { + self.active.insert(var_id); + let type_id = &self.variables.get(&var_id).unwrap().debug_type_id; + let ptype = self.types.get(type_id).unwrap(); + self.values.insert(var_id, decode_value(&mut values.iter().map(|v| v.to_field()), ptype)); + } + + pub fn assign_field(&mut self, var_id: DebugVarId, indexes: Vec, values: 
&[Value]) { + let mut cursor: &mut PrintableValue = self + .values + .get_mut(&var_id) + .unwrap_or_else(|| panic!("value unavailable for var_id {var_id:?}")); + let cursor_type_id = &self + .variables + .get(&var_id) + .unwrap_or_else(|| panic!("variable {var_id:?} not found")) + .debug_type_id; + let mut cursor_type = self + .types + .get(cursor_type_id) + .unwrap_or_else(|| panic!("type unavailable for type id {cursor_type_id:?}")); + for index in indexes.iter() { + (cursor, cursor_type) = match (cursor, cursor_type) { + (PrintableValue::Vec(array), PrintableType::Array { length, typ }) => { + if let Some(len) = length { + if *index as u64 >= *len { + panic!("unexpected field index past array length") + } + if *len != array.len() as u64 { + panic!("type/array length mismatch") + } + } + (array.get_mut(*index as usize).unwrap(), &*Box::leak(typ.clone())) + } + ( + PrintableValue::Struct(field_map), + PrintableType::Struct { name: _name, fields }, + ) => { + if *index as usize >= fields.len() { + panic!("unexpected field index past struct field length") + } + let (key, typ) = fields.get(*index as usize).unwrap(); + (field_map.get_mut(key).unwrap(), typ) + } + (PrintableValue::Vec(array), PrintableType::Tuple { types }) => { + if *index >= types.len() as u32 { + panic!( + "unexpected field index ({index}) past tuple length ({})", + types.len() + ); + } + if types.len() != array.len() { + panic!("type/array length mismatch") + } + let typ = types.get(*index as usize).unwrap(); + (array.get_mut(*index as usize).unwrap(), typ) + } + _ => { + panic!("unexpected assign field of {cursor_type:?} type"); + } + }; + } + *cursor = decode_value(&mut values.iter().map(|v| v.to_field()), cursor_type); + self.active.insert(var_id); + } + + pub fn assign_deref(&mut self, _var_id: DebugVarId, _values: &[Value]) { + unimplemented![] + } + + pub fn get_type(&self, var_id: DebugVarId) -> Option<&PrintableType> { + self.variables.get(&var_id).and_then(|debug_var| 
self.types.get(&debug_var.debug_type_id)) + } + + pub fn drop_var(&mut self, var_id: DebugVarId) { + self.active.remove(&var_id); + } +} diff --git a/noir/tooling/nargo/src/artifacts/mod.rs b/noir/tooling/nargo/src/artifacts/mod.rs index 180a900fd81..c7b3736f90b 100644 --- a/noir/tooling/nargo/src/artifacts/mod.rs +++ b/noir/tooling/nargo/src/artifacts/mod.rs @@ -5,4 +5,5 @@ //! to generate them using these artifacts as a starting point. pub mod contract; pub mod debug; +mod debug_vars; pub mod program; diff --git a/noir/tooling/nargo/src/ops/compile.rs b/noir/tooling/nargo/src/ops/compile.rs index dccd2cedbf5..bd1850649c4 100644 --- a/noir/tooling/nargo/src/ops/compile.rs +++ b/noir/tooling/nargo/src/ops/compile.rs @@ -1,5 +1,8 @@ use fm::FileManager; -use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram}; +use noirc_driver::{ + link_to_debug_crate, CompilationResult, CompileOptions, CompiledContract, CompiledProgram, +}; +use noirc_frontend::debug::DebugInstrumenter; use noirc_frontend::hir::ParsedFiles; use crate::errors::CompileError; @@ -68,8 +71,29 @@ pub fn compile_program( package: &Package, compile_options: &CompileOptions, cached_program: Option, +) -> CompilationResult { + compile_program_with_debug_instrumenter( + file_manager, + parsed_files, + package, + compile_options, + cached_program, + DebugInstrumenter::default(), + ) +} + +pub fn compile_program_with_debug_instrumenter( + file_manager: &FileManager, + parsed_files: &ParsedFiles, + package: &Package, + compile_options: &CompileOptions, + cached_program: Option, + debug_instrumenter: DebugInstrumenter, ) -> CompilationResult { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); + link_to_debug_crate(&mut context, crate_id); + context.debug_instrumenter = debug_instrumenter; + noirc_driver::compile_main(&mut context, crate_id, compile_options, cached_program) } diff --git a/noir/tooling/nargo/src/ops/execute.rs 
b/noir/tooling/nargo/src/ops/execute.rs index 4fc7f7b599f..370393fea09 100644 --- a/noir/tooling/nargo/src/ops/execute.rs +++ b/noir/tooling/nargo/src/ops/execute.rs @@ -1,3 +1,4 @@ +use acvm::brillig_vm::brillig::ForeignCallResult; use acvm::pwg::{ACVMStatus, ErrorLocation, OpcodeResolutionError, ACVM}; use acvm::BlackBoxFunctionSolver; use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; @@ -5,7 +6,7 @@ use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; use crate::errors::ExecutionError; use crate::NargoError; -use super::foreign_calls::ForeignCallExecutor; +use super::foreign_calls::{ForeignCallExecutor, NargoForeignCallResult}; #[tracing::instrument(level = "trace", skip_all)] pub fn execute_circuit( @@ -16,6 +17,8 @@ pub fn execute_circuit( ) -> Result { let mut acvm = ACVM::new(blackbox_solver, &circuit.opcodes, initial_witness); + // This message should be resolved by a nargo foreign call only when we have an unsatisfied assertion. + let mut assert_message: Option = None; loop { let solver_status = acvm.solve(); @@ -37,7 +40,13 @@ pub fn execute_circuit( return Err(NargoError::ExecutionError(match call_stack { Some(call_stack) => { - if let Some(assert_message) = circuit.get_assert_message( + // First check whether we have a runtime assertion message that should be resolved on an ACVM failure + // If we do not have a runtime assertion message, we should check whether the circuit has any hardcoded + // messages associated with a specific `OpcodeLocation`. + // Otherwise return the provided opcode resolution error. 
+ if let Some(assert_message) = assert_message { + ExecutionError::AssertionFailed(assert_message.to_owned(), call_stack) + } else if let Some(assert_message) = circuit.get_assert_message( *call_stack.last().expect("Call stacks should not be empty"), ) { ExecutionError::AssertionFailed(assert_message.to_owned(), call_stack) @@ -50,7 +59,19 @@ pub fn execute_circuit( } ACVMStatus::RequiresForeignCall(foreign_call) => { let foreign_call_result = foreign_call_executor.execute(&foreign_call)?; - acvm.resolve_pending_foreign_call(foreign_call_result); + match foreign_call_result { + NargoForeignCallResult::BrilligOutput(foreign_call_result) => { + acvm.resolve_pending_foreign_call(foreign_call_result); + } + NargoForeignCallResult::ResolvedAssertMessage(message) => { + if assert_message.is_some() { + unreachable!("Resolving an assert message should happen only once as the VM should have failed"); + } + assert_message = Some(message); + + acvm.resolve_pending_foreign_call(ForeignCallResult::default()); + } + } } } } diff --git a/noir/tooling/nargo/src/ops/foreign_calls.rs b/noir/tooling/nargo/src/ops/foreign_calls.rs index e3a3174f8dc..f7f36c65c90 100644 --- a/noir/tooling/nargo/src/ops/foreign_calls.rs +++ b/noir/tooling/nargo/src/ops/foreign_calls.rs @@ -9,13 +9,69 @@ pub trait ForeignCallExecutor { fn execute( &mut self, foreign_call: &ForeignCallWaitInfo, - ) -> Result; + ) -> Result; +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum NargoForeignCallResult { + BrilligOutput(ForeignCallResult), + ResolvedAssertMessage(String), +} + +impl NargoForeignCallResult { + pub fn get_assert_message(self) -> Option { + match self { + Self::ResolvedAssertMessage(msg) => Some(msg), + _ => None, + } + } + + pub fn get_brillig_output(self) -> Option { + match self { + Self::BrilligOutput(foreign_call_result) => Some(foreign_call_result), + _ => None, + } + } +} + +impl From for NargoForeignCallResult { + fn from(value: ForeignCallResult) -> Self { + Self::BrilligOutput(value) 
+ } +} + +impl From for NargoForeignCallResult { + fn from(value: String) -> Self { + Self::ResolvedAssertMessage(value) + } +} + +impl From for NargoForeignCallResult { + fn from(value: Value) -> Self { + let foreign_call_result: ForeignCallResult = value.into(); + foreign_call_result.into() + } +} + +impl From> for NargoForeignCallResult { + fn from(values: Vec) -> Self { + let foreign_call_result: ForeignCallResult = values.into(); + foreign_call_result.into() + } +} + +impl From> for NargoForeignCallResult { + fn from(values: Vec) -> Self { + let foreign_call_result: ForeignCallResult = values.into(); + foreign_call_result.into() + } } /// This enumeration represents the Brillig foreign calls that are natively supported by nargo. /// After resolution of a foreign call, nargo will restart execution of the ACVM -pub(crate) enum ForeignCall { +pub enum ForeignCall { Print, + AssertMessage, CreateMock, SetMockParams, SetMockReturns, @@ -33,6 +89,7 @@ impl ForeignCall { pub(crate) fn name(&self) -> &'static str { match self { ForeignCall::Print => "print", + ForeignCall::AssertMessage => "assert_message", ForeignCall::CreateMock => "create_mock", ForeignCall::SetMockParams => "set_mock_params", ForeignCall::SetMockReturns => "set_mock_returns", @@ -44,6 +101,7 @@ impl ForeignCall { pub(crate) fn lookup(op_name: &str) -> Option { match op_name { "print" => Some(ForeignCall::Print), + "assert_message" => Some(ForeignCall::AssertMessage), "create_mock" => Some(ForeignCall::CreateMock), "set_mock_params" => Some(ForeignCall::SetMockParams), "set_mock_returns" => Some(ForeignCall::SetMockReturns), @@ -134,29 +192,49 @@ impl DefaultForeignCallExecutor { fn execute_print(foreign_call_inputs: &[ForeignCallParam]) -> Result<(), ForeignCallError> { let skip_newline = foreign_call_inputs[0].unwrap_value().is_zero(); - let display_values: PrintableValueDisplay = foreign_call_inputs - .split_first() - .ok_or(ForeignCallError::MissingForeignCallInputs)? 
- .1 - .try_into()?; - print!("{display_values}{}", if skip_newline { "" } else { "\n" }); + + let foreign_call_inputs = + foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?.1; + let display_string = Self::format_printable_value(foreign_call_inputs, skip_newline)?; + + print!("{display_string}"); + Ok(()) } + + fn execute_assert_message( + foreign_call_inputs: &[ForeignCallParam], + ) -> Result { + let display_string = Self::format_printable_value(foreign_call_inputs, true)?; + Ok(display_string.into()) + } + + fn format_printable_value( + foreign_call_inputs: &[ForeignCallParam], + skip_newline: bool, + ) -> Result { + let display_values: PrintableValueDisplay = foreign_call_inputs.try_into()?; + + let result = format!("{display_values}{}", if skip_newline { "" } else { "\n" }); + + Ok(result) + } } impl ForeignCallExecutor for DefaultForeignCallExecutor { fn execute( &mut self, foreign_call: &ForeignCallWaitInfo, - ) -> Result { + ) -> Result { let foreign_call_name = foreign_call.function.as_str(); match ForeignCall::lookup(foreign_call_name) { Some(ForeignCall::Print) => { if self.show_output { Self::execute_print(&foreign_call.inputs)?; } - Ok(ForeignCallResult { values: vec![] }) + Ok(ForeignCallResult::default().into()) } + Some(ForeignCall::AssertMessage) => Self::execute_assert_message(&foreign_call.inputs), Some(ForeignCall::CreateMock) => { let mock_oracle_name = Self::parse_string(&foreign_call.inputs[0]); assert!(ForeignCall::lookup(&mock_oracle_name).is_none()); @@ -164,7 +242,7 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { self.mocked_responses.push(MockedCall::new(id, mock_oracle_name)); self.last_mock_id += 1; - Ok(ForeignCallResult { values: vec![Value::from(id).into()] }) + Ok(Value::from(id).into()) } Some(ForeignCall::SetMockParams) => { let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; @@ -172,7 +250,7 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { .unwrap_or_else(|| 
panic!("Unknown mock id {}", id)) .params = Some(params.to_vec()); - Ok(ForeignCallResult { values: vec![] }) + Ok(ForeignCallResult::default().into()) } Some(ForeignCall::SetMockReturns) => { let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; @@ -180,7 +258,7 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { .unwrap_or_else(|| panic!("Unknown mock id {}", id)) .result = ForeignCallResult { values: params.to_vec() }; - Ok(ForeignCallResult { values: vec![] }) + Ok(ForeignCallResult::default().into()) } Some(ForeignCall::SetMockTimes) => { let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; @@ -194,12 +272,12 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { .unwrap_or_else(|| panic!("Unknown mock id {}", id)) .times_left = Some(times); - Ok(ForeignCallResult { values: vec![] }) + Ok(ForeignCallResult::default().into()) } Some(ForeignCall::ClearMock) => { let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?; self.mocked_responses.retain(|response| response.id != id); - Ok(ForeignCallResult { values: vec![] }) + Ok(ForeignCallResult::default().into()) } None => { let mock_response_position = self @@ -222,7 +300,7 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { } } - Ok(ForeignCallResult { values: result }) + Ok(result.into()) } (None, Some(external_resolver)) => { let encoded_params: Vec<_> = @@ -235,7 +313,7 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { let parsed_response: ForeignCallResult = response.result()?; - Ok(parsed_response) + Ok(parsed_response.into()) } (None, None) => panic!("Unknown foreign call {}", foreign_call_name), } @@ -255,7 +333,6 @@ mod tests { use jsonrpc_core::Result as RpcResult; use jsonrpc_derive::rpc; use jsonrpc_http_server::{Server, ServerBuilder}; - use serial_test::serial; use crate::ops::{DefaultForeignCallExecutor, ForeignCallExecutor}; @@ -291,15 +368,15 @@ mod tests { let mut io = jsonrpc_core::IoHandler::new(); 
io.extend_with(OracleResolverImpl.to_delegate()); + // Choosing port 0 results in a random port being assigned. let server = ServerBuilder::new(io) - .start_http(&"127.0.0.1:5555".parse().expect("Invalid address")) + .start_http(&"127.0.0.1:0".parse().expect("Invalid address")) .expect("Could not start server"); let url = format!("http://{}", server.address()); (server, url) } - #[serial] #[test] fn test_oracle_resolver_echo() { let (server, url) = build_oracle_server(); @@ -312,12 +389,11 @@ mod tests { }; let result = executor.execute(&foreign_call); - assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }); + assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }.into()); server.close(); } - #[serial] #[test] fn test_oracle_resolver_sum() { let (server, url) = build_oracle_server(); diff --git a/noir/tooling/nargo/src/ops/mod.rs b/noir/tooling/nargo/src/ops/mod.rs index 4f92faa73a4..23dd0db15b9 100644 --- a/noir/tooling/nargo/src/ops/mod.rs +++ b/noir/tooling/nargo/src/ops/mod.rs @@ -1,6 +1,10 @@ -pub use self::compile::{compile_contract, compile_program, compile_workspace}; +pub use self::compile::{ + compile_contract, compile_program, compile_program_with_debug_instrumenter, compile_workspace, +}; pub use self::execute::execute_circuit; -pub use self::foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor}; +pub use self::foreign_calls::{ + DefaultForeignCallExecutor, ForeignCall, ForeignCallExecutor, NargoForeignCallResult, +}; pub use self::optimize::{optimize_contract, optimize_program}; pub use self::transform::{transform_contract, transform_program}; diff --git a/noir/tooling/nargo/src/ops/test.rs b/noir/tooling/nargo/src/ops/test.rs index f38dcad0c2f..0929739a6ab 100644 --- a/noir/tooling/nargo/src/ops/test.rs +++ b/noir/tooling/nargo/src/ops/test.rs @@ -16,7 +16,7 @@ pub enum TestStatus { pub fn run_test( blackbox_solver: &B, - context: &Context, + context: &mut Context, test_function: TestFunction, 
show_output: bool, foreign_call_resolver_url: Option<&str>, diff --git a/noir/tooling/nargo_cli/Cargo.toml b/noir/tooling/nargo_cli/Cargo.toml index 6e022f090f0..57edbf5ae04 100644 --- a/noir/tooling/nargo_cli/Cargo.toml +++ b/noir/tooling/nargo_cli/Cargo.toml @@ -48,6 +48,7 @@ termcolor = "1.1.2" color-eyre = "0.6.2" tokio = { version = "1.0", features = ["io-std"] } dap.workspace = true +clap-markdown = { git = "https://github.com/noir-lang/clap-markdown", rev = "450d759532c88f0dba70891ceecdbc9ff8f25d2b", optional = true } # Backends backend-interface = { path = "../backend_interface" } @@ -83,3 +84,6 @@ harness = false [[bench]] name = "iai" harness = false + +[features] +codegen-docs = ["dep:clap-markdown"] \ No newline at end of file diff --git a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs index f25d0ac212b..7c7e6056901 100644 --- a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -3,7 +3,7 @@ use acvm::acir::native_types::WitnessMap; use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; -use nargo::ops::compile_program; +use nargo::ops::compile_program_with_debug_instrumenter; use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; @@ -24,6 +24,7 @@ use dap::types::Capabilities; use serde_json::Value; use super::compile_cmd::report_errors; +use super::debug_cmd::instrument_package_files; use super::fs::inputs::read_inputs_from_file; use crate::errors::CliError; @@ -87,11 +88,21 @@ fn load_and_compile_project( let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + let mut parsed_files = parse_all(&workspace_file_manager); - let 
compile_options = CompileOptions::default(); - let compilation_result = - compile_program(&workspace_file_manager, &parsed_files, package, &compile_options, None); + let compile_options = + CompileOptions { instrument_debug: true, force_brillig: true, ..CompileOptions::default() }; + + let debug_state = instrument_package_files(&mut parsed_files, &workspace_file_manager, package); + + let compilation_result = compile_program_with_debug_instrumenter( + &workspace_file_manager, + &parsed_files, + package, + &compile_options, + None, + debug_state, + ); let compiled_program = report_errors( compilation_result, diff --git a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs index a0bac3bdac1..b3ee9137530 100644 --- a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -4,9 +4,10 @@ use acvm::acir::native_types::WitnessMap; use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; +use fm::FileManager; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; -use nargo::ops::compile_program; +use nargo::ops::compile_program_with_debug_instrumenter; use nargo::package::Package; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; @@ -15,7 +16,9 @@ use noirc_abi::InputMap; use noirc_driver::{ file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, }; +use noirc_frontend::debug::DebugInstrumenter; use noirc_frontend::graph::CrateName; +use noirc_frontend::hir::ParsedFiles; use super::compile_cmd::report_errors; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; @@ -46,6 +49,10 @@ pub(crate) fn run( args: DebugCommand, config: NargoConfig, ) -> Result<(), CliError> { + // Override clap default for compiler option flag + let mut args = args.clone(); + args.compile_options.instrument_debug = 
true; + let toml_path = get_package_manifest(&config.program_dir)?; let selection = args.package.map_or(PackageSelection::DefaultOrAll, PackageSelection::Selected); let workspace = resolve_workspace_from_toml( @@ -61,7 +68,7 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + let mut parsed_files = parse_all(&workspace_file_manager); let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( @@ -70,12 +77,16 @@ pub(crate) fn run( return Ok(()); }; - let compilation_result = compile_program( + let debug_instrumenter = + instrument_package_files(&mut parsed_files, &workspace_file_manager, package); + + let compilation_result = compile_program_with_debug_instrumenter( &workspace_file_manager, &parsed_files, package, &args.compile_options, None, + debug_instrumenter, ); let compiled_program = report_errors( @@ -90,6 +101,36 @@ pub(crate) fn run( run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } +/// Add debugging instrumentation to all parsed files belonging to the package +/// being compiled +pub(crate) fn instrument_package_files( + parsed_files: &mut ParsedFiles, + file_manager: &FileManager, + package: &Package, +) -> DebugInstrumenter { + // Start off at the entry path and read all files in the parent directory. 
+ let entry_path_parent = package + .entry_path + .parent() + .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)) + .clone(); + + let mut debug_instrumenter = DebugInstrumenter::default(); + + for (file_id, parsed_file) in parsed_files.iter_mut() { + let file_path = + file_manager.path(*file_id).expect("Parsed file ID not found in file manager"); + for ancestor in file_path.ancestors() { + if ancestor == entry_path_parent { + // file is in package + debug_instrumenter.instrument_module(&mut parsed_file.0); + } + } + } + + debug_instrumenter +} + fn run_async( package: &Package, program: CompiledProgram, diff --git a/noir/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/tooling/nargo_cli/src/cli/export_cmd.rs index feaa55857e5..96b24796a2b 100644 --- a/noir/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/export_cmd.rs @@ -102,7 +102,7 @@ fn compile_exported_functions( exported_functions, |(function_name, function_id)| -> Result<(String, CompiledProgram), CompileError> { // TODO: We should to refactor how to deal with compilation errors to avoid this. - let program = compile_no_check(&context, compile_options, function_id, None, false) + let program = compile_no_check(&mut context, compile_options, function_id, None, false) .map_err(|error| vec![FileDiagnostic::from(error)]); let program = report_errors( diff --git a/noir/tooling/nargo_cli/src/cli/mod.rs b/noir/tooling/nargo_cli/src/cli/mod.rs index 01adbe9da98..c080061d44f 100644 --- a/noir/tooling/nargo_cli/src/cli/mod.rs +++ b/noir/tooling/nargo_cli/src/cli/mod.rs @@ -83,6 +83,9 @@ enum NargoCommand { } pub(crate) fn start_cli() -> eyre::Result<()> { + #[cfg(feature = "codegen-docs")] + return codegen_docs(); + let NargoCli { command, mut config } = NargoCli::parse(); // If the provided `program_dir` is relative, make it absolute by joining it to the current directory. 
@@ -126,3 +129,10 @@ pub(crate) fn start_cli() -> eyre::Result<()> { Ok(()) } + +#[cfg(feature = "codegen-docs")] +fn codegen_docs() -> eyre::Result<()> { + let markdown: String = clap_markdown::help_markdown::(); + println!("{markdown}"); + Ok(()) +} diff --git a/noir/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/tooling/nargo_cli/src/cli/test_cmd.rs index 9fee27b9172..503fd5afdd4 100644 --- a/noir/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/test_cmd.rs @@ -160,7 +160,7 @@ fn run_tests( let test_status = run_test( blackbox_solver, - &context, + &mut context, test_function, show_output, foreign_call_resolver_url, diff --git a/noir/tooling/nargo_fmt/src/items.rs b/noir/tooling/nargo_fmt/src/items.rs new file mode 100644 index 00000000000..7f998f45b59 --- /dev/null +++ b/noir/tooling/nargo_fmt/src/items.rs @@ -0,0 +1,117 @@ +use noirc_frontend::macros_api::Span; + +use crate::{ + utils::{comment_len, find_comment_end}, + visitor::{FmtVisitor, Shape}, +}; + +#[derive(Debug)] +pub(crate) struct Item { + pub(crate) leading: String, + pub(crate) value: String, + pub(crate) trailing: String, + pub(crate) different_line: bool, +} + +impl Item { + pub(crate) fn total_width(&self) -> usize { + comment_len(&self.leading) + self.value.chars().count() + comment_len(&self.trailing) + } + + pub(crate) fn is_multiline(&self) -> bool { + self.leading.contains('\n') || self.trailing.contains('\n') + } +} + +pub(crate) struct Items<'me, T> { + visitor: &'me FmtVisitor<'me>, + shape: Shape, + elements: std::iter::Peekable>, + last_position: u32, + end_position: u32, +} + +impl<'me, T: HasItem> Items<'me, T> { + pub(crate) fn new( + visitor: &'me FmtVisitor<'me>, + shape: Shape, + span: Span, + elements: Vec, + ) -> Self { + Self { + visitor, + shape, + last_position: span.start() + 1, + end_position: span.end() - 1, + elements: elements.into_iter().peekable(), + } + } +} + +impl Iterator for Items<'_, T> { + type Item = Item; + + fn next(&mut self) -> 
Option { + let element = self.elements.next()?; + let element_span = element.span(); + + let start = self.last_position; + let end = element_span.start(); + + let is_last = self.elements.peek().is_none(); + let next_start = self.elements.peek().map_or(self.end_position, |expr| expr.start()); + + let (leading, different_line) = self.leading(start, end); + let expr = element.format(self.visitor, self.shape); + let trailing = self.trailing(element_span.end(), next_start, is_last); + + Item { leading, value: expr, trailing, different_line }.into() + } +} + +impl<'me, T> Items<'me, T> { + pub(crate) fn leading(&mut self, start: u32, end: u32) -> (String, bool) { + let mut different_line = false; + + let leading = self.visitor.slice(start..end); + let leading_trimmed = leading.trim(); + + let starts_with_block_comment = leading_trimmed.starts_with("/*"); + let ends_with_block_comment = leading_trimmed.ends_with("*/"); + let starts_with_single_line_comment = leading_trimmed.starts_with("//"); + + if ends_with_block_comment { + let comment_end = leading_trimmed.rfind(|c| c == '/').unwrap(); + + if leading[comment_end..].contains('\n') { + different_line = true; + } + } else if starts_with_single_line_comment || starts_with_block_comment { + different_line = true; + }; + + (leading_trimmed.to_string(), different_line) + } + + pub(crate) fn trailing(&mut self, start: u32, end: u32, is_last: bool) -> String { + let slice = self.visitor.slice(start..end); + let comment_end = find_comment_end(slice, is_last); + let trailing = slice[..comment_end].trim_matches(',').trim(); + self.last_position = start + (comment_end as u32); + trailing.to_string() + } +} + +pub(crate) trait HasItem { + fn span(&self) -> Span; + + fn format(self, visitor: &FmtVisitor, shape: Shape) -> String; + + fn start(&self) -> u32 { + self.span().start() + } + + fn end(&self) -> u32 { + self.span().end() + } +} diff --git a/noir/tooling/nargo_fmt/src/lib.rs b/noir/tooling/nargo_fmt/src/lib.rs index 
d731934c3c3..0a7903f0ce1 100644 --- a/noir/tooling/nargo_fmt/src/lib.rs +++ b/noir/tooling/nargo_fmt/src/lib.rs @@ -20,6 +20,7 @@ /// in both placement and content during the formatting process. mod config; pub mod errors; +mod items; mod rewrite; mod utils; mod visitor; diff --git a/noir/tooling/nargo_fmt/src/rewrite.rs b/noir/tooling/nargo_fmt/src/rewrite.rs index 6a95eba8759..61792c7a7fa 100644 --- a/noir/tooling/nargo_fmt/src/rewrite.rs +++ b/noir/tooling/nargo_fmt/src/rewrite.rs @@ -1,11 +1,13 @@ mod array; mod expr; +mod imports; mod infix; mod parenthesized; mod typ; pub(crate) use array::rewrite as array; pub(crate) use expr::{rewrite as expr, rewrite_sub_expr as sub_expr}; +pub(crate) use imports::UseTree; pub(crate) use infix::rewrite as infix; pub(crate) use parenthesized::rewrite as parenthesized; pub(crate) use typ::rewrite as typ; diff --git a/noir/tooling/nargo_fmt/src/rewrite/array.rs b/noir/tooling/nargo_fmt/src/rewrite/array.rs index fc5b240f83e..77e5e756f19 100644 --- a/noir/tooling/nargo_fmt/src/rewrite/array.rs +++ b/noir/tooling/nargo_fmt/src/rewrite/array.rs @@ -1,7 +1,8 @@ use noirc_frontend::{hir::resolution::errors::Span, token::Token, Expression}; use crate::{ - utils::{Expr, FindToken}, + items::Item, + utils::FindToken, visitor::{expr::NewlineMode, FmtVisitor}, }; @@ -39,12 +40,12 @@ pub(crate) fn rewrite(mut visitor: FmtVisitor, array: Vec, array_spa let (leading, _) = visitor.format_comment_in_block(leading); let (trailing, _) = visitor.format_comment_in_block(trailing); - result.push(Expr { leading, value: item, trailing, different_line: false }); + result.push(Item { leading, value: item, trailing, different_line: false }); } let slice = visitor.slice(last_position..end_position); let (comment, _) = visitor.format_comment_in_block(slice); - result.push(Expr { + result.push(Item { leading: "".into(), value: "".into(), trailing: comment, diff --git a/noir/tooling/nargo_fmt/src/rewrite/imports.rs 
b/noir/tooling/nargo_fmt/src/rewrite/imports.rs new file mode 100644 index 00000000000..2788f778140 --- /dev/null +++ b/noir/tooling/nargo_fmt/src/rewrite/imports.rs @@ -0,0 +1,115 @@ +use noirc_frontend::{PathKind, UseTreeKind}; + +use crate::{ + items::Item, + visitor::{ + expr::{format_exprs, Tactic}, + FmtVisitor, Shape, + }, +}; + +#[derive(Debug)] +pub(crate) enum UseSegment { + Ident(String, Option), + List(Vec), + Dep, + Crate, +} + +impl UseSegment { + fn rewrite(&self, visitor: &FmtVisitor, shape: Shape) -> String { + match self { + UseSegment::Ident(ident, None) => ident.clone(), + UseSegment::Ident(ident, Some(rename)) => format!("{ident} as {rename}"), + UseSegment::List(use_tree_list) => { + let mut nested_shape = shape; + nested_shape.indent.block_indent(visitor.config); + + let items: Vec<_> = use_tree_list + .iter() + .map(|item| Item { + leading: String::new(), + value: item.rewrite(visitor, shape).clone(), + trailing: String::new(), + different_line: false, + }) + .collect(); + + let list_str = + format_exprs(visitor.config, Tactic::Mixed, false, items, nested_shape, true); + + if list_str.contains('\n') { + format!( + "{{\n{}{list_str}\n{}}}", + nested_shape.indent.to_string(), + shape.indent.to_string() + ) + } else { + format!("{{{list_str}}}") + } + } + UseSegment::Dep => "dep".into(), + UseSegment::Crate => "crate".into(), + } + } +} + +#[derive(Debug)] +pub(crate) struct UseTree { + path: Vec, +} + +impl UseTree { + pub(crate) fn from_ast(use_tree: noirc_frontend::UseTree) -> Self { + let mut result = UseTree { path: vec![] }; + + match use_tree.prefix.kind { + PathKind::Crate => result.path.push(UseSegment::Crate), + PathKind::Dep => result.path.push(UseSegment::Dep), + PathKind::Plain => {} + }; + + result.path.extend( + use_tree + .prefix + .segments + .into_iter() + .map(|segment| UseSegment::Ident(segment.to_string(), None)), + ); + + match use_tree.kind { + UseTreeKind::Path(name, alias) => { + result.path.push(UseSegment::Ident( + 
name.to_string(), + alias.map(|rename| rename.to_string()), + )); + } + UseTreeKind::List(list) => { + let segment = UseSegment::List(list.into_iter().map(UseTree::from_ast).collect()); + result.path.push(segment); + } + } + + result + } + + pub(crate) fn rewrite_top_level(&self, visitor: &FmtVisitor, shape: Shape) -> String { + format!("use {};", self.rewrite(visitor, shape)) + } + + fn rewrite(&self, visitor: &FmtVisitor, shape: Shape) -> String { + let mut result = String::new(); + + let mut iter = self.path.iter().peekable(); + while let Some(segment) = iter.next() { + let segment_str = segment.rewrite(visitor, shape); + result.push_str(&segment_str); + + if iter.peek().is_some() { + result.push_str("::"); + } + } + + result + } +} diff --git a/noir/tooling/nargo_fmt/src/utils.rs b/noir/tooling/nargo_fmt/src/utils.rs index 1160f01972f..7f8e4a39849 100644 --- a/noir/tooling/nargo_fmt/src/utils.rs +++ b/noir/tooling/nargo_fmt/src/utils.rs @@ -1,3 +1,4 @@ +use crate::items::HasItem; use crate::rewrite; use crate::visitor::{FmtVisitor, Shape}; use noirc_frontend::hir::resolution::errors::Span; @@ -21,103 +22,6 @@ pub(crate) fn comments(source: &str) -> impl Iterator + '_ { }) } -#[derive(Debug)] -pub(crate) struct Expr { - pub(crate) leading: String, - pub(crate) value: String, - pub(crate) trailing: String, - pub(crate) different_line: bool, -} - -impl Expr { - pub(crate) fn total_width(&self) -> usize { - comment_len(&self.leading) + self.value.chars().count() + comment_len(&self.trailing) - } - - pub(crate) fn is_multiline(&self) -> bool { - self.leading.contains('\n') || self.trailing.contains('\n') - } -} - -pub(crate) struct Exprs<'me, T> { - pub(crate) visitor: &'me FmtVisitor<'me>, - shape: Shape, - pub(crate) elements: std::iter::Peekable>, - pub(crate) last_position: u32, - pub(crate) end_position: u32, -} - -impl<'me, T: Item> Exprs<'me, T> { - pub(crate) fn new( - visitor: &'me FmtVisitor<'me>, - shape: Shape, - span: Span, - elements: Vec, - ) -> Self 
{ - Self { - visitor, - shape, - last_position: span.start() + 1, /*(*/ - end_position: span.end() - 1, /*)*/ - elements: elements.into_iter().peekable(), - } - } -} - -impl Iterator for Exprs<'_, T> { - type Item = Expr; - - fn next(&mut self) -> Option { - let element = self.elements.next()?; - let element_span = element.span(); - - let start = self.last_position; - let end = element_span.start(); - - let is_last = self.elements.peek().is_none(); - let next_start = self.elements.peek().map_or(self.end_position, |expr| expr.start()); - - let (leading, different_line) = self.leading(start, end); - let expr = element.format(self.visitor, self.shape); - let trailing = self.trailing(element_span.end(), next_start, is_last); - - Expr { leading, value: expr, trailing, different_line }.into() - } -} - -impl<'me, T> Exprs<'me, T> { - pub(crate) fn leading(&mut self, start: u32, end: u32) -> (String, bool) { - let mut different_line = false; - - let leading = self.visitor.slice(start..end); - let leading_trimmed = leading.trim(); - - let starts_with_block_comment = leading_trimmed.starts_with("/*"); - let ends_with_block_comment = leading_trimmed.ends_with("*/"); - let starts_with_single_line_comment = leading_trimmed.starts_with("//"); - - if ends_with_block_comment { - let comment_end = leading_trimmed.rfind(|c| c == '/').unwrap(); - - if leading[comment_end..].contains('\n') { - different_line = true; - } - } else if starts_with_single_line_comment || starts_with_block_comment { - different_line = true; - }; - - (leading_trimmed.to_string(), different_line) - } - - pub(crate) fn trailing(&mut self, start: u32, end: u32, is_last: bool) -> String { - let slice = self.visitor.slice(start..end); - let comment_end = find_comment_end(slice, is_last); - let trailing = slice[..comment_end].trim_matches(',').trim(); - self.last_position = start + (comment_end as u32); - trailing.to_string() - } -} - pub(crate) trait FindToken { fn find_token(&self, token: Token) -> Option; fn 
find_token_with(&self, f: impl Fn(&Token) -> bool) -> Option; @@ -183,7 +87,7 @@ pub(crate) fn find_comment_end(slice: &str, is_last: bool) -> usize { } } -fn comment_len(comment: &str) -> usize { +pub(crate) fn comment_len(comment: &str) -> usize { match comment { "" => 0, _ => { @@ -201,21 +105,7 @@ pub(crate) fn count_newlines(slice: &str) -> usize { bytecount::count(slice.as_bytes(), b'\n') } -pub(crate) trait Item { - fn span(&self) -> Span; - - fn format(self, visitor: &FmtVisitor, shape: Shape) -> String; - - fn start(&self) -> u32 { - self.span().start() - } - - fn end(&self) -> u32 { - self.span().end() - } -} - -impl Item for Expression { +impl HasItem for Expression { fn span(&self) -> Span { self.span } @@ -225,7 +115,7 @@ impl Item for Expression { } } -impl Item for (Ident, Expression) { +impl HasItem for (Ident, Expression) { fn span(&self) -> Span { let (name, value) = self; (name.span().start()..value.span.end()).into() @@ -245,7 +135,7 @@ impl Item for (Ident, Expression) { } } -impl Item for Param { +impl HasItem for Param { fn span(&self) -> Span { self.span } @@ -263,7 +153,7 @@ impl Item for Param { } } -impl Item for Ident { +impl HasItem for Ident { fn span(&self) -> Span { self.span() } diff --git a/noir/tooling/nargo_fmt/src/visitor.rs b/noir/tooling/nargo_fmt/src/visitor.rs index 85989db79d8..db084e5a49d 100644 --- a/noir/tooling/nargo_fmt/src/visitor.rs +++ b/noir/tooling/nargo_fmt/src/visitor.rs @@ -277,7 +277,7 @@ impl Indent { } } -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Copy, Debug, Default)] pub(crate) struct Shape { pub(crate) width: usize, pub(crate) indent: Indent, diff --git a/noir/tooling/nargo_fmt/src/visitor/expr.rs b/noir/tooling/nargo_fmt/src/visitor/expr.rs index 586d9583e32..9b36911b1af 100644 --- a/noir/tooling/nargo_fmt/src/visitor/expr.rs +++ b/noir/tooling/nargo_fmt/src/visitor/expr.rs @@ -5,8 +5,9 @@ use noirc_frontend::{ use super::{ExpressionType, FmtVisitor, Shape}; use crate::{ + items::{HasItem, Item, 
Items}, rewrite, - utils::{self, first_line_width, Expr, FindToken, Item}, + utils::{first_line_width, FindToken}, Config, }; @@ -81,8 +82,7 @@ impl FmtVisitor<'_> { let nested_indent = visitor.shape(); let exprs: Vec<_> = - utils::Exprs::new(&visitor, nested_indent, fields_span, constructor.fields) - .collect(); + Items::new(&visitor, nested_indent, fields_span, constructor.fields).collect(); let exprs = format_exprs( visitor.config, Tactic::HorizontalVertical, @@ -189,7 +189,7 @@ impl FmtVisitor<'_> { // TODO: fixme #[allow(clippy::too_many_arguments)] -pub(crate) fn format_seq( +pub(crate) fn format_seq( shape: Shape, prefix: &str, suffix: &str, @@ -206,7 +206,7 @@ pub(crate) fn format_seq( nested_indent.indent.block_indent(visitor.config); - let exprs: Vec<_> = utils::Exprs::new(&visitor, nested_indent, span, exprs).collect(); + let exprs: Vec<_> = Items::new(&visitor, nested_indent, span, exprs).collect(); let exprs = format_exprs(visitor.config, tactic, trailing_comma, exprs, nested_indent, reduce); wrap_exprs(prefix, suffix, exprs, nested_indent, shape, mode) @@ -249,11 +249,11 @@ pub(crate) fn format_parens( format_seq(shape, "(", ")", visitor, trailing_comma, exprs, span, tactic, mode, reduce) } -fn format_exprs( +pub(crate) fn format_exprs( config: &Config, tactic: Tactic, trailing_comma: bool, - exprs: Vec, + exprs: Vec, shape: Shape, reduce: bool, ) -> String { @@ -396,7 +396,7 @@ pub(crate) enum Tactic { impl Tactic { fn definitive( self, - exprs: &[Expr], + exprs: &[Item], short_width_threshold: usize, reduce: bool, ) -> DefinitiveTactic { @@ -449,7 +449,7 @@ enum DefinitiveTactic { } impl DefinitiveTactic { - fn reduce(self, exprs: &[Expr], short_array_element_width_threshold: usize) -> Self { + fn reduce(self, exprs: &[Item], short_array_element_width_threshold: usize) -> Self { match self { DefinitiveTactic::Vertical if no_long_exprs(exprs, short_array_element_width_threshold) => @@ -467,7 +467,7 @@ fn has_single_line_comment(slice: &str) -> bool { 
slice.trim_start().starts_with("//") } -fn no_long_exprs(exprs: &[Expr], max_width: usize) -> bool { +fn no_long_exprs(exprs: &[Item], max_width: usize) -> bool { exprs.iter().all(|expr| expr.value.len() <= max_width) } diff --git a/noir/tooling/nargo_fmt/src/visitor/item.rs b/noir/tooling/nargo_fmt/src/visitor/item.rs index eb2086168ba..13b4c09077c 100644 --- a/noir/tooling/nargo_fmt/src/visitor/item.rs +++ b/noir/tooling/nargo_fmt/src/visitor/item.rs @@ -6,7 +6,7 @@ use noirc_frontend::{ }; use crate::{ - rewrite, + rewrite::{self, UseTree}, utils::{last_line_contains_single_line_comment, last_line_used_width, FindToken}, visitor::expr::{format_seq, NewlineMode}, }; @@ -191,8 +191,13 @@ impl super::FmtVisitor<'_> { self.close_block((self.last_position..span.end() - 1).into()); self.last_position = span.end(); } - ItemKind::Import(_) - | ItemKind::Struct(_) + ItemKind::Import(use_tree) => { + let use_tree = + UseTree::from_ast(use_tree).rewrite_top_level(self, self.shape()); + self.push_rewrite(use_tree, span); + self.last_position = span.end(); + } + ItemKind::Struct(_) | ItemKind::Trait(_) | ItemKind::TraitImpl(_) | ItemKind::Impl(_) diff --git a/noir/tooling/nargo_fmt/src/visitor/stmt.rs b/noir/tooling/nargo_fmt/src/visitor/stmt.rs index 800a8656ef3..b414e5ec5c3 100644 --- a/noir/tooling/nargo_fmt/src/visitor/stmt.rs +++ b/noir/tooling/nargo_fmt/src/visitor/stmt.rs @@ -38,11 +38,10 @@ impl super::FmtVisitor<'_> { nested_shape.indent.block_indent(self.config); - let message = - message.map_or(String::new(), |message| format!(", \"{message}\"")); + let message = message.map_or(String::new(), |message| format!(", {message}")); let (callee, args) = match kind { - ConstrainKind::Assert => { + ConstrainKind::Assert | ConstrainKind::Constrain => { let assertion = rewrite::sub_expr(self, nested_shape, expr); let args = format!("{assertion}{message}"); @@ -60,12 +59,6 @@ impl super::FmtVisitor<'_> { unreachable!() } } - ConstrainKind::Constrain => { - let expr = 
rewrite::sub_expr(self, self.shape(), expr); - let constrain = format!("constrain {expr};"); - self.push_rewrite(constrain, span); - return; - } }; let args = wrap_exprs( diff --git a/noir/tooling/nargo_fmt/tests/expected/assert.nr b/noir/tooling/nargo_fmt/tests/expected/assert.nr new file mode 100644 index 00000000000..1f38e56b799 --- /dev/null +++ b/noir/tooling/nargo_fmt/tests/expected/assert.nr @@ -0,0 +1,4 @@ +fn main(x: Field) { + assert(x == 0, "with a message"); + assert_eq(x, 1); +} diff --git a/noir/tooling/nargo_fmt/tests/expected/contract.nr b/noir/tooling/nargo_fmt/tests/expected/contract.nr index 2e3f4d7c8c4..3b77440c081 100644 --- a/noir/tooling/nargo_fmt/tests/expected/contract.nr +++ b/noir/tooling/nargo_fmt/tests/expected/contract.nr @@ -5,18 +5,14 @@ contract Benchmarking { use dep::aztec::protocol_types::abis::function_selector::FunctionSelector; - use dep::value_note::{ - utils::{increment, decrement}, - value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}, - }; + use dep::value_note::{utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}}; use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - log::emit_unencrypted_log, - state_vars::{map::Map, public_state::PublicState, set::Set}, + log::emit_unencrypted_log, state_vars::{map::Map, public_state::PublicState, set::Set}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, - types::address::{AztecAddress}, + types::address::{AztecAddress} }; struct Storage { diff --git a/noir/tooling/nargo_fmt/tests/input/assert.nr b/noir/tooling/nargo_fmt/tests/input/assert.nr new file mode 100644 index 00000000000..f41e396c041 --- /dev/null +++ b/noir/tooling/nargo_fmt/tests/input/assert.nr @@ -0,0 +1,7 @@ +fn main(x: Field) { + assert(x == 0, "with a message"); + assert_eq( + x, + 1 + ); +} diff --git a/noir/tooling/noir_js/.gitignore 
b/noir/tooling/noir_js/.gitignore index 5b57ba1708d..a55d1794141 100644 --- a/noir/tooling/noir_js/.gitignore +++ b/noir/tooling/noir_js/.gitignore @@ -1,3 +1 @@ crs - -!test/noir_compiled_examples/*/target diff --git a/noir/tooling/noir_js/package.json b/noir/tooling/noir_js/package.json index 356909a1e35..5f83a03019b 100644 --- a/noir/tooling/noir_js/package.json +++ b/noir/tooling/noir_js/package.json @@ -37,7 +37,8 @@ "scripts": { "dev": "tsc-multi --watch", "build": "tsc-multi", - "test": "yarn test:node:esm && yarn test:node:cjs", + "test": "yarn test:compile_program && yarn test:node:esm && yarn test:node:cjs", + "test:compile_program": "./scripts/compile_test_programs.sh", "test:node:esm": "mocha --timeout 25000 --exit --config ./.mocharc.json", "test:node:cjs": "mocha --timeout 25000 --exit --config ./.mocharc.cjs.json", "prettier": "prettier 'src/**/*.ts'", diff --git a/noir/tooling/noir_js/scripts/compile_test_programs.sh b/noir/tooling/noir_js/scripts/compile_test_programs.sh new file mode 100755 index 00000000000..5257aaae696 --- /dev/null +++ b/noir/tooling/noir_js/scripts/compile_test_programs.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +rm -rf ./test/noir_compiled_examples/**/target +nargo --program-dir ./test/noir_compiled_examples/assert_lt compile --force +nargo --program-dir ./test/noir_compiled_examples/assert_msg_runtime compile --force diff --git a/noir/tooling/noir_js/src/witness_generation.ts b/noir/tooling/noir_js/src/witness_generation.ts index 1f233422061..cef1d817d9b 100644 --- a/noir/tooling/noir_js/src/witness_generation.ts +++ b/noir/tooling/noir_js/src/witness_generation.ts @@ -26,6 +26,12 @@ const defaultForeignCallHandler: ForeignCallHandler = async (name: string, args: // // If a user needs to print values then they should provide a custom foreign call handler. 
return []; + } else if (name == 'assert_message') { + // By default we do not do anything for `assert_message` foreign calls due to a need for formatting, + // however we provide an empty response in order to not halt execution. + // + // If a user needs to use dynamic assertion messages then they should provide a custom foreign call handler. + return []; } throw Error(`Unexpected oracle during execution: ${name}(${args.join(', ')})`); }; diff --git a/noir/tooling/noir_js/test/node/execute.test.ts b/noir/tooling/noir_js/test/node/execute.test.ts index bfaf80882ab..491bcb0dfc4 100644 --- a/noir/tooling/noir_js/test/node/execute.test.ts +++ b/noir/tooling/noir_js/test/node/execute.test.ts @@ -1,9 +1,11 @@ import assert_lt_json from '../noir_compiled_examples/assert_lt/target/assert_lt.json' assert { type: 'json' }; +import assert_msg_json from '../noir_compiled_examples/assert_msg_runtime/target/assert_msg_runtime.json' assert { type: 'json' }; import { Noir } from '@noir-lang/noir_js'; import { CompiledCircuit } from '@noir-lang/types'; import { expect } from 'chai'; const assert_lt_program = assert_lt_json as CompiledCircuit; +const assert_msg_runtime = assert_msg_json as CompiledCircuit; it('returns the return value of the circuit', async () => { const inputs = { @@ -14,3 +16,16 @@ it('returns the return value of the circuit', async () => { expect(returnValue).to.be.eq('0x05'); }); + +it('circuit with a dynamic assert message should fail on an assert failure not the foreign call handler', async () => { + const inputs = { + x: '10', + y: '5', + }; + try { + await new Noir(assert_msg_runtime).execute(inputs); + } catch (error) { + const knownError = error as Error; + expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); + } +}); diff --git a/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/src/main.nr b/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/src/main.nr index 693e7285736..a9aaae5f2f7 100644 
--- a/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/src/main.nr +++ b/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/src/main.nr @@ -4,6 +4,10 @@ fn main(x: u64, y: pub u64) -> pub u64 { // We include a println statement to show that noirJS will ignore this and continue execution std::println("foo"); + // A dynamic assertion message is used to show that noirJS will ignore the call and continue execution + // The assertion passes and thus the foreign call for resolving an assertion message should not be called. + assert(x < y, f"Expected x < y but got {x} < {y}"); + assert(x < y); x + y } diff --git a/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/assert_lt.json b/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/assert_lt.json deleted file mode 100644 index 5b511cdc140..00000000000 --- a/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/assert_lt.json +++ /dev/null @@ -1 +0,0 @@ -{"noir_version":"0.20.0+010fdb69616f47fc0a9f252a65a903316d3cbe80","hash":17538653710107541030,"backend":"acvm-backend-barretenberg","abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"}],"param_witnesses":{"x":[{"start":1,"end":2}],"y":[{"start":2,"end":3}]},"return_type":{"abi_type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"},"return_witnesses":[5]},"bytecode":"H4sIAAAAAAAA/9Wa627aMBiGHcKhBBLOR+0Hl+Ak5PQP9U6gBK3SNqoqWm9kFzzMYu2DujhbPjvFEkrsJu/7vJ+TFDAtQkiH/GnG6VXLtxvQr+V9Mx/jx5pE3Db5lpZqYahI96Ba91e+bee1g60J6objS90mehbqN8nfuUbSpLAeNVAjXg++bZxeD/m+k9esjlyz6+t3A/p1MFfYvkyzharpPrXzmsFmXPU3YL8F8jUV5HvA1aRMs42qGe2YhgVqwuvH2Tvg721QLwu5Xgbw5Lq8bynz9Tye8Vb+joCjozF/R5lveJ7/riR/V8DR1Zi/q8w3TJiGLclvCzhsjfltZb5hyjQcSX5HwOFozO8o8w3XTKMnyd8TcPQ05od8RVmtilnxff0t0+hL8vcFHH2N+SFfUVarYlZ838hnGgNJ/oGAY6Ax/0CZb3R+rgwl+YcCjqHG/ENlvtH5fdVIkn8k4BhpzA/5irJ274jVrpgV3ze
MmMZYkn8s4BhrzA/5irJaFbPi+3pPTGMiyT8RcEw05od8RVmtilnxfcPzXE0l+acCjqnG/FNlvmHANGaS/DMBx0xjfshXlNW+I9bRHbEOKmbF9w1jpjGX5J8LOOYa80O+oqzWHbH2KmbF9/XPnwUXkvwLAcdCY/6FMt9ozzSWkvxLAcdSY/4l8MVet3gAmV9en39kHKAOYPg+X2xlzQRjXOALOIeDtsj7hR60qpnk/eolAWNYPgbQ8k/fTK7TyEtd391SL9nFAV0HuzB2YzeIg70X+34ar+Mo2SURTdy1n7qHIPEPuVjt/7nc6wFBdDRtWFe46tgAE18D44/geLgCb4A5eQTniI4xPtBpgzF+vkMUXljQHEvTJJc/T+C6ZS8oE4+R8kmtk8vlWELwfxKg6qYqq9VErOet+v0jJ73idE3EzHXEeS1Rv5sPuM9839yaZ1quXdwntFxzMe+TBsF/7nDNj/6B8P0GuXz4848R/B3INsvS7y/ZKjuutvv96u05+7o6/kxfD9+Ob78Bhjydn08mAAA="} \ No newline at end of file diff --git a/noir/tooling/noir_js/test/noir_compiled_examples/assert_msg_runtime/Nargo.toml b/noir/tooling/noir_js/test/noir_compiled_examples/assert_msg_runtime/Nargo.toml new file mode 100644 index 00000000000..765f632ff74 --- /dev/null +++ b/noir/tooling/noir_js/test/noir_compiled_examples/assert_msg_runtime/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_msg_runtime" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/tooling/noir_js/test/noir_compiled_examples/assert_msg_runtime/src/main.nr b/noir/tooling/noir_js/test/noir_compiled_examples/assert_msg_runtime/src/main.nr new file mode 100644 index 00000000000..40e447cad02 --- /dev/null +++ b/noir/tooling/noir_js/test/noir_compiled_examples/assert_msg_runtime/src/main.nr @@ -0,0 +1,6 @@ +fn main(x: u64, y: pub u64) { + // A dynamic assertion message is used to show that noirJS will ignore the call and continue execution + // We need this assertion to fail as the `assert_message` oracle in Noir is only called + // upon a failing condition in an assert. 
+ assert(x < y, f"Expected x < y but got {x} < {y}"); +} diff --git a/noir/tooling/noir_js_backend_barretenberg/package.json b/noir/tooling/noir_js_backend_barretenberg/package.json index a0123883efd..b8ab24a73bd 100644 --- a/noir/tooling/noir_js_backend_barretenberg/package.json +++ b/noir/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 +42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.21.0", + "@aztec/bb.js": "0.23.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/noir/tooling/noir_js_backend_barretenberg/src/index.ts b/noir/tooling/noir_js_backend_barretenberg/src/index.ts index 4d5b6389404..d79b487c3cf 100644 --- a/noir/tooling/noir_js_backend_barretenberg/src/index.ts +++ b/noir/tooling/noir_js_backend_barretenberg/src/index.ts @@ -55,7 +55,6 @@ export class BarretenbergBackend implements Backend { this.acirComposer, this.acirUncompressedBytecode, gunzip(compressedWitness), - false, ); const splitIndex = proofWithPublicInputs.length - numBytesInProofWithoutPublicInputs; @@ -117,7 +116,7 @@ export class BarretenbergBackend implements Backend { await this.instantiate(); await this.api.acirInitVerificationKey(this.acirComposer); // TODO: Change once `@aztec/bb.js` version is updated to use methods without isRecursive flag - return await this.api.acirVerifyProof(this.acirComposer, proof, false); + return await this.api.acirVerifyProof(this.acirComposer, proof); } async destroy(): Promise { diff --git a/noir/yarn.lock b/noir/yarn.lock index 743068f1907..8da9bf2a5ca 100644 --- a/noir/yarn.lock +++ b/noir/yarn.lock @@ -235,9 +235,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.21.0": - version: 0.21.0 - resolution: "@aztec/bb.js@npm:0.21.0" +"@aztec/bb.js@npm:0.23.0": + version: 0.23.0 + resolution: "@aztec/bb.js@npm:0.23.0" dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -245,7 +245,7 @@ __metadata: tslib: 
^2.4.0 bin: bb.js: dest/node/main.js - checksum: a0fb97476f52025f3c31b7a5e890966ac375ed47c5cfd3434f5c3e4265af3c7566a162f37d6c56f394f44bfe4ba67e5002b7c5998ecc4f6abe70e04f5b8abe34 + checksum: e7bd32a20575a2834fa8a0500becdfae88a8f4a031c0f1796713d64a8ec90e516e16360f19031efeec59d51ede78c50422982ee08911c062bfa3142819af01fc languageName: node linkType: hard @@ -4435,7 +4435,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.21.0 + "@aztec/bb.js": 0.23.0 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 From e382c8aacc0b9d27982b0975a8ca726942d4d847 Mon Sep 17 00:00:00 2001 From: sirasistant Date: Fri, 9 Feb 2024 15:01:34 +0000 Subject: [PATCH 3/6] chore: updated lockfile --- avm-transpiler/Cargo.lock | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index b571e5ffbd4..a6289ae924e 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -760,6 +760,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "flate2" version = "1.0.28" @@ -1086,6 +1092,7 @@ dependencies = [ "noirc_errors", "noirc_evaluator", "noirc_frontend", + "noirc_macros", "rust-embed", "serde", "tracing", @@ -1102,6 +1109,7 @@ dependencies = [ "codespan-reporting", "flate2", "fm", + "noirc_printable_type", "serde", "serde_json", "serde_with", @@ -1135,6 +1143,7 @@ dependencies = [ "iter-extended", "noirc_errors", "noirc_printable_type", + "petgraph", "regex", "rustc-hash", "serde", @@ -1145,6 +1154,14 @@ dependencies = [ "tracing", ] +[[package]] +name = "noirc_macros" +version = "0.23.0" +dependencies = [ + "iter-extended", + "noirc_frontend", +] + [[package]] name = 
"noirc_printable_type" version = "0.23.0" @@ -1211,6 +1228,16 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +[[package]] +name = "petgraph" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" +dependencies = [ + "fixedbitset", + "indexmap 2.2.1", +] + [[package]] name = "pin-project-lite" version = "0.2.13" From 787db5c777f0409a8ef4541d8b091481d24b6421 Mon Sep 17 00:00:00 2001 From: sirasistant Date: Fri, 9 Feb 2024 15:45:55 +0000 Subject: [PATCH 4/6] chore: bump ssd size in build-system --- build-system/scripts/request_spot | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-system/scripts/request_spot b/build-system/scripts/request_spot index 3d669fed1df..6c4e1d1bb2d 100755 --- a/build-system/scripts/request_spot +++ b/build-system/scripts/request_spot @@ -57,7 +57,7 @@ launch_spec=$(cat < Date: Fri, 9 Feb 2024 16:14:17 +0000 Subject: [PATCH 5/6] fix: noir panic workaround --- .../compiler/noirc_frontend/src/hir/type_check/expr.rs | 10 ++++++---- yarn-project/yarn.lock | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs index 998abeedcec..0a3b276d641 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -153,11 +153,13 @@ impl<'interner> TypeChecker<'interner> { HirExpression::Call(call_expr) => { // Need to setup these flags here as `self` is borrowed mutably to type check the rest of the call expression // These flags are later used to type check calls to unconstrained functions from constrained functions - let current_func = self + let is_current_func_constrained = self .current_function - .expect("Can only 
have call expression inside of a function body"); - let func_mod = self.interner.function_modifiers(¤t_func); - let is_current_func_constrained = !func_mod.is_unconstrained; + .map(|func| { + let func_mod = self.interner.function_modifiers(&func); + !func_mod.is_unconstrained + }) + .unwrap_or(true); //TODO(4318) If there is no current function, then we assume it is constrained let is_unconstrained_call = self.is_unconstrained_call(&call_expr.func); self.check_if_deprecated(&call_expr.func); diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 5d903bf814d..746c77be12e 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -2790,7 +2790,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@portal:../noir/packages/backend_barretenberg::locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: - "@aztec/bb.js": 0.21.0 + "@aztec/bb.js": 0.23.0 "@noir-lang/types": 0.23.0 fflate: ^0.8.0 languageName: node From d928316111c17690114bdf9b44ca40405ca5cf30 Mon Sep 17 00:00:00 2001 From: Tom French Date: Tue, 6 Feb 2024 23:51:08 +0000 Subject: [PATCH 6/6] fix: rewrite noir contracts to avoid bug --- .../aztec-nr/safe-math/src/safe_u120.nr | 9 +++--- .../contracts/card_game_contract/src/game.nr | 32 ++++++++++++------- .../contracts/lending_contract/src/asset.nr | 13 +++++--- .../lending_contract/src/interfaces.nr | 7 +--- .../contracts/lending_contract/src/main.nr | 10 ++++-- .../price_feed_contract/src/asset.nr | 3 +- 6 files changed, 44 insertions(+), 30 deletions(-) diff --git a/yarn-project/aztec-nr/safe-math/src/safe_u120.nr b/yarn-project/aztec-nr/safe-math/src/safe_u120.nr index 4ef341bf839..74acd0db57d 100644 --- a/yarn-project/aztec-nr/safe-math/src/safe_u120.nr +++ b/yarn-project/aztec-nr/safe-math/src/safe_u120.nr @@ -25,7 +25,8 @@ impl Serialize for SafeU120 { impl Deserialize for SafeU120 { // This is safe when reading from storage IF only correct safeu120 was written to storage fn 
deserialize(fields: [Field; SAFE_U120_SERIALIZED_LEN]) -> SafeU120 { - SafeU120 { value: fields[0] as u120 } + let value = fields[0] as u120; + SafeU120 { value } } } @@ -50,9 +51,9 @@ impl SafeU120 { for i in 0..17 { assert(bytes[i] == 0, "Value too large for SafeU120"); } - Self { - value: value as u120 - } + + let value = value as u120; + Self { value } } pub fn is_zero( diff --git a/yarn-project/noir-contracts/contracts/card_game_contract/src/game.nr b/yarn-project/noir-contracts/contracts/card_game_contract/src/game.nr index 4e9c75a77f7..f7b388c42b8 100644 --- a/yarn-project/noir-contracts/contracts/card_game_contract/src/game.nr +++ b/yarn-project/noir-contracts/contracts/card_game_contract/src/game.nr @@ -19,6 +19,22 @@ impl PlayerEntry { } } +global PLAYER_SERIALIZED_LEN: Field = 3; + +impl Deserialize for PlayerEntry { + fn deserialize(fields: [Field; PLAYER_SERIALIZED_LEN]) -> PlayerEntry { + let address = AztecAddress::from_field(fields[0]); + let deck_strength = fields[1] as u32; + let points = fields[2] as u120; + + PlayerEntry { + address, + deck_strength, + points + } + } +} + global PLAYABLE_CARDS = 4; struct Game { @@ -57,18 +73,10 @@ impl Serialize for Game { impl Deserialize for Game { fn deserialize(fields: [Field; GAME_SERIALIZED_LEN]) -> Game { - let players = [ - PlayerEntry { - address: AztecAddress::from_field(fields[0]), - deck_strength: fields[1] as u32, - points: fields[2] as u120 - }, - PlayerEntry { - address: AztecAddress::from_field(fields[3]), - deck_strength: fields[4] as u32, - points: fields[5] as u120 - } - ]; + let player1 = PlayerEntry::deserialize([fields[0], fields[1], fields[2]]); + let player2 = PlayerEntry::deserialize([fields[3], fields[4], fields[5]]); + + let players = [player1, player2]; let rounds_cards = [ Card::from_field(fields[6]), Card::from_field(fields[7]), Card::from_field(fields[8]), Card::from_field(fields[9]) diff --git a/yarn-project/noir-contracts/contracts/lending_contract/src/asset.nr 
b/yarn-project/noir-contracts/contracts/lending_contract/src/asset.nr index 5fbbb83a855..a3ef3db1695 100644 --- a/yarn-project/noir-contracts/contracts/lending_contract/src/asset.nr +++ b/yarn-project/noir-contracts/contracts/lending_contract/src/asset.nr @@ -32,11 +32,16 @@ impl Deserialize for Asset { // Right now we are wasting so many writes. If changing last_updated_ts // we will end up rewriting all of them, wasting writes. fn deserialize(fields: [Field; ASSET_SERIALIZED_LEN]) -> Asset { + let interest_accumulator = fields[0] as u120; + let last_updated_ts = fields[1] as u120; + let loan_to_value = fields[2] as u120; + let oracle = AztecAddress::from_field(fields[3]); + Asset { - interest_accumulator: fields[0] as u120, - last_updated_ts: fields[1] as u120, - loan_to_value: fields[2] as u120, - oracle: AztecAddress::from_field(fields[3]) + interest_accumulator, + last_updated_ts, + loan_to_value, + oracle, } } } diff --git a/yarn-project/noir-contracts/contracts/lending_contract/src/interfaces.nr b/yarn-project/noir-contracts/contracts/lending_contract/src/interfaces.nr index 22af86c6457..c65d8033a9d 100644 --- a/yarn-project/noir-contracts/contracts/lending_contract/src/interfaces.nr +++ b/yarn-project/noir-contracts/contracts/lending_contract/src/interfaces.nr @@ -96,11 +96,6 @@ impl Lending { FunctionSelector::from_signature("update_accumulator()"), ); - Asset { - interest_accumulator: return_values[0] as u120, - last_updated_ts: return_values[1] as u120, - loan_to_value: return_values[2] as u120, - oracle: AztecAddress::from_field(return_values[3]), - } + Asset::deserialize(return_values) } } diff --git a/yarn-project/noir-contracts/contracts/lending_contract/src/main.nr b/yarn-project/noir-contracts/contracts/lending_contract/src/main.nr index 2dd9bc1c16a..f8587fe21fc 100644 --- a/yarn-project/noir-contracts/contracts/lending_contract/src/main.nr +++ b/yarn-project/noir-contracts/contracts/lending_contract/src/main.nr @@ -63,11 +63,14 @@ contract Lending 
{ assert(asset.last_updated_ts == 0); assert(asset.interest_accumulator == 0); + let last_updated_ts = context.timestamp() as u120; + let loan_to_value = loan_to_value as u120; + asset_loc.write( Asset { interest_accumulator: 1000000000, - last_updated_ts: context.timestamp() as u120, - loan_to_value: loan_to_value as u120, + last_updated_ts, + loan_to_value, oracle } ); @@ -82,7 +85,8 @@ contract Lending { let asset_loc = storage.assets.at(0); let mut asset = asset_loc.read(); - let dt: SafeU120 = SafeU120 { value: context.timestamp() as u120 }.sub(SafeU120 { value: asset.last_updated_ts }); + let timestamp = context.timestamp() as u120; + let dt: SafeU120 = SafeU120 { value: timestamp }.sub(SafeU120 { value: asset.last_updated_ts }); // Only update if time has passed. if (!dt.is_zero()) { diff --git a/yarn-project/noir-contracts/contracts/price_feed_contract/src/asset.nr b/yarn-project/noir-contracts/contracts/price_feed_contract/src/asset.nr index 047705a1ace..b795fdf68a3 100644 --- a/yarn-project/noir-contracts/contracts/price_feed_contract/src/asset.nr +++ b/yarn-project/noir-contracts/contracts/price_feed_contract/src/asset.nr @@ -14,6 +14,7 @@ impl Serialize for Asset { impl Deserialize for Asset { fn deserialize(fields: [Field; ASSET_SERIALIZED_LEN]) -> Asset { - Asset { price: fields[0] as u120 } + let price = fields[0] as u120; + Asset { price } } }