From 7b59eff33217a1822c1107f5728a8323f0b614a9 Mon Sep 17 00:00:00 2001
From: AztecBot
Date: Tue, 9 Apr 2024 16:03:25 +0000
Subject: [PATCH 1/2] feat: Sync from noir
 (https://github.com/AztecProtocol/aztec-packages/pull/5619)

Automated pull of development from the
[noir](https://github.com/noir-lang/noir) programming language, a
dependency of Aztec.

BEGIN_COMMIT_OVERRIDE
feat(nargo): Multiple circuits info for binary programs (https://github.com/noir-lang/noir/pull/4719)
chore: update condition for clearing warning comment on release PRs (https://github.com/noir-lang/noir/pull/4739)
chore(ci): fix cutting new versions of the docs (https://github.com/noir-lang/noir/pull/4737)
chore(ci): replace `yarn build:js:only` script (https://github.com/noir-lang/noir/pull/4735)
chore: update JS publish workflow to upload build artifacts correctly. (https://github.com/noir-lang/noir/pull/4734)
feat: add `remove_enable_side_effects` SSA pass (https://github.com/noir-lang/noir/pull/4224)
chore: update from vulnerable version of h2 (https://github.com/noir-lang/noir/pull/4714)
chore(ci): stop updating version list before cutting new docs version (https://github.com/noir-lang/noir/pull/4726)
chore: remove `FunctionInput::dummy` (https://github.com/noir-lang/noir/pull/4723)
chore: remove docker CI flow (https://github.com/noir-lang/noir/pull/4724)
fix: unknown slice lengths coming from as_slice (https://github.com/noir-lang/noir/pull/4725)
chore: remove unused env vars from `Cross.toml` (https://github.com/noir-lang/noir/pull/4717)
feat: improve nargo check cli with --override flag and feedback for existing files (https://github.com/noir-lang/noir/pull/4575)
feat: Allow slices to brillig entry points (https://github.com/noir-lang/noir/pull/4713)
chore: simplify how `acvm_backend.wasm` is embedded (https://github.com/noir-lang/noir/pull/4703)
fix(acvm): Mark outputs of Opcode::Call solvable (https://github.com/noir-lang/noir/pull/4708)
fix: Field comparisons (https://github.com/noir-lang/noir/pull/4704)
feat(acvm_js): Execute program (https://github.com/noir-lang/noir/pull/4694)
chore: simplify how blns is loaded into tests (https://github.com/noir-lang/noir/pull/4705)
fix(ssa): Do not use get_value_max_num_bits when we want pure type information (https://github.com/noir-lang/noir/pull/4700)
chore: remove conditional compilation around `acvm_js` package (https://github.com/noir-lang/noir/pull/4702)
feat(docs): Documenting noir codegen (https://github.com/noir-lang/noir/pull/4454)
chore: check for references to private functions during path resolution (https://github.com/noir-lang/noir/pull/4622)
chore: fix clippy errors (https://github.com/noir-lang/noir/pull/4684)
fix: Last use analysis & make it an SSA pass (https://github.com/noir-lang/noir/pull/4686)
feat: improve SSA type-awareness in EQ and MUL instructions (https://github.com/noir-lang/noir/pull/4691)
feat: improve optimisations on range constraints (https://github.com/noir-lang/noir/pull/4690)
chore: remove last traces of nix (https://github.com/noir-lang/noir/pull/4679)
chore: Use is_entry_point helper on RuntimeType (https://github.com/noir-lang/noir/pull/4678)
END_COMMIT_OVERRIDE

---------

Co-authored-by: sirasistant
Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com>
Co-authored-by: vezenovm
---
 .aztec-sync-commit | 2 +-
 .envrc | 2 +-
 .github/Cross.toml | 2 -
 .github/scripts/acvm_js-build.sh | 1 +
 .github/scripts/noir-wasm-build.sh | 1 +
 .github/scripts/noirc-abi-build.sh | 1 +
 .github/scripts/wasm-opt-install.sh | 2 +-
 .github/workflows/docker-test-flow.yml | 808 ------------------
 .github/workflows/docs-pr.yml | 8 +-
 .github/workflows/gates_report.yml | 2 +-
 .github/workflows/publish-docs.yml | 7 +-
 .github/workflows/publish-es-packages.yml | 8 +-
 .github/workflows/release.yml | 2 +-
 .github/workflows/test-js-packages.yml | 17 +-
 .gitignore | 6 -
 .vscode/extensions.json | 1 -
 .vscode/settings.json | 11 -
 Cargo.lock | 52 +-
 Dockerfile.ci | 30 -
 README.md | 20 -
 acvm-repo/acir/codegen/acir.cpp | 4 +
 acvm-repo/acir/src/circuit/mod.rs | 36 +-
 acvm-repo/acir/src/circuit/opcodes.rs | 7 +-
 .../opcodes/black_box_function_call.rs | 6 -
 .../acir/src/native_types/witness_stack.rs | 6 +-
 .../acir/tests/test_program_serialization.rs | 105 +++
 .../acvm/src/compiler/transformers/mod.rs | 6 +-
 acvm-repo/acvm/src/pwg/memory_op.rs | 15 +-
 acvm-repo/acvm/src/pwg/mod.rs | 28 +-
 acvm-repo/acvm_js/Cargo.toml | 7 +-
 acvm-repo/acvm_js/build.sh | 45 +-
 acvm-repo/acvm_js/buildPhaseCargoCommand.sh | 42 -
 acvm-repo/acvm_js/installPhase.sh | 10 -
 acvm-repo/acvm_js/package.json | 4 +-
 acvm-repo/acvm_js/src/black_box_solvers.rs | 2 -
 acvm-repo/acvm_js/src/compression.rs | 40 +-
 acvm-repo/acvm_js/src/execute.rs | 232 +++--
 acvm-repo/acvm_js/src/js_witness_stack.rs | 71 ++
 acvm-repo/acvm_js/src/lib.rs | 52 +-
 .../acvm_js/test/node/execute_circuit.test.ts | 37 +
 .../test/node/witness_conversion.test.ts | 15 +-
 .../acvm_js/test/shared/nested_acir_call.ts | 59 ++
 acvm-repo/bn254_blackbox_solver/Cargo.toml | 21 +-
 acvm-repo/bn254_blackbox_solver/build.rs | 14 -
 acvm-repo/bn254_blackbox_solver/src/lib.rs | 13 +-
 .../src/{ => wasm}/acvm_backend.wasm | Bin
 .../bn254_blackbox_solver/src/wasm/mod.rs | 15 +-
 acvm-repo/brillig_vm/src/black_box.rs | 5 +-
 acvm-repo/brillig_vm/src/lib.rs | 75 +-
 aztec_macros/src/lib.rs | 69 +-
 .../compute_note_hash_and_nullifier.rs | 159 ++--
 aztec_macros/src/transforms/events.rs | 10 +-
 aztec_macros/src/transforms/functions.rs | 246 ++++--
 aztec_macros/src/transforms/note_interface.rs | 135 ++-
 aztec_macros/src/transforms/storage.rs | 254 +++++-
 aztec_macros/src/utils/ast_utils.rs | 2 +
 aztec_macros/src/utils/errors.rs | 18 +
 aztec_macros/src/utils/hir_utils.rs | 206 +++--
 compiler/noirc_driver/build.rs | 3 +-
 compiler/noirc_driver/src/abi_gen.rs | 59 +-
 compiler/noirc_driver/src/contract.rs | 18 +-
 compiler/noirc_driver/src/lib.rs | 77 +-
 compiler/noirc_driver/src/program.rs | 2 +
 .../src/brillig/brillig_gen/brillig_fn.rs | 19 +-
 .../src/brillig/brillig_ir/artifact.rs | 7 +-
 .../src/brillig/brillig_ir/entry_point.rs | 60 +-
 compiler/noirc_evaluator/src/ssa.rs | 95 +-
 .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 4 +-
 .../ssa/acir_gen/acir_ir/generated_acir.rs | 6 +-
 .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 133 +--
 .../src/ssa/function_builder/mod.rs | 4 +-
 compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 24 +
 compiler/noirc_evaluator/src/ssa/ir/dom.rs | 9 +-
 .../noirc_evaluator/src/ssa/ir/instruction.rs | 26 +-
 .../src/ssa/ir/instruction/binary.rs | 7 +
 .../src/ssa/ir/instruction/call.rs | 24 +-
 .../noirc_evaluator/src/ssa/ir/post_order.rs | 7 +-
 .../noirc_evaluator/src/ssa/ir/printer.rs | 14 +-
 .../noirc_evaluator/src/ssa/opt/array_set.rs | 104 +++
 .../noirc_evaluator/src/ssa/opt/array_use.rs | 57 --
 .../src/ssa/opt/as_slice_length.rs | 71 ++
 .../src/ssa/opt/bubble_up_constrains.rs | 1 -
 .../src/ssa/opt/constant_folding.rs | 1 -
 compiler/noirc_evaluator/src/ssa/opt/die.rs | 1 -
 .../src/ssa/opt/flatten_cfg.rs | 2 +-
 .../ssa/opt/flatten_cfg/branch_analysis.rs | 7 +-
 .../noirc_evaluator/src/ssa/opt/inlining.rs | 15 +-
 .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 1 -
 compiler/noirc_evaluator/src/ssa/opt/mod.rs | 4 +-
 compiler/noirc_evaluator/src/ssa/opt/rc.rs | 8 +-
 .../src/ssa/opt/remove_enable_side_effects.rs | 167 ++++
 .../src/ssa/opt/simplify_cfg.rs | 1 -
 .../noirc_evaluator/src/ssa/opt/unrolling.rs | 7 +-
 compiler/noirc_frontend/build.rs | 15 -
 compiler/noirc_frontend/src/ast/statement.rs | 13 +-
 compiler/noirc_frontend/src/debug/mod.rs | 3 +
 .../src/hir/def_collector/dc_crate.rs | 37 +-
 .../src/hir/def_collector/dc_mod.rs | 9 +-
 .../noirc_frontend/src/hir/def_map/mod.rs | 52 +-
 compiler/noirc_frontend/src/hir/mod.rs | 2 +-
 .../src/hir/resolution/errors.rs | 22 +-
 .../src/hir/resolution/import.rs | 231 +++--
 .../src/hir/resolution/path_resolver.rs | 24 +-
 .../src/hir/resolution/resolver.rs | 104 +--
 .../src/hir/resolution/traits.rs | 17 +-
 .../noirc_frontend/src/hir/type_check/mod.rs | 8 +-
 compiler/noirc_frontend/src/hir_def/stmt.rs | 2 +
 .../src/{ => lexer}/blns/LICENSE | 0
 .../src/{ => lexer}/blns/README.md | 0
 .../src/{ => lexer}/blns/blns.base64.json | 0
 compiler/noirc_frontend/src/lexer/lexer.rs | 4 +-
 compiler/noirc_frontend/src/lexer/token.rs | 7 +-
 compiler/noirc_frontend/src/lib.rs | 10 -
 compiler/noirc_frontend/src/node_interner.rs | 12 +-
 compiler/noirc_frontend/src/parser/parser.rs | 15 +-
 .../src/parser/parser/attributes.rs | 23 +
 .../src/parser/parser/literals.rs | 1 +
 .../src/parser/parser/structs.rs | 32 +-
 compiler/wasm/README.md | 14 -
 compiler/wasm/src/compile.rs | 9 +-
 compiler/wasm/src/compile_new.rs | 2 +-
 compiler/wasm/src/types/noir_artifact.ts | 61 +-
 default.nix | 13 -
 .../installation/other_install_methods.md | 160 +---
 .../getting_started/tooling/noir_codegen.md | 113 +++
 docs/package.json | 5 +-
 docs/scripts/cut_version.sh | 16 +
 flake.nix | 260 ------
 noir_stdlib/src/field/bn254.nr | 37 +-
 package.json | 15 +-
 release-please-config.json | 3 +-
 shell.nix | 13 -
 .../fold_non_contract_method/Nargo.toml | 7 +
 .../fold_non_contract_method/src/main.nr | 18 +
 .../array_to_slice_constant_length/Nargo.toml | 7 +
 .../Prover.toml | 1 +
 .../src/main.nr | 10 +
 .../brillig_slice_input/Nargo.toml | 6 +
 .../brillig_slice_input/src/main.nr | 40 +
 .../fold_call_witness_condition/Nargo.toml | 7 +
 .../fold_call_witness_condition/Prover.toml | 3 +
 .../fold_call_witness_condition/src/main.nr | 16 +
 tooling/debugger/build.rs | 3 +-
 tooling/debugger/ignored-tests.txt | 1 +
 tooling/debugger/src/context.rs | 7 +-
 tooling/debugger/src/foreign_calls.rs | 2 +-
 tooling/nargo/src/artifacts/contract.rs | 24 +-
 tooling/nargo/src/artifacts/debug_vars.rs | 4 +-
 tooling/nargo/src/artifacts/program.rs | 4 +
 tooling/nargo_cli/build.rs | 5 +-
 tooling/nargo_cli/src/cli/check_cmd.rs | 43 +-
 tooling/nargo_cli/src/cli/info_cmd.rs | 62 +-
 tooling/nargo_fmt/build.rs | 4 +-
 tooling/noirc_abi/src/lib.rs | 58 +-
 tooling/noirc_abi/src/serialization.rs | 34 +-
 tooling/noirc_abi_wasm/README.md | 14 -
 tooling/noirc_abi_wasm/build.rs | 3 +-
 tooling/noirc_abi_wasm/build.sh | 45 +-
 .../noirc_abi_wasm/buildPhaseCargoCommand.sh | 39 -
 tooling/noirc_abi_wasm/installPhase.sh | 10 -
 tooling/noirc_abi_wasm/package.json | 4 +-
 wasm-bindgen-cli.nix | 43 -
 162 files changed, 3145 insertions(+), 2842 deletions(-)
 delete mode 100644 .github/workflows/docker-test-flow.yml
 delete mode 100644 Dockerfile.ci
 delete mode 100755 acvm-repo/acvm_js/buildPhaseCargoCommand.sh
 delete mode 100755 acvm-repo/acvm_js/installPhase.sh
 create mode 100644 acvm-repo/acvm_js/src/js_witness_stack.rs
 create mode 100644 acvm-repo/acvm_js/test/shared/nested_acir_call.ts
 delete mode 100644 acvm-repo/bn254_blackbox_solver/build.rs
 rename acvm-repo/bn254_blackbox_solver/src/{ => wasm}/acvm_backend.wasm (100%)
 create mode 100644 compiler/noirc_evaluator/src/ssa/opt/array_set.rs
 delete mode 100644 compiler/noirc_evaluator/src/ssa/opt/array_use.rs
 create mode 100644 compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs
 create mode 100644 compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs
 delete mode 100644 compiler/noirc_frontend/build.rs
 rename compiler/noirc_frontend/src/{ => lexer}/blns/LICENSE (100%)
 rename compiler/noirc_frontend/src/{ => lexer}/blns/README.md (100%)
 rename compiler/noirc_frontend/src/{ => lexer}/blns/blns.base64.json (100%)
 delete mode 100644 default.nix
 create mode 100644 docs/docs/getting_started/tooling/noir_codegen.md
 create mode 100755 docs/scripts/cut_version.sh
 delete mode 100644 flake.nix
 delete mode 100644 shell.nix
 create mode 100644 test_programs/compile_success_contract/fold_non_contract_method/Nargo.toml
 create mode 100644 test_programs/compile_success_contract/fold_non_contract_method/src/main.nr
 create mode 100644 test_programs/execution_success/array_to_slice_constant_length/Nargo.toml
 create mode 100644 test_programs/execution_success/array_to_slice_constant_length/Prover.toml
 create mode 100644 test_programs/execution_success/array_to_slice_constant_length/src/main.nr
 create mode 100644 test_programs/execution_success/brillig_slice_input/Nargo.toml
 create mode 100644 test_programs/execution_success/brillig_slice_input/src/main.nr
 create mode 100644 test_programs/execution_success/fold_call_witness_condition/Nargo.toml
 create mode 100644 test_programs/execution_success/fold_call_witness_condition/Prover.toml
 create mode 100644 test_programs/execution_success/fold_call_witness_condition/src/main.nr
 delete mode 100755 tooling/noirc_abi_wasm/buildPhaseCargoCommand.sh
 delete mode 100755 tooling/noirc_abi_wasm/installPhase.sh
 delete mode 100644 wasm-bindgen-cli.nix

diff --git a/.aztec-sync-commit b/.aztec-sync-commit
index 540b447693c..ec4c854ef2b 100644
--- a/.aztec-sync-commit
+++ b/.aztec-sync-commit
@@ -1 +1 @@
-bb719200034e3bc6db09fb56538dadca4203abf4
+ff28080bcfb946177010960722925973ee19646b
diff --git a/.envrc b/.envrc
index b2f868b1898..494e7adf16c 100644
--- a/.envrc
+++ b/.envrc
@@ -17,4 +17,4 @@ if [[ -z "${SKIP_NIX:-}" ]] && has nix; then
     use nix
   fi
 
-fi
+fi
\ No newline at end of file
diff --git a/.github/Cross.toml b/.github/Cross.toml
index 6520a288d5d..d8516b9ae09 100644
--- a/.github/Cross.toml
+++ b/.github/Cross.toml
@@ -2,8 +2,6 @@
 passthrough = [
   "HOME",
   "RUST_BACKTRACE",
-  "BARRETENBERG_BIN_DIR",
-  "BLNS_JSON_PATH"
 ]
 volumes = [
   "HOME",
diff --git a/.github/scripts/acvm_js-build.sh b/.github/scripts/acvm_js-build.sh
index 95bd1efc8b9..e79967e3a8f 100755
--- a/.github/scripts/acvm_js-build.sh
+++ b/.github/scripts/acvm_js-build.sh
@@ -2,4 +2,5 @@
 set -eu
 
 .github/scripts/wasm-bindgen-install.sh
+.github/scripts/wasm-opt-install.sh
 yarn workspace @noir-lang/acvm_js build
diff --git a/.github/scripts/noir-wasm-build.sh b/.github/scripts/noir-wasm-build.sh
index 48e3ad73769..6c0336ee0c5 100755
--- a/.github/scripts/noir-wasm-build.sh
+++ b/.github/scripts/noir-wasm-build.sh
@@ -2,5
+2,6 @@
 set -eu
 
 .github/scripts/wasm-pack-install.sh
+.github/scripts/wasm-opt-install.sh
 yarn workspace @noir-lang/types build
 yarn workspace @noir-lang/noir_wasm build
diff --git a/.github/scripts/noirc-abi-build.sh b/.github/scripts/noirc-abi-build.sh
index 23b8393088e..99de474eb75 100755
--- a/.github/scripts/noirc-abi-build.sh
+++ b/.github/scripts/noirc-abi-build.sh
@@ -2,4 +2,5 @@
 set -eu
 
 .github/scripts/wasm-bindgen-install.sh
+.github/scripts/wasm-opt-install.sh
 yarn workspace @noir-lang/noirc_abi build
diff --git a/.github/scripts/wasm-opt-install.sh b/.github/scripts/wasm-opt-install.sh
index cbdeb8f2bfe..218778edac6 100755
--- a/.github/scripts/wasm-opt-install.sh
+++ b/.github/scripts/wasm-opt-install.sh
@@ -5,4 +5,4 @@
 cd $(dirname "$0")
 
 ./cargo-binstall-install.sh
-cargo-binstall wasm-opt --version 0.116.0 -y
+cargo-binstall wasm-opt --version 0.116.0 -y --force
diff --git a/.github/workflows/docker-test-flow.yml b/.github/workflows/docker-test-flow.yml
deleted file mode 100644
index c8b4f53fadd..00000000000
--- a/.github/workflows/docker-test-flow.yml
+++ /dev/null
@@ -1,808 +0,0 @@
-name: Test Nargo and JS packages
-
-on:
-  push:
-    branches:
-      - 'master'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  build-base-nargo:
-    name: Build base nargo docker image
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Get current date
-        id: date
-        run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE
-      - name: prepare docker images tags
-        id: prep
-        run: |
-          REGISTRY="ghcr.io"
-          IMG_RAW="${REGISTRY}/${{ github.repository }}"
-          IMAGE=$(echo "$IMG_RAW" | tr '[:upper:]' '[:lower:]')
-          TAGS="${IMAGE}:${{ github.sha }}-nargo"
-          FULL_TAGS="${TAGS},${IMAGE}:latest-nargo,${IMAGE}:v${{ steps.date.outputs.date }}-nargo"
-          echo "tags=$FULL_TAGS" >> $GITHUB_OUTPUT
-          echo "image=$IMAGE" >> $GITHUB_OUTPUT
-      - name: Set up Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v3
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build nargo base dockerfile
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          file: Dockerfile.ci
-          tags: ${{ steps.prep.outputs.tags }}
-          target: base-nargo
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-          push: true
-
-  build-base-js:
-    name: Build base js docker image
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Get current date
-        id: date
-        run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE
-      - name: Prepare docker image tags
-        id: prep
-        run: |
-          REGISTRY="ghcr.io"
-          IMG_RAW="${REGISTRY}/${{ github.repository }}"
-          IMAGE=$(echo "$IMG_RAW" | tr '[:upper:]' '[:lower:]')
-          TAGS="${IMAGE}:${{ github.sha }}-js"
-          FULL_TAGS="${TAGS},${IMAGE}:latest-js,${IMAGE}:v${{ steps.date.outputs.date }}-js"
-          echo "tags=$FULL_TAGS" >> $GITHUB_OUTPUT
-          echo "image=$IMAGE" >> $GITHUB_OUTPUT
-      - name: Set up Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v3
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build js base dockerfile
-        uses: docker/build-push-action@v5
-        with:
-          context: .
- file: Dockerfile.ci - tags: ${{ steps.prep.outputs.tags }} - target: base-js - cache-from: type=gha - cache-to: type=gha,mode=max - push: true - - artifact-nargo: - name: Artifact nargo - runs-on: ubuntu-latest - needs: [build-base-nargo] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-nargo - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Artifact nargo - uses: actions/upload-artifact@v4 - with: - name: nargo - path: /usr/src/noir/target/release/nargo - if-no-files-found: error - compression-level: 0 - - test-nargo: - name: Test nargo - runs-on: ubuntu-latest - needs: [build-base-nargo] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-nargo - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Test - working-directory: /usr/src/noir - run: | - .github/scripts/nargo-test.sh - - build-noir-wasm: - name: Build noir wasm - runs-on: ubuntu-latest - needs: [build-base-js, build-noirc-abi] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Build - working-directory: /usr/src/noir - run: | - ./.github/scripts/noir-wasm-build.sh - - name: Artifact - uses: actions/upload-artifact@v4 - with: - name: noir_wasm - path: /usr/src/noir/compiler/wasm - retention-days: 10 - - test-noir-wasm: - name: Test noir wasm - runs-on: ubuntu-latest - needs: [build-base-js, artifact-nargo, build-noir-wasm] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download nargo - uses: actions/download-artifact@v4 - with: - name: nargo - path: /usr/src/noir/target/release - - name: Prep downloaded artifact - run: | - chmod +x /usr/src/noir/target/release/nargo - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Download noir_wasm artifact - uses: actions/download-artifact@v4 - with: - name: noir_wasm - path: /usr/src/noir/compiler/wasm - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/noir-wasm-test.sh - - test-noir-wasm-browser: - name: Test noir wasm browser - runs-on: ubuntu-latest - needs: [build-base-js, artifact-nargo, build-noir-wasm] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download nargo - uses: actions/download-artifact@v4 - with: - name: nargo - path: /usr/src/noir/target/release - - name: Prep downloaded artifact - run: | - chmod +x /usr/src/noir/target/release/nargo - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Download noir_wasm artifact - uses: actions/download-artifact@v4 - with: - name: noir_wasm - path: /usr/src/noir/compiler/wasm - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/noir-wasm-test-browser.sh - - build-acvm_js: - name: Build acvm js - runs-on: ubuntu-latest - needs: [build-base-js] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ 
github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Build - working-directory: /usr/src/noir - run: | - ./.github/scripts/acvm_js-build.sh - - name: Artifact - uses: actions/upload-artifact@v4 - with: - name: acvm_js - path: - /usr/src/noir/acvm-repo/acvm_js/outputs/out/acvm_js - if-no-files-found: error - compression-level: 0 - - test-acvm_js: - name: Test acvm js - runs-on: ubuntu-latest - needs: [build-base-js, build-acvm_js] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download acvm js - uses: actions/download-artifact@v4 - with: - name: acvm_js - path: | - /usr/src/noir/acvm-repo/acvm_js - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/acvm_js-test.sh - - test-acvm_js-browser: - name: Test acvm js browser - runs-on: ubuntu-latest - needs: [build-base-js, build-acvm_js] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download acvm js - uses: actions/download-artifact@v4 - with: - name: acvm_js - path: | - /usr/src/noir/acvm-repo/acvm_js - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/acvm_js-test-browser.sh - - build-noirc-abi: - name: Build noirc abi - runs-on: ubuntu-latest - needs: [build-base-js] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Build - working-directory: /usr/src/noir - run: | - ./.github/scripts/noirc-abi-build.sh - - name: Artifact - uses: actions/upload-artifact@v4 - with: - name: noirc_abi_wasm - path: - /usr/src/noir/tooling/noirc_abi_wasm/outputs/out/noirc_abi_wasm - if-no-files-found: error - compression-level: 0 - - test-noirc-abi: - name: Test noirc abi - runs-on: ubuntu-latest - needs: [build-base-js, build-noirc-abi] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/noirc-abi-test.sh - - test-noirc-abi-browser: - name: Test noirc abi browser - runs-on: ubuntu-latest - needs: [build-base-js, build-noirc-abi] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/noirc-abi-test-browser.sh - - build-noir-js-types: - name: Build noir js types - runs-on: ubuntu-latest - needs: [build-base-js, build-noirc-abi] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Build - working-directory: /usr/src/noir - run: | - ./.github/scripts/noir-js-types-build.sh - - name: Artifact - uses: 
actions/upload-artifact@v4 - with: - name: noir-js-types - path: | - /usr/src/noir/tooling/noir_js_types/lib - if-no-files-found: error - compression-level: 0 - - build-barretenberg-backend: - name: Build Barretenberg backend - runs-on: ubuntu-latest - needs: [build-base-js, build-noirc-abi, build-noir-js-types] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Download noir js types - uses: actions/download-artifact@v4 - with: - name: noir-js-types - path: /usr/src/noir/tooling/noir_js_types/lib/ - - name: Build - working-directory: /usr/src/noir - run: | - ./.github/scripts/backend-barretenberg-build.sh - - name: Artifact - uses: actions/upload-artifact@v4 - with: - name: barretenberg-backend - path: - /usr/src/noir/tooling/noir_js_backend_barretenberg/lib - if-no-files-found: error - compression-level: 0 - - test-barretenberg-backend: - name: Test Barretenberg backend - runs-on: ubuntu-latest - needs: [build-base-js, build-noirc-abi, build-noir-js-types, build-barretenberg-backend] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download artifact - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Download noir js types - uses: actions/download-artifact@v4 - with: - name: noir-js-types - path: /usr/src/noir/tooling/noir_js_types/lib/ - - name: Download Backend barretenberg - uses: actions/download-artifact@v4 - with: - name: barretenberg-backend - path: - /usr/src/noir/tooling/noir_js_backend_barretenberg/lib - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/backend-barretenberg-test.sh - - build-noir_js: - name: Build noirjs - runs-on: ubuntu-latest - needs: [build-base-js, artifact-nargo, build-noirc-abi, build-acvm_js, build-barretenberg-backend, build-noir-js-types] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download nargo - uses: actions/download-artifact@v4 - with: - name: nargo - path: /usr/src/noir/target/release - - name: prep downloaded artifact - run: | - chmod +x /usr/src/noir/target/release/nargo - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Download acvm js - uses: actions/download-artifact@v4 - with: - name: acvm_js - path: | - /usr/src/noir/acvm-repo/acvm_js - - name: Download Barretenberg backend - uses: actions/download-artifact@v4 - with: - name: barretenberg-backend - path: - /usr/src/noir/tooling/noir_js_backend_barretenberg/lib - - name: Download noir js types - uses: actions/download-artifact@v4 - with: - name: noir-js-types - path: | - /usr/src/noir/tooling/noir_js_types/lib - - name: Build - working-directory: /usr/src/noir - run: | - ./.github/scripts/noir-js-build.sh - - name: Artifact - uses: actions/upload-artifact@v4 - with: - name: noir_js - path: - /usr/src/noir/tooling/noir_js/lib - - test-noir_js: - name: Test noirjs - runs-on: ubuntu-latest - needs: [ - build-base-js, - build-noirc-abi, - artifact-nargo, - build-acvm_js, - 
build-barretenberg-backend, - build-noir_js, - build-noir-js-types - ] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download nargo - uses: actions/download-artifact@v4 - with: - name: nargo - path: /usr/src/noir/target/release - - name: Prep downloaded artifact - run: | - chmod +x /usr/src/noir/target/release/nargo - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: | - /usr/src/noir/tooling/noirc_abi_wasm - - name: Download acvm js - uses: actions/download-artifact@v4 - with: - name: acvm_js - path: | - /usr/src/noir/acvm-repo/acvm_js - - name: Download Barretenberg backend - uses: actions/download-artifact@v4 - with: - name: barretenberg-backend - path: - /usr/src/noir/tooling/noir_js_backend_barretenberg/lib - - name: Download noir js types - uses: actions/download-artifact@v4 - with: - name: noir-js-types - path: | - /usr/src/noir/tooling/noir_js_types/lib - - name: Download noir js - uses: actions/download-artifact@v4 - with: - name: noir_js - path: - /usr/src/noir/tooling/noir_js/lib - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/noir-js-test.sh - - build-noir_codegen: - name: Build noir codegen - runs-on: ubuntu-latest - needs: [build-base-js, build-noirc-abi, build-acvm_js, build-noir-js-types, build-noir_js] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download nargo - uses: actions/download-artifact@v4 - with: - name: nargo - path: /usr/src/noir/target/release - - name: Prep downloaded artifact - run: | - chmod +x /usr/src/noir/target/release/nargo - - name: Download noirc abi package - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: /usr/src/noir/tooling/noirc_abi_wasm - - name: Download acvm js - uses: actions/download-artifact@v4 - with: - name: acvm_js - path: /usr/src/noir/acvm-repo/acvm_js - - name: Download noir js types - uses: actions/download-artifact@v4 - with: - name: noir-js-types - path: | - /usr/src/noir/tooling/noir_js_types/lib - - name: Download noir js - uses: actions/download-artifact@v4 - with: - name: noir_js - path: - /usr/src/noir/tooling/noir_js/lib - - name: Build - working-directory: /usr/src/noir - run: | - ./.github/scripts/noir-codegen-build.sh - - name: Artifact - uses: actions/upload-artifact@v4 - with: - name: noir_codegen - path: - /usr/src/noir/tooling/noir_codegen/lib - - test-noir_codegen: - name: Test noir codegen - runs-on: ubuntu-latest - needs: [build-base-js, artifact-nargo, build-noirc-abi, build-acvm_js, build-noir-js-types, build-noir_js, build-noir_codegen] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download nargo - uses: actions/download-artifact@v4 - with: - name: nargo - path: /usr/src/noir/target/release - - name: Prep downloaded artifact - run: | - chmod +x /usr/src/noir/target/release/nargo - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: /usr/src/noir/tooling/noirc_abi_wasm - - name: Download acvm js - uses: actions/download-artifact@v4 - with: - name: acvm_js - path: /usr/src/noir/acvm-repo/acvm_js - - name: Download noir js types - uses: actions/download-artifact@v4 - with: - name: 
noir-js-types - path: | - /usr/src/noir/tooling/noir_js_types/lib - - name: Download noir js - uses: actions/download-artifact@v4 - with: - name: noir_js - path: - /usr/src/noir/tooling/noir_js/lib - - name: Download noir codegen - uses: actions/download-artifact@v4 - with: - name: noir_codegen - path: - /usr/src/noir/tooling/noir_codegen/lib - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/noir-codegen-test.sh - - test-integration: - name: Integration test - runs-on: ubuntu-latest - needs: [ - build-base-js, - artifact-nargo, - build-noir-wasm, - build-noirc-abi, - build-acvm_js, - build-noir-js-types, - build-noir_js, - build-barretenberg-backend - ] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download nargo - uses: actions/download-artifact@v4 - with: - name: nargo - path: /usr/src/noir/target/release - - name: Prep downloaded artifact - run: | - chmod +x /usr/src/noir/target/release/nargo - - name: Download noir wasm - uses: actions/download-artifact@v4 - with: - name: noir_wasm - path: /usr/src/noir/compiler/wasm - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: /usr/src/noir/tooling/noirc_abi_wasm - - name: Download acvm js - uses: actions/download-artifact@v4 - with: - name: acvm_js - path: /usr/src/noir/acvm-repo/acvm_js - - name: Download noir js types - uses: actions/download-artifact@v4 - with: - name: noir-js-types - path: | - /usr/src/noir/tooling/noir_js_types/lib - - name: Download noir js - uses: actions/download-artifact@v4 - with: - name: noir_js - path: - /usr/src/noir/tooling/noir_js/lib - - name: Download Barretenberg backend - uses: actions/download-artifact@v4 - with: - name: barretenberg-backend - path: - /usr/src/noir/tooling/noir_js_backend_barretenberg/lib - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/integration-test-node.sh - - test-integration-browser: - name: Integration test browser - runs-on: ubuntu-latest - needs: [ - build-base-js, - build-noir-wasm, - build-noirc-abi, - build-acvm_js, - build-noir-js-types, - build-noir_js, - build-barretenberg-backend - ] - container: - image: ghcr.io/noir-lang/noir:${{ github.sha }}-js - credentials: - username: ${{ github.actor }} - password: ${{ secrets.github_token }} - steps: - - name: Download noir wasm - uses: actions/download-artifact@v4 - with: - name: noir_wasm - path: /usr/src/noir/compiler/wasm - - name: Download noirc abi - uses: actions/download-artifact@v4 - with: - name: noirc_abi_wasm - path: /usr/src/noir/tooling/noirc_abi_wasm - - name: Download acvm js - uses: actions/download-artifact@v4 - with: - name: acvm_js - path: /usr/src/noir/acvm-repo/acvm_js - - name: Download noir js types - uses: actions/download-artifact@v4 - with: - name: noir-js-types - path: | - /usr/src/noir/tooling/noir_js_types/lib - - name: Download noir js - uses: actions/download-artifact@v4 - with: - name: noir_js - path: - /usr/src/noir/tooling/noir_js/lib - - name: Download Barretenberg backend - uses: actions/download-artifact@v4 - with: - name: barretenberg-backend - path: - /usr/src/noir/tooling/noir_js_backend_barretenberg/lib - - name: Test - working-directory: /usr/src/noir - run: | - ./.github/scripts/integration-test-browser.sh - - tests-end: - name: End - runs-on: ubuntu-latest - if: ${{ always() }} - needs: - - test-nargo - - test-noirc-abi - - test-noirc-abi-browser - - 
test-noir-wasm - - test-noir-wasm-browser - - test-integration - - test-integration-browser - - test-noir_codegen - - test-acvm_js - - test-acvm_js-browser - - test-barretenberg-backend - - test-noir_js - - steps: - - name: Report overall success - run: | - if [[ $FAIL == true ]]; then - exit 1 - else - exit 0 - fi - env: - FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index 5d0b72c6ad8..1f9ccdd946b 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -75,9 +75,11 @@ jobs: run: | npm i wasm-opt -g + - name: Query active docs versions + run: yarn workspace docs version::stables + - name: Build docs - run: - yarn workspaces foreach -Rpt --from docs run build + run: yarn workspaces foreach -Rpt --from docs run build - name: Upload artifact uses: actions/upload-artifact@v4 @@ -126,4 +128,4 @@ jobs: with: message: | FYI @noir-lang/developerrelations on Noir doc changes. - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/gates_report.yml b/.github/workflows/gates_report.yml index f3f798fc5ea..ebf17f7374c 100644 --- a/.github/workflows/gates_report.yml +++ b/.github/workflows/gates_report.yml @@ -74,7 +74,7 @@ jobs: - name: Compare gates reports id: gates_diff - uses: TomAFrench/noir-gates-diff@e7cf131b7e7f044c01615f93f0b855f65ddc02d4 + uses: TomAFrench/noir-gates-diff@df05f34e2ab275ddc4f2cac065df1c88f8a05e5d with: report: gates_report.json summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index a56583b34eb..8896e613608 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -28,10 +28,11 @@ jobs: run: | npm i wasm-opt -g + - name: Query active docs versions + run: yarn workspace docs version::stables + - name: Build docs for deploying - working-directory: docs - run: - yarn workspaces foreach -Rt run build + run: yarn workspaces foreach -Rpt --from docs run build - name: Deploy to Netlify uses: nwtgck/actions-netlify@v2.1 diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index 470db3b78f7..819be308169 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -44,7 +44,9 @@ jobs: uses: actions/upload-artifact@v4 with: name: noirc_abi_wasm - path: ./tooling/noirc_abi_wasm/outputs/out/noirc_abi_wasm + path: | + ./tooling/noirc_abi_wasm/nodejs + ./tooling/noirc_abi_wasm/web retention-days: 10 build-noir_wasm: @@ -113,7 +115,9 @@ jobs: uses: actions/upload-artifact@v4 with: name: acvm-js - path: ./acvm-repo/acvm_js/outputs/out/acvm_js + path: | + ./acvm-repo/acvm_js/nodejs + ./acvm-repo/acvm_js/web retention-days: 3 publish-es-packages: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index badcd3af2dd..d27fac0e039 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -131,7 +131,7 @@ jobs: # We need to specify the PR on which to make the comment as workflow is triggered by push. 
number: ${{ fromJSON(needs.release-please.outputs.release-pr).number }} # delete the comment in case failures have been fixed - delete: ${{ !env.FAIL }} + delete: ${{ env.FAIL == false }} message: "The release workflow has not completed successfully. Releasing now will result in a broken release" - name: Report overall success diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index b3908ee5d3e..06a96ee8932 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -66,9 +66,6 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup - - name: Install wasm-opt - run: ./.github/scripts/wasm-opt-install.sh - - name: Build noirc_abi run: ./.github/scripts/noirc-abi-build.sh @@ -76,7 +73,9 @@ jobs: uses: actions/upload-artifact@v4 with: name: noirc_abi_wasm - path: ./tooling/noirc_abi_wasm/outputs/out/noirc_abi_wasm + path: | + ./tooling/noirc_abi_wasm/nodejs + ./tooling/noirc_abi_wasm/web retention-days: 10 @@ -100,9 +99,6 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup - - name: Install wasm-opt - run: ./.github/scripts/wasm-opt-install.sh - - name: Build noir_js_types run: yarn workspace @noir-lang/types build @@ -138,9 +134,6 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup - - name: Install wasm-opt - run: ./.github/scripts/wasm-opt-install.sh - - name: Build acvm_js run: ./.github/scripts/acvm_js-build.sh @@ -148,7 +141,9 @@ jobs: uses: actions/upload-artifact@v4 with: name: acvm-js - path: ./acvm-repo/acvm_js/outputs/out/acvm_js + path: | + ./acvm-repo/acvm_js/nodejs + ./acvm-repo/acvm_js/web retention-days: 3 test-acvm_js-node: diff --git a/.gitignore b/.gitignore index 5f41566c94b..9a829afab8b 100644 --- a/.gitignore +++ b/.gitignore @@ -17,12 +17,6 @@ pkg/ # Noir.js tooling/noir_js/lib -# Nix stuff -**/outputs -result -.envrc.local -.direnv/ - # Nargo output *.proof *.acir diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 1fb1def7ce1..efb17cb0085 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -4,7 +4,6 @@ // List of extensions which should be recommended for users of this workspace. 
"recommendations": [ "mkhl.direnv", - "jnoortheen.nix-ide", "rust-lang.rust-analyzer", "redhat.vscode-yaml", "esbenp.prettier-vscode", diff --git a/.vscode/settings.json b/.vscode/settings.json index 171d36f4e04..fb8ea527881 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -2,17 +2,6 @@ "direnv.restart.automatic": true, "redhat.telemetry.enabled": false, "yaml.recommendations.show": false, - "nix.serverPath": "nil", - "nix.enableLanguageServer": true, - "nix.serverSettings": { - "nil": { - "formatting": { - "command": [ - "nixpkgs-fmt" - ] - } - } - }, "yaml.schemas": { "https://json.schemastore.org/github-workflow.json": "${workspaceRoot}/.github/workflows/*.yml" }, diff --git a/Cargo.lock b/Cargo.lock index 2f85b26f974..f3803063f33 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -93,7 +93,6 @@ dependencies = [ "acvm", "bn254_blackbox_solver", "build-data", - "cfg-if 1.0.0", "console_error_panic_hook", "const-str", "gloo-utils", @@ -542,9 +541,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" [[package]] name = "bitmaps" @@ -606,16 +605,12 @@ dependencies = [ "acvm_blackbox_solver", "ark-ec", "ark-ff", - "flate2", + "cfg-if 1.0.0", "getrandom 0.2.10", "js-sys", "noir_grumpkin", "num-bigint", "num-traits", - "pkg-config", - "reqwest", - "rust-embed", - "tar", "thiserror", "wasm-bindgen-futures", "wasmer", @@ -1607,12 +1602,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.48.0", ] [[package]] @@ -1988,9 +1983,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.24" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", @@ -2569,16 +2564,16 @@ version = "0.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3af92c55d7d839293953fcd0fda5ecfe93297cfde6ffbdec13b41d99c0ba6607" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "libc", "redox_syscall 0.4.1", ] [[package]] name = "linux-raw-sys" -version = "0.4.13" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" [[package]] name = "lock_api" @@ -3133,7 +3128,7 @@ version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "crossbeam-channel", "filetime", "fsevent-sys", @@ -3587,7 +3582,7 @@ checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e" dependencies = [ 
"bit-set", "bit-vec", - "bitflags 2.4.2", + "bitflags 2.5.0", "lazy_static", "num-traits", "rand 0.8.5", @@ -4037,7 +4032,6 @@ dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "shellexpand", "syn 2.0.32", "walkdir", ] @@ -4048,7 +4042,6 @@ version = "7.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d38ff6bf570dc3bb7100fce9f7b60c33fa71d80e88da3f2580df4ff2bdded74" dependencies = [ - "globset", "sha2", "walkdir", ] @@ -4076,15 +4069,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.28" +version = "0.38.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.48.0", ] [[package]] @@ -4469,15 +4462,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" -[[package]] -name = "shellexpand" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ccc8076840c4da029af4f87e4e8daeb0fca6b87bbb02e10cb60b791450e11e4" -dependencies = [ - "dirs", -] - [[package]] name = "signature" version = "1.6.4" @@ -5550,7 +5534,7 @@ version = "0.121.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dbe55c8f9d0dbd25d9447a5a889ff90c0cc3feaa7395310d3d826b2c703eaab" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "indexmap 2.0.0", "semver", ] diff --git a/Dockerfile.ci b/Dockerfile.ci deleted file mode 100644 index e0dc030980c..00000000000 --- a/Dockerfile.ci +++ /dev/null @@ -1,30 +0,0 @@ -FROM rust:1.73.0-slim-bookworm as base -RUN apt-get update && apt-get upgrade -y && apt-get install build-essential git -y -WORKDIR /usr/src/noir -ENV PATH="${PATH}:/usr/src/noir/target/release" - -FROM base as base-nargo -COPY . . -RUN .github/scripts/nargo-build.sh - -FROM base as base-js -RUN apt-get install -y ca-certificates curl gnupg -RUN mkdir -p /etc/apt/keyrings -RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg -RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list -RUN apt-get update && apt-get install nodejs -y -RUN corepack enable -RUN apt-get install -y jq -COPY yarn.lock package.json .yarnrc.yml ./ -COPY .yarn/ ./.yarn/ -COPY ./acvm-repo/acvm_js/package.json ./acvm-repo/acvm_js/ -COPY ./tooling/noirc_abi_wasm/package.json ./tooling/noirc_abi_wasm/ -COPY ./compiler/wasm/package.json ./compiler/wasm/ -COPY ./tooling/noir_js_types/package.json ./tooling/noir_js_types/ -COPY ./tooling/noir_js_backend_barretenberg/package.json ./tooling/noir_js_backend_barretenberg/ -COPY ./tooling/noir_js/package.json ./tooling/noir_js/ -COPY ./tooling/noir_codegen/package.json ./tooling/noir_codegen/ -COPY ./compiler/integration-tests/package.json ./compiler/integration-tests/ -COPY ./docs/package.json ./docs/ -RUN yarn --immutable -COPY . . diff --git a/README.md b/README.md index 5c93512ae26..adf68b290ef 100644 --- a/README.md +++ b/README.md @@ -56,26 +56,6 @@ Concretely the following items are on the road map: This crate's minimum supported rustc version is 1.73.0. 
-## Working on this project - -This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. Please follow [our guidelines](https://noir-lang.org/docs/getting_started/installation/other_install_methods#option-3-compile-from-source) to setup your environment for working on the project. - -### Building against a different local/remote version of Barretenberg - -If you are working on this project and want a different version of Barretenberg (instead of the version this project is pinned against), you'll want to replace the lockfile version with your version. This can be done by running: - -```sh -nix flake lock --override-input barretenberg /absolute/path/to/your/barretenberg -``` - -You can also point at a fork and/or branch on GitHub using: - -```sh -nix flake lock --override-input barretenberg github:username/barretenberg/branch_name -``` - -__Note:__ You don't want to commit the updated lockfile, as it will fail in CI! - ## License Noir is free and open source. It is distributed under a dual license. (MIT/APACHE) diff --git a/acvm-repo/acir/codegen/acir.cpp b/acvm-repo/acir/codegen/acir.cpp index d7ef849ab75..e4203b579b0 100644 --- a/acvm-repo/acir/codegen/acir.cpp +++ b/acvm-repo/acir/codegen/acir.cpp @@ -1074,6 +1074,7 @@ namespace Program { uint32_t id; std::vector inputs; std::vector outputs; + std::optional predicate; friend bool operator==(const Call&, const Call&); std::vector bincodeSerialize() const; @@ -6173,6 +6174,7 @@ namespace Program { if (!(lhs.id == rhs.id)) { return false; } if (!(lhs.inputs == rhs.inputs)) { return false; } if (!(lhs.outputs == rhs.outputs)) { return false; } + if (!(lhs.predicate == rhs.predicate)) { return false; } return true; } @@ -6199,6 +6201,7 @@ void serde::Serializable::serialize(const Program::Opcode serde::Serializable::serialize(obj.id, serializer); serde::Serializable::serialize(obj.inputs, serializer); serde::Serializable::serialize(obj.outputs, serializer); + serde::Serializable::serialize(obj.predicate, serializer); } template <> @@ -6208,6 +6211,7 @@ Program::Opcode::Call serde::Deserializable::deserialize( obj.id = serde::Deserializable::deserialize(deserializer); obj.inputs = serde::Deserializable::deserialize(deserializer); obj.outputs = serde::Deserializable::deserialize(deserializer); + obj.predicate = serde::Deserializable::deserialize(deserializer); return obj; } diff --git a/acvm-repo/acir/src/circuit/mod.rs b/acvm-repo/acir/src/circuit/mod.rs index b5d6348d34f..cb846bdaffa 100644 --- a/acvm-repo/acir/src/circuit/mod.rs +++ b/acvm-repo/acir/src/circuit/mod.rs @@ -216,25 +216,33 @@ impl std::fmt::Display for Circuit { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "current witness index : {}", self.current_witness_index)?; - let write_public_inputs = |f: &mut std::fmt::Formatter<'_>, - public_inputs: &PublicInputs| - -> Result<(), std::fmt::Error> { - write!(f, "[")?; - let public_input_indices = public_inputs.indices(); - for (index, public_input) in public_input_indices.iter().enumerate() { - write!(f, "{public_input}")?; - if index != public_input_indices.len() - 1 { - write!(f, ", ")?; + let write_witness_indices = + |f: &mut std::fmt::Formatter<'_>, indices: &[u32]| -> Result<(), std::fmt::Error> { + write!(f, "[")?; + for (index, witness_index) in indices.iter().enumerate() { + write!(f, "{witness_index}")?; + if index != indices.len() - 1 { + write!(f, ", ")?; + } } - } - writeln!(f, "]") - }; + writeln!(f, "]") + }; + + write!(f, 
"private parameters indices : ")?; + write_witness_indices( + f, + &self + .private_parameters + .iter() + .map(|witness| witness.witness_index()) + .collect::>(), + )?; write!(f, "public parameters indices : ")?; - write_public_inputs(f, &self.public_parameters)?; + write_witness_indices(f, &self.public_parameters.indices())?; write!(f, "return value indices : ")?; - write_public_inputs(f, &self.return_values)?; + write_witness_indices(f, &self.return_values.indices())?; for opcode in &self.opcodes { writeln!(f, "{opcode}")?; diff --git a/acvm-repo/acir/src/circuit/opcodes.rs b/acvm-repo/acir/src/circuit/opcodes.rs index 68d28b287e6..d8204132b3e 100644 --- a/acvm-repo/acir/src/circuit/opcodes.rs +++ b/acvm-repo/acir/src/circuit/opcodes.rs @@ -39,6 +39,8 @@ pub enum Opcode { inputs: Vec, /// Outputs of the function call outputs: Vec, + /// Predicate of the circuit execution - indicates if it should be skipped + predicate: Option, }, } @@ -97,8 +99,11 @@ impl std::fmt::Display for Opcode { write!(f, "INIT ")?; write!(f, "(id: {}, len: {}) ", block_id.0, init.len()) } - Opcode::Call { id, inputs, outputs } => { + Opcode::Call { id, inputs, outputs, predicate } => { write!(f, "CALL func {}: ", id)?; + if let Some(pred) = predicate { + writeln!(f, "PREDICATE = {pred}")?; + } write!(f, "inputs: {:?}, ", inputs)?; write!(f, "outputs: {:?}", outputs) } diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 8a0c4692282..c955e435b37 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -10,12 +10,6 @@ pub struct FunctionInput { pub num_bits: u32, } -impl FunctionInput { - pub fn dummy() -> Self { - Self { witness: Witness(0), num_bits: 0 } - } -} - #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum BlackBoxFuncCall { AND { diff --git a/acvm-repo/acir/src/native_types/witness_stack.rs b/acvm-repo/acir/src/native_types/witness_stack.rs index a9e8f219b3e..7c79e3db431 100644 --- a/acvm-repo/acir/src/native_types/witness_stack.rs +++ b/acvm-repo/acir/src/native_types/witness_stack.rs @@ -21,7 +21,7 @@ pub struct WitnessStackError(#[from] SerializationError); /// An ordered set of witness maps for separate circuits #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Default, Serialize, Deserialize)] pub struct WitnessStack { - pub stack: Vec, + stack: Vec, } #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Default, Serialize, Deserialize)] @@ -37,6 +37,10 @@ impl WitnessStack { self.stack.push(StackItem { index, witness }); } + pub fn pop(&mut self) -> Option { + self.stack.pop() + } + pub fn peek(&self) -> Option<&StackItem> { self.stack.last() } diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs index 8b04292dfaa..8b9160ccf6a 100644 --- a/acvm-repo/acir/tests/test_program_serialization.rs +++ b/acvm-repo/acir/tests/test_program_serialization.rs @@ -362,3 +362,108 @@ fn memory_op_circuit() { assert_eq!(bytes, expected_serialization) } + +#[test] +fn nested_acir_call_circuit() { + // Circuit for the following program: + // fn main(x: Field, y: pub Field) { + // let z = nested_call(x, y); + // let z2 = nested_call(x, y); + // assert(z == z2); + // } + // #[fold] + // fn nested_call(x: Field, y: Field) -> Field { + // inner_call(x + 2, y) + // } + // #[fold] + // fn inner_call(x: Field, y: Field) -> Field { + // assert(x == y); 
+ // x + // } + let nested_call = Opcode::Call { + id: 1, + inputs: vec![Witness(0), Witness(1)], + outputs: vec![Witness(2)], + predicate: None, + }; + let nested_call_two = Opcode::Call { + id: 1, + inputs: vec![Witness(0), Witness(1)], + outputs: vec![Witness(3)], + predicate: None, + }; + + let assert_nested_call_results = Opcode::AssertZero(Expression { + mul_terms: Vec::new(), + linear_combinations: vec![ + (FieldElement::one(), Witness(2)), + (-FieldElement::one(), Witness(3)), + ], + q_c: FieldElement::zero(), + }); + + let main = Circuit { + current_witness_index: 3, + private_parameters: BTreeSet::from([Witness(0)]), + public_parameters: PublicInputs([Witness(1)].into()), + opcodes: vec![nested_call, nested_call_two, assert_nested_call_results], + ..Circuit::default() + }; + + let call_parameter_addition = Opcode::AssertZero(Expression { + mul_terms: Vec::new(), + linear_combinations: vec![ + (FieldElement::one(), Witness(0)), + (-FieldElement::one(), Witness(2)), + ], + q_c: FieldElement::one() + FieldElement::one(), + }); + let call = Opcode::Call { + id: 2, + inputs: vec![Witness(2), Witness(1)], + outputs: vec![Witness(3)], + predicate: None, + }; + + let nested_call = Circuit { + current_witness_index: 3, + private_parameters: BTreeSet::from([Witness(0), Witness(1)]), + return_values: PublicInputs([Witness(3)].into()), + opcodes: vec![call_parameter_addition, call], + ..Circuit::default() + }; + + let assert_param_equality = Opcode::AssertZero(Expression { + mul_terms: Vec::new(), + linear_combinations: vec![ + (FieldElement::one(), Witness(0)), + (-FieldElement::one(), Witness(1)), + ], + q_c: FieldElement::zero(), + }); + + let inner_call = Circuit { + current_witness_index: 1, + private_parameters: BTreeSet::from([Witness(0), Witness(1)]), + return_values: PublicInputs([Witness(0)].into()), + opcodes: vec![assert_param_equality], + ..Circuit::default() + }; + + let program = Program { functions: vec![main, nested_call, inner_call] }; + + let bytes = Program::serialize_program(&program); + + let expected_serialization: Vec = vec![ + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 205, 146, 97, 10, 195, 32, 12, 133, 163, 66, 207, 147, + 24, 173, 241, 223, 174, 50, 153, 189, 255, 17, 214, 177, 148, 89, 17, 250, 99, 14, 246, + 224, 97, 144, 16, 146, 143, 231, 224, 45, 167, 126, 105, 217, 109, 118, 91, 248, 200, 168, + 225, 248, 191, 106, 114, 208, 233, 104, 188, 233, 139, 223, 137, 108, 51, 139, 113, 13, + 161, 38, 95, 137, 233, 142, 62, 23, 137, 24, 98, 89, 133, 132, 162, 196, 135, 23, 230, 42, + 65, 82, 46, 57, 97, 166, 192, 149, 182, 152, 121, 211, 97, 110, 222, 94, 8, 13, 132, 182, + 54, 48, 144, 235, 8, 254, 10, 22, 76, 132, 101, 231, 237, 229, 23, 189, 213, 54, 119, 15, + 83, 212, 199, 172, 175, 79, 113, 51, 48, 198, 253, 207, 84, 13, 204, 141, 224, 21, 176, + 147, 158, 66, 231, 43, 145, 6, 4, 0, 0, + ]; + assert_eq!(bytes, expected_serialization); +} diff --git a/acvm-repo/acvm/src/compiler/transformers/mod.rs b/acvm-repo/acvm/src/compiler/transformers/mod.rs index 1ba261b09a3..003cd4279a1 100644 --- a/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -142,7 +142,11 @@ pub(super) fn transform_internal( new_acir_opcode_positions.push(acir_opcode_positions[index]); transformed_opcodes.push(opcode); } - Opcode::Call { .. } => { + Opcode::Call { ref outputs, .. 
} => { + for witness in outputs { + transformer.mark_solvable(*witness); + } + // `Call` does not write values to the `WitnessMap` // A separate ACIR function should have its own respective `WitnessMap` new_acir_opcode_positions.push(acir_opcode_positions[index]); diff --git a/acvm-repo/acvm/src/pwg/memory_op.rs b/acvm-repo/acvm/src/pwg/memory_op.rs index e51797707a7..672c13e11c2 100644 --- a/acvm-repo/acvm/src/pwg/memory_op.rs +++ b/acvm-repo/acvm/src/pwg/memory_op.rs @@ -6,7 +6,9 @@ use acir::{ FieldElement, }; -use super::{arithmetic::ExpressionSolver, get_value, insert_value, witness_to_value}; +use super::{ + arithmetic::ExpressionSolver, get_value, insert_value, is_predicate_false, witness_to_value, +}; use super::{ErrorLocation, OpcodeResolutionError}; type MemoryIndex = u32; @@ -80,11 +82,8 @@ impl MemoryOpSolver { // `operation == 0` implies a read operation. (`operation == 1` implies write operation). let is_read_operation = operation.is_zero(); - // If the predicate is `None`, then we simply return the value 1 - let pred_value = match predicate { - Some(pred) => get_value(pred, initial_witness), - None => Ok(FieldElement::one()), - }?; + // Fetch whether or not the predicate is false (e.g. equal to zero) + let skip_operation = is_predicate_false(initial_witness, predicate)?; if is_read_operation { // `value_read = arr[memory_index]` @@ -97,7 +96,7 @@ impl MemoryOpSolver { // A zero predicate indicates that we should skip the read operation // and zero out the operation's output. - let value_in_array = if pred_value.is_zero() { + let value_in_array = if skip_operation { FieldElement::zero() } else { self.read_memory_index(memory_index)? @@ -111,7 +110,7 @@ impl MemoryOpSolver { let value_write = value; // A zero predicate indicates that we should skip the write operation. - if pred_value.is_zero() { + if skip_operation { // We only want to write to already initialized memory. // Do nothing if the predicate is zero. Ok(()) diff --git a/acvm-repo/acvm/src/pwg/mod.rs b/acvm-repo/acvm/src/pwg/mod.rs index 3cedcfc0399..bb98eda2689 100644 --- a/acvm-repo/acvm/src/pwg/mod.rs +++ b/acvm-repo/acvm/src/pwg/mod.rs @@ -377,7 +377,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { }; let witness = &mut self.witness_map; - if BrilligSolver::::should_skip(witness, brillig)? { + if is_predicate_false(witness, &brillig.predicate)? { return BrilligSolver::::zero_out_brillig_outputs(witness, brillig).map(|_| None); } @@ -448,7 +448,9 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { } pub fn solve_call_opcode(&mut self) -> Result, OpcodeResolutionError> { - let Opcode::Call { id, inputs, outputs } = &self.opcodes[self.instruction_pointer] else { + let Opcode::Call { id, inputs, outputs, predicate } = + &self.opcodes[self.instruction_pointer] + else { unreachable!("Not executing a Call opcode"); }; if *id == 0 { @@ -459,6 +461,14 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { }); } + if is_predicate_false(&self.witness_map, predicate)? { + // Zero out the outputs if we have a false predicate + for output in outputs { + insert_value(output, FieldElement::zero(), &mut self.witness_map)?; + } + return Ok(None); + } + if self.acir_call_counter >= self.acir_call_results.len() { let mut initial_witness = WitnessMap::default(); for (i, input_witness) in inputs.iter().enumerate() { @@ -556,6 +566,20 @@ fn any_witness_from_expression(expr: &Expression) -> Option { } } +/// Returns `true` if the predicate is zero +/// A predicate is used to indicate whether we should skip a certain operation. 
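+/// (`MemoryOp`, `Brillig`, and `Call` opcodes all carry such an optional predicate.)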
+/// If we have a zero predicate it means the operation should be skipped.
+pub(crate) fn is_predicate_false(
+    witness: &WitnessMap,
+    predicate: &Option<Expression>,
+) -> Result<bool, OpcodeResolutionError> {
+    match predicate {
+        Some(pred) => get_value(pred, witness).map(|pred_value| pred_value.is_zero()),
+        // If the predicate is `None`, then we treat it as an unconditional `true`
+        None => Ok(false),
+    }
+}
+
 #[derive(Debug, Clone, PartialEq)]
 pub struct AcirCallWaitInfo {
     /// Index in the list of ACIR functions that should be called
diff --git a/acvm-repo/acvm_js/Cargo.toml b/acvm-repo/acvm_js/Cargo.toml
index 65c072b1d96..8319c38aee2 100644
--- a/acvm-repo/acvm_js/Cargo.toml
+++ b/acvm-repo/acvm_js/Cargo.toml
@@ -16,20 +16,17 @@ repository.workspace = true
 crate-type = ["cdylib"]
 
 [dependencies]
-cfg-if = "1.0.0"
-
-[target.'cfg(target_arch = "wasm32")'.dependencies]
 acvm.workspace = true
 bn254_blackbox_solver = { workspace = true, optional = true }
 wasm-bindgen.workspace = true
 wasm-bindgen-futures.workspace = true
 console_error_panic_hook.workspace = true
 gloo-utils.workspace = true
-js-sys.workspace = true
+js-sys.workspace = true
+serde.workspace = true
 tracing-subscriber.workspace = true
 tracing-web.workspace = true
 
-serde = { version = "1.0.136", features = ["derive"] }
 const-str = "0.5.5"
 
 [build-dependencies]
diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh
index 24af149bcea..58724dee02c 100755
--- a/acvm-repo/acvm_js/build.sh
+++ b/acvm-repo/acvm_js/build.sh
@@ -6,13 +6,6 @@ function require_command {
     exit 1
   fi
 }
-function check_installed {
-  if ! command -v "$1" >/dev/null 2>&1; then
-    echo "$1 is not installed. Please install it." >&2
-    return 1
-  fi
-  return 0
-}
 function run_or_fail {
   "$@"
   local status=$?
@@ -21,27 +14,39 @@ function run_or_fail {
     exit $status
   fi
 }
+function run_if_available {
+  if command -v "$1" >/dev/null 2>&1; then
+    "$@"
+  else
+    echo "$1 is not installed. Please install it to use this feature." >&2
+  fi
+}
 
 require_command jq
 require_command cargo
 require_command wasm-bindgen
-check_installed wasm-opt
 
 self_path=$(dirname "$(readlink -f "$0")")
 
-export pname=$(cargo read-manifest | jq -r '.name')
-export CARGO_TARGET_DIR=$self_path/target
+pname=$(cargo read-manifest | jq -r '.name')
 
-rm -rf $self_path/outputs >/dev/null 2>&1
-rm -rf $self_path/result >/dev/null 2>&1
+NODE_DIR=$self_path/nodejs
+BROWSER_DIR=$self_path/web
 
-if [ -n "$out" ]; then
-  echo "Will install package to $out (defined outside installPhase.sh script)"
-else
-  export out="$self_path/outputs/out"
-  echo "Will install package to $out"
+# Clear out the existing build artifacts as these aren't automatically removed by wasm-bindgen.
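+# (A sketch of the resulting layout, assuming the manifest name resolves to `acvm_js`:
+#  nodejs/acvm_js_bg.wasm and web/acvm_js_bg.wasm plus the wasm-bindgen JS/TS bindings.)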
+if [ -d ./pkg/ ]; then + rm -r $NODE_DIR + rm -r $BROWSER_DIR fi -run_or_fail $self_path/buildPhaseCargoCommand.sh -run_or_fail $self_path/installPhase.sh +TARGET=wasm32-unknown-unknown +WASM_BINARY=${self_path}/../../target/$TARGET/release/${pname}.wasm + +NODE_WASM=${NODE_DIR}/${pname}_bg.wasm +BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm -ln -s $out $self_path/result +# Build the new wasm package +run_or_fail cargo build --lib --release --target $TARGET --package ${pname} +run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs +run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/acvm-repo/acvm_js/buildPhaseCargoCommand.sh b/acvm-repo/acvm_js/buildPhaseCargoCommand.sh deleted file mode 100755 index 6c710bc938f..00000000000 --- a/acvm-repo/acvm_js/buildPhaseCargoCommand.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env bash - -function run_or_fail { - "$@" - local status=$? - if [ $status -ne 0 ]; then - echo "Command '$*' failed with exit code $status" >&2 - exit $status - fi -} -function run_if_available { - if command -v "$1" >/dev/null 2>&1; then - "$@" - else - echo "$1 is not installed. Please install it to use this feature." >&2 - fi -} - -export self_path=$(dirname "$(readlink -f "$0")") - - -NODE_DIR=$self_path/nodejs/ -BROWSER_DIR=$self_path/web/ - -# Clear out the existing build artifacts as these aren't automatically removed by wasm-pack. -if [ -d ./pkg/ ]; then - rm -r $NODE_DIR - rm -r $BROWSER_DIR -fi - -TARGET=wasm32-unknown-unknown -WASM_BINARY=$CARGO_TARGET_DIR/$TARGET/release/${pname}.wasm - -NODE_WASM=${NODE_DIR}/${pname}_bg.wasm -BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm - -# Build the new wasm package -run_or_fail cargo build --lib --release --target $TARGET --package ${pname} -run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs -run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O -run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/acvm-repo/acvm_js/installPhase.sh b/acvm-repo/acvm_js/installPhase.sh deleted file mode 100755 index 34ddb8155e1..00000000000 --- a/acvm-repo/acvm_js/installPhase.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash -export self_path=$(dirname "$(readlink -f "$0")") - -export out_path=$out/acvm_js - -mkdir -p $out_path -cp $self_path/README.md $out_path/ -cp $self_path/package.json $out_path/ -cp -r $self_path/nodejs $out_path/ -cp -r $self_path/web $out_path/ diff --git a/acvm-repo/acvm_js/package.json b/acvm-repo/acvm_js/package.json index 55345a2ddf6..44d99f13c31 100644 --- a/acvm-repo/acvm_js/package.json +++ b/acvm-repo/acvm_js/package.json @@ -34,9 +34,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0",
     "publish": "echo 📡 publishing `$npm_package_name` && yarn npm publish",
     "nightly:version": "jq --arg new_version \"-$(git rev-parse --short HEAD)$1\" '.version = .version + $new_version' package.json > package-tmp.json && mv package-tmp.json package.json",
-    "clean": "chmod u+w web nodejs || true && rm -rf web nodejs",
-    "build:nix": "nix build -L .#acvm_js",
-    "install:from:nix": "yarn clean && yarn build:nix && cp -rL ./result/acvm_js/nodejs ./ && cp -rL ./result/acvm_js/web ./"
+    "clean": "chmod u+w web nodejs || true && rm -rf web nodejs"
   },
   "devDependencies": {
     "@esm-bundle/chai": "^4.3.4-fix.0",
diff --git a/acvm-repo/acvm_js/src/black_box_solvers.rs b/acvm-repo/acvm_js/src/black_box_solvers.rs
index fc0e3b28ebf..188e5334ed5 100644
--- a/acvm-repo/acvm_js/src/black_box_solvers.rs
+++ b/acvm-repo/acvm_js/src/black_box_solvers.rs
@@ -59,7 +59,6 @@ pub fn ecdsa_secp256k1_verify(
         signature,
     )
     .unwrap()
-    .into()
 }
 
 /// Verifies an ECDSA signature over the secp256r1 curve.
@@ -81,5 +80,4 @@ pub fn ecdsa_secp256r1_verify(
         signature,
     )
     .unwrap()
-    .into()
 }
diff --git a/acvm-repo/acvm_js/src/compression.rs b/acvm-repo/acvm_js/src/compression.rs
index 18e9216297e..8114e0d57d2 100644
--- a/acvm-repo/acvm_js/src/compression.rs
+++ b/acvm-repo/acvm_js/src/compression.rs
@@ -2,12 +2,12 @@ use acvm::acir::native_types::{WitnessMap, WitnessStack};
 use js_sys::JsString;
 use wasm_bindgen::prelude::wasm_bindgen;
 
-use crate::JsWitnessMap;
+use crate::{JsWitnessMap, JsWitnessStack};
 
 /// Compresses a `WitnessMap` into the binary format outputted by Nargo.
 ///
-/// @param {Uint8Array} compressed_witness - A witness map.
-/// @returns {WitnessMap} A compressed witness map
+/// @param {WitnessMap} witness_map - A witness map.
+/// @returns {Uint8Array} A compressed witness map
 #[wasm_bindgen(js_name = compressWitness, skip_jsdoc)]
 pub fn compress_witness(witness_map: JsWitnessMap) -> Result<Vec<u8>, JsString> {
     console_error_panic_hook::set_once();
@@ -21,6 +21,7 @@ pub fn compress_witness(witness_map: JsWitnessMap) -> Result<Vec<u8>, JsString>
 }
 
 /// Decompresses a compressed witness as outputted by Nargo into a `WitnessMap`.
+/// This should only be used to fetch the witness map for the main function.
 ///
 /// @param {Uint8Array} compressed_witness - A compressed witness.
 /// @returns {WitnessMap} The decompressed witness map.
@@ -28,8 +29,39 @@ pub fn decompress_witness(compressed_witness: Vec<u8>) -> Result<JsWitnessMap, JsString> {
     console_error_panic_hook::set_once();
 
+    let mut witness_stack =
+        WitnessStack::try_from(compressed_witness.as_slice()).map_err(|err| err.to_string())?;
+
+    let witness =
+        witness_stack.pop().expect("Should have at least one witness on the stack").witness;
+    Ok(witness.into())
+}
+
+/// Compresses a `WitnessStack` into the binary format outputted by Nargo.
+///
+/// @param {WitnessStack} witness_stack - A witness stack.
+/// @returns {Uint8Array} A compressed witness stack
+#[wasm_bindgen(js_name = compressWitnessStack, skip_jsdoc)]
+pub fn compress_witness_stack(witness_stack: JsWitnessStack) -> Result<Vec<u8>, JsString> {
+    console_error_panic_hook::set_once();
+
+    let witness_stack = WitnessStack::from(witness_stack);
+    let compressed_witness_stack: Vec<u8> =
+        Vec::<u8>::try_from(witness_stack).map_err(|err| err.to_string())?;
+
+    Ok(compressed_witness_stack)
+}
+
+/// Decompresses a compressed witness stack as outputted by Nargo into a `WitnessStack`.
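+/// Round-trips with `compressWitnessStack`: decompressing a freshly compressed stack is
+/// expected to yield a deep-equal `WitnessStack` (exercised in `witness_conversion.test.ts`).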
+/// +/// @param {Uint8Array} compressed_witness - A compressed witness. +/// @returns {WitnessStack} The decompressed witness stack. +#[wasm_bindgen(js_name = decompressWitnessStack, skip_jsdoc)] +pub fn decompress_witness_stack(compressed_witness: Vec) -> Result { + console_error_panic_hook::set_once(); + let witness_stack = WitnessStack::try_from(compressed_witness.as_slice()).map_err(|err| err.to_string())?; - Ok(witness_stack.stack[0].witness.clone().into()) + Ok(witness_stack.into()) } diff --git a/acvm-repo/acvm_js/src/execute.rs b/acvm-repo/acvm_js/src/execute.rs index 60d27a489e2..0e58ccf039c 100644 --- a/acvm-repo/acvm_js/src/execute.rs +++ b/acvm-repo/acvm_js/src/execute.rs @@ -1,5 +1,9 @@ +use std::{future::Future, pin::Pin}; + +use acvm::BlackBoxFunctionSolver; use acvm::{ - acir::circuit::Program, + acir::circuit::{Circuit, Program}, + acir::native_types::{WitnessMap, WitnessStack}, pwg::{ACVMStatus, ErrorLocation, OpcodeResolutionError, ACVM}, }; use bn254_blackbox_solver::Bn254BlackBoxSolver; @@ -9,7 +13,7 @@ use wasm_bindgen::prelude::wasm_bindgen; use crate::{ foreign_call::{resolve_brillig, ForeignCallHandler}, - JsExecutionError, JsWitnessMap, + JsExecutionError, JsWitnessMap, JsWitnessStack, }; #[wasm_bindgen] @@ -42,8 +46,16 @@ pub async fn execute_circuit( let solver = WasmBlackBoxFunctionSolver::initialize().await; - execute_circuit_with_black_box_solver(&solver, program, initial_witness, foreign_call_handler) - .await + let mut witness_stack = execute_program_with_native_type_return( + &solver, + program, + initial_witness, + &foreign_call_handler, + ) + .await?; + let witness_map = + witness_stack.pop().expect("Should have at least one witness on the stack").witness; + Ok(witness_map.into()) } /// Executes an ACIR circuit to generate the solved witness from the initial witness. @@ -56,69 +68,175 @@ pub async fn execute_circuit( #[wasm_bindgen(js_name = executeCircuitWithBlackBoxSolver, skip_jsdoc)] pub async fn execute_circuit_with_black_box_solver( solver: &WasmBlackBoxFunctionSolver, - // TODO(https://github.com/noir-lang/noir/issues/4428): These need to be updated to match the same interfaces - // as the native ACVM executor. Right now native execution still only handles one circuit so I do not feel the need - // to break the JS interface just yet. 
program: Vec, initial_witness: JsWitnessMap, foreign_call_handler: ForeignCallHandler, ) -> Result { console_error_panic_hook::set_once(); + + let mut witness_stack = execute_program_with_native_type_return( + solver, + program, + initial_witness, + &foreign_call_handler, + ) + .await?; + let witness_map = + witness_stack.pop().expect("Should have at least one witness on the stack").witness; + Ok(witness_map.into()) +} + +#[wasm_bindgen(js_name = executeProgram, skip_jsdoc)] +pub async fn execute_program( + program: Vec, + initial_witness: JsWitnessMap, + foreign_call_handler: ForeignCallHandler, +) -> Result { + console_error_panic_hook::set_once(); + + let solver = WasmBlackBoxFunctionSolver::initialize().await; + + execute_program_with_black_box_solver(&solver, program, initial_witness, &foreign_call_handler) + .await +} + +#[wasm_bindgen(js_name = executeProgramWithBlackBoxSolver, skip_jsdoc)] +pub async fn execute_program_with_black_box_solver( + solver: &WasmBlackBoxFunctionSolver, + program: Vec, + initial_witness: JsWitnessMap, + foreign_call_executor: &ForeignCallHandler, +) -> Result { + let witness_stack = execute_program_with_native_type_return( + solver, + program, + initial_witness, + foreign_call_executor, + ) + .await?; + + Ok(witness_stack.into()) +} + +async fn execute_program_with_native_type_return( + solver: &WasmBlackBoxFunctionSolver, + program: Vec, + initial_witness: JsWitnessMap, + foreign_call_executor: &ForeignCallHandler, +) -> Result { let program: Program = Program::deserialize_program(&program) - .map_err(|_| JsExecutionError::new("Failed to deserialize circuit. This is likely due to differing serialization formats between ACVM_JS and your compiler".to_string(), None))?; - let circuit = match program.functions.len() { - 0 => return Ok(initial_witness), - 1 => &program.functions[0], - _ => return Err(JsExecutionError::new("Program contains multiple circuits however ACVM currently only supports programs containing a single circuit".to_string(), None).into()) - }; - - let mut acvm = ACVM::new(&solver.0, &circuit.opcodes, initial_witness.into()); - - loop { - let solver_status = acvm.solve(); - - match solver_status { - ACVMStatus::Solved => break, - ACVMStatus::InProgress => { - unreachable!("Execution should not stop while in `InProgress` state.") - } - ACVMStatus::Failure(error) => { - let (assert_message, call_stack) = match &error { - OpcodeResolutionError::UnsatisfiedConstrain { - opcode_location: ErrorLocation::Resolved(opcode_location), - } - | OpcodeResolutionError::IndexOutOfBounds { - opcode_location: ErrorLocation::Resolved(opcode_location), - .. - } => { - (circuit.get_assert_message(*opcode_location), Some(vec![*opcode_location])) + .map_err(|_| JsExecutionError::new("Failed to deserialize circuit. 
This is likely due to differing serialization formats between ACVM_JS and your compiler".to_string(), None))?; + + let executor = ProgramExecutor::new(&program.functions, &solver.0, foreign_call_executor); + let witness_stack = executor.execute(initial_witness.into()).await?; + + Ok(witness_stack) +} + +struct ProgramExecutor<'a, B: BlackBoxFunctionSolver> { + functions: &'a [Circuit], + + blackbox_solver: &'a B, + + foreign_call_handler: &'a ForeignCallHandler, +} + +impl<'a, B: BlackBoxFunctionSolver> ProgramExecutor<'a, B> { + fn new( + functions: &'a [Circuit], + blackbox_solver: &'a B, + foreign_call_handler: &'a ForeignCallHandler, + ) -> Self { + ProgramExecutor { functions, blackbox_solver, foreign_call_handler } + } + + async fn execute(&self, initial_witness: WitnessMap) -> Result { + let main = &self.functions[0]; + + let mut witness_stack = WitnessStack::default(); + let main_witness = self.execute_circuit(main, initial_witness, &mut witness_stack).await?; + witness_stack.push(0, main_witness); + Ok(witness_stack) + } + + fn execute_circuit( + &'a self, + circuit: &'a Circuit, + initial_witness: WitnessMap, + witness_stack: &'a mut WitnessStack, + ) -> Pin> + 'a>> { + Box::pin(async { + let mut acvm = ACVM::new(self.blackbox_solver, &circuit.opcodes, initial_witness); + + loop { + let solver_status = acvm.solve(); + + match solver_status { + ACVMStatus::Solved => break, + ACVMStatus::InProgress => { + unreachable!("Execution should not stop while in `InProgress` state.") } - OpcodeResolutionError::BrilligFunctionFailed { call_stack, .. } => { - let failing_opcode = - call_stack.last().expect("Brillig error call stacks cannot be empty"); - (circuit.get_assert_message(*failing_opcode), Some(call_stack.clone())) + ACVMStatus::Failure(error) => { + let (assert_message, call_stack) = match &error { + OpcodeResolutionError::UnsatisfiedConstrain { + opcode_location: ErrorLocation::Resolved(opcode_location), + } + | OpcodeResolutionError::IndexOutOfBounds { + opcode_location: ErrorLocation::Resolved(opcode_location), + .. + } => ( + circuit.get_assert_message(*opcode_location), + Some(vec![*opcode_location]), + ), + OpcodeResolutionError::BrilligFunctionFailed { call_stack, .. 
} => { + let failing_opcode = call_stack + .last() + .expect("Brillig error call stacks cannot be empty"); + ( + circuit.get_assert_message(*failing_opcode), + Some(call_stack.clone()), + ) + } + _ => (None, None), + }; + + let error_string = match &assert_message { + Some(assert_message) => format!("Assertion failed: {}", assert_message), + None => error.to_string(), + }; + + return Err(JsExecutionError::new(error_string, call_stack).into()); } - _ => (None, None), - }; - - let error_string = match &assert_message { - Some(assert_message) => format!("Assertion failed: {}", assert_message), - None => error.to_string(), - }; + ACVMStatus::RequiresForeignCall(foreign_call) => { + let result = + resolve_brillig(self.foreign_call_handler, &foreign_call).await?; - return Err(JsExecutionError::new(error_string.into(), call_stack).into()); + acvm.resolve_pending_foreign_call(result); + } + ACVMStatus::RequiresAcirCall(call_info) => { + let acir_to_call = &self.functions[call_info.id as usize]; + let initial_witness = call_info.initial_witness; + let call_solved_witness = self + .execute_circuit(acir_to_call, initial_witness, witness_stack) + .await?; + let mut call_resolved_outputs = Vec::new(); + for return_witness_index in acir_to_call.return_values.indices() { + if let Some(return_value) = + call_solved_witness.get_index(return_witness_index) + { + call_resolved_outputs.push(*return_value); + } else { + // TODO: look at changing this call stack from None + return Err(JsExecutionError::new(format!("Failed to read from solved witness of ACIR call at witness {}", return_witness_index), None).into()); + } + } + acvm.resolve_pending_acir_call(call_resolved_outputs); + witness_stack.push(call_info.id, call_solved_witness.clone()); + } + } } - ACVMStatus::RequiresForeignCall(foreign_call) => { - let result = resolve_brillig(&foreign_call_handler, &foreign_call).await?; - acvm.resolve_pending_foreign_call(result); - } - ACVMStatus::RequiresAcirCall(_) => { - todo!("Handle acir calls in acvm JS"); - } - } + Ok(acvm.finalize()) + }) } - - let witness_map = acvm.finalize(); - Ok(witness_map.into()) } diff --git a/acvm-repo/acvm_js/src/js_witness_stack.rs b/acvm-repo/acvm_js/src/js_witness_stack.rs new file mode 100644 index 00000000000..59f2dbc051e --- /dev/null +++ b/acvm-repo/acvm_js/src/js_witness_stack.rs @@ -0,0 +1,71 @@ +use acvm::acir::native_types::WitnessStack; +use js_sys::{Array, Map, Object}; +use wasm_bindgen::prelude::{wasm_bindgen, JsValue}; + +use crate::JsWitnessMap; + +#[wasm_bindgen(typescript_custom_section)] +const WITNESS_STACK: &'static str = r#" +export type StackItem = { + index: number; + witness: WitnessMap; +} + +export type WitnessStack = Array; +"#; + +// WitnessStack +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(extends = Array, js_name = "WitnessStack", typescript_type = "WitnessStack")] + #[derive(Clone, Debug, PartialEq, Eq)] + pub type JsWitnessStack; + + #[wasm_bindgen(constructor, js_class = "Array")] + pub fn new() -> JsWitnessStack; + + #[wasm_bindgen(extends = Object, js_name = "StackItem", typescript_type = "StackItem")] + #[derive(Clone, Debug, PartialEq, Eq)] + pub type JsStackItem; + + #[wasm_bindgen(constructor, js_class = "Object")] + pub fn new() -> JsStackItem; +} + +impl Default for JsWitnessStack { + fn default() -> Self { + Self::new() + } +} + +impl From for JsWitnessStack { + fn from(mut witness_stack: WitnessStack) -> Self { + let js_witness_stack = JsWitnessStack::new(); + while let Some(stack_item) = witness_stack.pop() { + let js_map = 
JsWitnessMap::from(stack_item.witness); + let js_index = JsValue::from_f64(stack_item.index.into()); + + let entry_map = Map::new(); + entry_map.set(&JsValue::from_str("index"), &js_index); + entry_map.set(&JsValue::from_str("witness"), &js_map); + let stack_item = Object::from_entries(&entry_map).unwrap(); + + js_witness_stack.push(&stack_item); + } + // `reverse()` returns an `Array` so we have to wrap it + JsWitnessStack { obj: js_witness_stack.reverse() } + } +} + +impl From for WitnessStack { + fn from(js_witness_stack: JsWitnessStack) -> Self { + let mut witness_stack = WitnessStack::default(); + js_witness_stack.for_each(&mut |stack_item, _, _| { + let values_array = Object::values(&Object::from(stack_item)); + let index = values_array.get(0).as_f64().unwrap() as u32; + let js_witness_map: JsWitnessMap = values_array.get(1).into(); + witness_stack.push(index, js_witness_map.into()); + }); + witness_stack + } +} diff --git a/acvm-repo/acvm_js/src/lib.rs b/acvm-repo/acvm_js/src/lib.rs index 88afd1767c9..d7ecc0ae192 100644 --- a/acvm-repo/acvm_js/src/lib.rs +++ b/acvm-repo/acvm_js/src/lib.rs @@ -1,31 +1,31 @@ -#![forbid(unsafe_code)] #![warn(unreachable_pub)] #![warn(clippy::semicolon_if_nothing_returned)] #![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] -// TODO: Absence of per package targets -// https://doc.rust-lang.org/cargo/reference/unstable.html#per-package-target -// otherwise could be reorganized to make this file more pretty. +mod black_box_solvers; +mod build_info; +mod compression; +mod execute; +mod foreign_call; +mod js_execution_error; +mod js_witness_map; +mod js_witness_stack; +mod logging; +mod public_witness; -cfg_if::cfg_if! { - if #[cfg(target_arch = "wasm32")] { - mod build_info; - mod compression; - mod execute; - mod foreign_call; - mod js_witness_map; - mod logging; - mod public_witness; - mod js_execution_error; - mod black_box_solvers; - - pub use black_box_solvers::{and, xor, sha256, blake2s256, keccak256, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify}; - pub use build_info::build_info; - pub use compression::{compress_witness, decompress_witness}; - pub use execute::{execute_circuit, execute_circuit_with_black_box_solver, create_black_box_solver}; - pub use js_witness_map::JsWitnessMap; - pub use logging::init_log_level; - pub use public_witness::{get_public_parameters_witness, get_public_witness, get_return_witness}; - pub use js_execution_error::JsExecutionError; - } -} +pub use black_box_solvers::{ + and, blake2s256, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, sha256, xor, +}; +pub use build_info::build_info; +pub use compression::{ + compress_witness, compress_witness_stack, decompress_witness, decompress_witness_stack, +}; +pub use execute::{ + create_black_box_solver, execute_circuit, execute_circuit_with_black_box_solver, + execute_program, execute_program_with_black_box_solver, +}; +pub use js_execution_error::JsExecutionError; +pub use js_witness_map::JsWitnessMap; +pub use js_witness_stack::JsWitnessStack; +pub use logging::init_log_level; +pub use public_witness::{get_public_parameters_witness, get_public_witness, get_return_witness}; diff --git a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index adee3c15312..32487f8bbba 100644 --- a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -6,6 +6,9 @@ import { WasmBlackBoxFunctionSolver, WitnessMap, ForeignCallHandler, + 
executeProgram, + WitnessStack, + StackItem, } from '@noir-lang/acvm_js'; it('successfully executes circuit and extracts return value', async () => { @@ -157,3 +160,37 @@ it('successfully executes 500 circuits with same backend', async function () { expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); } }); + +/** + * Below are all the same tests as above but using `executeProgram` + * TODO: also add a couple tests for executing multiple circuits + */ +it('executeProgram: successfully executes program and extracts return value', async () => { + const { bytecode, initialWitnessMap, resultWitness, expectedResult } = await import('../shared/addition'); + + const witnessStack: WitnessStack = await executeProgram(bytecode, initialWitnessMap, () => { + throw Error('unexpected oracle'); + }); + + const solvedStackItem: StackItem = witnessStack[0]; + expect(solvedStackItem.index).to.be.eq(0); + const solvedWitnessMap: WitnessMap = solvedStackItem.witness; + + // Witness stack should be consistent with initial witness + initialWitnessMap.forEach((value, key) => { + expect(solvedWitnessMap.get(key) as string).to.be.eq(value); + }); + + // Solved witness should contain expected return value + expect(solvedWitnessMap.get(resultWitness)).to.be.eq(expectedResult); +}); + +it('executeProgram: successfully process a program of acir functions with a nested call', async () => { + const { bytecode, initialWitnessMap, expectedWitnessStack } = await import('../shared/nested_acir_call'); + + const witnessStack: WitnessStack = await executeProgram(bytecode, initialWitnessMap, () => { + throw Error('unexpected oracle'); + }); + + expect(witnessStack).to.be.deep.eq(expectedWitnessStack); +}); diff --git a/acvm-repo/acvm_js/test/node/witness_conversion.test.ts b/acvm-repo/acvm_js/test/node/witness_conversion.test.ts index 41291c894ea..c6dccb4c83d 100644 --- a/acvm-repo/acvm_js/test/node/witness_conversion.test.ts +++ b/acvm-repo/acvm_js/test/node/witness_conversion.test.ts @@ -1,6 +1,7 @@ import { expect } from 'chai'; -import { compressWitness, decompressWitness } from '@noir-lang/acvm_js'; +import { compressWitness, decompressWitness, compressWitnessStack, decompressWitnessStack } from '@noir-lang/acvm_js'; import { expectedCompressedWitnessMap, expectedWitnessMap } from '../shared/witness_compression'; +import { expectedCompressedWitnessStack, expectedWitnessStack } from '../shared/nested_acir_call'; it('successfully compresses the witness', () => { const compressedWitnessMap = compressWitness(expectedWitnessMap); @@ -13,3 +14,15 @@ it('successfully decompresses the witness', () => { expect(witnessMap).to.be.deep.eq(expectedWitnessMap); }); + +it('successfully compresses the witness stack', () => { + const compressedWitnessStack = compressWitnessStack(expectedWitnessStack); + + expect(compressedWitnessStack).to.be.deep.eq(expectedCompressedWitnessStack); +}); + +it('successfully decompresses the witness stack', () => { + const witnessStack = decompressWitnessStack(expectedCompressedWitnessStack); + + expect(witnessStack).to.be.deep.eq(expectedWitnessStack); +}); diff --git a/acvm-repo/acvm_js/test/shared/nested_acir_call.ts b/acvm-repo/acvm_js/test/shared/nested_acir_call.ts new file mode 100644 index 00000000000..1b745ab6a79 --- /dev/null +++ b/acvm-repo/acvm_js/test/shared/nested_acir_call.ts @@ -0,0 +1,59 @@ +import { WitnessMap, StackItem, WitnessStack } from '@noir-lang/acvm_js'; + +// See `nested_acir_call_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
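+// For reference, this is the Noir program the bytecode encodes (copied from that test's comment):
+//
+//   fn main(x: Field, y: pub Field) {
+//     let z = nested_call(x, y);
+//     let z2 = nested_call(x, y);
+//     assert(z == z2);
+//   }
+//   #[fold]
+//   fn nested_call(x: Field, y: Field) -> Field {
+//     inner_call(x + 2, y)
+//   }
+//   #[fold]
+//   fn inner_call(x: Field, y: Field) -> Field {
+//     assert(x == y);
+//     x
+//   }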
+export const bytecode = Uint8Array.from([ + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 205, 146, 97, 10, 195, 32, 12, 133, 163, 66, 207, 147, 24, 173, 241, 223, 174, 50, + 153, 189, 255, 17, 214, 177, 148, 89, 17, 250, 99, 14, 246, 224, 97, 144, 16, 146, 143, 231, 224, 45, 167, 126, 105, + 217, 109, 118, 91, 248, 200, 168, 225, 248, 191, 106, 114, 208, 233, 104, 188, 233, 139, 223, 137, 108, 51, 139, 113, + 13, 161, 38, 95, 137, 233, 142, 62, 23, 137, 24, 98, 89, 133, 132, 162, 196, 135, 23, 230, 42, 65, 82, 46, 57, 97, + 166, 192, 149, 182, 152, 121, 211, 97, 110, 222, 94, 8, 13, 132, 182, 54, 48, 144, 235, 8, 254, 10, 22, 76, 132, 101, + 231, 237, 229, 23, 189, 213, 54, 119, 15, 83, 212, 199, 172, 175, 79, 113, 51, 48, 198, 253, 207, 84, 13, 204, 141, + 224, 21, 176, 147, 158, 66, 231, 43, 145, 6, 4, 0, 0, +]); + +export const initialWitnessMap: WitnessMap = new Map([ + [0, '0x0000000000000000000000000000000000000000000000000000000000000008'], + [1, '0x000000000000000000000000000000000000000000000000000000000000000a'], +]); + +const inner_call_witness: StackItem = { + index: 2, + witness: new Map([ + [0, '0x000000000000000000000000000000000000000000000000000000000000000a'], + [1, '0x000000000000000000000000000000000000000000000000000000000000000a'], + ]), +}; + +const nested_call_witness: StackItem = { + index: 1, + witness: new Map([ + [0, '0x0000000000000000000000000000000000000000000000000000000000000008'], + [1, '0x000000000000000000000000000000000000000000000000000000000000000a'], + [2, '0x000000000000000000000000000000000000000000000000000000000000000a'], + [3, '0x000000000000000000000000000000000000000000000000000000000000000a'], + ]), +}; + +const main_witness: StackItem = { + index: 0, + witness: new Map([ + [0, '0x0000000000000000000000000000000000000000000000000000000000000008'], + [1, '0x000000000000000000000000000000000000000000000000000000000000000a'], + [2, '0x000000000000000000000000000000000000000000000000000000000000000a'], + [3, '0x000000000000000000000000000000000000000000000000000000000000000a'], + ]), +}; + +export const expectedWitnessStack: WitnessStack = [ + inner_call_witness, + nested_call_witness, + inner_call_witness, + nested_call_witness, + main_witness, +]; + +export const expectedCompressedWitnessStack = Uint8Array.from([ + 31, 139, 8, 0, 0, 0, 0, 0, 2, 255, 237, 145, 177, 13, 0, 32, 8, 4, 17, 117, 31, 75, 75, 87, 113, 255, 37, 44, 196, 5, + 228, 42, 194, 39, 132, 238, 114, 249, 239, 114, 163, 118, 47, 203, 254, 240, 101, 23, 152, 213, 120, 199, 73, 58, 42, + 200, 170, 176, 87, 238, 27, 119, 95, 201, 238, 190, 89, 7, 37, 195, 196, 176, 4, 5, 0, 0, +]); diff --git a/acvm-repo/bn254_blackbox_solver/Cargo.toml b/acvm-repo/bn254_blackbox_solver/Cargo.toml index 396e4aa0146..1ad5103d2cb 100644 --- a/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -17,16 +17,10 @@ acir.workspace = true acvm_blackbox_solver.workspace = true thiserror.workspace = true num-traits.workspace = true +cfg-if = "1.0.0" -rust-embed = { version = "6.6.0", features = [ - "debug-embed", - "interpolate-folder-path", - "include-exclude", -] } - -grumpkin = { version = "0.1.0", package = "noir_grumpkin", features = [ - "std", -] } # BN254 fixed base scalar multiplication solver +# BN254 fixed base scalar multiplication solver +grumpkin = { version = "0.1.0", package = "noir_grumpkin", features = ["std"] } ark-ec = { version = "^0.4.0", default-features = false } ark-ff = { version = "^0.4.0", default-features = false } num-bigint.workspace = true @@ 
-44,15 +38,6 @@ js-sys.workspace = true getrandom.workspace = true wasmer = "4.2.6" -[build-dependencies] -pkg-config = "0.3" -tar = "~0.4.15" -flate2 = "~1.0.1" -reqwest = { version = "0.11.20", default-features = false, features = [ - "rustls-tls", - "blocking", -] } - [features] default = ["bn254"] bn254 = ["acir/bn254"] diff --git a/acvm-repo/bn254_blackbox_solver/build.rs b/acvm-repo/bn254_blackbox_solver/build.rs deleted file mode 100644 index 4269c86aba0..00000000000 --- a/acvm-repo/bn254_blackbox_solver/build.rs +++ /dev/null @@ -1,14 +0,0 @@ -use std::path::PathBuf; - -const BARRETENBERG_BIN_DIR: &str = "BARRETENBERG_BIN_DIR"; - -fn main() -> Result<(), String> { - let out_dir = std::env::var("OUT_DIR").unwrap(); - - let dest_path = PathBuf::from(out_dir.clone()).join("acvm_backend.wasm"); - - println!("cargo:rustc-env={BARRETENBERG_BIN_DIR}={out_dir}"); - std::fs::copy("./src/acvm_backend.wasm", dest_path).unwrap(); - - Ok(()) -} diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs index be0e60ada96..231594170e3 100644 --- a/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -20,10 +20,17 @@ pub struct Bn254BlackBoxSolver { } impl Bn254BlackBoxSolver { - #[cfg(target_arch = "wasm32")] pub async fn initialize() -> Bn254BlackBoxSolver { - let blackbox_vendor = Barretenberg::initialize().await; - Bn254BlackBoxSolver { blackbox_vendor } + // We fallback to the sync initialization of barretenberg on non-wasm targets. + // This ensures that wasm packages consuming this still build on the default target (useful for linting, etc.) + cfg_if::cfg_if! { + if #[cfg(target_arch = "wasm32")] { + let blackbox_vendor = Barretenberg::initialize().await; + Bn254BlackBoxSolver { blackbox_vendor } + } else { + Bn254BlackBoxSolver::new() + } + } } #[cfg(not(target_arch = "wasm32"))] diff --git a/acvm-repo/bn254_blackbox_solver/src/acvm_backend.wasm b/acvm-repo/bn254_blackbox_solver/src/wasm/acvm_backend.wasm similarity index 100% rename from acvm-repo/bn254_blackbox_solver/src/acvm_backend.wasm rename to acvm-repo/bn254_blackbox_solver/src/wasm/acvm_backend.wasm diff --git a/acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs index 10b1ab22a8d..f4f6f56aa99 100644 --- a/acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs +++ b/acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs @@ -45,7 +45,7 @@ pub(crate) struct BackendError(#[from] Error); impl From for BackendError { fn from(value: FeatureError) -> Self { - value.into() + BackendError(Error::FromFeature(value)) } } @@ -76,10 +76,7 @@ use wasmer::{ pub(super) const WASM_SCRATCH_BYTES: usize = 1024; /// Embed the Barretenberg WASM file -#[derive(rust_embed::RustEmbed)] -#[folder = "$BARRETENBERG_BIN_DIR"] -#[include = "acvm_backend.wasm"] -struct Wasm; +const WASM_BIN: &[u8] = include_bytes!("./acvm_backend.wasm"); impl Barretenberg { #[cfg(not(target_arch = "wasm32"))] @@ -287,7 +284,7 @@ fn instance_load() -> (Instance, Memory, Store) { let (memory, mut store, custom_imports) = init_memory_and_state(); - let module = Module::new(&store, Wasm::get("acvm_backend.wasm").unwrap().data).unwrap(); + let module = Module::new(&store, WASM_BIN).unwrap(); (Instance::new(&mut store, &module, &custom_imports).unwrap(), memory, store) } @@ -299,9 +296,7 @@ async fn instance_load() -> (Instance, Memory, Store) { let (memory, mut store, custom_imports) = init_memory_and_state(); - let wasm_binary = 
Wasm::get("acvm_backend.wasm").unwrap().data; - - let js_bytes = unsafe { js_sys::Uint8Array::view(&wasm_binary) }; + let js_bytes = unsafe { js_sys::Uint8Array::view(&WASM_BIN) }; let js_module_promise = WebAssembly::compile(&js_bytes); let js_module: js_sys::WebAssembly::Module = wasm_bindgen_futures::JsFuture::from(js_module_promise).await.unwrap().into(); @@ -309,7 +304,7 @@ async fn instance_load() -> (Instance, Memory, Store) { let js_instance_promise = WebAssembly::instantiate_module(&js_module, &custom_imports.as_jsvalue(&store).into()); let js_instance = wasm_bindgen_futures::JsFuture::from(js_instance_promise).await.unwrap(); - let module: wasmer::Module = (js_module, wasm_binary).into(); + let module = wasmer::Module::from((js_module, WASM_BIN)); let instance: wasmer::Instance = Instance::from_jsvalue(&mut store, &module, &js_instance) .map_err(|_| "Error while creating BlackBox Functions vendor instance") .unwrap(); diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index ab4358739e9..bd33b5ee8fc 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -256,10 +256,11 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { #[cfg(test)] mod test { use acir::brillig::{BlackBoxOp, MemoryAddress}; + use acvm_blackbox_solver::StubbedBlackBoxSolver; use crate::{ black_box::{evaluate_black_box, to_u8_vec, to_value_vec}, - DummyBlackBoxSolver, HeapArray, HeapVector, Memory, + HeapArray, HeapVector, Memory, }; #[test] @@ -280,7 +281,7 @@ mod test { output: HeapArray { pointer: 2.into(), size: 32 }, }; - evaluate_black_box(&op, &DummyBlackBoxSolver, &mut memory).unwrap(); + evaluate_black_box(&op, &StubbedBlackBoxSolver, &mut memory).unwrap(); let result = memory.read_slice(MemoryAddress(result_pointer), 32); diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index 0f430b0d5b2..65654e24720 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -16,7 +16,7 @@ use acir::brillig::{ HeapVector, MemoryAddress, Opcode, ValueOrArray, }; use acir::FieldElement; -use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError}; +use acvm_blackbox_solver::BlackBoxFunctionSolver; use arithmetic::{evaluate_binary_field_op, evaluate_binary_int_op, BrilligArithmeticError}; use black_box::evaluate_black_box; use num_bigint::BigUint; @@ -593,59 +593,10 @@ impl<'a, B: BlackBoxFunctionSolver> VM<'a, B> { } } -pub(crate) struct DummyBlackBoxSolver; - -impl BlackBoxFunctionSolver for DummyBlackBoxSolver { - fn schnorr_verify( - &self, - _public_key_x: &FieldElement, - _public_key_y: &FieldElement, - _signature: &[u8], - _message: &[u8], - ) -> Result { - Ok(true) - } - fn pedersen_commitment( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Ok((2_u128.into(), 3_u128.into())) - } - fn pedersen_hash( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result { - Ok(6_u128.into()) - } - fn fixed_base_scalar_mul( - &self, - _low: &FieldElement, - _high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Ok((4_u128.into(), 5_u128.into())) - } - fn ec_add( - &self, - _input1_x: &FieldElement, - _input1_y: &FieldElement, - _input2_x: &FieldElement, - _input2_y: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Ok((5_u128.into(), 6_u128.into())) - } - fn poseidon2_permutation( - 
&self, - _input: &[FieldElement], - len: u32, - ) -> Result, BlackBoxResolutionError> { - Ok(vec![0_u128.into(); len as usize]) - } -} - #[cfg(test)] mod tests { + use acvm_blackbox_solver::StubbedBlackBoxSolver; + use super::*; #[test] @@ -662,7 +613,7 @@ mod tests { // Start VM let opcodes = [calldata_copy]; - let mut vm = VM::new(calldata, &opcodes, vec![], &DummyBlackBoxSolver); + let mut vm = VM::new(calldata, &opcodes, vec![], &StubbedBlackBoxSolver); // Process a single VM opcode // @@ -706,7 +657,7 @@ mod tests { opcodes.push(Opcode::Jump { location: 3 }); opcodes.push(Opcode::JumpIf { condition: destination, location: 4 }); - let mut vm = VM::new(calldata, &opcodes, vec![], &DummyBlackBoxSolver); + let mut vm = VM::new(calldata, &opcodes, vec![], &StubbedBlackBoxSolver); let status = vm.process_opcode(); assert_eq!(status, VMStatus::InProgress); @@ -763,7 +714,7 @@ mod tests { jump_if_not_opcode, add_opcode, ]; - let mut vm = VM::new(calldata, &opcodes, vec![], &DummyBlackBoxSolver); + let mut vm = VM::new(calldata, &opcodes, vec![], &StubbedBlackBoxSolver); let status = vm.process_opcode(); assert_eq!(status, VMStatus::InProgress); @@ -811,7 +762,7 @@ mod tests { }, Opcode::Stop { return_data_offset: 1, return_data_size: 1 }, ]; - let mut vm = VM::new(calldata, opcodes, vec![], &DummyBlackBoxSolver); + let mut vm = VM::new(calldata, opcodes, vec![], &StubbedBlackBoxSolver); let status = vm.process_opcode(); assert_eq!(status, VMStatus::InProgress); @@ -842,7 +793,7 @@ mod tests { Opcode::Mov { destination: MemoryAddress::from(2), source: MemoryAddress::from(0) }; let opcodes = &[calldata_copy, mov_opcode]; - let mut vm = VM::new(calldata, opcodes, vec![], &DummyBlackBoxSolver); + let mut vm = VM::new(calldata, opcodes, vec![], &StubbedBlackBoxSolver); let status = vm.process_opcode(); assert_eq!(status, VMStatus::InProgress); @@ -898,7 +849,7 @@ mod tests { condition: MemoryAddress(1), }, ]; - let mut vm = VM::new(calldata, opcodes, vec![], &DummyBlackBoxSolver); + let mut vm = VM::new(calldata, opcodes, vec![], &StubbedBlackBoxSolver); let status = vm.process_opcode(); assert_eq!(status, VMStatus::InProgress); @@ -981,7 +932,7 @@ mod tests { .chain(cast_opcodes) .chain([equal_opcode, not_equal_opcode, less_than_opcode, less_than_equal_opcode]) .collect(); - let mut vm = VM::new(calldata, &opcodes, vec![], &DummyBlackBoxSolver); + let mut vm = VM::new(calldata, &opcodes, vec![], &StubbedBlackBoxSolver); // Calldata copy let status = vm.process_opcode(); @@ -1276,14 +1227,14 @@ mod tests { fn brillig_execute_and_get_vm( calldata: Vec, opcodes: &[Opcode], - ) -> VM<'_, DummyBlackBoxSolver> { - let mut vm = VM::new(calldata, opcodes, vec![], &DummyBlackBoxSolver); + ) -> VM<'_, StubbedBlackBoxSolver> { + let mut vm = VM::new(calldata, opcodes, vec![], &StubbedBlackBoxSolver); brillig_execute(&mut vm); assert_eq!(vm.call_stack, vec![]); vm } - fn brillig_execute(vm: &mut VM) { + fn brillig_execute(vm: &mut VM) { loop { let status = vm.process_opcode(); if matches!(status, VMStatus::Finished { .. } | VMStatus::ForeignCallWait { .. 
}) { diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 3ee6f9c21b9..48c30ab6ffa 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -4,20 +4,16 @@ mod utils; use transforms::{ compute_note_hash_and_nullifier::inject_compute_note_hash_and_nullifier, events::{generate_selector_impl, transform_events}, - functions::{transform_function, transform_unconstrained}, - note_interface::generate_note_interface_impl, + functions::{export_fn_abi, transform_function, transform_unconstrained}, + note_interface::{generate_note_interface_impl, inject_note_exports}, storage::{ assign_storage_slots, check_for_storage_definition, check_for_storage_implementation, - generate_storage_implementation, + generate_storage_implementation, generate_storage_layout, }, }; -use noirc_frontend::{ - hir::def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}, - macros_api::{ - CrateId, FileId, HirContext, MacroError, MacroProcessor, SecondaryAttribute, SortedModule, - Span, - }, +use noirc_frontend::macros_api::{ + CrateId, FileId, HirContext, MacroError, MacroProcessor, SortedModule, Span, }; use utils::{ @@ -39,16 +35,6 @@ impl MacroProcessor for AztecMacro { transform(ast, crate_id, file_id, context) } - fn process_collected_defs( - &self, - crate_id: &CrateId, - context: &mut HirContext, - collected_trait_impls: &[UnresolvedTraitImpl], - collected_functions: &mut [UnresolvedFunctions], - ) -> Result<(), (MacroError, FileId)> { - transform_collected_defs(crate_id, context, collected_trait_impls, collected_functions) - } - fn process_typed_ast( &self, crate_id: &CrateId, @@ -90,15 +76,19 @@ fn transform_module(module: &mut SortedModule) -> Result let mut has_transformed_module = false; // Check for a user defined storage struct - let storage_defined = check_for_storage_definition(module); - let storage_implemented = check_for_storage_implementation(module); - if storage_defined && !storage_implemented { - generate_storage_implementation(module)?; + let maybe_storage_struct_name = check_for_storage_definition(module)?; + let storage_defined = maybe_storage_struct_name.is_some(); + + if let Some(storage_struct_name) = maybe_storage_struct_name { + if !check_for_storage_implementation(module, &storage_struct_name) { + generate_storage_implementation(module, &storage_struct_name)?; + } + generate_storage_layout(module, storage_struct_name)?; } - for structure in module.types.iter() { - if structure.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { + for structure in module.types.iter_mut() { + if structure.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)")) { module.impls.push(generate_selector_impl(structure)); has_transformed_module = true; } @@ -139,6 +129,7 @@ fn transform_module(module: &mut SortedModule) -> Result // Apply transformations to the function based on collected attributes if is_private || is_public || is_public_vm { + export_fn_abi(&mut module.types, func)?; transform_function( if is_private { "Private" @@ -185,24 +176,6 @@ fn transform_module(module: &mut SortedModule) -> Result Ok(has_transformed_module) } -fn transform_collected_defs( - crate_id: &CrateId, - context: &mut HirContext, - collected_trait_impls: &[UnresolvedTraitImpl], - collected_functions: &mut [UnresolvedFunctions], -) -> Result<(), (MacroError, FileId)> { - if has_aztec_dependency(crate_id, context) { - inject_compute_note_hash_and_nullifier( - crate_id, - context, - collected_trait_impls, - collected_functions, - ) - } else { - Ok(()) - } -} - 
// // Transform Hir Nodes for Aztec // @@ -212,6 +185,12 @@ fn transform_hir( crate_id: &CrateId, context: &mut HirContext, ) -> Result<(), (AztecMacroError, FileId)> { - transform_events(crate_id, context)?; - assign_storage_slots(crate_id, context) + if has_aztec_dependency(crate_id, context) { + transform_events(crate_id, context)?; + inject_compute_note_hash_and_nullifier(crate_id, context)?; + assign_storage_slots(crate_id, context)?; + inject_note_exports(crate_id, context) + } else { + Ok(()) + } } diff --git a/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs b/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs index fd538dc578b..1b6630935d9 100644 --- a/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs +++ b/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs @@ -1,48 +1,43 @@ use noirc_errors::{Location, Span}; use noirc_frontend::{ graph::CrateId, - hir::{ - def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}, - def_map::{LocalModuleId, ModuleId}, - }, - macros_api::{FileId, HirContext, MacroError}, - node_interner::FuncId, - parse_program, FunctionReturnType, ItemVisibility, NoirFunction, UnresolvedTypeData, + macros_api::{FileId, HirContext}, + parse_program, FunctionReturnType, NoirFunction, Type, UnresolvedTypeData, }; -use crate::utils::hir_utils::fetch_struct_trait_impls; +use crate::utils::{ + errors::AztecMacroError, + hir_utils::{collect_crate_functions, fetch_notes, get_contract_module_data, inject_fn}, +}; // Check if "compute_note_hash_and_nullifier(AztecAddress,Field,Field,Field,[Field; N]) -> [Field; 4]" is defined fn check_for_compute_note_hash_and_nullifier_definition( - functions_data: &[(LocalModuleId, FuncId, NoirFunction)], - module_id: LocalModuleId, + crate_id: &CrateId, + context: &HirContext, ) -> bool { - functions_data.iter().filter(|func_data| func_data.0 == module_id).any(|func_data| { - func_data.2.def.name.0.contents == "compute_note_hash_and_nullifier" - && func_data.2.def.parameters.len() == 5 - && match &func_data.2.def.parameters[0].typ.typ { - UnresolvedTypeData::Named(path, _, _) => path.segments.last().unwrap().0.contents == "AztecAddress", - _ => false, - } - && func_data.2.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement - && func_data.2.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement - && func_data.2.def.parameters[3].typ.typ == UnresolvedTypeData::FieldElement - // checks if the 5th parameter is an array and the Box in - // Array(Option, Box) contains only fields - && match &func_data.2.def.parameters[4].typ.typ { - UnresolvedTypeData::Array(_, inner_type) => { - matches!(inner_type.typ, UnresolvedTypeData::FieldElement) - }, - _ => false, - } + collect_crate_functions(crate_id, context).iter().any(|funct_id| { + let func_data = context.def_interner.function_meta(funct_id); + let func_name = context.def_interner.function_name(funct_id); + func_name == "compute_note_hash_and_nullifier" + && func_data.parameters.len() == 5 + && func_data.parameters.0.first().is_some_and(| (_, typ, _) | match typ { + Type::Struct(struct_typ, _) => struct_typ.borrow().name.0.contents == "AztecAddress", + _ => false + }) + && func_data.parameters.0.get(1).is_some_and(|(_, typ, _)| typ.is_field()) + && func_data.parameters.0.get(2).is_some_and(|(_, typ, _)| typ.is_field()) + && func_data.parameters.0.get(3).is_some_and(|(_, typ, _)| typ.is_field()) + // checks if the 5th parameter is an array and contains only fields + && func_data.parameters.0.get(4).is_some_and(|(_, 
typ, _)| match typ { + Type::Array(_, inner_type) => inner_type.to_owned().is_field(), + _ => false + }) // We check the return type the same way as we did the 5th parameter - && match &func_data.2.def.return_type { + && match &func_data.return_type { FunctionReturnType::Default(_) => false, FunctionReturnType::Ty(unresolved_type) => { match &unresolved_type.typ { - UnresolvedTypeData::Array(_, inner_type) => { - matches!(inner_type.typ, UnresolvedTypeData::FieldElement) - }, + UnresolvedTypeData::Array(_, inner_type) => matches!(inner_type.typ, UnresolvedTypeData::FieldElement), _ => false, } } @@ -53,81 +48,37 @@ fn check_for_compute_note_hash_and_nullifier_definition( pub fn inject_compute_note_hash_and_nullifier( crate_id: &CrateId, context: &mut HirContext, - unresolved_traits_impls: &[UnresolvedTraitImpl], - collected_functions: &mut [UnresolvedFunctions], -) -> Result<(), (MacroError, FileId)> { - // We first fetch modules in this crate which correspond to contracts, along with their file id. - let contract_module_file_ids: Vec<(LocalModuleId, FileId)> = context - .def_map(crate_id) - .expect("ICE: Missing crate in def_map") - .modules() - .iter() - .filter(|(_, module)| module.is_contract) - .map(|(idx, module)| (LocalModuleId(idx), module.location.file)) - .collect(); - - // If the current crate does not contain a contract module we simply skip it. - if contract_module_file_ids.is_empty() { - return Ok(()); - } else if contract_module_file_ids.len() != 1 { - panic!("Found multiple contracts in the same crate"); +) -> Result<(), (AztecMacroError, FileId)> { + if let Some((module_id, file_id)) = get_contract_module_data(context, crate_id) { + // If compute_note_hash_and_nullifier is already defined by the user, we skip auto-generation in order to provide an + // escape hatch for this mechanism. + // TODO(#4647): improve this diagnosis and error messaging. + if check_for_compute_note_hash_and_nullifier_definition(crate_id, context) { + return Ok(()); + } + + // In order to implement compute_note_hash_and_nullifier, we need to know all of the different note types the + // contract might use. These are the types that are marked as #[aztec(note)]. + let note_types = fetch_notes(context) + .iter() + .map(|(_, note)| note.borrow().name.0.contents.clone()) + .collect::>(); + + // We can now generate a version of compute_note_hash_and_nullifier tailored for the contract in this crate. + let func = generate_compute_note_hash_and_nullifier(¬e_types); + + // And inject the newly created function into the contract. + + // TODO(#4373): We don't have a reasonable location for the source code of this autogenerated function, so we simply + // pass an empty span. This function should not produce errors anyway so this should not matter. + let location = Location::new(Span::empty(0), file_id); + + inject_fn(crate_id, context, func, location, module_id, file_id); } - - let (module_id, file_id) = contract_module_file_ids[0]; - - // If compute_note_hash_and_nullifier is already defined by the user, we skip auto-generation in order to provide an - // escape hatch for this mechanism. - // TODO(#4647): improve this diagnosis and error messaging. - if collected_functions.iter().any(|coll_funcs_data| { - check_for_compute_note_hash_and_nullifier_definition(&coll_funcs_data.functions, module_id) - }) { - return Ok(()); - } - - // In order to implement compute_note_hash_and_nullifier, we need to know all of the different note types the - // contract might use. 
These are the types that implement the NoteInterface trait, which provides the - // get_note_type_id function. - let note_types = fetch_struct_trait_impls(context, unresolved_traits_impls, "NoteInterface"); - - // We can now generate a version of compute_note_hash_and_nullifier tailored for the contract in this crate. - let func = generate_compute_note_hash_and_nullifier(¬e_types); - - // And inject the newly created function into the contract. - - // TODO(#4373): We don't have a reasonable location for the source code of this autogenerated function, so we simply - // pass an empty span. This function should not produce errors anyway so this should not matter. - let location = Location::new(Span::empty(0), file_id); - - // These are the same things the ModCollector does when collecting functions: we push the function to the - // NodeInterner, declare it in the module (which checks for duplicate definitions), and finally add it to the list - // on collected but unresolved functions. - - let func_id = context.def_interner.push_empty_fn(); - context.def_interner.push_function( - func_id, - &func.def, - ModuleId { krate: *crate_id, local_id: module_id }, - location, - ); - - context.def_map_mut(crate_id).unwrap() - .modules_mut()[module_id.0] - .declare_function( - func.name_ident().clone(), ItemVisibility::Public, func_id - ).expect( - "Failed to declare the autogenerated compute_note_hash_and_nullifier function, likely due to a duplicate definition. See https://github.com/AztecProtocol/aztec-packages/issues/4647." - ); - - collected_functions - .iter_mut() - .find(|fns| fns.file_id == file_id) - .expect("ICE: no functions found in contract file") - .push_fn(module_id, func_id, func.clone()); - Ok(()) } -fn generate_compute_note_hash_and_nullifier(note_types: &Vec) -> NoirFunction { +fn generate_compute_note_hash_and_nullifier(note_types: &[String]) -> NoirFunction { let function_source = generate_compute_note_hash_and_nullifier_source(note_types); let (function_ast, errors) = parse_program(&function_source); @@ -140,7 +91,7 @@ fn generate_compute_note_hash_and_nullifier(note_types: &Vec) -> NoirFun function_ast.functions.remove(0) } -fn generate_compute_note_hash_and_nullifier_source(note_types: &Vec) -> String { +fn generate_compute_note_hash_and_nullifier_source(note_types: &[String]) -> String { // TODO(#4649): The serialized_note parameter is a fixed-size array, but we don't know what length it should have. // For now we hardcode it to 20, which is the same as MAX_NOTE_FIELDS_LENGTH. diff --git a/aztec_macros/src/transforms/events.rs b/aztec_macros/src/transforms/events.rs index e7e39ed29ba..4f2b70453df 100644 --- a/aztec_macros/src/transforms/events.rs +++ b/aztec_macros/src/transforms/events.rs @@ -16,7 +16,8 @@ use crate::{ chained_dep, utils::{ ast_utils::{ - call, expression, ident, ident_path, make_statement, make_type, path, variable_path, + call, expression, ident, ident_path, is_custom_attribute, make_statement, make_type, + path, variable_path, }, constants::SIGNATURE_PLACEHOLDER, errors::AztecMacroError, @@ -38,7 +39,8 @@ use crate::{ /// This allows developers to emit events without having to write the signature of the event every time they emit it. /// The signature cannot be known at this point since types are not resolved yet, so we use a signature placeholder. /// It'll get resolved after by transforming the HIR. 
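 ///
 /// A sketch of the shape this consumes (hypothetical event struct; the attribute mirrors the
 /// `is_custom_attribute(attr, "aztec(event)")` check used when transforming the module):
 ///
 /// ```ignore
 /// #[aztec(event)]
 /// struct ValueChanged {
 ///     value: Field,
 /// }
 /// ```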
-pub fn generate_selector_impl(structure: &NoirStruct) -> TypeImpl { +pub fn generate_selector_impl(structure: &mut NoirStruct) -> TypeImpl { + structure.attributes.push(SecondaryAttribute::Abi("events".to_string())); let struct_type = make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![], true)); @@ -172,9 +174,9 @@ pub fn transform_events( crate_id: &CrateId, context: &mut HirContext, ) -> Result<(), (AztecMacroError, FileId)> { - for struct_id in collect_crate_structs(crate_id, context) { + for (_, struct_id) in collect_crate_structs(crate_id, context) { let attributes = context.def_interner.struct_attributes(&struct_id); - if attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { + if attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)")) { transform_event(struct_id, &mut context.def_interner)?; } } diff --git a/aztec_macros/src/transforms/functions.rs b/aztec_macros/src/transforms/functions.rs index 9844abc30fe..a3064ecdd01 100644 --- a/aztec_macros/src/transforms/functions.rs +++ b/aztec_macros/src/transforms/functions.rs @@ -1,10 +1,10 @@ use convert_case::{Case, Casing}; use noirc_errors::Span; use noirc_frontend::{ - macros_api::FieldElement, BlockExpression, ConstrainKind, ConstrainStatement, Distinctness, - Expression, ExpressionKind, ForLoopStatement, ForRange, FunctionReturnType, Ident, Literal, - NoirFunction, Param, PathKind, Pattern, Signedness, Statement, StatementKind, UnresolvedType, - UnresolvedTypeData, Visibility, + macros_api::FieldElement, parse_program, BlockExpression, ConstrainKind, ConstrainStatement, + Distinctness, Expression, ExpressionKind, ForLoopStatement, ForRange, FunctionReturnType, + Ident, Literal, NoirFunction, NoirStruct, Param, PathKind, Pattern, Signedness, Statement, + StatementKind, UnresolvedType, UnresolvedTypeData, Visibility, }; use crate::{ @@ -45,13 +45,13 @@ pub fn transform_function( // Add initialization check if insert_init_check { - let init_check = create_init_check(); + let init_check = create_init_check(ty); func.def.body.statements.insert(0, init_check); } // Add assertion for initialization arguments and sender if is_initializer { - func.def.body.statements.insert(0, create_assert_initializer()); + func.def.body.statements.insert(0, create_assert_initializer(ty)); } // Add access to the storage struct @@ -85,7 +85,7 @@ pub fn transform_function( // Before returning mark the contract as initialized if is_initializer { - let mark_initialized = create_mark_as_initialized(); + let mark_initialized = create_mark_as_initialized(ty); func.def.body.statements.push(mark_initialized); } @@ -113,6 +113,92 @@ pub fn transform_function( Ok(()) } +// Generates a global struct containing the original (before transform_function gets executed) function abi that gets exported +// in the contract artifact after compilation. The abi will be later used to decode the function return values in the simulator. 
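For a concrete picture of what `export_fn_abi` produces: given a hypothetical contract function `fn transfer(to: Field, amount: Field) -> Field`, the format strings below would assemble roughly the following Noir source, which is then parsed and its types folded back into the module (the `transfer` name and its parameters are illustrative, not taken from the patch):

    struct transfer_parameters {
        to: Field,
        amount: Field
    }

    #[abi(functions)]
    struct transfer_abi {
        parameters: transfer_parameters,
        return_type: Field,
    }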
+pub fn export_fn_abi( + types: &mut Vec<NoirStruct>, + func: &NoirFunction, +) -> Result<(), AztecMacroError> { + let mut parameters_struct_source: Option<&str> = None; + + let struct_source = format!( + " + struct {}_parameters {{ + {} + }} + ", + func.name(), + func.parameters() + .iter() + .map(|param| { + let param_name = match param.pattern.clone() { + Pattern::Identifier(ident) => Ok(ident.0.contents), + _ => Err(AztecMacroError::CouldNotExportFunctionAbi { + span: Some(param.span), + secondary_message: Some( + "Only identifier patterns are supported".to_owned(), + ), + }), + }; + + format!( + "{}: {}", + param_name.unwrap(), + param.typ.typ.to_string().replace("plain::", "") + ) + }) + .collect::<Vec<String>>() + .join(",\n"), + ); + + if !func.parameters().is_empty() { + parameters_struct_source = Some(&struct_source); + } + + let mut program = String::new(); + + let parameters = if let Some(parameters_struct_source) = parameters_struct_source { + program.push_str(parameters_struct_source); + format!("parameters: {}_parameters,\n", func.name()) + } else { + "".to_string() + }; + + let return_type_str = func.return_type().typ.to_string().replace("plain::", ""); + let return_type = if return_type_str != "()" { + format!("return_type: {},\n", return_type_str) + } else { + "".to_string() + }; + + let export_struct_source = format!( + " + #[abi(functions)] + struct {}_abi {{ + {}{} + }}", + func.name(), + parameters, + return_type + ); + + program.push_str(&export_struct_source); + + let (ast, errors) = parse_program(&program); + if !errors.is_empty() { + return Err(AztecMacroError::CouldNotExportFunctionAbi { + span: None, + secondary_message: Some( + format!("Failed to parse Noir macro code (struct {}_abi). This is either a bug in the compiler or the Noir macro code", func.name()) + ) + }); + } + + let sorted_ast = ast.into_sorted(); + types.extend(sorted_ast.types); + Ok(()) +} + /// Transform Unconstrained /// /// Inserts the following code at the beginning of an unconstrained function @@ -159,9 +245,10 @@ fn create_inputs(ty: &str) -> Param { /// ```noir /// assert_is_initialized(&mut context); /// ``` -fn create_init_check() -> Statement { +fn create_init_check(ty: &str) -> Statement { + let fname = format!("assert_is_initialized_{}", ty.to_case(Case::Snake)); make_statement(StatementKind::Expression(call( - variable_path(chained_dep!("aztec", "initializer", "assert_is_initialized")), + variable_path(chained_dep!("aztec", "initializer", &fname)), vec![mutable_reference("context")], ))) } @@ -172,9 +259,10 @@ fn create_init_check() -> Statement { /// ```noir /// mark_as_initialized(&mut context); /// ``` -fn create_mark_as_initialized() -> Statement { +fn create_mark_as_initialized(ty: &str) -> Statement { + let fname = format!("mark_as_initialized_{}", ty.to_case(Case::Snake)); make_statement(StatementKind::Expression(call( - variable_path(chained_dep!("aztec", "initializer", "mark_as_initialized")), + variable_path(chained_dep!("aztec", "initializer", &fname)), vec![mutable_reference("context")], ))) } @@ -205,13 +293,11 @@ fn create_internal_check(fname: &str) -> Statement { /// ```noir /// assert_initialization_matches_address_preimage(context); /// ``` -fn create_assert_initializer() -> Statement { +fn create_assert_initializer(ty: &str) -> Statement { + let fname = + format!("assert_initialization_matches_address_preimage_{}", ty.to_case(Case::Snake)); make_statement(StatementKind::Expression(call( - variable_path(chained_dep!( - "aztec", - "initializer", -
"assert_initialization_matches_address_preimage" - )), + variable_path(chained_dep!("aztec", "initializer", &fname)), vec![variable("context")], ))) } @@ -223,62 +309,66 @@ fn create_assert_initializer() -> Statement { /// ```noir /// #[aztec(private)] /// fn foo(structInput: SomeStruct, arrayInput: [u8; 10], fieldInput: Field) -> Field { -/// // Create the bounded vec object -/// let mut serialized_args = BoundedVec::new(); +/// // Create the hasher object +/// let mut hasher = Hasher::new(); /// /// // struct inputs call serialize on them to add an array of fields -/// serialized_args.extend_from_array(structInput.serialize()); +/// hasher.add_multiple(structInput.serialize()); /// -/// // Array inputs are iterated over and each element is added to the bounded vec (as a field) +/// // Array inputs are iterated over and each element is added to the hasher (as a field) /// for i in 0..arrayInput.len() { -/// serialized_args.push(arrayInput[i] as Field); +/// hasher.add(arrayInput[i] as Field); /// } -/// // Field inputs are added to the bounded vec -/// serialized_args.push({ident}); +/// // Field inputs are added to the hasher +/// hasher.add({ident}); /// /// // Create the context /// // The inputs (injected by this `create_inputs`) and completed hash object are passed to the context -/// let mut context = PrivateContext::new(inputs, hash_args(serialized_args)); +/// let mut context = PrivateContext::new(inputs, hasher.hash()); /// } /// ``` fn create_context(ty: &str, params: &[Param]) -> Result, AztecMacroError> { let mut injected_expressions: Vec = vec![]; - // `let mut serialized_args = BoundedVec::new();` - let let_serialized_args = mutable_assignment( - "serialized_args", // Assigned to + let hasher_name = "args_hasher"; + + // `let mut args_hasher = Hasher::new();` + let let_hasher = mutable_assignment( + hasher_name, // Assigned to call( - variable_path(chained_dep!("std", "collections", "bounded_vec", "BoundedVec", "new")), // Path - vec![], // args + variable_path(chained_dep!("aztec", "hash", "ArgsHasher", "new")), // Path + vec![], // args ), ); - // Completes: `let mut serialized_args = BoundedVec::new();` - injected_expressions.push(let_serialized_args); + // Completes: `let mut args_hasher = Hasher::new();` + injected_expressions.push(let_hasher); - // Iterate over each of the function parameters, adding to them to the bounded vec + // Iterate over each of the function parameters, adding to them to the hasher for Param { pattern, typ, span, .. } in params { match pattern { Pattern::Identifier(identifier) => { // Match the type to determine the padding to do let unresolved_type = &typ.typ; let expression = match unresolved_type { - // `serialized_args.extend_from_array({ident}.serialize())` - UnresolvedTypeData::Named(..) => add_struct_to_serialized_args(identifier), + // `hasher.add_multiple({ident}.serialize())` + UnresolvedTypeData::Named(..) 
=> add_struct_to_hasher(identifier, hasher_name), UnresolvedTypeData::Array(_, arr_type) => { - add_array_to_serialized_args(identifier, arr_type) + add_array_to_hasher(identifier, arr_type, hasher_name) + } + // `hasher.add({ident})` + UnresolvedTypeData::FieldElement => { + add_field_to_hasher(identifier, hasher_name) } - // `serialized_args.push({ident})` - UnresolvedTypeData::FieldElement => add_field_to_serialized_args(identifier), - // Add the integer to the serialized args, casted to a field - // `serialized_args.push({ident} as Field)` + // Add the integer to the hasher, casted to a field + // `hasher.add({ident} as Field)` UnresolvedTypeData::Integer(..) | UnresolvedTypeData::Bool => { - add_cast_to_serialized_args(identifier) + add_cast_to_hasher(identifier, hasher_name) } UnresolvedTypeData::String(..) => { let (var_bytes, id) = str_to_bytes(identifier); injected_expressions.push(var_bytes); - add_array_to_serialized_args( + add_array_to_hasher( &id, &UnresolvedType { typ: UnresolvedTypeData::Integer( @@ -287,6 +377,7 @@ fn create_context(ty: &str, params: &[Param]) -> Result<Vec<Statement>, AztecMac ), span: None, }, + hasher_name, ) } _ => { @@ -304,10 +395,11 @@ fn create_context(ty: &str, params: &[Param]) -> Result<Vec<Statement>, AztecMac // Create the inputs to the context let inputs_expression = variable("inputs"); - // `hash_args(serialized_args)` - let hash_call = call( - variable_path(chained_dep!("aztec", "hash", "hash_args")), // variable - vec![variable("serialized_args")], // args + // `args_hasher.hash()` + let hash_call = method_call( + variable(hasher_name), // variable + "hash", // method name + vec![], // args ); let path_snippet = ty.to_case(Case::Snake); // e.g. private_context @@ -591,11 +683,11 @@ fn create_context_finish() -> Statement { } // -// Methods to create hash_args inputs +// Methods to create hasher inputs // -fn add_struct_to_serialized_args(identifier: &Ident) -> Statement { - // If this is a struct, we call serialize and add the array to the serialized args +fn add_struct_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement { + // If this is a struct, we call serialize and add the array to the hasher let serialized_call = method_call( variable_path(path(identifier.clone())), // variable "serialize", // method name vec![], // args ); make_statement(StatementKind::Semi(method_call( - variable("serialized_args"), // variable - "extend_from_array", // method name - vec![serialized_call], // args + variable(hasher_name), // variable + "add_multiple", // method name + vec![serialized_call], // args ))) } @@ -625,7 +717,7 @@ fn str_to_bytes(identifier: &Ident) -> (Statement, Ident) { } fn create_loop_over(var: Expression, loop_body: Vec<Statement>) -> Statement { - // If this is an array of primitive types (integers / fields) we can add them each to the serialized args + // If this is an array of primitive types (integers / fields) we can add them each to the hasher // casted to a field let span = var.span; // What will be looped over - // - `serialized_args.push({ident}[i] as Field)` + // - `hasher.add({ident}[i] as Field)` let for_loop_block = expression(ExpressionKind::Block(BlockExpression { statements: loop_body })); @@ -657,66 +749,70 @@ fn create_loop_over(var: Expression, loop_body: Vec<Statement>) -> Statement { })) } -fn add_array_to_serialized_args(identifier: &Ident, arr_type: &UnresolvedType) -> Statement { - // If this
is an array of primitive types (integers / fields) we can add them each to the serialized_args +fn add_array_to_hasher( + identifier: &Ident, + arr_type: &UnresolvedType, + hasher_name: &str, +) -> Statement { + // If this is an array of primitive types (integers / fields) we can add them each to the hasher // casted to a field // Wrap in the semi thing - does that mean ended with semi colon? - // `serialized_args.push({ident}[i] as Field)` + // `hasher.add({ident}[i] as Field)` let arr_index = index_array(identifier.clone(), "i"); - let (add_expression, vec_method_name) = match arr_type.typ { + let (add_expression, hasher_method_name) = match arr_type.typ { UnresolvedTypeData::Named(..) => { - let vec_method_name = "extend_from_array".to_owned(); + let hasher_method_name = "add_multiple".to_owned(); let call = method_call( // All serialize on each element arr_index, // variable "serialize", // method name vec![], // args ); - (call, vec_method_name) + (call, hasher_method_name) } _ => { - let vec_method_name = "push".to_owned(); + let hasher_method_name = "add".to_owned(); let call = cast( arr_index, // lhs - `ident[i]` UnresolvedTypeData::FieldElement, // cast to - `as Field` ); - (call, vec_method_name) + (call, hasher_method_name) } }; let block_statement = make_statement(StatementKind::Semi(method_call( - variable("serialized_args"), // variable - &vec_method_name, // method name + variable(hasher_name), // variable + &hasher_method_name, // method name vec![add_expression], ))); create_loop_over(variable_ident(identifier.clone()), vec![block_statement]) } -fn add_field_to_serialized_args(identifier: &Ident) -> Statement { - // `serialized_args.push({ident})` +fn add_field_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement { + // `hasher.add({ident})` let ident = variable_path(path(identifier.clone())); make_statement(StatementKind::Semi(method_call( - variable("serialized_args"), // variable - "push", // method name - vec![ident], // args + variable(hasher_name), // variable + "add", // method name + vec![ident], // args ))) } -fn add_cast_to_serialized_args(identifier: &Ident) -> Statement { - // `serialized_args.push({ident} as Field)` +fn add_cast_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement { + // `hasher.add({ident} as Field)` // `{ident} as Field` let cast_operation = cast( variable_path(path(identifier.clone())), // lhs UnresolvedTypeData::FieldElement, // rhs ); - // `serialized_args.push({ident} as Field)` + // `hasher.add({ident} as Field)` make_statement(StatementKind::Semi(method_call( - variable("serialized_args"), // variable - "push", // method name - vec![cast_operation], // args + variable(hasher_name), // variable + "add", // method name + vec![cast_operation], // args ))) } diff --git a/aztec_macros/src/transforms/note_interface.rs b/aztec_macros/src/transforms/note_interface.rs index 01d0272088b..0514155824e 100644 --- a/aztec_macros/src/transforms/note_interface.rs +++ b/aztec_macros/src/transforms/note_interface.rs @@ -1,7 +1,11 @@ use noirc_errors::Span; use noirc_frontend::{ - parse_program, parser::SortedModule, ItemVisibility, NoirFunction, NoirStruct, PathKind, - TraitImplItem, TypeImpl, UnresolvedTypeData, UnresolvedTypeExpression, + graph::CrateId, + macros_api::{FileId, HirContext, HirExpression, HirLiteral, HirStatement}, + parse_program, + parser::SortedModule, + ItemVisibility, LetStatement, NoirFunction, NoirStruct, PathKind, TraitImplItem, Type, + TypeImpl, UnresolvedTypeData, UnresolvedTypeExpression, }; use 
regex::Regex; @@ -12,6 +16,7 @@ use crate::{ check_trait_method_implemented, ident, ident_path, is_custom_attribute, make_type, }, errors::AztecMacroError, + hir_utils::{fetch_notes, get_contract_module_data, inject_global}, }, }; @@ -24,7 +29,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt .iter_mut() .filter(|typ| typ.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(note)"))); - let mut note_properties_structs = vec![]; + let mut structs_to_inject = vec![]; for note_struct in annotated_note_structs { // Look for the NoteInterface trait implementation for the note @@ -80,6 +85,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt )), }), }?; + let note_type_id = note_type_id(&note_type); // Automatically inject the header field if it's not present let (header_field_name, _) = if let Some(existing_header) = @@ -138,7 +144,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt &header_field_name.0.contents, note_interface_impl_span, )?; - note_properties_structs.push(note_properties_struct); + structs_to_inject.push(note_properties_struct); let note_properties_fn = generate_note_properties_fn( &note_type, &note_fields, @@ -167,7 +173,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt if !check_trait_method_implemented(trait_impl, "get_note_type_id") { let get_note_type_id_fn = - generate_note_get_type_id(&note_type, note_interface_impl_span)?; + generate_note_get_type_id(&note_type_id, note_interface_impl_span)?; trait_impl.items.push(TraitImplItem::Function(get_note_type_id_fn)); } @@ -178,7 +184,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt } } - module.types.extend(note_properties_structs); + module.types.extend(structs_to_inject); Ok(()) } @@ -245,19 +251,16 @@ fn generate_note_set_header( // Automatically generate the note type id getter method. The id itself is calculated as the concatenation // of the conversion of the characters in the note's struct name to unsigned integers. fn generate_note_get_type_id( - note_type: &str, + note_type_id: &str, impl_span: Option<Span>, ) -> Result<NoirFunction, AztecMacroError> { - // TODO(#4519) Improve automatic note id generation and assignment - let note_id = - note_type.chars().map(|c| (c as u32).to_string()).collect::<Vec<String>>().join(""); let function_source = format!( " fn get_note_type_id() -> Field {{ {} }} ", - note_id + note_type_id ) .to_string(); @@ -443,6 +446,34 @@ fn generate_compute_note_content_hash( Ok(noir_fn) } +fn generate_note_exports_global( + note_type: &str, + note_type_id: &str, +) -> Result<LetStatement, AztecMacroError> { + let struct_source = format!( + " + #[abi(notes)] + global {0}_EXPORTS: (Field, str<{1}>) = ({2},\"{0}\"); + ", + note_type, + note_type_id.len(), + note_type_id + ) + .to_string(); + + let (global_ast, errors) = parse_program(&struct_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementNoteInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (struct {}Exports). This is either a bug in the compiler or the Noir macro code", note_type)), + span: None + }); + } + + let mut global_ast = global_ast.into_sorted(); + Ok(global_ast.globals.pop().unwrap()) +} + // Source code generator functions. These utility methods produce Noir code as strings, that are then parsed and added to the AST.
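As a worked example of the note type id scheme implemented by `note_type_id` below (each character of the note struct's name is mapped to its decimal code point and the digits are concatenated), a hypothetical note struct named `ValueNote` would get:

    // 'V' = 86, 'a' = 97, 'l' = 108, 'u' = 117, 'e' = 101,
    // 'N' = 78, 'o' = 111, 't' = 116, 'e' = 101
    assert_eq!(note_type_id("ValueNote"), "869710811710178111116101");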
fn generate_note_properties_struct_source( @@ -581,3 +612,85 @@ fn generate_note_deserialize_content_source( ) .to_string() } + +// Utility function to generate the note type id as a Field +fn note_type_id(note_type: &str) -> String { + // TODO(#4519) Improve automatic note id generation and assignment + note_type.chars().map(|c| (c as u32).to_string()).collect::<Vec<String>>().join("") +} + +pub fn inject_note_exports( + crate_id: &CrateId, + context: &mut HirContext, +) -> Result<(), (AztecMacroError, FileId)> { + if let Some((module_id, file_id)) = get_contract_module_data(context, crate_id) { + let notes = fetch_notes(context); + + for (_, note) in notes { + let func_id = context + .def_interner + .lookup_method( + &Type::Struct(context.def_interner.get_struct(note.borrow().id), vec![]), + note.borrow().id, + "get_note_type_id", + false, + ) + .ok_or(( + AztecMacroError::CouldNotExportStorageLayout { + span: None, + secondary_message: Some(format!( + "Could not retrieve get_note_type_id function for note {}", + note.borrow().name.0.contents + )), + }, + file_id, + ))?; + let init_function = + context.def_interner.function(&func_id).block(&context.def_interner); + let init_function_statement_id = init_function.statements().first().ok_or(( + AztecMacroError::CouldNotExportStorageLayout { + span: None, + secondary_message: Some(format!( + "Could not retrieve note id statement from function for note {}", + note.borrow().name.0.contents + )), + }, + file_id, + ))?; + let note_id_statement = context.def_interner.statement(init_function_statement_id); + + let note_id_value = match note_id_statement { + HirStatement::Expression(expression_id) => { + match context.def_interner.expression(&expression_id) { + HirExpression::Literal(HirLiteral::Integer(value, _)) => Ok(value), + _ => Err(( + AztecMacroError::CouldNotExportStorageLayout { + span: None, + secondary_message: Some( + "note_id statement must be a literal expression".to_string(), + ), + }, + file_id, + )), + } + } + _ => Err(( + AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some( + "note_id statement must be an expression".to_string(), + ), + }, + file_id, + )), + }?; + let global = generate_note_exports_global( + &note.borrow().name.0.contents, + &note_id_value.to_string(), + ) + .map_err(|err| (err, file_id))?; + + inject_global(crate_id, context, global, module_id, file_id); + } + } + Ok(()) +} diff --git a/aztec_macros/src/transforms/storage.rs b/aztec_macros/src/transforms/storage.rs index 10f44d01bb4..0bfb39cbc71 100644 --- a/aztec_macros/src/transforms/storage.rs +++ b/aztec_macros/src/transforms/storage.rs @@ -1,4 +1,4 @@ -use std::borrow::{Borrow, BorrowMut}; +use std::borrow::Borrow; use noirc_errors::Span; use noirc_frontend::{ @@ -7,33 +7,53 @@ use noirc_frontend::{ FieldElement, FileId, HirContext, HirExpression, HirLiteral, HirStatement, NodeInterner, }, node_interner::{TraitId, TraitImplKind}, + parse_program, parser::SortedModule, + token::SecondaryAttribute, BlockExpression, Expression, ExpressionKind, FunctionDefinition, Ident, Literal, NoirFunction, - PathKind, Pattern, StatementKind, Type, TypeImpl, UnresolvedType, UnresolvedTypeData, + NoirStruct, PathKind, Pattern, StatementKind, Type, TypeImpl, UnresolvedType, + UnresolvedTypeData, }; use crate::{ chained_dep, chained_path, utils::{ ast_utils::{ - call, expression, ident, ident_path, lambda, make_statement, make_type, pattern, - return_type, variable, variable_path, + call, expression, ident, ident_path, is_custom_attribute, lambda, make_statement, + make_type,
pattern, return_type, variable, variable_path, }, errors::AztecMacroError, - hir_utils::{collect_crate_structs, collect_traits}, + hir_utils::{collect_crate_structs, collect_traits, get_contract_module_data}, }, }; // Check to see if the user has defined a storage struct -pub fn check_for_storage_definition(module: &SortedModule) -> bool { - module.types.iter().any(|r#struct| r#struct.name.0.contents == "Storage") +pub fn check_for_storage_definition( + module: &SortedModule, +) -> Result<Option<String>, AztecMacroError> { + let result: Vec<&NoirStruct> = module + .types + .iter() + .filter(|r#struct| { + r#struct.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(storage)")) + }) + .collect(); + if result.len() > 1 { + return Err(AztecMacroError::MultipleStorageDefinitions { + span: result.first().map(|res| res.name.span()), + }); + } + Ok(result.iter().map(|&r#struct| r#struct.name.0.contents.clone()).next()) } // Check to see if the user has defined a storage struct -pub fn check_for_storage_implementation(module: &SortedModule) -> bool { +pub fn check_for_storage_implementation( + module: &SortedModule, + storage_struct_name: &String, +) -> bool { module.impls.iter().any(|r#impl| match &r#impl.object_type.typ { UnresolvedTypeData::Named(path, _, _) => { - path.segments.last().is_some_and(|segment| segment.0.contents == "Storage") + path.segments.last().is_some_and(|segment| segment.0.contents == *storage_struct_name) } _ => false, }) @@ -117,9 +137,15 @@ pub fn generate_storage_field_constructor( /// /// Storage slots are generated as 0 and will be populated using the information from the HIR /// at a later stage. -pub fn generate_storage_implementation(module: &mut SortedModule) -> Result<(), AztecMacroError> { - let definition = - module.types.iter().find(|r#struct| r#struct.name.0.contents == "Storage").unwrap(); +pub fn generate_storage_implementation( + module: &mut SortedModule, + storage_struct_name: &String, +) -> Result<(), AztecMacroError> { + let definition = module + .types + .iter() + .find(|r#struct| r#struct.name.0.contents == *storage_struct_name) + .unwrap(); let slot_zero = expression(ExpressionKind::Literal(Literal::Integer( FieldElement::from(i128::from(0)), @@ -136,7 +162,7 @@ pub fn generate_storage_implementation(module: &mut SortedModule) -> Result<(), .collect(); let storage_constructor_statement = make_statement(StatementKind::Expression(expression( - ExpressionKind::constructor((chained_path!("Storage"), field_constructors)), + ExpressionKind::constructor((chained_path!(storage_struct_name), field_constructors)), ))); let init = NoirFunction::normal(FunctionDefinition::normal( @@ -157,7 +183,7 @@ pub fn generate_storage_implementation(module: &mut SortedModule) -> Result<(), let storage_impl = TypeImpl { object_type: UnresolvedType { - typ: UnresolvedTypeData::Named(chained_path!("Storage"), vec![], true), + typ: UnresolvedTypeData::Named(chained_path!(storage_struct_name), vec![], true), span: Some(Span::default()), }, type_span: Span::default(), @@ -239,16 +265,51 @@ pub fn assign_storage_slots( context: &mut HirContext, ) -> Result<(), (AztecMacroError, FileId)> { let traits: Vec<_> = collect_traits(context); - for struct_id in collect_crate_structs(crate_id, context) { - let interner: &mut NodeInterner = context.def_interner.borrow_mut(); - let r#struct = interner.get_struct(struct_id); - let file_id = r#struct.borrow().location.file; - if r#struct.borrow().name.0.contents == "Storage" && r#struct.borrow().id.krate().is_root() + if let Some((_, file_id)) =
get_contract_module_data(context, crate_id) { + let maybe_storage_struct = + collect_crate_structs(crate_id, context).iter().find_map(|&(_, struct_id)| { + let r#struct = context.def_interner.get_struct(struct_id); + let attributes = context.def_interner.struct_attributes(&struct_id); + if attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(storage)")) + && r#struct.borrow().id.krate().is_root() + { + Some(r#struct) + } else { + None + } + }); + + let maybe_storage_layout = + context.def_interner.get_all_globals().iter().find_map(|global_info| { + let statement = context.def_interner.get_global_let_statement(global_info.id); + if statement.clone().is_some_and(|stmt| { + stmt.attributes + .iter() + .any(|attr| *attr == SecondaryAttribute::Abi("storage".to_string())) + }) { + let expr = context.def_interner.expression(&statement.unwrap().expression); + match expr { + HirExpression::Constructor(hir_constructor_expression) => { + Some(hir_constructor_expression) + } + _ => None, + } + } else { + None + } + }); + + if let (Some(storage_struct), Some(storage_layout)) = + (maybe_storage_struct, maybe_storage_layout) { - let init_id = interner + let init_id = context + .def_interner .lookup_method( - &Type::Struct(interner.get_struct(struct_id), vec![]), - struct_id, + &Type::Struct( + context.def_interner.get_struct(storage_struct.borrow().id), + vec![], + ), + storage_struct.borrow().id, "init", false, ) @@ -260,28 +321,33 @@ pub fn assign_storage_slots( }, file_id, ))?; - let init_function = interner.function(&init_id).block(interner); + let init_function = + context.def_interner.function(&init_id).block(&context.def_interner); let init_function_statement_id = init_function.statements().first().ok_or(( AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some("Init storage statement not found".to_string()), }, file_id, ))?; - let storage_constructor_statement = interner.statement(init_function_statement_id); + let storage_constructor_statement = + context.def_interner.statement(init_function_statement_id); let storage_constructor_expression = match storage_constructor_statement { HirStatement::Expression(expression_id) => { - match interner.expression(&expression_id) { - HirExpression::Constructor(hir_constructor_expression) => { - Ok(hir_constructor_expression) - } - _ => Err((AztecMacroError::CouldNotAssignStorageSlots { + match context.def_interner.expression(&expression_id) { + HirExpression::Constructor(hir_constructor_expression) => { + Ok(hir_constructor_expression) + } + _ => Err(( + AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some( "Storage constructor statement must be a constructor expression" .to_string(), ), - }, file_id)) - } + }, + file_id, + )), + } } _ => Err(( AztecMacroError::CouldNotAssignStorageSlots { @@ -295,9 +361,9 @@ pub fn assign_storage_slots( let mut storage_slot: u64 = 1; for (index, (_, expr_id)) in storage_constructor_expression.fields.iter().enumerate() { - let fields = r#struct.borrow().get_fields(&[]); - let (_, field_type) = fields.get(index).unwrap(); - let new_call_expression = match interner.expression(expr_id) { + let fields = storage_struct.borrow().get_fields(&[]); + let (field_name, field_type) = fields.get(index).unwrap(); + let new_call_expression = match context.def_interner.expression(expr_id) { HirExpression::Call(hir_call_expression) => Ok(hir_call_expression), _ => Err(( AztecMacroError::CouldNotAssignStorageSlots { @@ -310,7 +376,8 @@ pub fn assign_storage_slots( )), }?; - let slot_arg_expression = 
interner.expression(&new_call_expression.arguments[1]); + let slot_arg_expression = + context.def_interner.expression(&new_call_expression.arguments[1]); let current_storage_slot = match slot_arg_expression { HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()), @@ -325,22 +392,123 @@ pub fn assign_storage_slots( )), }?; - if current_storage_slot != 0 { - continue; - } + let storage_layout_field = + storage_layout.fields.iter().find(|field| field.0 .0.contents == *field_name); - let type_serialized_len = get_serialized_length(&traits, field_type, interner) - .map_err(|err| (err, file_id))?; - interner.update_expression(new_call_expression.arguments[1], |expr| { + let storage_layout_slot_expr_id = + if let Some((_, expr_id)) = storage_layout_field { + let expr = context.def_interner.expression(expr_id); + if let HirExpression::Constructor(storage_layout_field_storable_expr) = expr + { + storage_layout_field_storable_expr.fields.iter().find_map( + |(field, expr_id)| { + if field.0.contents == "slot" { + Some(*expr_id) + } else { + None + } + }, + ) + } else { + None + } + } else { + None + } + .ok_or(( + AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some(format!( + "Storage layout field ({}) not found or has an incorrect type", + field_name + )), + }, + file_id, + ))?; + + let new_storage_slot = if current_storage_slot == 0 { + u128::from(storage_slot) + } else { + current_storage_slot + }; + + let type_serialized_len = + get_serialized_length(&traits, field_type, &context.def_interner) + .map_err(|err| (err, file_id))?; + + context.def_interner.update_expression(new_call_expression.arguments[1], |expr| { *expr = HirExpression::Literal(HirLiteral::Integer( - FieldElement::from(u128::from(storage_slot)), + FieldElement::from(new_storage_slot), false, - )); + )) + }); + + context.def_interner.update_expression(storage_layout_slot_expr_id, |expr| { + *expr = HirExpression::Literal(HirLiteral::Integer( + FieldElement::from(new_storage_slot), + false, + )) }); storage_slot += type_serialized_len; } } } + + Ok(()) +} + +pub fn generate_storage_layout( + module: &mut SortedModule, + storage_struct_name: String, +) -> Result<(), AztecMacroError> { + let definition = module + .types + .iter() + .find(|r#struct| r#struct.name.0.contents == *storage_struct_name) + .unwrap(); + + let mut generic_args = vec![]; + let mut storable_fields = vec![]; + let mut storable_fields_impl = vec![]; + + definition.fields.iter().enumerate().for_each(|(index, (field_ident, field_type))| { + storable_fields.push(format!("{}: dep::aztec::prelude::Storable<N{}>", field_ident, index)); + generic_args.push(format!("N{}", index)); + storable_fields_impl.push(format!( + "{}: dep::aztec::prelude::Storable {{ slot: 0, typ: \"{}\" }}", + field_ident, + field_type.to_string().replace("plain::", "") + )); + }); + + let storage_fields_source = format!( + " + struct StorageLayout<{}> {{ + {} + }} + + #[abi(storage)] + global STORAGE_LAYOUT = StorageLayout {{ + {} + }}; + ", + generic_args.join(", "), + storable_fields.join(",\n"), + storable_fields_impl.join(",\n") + ); + + let (struct_ast, errors) = parse_program(&storage_fields_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementNoteInterface { + secondary_message: Some("Failed to parse Noir macro code (struct StorageLayout).
This is either a bug in the compiler or the Noir macro code".to_string()), + span: None + }); + } + + let mut struct_ast = struct_ast.into_sorted(); + module.types.push(struct_ast.types.pop().unwrap()); + module.globals.push(struct_ast.globals.pop().unwrap()); + Ok(()) } diff --git a/aztec_macros/src/utils/ast_utils.rs b/aztec_macros/src/utils/ast_utils.rs index bdcbad646c2..eb8f5a4156d 100644 --- a/aztec_macros/src/utils/ast_utils.rs +++ b/aztec_macros/src/utils/ast_utils.rs @@ -67,6 +67,7 @@ pub fn mutable_assignment(name: &str, assigned_to: Expression) -> Statement { pattern: mutable(name), r#type: make_type(UnresolvedTypeData::Unspecified), expression: assigned_to, + attributes: vec![], })) } @@ -82,6 +83,7 @@ pub fn assignment(name: &str, assigned_to: Expression) -> Statement { pattern: pattern(name), r#type: make_type(UnresolvedTypeData::Unspecified), expression: assigned_to, + attributes: vec![], })) } diff --git a/aztec_macros/src/utils/errors.rs b/aztec_macros/src/utils/errors.rs index 48186555eff..9aead1756f9 100644 --- a/aztec_macros/src/utils/errors.rs +++ b/aztec_macros/src/utils/errors.rs @@ -11,6 +11,9 @@ pub enum AztecMacroError { UnsupportedStorageType { span: Option<Span>, typ: UnresolvedTypeData }, CouldNotAssignStorageSlots { secondary_message: Option<String> }, CouldNotImplementNoteInterface { span: Option<Span>, secondary_message: Option<String> }, + MultipleStorageDefinitions { span: Option<Span> }, + CouldNotExportStorageLayout { span: Option<Span>, secondary_message: Option<String> }, + CouldNotExportFunctionAbi { span: Option<Span>, secondary_message: Option<String> }, EventError { span: Span, message: String }, UnsupportedAttributes { span: Span, secondary_message: Option<String> }, } @@ -46,6 +49,21 @@ impl From<AztecMacroError> for MacroError { AztecMacroError::CouldNotImplementNoteInterface { span, secondary_message } => MacroError { primary_message: "Could not implement automatic methods for note, please provide an implementation of the NoteInterface trait".to_string(), secondary_message, + span + }, + AztecMacroError::MultipleStorageDefinitions { span } => MacroError { + primary_message: "Only one struct can be tagged as #[aztec(storage)]".to_string(), + secondary_message: None, + span, + }, + AztecMacroError::CouldNotExportStorageLayout { secondary_message, span } => MacroError { + primary_message: "Could not generate and export storage layout".to_string(), + secondary_message, + span, + }, + AztecMacroError::CouldNotExportFunctionAbi { secondary_message, span } => MacroError { + primary_message: "Could not generate and export function abi".to_string(), + secondary_message, span, }, AztecMacroError::EventError { span, message } => MacroError { diff --git a/aztec_macros/src/utils/hir_utils.rs b/aztec_macros/src/utils/hir_utils.rs index f31a0584261..c4414e6419b 100644 --- a/aztec_macros/src/utils/hir_utils.rs +++ b/aztec_macros/src/utils/hir_utils.rs @@ -1,22 +1,43 @@ use iter_extended::vecmap; +use noirc_errors::Location; use noirc_frontend::{ graph::CrateId, - hir::def_collector::dc_crate::UnresolvedTraitImpl, - macros_api::{HirContext, ModuleDefId, StructId}, - node_interner::{TraitId, TraitImplId}, - Signedness, Type, UnresolvedTypeData, + hir::{ + def_map::{LocalModuleId, ModuleId}, + resolution::{path_resolver::StandardPathResolver, resolver::Resolver}, + type_check::type_check_func, + }, + macros_api::{FileId, HirContext, ModuleDefId, StructId}, + node_interner::{FuncId, TraitId}, + ItemVisibility, LetStatement, NoirFunction, Shared, Signedness, StructType, Type, }; -pub fn collect_crate_structs(crate_id: &CrateId, context: &HirContext)
-> Vec<StructId> { +use super::ast_utils::is_custom_attribute; + +pub fn collect_crate_structs(crate_id: &CrateId, context: &HirContext) -> Vec<(String, StructId)> { context .def_map(crate_id) .expect("ICE: Missing crate in def_map") .modules() .iter() .flat_map(|(_, module)| { - module.type_definitions().filter_map(|typ| { + module.type_definitions().filter_map(move |typ| { if let ModuleDefId::TypeId(struct_id) = typ { - Some(struct_id) + let module_id = struct_id.module_id(); + let path = + context.fully_qualified_struct_path(context.root_crate_id(), struct_id); + let path = if path.contains("::") { + let prefix = if &module_id.krate == context.root_crate_id() { + "crate" + } else { + "dep" + }; + format!("{}::{}", prefix, path) + } else { + path + }; + + Some((path, struct_id)) } else { None } @@ -25,6 +46,16 @@ pub fn collect_crate_structs(crate_id: &CrateId, context: &HirContext) -> Vec<( } +pub fn collect_crate_functions(crate_id: &CrateId, context: &HirContext) -> Vec<FuncId> { + context + .def_map(crate_id) + .expect("ICE: Missing crate in def_map") + .modules() + .iter() + .flat_map(|(_, module)| module.value_definitions().filter_map(|id| id.as_function())) + .collect() +} + pub fn collect_traits(context: &HirContext) -> Vec<TraitId> { let crates = context.crates(); crates @@ -32,8 +63,8 @@ pub fn collect_traits(context: &HirContext) -> Vec<TraitId> { .flatten() .flat_map(|module| { module.type_definitions().filter_map(|typ| { - if let ModuleDefId::TraitId(struct_id) = typ { - Some(struct_id) + if let ModuleDefId::TraitId(trait_id) = typ { + Some(trait_id) } else { None } @@ -69,50 +100,127 @@ pub fn signature_of_type(typ: &Type) -> String { } } -// Fetches the name of all structs that implement trait_name, both in the current crate and all of its dependencies. -pub fn fetch_struct_trait_impls( - context: &mut HirContext, - unresolved_traits_impls: &[UnresolvedTraitImpl], - trait_name: &str, -) -> Vec<String> { - let mut struct_typenames: Vec<String> = Vec::new(); - - // These structs can be declared in either external crates or the current one. External crates that contain - // dependencies have already been processed and resolved, but are available here via the NodeInterner. Note that - // crates on which the current crate does not depend on may not have been processed, and will be ignored. - for trait_impl_id in 0..context.def_interner.next_trait_impl_id().0 { - let trait_impl = &context.def_interner.get_trait_implementation(TraitImplId(trait_impl_id)); - - if trait_impl.borrow().ident.0.contents == *trait_name { - if let Type::Struct(s, _) = &trait_impl.borrow().typ { - struct_typenames.push(s.borrow().name.0.contents.clone()); +// Fetches the name of all structs tagged as #[aztec(note)] in a given crate +pub fn fetch_crate_notes( + context: &HirContext, + crate_id: &CrateId, +) -> Vec<(String, Shared<StructType>)> { + collect_crate_structs(crate_id, context) + .iter() + .filter_map(|(path, struct_id)| { + let r#struct = context.def_interner.get_struct(*struct_id); + let attributes = context.def_interner.struct_attributes(struct_id); + if attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(note)")) { + Some((path.clone(), r#struct)) } else { - panic!("Found impl for {} on non-Struct", trait_name); + None } - } + }) + .collect() +} + +// Fetches the name of all structs tagged as #[aztec(note)], both in the current crate and all of its dependencies.
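To illustrate the path qualification performed in `collect_crate_structs` above and surfaced by `fetch_crate_notes`: multi-segment struct paths are prefixed with `crate` when the struct lives in the root (contract) crate and with `dep` otherwise, so the returned pairs look roughly like this (struct names and crates are hypothetical):

    // note declared in the contract crate itself:
    ("crate::types::ValueNote", value_note_struct)
    // note imported from a dependency:
    ("dep::some_library::OtherNote", other_note_struct)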
+pub fn fetch_notes(context: &HirContext) -> Vec<(String, Shared<StructType>)> { + context.crates().flat_map(|crate_id| fetch_crate_notes(context, &crate_id)).collect() +} + +pub fn get_contract_module_data( + context: &mut HirContext, + crate_id: &CrateId, +) -> Option<(LocalModuleId, FileId)> { + // We first fetch modules in this crate which correspond to contracts, along with their file id. + let contract_module_file_ids: Vec<(LocalModuleId, FileId)> = context + .def_map(crate_id) + .expect("ICE: Missing crate in def_map") + .modules() + .iter() + .filter(|(_, module)| module.is_contract) + .map(|(idx, module)| (LocalModuleId(idx), module.location.file)) + .collect(); + + // If the current crate does not contain a contract module we simply skip it. More than 1 contract in a crate is forbidden by the compiler + if contract_module_file_ids.is_empty() { + return None; } - // This crate's traits and impls have not yet been resolved, so we look for impls in unresolved_trait_impls. - struct_typenames.extend( - unresolved_traits_impls - .iter() - .filter(|trait_impl| { - trait_impl - .trait_path - .segments - .last() - .expect("ICE: empty trait_impl path") - .0 - .contents - == *trait_name - }) - .filter_map(|trait_impl| match &trait_impl.object_type.typ { - UnresolvedTypeData::Named(path, _, _) => { - Some(path.segments.last().unwrap().0.contents.clone()) - } - _ => None, - }), + Some(contract_module_file_ids[0]) +} + +pub fn inject_fn( + crate_id: &CrateId, + context: &mut HirContext, + func: NoirFunction, + location: Location, + module_id: LocalModuleId, + file_id: FileId, +) { + let func_id = context.def_interner.push_empty_fn(); + context.def_interner.push_function( + func_id, + &func.def, + ModuleId { krate: *crate_id, local_id: module_id }, + location, + ); + + context.def_map_mut(crate_id).unwrap().modules_mut()[module_id.0] + .declare_function(func.name_ident().clone(), ItemVisibility::Public, func_id) + .unwrap_or_else(|_| { + panic!( + "Failed to declare autogenerated {} function, likely due to a duplicate definition", + func.name() + ) + }); + + let def_maps = &mut context.def_maps; + + let path_resolver = + StandardPathResolver::new(ModuleId { local_id: module_id, krate: *crate_id }); + + let resolver = Resolver::new(&mut context.def_interner, &path_resolver, def_maps, file_id); + + let (hir_func, meta, _) = resolver.resolve_function(func, func_id); + + context.def_interner.push_fn_meta(meta, func_id); + context.def_interner.update_fn(func_id, hir_func); + + type_check_func(&mut context.def_interner, func_id); +} + +pub fn inject_global( + crate_id: &CrateId, + context: &mut HirContext, + global: LetStatement, + module_id: LocalModuleId, + file_id: FileId, +) { + let name = global.pattern.name_ident().clone(); + + let global_id = context.def_interner.push_empty_global( + name.clone(), + module_id, + file_id, + global.attributes.clone(), ); - struct_typenames + // Add the statement to the scope so its path can be looked up later + context.def_map_mut(crate_id).unwrap().modules_mut()[module_id.0] + .declare_global(name, global_id) + .unwrap_or_else(|(name, _)| { + panic!( + "Failed to declare autogenerated {} global, likely due to a duplicate definition", + name + ) + }); + + let def_maps = &mut context.def_maps; + + let path_resolver = + StandardPathResolver::new(ModuleId { local_id: module_id, krate: *crate_id }); + + let mut resolver = Resolver::new(&mut context.def_interner, &path_resolver, def_maps, file_id); + + let hir_stmt = resolver.resolve_global_let(global, global_id); + + let
statement_id = context.def_interner.get_global(global_id).let_statement; + context.def_interner.replace_statement(statement_id, hir_stmt); } diff --git a/compiler/noirc_driver/build.rs b/compiler/noirc_driver/build.rs index 73a56142075..2ed109398a4 100644 --- a/compiler/noirc_driver/build.rs +++ b/compiler/noirc_driver/build.rs @@ -2,8 +2,7 @@ const GIT_COMMIT: &&str = &"GIT_COMMIT"; use std::path::Path; fn main() { - // Only use build_data if the environment variable isn't set - // The environment variable is always set when working via Nix + // Only use build_data if the environment variable isn't set. if std::env::var(GIT_COMMIT).is_err() { build_data::set_GIT_COMMIT(); build_data::set_GIT_DIRTY(); diff --git a/compiler/noirc_driver/src/abi_gen.rs b/compiler/noirc_driver/src/abi_gen.rs index 7fafa719186..86f10818dbc 100644 --- a/compiler/noirc_driver/src/abi_gen.rs +++ b/compiler/noirc_driver/src/abi_gen.rs @@ -2,10 +2,11 @@ use std::collections::BTreeMap; use acvm::acir::native_types::Witness; use iter_extended::{btree_map, vecmap}; -use noirc_abi::{Abi, AbiParameter, AbiReturnType, AbiType}; +use noirc_abi::{Abi, AbiParameter, AbiReturnType, AbiType, AbiValue}; use noirc_frontend::{ hir::Context, - hir_def::{function::Param, stmt::HirPattern}, + hir_def::{expr::HirArrayLiteral, function::Param, stmt::HirPattern}, + macros_api::{HirExpression, HirLiteral}, node_interner::{FuncId, NodeInterner}, Visibility, }; @@ -109,6 +110,60 @@ fn collapse_ranges(witnesses: &[Witness]) -> Vec> { wit } +pub(super) fn value_from_hir_expression(context: &Context, expression: HirExpression) -> AbiValue { + match expression { + HirExpression::Tuple(expr_ids) => { + let fields = expr_ids + .iter() + .map(|expr_id| { + value_from_hir_expression(context, context.def_interner.expression(expr_id)) + }) + .collect(); + AbiValue::Tuple { fields } + } + HirExpression::Constructor(constructor) => { + let fields = constructor + .fields + .iter() + .map(|(ident, expr_id)| { + ( + ident.0.contents.to_string(), + value_from_hir_expression( + context, + context.def_interner.expression(expr_id), + ), + ) + }) + .collect(); + AbiValue::Struct { fields } + } + HirExpression::Literal(literal) => match literal { + HirLiteral::Array(hir_array) => match hir_array { + HirArrayLiteral::Standard(expr_ids) => { + let value = expr_ids + .iter() + .map(|expr_id| { + value_from_hir_expression( + context, + context.def_interner.expression(expr_id), + ) + }) + .collect(); + AbiValue::Array { value } + } + _ => unreachable!("Repeated arrays cannot be used in the abi"), + }, + HirLiteral::Bool(value) => AbiValue::Boolean { value }, + HirLiteral::Str(value) => AbiValue::String { value }, + HirLiteral::Integer(field, sign) => { + AbiValue::Integer { value: field.to_string(), sign } + } + _ => unreachable!("Literal cannot be used in the abi"), + }, + _ => unreachable!("Type cannot be used in the abi {:?}", expression), + } +} + #[cfg(test)] mod test { use std::ops::Range; diff --git a/compiler/noirc_driver/src/contract.rs b/compiler/noirc_driver/src/contract.rs index 9a0e25a321b..a33a9b809d3 100644 --- a/compiler/noirc_driver/src/contract.rs +++ b/compiler/noirc_driver/src/contract.rs @@ -1,14 +1,20 @@ use serde::{Deserialize, Serialize}; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap}; use acvm::acir::circuit::Program; use fm::FileId; -use noirc_abi::{Abi, ContractEvent}; +use noirc_abi::{Abi, AbiType, AbiValue}; use noirc_errors::debug_info::DebugInfo; use noirc_evaluator::errors::SsaReport; use 
super::debug::DebugFile; +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CompiledContractOutputs { + pub structs: HashMap<String, Vec<AbiType>>, + pub globals: HashMap<String, Vec<AbiValue>>, +} + #[derive(Clone, Debug, Serialize, Deserialize)] pub struct CompiledContract { pub noir_version: String, @@ -19,10 +25,7 @@ pub struct CompiledContract { /// stored in this `Vector`. pub functions: Vec<ContractFunction>, - /// All the events defined inside the contract scope. - /// An event is a struct value that can be emitted via oracles - /// by any contract function during execution. - pub events: Vec<ContractEvent>, + pub outputs: CompiledContractOutputs, pub file_map: BTreeMap<FileId, DebugFile>, pub warnings: Vec<SsaReport>, @@ -51,4 +54,7 @@ pub struct ContractFunction { pub bytecode: Program, pub debug: DebugInfo, + + /// Names of the functions in the program. These are used for more informative debugging and benchmarking. + pub names: Vec<String>, } diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 86d4cd1510d..6fe44780484 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -3,14 +3,16 @@ #![warn(unreachable_pub)] #![warn(clippy::semicolon_if_nothing_returned)] +use abi_gen::value_from_hir_expression; use acvm::acir::circuit::ExpressionWidth; use clap::Args; use fm::{FileId, FileManager}; use iter_extended::vecmap; -use noirc_abi::{AbiParameter, AbiType, ContractEvent}; +use noirc_abi::{AbiParameter, AbiType, AbiValue}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; use noirc_evaluator::create_program; use noirc_evaluator::errors::RuntimeError; +use noirc_evaluator::ssa::SsaProgramArtifact; use noirc_frontend::debug::build_debug_crate_file; use noirc_frontend::graph::{CrateId, CrateName}; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; @@ -33,7 +35,7 @@ mod stdlib; use debug::filter_relevant_files; -pub use contract::{CompiledContract, ContractFunction}; +pub use contract::{CompiledContract, CompiledContractOutputs, ContractFunction}; pub use debug::DebugFile; pub use program::CompiledProgram; @@ -423,6 +425,7 @@ fn compile_contract_inner( bytecode: function.program, debug: function.debug, is_unconstrained: modifiers.is_unconstrained, + names: function.names, }); } @@ -430,18 +433,51 @@ fn compile_contract_inner( let debug_infos: Vec<_> = functions.iter().map(|function| function.debug.clone()).collect(); let file_map = filter_relevant_files(&debug_infos, &context.file_manager); + let out_structs = contract + .outputs + .structs + .into_iter() + .map(|(tag, structs)| { + let structs = structs + .into_iter() + .map(|struct_id| { + let typ = context.def_interner.get_struct(struct_id); + let typ = typ.borrow(); + let fields = vecmap(typ.get_fields(&[]), |(name, typ)| { + (name, AbiType::from_type(context, &typ)) + }); + let path = + context.fully_qualified_struct_path(context.root_crate_id(), typ.id); + AbiType::Struct { path, fields } + }) + .collect(); + (tag.to_string(), structs) + }) + .collect(); + + let out_globals = contract + .outputs + .globals + .iter() + .map(|(tag, globals)| { + let globals: Vec<AbiValue> = globals + .iter() + .map(|global_id| { + let let_statement = + context.def_interner.get_global_let_statement(*global_id).unwrap(); + let hir_expression = + context.def_interner.expression(&let_statement.expression); + value_from_hir_expression(context, hir_expression) + }) + .collect(); + (tag.to_string(), globals) + }) + .collect(); + Ok(CompiledContract { name: contract.name, - events: contract - .events - .iter() - .map(|event_id| { - let typ = context.def_interner.get_struct(*event_id); - let
typ = typ.borrow(); - ContractEvent::from_struct_type(context, &typ) - }) - .collect(), functions, + outputs: CompiledContractOutputs { structs: out_structs, globals: out_globals }, file_map, noir_version: NOIR_ARTIFACT_VERSION_STRING.to_string(), warnings, @@ -485,7 +521,14 @@ pub fn compile_no_check( } let visibility = program.return_visibility; - let (program, debug, warnings, input_witnesses, return_witnesses) = create_program( + let SsaProgramArtifact { + program, + debug, + warnings, + main_input_witnesses, + main_return_witnesses, + names, + } = create_program( program, options.show_ssa, options.show_brillig, @@ -493,8 +536,13 @@ pub fn compile_no_check( options.benchmark_codegen, )?; - let abi = - abi_gen::gen_abi(context, &main_function, input_witnesses, return_witnesses, visibility); + let abi = abi_gen::gen_abi( + context, + &main_function, + main_input_witnesses, + main_return_witnesses, + visibility, + ); let file_map = filter_relevant_files(&debug, &context.file_manager); Ok(CompiledProgram { @@ -510,5 +558,6 @@ pub fn compile_no_check( file_map, noir_version: NOIR_ARTIFACT_VERSION_STRING.to_string(), warnings, + names, }) } diff --git a/compiler/noirc_driver/src/program.rs b/compiler/noirc_driver/src/program.rs index 6f527297dcb..9ffd2d70dda 100644 --- a/compiler/noirc_driver/src/program.rs +++ b/compiler/noirc_driver/src/program.rs @@ -27,4 +27,6 @@ pub struct CompiledProgram { pub debug: DebugInfo, pub file_map: BTreeMap<FileId, DebugFile>, pub warnings: Vec<SsaReport>, + /// Names of the functions in the program. These are used for more informative debugging and benchmarking. + pub names: Vec<String>, } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index 92027026ce8..617e400b92f 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -70,7 +70,7 @@ impl FunctionContext { function_id.to_string() } - fn ssa_type_to_parameter(typ: &Type) -> BrilligParameter { + pub(crate) fn ssa_type_to_parameter(typ: &Type) -> BrilligParameter { match typ { Type::Numeric(_) | Type::Reference(_) => { BrilligParameter::SingleAddr(get_bit_size_from_ssa_type(typ)) } @@ -81,26 +81,13 @@ impl FunctionContext { }), *size, ), - Type::Slice(item_type) => { - BrilligParameter::Slice(vecmap(item_type.iter(), |item_typ| { - FunctionContext::ssa_type_to_parameter(item_typ) - })) + Type::Slice(_) => { + panic!("ICE: Slice parameters cannot be derived from type information") } _ => unimplemented!("Unsupported function parameter/return type {typ:?}"), } } - /// Collects the parameters of a given function - pub(crate) fn parameters(func: &Function) -> Vec<BrilligParameter> { - func.parameters() - .iter() - .map(|&value_id| { - let typ = func.dfg.type_of_value(value_id); - FunctionContext::ssa_type_to_parameter(&typ) - }) - .collect() - } - /// Collects the return values of a given function pub(crate) fn return_values(func: &Function) -> Vec<BrilligParameter> { func.returns() diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs index d10dcf13d9f..8ce15ba4e73 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs @@ -3,7 +3,7 @@ use std::collections::{BTreeMap, HashMap}; use crate::ssa::ir::dfg::CallStack; -/// Represents a parameter or a return value of a function.
+/// Represents a parameter or a return value of an entry point function. #[derive(Debug, Clone)] pub(crate) enum BrilligParameter { /// A single address parameter or return value. Holds the bit size of the parameter. @@ -11,7 +11,8 @@ pub(crate) enum BrilligParameter { /// An array parameter or return value. Holds the type of an array item and its size. Array(Vec<BrilligParameter>, usize), /// A slice parameter or return value. Holds the type of a slice item. - Slice(Vec<BrilligParameter>), + /// Only known-length slices can be passed to brillig entry points, so the size is available as well. + Slice(Vec<BrilligParameter>, usize), } /// The result of compiling and linking brillig artifacts. @@ -108,7 +109,7 @@ impl BrilligArtifact { self.byte_code.append(&mut byte_code); // Remove all resolved external calls and transform them to jumps - let is_resolved = |label: &Label| self.labels.get(label).is_some(); + let is_resolved = |label: &Label| self.labels.contains_key(label); let resolved_external_calls = self .unresolved_external_call_labels diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index 1d823ded718..db872487fcc 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -1,6 +1,6 @@ use super::{ artifact::{BrilligArtifact, BrilligParameter}, - brillig_variable::{BrilligArray, BrilligVariable, SingleAddrVariable}, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable}, debug_show::DebugShow, registers::BrilligRegistersContext, BrilligBinaryOp, BrilligContext, ReservedRegisters, }; @@ -83,24 +83,56 @@ impl BrilligContext { current_calldata_pointer += flattened_size; var } - BrilligParameter::Slice(_) => unimplemented!("Unsupported slices as parameter"), + BrilligParameter::Slice(_, _) => { + let pointer_to_the_array_in_calldata = + self.make_usize_constant_instruction(current_calldata_pointer.into()); + + let flattened_size = BrilligContext::flattened_size(argument); + let size_register = self.make_usize_constant_instruction(flattened_size.into()); + let rc_register = self.make_usize_constant_instruction(1_usize.into()); + + let var = BrilligVariable::BrilligVector(BrilligVector { + pointer: pointer_to_the_array_in_calldata.address, + size: size_register.address, + rc: rc_register.address, + }); + + current_calldata_pointer += flattened_size; + var + } }) .collect(); // Deflatten arrays for (argument_variable, argument) in argument_variables.iter_mut().zip(arguments) { - if let ( - BrilligVariable::BrilligArray(array), - BrilligParameter::Array(item_type, item_count), - ) = (argument_variable, argument) - { - if BrilligContext::has_nested_arrays(item_type) { + match (argument_variable, argument) { + ( + BrilligVariable::BrilligArray(array), + BrilligParameter::Array(item_type, item_count), + ) => { let deflattened_address = self.deflatten_array(item_type, array.size, array.pointer); self.mov_instruction(array.pointer, deflattened_address); array.size = item_type.len() * item_count; self.deallocate_register(deflattened_address); } + ( + BrilligVariable::BrilligVector(vector), + BrilligParameter::Slice(item_type, item_count), + ) => { + let flattened_size = BrilligContext::flattened_size(argument); + + let deflattened_address = + self.deflatten_array(item_type, flattened_size, vector.pointer); + self.mov_instruction(vector.pointer, deflattened_address); + self.usize_const_instruction( + vector.size, + (item_type.len() *
item_count).into(), + ); + + self.deallocate_register(deflattened_address); + } + _ => {} } } } @@ -112,10 +144,10 @@ impl BrilligContext { fn flat_bit_sizes(param: &BrilligParameter) -> Box + '_> { match param { BrilligParameter::SingleAddr(bit_size) => Box::new(std::iter::once(*bit_size)), - BrilligParameter::Array(item_types, item_count) => Box::new( + BrilligParameter::Array(item_types, item_count) + | BrilligParameter::Slice(item_types, item_count) => Box::new( (0..*item_count).flat_map(move |_| item_types.iter().flat_map(flat_bit_sizes)), ), - BrilligParameter::Slice(..) => unimplemented!("Unsupported slices as parameter"), } } @@ -134,13 +166,11 @@ impl BrilligContext { fn flattened_size(param: &BrilligParameter) -> usize { match param { BrilligParameter::SingleAddr(_) => 1, - BrilligParameter::Array(item_types, item_count) => { + BrilligParameter::Array(item_types, item_count) + | BrilligParameter::Slice(item_types, item_count) => { let item_size: usize = item_types.iter().map(BrilligContext::flattened_size).sum(); item_count * item_size } - BrilligParameter::Slice(_) => { - unreachable!("ICE: Slices cannot be passed as entry point arguments") - } } } @@ -457,8 +487,8 @@ mod tests { use acvm::FieldElement; use crate::brillig::brillig_ir::{ - artifact::BrilligParameter, brillig_variable::BrilligArray, + entry_point::BrilligParameter, tests::{create_and_run_vm, create_context, create_entry_point_bytecode}, }; diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 59f2bdc6f84..fac7a7c0829 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -53,6 +53,7 @@ pub(crate) fn optimize_into_acir( .run_pass(Ssa::inline_functions, "After Inlining:") // Run mem2reg with the CFG separated into blocks .run_pass(Ssa::mem2reg, "After Mem2Reg:") + .run_pass(Ssa::as_slice_optimization, "After `as_slice` optimization") .try_run_pass(Ssa::evaluate_assert_constant, "After Assert Constant:")? .try_run_pass(Ssa::unroll_loops, "After Unrolling:")? 
.run_pass(Ssa::simplify_cfg, "After Simplifying:") @@ -61,19 +62,17 @@ pub(crate) fn optimize_into_acir( // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores .run_pass(Ssa::mem2reg, "After Mem2Reg:") .run_pass(Ssa::fold_constants, "After Constant Folding:") + .run_pass(Ssa::remove_enable_side_effects, "After EnableSideEffects removal:") .run_pass(Ssa::fold_constants_using_constraints, "After Constraint Folding:") .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:") + .run_pass(Ssa::array_set_optimization, "After Array Set Optimizations:") .finish(); let brillig = time("SSA to Brillig", print_timings, || ssa.to_brillig(print_brillig_trace)); drop(ssa_gen_span_guard); - let last_array_uses = ssa.find_last_array_uses(); - - time("SSA to ACIR", print_timings, || { - ssa.into_acir(&brillig, abi_distinctness, &last_array_uses) - }) + time("SSA to ACIR", print_timings, || ssa.into_acir(&brillig, abi_distinctness)) } // Helper to time SSA passes @@ -89,6 +88,29 @@ fn time(name: &str, print_timings: bool, f: impl FnOnce() -> T) -> T { result } +#[derive(Default)] +pub struct SsaProgramArtifact { + pub program: AcirProgram, + pub debug: Vec, + pub warnings: Vec, + pub main_input_witnesses: Vec, + pub main_return_witnesses: Vec, + pub names: Vec, +} + +impl SsaProgramArtifact { + fn add_circuit(&mut self, mut circuit_artifact: SsaCircuitArtifact, is_main: bool) { + self.program.functions.push(circuit_artifact.circuit); + self.debug.push(circuit_artifact.debug_info); + self.warnings.append(&mut circuit_artifact.warnings); + if is_main { + self.main_input_witnesses = circuit_artifact.input_witnesses; + self.main_return_witnesses = circuit_artifact.return_witnesses; + } + self.names.push(circuit_artifact.name); + } +} + /// Compiles the [`Program`] into [`ACIR`][acvm::acir::circuit::Program]. /// /// The output ACIR is backend-agnostic and so must go through a transformation pass before usage in proof generation.
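As a rough, standalone sketch of the accumulation pattern `SsaProgramArtifact::add_circuit` introduces above (simplified stand-in types; the real `AcirProgram`, `DebugInfo`, and `Witness` types are elided):

#[derive(Default)]
struct ProgramArtifact {
    names: Vec<String>,
    warnings: Vec<String>,
    main_input_witnesses: Vec<u32>,
}

struct CircuitArtifact {
    name: String,
    warnings: Vec<String>,
    input_witnesses: Vec<u32>,
}

impl ProgramArtifact {
    fn add_circuit(&mut self, mut circuit: CircuitArtifact, is_main: bool) {
        // Warnings accumulate across every circuit...
        self.warnings.append(&mut circuit.warnings);
        // ...but only the main circuit supplies the program-level ABI witnesses.
        if is_main {
            self.main_input_witnesses = circuit.input_witnesses;
        }
        self.names.push(circuit.name);
    }
}

fn main() {
    let mut program = ProgramArtifact::default();
    let circuits = vec![
        CircuitArtifact { name: "main".into(), warnings: vec![], input_witnesses: vec![0, 1] },
        CircuitArtifact { name: "foo".into(), warnings: vec![], input_witnesses: vec![0] },
    ];
    // Mirrors the loop in `create_program` below: the first generated ACIR is `main`.
    let mut is_main = true;
    for circuit in circuits {
        program.add_circuit(circuit, is_main);
        is_main = false;
    }
    assert_eq!(program.names, vec!["main".to_string(), "foo".to_string()]);
    assert_eq!(program.main_input_witnesses, vec![0, 1]);
}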
@@ -100,8 +122,7 @@ pub fn create_program( enable_brillig_logging: bool, force_brillig_output: bool, print_codegen_timings: bool, -) -> Result<(AcirProgram, Vec, Vec, Vec, Vec), RuntimeError> -{ +) -> Result { let debug_variables = program.debug_variables.clone(); let debug_types = program.debug_types.clone(); let debug_functions = program.debug_functions.clone(); @@ -122,37 +143,33 @@ pub fn create_program( "The generated ACIRs should match the supplied function signatures" ); - let mut functions = vec![]; - let mut debug_infos = vec![]; - let mut warning_infos = vec![]; - let mut main_input_witnesses = Vec::new(); - let mut main_return_witnesses = Vec::new(); + let mut program_artifact = SsaProgramArtifact::default(); // For setting up the ABI we need to separately specify main's input and return witnesses let mut is_main = true; for (acir, func_sig) in generated_acirs.into_iter().zip(func_sigs) { - let (circuit, debug_info, warnings, input_witnesses, return_witnesses) = - convert_generated_acir_into_circuit( - acir, - func_sig, - recursive, - // TODO: get rid of these clones - debug_variables.clone(), - debug_functions.clone(), - debug_types.clone(), - ); - functions.push(circuit); - debug_infos.push(debug_info); - warning_infos.extend(warnings); - if is_main { - main_input_witnesses = input_witnesses; - main_return_witnesses = return_witnesses; - } + let circuit_artifact = convert_generated_acir_into_circuit( + acir, + func_sig, + recursive, + // TODO: get rid of these clones + debug_variables.clone(), + debug_functions.clone(), + debug_types.clone(), + ); + program_artifact.add_circuit(circuit_artifact, is_main); is_main = false; } - let program = AcirProgram { functions }; + Ok(program_artifact) } - Ok((program, debug_infos, warning_infos, main_input_witnesses, main_return_witnesses)) +pub struct SsaCircuitArtifact { + name: String, + circuit: Circuit, + debug_info: DebugInfo, + warnings: Vec, + input_witnesses: Vec, + return_witnesses: Vec, } fn convert_generated_acir_into_circuit( @@ -162,7 +179,7 @@ fn convert_generated_acir_into_circuit( debug_variables: DebugVariables, debug_functions: DebugFunctions, debug_types: DebugTypes, -) -> (Circuit, DebugInfo, Vec, Vec, Vec) { +) -> SsaCircuitArtifact { let opcodes = generated_acir.take_opcodes(); let current_witness_index = generated_acir.current_witness_index().0; let GeneratedAcir { @@ -171,11 +188,10 @@ fn convert_generated_acir_into_circuit( input_witnesses, assert_messages, warnings, + name, .. } = generated_acir; - let locations = locations.clone(); - let (public_parameter_witnesses, private_parameters) = split_public_and_private_inputs(&func_sig, &input_witnesses); @@ -189,7 +205,7 @@ fn convert_generated_acir_into_circuit( private_parameters, public_parameters, return_values, - assert_messages: assert_messages.clone().into_iter().collect(), + assert_messages: assert_messages.into_iter().collect(), recursive, }; @@ -205,7 +221,14 @@ fn convert_generated_acir_into_circuit( let (optimized_circuit, transformation_map) = acvm::compiler::optimize(circuit); debug_info.update_acir(transformation_map); - (optimized_circuit, debug_info, warnings, input_witnesses, return_witnesses) + SsaCircuitArtifact { + name, + circuit: optimized_circuit, + debug_info, + warnings, + input_witnesses, + return_witnesses, + } } // Takes each function argument and partitions the circuit's input witnesses according to its visibility.
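The acir_variable.rs hunk below threads the current side-effect predicate into each `Opcode::Call`. A hedged sketch of the intended runtime behaviour, under the assumption (not shown in this diff) that a false predicate skips the callee and zeroes its outputs; all names and types here are illustrative stand-ins, not the real ACVM API:

struct CallOpcode {
    id: u32,
    inputs: Vec<usize>,
    outputs: Vec<usize>,
    predicate: Option<usize>, // witness index of the side-effect condition
}

fn solve_call(call: &CallOpcode, witnesses: &mut [u64]) {
    if let Some(p) = call.predicate {
        if witnesses[p] == 0 {
            // Call is disabled: skip the callee and give the outputs a
            // defined value (zero here) so later opcodes can still be solved.
            for &out in &call.outputs {
                witnesses[out] = 0;
            }
            return;
        }
    }
    // Predicate absent or true: execute circuit `call.id` on `call.inputs`
    // and write results into `call.outputs` (callee execution elided).
}

fn main() {
    let call = CallOpcode { id: 1, inputs: vec![0], outputs: vec![2], predicate: Some(1) };
    let mut witnesses = [7u64, 0, 99];
    solve_call(&call, &mut witnesses);
    // With the predicate witness (index 1) at zero, the call is skipped.
    assert_eq!(witnesses[2], 0);
}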
diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index bcd62e3b062..53d9e2530cc 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -1763,6 +1763,7 @@ impl AcirContext { id: u32, inputs: Vec, output_count: usize, + predicate: AcirVar, ) -> Result, RuntimeError> { let inputs = self.prepare_inputs_for_black_box_func_call(inputs)?; let inputs = inputs @@ -1778,7 +1779,8 @@ impl AcirContext { let results = vecmap(&outputs, |witness_index| self.add_data(AcirVarData::Witness(*witness_index))); - self.acir_ir.push_opcode(Opcode::Call { id, inputs, outputs }); + let predicate = Some(self.var_to_expression(predicate)?); + self.acir_ir.push_opcode(Opcode::Call { id, inputs, outputs, predicate }); Ok(results) } } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 1d05e998b13..b43110b2f5b 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -25,7 +25,7 @@ use iter_extended::vecmap; use num_bigint::BigUint; #[derive(Debug, Default)] -/// The output of the Acir-gen pass +/// The output of the Acir-gen pass, which should only be produced for entry point Acir functions pub(crate) struct GeneratedAcir { /// The next witness index that may be declared. /// If witness index is `None` then we have not yet created a witness @@ -58,6 +58,10 @@ pub(crate) struct GeneratedAcir { pub(crate) assert_messages: BTreeMap, pub(crate) warnings: Vec, + + /// Name for the corresponding entry point represented by this Acir-gen output. + /// Only used for debugging and benchmarking purposes + pub(crate) name: String, } impl GeneratedAcir { diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 96f959612af..9f2cec5c949 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -21,7 +21,7 @@ use super::{ }, ssa_gen::Ssa, }; -use crate::brillig::brillig_ir::artifact::GeneratedBrillig; +use crate::brillig::brillig_ir::artifact::{BrilligParameter, GeneratedBrillig}; use crate::brillig::brillig_ir::BrilligContext; use crate::brillig::{brillig_gen::brillig_fn::FunctionContext as BrilligFunctionContext, Brillig}; use crate::errors::{InternalError, InternalWarning, RuntimeError, SsaReport}; @@ -181,15 +181,15 @@ impl Ssa { self, brillig: &Brillig, abi_distinctness: Distinctness, - last_array_uses: &HashMap, ) -> Result, RuntimeError> { let mut acirs = Vec::new(); // TODO: can we parallelise this? for function in self.functions.values() { let context = Context::new(); - if let Some(generated_acir) = - context.convert_ssa_function(&self, function, brillig, last_array_uses)? + if let Some(mut generated_acir) = + context.convert_ssa_function(&self, function, brillig)? { + generated_acir.name = function.name().to_owned(); acirs.push(generated_acir); } } @@ -245,7 +245,6 @@ impl Context { ssa: &Ssa, function: &Function, brillig: &Brillig, - last_array_uses: &HashMap, ) -> Result, RuntimeError> { match function.runtime() { RuntimeType::Acir(inline_type) => { @@ -257,8 +256,8 @@ impl Context { } } } - // We only want to convert entry point functions. 
This being `main` and those marked with `#[fold]` - Ok(Some(self.convert_acir_main(function, ssa, brillig, last_array_uses)?)) + // We only want to convert entry point functions. This being `main` and those marked with `InlineType::Fold` + Ok(Some(self.convert_acir_main(function, ssa, brillig)?)) } RuntimeType::Brillig => { if function.id() == ssa.main_id { @@ -275,7 +274,6 @@ impl Context { main_func: &Function, ssa: &Ssa, brillig: &Brillig, - last_array_uses: &HashMap, ) -> Result { let dfg = &main_func.dfg; let entry_block = &dfg[main_func.entry_block()]; @@ -284,13 +282,7 @@ impl Context { self.data_bus = dfg.data_bus.to_owned(); let mut warnings = Vec::new(); for instruction_id in entry_block.instructions() { - warnings.extend(self.convert_ssa_instruction( - *instruction_id, - dfg, - ssa, - brillig, - last_array_uses, - )?); + warnings.extend(self.convert_ssa_instruction(*instruction_id, dfg, ssa, brillig)?); } warnings.extend(self.convert_ssa_return(entry_block.unwrap_terminator(), dfg)?); @@ -308,12 +300,14 @@ impl Context { let typ = dfg.type_of_value(*param_id); self.create_value_from_type(&typ, &mut |this, _| Ok(this.acir_context.add_variable())) })?; + let arguments = self.gen_brillig_parameters(dfg[main_func.entry_block()].parameters(), dfg); + let witness_inputs = self.acir_context.extract_witness(&inputs); let outputs: Vec = vecmap(main_func.returns(), |result_id| dfg.type_of_value(*result_id).into()); - let code = self.gen_brillig_for(main_func, brillig)?; + let code = self.gen_brillig_for(main_func, arguments, brillig)?; // We specifically do not attempt execution of the brillig code being generated as this can result in it being // replaced with constraints on witnesses to the program outputs. @@ -463,7 +457,6 @@ impl Context { dfg: &DataFlowGraph, ssa: &Ssa, brillig: &Brillig, - last_array_uses: &HashMap, ) -> Result, RuntimeError> { let instruction = &dfg[instruction_id]; self.acir_context.set_call_stack(dfg.get_call_stack(instruction_id)); @@ -523,7 +516,7 @@ impl Context { self.current_side_effects_enabled_var = acir_var; } Instruction::ArrayGet { .. } | Instruction::ArraySet { .. } => { - self.handle_array_operation(instruction_id, dfg, last_array_uses)?; + self.handle_array_operation(instruction_id, dfg)?; } Instruction::Allocate => { unreachable!("Expected all allocate instructions to be removed before acir_gen") @@ -586,6 +579,7 @@ impl Context { *acir_program_id, inputs, output_count, + self.current_side_effects_enabled_var, )?; let output_values = self.convert_vars_to_values(output_vars, dfg, result_ids); @@ -606,8 +600,9 @@ impl Context { } let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); + let arguments = self.gen_brillig_parameters(arguments, dfg); - let code = self.gen_brillig_for(func, brillig)?; + let code = self.gen_brillig_for(func, arguments, brillig)?; let outputs: Vec = vecmap(result_ids, |result_id| { dfg.type_of_value(*result_id).into() @@ -685,14 +680,49 @@ impl Context { Ok(()) } + fn gen_brillig_parameters( + &self, + values: &[ValueId], + dfg: &DataFlowGraph, + ) -> Vec { + values + .iter() + .map(|&value_id| { + let typ = dfg.type_of_value(value_id); + if let Type::Slice(item_types) = typ { + let len = match self + .ssa_values + .get(&value_id) + .expect("ICE: Unknown slice input to brillig") + { + AcirValue::DynamicArray(AcirDynamicArray { len, .. 
}) => *len, + AcirValue::Array(array) => array.len(), + _ => unreachable!("ICE: Slice value is not an array"), + }; + + BrilligParameter::Slice( + item_types + .iter() + .map(BrilligFunctionContext::ssa_type_to_parameter) + .collect(), + len / item_types.len(), + ) + } else { + BrilligFunctionContext::ssa_type_to_parameter(&typ) + } + }) + .collect() + } + fn gen_brillig_for( &self, func: &Function, + arguments: Vec, brillig: &Brillig, ) -> Result { // Create the entry point artifact let mut entry_point = BrilligContext::new_entry_point_artifact( - BrilligFunctionContext::parameters(func), + arguments, BrilligFunctionContext::return_values(func), BrilligFunctionContext::function_id_to_function_label(func.id()), ); @@ -721,12 +751,16 @@ impl Context { &mut self, instruction: InstructionId, dfg: &DataFlowGraph, - last_array_uses: &HashMap, ) -> Result<(), RuntimeError> { + let mut mutable_array_set = false; + // Pass the instruction between array methods rather than the internal fields themselves let (array, index, store_value) = match dfg[instruction] { Instruction::ArrayGet { array, index } => (array, index, None), - Instruction::ArraySet { array, index, value, .. } => (array, index, Some(value)), + Instruction::ArraySet { array, index, value, mutable } => { + mutable_array_set = mutable; + (array, index, Some(value)) + } _ => { return Err(InternalError::Unexpected { expected: "Instruction should be an ArrayGet or ArraySet".to_owned(), @@ -744,11 +778,8 @@ impl Context { let (new_index, new_value) = self.convert_array_operation_inputs(array, dfg, index, store_value)?; - let resolved_array = dfg.resolve(array); - let map_array = last_array_uses.get(&resolved_array) == Some(&instruction); - if let Some(new_value) = new_value { - self.array_set(instruction, new_index, new_value, dfg, map_array)?; + self.array_set(instruction, new_index, new_value, dfg, mutable_array_set)?; } else { self.array_get(instruction, array, new_index, dfg)?; } @@ -1028,16 +1059,18 @@ impl Context { } } - /// Copy the array and generates a write opcode on the new array - /// - /// Note: Copying the array is inefficient and is not the way we want to do it in the end. + /// If `mutate_array` is: + /// - true: Mutate the array directly + /// - false: Copy the array and generate a write opcode on the new array. This is + /// generally very inefficient and should be avoided if possible. Currently + /// this is controlled by SSA's array set optimization pass.
fn array_set( &mut self, instruction: InstructionId, mut var_index: AcirVar, store_value: AcirValue, dfg: &DataFlowGraph, - map_array: bool, + mutate_array: bool, ) -> Result<(), RuntimeError> { // Pass the instruction between array methods rather than the internal fields themselves let array = match dfg[instruction] { @@ -1075,7 +1108,7 @@ impl Context { .first() .expect("Array set does not have one result"); let result_block_id; - if map_array { + if mutate_array { self.memory_blocks.insert(*result_id, block_id); result_block_id = block_id; } else { @@ -2401,14 +2434,13 @@ mod test { ssa::{ function_builder::FunctionBuilder, ir::{ - function::{FunctionId, InlineType, RuntimeType}, + function::{FunctionId, InlineType}, instruction::BinaryOp, map::Id, types::Type, }, }, }; - use fxhash::FxHashMap as HashMap; fn build_basic_foo_with_return(builder: &mut FunctionBuilder, foo_id: FunctionId) { // acir(fold) fn foo f1 { @@ -2461,11 +2493,7 @@ mod test { let ssa = builder.finish(); let acir_functions = ssa - .into_acir( - &Brillig::default(), - noirc_frontend::Distinctness::Distinct, - &HashMap::default(), - ) + .into_acir(&Brillig::default(), noirc_frontend::Distinctness::Distinct) .expect("Should compile manually written SSA into ACIR"); // Expected result: // main f0 @@ -2516,16 +2544,13 @@ mod test { check_call_opcode(&main_opcodes[0], 1, vec![Witness(0), Witness(1)], vec![Witness(2)]); check_call_opcode(&main_opcodes[1], 1, vec![Witness(0), Witness(1)], vec![Witness(3)]); - match &main_opcodes[2] { - Opcode::AssertZero(expr) => { - assert_eq!(expr.linear_combinations[0].0, FieldElement::from(1u128)); - assert_eq!(expr.linear_combinations[0].1, Witness(2)); + if let Opcode::AssertZero(expr) = &main_opcodes[2] { + assert_eq!(expr.linear_combinations[0].0, FieldElement::from(1u128)); + assert_eq!(expr.linear_combinations[0].1, Witness(2)); - assert_eq!(expr.linear_combinations[1].0, FieldElement::from(-1i128)); - assert_eq!(expr.linear_combinations[1].1, Witness(3)); - assert_eq!(expr.q_c, FieldElement::from(0u128)); - } - _ => {} + assert_eq!(expr.linear_combinations[1].0, FieldElement::from(-1i128)); + assert_eq!(expr.linear_combinations[1].1, Witness(3)); + assert_eq!(expr.q_c, FieldElement::from(0u128)); } } @@ -2564,11 +2589,7 @@ mod test { let ssa = builder.finish(); let acir_functions = ssa - .into_acir( - &Brillig::default(), - noirc_frontend::Distinctness::Distinct, - &HashMap::default(), - ) + .into_acir(&Brillig::default(), noirc_frontend::Distinctness::Distinct) .expect("Should compile manually written SSA into ACIR"); // The expected result should look very similar to the above test except that the input witnesses of the `Call` // opcodes will be different. The changes can be discerned from the checks below. @@ -2659,11 +2680,7 @@ mod test { let ssa = builder.finish(); let acir_functions = ssa - .into_acir( - &Brillig::default(), - noirc_frontend::Distinctness::Distinct, - &HashMap::default(), - ) + .into_acir(&Brillig::default(), noirc_frontend::Distinctness::Distinct) .expect("Should compile manually written SSA into ACIR"); assert_eq!(acir_functions.len(), 3, "Should have three ACIR functions"); @@ -2700,7 +2717,7 @@ mod test { expected_outputs: Vec, ) { match opcode { - Opcode::Call { id, inputs, outputs } => { + Opcode::Call { id, inputs, outputs, ..
} => { assert_eq!( *id, expected_id, "Main was expected to call {expected_id} but got {}", diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index e0e60b737ad..d3e5e506111 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -310,7 +310,8 @@ impl FunctionBuilder { index: ValueId, value: ValueId, ) -> ValueId { - self.insert_instruction(Instruction::ArraySet { array, index, value }, None).first() + self.insert_instruction(Instruction::ArraySet { array, index, value, mutable: false }, None) + .first() } /// Insert an instruction to increment an array's reference count. This only has an effect @@ -500,7 +501,6 @@ mod tests { use acvm::FieldElement; use crate::ssa::ir::{ - function::{InlineType, RuntimeType}, instruction::{Endian, Intrinsic}, map::Id, types::Type, diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 870b5e602f1..6b950c327cf 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -378,6 +378,30 @@ impl DataFlowGraph { value_id } + /// Replaces an instruction result with a fresh id. + pub(crate) fn replace_result( + &mut self, + instruction_id: InstructionId, + prev_value_id: ValueId, + ) -> ValueId { + let typ = self.type_of_value(prev_value_id); + let results = self.results.get_mut(&instruction_id).unwrap(); + let res_position = results + .iter() + .position(|&id| id == prev_value_id) + .expect("Result id not found while replacing"); + + let value_id = self.values.insert(Value::Instruction { + typ, + position: res_position, + instruction: instruction_id, + }); + + // Replace the value in list of results for this instruction + results[res_position] = value_id; + value_id + } + /// Returns the number of instructions /// inserted into functions. pub(crate) fn num_instructions(&self) -> usize { diff --git a/compiler/noirc_evaluator/src/ssa/ir/dom.rs b/compiler/noirc_evaluator/src/ssa/ir/dom.rs index bd1481a7474..15fa3bad38d 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dom.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dom.rs @@ -249,13 +249,8 @@ mod tests { use crate::ssa::{ function_builder::FunctionBuilder, ir::{ - basic_block::BasicBlockId, - dfg::CallStack, - dom::DominatorTree, - function::{Function, InlineType, RuntimeType}, - instruction::TerminatorInstruction, - map::Id, - types::Type, + basic_block::BasicBlockId, dfg::CallStack, dom::DominatorTree, function::Function, + instruction::TerminatorInstruction, map::Id, types::Type, }, }; diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index dd190c112f3..2b23cc1c1e8 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -189,8 +189,9 @@ pub(crate) enum Instruction { ArrayGet { array: ValueId, index: ValueId }, /// Creates a new array with the new value at the given index. All other elements are identical - /// to those in the given array. This will not modify the original array. - ArraySet { array: ValueId, index: ValueId, value: ValueId }, + /// to those in the given array. This will not modify the original array unless `mutable` is + /// set. This flag is off by default and only enabled when optimizations determine it is safe. 
+ ArraySet { array: ValueId, index: ValueId, value: ValueId, mutable: bool }, /// An instruction to increment the reference count of a value. /// @@ -363,9 +364,12 @@ impl Instruction { Instruction::ArrayGet { array, index } => { Instruction::ArrayGet { array: f(*array), index: f(*index) } } - Instruction::ArraySet { array, index, value } => { - Instruction::ArraySet { array: f(*array), index: f(*index), value: f(*value) } - } + Instruction::ArraySet { array, index, value, mutable } => Instruction::ArraySet { + array: f(*array), + index: f(*index), + value: f(*value), + mutable: *mutable, + }, Instruction::IncrementRc { value } => Instruction::IncrementRc { value: f(*value) }, Instruction::DecrementRc { value } => Instruction::DecrementRc { value: f(*value) }, Instruction::RangeCheck { value, max_bit_size, assert_message } => { @@ -416,7 +420,7 @@ impl Instruction { f(*array); f(*index); } - Instruction::ArraySet { array, index, value } => { + Instruction::ArraySet { array, index, value, mutable: _ } => { f(*array); f(*index); f(*value); @@ -573,12 +577,12 @@ impl Instruction { Instruction::IncrementRc { .. } => None, Instruction::DecrementRc { .. } => None, Instruction::RangeCheck { value, max_bit_size, .. } => { - if let Some(numeric_constant) = dfg.get_numeric_constant(*value) { - if numeric_constant.num_bits() < *max_bit_size { - return Remove; - } + let max_potential_bits = dfg.get_value_max_num_bits(*value); + if max_potential_bits < *max_bit_size { + Remove + } else { + None } - None } } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs index 36f3ae8620b..9099268ace9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs @@ -130,6 +130,12 @@ impl Binary { let zero = dfg.make_constant(FieldElement::zero(), operand_type); return SimplifyResult::SimplifiedTo(zero); } + if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) + && dfg.get_value_max_num_bits(self.lhs) == 1 + { + // Squaring a boolean value is a noop. 
+ return SimplifyResult::SimplifiedTo(self.lhs); + } } BinaryOp::Div => { if rhs_is_one { @@ -164,6 +170,7 @@ impl Binary { let one = dfg.make_constant(FieldElement::one(), Type::bool()); return SimplifyResult::SimplifiedTo(one); } + if operand_type == Type::bool() { // Simplify forms of `(boolean == true)` into `boolean` if lhs_is_one { diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 5b268de239d..1187ea8cb07 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -240,11 +240,16 @@ pub(super) fn simplify_call( let max_bit_size = dfg.get_numeric_constant(arguments[1]); if let Some(max_bit_size) = max_bit_size { let max_bit_size = max_bit_size.to_u128() as u32; - SimplifyResult::SimplifiedToInstruction(Instruction::RangeCheck { - value, - max_bit_size, - assert_message: Some("call to assert_max_bit_size".to_owned()), - }) + let max_potential_bits = dfg.get_value_max_num_bits(value); + if max_potential_bits < max_bit_size { + SimplifyResult::Remove + } else { + SimplifyResult::SimplifiedToInstruction(Instruction::RangeCheck { + value, + max_bit_size, + assert_message: Some("call to assert_max_bit_size".to_owned()), + }) + } } else { SimplifyResult::None } @@ -334,8 +339,13 @@ fn simplify_slice_push_back( let element_size = element_type.element_size(); let new_slice = dfg.make_array(slice, element_type); - let set_last_slice_value_instr = - Instruction::ArraySet { array: new_slice, index: arguments[0], value: arguments[2] }; + let set_last_slice_value_instr = Instruction::ArraySet { + array: new_slice, + index: arguments[0], + value: arguments[2], + mutable: false, + }; + let set_last_slice_value = dfg .insert_instruction_and_results(set_last_slice_value_instr, block, None, call_stack) .first(); diff --git a/compiler/noirc_evaluator/src/ssa/ir/post_order.rs b/compiler/noirc_evaluator/src/ssa/ir/post_order.rs index 5d743e953a5..d95ec451779 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/post_order.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/post_order.rs @@ -73,12 +73,7 @@ impl PostOrder { mod tests { use crate::ssa::{ function_builder::FunctionBuilder, - ir::{ - function::{Function, InlineType, RuntimeType}, - map::Id, - post_order::PostOrder, - types::Type, - }, + ir::{function::Function, map::Id, post_order::PostOrder, types::Type}, }; #[test] diff --git a/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/compiler/noirc_evaluator/src/ssa/ir/printer.rs index 6ef618fba6f..fc13ab7307a 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -176,14 +176,12 @@ fn display_instruction_inner( Instruction::ArrayGet { array, index } => { writeln!(f, "array_get {}, index {}", show(*array), show(*index)) } - Instruction::ArraySet { array, index, value } => { - writeln!( - f, - "array_set {}, index {}, value {}", - show(*array), - show(*index), - show(*value) - ) + Instruction::ArraySet { array, index, value, mutable } => { + let array = show(*array); + let index = show(*index); + let value = show(*value); + let mutable = if *mutable { " mut" } else { "" }; + writeln!(f, "array_set{mutable} {array}, index {index}, value {value}",) } Instruction::IncrementRc { value } => { writeln!(f, "inc_rc {}", show(*value)) diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs new file mode 100644 index 00000000000..cf61d7fd73f --- 
/dev/null +++ b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs @@ -0,0 +1,104 @@ +use crate::ssa::{ + ir::{ + basic_block::BasicBlockId, + dfg::DataFlowGraph, + instruction::{Instruction, InstructionId}, + types::Type::{Array, Slice}, + }, + ssa_gen::Ssa, +}; +use fxhash::{FxHashMap as HashMap, FxHashSet}; + +impl Ssa { + /// Make ArraySet instructions mutable when the array they write to is not used again afterward. + /// For this we simply process all the instructions of the block in execution order, + /// tracking the last use of each array as we go + #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn array_set_optimization(mut self) -> Self { + for func in self.functions.values_mut() { + if !func.runtime().is_entry_point() { + let mut reachable_blocks = func.reachable_blocks(); + assert_eq!(reachable_blocks.len(), 1, "Expected there to be 1 block remaining in Acir function for array_set optimization"); + + let block = reachable_blocks.pop_first().unwrap(); + let instructions_to_update = analyze_last_uses(&func.dfg, block); + make_mutable(&mut func.dfg, block, instructions_to_update); + } + } + self + } +} + +/// Returns the set of ArraySet instructions that can be made mutable +/// because their input value is unused elsewhere afterward. +fn analyze_last_uses(dfg: &DataFlowGraph, block_id: BasicBlockId) -> FxHashSet { + let block = &dfg[block_id]; + let mut array_to_last_use = HashMap::default(); + let mut instructions_that_can_be_made_mutable = FxHashSet::default(); + + for instruction_id in block.instructions() { + match &dfg[*instruction_id] { + Instruction::ArrayGet { array, .. } => { + let array = dfg.resolve(*array); + + if let Some(existing) = array_to_last_use.insert(array, *instruction_id) { + instructions_that_can_be_made_mutable.remove(&existing); + } + } + Instruction::ArraySet { array, .. } => { + let array = dfg.resolve(*array); + + if let Some(existing) = array_to_last_use.insert(array, *instruction_id) { + instructions_that_can_be_made_mutable.remove(&existing); + } + instructions_that_can_be_made_mutable.insert(*instruction_id); + } + Instruction::Call { arguments, .. } => { + for argument in arguments { + if matches!(dfg.type_of_value(*argument), Array { .. } | Slice { .. }) { + let argument = dfg.resolve(*argument); + + if let Some(existing) = array_to_last_use.insert(argument, *instruction_id) + { + instructions_that_can_be_made_mutable.remove(&existing); + } + } + } + } + _ => (), + } + } + + instructions_that_can_be_made_mutable +} + +/// Make each ArraySet instruction in `instructions_to_update` mutable. +fn make_mutable( + dfg: &mut DataFlowGraph, + block_id: BasicBlockId, + instructions_to_update: FxHashSet, +) { + if instructions_to_update.is_empty() { + return; + } + + // Take the instructions temporarily so we can mutate the DFG while we iterate through them + let block = &mut dfg[block_id]; + let instructions = block.take_instructions(); + + for instruction in &instructions { + if instructions_to_update.contains(instruction) { + let instruction = &mut dfg[*instruction]; + + if let Instruction::ArraySet { mutable, ..
} = instruction { + *mutable = true; + } else { + unreachable!( + "Non-ArraySet instruction in instructions_to_update!\n{instruction:?}" + ); + } + } + } + + *dfg[block_id].instructions_mut() = instructions; +} diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_use.rs b/compiler/noirc_evaluator/src/ssa/opt/array_use.rs deleted file mode 100644 index 0bb8b0112b6..00000000000 --- a/compiler/noirc_evaluator/src/ssa/opt/array_use.rs +++ /dev/null @@ -1,57 +0,0 @@ -use crate::ssa::{ - ir::{ - basic_block::BasicBlockId, - dfg::DataFlowGraph, - instruction::{Instruction, InstructionId}, - post_order::PostOrder, - value::{Value, ValueId}, - }, - ssa_gen::Ssa, -}; -use fxhash::FxHashMap as HashMap; - -impl Ssa { - /// Map arrays with the last instruction that uses it - /// For this we simply process all the instructions in execution order - /// and update the map whenever there is a match - #[tracing::instrument(level = "trace", skip(self))] - pub(crate) fn find_last_array_uses(&self) -> HashMap { - let mut array_use = HashMap::default(); - for func in self.functions.values() { - let mut reverse_post_order = PostOrder::with_function(func).into_vec(); - reverse_post_order.reverse(); - for block in reverse_post_order { - last_use(block, &func.dfg, &mut array_use); - } - } - array_use - } -} - -/// Updates the array_def map when an instructions is using an array -fn last_use( - block_id: BasicBlockId, - dfg: &DataFlowGraph, - array_def: &mut HashMap, -) { - let block = &dfg[block_id]; - for instruction_id in block.instructions() { - match &dfg[*instruction_id] { - Instruction::ArrayGet { array, .. } | Instruction::ArraySet { array, .. } => { - let array = dfg.resolve(*array); - array_def.insert(array, *instruction_id); - } - Instruction::Call { arguments, .. } => { - for argument in arguments { - let resolved_arg = dfg.resolve(*argument); - if matches!(dfg[resolved_arg], Value::Array { .. }) { - array_def.insert(resolved_arg, *instruction_id); - } - } - } - _ => { - // Nothing to do - } - } - } -} diff --git a/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs b/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs new file mode 100644 index 00000000000..69eab1da0ed --- /dev/null +++ b/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs @@ -0,0 +1,71 @@ +use crate::ssa::{ + ir::{ + function::Function, + instruction::{Instruction, InstructionId, Intrinsic}, + types::Type, + value::Value, + }, + ssa_gen::Ssa, +}; +use fxhash::FxHashMap as HashMap; + +impl Ssa { + /// A simple SSA pass to find any calls to `Intrinsic::AsSlice` and replace any references to the length of the + /// resulting slice with the length of the array from which it was generated. + /// + /// This allows the length of a slice generated from an array to be used in locations where a constant value is + /// necessary when the value of the array is unknown. + /// + /// Note that this pass must be placed before loop unrolling to be useful.
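+ ///
+ /// Illustration (hypothetical Noir, not part of this change): given `let s = arr.as_slice();`
+ /// with `arr: [Field; 3]`, a later `s.len()` is replaced by the constant 3, which is what
+ /// lets a loop such as `for i in 0..s.len() { ... }` unroll.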
+ #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn as_slice_optimization(mut self) -> Self { + for func in self.functions.values_mut() { + let known_slice_lengths = known_slice_lengths(func); + replace_known_slice_lengths(func, known_slice_lengths); + } + self + } +} + +fn known_slice_lengths(func: &Function) -> HashMap { + let mut known_slice_lengths = HashMap::default(); + for block_id in func.reachable_blocks() { + let block = &func.dfg[block_id]; + for instruction_id in block.instructions() { + let (target_func, arguments) = match &func.dfg[*instruction_id] { + Instruction::Call { func, arguments } => (func, arguments), + _ => continue, + }; + + match &func.dfg[*target_func] { + Value::Intrinsic(Intrinsic::AsSlice) => { + let array_typ = func.dfg.type_of_value(arguments[0]); + if let Type::Array(_, length) = array_typ { + known_slice_lengths.insert(*instruction_id, length); + } else { + unreachable!("AsSlice called with non-array {}", array_typ); + } + } + _ => continue, + }; + } + } + known_slice_lengths +} + +fn replace_known_slice_lengths( + func: &mut Function, + known_slice_lengths: HashMap, +) { + known_slice_lengths.into_iter().for_each(|(instruction_id, known_length)| { + let call_returns = func.dfg.instruction_results(instruction_id); + let original_slice_length = call_returns[0]; + + // We won't use the new id for the original unknown length. + // This isn't strictly necessary as a new result will be defined the next time the instruction + // is reinserted, but this avoids leaving the program in an invalid state. + func.dfg.replace_result(instruction_id, original_slice_length); + let known_length = func.dfg.make_constant(known_length.into(), Type::length_type()); + func.dfg.set_value_from_id(original_slice_length, known_length); + }); +} diff --git a/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs b/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs index 6d556e86cb5..0409f0e6a49 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs @@ -77,7 +77,6 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, ir::{ - function::{InlineType, RuntimeType}, instruction::{Binary, BinaryOp, Instruction}, map::Id, types::Type, diff --git a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 88948331960..6cac8c91bc3 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -283,7 +283,6 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, ir::{ - function::{InlineType, RuntimeType}, instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, map::Id, types::Type, diff --git a/compiler/noirc_evaluator/src/ssa/opt/die.rs b/compiler/noirc_evaluator/src/ssa/opt/die.rs index df89dcb716d..d1b3e1e83f5 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -169,7 +169,6 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, ir::{ - function::{InlineType, RuntimeType}, instruction::{BinaryOp, Intrinsic}, map::Id, types::Type, diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index e731a7952a6..07771397ce8 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -841,7 +841,7 @@ mod
test { function_builder::FunctionBuilder, ir::{ dfg::DataFlowGraph, - function::{Function, InlineType, RuntimeType}, + function::Function, instruction::{BinaryOp, Instruction, Intrinsic, TerminatorInstruction}, map::Id, types::Type, diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs index adb6d2871e5..ce54bb533f7 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs @@ -114,12 +114,7 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, - ir::{ - cfg::ControlFlowGraph, - function::{InlineType, RuntimeType}, - map::Id, - types::Type, - }, + ir::{cfg::ControlFlowGraph, map::Id, types::Type}, opt::flatten_cfg::branch_analysis::find_branch_ends, }; diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index 7c7698d236d..ead3cac071c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -11,7 +11,7 @@ use crate::ssa::{ ir::{ basic_block::BasicBlockId, dfg::{CallStack, InsertInstructionResult}, - function::{Function, FunctionId, InlineType, RuntimeType}, + function::{Function, FunctionId}, instruction::{Instruction, InstructionId, TerminatorInstruction}, value::{Value, ValueId}, }, @@ -351,14 +351,13 @@ impl<'function> PerFunctionContext<'function> { for id in block.instructions() { match &self.source_function.dfg[*id] { Instruction::Call { func, arguments } => match self.get_function(*func) { - Some(function) => match ssa.functions[&function].runtime() { - RuntimeType::Acir(InlineType::Inline) => { - self.inline_function(ssa, *id, function, arguments); - } - RuntimeType::Acir(InlineType::Fold) | RuntimeType::Brillig => { + Some(function) => { + if ssa.functions[&function].runtime().is_entry_point() { self.push_instruction(*id); + } else { + self.inline_function(ssa, *id, function, arguments); } - }, + } None => self.push_instruction(*id), }, _ => self.push_instruction(*id), @@ -523,7 +522,7 @@ mod test { function_builder::FunctionBuilder, ir::{ basic_block::BasicBlockId, - function::{InlineType, RuntimeType}, + function::InlineType, instruction::{BinaryOp, Intrinsic, TerminatorInstruction}, map::Id, types::Type, diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index f1a38585bd6..7b87142d824 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -414,7 +414,6 @@ mod tests { ir::{ basic_block::BasicBlockId, dfg::DataFlowGraph, - function::{InlineType, RuntimeType}, instruction::{BinaryOp, Instruction, Intrinsic, TerminatorInstruction}, map::Id, types::Type, diff --git a/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 8f98b3fb17f..4452840a28c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -3,7 +3,8 @@ //! Each pass is generally expected to mutate the SSA IR into a gradually //! simpler form until the IR only has a single function remaining with 1 block within it. //! Generally, these passes are also expected to minimize the final amount of instructions. 
-mod array_use; +mod array_set; +mod as_slice_length; mod assert_constant; mod bubble_up_constrains; mod constant_folding; @@ -14,5 +15,6 @@ mod inlining; mod mem2reg; mod rc; mod remove_bit_shifts; +mod remove_enable_side_effects; mod simplify_cfg; mod unrolling; diff --git a/compiler/noirc_evaluator/src/ssa/opt/rc.rs b/compiler/noirc_evaluator/src/ssa/opt/rc.rs index 7b5196f2004..1561547e32e 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/rc.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/rc.rs @@ -166,12 +166,8 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, ir::{ - basic_block::BasicBlockId, - dfg::DataFlowGraph, - function::{InlineType, RuntimeType}, - instruction::Instruction, - map::Id, - types::Type, + basic_block::BasicBlockId, dfg::DataFlowGraph, function::RuntimeType, + instruction::Instruction, map::Id, types::Type, }, }; diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs new file mode 100644 index 00000000000..8535dc2661f --- /dev/null +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -0,0 +1,167 @@ +//! The goal of the "remove enable side effects" optimization pass is to delay any [Instruction::EnableSideEffects] +//! instructions such that they cover the minimum number of instructions possible. +//! +//! The pass works as follows: +//! - Insert instructions until an [Instruction::EnableSideEffects] is encountered, save this [InstructionId]. +//! - Continue inserting instructions until either +//! - Another [Instruction::EnableSideEffects] is encountered, if so then drop the previous [InstructionId] in favour +//! of this one. +//! - An [Instruction] with side-effects is encountered, if so then insert the currently saved [Instruction::EnableSideEffects] +//! before the [Instruction]. Continue inserting instructions until the next [Instruction::EnableSideEffects] is encountered. +use std::collections::HashSet; + +use acvm::FieldElement; + +use crate::ssa::{ + ir::{ + basic_block::BasicBlockId, + dfg::DataFlowGraph, + function::Function, + instruction::{BinaryOp, Instruction, Intrinsic}, + value::Value, + }, + ssa_gen::Ssa, +}; + +impl Ssa { + /// See [`remove_enable_side_effects`][self] module for more information. 
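+ ///
+ /// A sketch of the effect on a single block (illustrative SSA, not real compiler output):
+ ///
+ /// enable_side_effects v0
+ /// v3 = add v1, v2
+ /// v4 = div v1, v2
+ ///
+ /// becomes
+ ///
+ /// v3 = add v1, v2
+ /// enable_side_effects v0
+ /// v4 = div v1, v2
+ ///
+ /// since `add` cannot fail, while `div` responds to the predicate (a zero divisor
+ /// must not produce a hard error when the predicate is false).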
+ #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn remove_enable_side_effects(mut self) -> Ssa { + for function in self.functions.values_mut() { + remove_enable_side_effects(function); + } + self + } +} + +fn remove_enable_side_effects(function: &mut Function) { + let mut context = Context::default(); + context.block_queue.push(function.entry_block()); + + while let Some(block) = context.block_queue.pop() { + if context.visited_blocks.contains(&block) { + continue; + } + + context.visited_blocks.insert(block); + context.remove_enable_side_effects_in_block(function, block); + } +} + +#[derive(Default)] +struct Context { + visited_blocks: HashSet, + block_queue: Vec, +} + +impl Context { + fn remove_enable_side_effects_in_block( + &mut self, + function: &mut Function, + block: BasicBlockId, + ) { + let instructions = function.dfg[block].take_instructions(); + + let mut last_side_effects_enabled_instruction = None; + + let mut new_instructions = Vec::with_capacity(instructions.len()); + for instruction_id in instructions { + let instruction = &function.dfg[instruction_id]; + + // If we run into another `Instruction::EnableSideEffects` before encountering any + // instructions with side effects then we can drop the instruction we're holding and + // continue with the new `Instruction::EnableSideEffects`. + if let Instruction::EnableSideEffects { condition } = instruction { + // If we're seeing an `enable_side_effects u1 1` then we want to insert it immediately. + // This is because we want to maximize the effect it will have. + if function + .dfg + .get_numeric_constant(*condition) + .map_or(false, |condition| condition.is_one()) + { + new_instructions.push(instruction_id); + last_side_effects_enabled_instruction = None; + continue; + } + + last_side_effects_enabled_instruction = Some(instruction_id); + continue; + } + + // If we hit an instruction which is affected by the side effects var then we must insert the + // `Instruction::EnableSideEffects` before we insert this new instruction. + if Self::responds_to_side_effects_var(&function.dfg, instruction) { + if let Some(enable_side_effects_instruction_id) = + last_side_effects_enabled_instruction.take() + { + new_instructions.push(enable_side_effects_instruction_id); + } + } + new_instructions.push(instruction_id); + } + + *function.dfg[block].instructions_mut() = new_instructions; + + self.block_queue.extend(function.dfg[block].successors()); + } + + fn responds_to_side_effects_var(dfg: &DataFlowGraph, instruction: &Instruction) -> bool { + use Instruction::*; + match instruction { + Binary(binary) => { + if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) { + if let Some(rhs) = dfg.get_numeric_constant(binary.rhs) { + rhs == FieldElement::zero() + } else { + true + } + } else { + false + } + } + + Cast(_, _) + | Not(_) + | Truncate { .. } + | Constrain(..) + | RangeCheck { .. } + | IncrementRc { .. } + | DecrementRc { .. } => false, + + EnableSideEffects { .. } + | ArrayGet { .. } + | ArraySet { .. } + | Allocate + | Store { .. } + | Load { .. } => true, + + // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. + Call { func, .. 
} => match dfg[*func] { + Value::Intrinsic(intrinsic) => match intrinsic { + Intrinsic::SlicePushBack + | Intrinsic::SlicePushFront + | Intrinsic::SlicePopBack + | Intrinsic::SlicePopFront + | Intrinsic::SliceInsert + | Intrinsic::SliceRemove => true, + + Intrinsic::ArrayLen + | Intrinsic::AssertConstant + | Intrinsic::ApplyRangeConstraint + | Intrinsic::StrAsBytes + | Intrinsic::ToBits(_) + | Intrinsic::ToRadix(_) + | Intrinsic::BlackBox(_) + | Intrinsic::FromField + | Intrinsic::AsField + | Intrinsic::AsSlice => false, + }, + + // We must assume that functions contain a side effect as we cannot inspect more deeply. + Value::Function(_) => true, + + _ => false, + }, + } + } +} diff --git a/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs index b9675d99c90..f524b10f1f2 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs @@ -154,7 +154,6 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, ir::{ - function::{InlineType, RuntimeType}, instruction::{BinaryOp, TerminatorInstruction}, map::Id, types::Type, diff --git a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 92ec1e8f1bb..8110e3469f1 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -464,12 +464,7 @@ impl<'f> LoopIteration<'f> { mod tests { use crate::ssa::{ function_builder::FunctionBuilder, - ir::{ - function::{InlineType, RuntimeType}, - instruction::BinaryOp, - map::Id, - types::Type, - }, + ir::{instruction::BinaryOp, map::Id, types::Type}, }; #[test] diff --git a/compiler/noirc_frontend/build.rs b/compiler/noirc_frontend/build.rs deleted file mode 100644 index 53ae9489168..00000000000 --- a/compiler/noirc_frontend/build.rs +++ /dev/null @@ -1,15 +0,0 @@ -use std::path::PathBuf; - -const BLNS_JSON_PATH: &str = "BLNS_JSON_PATH"; - -fn main() -> Result<(), String> { - let out_dir = std::env::var("OUT_DIR").unwrap(); - - let dest_path = PathBuf::from(out_dir.clone()).join("blns.base64.json"); - let dest_path_str = dest_path.to_str().unwrap(); - - println!("cargo:rustc-env={BLNS_JSON_PATH}={dest_path_str}"); - std::fs::copy("./src/blns/blns.base64.json", dest_path).unwrap(); - - Ok(()) -} diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index dea9fc0f3d3..753b5a31d32 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -2,6 +2,7 @@ use std::fmt::Display; use std::sync::atomic::{AtomicU32, Ordering}; use crate::lexer::token::SpannedToken; +use crate::macros_api::SecondaryAttribute; use crate::parser::{ParserError, ParserErrorReason}; use crate::token::Token; use crate::{ @@ -107,7 +108,7 @@ impl StatementKind { pub fn new_let( ((pattern, r#type), expression): ((Pattern, UnresolvedType), Expression), ) -> StatementKind { - StatementKind::Let(LetStatement { pattern, r#type, expression }) + StatementKind::Let(LetStatement { pattern, r#type, expression, attributes: vec![] }) } /// Create a Statement::Assign value, desugaring any combined operators like += if needed. 
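The `LetStatement` hunk below changes `new_let` to accept the parsed attributes alongside the pattern/type/expression group. A reduced sketch of that constructor shape (stand-in types, purely illustrative; the real `Pattern`, `UnresolvedType`, `Expression`, and `SecondaryAttribute` are elided):

struct LetStmt {
    name: String,
    attributes: Vec<String>,
}

impl LetStmt {
    // The nested tuple mirrors how parser combinators hand over an already
    // parsed ((pattern, type), expression) group together with the attributes.
    fn new_let(
        (((name, _ty), _expr), attributes): (((String, ()), ()), Vec<String>),
    ) -> LetStmt {
        LetStmt { name, attributes }
    }
}

fn main() {
    let stmt = LetStmt::new_let(((("x".to_string(), ()), ()), vec!["abi(storage)".to_string()]));
    assert_eq!(stmt.name, "x");
    assert_eq!(stmt.attributes, vec!["abi(storage)".to_string()]);
}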
@@ -405,13 +406,17 @@ pub struct LetStatement { pub pattern: Pattern, pub r#type: UnresolvedType, pub expression: Expression, + pub attributes: Vec, } impl LetStatement { pub fn new_let( - ((pattern, r#type), expression): ((Pattern, UnresolvedType), Expression), + (((pattern, r#type), expression), attributes): ( + ((Pattern, UnresolvedType), Expression), + Vec, + ), ) -> LetStatement { - LetStatement { pattern, r#type, expression } + LetStatement { pattern, r#type, expression, attributes } } } @@ -568,6 +573,7 @@ impl ForRange { pattern: Pattern::Identifier(array_ident.clone()), r#type: UnresolvedType::unspecified(), expression: array, + attributes: vec![], }), span: array_span, }; @@ -610,6 +616,7 @@ impl ForRange { pattern: Pattern::Identifier(identifier), r#type: UnresolvedType::unspecified(), expression: Expression::new(loop_element, array_span), + attributes: vec![], }), span: array_span, }; diff --git a/compiler/noirc_frontend/src/debug/mod.rs b/compiler/noirc_frontend/src/debug/mod.rs index 71e0d44b478..67b52071d7b 100644 --- a/compiler/noirc_frontend/src/debug/mod.rs +++ b/compiler/noirc_frontend/src/debug/mod.rs @@ -145,6 +145,7 @@ impl DebugInstrumenter { pattern: ast::Pattern::Identifier(ident("__debug_expr", ret_expr.span)), r#type: ast::UnresolvedType::unspecified(), expression: ret_expr.clone(), + attributes: vec![], }), span: ret_expr.span, }; @@ -248,6 +249,7 @@ impl DebugInstrumenter { }), span: let_stmt.expression.span, }, + attributes: vec![], }), span: *span, } @@ -273,6 +275,7 @@ impl DebugInstrumenter { pattern: ast::Pattern::Identifier(ident("__debug_expr", assign_stmt.expression.span)), r#type: ast::UnresolvedType::unspecified(), expression: assign_stmt.expression.clone(), + attributes: vec![], }); let expression_span = assign_stmt.expression.span; let new_assign_stmt = match &assign_stmt.lvalue { diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 0c53bff4a54..463b8a4b329 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -4,7 +4,7 @@ use crate::graph::CrateId; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use crate::hir::resolution::import::{resolve_import, ImportDirective}; +use crate::hir::resolution::import::{resolve_import, ImportDirective, PathResolution}; use crate::hir::resolution::{ collect_impls, collect_trait_impls, path_resolver, resolve_free_functions, resolve_globals, resolve_impls, resolve_structs, resolve_trait_by_path, resolve_trait_impls, resolve_traits, @@ -56,8 +56,11 @@ impl UnresolvedFunctions { for bound in &mut func.def.where_clause { match resolve_trait_by_path(def_maps, module, bound.trait_bound.trait_path.clone()) { - Ok(trait_id) => { + Ok((trait_id, warning)) => { bound.trait_bound.trait_id = Some(trait_id); + if let Some(warning) = warning { + errors.push(DefCollectorErrorKind::PathResolutionError(warning)); + } } Err(err) => { errors.push(err); @@ -253,20 +256,6 @@ impl DefCollector { // Add the current crate to the collection of DefMaps context.def_maps.insert(crate_id, def_collector.def_map); - // TODO(#4653): generalize this function - for macro_processor in macro_processors { - macro_processor - .process_collected_defs( - &crate_id, - context, - &def_collector.collected_traits_impls, - &mut def_collector.collected_functions, - ) - .unwrap_or_else(|(macro_err, file_id)| { - 
errors.push((macro_err.into(), file_id)); - }); - } - inject_prelude(crate_id, context, crate_root, &mut def_collector.collected_imports); for submodule in submodules { inject_prelude( @@ -281,6 +270,13 @@ impl DefCollector { for collected_import in def_collector.collected_imports { match resolve_import(crate_id, &collected_import, &context.def_maps) { Ok(resolved_import) => { + if let Some(error) = resolved_import.error { + errors.push(( + DefCollectorErrorKind::PathResolutionError(error).into(), + root_file_id, + )); + } + // Populate module namespaces according to the imports used let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); @@ -299,9 +295,9 @@ impl DefCollector { } } } - Err((error, module_id)) => { + Err(error) => { let current_def_map = context.def_maps.get(&crate_id).unwrap(); - let file_id = current_def_map.file_id(module_id); + let file_id = current_def_map.file_id(collected_import.module_id); let error = DefCollectorErrorKind::PathResolutionError(error); errors.push((error.into(), file_id)); } @@ -409,12 +405,13 @@ fn inject_prelude( Path { segments: segments.clone(), kind: crate::PathKind::Dep, span: Span::default() }; if !crate_id.is_stdlib() { - if let Ok(module_def) = path_resolver::resolve_path( + if let Ok(PathResolution { module_def_id, error }) = path_resolver::resolve_path( &context.def_maps, ModuleId { krate: crate_id, local_id: crate_root }, path, ) { - let module_id = module_def.as_module().expect("std::prelude should be a module"); + assert!(error.is_none(), "Tried to add private item to prelude"); + let module_id = module_def_id.as_module().expect("std::prelude should be a module"); let prelude = context.module(module_id).scope().names(); for path in prelude { diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index fcb20c740c7..6fbb3b67546 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -102,8 +102,12 @@ impl<'a> ModCollector<'a> { for global in globals { let name = global.pattern.name_ident().clone(); - let global_id = - context.def_interner.push_empty_global(name.clone(), self.module_id, self.file_id); + let global_id = context.def_interner.push_empty_global( + name.clone(), + self.module_id, + self.file_id, + global.attributes.clone(), + ); // Add the statement to the scope so its path can be looked up later let result = self.def_collector.def_map.modules[self.module_id.0] @@ -455,6 +459,7 @@ impl<'a> ModCollector<'a> { name.clone(), trait_id.0.local_id, self.file_id, + vec![], ); if let Err((first_def, second_def)) = self.def_collector.def_map.modules diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 157227f763e..7c0090ff95b 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -2,13 +2,13 @@ use crate::graph::CrateId; use crate::hir::def_collector::dc_crate::{CompilationError, DefCollector}; use crate::hir::Context; use crate::macros_api::MacroProcessor; -use crate::node_interner::{FuncId, NodeInterner, StructId}; +use crate::node_interner::{FuncId, GlobalId, NodeInterner, StructId}; use crate::parser::{parse_program, ParsedModule, ParserError}; use crate::token::{FunctionAttribute, SecondaryAttribute, TestScope}; use arena::{Arena, Index}; use fm::{FileId, FileManager}; use noirc_errors::Location; -use std::collections::BTreeMap; +use 
std::collections::{BTreeMap, HashMap}; mod module_def; pub use module_def::*; mod item_scope; @@ -217,20 +217,37 @@ impl CrateDefMap { }) .collect(); - let events = module - .type_definitions() - .filter_map(|id| { - id.as_type().filter(|struct_id| { - interner - .struct_attributes(struct_id) - .iter() - .any(|attr| attr == &SecondaryAttribute::Event) - }) - }) - .collect(); + let mut outputs = + ContractOutputs { structs: HashMap::new(), globals: HashMap::new() }; + + interner.get_all_globals().iter().for_each(|global_info| { + interner.global_attributes(&global_info.id).iter().for_each(|attr| { + if let SecondaryAttribute::Abi(tag) = attr { + if let Some(tagged) = outputs.globals.get_mut(tag) { + tagged.push(global_info.id); + } else { + outputs.globals.insert(tag.to_string(), vec![global_info.id]); + } + } + }); + }); + + module.type_definitions().for_each(|id| { + if let ModuleDefId::TypeId(struct_id) = id { + interner.struct_attributes(&struct_id).iter().for_each(|attr| { + if let SecondaryAttribute::Abi(tag) = attr { + if let Some(tagged) = outputs.structs.get_mut(tag) { + tagged.push(struct_id); + } else { + outputs.structs.insert(tag.to_string(), vec![struct_id]); + } + } + }); + } + }); let name = self.get_module_path(id, module.parent); - Some(Contract { name, location: module.location, functions, events }) + Some(Contract { name, location: module.location, functions, outputs }) } else { None } @@ -283,6 +300,11 @@ pub struct ContractFunctionMeta { pub is_entry_point: bool, } +pub struct ContractOutputs { + pub structs: HashMap&lt;String, Vec&lt;StructId&gt;&gt;, + pub globals: HashMap&lt;String, Vec&lt;GlobalId&gt;&gt;, +} + /// A 'contract' in Noir source code with a given name, functions and events. /// This is not an AST node, it is just a convenient form to return for CrateDefMap::get_all_contracts. pub struct Contract { @@ -290,7 +312,7 @@ pub struct Contract { pub name: String, pub location: Location, pub functions: Vec&lt;ContractFunctionMeta&gt;, - pub events: Vec&lt;StructId&gt;, + pub outputs: ContractOutputs, } /// Given a FileId, fetch the File from the FileManager and parse its content diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index 00bcb0cdebf..727a6596df1 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -26,7 +26,7 @@ pub type ParsedFiles = HashMap&lt;fm::FileId, (ParsedModule, Vec&lt;ParserError&gt;)&gt;; pub struct Context<'file_manager, 'parsed_files> { pub def_interner: NodeInterner, pub crate_graph: CrateGraph, - pub(crate) def_maps: BTreeMap&lt;CrateId, CrateDefMap&gt;, + pub def_maps: BTreeMap&lt;CrateId, CrateDefMap&gt;, // In the WASM context, we take ownership of the file manager, // which is why this needs to be a Cow. In all use-cases, the file manager // is read-only however, once it has been passed to the Context.
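For context on the `ContractOutputs` change above: where contract artifacts previously collected only structs tagged `#[event]`, the def map now walks every global and struct in the contract and groups the ids of any item carrying an `#[abi(tag)]` attribute under that tag string. A minimal sketch of how a contract might use the new attribute follows; the `storage` and `note` tags and all item names are illustrative only and not part of this patch (Noir source, shown with the same `rust` fences the docs in this diff use):

```rust
contract Example {
    // Grouped under outputs.globals["storage"] in the compiled artifact.
    #[abi(storage)]
    global MAX_NOTES: Field = 10;

    // Grouped under outputs.structs["note"] in the compiled artifact.
    #[abi(note)]
    struct CardNote {
        owner: Field,
        points: Field,
    }
}
```

Untagged items are unaffected, and the resolver changes below reject `#[abi(tag)]` outside of a contract via the new `AbiAttributeOutsideContract` error.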
diff --git a/compiler/noirc_frontend/src/hir/resolution/errors.rs b/compiler/noirc_frontend/src/hir/resolution/errors.rs index 3c6c0582292..71e3f3482fc 100644 --- a/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -25,7 +25,7 @@ pub enum ResolverError { #[error("path is not an identifier")] PathIsNotIdent { span: Span }, #[error("could not resolve path")] - PathResolutionError(PathResolutionError), + PathResolutionError(#[from] PathResolutionError), #[error("Expected")] Expected { span: Span, expected: String, got: String }, #[error("Duplicate field in constructor")] @@ -72,14 +72,12 @@ pub enum ResolverError { NumericConstantInFormatString { name: String, span: Span }, #[error("Closure environment must be a tuple or unit type")] InvalidClosureEnvironment { typ: Type, span: Span }, - #[error("{name} is private and not visible from the current module")] - PrivateFunctionCalled { name: String, span: Span }, - #[error("{name} is not visible from the current crate")] - NonCrateFunctionCalled { name: String, span: Span }, #[error("Nested slices are not supported")] NestedSlices { span: Span }, #[error("#[recursive] attribute is only allowed on entry points to a program")] MisplacedRecursiveAttribute { ident: Ident }, + #[error("#[abi(tag)] attribute is only allowed in contracts")] + AbiAttributeOutsideContract { span: Span }, #[error("Usage of the `#[foreign]` or `#[builtin]` function attributes are not allowed outside of the Noir standard library")] LowLevelFunctionOutsideOfStdlib { ident: Ident }, #[error("Dependency cycle found, '{item}' recursively depends on itself: {cycle} ")] @@ -290,13 +288,6 @@ impl From&lt;ResolverError&gt; for Diagnostic { ResolverError::InvalidClosureEnvironment { span, typ } => Diagnostic::simple_error( format!("{typ} is not a valid closure environment type"), "Closure environment must be a tuple or unit type".to_string(), span), - // This will be upgraded to an error in future versions - ResolverError::PrivateFunctionCalled { span, name } => Diagnostic::simple_warning( - format!("{name} is private and not visible from the current module"), - format!("{name} is private"), span), - ResolverError::NonCrateFunctionCalled { span, name } => Diagnostic::simple_warning( - format!("{name} is not visible from the current crate"), - format!("{name} is only visible within its crate"), span), ResolverError::NestedSlices { span } => Diagnostic::simple_error( "Nested slices are not supported".into(), "Try to use a constant sized array instead".into(), @@ -314,6 +305,13 @@ impl From&lt;ResolverError&gt; for Diagnostic { diag.add_note("The `#[recursive]` attribute specifies to the backend whether it should use a prover which generates proofs that are friendly for recursive verification in another circuit".to_owned()); diag } + ResolverError::AbiAttributeOutsideContract { span } => { + Diagnostic::simple_error( + "#[abi(tag)] attributes can only be used in contracts".to_string(), + "misplaced #[abi(tag)] attribute".to_string(), + span, + ) + }, ResolverError::LowLevelFunctionOutsideOfStdlib { ident } => Diagnostic::simple_error( "Definition of low-level function outside of standard library".into(), "Usage of the `#[foreign]` or `#[builtin]` function attributes are not allowed outside of the Noir standard library".into(), diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index 9c8418daf80..ade97e2cf42 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ 
b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -1,10 +1,11 @@ use noirc_errors::{CustomDiagnostic, Span}; +use thiserror::Error; use crate::graph::CrateId; use std::collections::BTreeMap; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleDefId, ModuleId, PerNs}; -use crate::{Ident, Path, PathKind}; +use crate::{Ident, ItemVisibility, Path, PathKind}; #[derive(Debug, Clone)] pub struct ImportDirective { @@ -14,12 +15,30 @@ pub struct ImportDirective { pub is_prelude: bool, } -pub type PathResolution = Result&lt;PerNs, PathResolutionError&gt;; +struct NamespaceResolution { + module_id: ModuleId, + namespace: PerNs, + error: Option&lt;PathResolutionError&gt;, +} + +type NamespaceResolutionResult = Result&lt;NamespaceResolution, PathResolutionError&gt;; + +pub struct PathResolution { + pub module_def_id: ModuleDefId, + + pub error: Option&lt;PathResolutionError&gt;, +} + +pub(crate) type PathResolutionResult = Result&lt;PathResolution, PathResolutionError&gt;; -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Error)] pub enum PathResolutionError { + #[error("Could not resolve '{0}' in path")] Unresolved(Ident), + #[error("Contract variable '{0}' referenced from outside the contract")] ExternalContractUsed(Ident), + #[error("{0} is private and not visible from the current module")] + Private(Ident), } #[derive(Debug)] @@ -31,21 +50,26 @@ pub struct ResolvedImport { // The module which we must add the resolved namespace to pub module_scope: LocalModuleId, pub is_prelude: bool, + pub error: Option&lt;PathResolutionError&gt;, } impl From&lt;PathResolutionError&gt; for CustomDiagnostic { fn from(error: PathResolutionError) -> Self { - match error { - PathResolutionError::Unresolved(ident) => CustomDiagnostic::simple_error( - format!("Could not resolve '{ident}' in path"), - String::new(), - ident.span(), - ), + match &error { + PathResolutionError::Unresolved(ident) => { + CustomDiagnostic::simple_error(error.to_string(), String::new(), ident.span()) + } PathResolutionError::ExternalContractUsed(ident) => CustomDiagnostic::simple_error( - format!("Contract variable '{ident}' referenced from outside the contract"), + error.to_string(), "Contracts may only be referenced from within a contract".to_string(), ident.span(), ), + // This will be upgraded to an error in future versions + PathResolutionError::Private(ident) => CustomDiagnostic::simple_warning( + error.to_string(), + format!("{ident} is private"), + ident.span(), + ), } } } @@ -54,27 +78,49 @@ pub fn resolve_import( crate_id: CrateId, import_directive: &ImportDirective, def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, -) -> Result&lt;ResolvedImport, (PathResolutionError, ModuleId)&gt; { - let def_map = &def_maps[&crate_id]; - + let allow_contracts = allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); let module_scope = import_directive.module_id; - let resolved_namespace = - resolve_path_to_ns(import_directive, def_map, def_maps, allow_contracts) - .map_err(|error| (error, module_scope))?; + let NamespaceResolution { + module_id: resolved_module, + namespace: resolved_namespace, + mut error, + } = resolve_path_to_ns(import_directive, crate_id, crate_id, def_maps, allow_contracts)?; let name = resolve_path_name(import_directive); + + let visibility = resolved_namespace + .values + .or(resolved_namespace.types) + .map(|(_, visibility, _)| visibility) + .expect("Found empty namespace"); + + error = error.or_else(|| { + if can_reference_module_id( + def_maps, + crate_id, + import_directive.module_id, + resolved_module, + visibility, + ) { + None + } else { + Some(PathResolutionError::Private(name.clone())) + } + }); + Ok(ResolvedImport { name, resolved_namespace, module_scope, is_prelude: import_directive.is_prelude, + error, }) } -pub(super) fn 
allow_referencing_contracts( +fn allow_referencing_contracts( def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, krate: CrateId, local_id: LocalModuleId, @@ -82,27 +128,40 @@ pub(super) fn allow_referencing_contracts( ModuleId { krate, local_id }.module(def_maps).is_contract } -pub fn resolve_path_to_ns( +fn resolve_path_to_ns( import_directive: &ImportDirective, - def_map: &CrateDefMap, + crate_id: CrateId, + importing_crate: CrateId, def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, allow_contracts: bool, -) -> PathResolution { +) -> NamespaceResolutionResult { let import_path = &import_directive.path.segments; + let def_map = &def_maps[&crate_id]; match import_directive.path.kind { crate::ast::PathKind::Crate => { // Resolve from the root of the crate - resolve_path_from_crate_root(def_map, import_path, def_maps, allow_contracts) - } - crate::ast::PathKind::Dep => { - resolve_external_dep(def_map, import_directive, def_maps, allow_contracts) + resolve_path_from_crate_root( + crate_id, + importing_crate, + import_path, + def_maps, + allow_contracts, + ) } + crate::ast::PathKind::Dep => resolve_external_dep( + def_map, + import_directive, + def_maps, + allow_contracts, + importing_crate, + ), crate::ast::PathKind::Plain => { // Plain paths are only used to import children modules. It's possible to allow import of external deps, but maybe this distinction is better? // In Rust they can also point to external Dependencies, if no children can be found with the specified name resolve_name_in_module( - def_map, + crate_id, + importing_crate, import_path, import_directive.module_id, def_maps, @@ -113,45 +172,60 @@ } } fn resolve_path_from_crate_root( - def_map: &CrateDefMap, + crate_id: CrateId, + importing_crate: CrateId, + import_path: &[Ident], def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, allow_contracts: bool, -) -> PathResolution { - resolve_name_in_module(def_map, import_path, def_map.root, def_maps, allow_contracts) +) -> NamespaceResolutionResult { + resolve_name_in_module( + crate_id, + importing_crate, + import_path, + def_maps[&crate_id].root, + def_maps, + allow_contracts, + ) } fn resolve_name_in_module( - def_map: &CrateDefMap, + krate: CrateId, + importing_crate: CrateId, import_path: &[Ident], starting_mod: LocalModuleId, def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, allow_contracts: bool, -) -> PathResolution { - let mut current_mod = &def_map.modules[starting_mod.0]; +) -> NamespaceResolutionResult { + let def_map = &def_maps[&krate]; + let mut current_mod_id = ModuleId { krate, local_id: starting_mod }; + let mut current_mod = &def_map.modules[current_mod_id.local_id.0]; // There is a possibility that the import path is empty // In that case, early return if import_path.is_empty() { - let mod_id = ModuleId { krate: def_map.krate, local_id: starting_mod }; - return Ok(PerNs::types(mod_id.into())); + return Ok(NamespaceResolution { + module_id: current_mod_id, + namespace: PerNs::types(current_mod_id.into()), + error: None, + }); } - let mut import_path = import_path.iter(); - let first_segment = import_path.next().expect("ice: could not fetch first segment"); + let first_segment = import_path.first().expect("ice: could not fetch first segment"); let mut current_ns = current_mod.find_name(first_segment); if current_ns.is_none() { return Err(PathResolutionError::Unresolved(first_segment.clone())); } - for segment in import_path { - let typ = match current_ns.take_types() { - None => return Err(PathResolutionError::Unresolved(segment.clone())), - Some(typ) => typ, + let mut warning: Option&lt;PathResolutionError&gt; = None; + for (last_segment, current_segment) in 
import_path.iter().zip(import_path.iter().skip(1)) { + let (typ, visibility) = match current_ns.types { + None => return Err(PathResolutionError::Unresolved(last_segment.clone())), + Some((typ, visibility, _)) => (typ, visibility), }; // In the type namespace, only Mod can be used in a path. - let new_module_id = match typ { + current_mod_id = match typ { ModuleDefId::ModuleId(id) => id, ModuleDefId::FunctionId(_) => panic!("functions cannot be in the type namespace"), // TODO: If impls are ever implemented, types can be used in a path @@ -161,22 +235,37 @@ ModuleDefId::GlobalId(_) => panic!("globals cannot be in the type namespace"), }; - current_mod = &def_maps[&new_module_id.krate].modules[new_module_id.local_id.0]; + warning = warning.or_else(|| { + if can_reference_module_id( + def_maps, + importing_crate, + starting_mod, + current_mod_id, + visibility, + ) { + None + } else { + Some(PathResolutionError::Private(last_segment.clone())) + } + }); + + current_mod = &def_maps[&current_mod_id.krate].modules[current_mod_id.local_id.0]; // Check if namespace - let found_ns = current_mod.find_name(segment); + let found_ns = current_mod.find_name(current_segment); if found_ns.is_none() { - return Err(PathResolutionError::Unresolved(segment.clone())); + return Err(PathResolutionError::Unresolved(current_segment.clone())); } + // Check if it is a contract and we're calling from a non-contract context if current_mod.is_contract && !allow_contracts { - return Err(PathResolutionError::ExternalContractUsed(segment.clone())); + return Err(PathResolutionError::ExternalContractUsed(current_segment.clone())); } current_ns = found_ns; } - Ok(current_ns) + Ok(NamespaceResolution { module_id: current_mod_id, namespace: current_ns, error: warning }) } fn resolve_path_name(import_directive: &ImportDirective) -> Ident { @@ -191,11 +280,11 @@ fn resolve_external_dep( directive: &ImportDirective, def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, allow_contracts: bool, -) -> PathResolution { + importing_crate: CrateId, +) -> NamespaceResolutionResult { // Use extern_prelude to get the dep - // let path = &directive.path.segments; - // + // Fetch the root module from the prelude let crate_name = path.first().unwrap(); let dep_module = current_def_map .extern_prelude .get(&crate_name.0.contents) ... is_prelude: false, }; - let dep_def_map = def_maps.get(&dep_module.krate).unwrap(); + resolve_path_to_ns(&dep_directive, dep_module.krate, importing_crate, def_maps, allow_contracts) +} + +// Issue an error if the given private function is being called from a non-child module, or +// if the given pub(crate) function is being called from another crate +fn can_reference_module_id( + def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, + importing_crate: CrateId, + current_module: LocalModuleId, + target_module: ModuleId, + visibility: ItemVisibility, +) -> bool { + // Note that if the target module is in a different crate from the current module then we will either + // return true as the target module is public or return false as it is private without looking at the `CrateDefMap` in either case. + let same_crate = target_module.krate == importing_crate; + let target_crate_def_map = &def_maps[&target_module.krate]; + + match visibility { + ItemVisibility::Public => true, + ItemVisibility::PublicCrate => same_crate, + ItemVisibility::Private => { + same_crate + && module_descendent_of_target( + target_crate_def_map, + target_module.local_id, + current_module, + ) + } + } +} + +// Returns true if `current` is a (potentially nested) child module of `target`. 
+// This is also true if `current == target`. +fn module_descendent_of_target( + def_map: &CrateDefMap, + target: LocalModuleId, + current: LocalModuleId, +) -> bool { + if current == target { + return true; + } - resolve_path_to_ns(&dep_directive, dep_def_map, def_maps, allow_contracts) + def_map.modules[current.0] + .parent + .map_or(false, |parent| module_descendent_of_target(def_map, target, parent)) } diff --git a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs index 4c5fa3bceef..e19af3c732f 100644 --- a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs @@ -1,11 +1,9 @@ -use super::import::{ - allow_referencing_contracts, resolve_path_to_ns, ImportDirective, PathResolutionError, -}; +use super::import::{resolve_import, ImportDirective, PathResolution, PathResolutionResult}; use crate::Path; use std::collections::BTreeMap; use crate::graph::CrateId; -use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleDefId, ModuleId}; +use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; pub trait PathResolver { /// Resolve the given path returning the resolved ModuleDefId. @@ -13,7 +11,7 @@ &self, def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, path: Path, - ) -> Result&lt;ModuleDefId, PathResolutionError&gt;; + ) -> PathResolutionResult; fn local_module_id(&self) -> LocalModuleId; @@ -36,7 +34,7 @@ impl PathResolver for StandardPathResolver { &self, def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, path: Path, - ) -> Result&lt;ModuleDefId, PathResolutionError&gt; { + ) -> PathResolutionResult { resolve_path(def_maps, self.module_id, path) } @@ -55,17 +53,15 @@ pub fn resolve_path( def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, module_id: ModuleId, path: Path, -) -> Result&lt;ModuleDefId, PathResolutionError&gt; { +) -> PathResolutionResult { // lets package up the path into an ImportDirective and resolve it using that let import = ImportDirective { module_id: module_id.local_id, path, alias: None, is_prelude: false }; - let allow_referencing_contracts = - allow_referencing_contracts(def_maps, module_id.krate, module_id.local_id); + let resolved_import = resolve_import(module_id.krate, &import, def_maps)?; - let def_map = &def_maps[&module_id.krate]; - let ns = resolve_path_to_ns(&import, def_map, def_maps, allow_referencing_contracts)?; + let namespace = resolved_import.resolved_namespace; + let id = + namespace.values.or(namespace.types).map(|(id, _, _)| id).expect("Found empty namespace"); - let function = ns.values.map(|(id, _, _)| id); - let id = function.or_else(|| ns.types.map(|(id, _, _)| id)); - Ok(id.expect("Found empty namespace")) + Ok(PathResolution { module_def_id: id, error: resolved_import.error }) } diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index ec149dee96e..08b12069d76 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -19,13 +19,14 @@ use crate::hir_def::expr::{ }; use crate::hir_def::traits::{Trait, TraitConstraint}; +use crate::macros_api::SecondaryAttribute; use crate::token::{Attributes, FunctionAttribute}; use regex::Regex; use std::collections::{BTreeMap, HashSet}; use std::rc::Rc; use crate::graph::CrateId; -use crate::hir::def_map::{LocalModuleId, ModuleDefId, TryFromModuleDefId, MAIN_FUNCTION}; +use crate::hir::def_map::{ModuleDefId, TryFromModuleDefId, MAIN_FUNCTION}; use crate::hir_def::stmt::{HirAssignStatement, HirForStatement, HirLValue, HirPattern}; use crate::node_interner::{ DefinitionId, DefinitionKind, 
DependencyId, ExprId, FuncId, GlobalId, NodeInterner, StmtId, @@ -56,6 +57,7 @@ use crate::hir_def::{ }; use super::errors::{PubPosition, ResolverError}; +use super::import::PathResolution; const SELF_TYPE_NAME: &str = "Self"; @@ -616,7 +618,17 @@ impl<'a> Resolver<'a> { match self.lookup_struct_or_error(path) { Some(struct_type) => { let expected_generic_count = struct_type.borrow().generics.len(); - + if !self.in_contract + && self + .interner + .struct_attributes(&struct_type.borrow().id) + .iter() + .any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) + { + self.push_err(ResolverError::AbiAttributeOutsideContract { + span: struct_type.borrow().name.span(), + }); + } self.verify_generics_count(expected_generic_count, &mut args, span, || { struct_type.borrow().to_string() }); @@ -677,10 +689,14 @@ impl<'a> Resolver<'a> { // If we cannot find a local generic of the same name, try to look up a global match self.path_resolver.resolve(self.def_maps, path.clone()) { - Ok(ModuleDefId::GlobalId(id)) => { + Ok(PathResolution { module_def_id: ModuleDefId::GlobalId(id), error }) => { if let Some(current_item) = self.current_item { self.interner.add_global_dependency(current_item, id); } + + if let Some(error) = error { + self.push_err(error.into()); + } Some(Type::Constant(self.eval_global_as_array_length(id, path))) } _ => None, @@ -1162,10 +1178,19 @@ impl<'a> Resolver<'a> { let global_id = self.interner.next_global_id(); let definition = DefinitionKind::Global(global_id); + if !self.in_contract + && let_stmt.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) + { + self.push_err(ResolverError::AbiAttributeOutsideContract { + span: let_stmt.pattern.span(), + }); + } + HirStatement::Let(HirLetStatement { pattern: self.resolve_pattern(let_stmt.pattern, definition), r#type: self.resolve_type(let_stmt.r#type), expression, + attributes: let_stmt.attributes, }) } @@ -1178,6 +1203,7 @@ impl<'a> Resolver<'a> { pattern: self.resolve_pattern(let_stmt.pattern, definition), r#type: self.resolve_type(let_stmt.r#type), expression, + attributes: let_stmt.attributes, }) } StatementKind::Constrain(constrain_stmt) => { @@ -1327,59 +1353,6 @@ impl<'a> Resolver<'a> { } } - // Issue an error if the given private function is being called from a non-child module, or - // if the given pub(crate) function is being called from another crate - fn check_can_reference_function( - &mut self, - func: FuncId, - span: Span, - visibility: ItemVisibility, - ) { - let function_module = self.interner.function_module(func); - let current_module = self.path_resolver.module_id(); - - let same_crate = function_module.krate == current_module.krate; - let krate = function_module.krate; - let current_module = current_module.local_id; - let name = self.interner.function_name(&func).to_string(); - match visibility { - ItemVisibility::Public => (), - ItemVisibility::Private => { - if !same_crate - || !self.module_descendent_of_target( - krate, - function_module.local_id, - current_module, - ) - { - self.errors.push(ResolverError::PrivateFunctionCalled { span, name }); - } - } - ItemVisibility::PublicCrate => { - if !same_crate { - self.errors.push(ResolverError::NonCrateFunctionCalled { span, name }); - } - } - } - } - - // Returns true if `current` is a (potentially nested) child module of `target`. - // This is also true if `current == target`. 
- fn module_descendent_of_target( - &self, - krate: CrateId, - target: LocalModuleId, - current: LocalModuleId, - ) -> bool { - if current == target { - return true; - } - - self.def_maps[&krate].modules[current.0] - .parent - .map_or(false, |parent| self.module_descendent_of_target(krate, target, parent)) - } - fn resolve_local_variable(&mut self, hir_ident: HirIdent, var_scope_index: usize) { let mut transitive_capture_index: Option&lt;usize&gt; = None; @@ -1473,15 +1446,6 @@ impl<'a> Resolver<'a> { if let Some(current_item) = self.current_item { self.interner.add_function_dependency(current_item, id); } - - if self.interner.function_visibility(id) != ItemVisibility::Public { - let span = hir_ident.location.span; - self.check_can_reference_function( - id, - span, - self.interner.function_visibility(id), - ); - } } DefinitionKind::Global(global_id) => { if let Some(current_item) = self.current_item { @@ -1920,7 +1884,7 @@ impl<'a> Resolver<'a> { } if let Ok(ModuleDefId::TraitId(trait_id)) = - self.path_resolver.resolve(self.def_maps, trait_bound.trait_path.clone()) + self.resolve_path(trait_bound.trait_path.clone()) { let the_trait = self.interner.get_trait(trait_id); if let Some(method) = @@ -1955,7 +1919,13 @@ impl<'a> Resolver<'a> { } fn resolve_path(&mut self, path: Path) -> Result&lt;ModuleDefId, ResolverError&gt; { - self.path_resolver.resolve(self.def_maps, path).map_err(ResolverError::PathResolutionError) + let path_resolution = self.path_resolver.resolve(self.def_maps, path)?; + + if let Some(error) = path_resolution.error { + self.push_err(error.into()); + } + + Ok(path_resolution.module_def_id) } fn resolve_block(&mut self, block_expr: BlockExpression) -> HirExpression { diff --git a/compiler/noirc_frontend/src/hir/resolution/traits.rs b/compiler/noirc_frontend/src/hir/resolution/traits.rs index 04da558a642..a7669f57e33 100644 --- a/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -21,6 +21,7 @@ use crate::{ use super::{ functions, get_module_mut, get_struct_type, + import::{PathResolution, PathResolutionError}, path_resolver::{PathResolver, StandardPathResolver}, resolver::Resolver, take_errors, @@ -274,7 +275,15 @@ fn collect_trait_impl( let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; trait_impl.trait_id = match resolve_trait_by_path(def_maps, module, trait_impl.trait_path.clone()) { - Ok(trait_id) => Some(trait_id), + Ok((trait_id, warning)) => { + if let Some(warning) = warning { + errors.push(( + DefCollectorErrorKind::PathResolutionError(warning).into(), + trait_impl.file_id, + )); + } + Some(trait_id) + } Err(error) => { errors.push((error.into(), trait_impl.file_id)); None } } @@ -362,11 +371,13 @@ pub(crate) fn resolve_trait_by_path( def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, module: ModuleId, path: Path, -) -> Result&lt;TraitId, DefCollectorErrorKind&gt; { +) -> Result&lt;(TraitId, Option&lt;PathResolutionError&gt;), DefCollectorErrorKind&gt; { let path_resolver = StandardPathResolver::new(module); match path_resolver.resolve(def_maps, path.clone()) { - Ok(ModuleDefId::TraitId(trait_id)) => Ok(trait_id), + Ok(PathResolution { module_def_id: ModuleDefId::TraitId(trait_id), error }) => { + Ok((trait_id, error)) + } Ok(_) => Err(DefCollectorErrorKind::NotATrait { not_a_trait_name: path }), Err(_) => Err(DefCollectorErrorKind::TraitNotFound { trait_path: path }), } diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index c5a04c33883..cdfc19b3a33 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ 
b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -434,7 +434,9 @@ mod test { use crate::graph::CrateId; use crate::hir::def_map::{ModuleData, ModuleId}; - use crate::hir::resolution::import::PathResolutionError; + use crate::hir::resolution::import::{ + PathResolution, PathResolutionError, PathResolutionResult, + }; use crate::hir_def::expr::HirIdent; use crate::hir_def::stmt::HirLetStatement; use crate::hir_def::stmt::HirPattern::Identifier; @@ -502,6 +504,7 @@ mod test { pattern: Identifier(z), r#type: Type::FieldElement, expression: expr_id, + attributes: vec![], }; let stmt_id = interner.push_stmt(HirStatement::Let(let_stmt)); let expr_id = interner @@ -644,12 +647,13 @@ &self, _def_maps: &BTreeMap&lt;CrateId, CrateDefMap&gt;, path: Path, - ) -> Result&lt;ModuleDefId, PathResolutionError&gt; { + ) -> PathResolutionResult { // Note here that foo::bar and hello::foo::bar would fetch the same thing let name = path.segments.last().unwrap(); self.0 .get(&name.0.contents) .cloned() + .map(|module_def_id| PathResolution { module_def_id, error: None }) .ok_or_else(move || PathResolutionError::Unresolved(name.clone())) } diff --git a/compiler/noirc_frontend/src/hir_def/stmt.rs b/compiler/noirc_frontend/src/hir_def/stmt.rs index c5e287b393c..4c9a33d3dc0 100644 --- a/compiler/noirc_frontend/src/hir_def/stmt.rs +++ b/compiler/noirc_frontend/src/hir_def/stmt.rs @@ -1,4 +1,5 @@ use super::expr::HirIdent; +use crate::macros_api::SecondaryAttribute; use crate::node_interner::ExprId; use crate::{Ident, Type}; use fm::FileId; @@ -26,6 +27,7 @@ pub struct HirLetStatement { pub pattern: HirPattern, pub r#type: Type, pub expression: ExprId, + pub attributes: Vec&lt;SecondaryAttribute&gt;, } impl HirLetStatement { diff --git a/compiler/noirc_frontend/src/blns/LICENSE b/compiler/noirc_frontend/src/lexer/blns/LICENSE similarity index 100% rename from compiler/noirc_frontend/src/blns/LICENSE rename to compiler/noirc_frontend/src/lexer/blns/LICENSE diff --git a/compiler/noirc_frontend/src/blns/README.md b/compiler/noirc_frontend/src/lexer/blns/README.md similarity index 100% rename from compiler/noirc_frontend/src/blns/README.md rename to compiler/noirc_frontend/src/lexer/blns/README.md diff --git a/compiler/noirc_frontend/src/blns/blns.base64.json b/compiler/noirc_frontend/src/lexer/blns/blns.base64.json similarity index 100% rename from compiler/noirc_frontend/src/blns/blns.base64.json rename to compiler/noirc_frontend/src/lexer/blns/blns.base64.json diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs index c4b6bb288dc..265b9e4b5a3 100644 --- a/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/compiler/noirc_frontend/src/lexer/lexer.rs @@ -1160,7 +1160,7 @@ mod tests { fn test_big_list_of_naughty_strings() { use std::mem::discriminant; - let blns_contents = include_str!(env!("BLNS_JSON_PATH")); + let blns_contents = include_str!("./blns/blns.base64.json"); let blns_base64: Vec&lt;String&gt; = serde_json::from_str(blns_contents).expect("BLNS json invalid"); for blns_base64_str in blns_base64 { @@ -1178,7 +1178,7 @@ .as_ref() .map(|token_discriminator| { discriminant(token_discriminator) - == discriminant(&next_token.token()) + == discriminant(next_token.token()) }) .unwrap_or(true); diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index f8378cdd84b..357b1ead593 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -510,6 +510,7 @@ impl Attribute { Attribute::Secondary(SecondaryAttribute::ContractLibraryMethod) } ["event"] => 
Attribute::Secondary(SecondaryAttribute::Event), + ["abi", tag] => Attribute::Secondary(SecondaryAttribute::Abi(tag.to_string())), ["export"] => Attribute::Secondary(SecondaryAttribute::Export), ["deprecated", name] => { if !name.starts_with('"') && !name.ends_with('"') { @@ -604,6 +605,7 @@ pub enum SecondaryAttribute { Export, Field(String), Custom(String), + Abi(String), } impl fmt::Display for SecondaryAttribute { @@ -618,6 +620,7 @@ SecondaryAttribute::Event => write!(f, "#[event]"), SecondaryAttribute::Export => write!(f, "#[export]"), SecondaryAttribute::Field(ref k) => write!(f, "#[field({k})]"), + SecondaryAttribute::Abi(ref k) => write!(f, "#[abi({k})]"), } } } @@ -640,7 +643,9 @@ impl AsRef&lt;str&gt; for SecondaryAttribute { match self { SecondaryAttribute::Deprecated(Some(string)) => string, SecondaryAttribute::Deprecated(None) => "", - SecondaryAttribute::Custom(string) | SecondaryAttribute::Field(string) => string, + SecondaryAttribute::Custom(string) + | SecondaryAttribute::Field(string) + | SecondaryAttribute::Abi(string) => string, SecondaryAttribute::ContractLibraryMethod => "", SecondaryAttribute::Event | SecondaryAttribute::Export => "", } diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs index 6ce6f4325e4..93d7960faf5 100644 --- a/compiler/noirc_frontend/src/lib.rs +++ b/compiler/noirc_frontend/src/lib.rs @@ -45,7 +45,6 @@ pub mod macros_api { pub use noirc_errors::Span; pub use crate::graph::CrateId; - use crate::hir::def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}; pub use crate::hir::def_collector::errors::MacroError; pub use crate::hir_def::expr::{HirExpression, HirLiteral}; pub use crate::hir_def::stmt::HirStatement; @@ -76,15 +75,6 @@ pub mod macros_api { context: &HirContext, ) -> Result&lt;SortedModule, (MacroError, FileId)&gt;; - // TODO(#4653): generalize this function - fn process_collected_defs( - &self, - _crate_id: &CrateId, - _context: &mut HirContext, - _collected_trait_impls: &[UnresolvedTraitImpl], - _collected_functions: &mut [UnresolvedFunctions], - ) -> Result<(), (MacroError, FileId)>; - /// Function to manipulate the AST after type checking has been completed. /// The AST after type checking has been done is called the HIR. 
fn process_typed_ast( diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index dcfceccdb57..ffd760d6d7f 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -146,6 +146,7 @@ pub struct NodeInterner { // Maps GlobalId -> GlobalInfo // NOTE: currently only used for checking repeat globals and restricting their scope to a module globals: Vec&lt;GlobalInfo&gt;, + global_attributes: HashMap&lt;GlobalId, Vec&lt;SecondaryAttribute&gt;&gt;, next_type_variable_id: std::cell::Cell&lt;usize&gt;, @@ -480,6 +481,7 @@ impl Default for NodeInterner { field_indices: HashMap::new(), next_type_variable_id: std::cell::Cell::new(0), globals: Vec::new(), + global_attributes: HashMap::new(), struct_methods: HashMap::new(), primitive_methods: HashMap::new(), type_alias_ref: Vec::new(), @@ -647,11 +649,13 @@ impl NodeInterner { local_id: LocalModuleId, let_statement: StmtId, file: FileId, + attributes: Vec&lt;SecondaryAttribute&gt;, ) -> GlobalId { let id = GlobalId(self.globals.len()); let location = Location::new(ident.span(), file); let name = ident.to_string(); let definition_id = self.push_definition(name, false, DefinitionKind::Global(id), location); + self.globals.push(GlobalInfo { id, definition_id, local_id, let_statement, location, }); + self.global_attributes.insert(id, attributes); id } @@ -673,9 +678,10 @@ name: Ident, local_id: LocalModuleId, file: FileId, + attributes: Vec&lt;SecondaryAttribute&gt;, ) -> GlobalId { let statement = self.push_stmt(HirStatement::Error); - self.push_global(name, local_id, statement, file) + self.push_global(name, local_id, statement, file, attributes) } /// Intern an empty function. @@ -838,6 +844,10 @@ impl NodeInterner { &self.struct_attributes[struct_id] } + pub fn global_attributes(&self, global_id: &GlobalId) -> &[SecondaryAttribute] { + &self.global_attributes[global_id] + } + /// Returns the interned statement corresponding to `stmt_id` pub fn statement(&self, stmt_id: &StmtId) -> HirStatement { let def = diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index 7ecff12163a..0a21465fe87 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -158,14 +158,17 @@ fn implementation() -> impl NoirParser&lt;TopLevelStatement&gt; { /// global_declaration: 'global' ident global_type_annotation '=' literal fn global_declaration() -> impl NoirParser&lt;TopLevelStatement&gt; { - let p = ignore_then_commit( - keyword(Keyword::Global).labelled(ParsingRuleLabel::Global), - ident().map(Pattern::Identifier), - ); + let p = attributes::attributes() + .then_ignore(keyword(Keyword::Global).labelled(ParsingRuleLabel::Global)) + .then(ident().map(Pattern::Identifier)); let p = then_commit(p, optional_type_annotation()); let p = then_commit_ignore(p, just(Token::Assign)); let p = then_commit(p, expression()); - p.map(LetStatement::new_let).map(TopLevelStatement::Global) + p.validate(|(((attributes, pattern), r#type), expression), span, emit| { + let global_attributes = attributes::validate_secondary_attributes(attributes, span, emit); + LetStatement { pattern, r#type, expression, attributes: global_attributes } + }) + .map(TopLevelStatement::Global) } /// submodule: 'mod' ident '{' module '}' @@ -1242,7 +1245,7 @@ where mod test { use super::test_helpers::*; use super::*; - use crate::{ArrayLiteral, Literal}; + use crate::ArrayLiteral; #[test] fn parse_infix() { diff --git a/compiler/noirc_frontend/src/parser/parser/attributes.rs b/compiler/noirc_frontend/src/parser/parser/attributes.rs index 
4b256a95c8b..47add6f82e0 100644 --- a/compiler/noirc_frontend/src/parser/parser/attributes.rs +++ b/compiler/noirc_frontend/src/parser/parser/attributes.rs @@ -2,6 +2,7 @@ use chumsky::Parser; use noirc_errors::Span; use crate::{ + macros_api::SecondaryAttribute, parser::{NoirParser, ParserError, ParserErrorReason}, token::{Attribute, Attributes, Token, TokenKind}, }; @@ -44,3 +45,25 @@ pub(super) fn validate_attributes( Attributes { function: primary, secondary } } + +pub(super) fn validate_secondary_attributes( + attributes: Vec&lt;Attribute&gt;, + span: Span, + emit: &mut dyn FnMut(ParserError), +) -> Vec&lt;SecondaryAttribute&gt; { + let mut struct_attributes = vec![]; + + for attribute in attributes { + match attribute { + Attribute::Function(..) => { + emit(ParserError::with_reason( + ParserErrorReason::NoFunctionAttributesAllowedOnStruct, + span, + )); + } + Attribute::Secondary(attr) => struct_attributes.push(attr), + } + } + + struct_attributes +} diff --git a/compiler/noirc_frontend/src/parser/parser/literals.rs b/compiler/noirc_frontend/src/parser/parser/literals.rs index 32f4f03de2e..83d7b832d27 100644 --- a/compiler/noirc_frontend/src/parser/parser/literals.rs +++ b/compiler/noirc_frontend/src/parser/parser/literals.rs @@ -105,6 +105,7 @@ mod test { Case { source: r#" r#"foo" "#, expect: "(none)", errors: 2 }, // empty string Case { source: r#"r"""#, expect: r#"r"""#, errors: 0 }, + #[allow(clippy::needless_raw_string_hashes)] Case { source: r####"r###""###"####, expect: r####"r###""###"####, errors: 0 }, // miscellaneous Case { source: r##" r#\"foo\"# "##, expect: "plain::r", errors: 2 }, diff --git a/compiler/noirc_frontend/src/parser/parser/structs.rs b/compiler/noirc_frontend/src/parser/parser/structs.rs index 0212f56783f..87e58f69efb 100644 --- a/compiler/noirc_frontend/src/parser/parser/structs.rs +++ b/compiler/noirc_frontend/src/parser/parser/structs.rs @@ -1,17 +1,15 @@ use chumsky::prelude::*; -use noirc_errors::Span; use crate::{ - macros_api::SecondaryAttribute, parser::{ parser::{ - attributes::attributes, + attributes::{attributes, validate_secondary_attributes}, function, parse_type, primitives::{ident, keyword}, }, - NoirParser, ParserError, ParserErrorReason, TopLevelStatement, + NoirParser, TopLevelStatement, }, - token::{Attribute, Keyword, Token}, + token::{Keyword, Token}, Ident, NoirStruct, UnresolvedType, }; @@ -35,7 +33,7 @@ .then(function::generics()) .then(fields) .validate(|(((raw_attributes, name), generics), fields), span, emit| { - let attributes = validate_struct_attributes(raw_attributes, span, emit); + let attributes = validate_secondary_attributes(raw_attributes, span, emit); TopLevelStatement::Struct(NoirStruct { name, attributes, generics, fields, span }) }) } @@ -48,28 +46,6 @@ fn struct_fields() -> impl NoirParser&lt;Vec&lt;(Ident, UnresolvedType)&gt;&gt; { .allow_trailing() } -fn validate_struct_attributes( - attributes: Vec&lt;Attribute&gt;, - span: Span, - emit: &mut dyn FnMut(ParserError), -) -> Vec&lt;SecondaryAttribute&gt; { - let mut struct_attributes = vec![]; - - for attribute in attributes { - match attribute { - Attribute::Function(..) 
=> { - emit(ParserError::with_reason( - ParserErrorReason::NoFunctionAttributesAllowedOnStruct, - span, - )); - } - Attribute::Secondary(attr) => struct_attributes.push(attr), - } - } - - struct_attributes -} - #[cfg(test)] mod test { use super::*; diff --git a/compiler/wasm/README.md b/compiler/wasm/README.md index 52f7e83e19e..fd534722622 100644 --- a/compiler/wasm/README.md +++ b/compiler/wasm/README.md @@ -26,17 +26,3 @@ for (const path of files) { } const myCompiledCode = await compile(fm); ``` - -## Building from source - -Outside of the [noir repo](https://github.com/noir-lang/noir), this package can be built using the command below: - -```bash -nix build -L github:noir-lang/noir/master#noir_wasm -``` - -If you are within the noir repo and would like to build local changes, you can use: - -```bash -nix build -L #noir_wasm -``` diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index 9e6fca1126e..de157a1fe20 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -30,11 +30,16 @@ export type DependencyGraph = { library_dependencies: Readonly&lt;Record&lt;string, readonly string[]&gt;&gt;; } +export type ContractOutputsArtifact = { + structs: Record&lt;string, AbiType[]&gt;; + globals: Record&lt;string, AbiValue[]&gt;; +} + export type ContractArtifact = { noir_version: string; name: string; functions: Array; - events: Array; + outputs: ContractOutputsArtifact; file_map: Record; }; @@ -218,7 +223,7 @@ pub fn compile_contract( noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), name: optimized_contract.name, functions, - events: optimized_contract.events, + outputs: optimized_contract.outputs.into(), file_map: optimized_contract.file_map, }; diff --git a/compiler/wasm/src/compile_new.rs b/compiler/wasm/src/compile_new.rs index d6b382f669f..c187fe7f3de 100644 --- a/compiler/wasm/src/compile_new.rs +++ b/compiler/wasm/src/compile_new.rs @@ -146,7 +146,7 @@ impl CompilerContext { noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), name: optimized_contract.name, functions, - events: optimized_contract.events, + outputs: optimized_contract.outputs.into(), file_map: optimized_contract.file_map, }; diff --git a/compiler/wasm/src/types/noir_artifact.ts b/compiler/wasm/src/types/noir_artifact.ts index 935c99043da..f241b539dc7 100644 --- a/compiler/wasm/src/types/noir_artifact.ts +++ b/compiler/wasm/src/types/noir_artifact.ts @@ -1,35 +1,55 @@ import { Abi, AbiType } from '@noir-lang/types'; /** - * A named type. + * A basic value. */ -export interface ABIVariable { +export interface BasicValue&lt;T extends string, V&gt; { /** - * The name of the variable. - */ - name: string; - /** - * The type of the variable. + * The kind of the value. */ - type: AbiType; + kind: T; + value: V; } /** - * A contract event. + * An exported value. */ -export interface EventAbi { +export type AbiValue = + | BasicValue<'boolean', boolean> + | BasicValue<'string', string> + | BasicValue<'array', AbiValue[]> + | TupleValue + | IntegerValue + | StructValue; + +export type TypedStructFieldValue&lt;T&gt; = { name: string; value: T }; + +export interface StructValue { + kind: 'struct'; + fields: TypedStructFieldValue&lt;AbiValue&gt;[]; +} + +export interface TupleValue { + kind: 'tuple'; + fields: AbiValue[]; +} + +export interface IntegerValue extends BasicValue<'integer', string> { + sign: boolean; +} + +/** + * A named type. + */ +export interface ABIVariable { /** - * The event name. + * The name of the variable. */ name: string; /** - * Fully qualified name of the event. - */ - path: string; - /** - * The fields of the event. + * The type of the variable. 
*/ - fields: ABIVariable[]; + type: AbiType; } /** @@ -60,8 +80,11 @@ export interface ContractArtifact { noir_version: string; /** The functions of the contract. */ functions: NoirFunctionEntry[]; - /** The events of the contract */ - events: EventAbi[]; + + outputs: { + structs: Record&lt;string, AbiType[]&gt;; + globals: Record&lt;string, AbiValue[]&gt;; + }; /** The map of file ID to the source code and path of the file. */ file_map: DebugFileMap; } diff --git a/default.nix b/default.nix deleted file mode 100644 index 9e230590a61..00000000000 --- a/default.nix +++ /dev/null @@ -1,13 +0,0 @@ -let - lock = builtins.fromJSON (builtins.readFile ./flake.lock); - flakeCompatRev = lock.nodes.flake-compat.locked.rev; - flakeCompatHash = lock.nodes.flake-compat.locked.narHash; - flakeCompat = fetchTarball { - url = "https://github.com/edolstra/flake-compat/archive/${flakeCompatRev}.tar.gz"; - sha256 = flakeCompatHash; - }; - compat = import flakeCompat { - src = ./.; - }; -in -compat.defaultNix diff --git a/docs/docs/getting_started/installation/other_install_methods.md b/docs/docs/getting_started/installation/other_install_methods.md index a35e34aaf9c..3634723562b 100644 --- a/docs/docs/getting_started/installation/other_install_methods.md +++ b/docs/docs/getting_started/installation/other_install_methods.md @@ -1,6 +1,6 @@ --- -title: Alternative Install Methods -description: There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains other methods that don't rely on noirup, such as compiling from source, installing from binaries, and using WSL for windows +title: Alternative Installations +description: There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains how to specify which version to install when using noirup, and how to use WSL for Windows. keywords: [ Installation Nargo @@ -12,10 +12,7 @@ keywords: [ Linux Nix Direnv - Shell & editor experience - Building and testing Uninstalling Nargo - Noir vs code extension, ] sidebar_position: 1 --- @@ -86,146 +83,7 @@ With `noirup`, you can easily switch between different Nargo versions, including noirup --path ./path/to/local/source ``` -## Alternate Installation Methods (No Longer Recommended) - -While the following methods are available, they are no longer recommended. We advise using noirup for a more efficient and flexible installation experience. - -However, there are other methods for installing Nargo: - -- [Binaries](#option-1-installing-from-binaries) -- [Compiling from Source](#option-2-compile-from-source) -- [WSL for Windows](#option-3-wsl-for-windows) - -### Option 1: Installing from Binaries - -See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous -platform specific binaries. - -#### Step 1 - -Paste and run the following in the terminal to extract and install the binary: - -> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend -> `sudo` and re-run it. 
- -##### macOS (Apple Silicon) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-aarch64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### macOS (Intel) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### Linux (Bash) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ -echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ -source ~/.bashrc -``` - -#### Step 2 - -Check if the installation was successful by running `nargo --version`. You should get a version number. - -> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from -> Finder. Close the new terminal popped up and `nargo` should now be accessible. - -### Option 2: Compile from Source - -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). - -Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. - -#### Setting up your environment - -For the best experience, please follow these instructions to setup your environment: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. -2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -3. Install direnv into your Nix profile by running: - -```sh -nix profile install nixpkgs#direnv -``` - -4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). - 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. -5. Restart your shell. - -#### Shell & editor experience - -Now that your environment is set up, you can get to work on the project. - -1. Clone the repository, such as: - -```sh -git clone git@github.com:noir-lang/noir -``` - -> Replacing `noir` with whichever repository you want to work on. - -2. Navigate to the directory: - -```sh -cd noir -``` - -> Replacing `noir` with whichever repository you cloned. - -3. You should see a **direnv error** because projects aren't allowed by default. 
Make sure you've reviewed and trust our `.envrc` file, then you need to run: - -```sh -direnv allow -``` - -4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. - -5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): - -```sh -code . -``` - -6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. - -#### Building and testing - -Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.73.0 at the time of this writing. - -If you want to build the entire project in an isolated sandbox, you can use Nix commands: - -1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. -2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. - -#### Without `direnv` - -If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. - -Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! - -### Option 3: WSL (for Windows) +## Installation on Windows The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). @@ -235,20 +93,10 @@ step 2: Follow the [Noirup instructions](#encouraged-installation-method-noirup) ## Uninstalling Nargo -### Noirup - -If you installed Nargo with `noirup` or through directly downloading binaries, you can uninstall Nargo by removing the files in `~/.nargo`, `~/nargo`, and `~/noir_cache`. This ensures that all installed binaries, configurations, and cache related to Nargo are fully removed from your system. +If you installed Nargo with `noirup`, you can uninstall Nargo by removing the files in `~/.nargo`, `~/nargo`, and `~/noir_cache`. This ensures that all installed binaries, configurations, and cache related to Nargo are fully removed from your system. ```bash rm -r ~/.nargo rm -r ~/nargo rm -r ~/noir_cache ``` - -### Nix - -If you installed Nargo with Nix or compiled it from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. 
- -```bash -rm ~/.nix-profile/bin/nargo -``` diff --git a/docs/docs/getting_started/tooling/noir_codegen.md b/docs/docs/getting_started/tooling/noir_codegen.md new file mode 100644 index 00000000000..d65151da0ab --- /dev/null +++ b/docs/docs/getting_started/tooling/noir_codegen.md @@ -0,0 +1,113 @@ +--- +title: Noir Codegen for TypeScript +description: Learn how to use Noir codegen to generate TypeScript bindings +keywords: [Nargo, Noir, compile, TypeScript] +sidebar_position: 2 +--- + +When using TypeScript, it is extra work to interpret Noir program outputs in a type-safe way. Third party libraries may exist for popular Noir programs, but they are either hard to find or unmaintained. + +Now you can generate TypeScript bindings for your Noir programs in two steps: +1. Exporting Noir functions using `nargo export` +2. Using the TypeScript module `noir_codegen` to generate TypeScript bindings + +**Note:** you can only export functions from a Noir *library* (not binary or contract program types). + +## Installation + +### Your TypeScript project + +If you don't already have a TypeScript project, you can add the module with `yarn` (or `npm`), then initialize it: + +```bash +yarn add typescript -D +npx tsc --init +``` + +### Add TypeScript module - `noir_codegen` + +The following command will add the module to your project's devDependencies: + +```bash +yarn add @noir-lang/noir_codegen -D +``` + +### Nargo library +Make sure you have Nargo, v0.25.0 or greater, installed. If you don't, follow the [installation guide](../installation/index.md). + +If you're in a new project, make a `circuits` folder and create a new Noir library: + +```bash +mkdir circuits && cd circuits +nargo new --lib myNoirLib +``` + +## Usage + +### Export ABI of specified functions + +First, go to the `.nr` files in your Noir library, and add the `#[export]` macro to each function that you want to use in TypeScript. + +```rust +#[export] +fn your_function(... +``` + +From your Noir library (where `Nargo.toml` is), run the following command: + +```bash +nargo export +``` + +You will now have an `export` directory with a .json file per exported function. + +You can also specify the directory of Noir programs using `--program-dir`, for example: + +```bash +nargo export --program-dir=./circuits/myNoirLib +``` + +### Generate TypeScript bindings from exported functions + +To use the `noir-codegen` package we added to the TypeScript project: + +```bash +yarn noir-codegen ./export/your_function.json +``` + +This creates an `exports` directory with an `index.ts` file containing all exported functions. + +**Note:** adding `--out-dir` allows you to specify an output dir for your TypeScript bindings to go. 
E.g.: + +```bash +yarn noir-codegen ./export/*.json --out-dir ./path/to/output/dir +``` + +## Example .nr function to .ts output + +Consider a Noir library with this function: + +```rust +#[export] +fn is_equal(x: Field, y: Field) -> bool { + x == y +} +``` + +After the export and codegen steps, you should have an `index.ts` like: + +```typescript +export type Field = string; + + +export const is_equal_circuit: CompiledCircuit = {"abi":{"parameters":[{"name":"x","type":{"kind":"field"},"visibility":"private"},{"name":"y","type":{"kind":"field"},"visibility":"private"}],"param_witnesses":{"x":[{"start":0,"end":1}],"y":[{"start":1,"end":2}]},"return_type":{"abi_type":{"kind":"boolean"},"visibility":"private"},"return_witnesses":[4]},"bytecode":"H4sIAAAAAAAA/7WUMQ7DIAxFQ0Krrr2JjSGYLVcpKrn/CaqqDQN12WK+hPBgmWd/wEyHbF1SS923uhOs3pfoChI+wKXMAXzIKyNj4PB0TFTYc0w5RUjoqeAeEu1wqK0F54RGkWvW44LPzExnlkbMEs4JNZmN8PxS42uHv82T8a3Jeyn2Ks+VLPcO558HmyLMCDOXAXXtpPt4R/Rt9T36ss6dS9HGPx/eG17nGegKBQAA"}; + +export async function is_equal(x: Field, y: Field, foreignCallHandler?: ForeignCallHandler): Promise&lt;boolean&gt; { + const program = new Noir(is_equal_circuit); + const args: InputMap = { x, y }; + const { returnValue } = await program.execute(args, foreignCallHandler); + return returnValue as boolean; +} +``` + +Now the `is_equal()` function and relevant types are readily available for use in TypeScript. diff --git a/docs/package.json b/docs/package.json index 78560707795..f9e95fc02f8 100644 --- a/docs/package.json +++ b/docs/package.json @@ -5,10 +5,11 @@ "scripts": { "preprocess": "yarn workspace @noir-lang/acvm_js build && ./scripts/codegen_nargo_reference.sh && yarn node ./scripts/preprocess/index.js", "start": "yarn preprocess && docusaurus start", - "build": "yarn preprocess && yarn version::stables && docusaurus build", + "build": "yarn preprocess && docusaurus build", + "clean": "rm -rf ./processed-docs ./processed-docs ./build", "version::stables": "ts-node ./scripts/setStable.ts", "serve": "serve build", - "version": "yarn preprocess && docusaurus build && docusaurus docs:version" + "version": "yarn version::stables && ./scripts/cut_version.sh" }, "dependencies": { "@docusaurus/core": "^3.0.1", diff --git a/docs/scripts/cut_version.sh b/docs/scripts/cut_version.sh new file mode 100755 index 00000000000..4000707328c --- /dev/null +++ b/docs/scripts/cut_version.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -eu + +cd $(dirname "$0")/.. + +VERSION=$1 + +# We assume that the new release tag has been made on github, so setStable.ts will add this to `versions.json`. +# We don't have a version of the docs for this release however (that's what we're doing right now!) so we need to remove it. +jq 'map(select(. != "'"$VERSION"'"))' versions.json > tmp.json && mv tmp.json versions.json + +# We need to build the docs in order to perform all necessary preprocessing. +yarn build + +# Finally cut the actual new docs version. +yarn docusaurus docs:version $VERSION diff --git a/flake.nix b/flake.nix deleted file mode 100644 index 1cb421a49ef..00000000000 --- a/flake.nix +++ /dev/null @@ -1,260 +0,0 @@ -{ - description = "Build the Noir programming language"; - # All of these inputs (a.k.a. 
dependencies) need to align with inputs we - # use so they use the `inputs.*.follows` syntax to reference our inputs - inputs = { - nixpkgs = { - url = "github:NixOS/nixpkgs/nixos-23.05"; - }; - - flake-utils = { - url = "github:numtide/flake-utils"; - }; - - flake-compat = { - url = "github:edolstra/flake-compat"; - flake = false; - }; - - fenix = { - url = "github:nix-community/fenix"; - inputs = { - nixpkgs.follows = "nixpkgs"; - }; - }; - - crane = { - url = "github:ipetkov/crane"; - inputs = { - nixpkgs.follows = "nixpkgs"; - flake-utils.follows = "flake-utils"; - flake-compat.follows = "flake-compat"; - }; - }; - }; - - outputs = - { self, nixpkgs, crane, flake-utils, fenix, ... }: - flake-utils.lib.eachDefaultSystem (system: - let - pkgs = import nixpkgs { - inherit system; - }; - - rustToolchain = fenix.packages.${system}.fromToolchainFile { - file = ./rust-toolchain.toml; - sha256 = "sha256-rLP8+fTxnPHoR96ZJiCa/5Ans1OojI7MLsmSqR2ip8o="; - }; - - craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; - - # The `self.rev` property is only available when the working tree is not dirty - GIT_COMMIT = if (self ? rev) then self.rev else "unknown"; - GIT_DIRTY = if (self ? rev) then "false" else "true"; - - extraBuildInputs = [ ] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [ - # Need libiconv and apple Security on Darwin. See https://github.com/ipetkov/crane/issues/156 - pkgs.libiconv - pkgs.darwin.apple_sdk.frameworks.Security - ]; - - environment = { - # We enable backtraces on any failure for help with debugging - RUST_BACKTRACE = "1"; - - # We download the Wasm version of `acvm_backend` in the barretenberg releases for the ACVM `blackbox_solver` - BARRETENBERG_ARCHIVE = pkgs.fetchurl { - url = "https://github.com/AztecProtocol/barretenberg/releases/download/barretenberg-v0.4.5/acvm_backend.wasm.tar.gz"; - sha256 = "sha256-xONt5pTKWf/YbVnX/NXl/VNBbtKd+CP7CLkB1jf0RHw="; - }; - }; - - # Configuration shared between builds - config = { - # x-release-please-start-version - version = "0.26.0"; - # x-release-please-end - - src = pkgs.lib.cleanSourceWith { - src = craneLib.path ./.; - # Custom filter with various file extensions that we rely upon to build packages - # Currently: `.nr`, `.sol`, `.sh`, `.json`, `.md` and `.wasm` - filter = path: type: - (builtins.match ".*\.(nr|sol|sh|json|md|wasm|txt)$" path != null) || (craneLib.filterCargoSources path type); - }; - - # TODO(#1198): It'd be nice to include these flags when running `cargo clippy` in a devShell. - cargoClippyExtraArgs = "--all-targets -- -D warnings"; - - # TODO(#1198): It'd be nice to include this flag when running `cargo test` in a devShell. 
- cargoTestExtraArgs = "--workspace"; - }; - - # Combine the environment and other configuration needed for Crane to build our Rust packages - nativeConfig = environment // config // { - nativeBuildInputs = [ ]; - - buildInputs = [ ] ++ extraBuildInputs; - }; - - # Combine the environmnet and other configuration needed for Crane to build our Wasm packages - wasmConfig = environment // config // { - CARGO_TARGET_DIR = "./target"; - - nativeBuildInputs = with pkgs; [ - which - git - jq - rustToolchain - wasm-bindgen-cli - binaryen - ]; - - buildInputs = [ ] ++ extraBuildInputs; - }; - - # Build *just* the cargo dependencies, so we can reuse all of that work between runs - native-cargo-artifacts = craneLib.buildDepsOnly (nativeConfig // { - pname = "nargo"; - }); - noirc-abi-wasm-cargo-artifacts = craneLib.buildDepsOnly (wasmConfig // { - pname = "noirc_abi_wasm"; - }); - acvm-js-cargo-artifacts = craneLib.buildDepsOnly (wasmConfig // { - pname = "acvm_js"; - }); - - nargo = craneLib.buildPackage (nativeConfig // { - pname = "nargo"; - - inherit GIT_COMMIT GIT_DIRTY; - - cargoArtifacts = native-cargo-artifacts; - - # We don't want to run tests because they don't work in the Nix sandbox - doCheck = false; - }); - - noirc_abi_wasm = craneLib.buildPackage (wasmConfig // rec { - pname = "noirc_abi_wasm"; - - inherit GIT_COMMIT GIT_DIRTY; - - cargoArtifacts = noirc-abi-wasm-cargo-artifacts; - - cargoExtraArgs = "--package ${pname} --target wasm32-unknown-unknown"; - - buildPhaseCargoCommand = '' - bash tooling/noirc_abi_wasm/buildPhaseCargoCommand.sh release - ''; - - installPhase = '' - bash tooling/noirc_abi_wasm/installPhase.sh - ''; - - # We don't want to run tests because they don't work in the Nix sandbox - doCheck = false; - }); - - acvm_js = craneLib.buildPackage (wasmConfig // rec { - pname = "acvm_js"; - - inherit GIT_COMMIT GIT_DIRTY; - - cargoArtifacts = acvm-js-cargo-artifacts; - - cargoExtraArgs = "--package ${pname} --target wasm32-unknown-unknown"; - - buildPhaseCargoCommand = '' - bash acvm-repo/acvm_js/buildPhaseCargoCommand.sh release - ''; - - installPhase = '' - bash acvm-repo/acvm_js/installPhase.sh - ''; - - # We don't want to run tests because they don't work in the Nix sandbox - doCheck = false; - }); - - wasm-bindgen-cli = pkgs.callPackage ./wasm-bindgen-cli.nix { - rustPlatform = pkgs.makeRustPlatform { - rustc = rustToolchain; - cargo = rustToolchain; - }; - }; - in - { - # We use `checks` to run `cargo clippy` and `cargo fmt` since we disable checks in the primary derivations - checks = { - cargo-clippy = craneLib.cargoClippy (nativeConfig // { - pname = "noir"; - - inherit GIT_COMMIT GIT_DIRTY; - - cargoArtifacts = native-cargo-artifacts; - }); - - cargo-fmt = craneLib.cargoFmt (nativeConfig // { - pname = "noir"; - - inherit GIT_COMMIT GIT_DIRTY; - - cargoArtifacts = native-cargo-artifacts; - }); - }; - - packages = { - default = nargo; - - # Nix flakes cannot build more than one derivation in one command (see https://github.com/NixOS/nix/issues/5591) - # so we use `symlinkJoin` to build everything as the "all" package. - all = pkgs.symlinkJoin { name = "all"; paths = [ nargo noirc_abi_wasm acvm_js ]; }; - all_wasm = pkgs.symlinkJoin { name = "all_wasm"; paths = [ noirc_abi_wasm acvm_js ]; }; - - # We also export individual packages to enable `nix build .#nargo -L`, etc. 
- inherit nargo; - inherit noirc_abi_wasm; - inherit acvm_js; - - # We expose the `*-cargo-artifacts` derivations so we can cache our cargo dependencies in CI - inherit native-cargo-artifacts; - inherit noirc-abi-wasm-cargo-artifacts; - inherit acvm-js-cargo-artifacts; - }; - - # Setup the environment to match the environment settings, the inputs from our checks derivations, - # and extra tooling via `nativeBuildInputs` - devShells.default = pkgs.mkShell (environment // { - inputsFrom = [ - nargo - noirc_abi_wasm - acvm_js - ]; - - # Additional tools that weren't included as `nativeBuildInputs` of any of the derivations in `inputsFrom` - nativeBuildInputs = with pkgs; [ - # Rust toolchain - rustToolchain - # Other tools - starship - yarn - nodejs-18_x - # Used by the `bb` binary - curl - gzip - # This ensures the right lldb is in the environment for running rust-lldb - llvmPackages.lldb - # Nix tools - nil - nixpkgs-fmt - ]; - - shellHook = '' - eval "$(starship init bash)" - ''; - }); - }); -} - diff --git a/noir_stdlib/src/field/bn254.nr b/noir_stdlib/src/field/bn254.nr index 765f8a9d849..d70310be391 100644 --- a/noir_stdlib/src/field/bn254.nr +++ b/noir_stdlib/src/field/bn254.nr @@ -21,11 +21,23 @@ unconstrained fn decompose_unsafe(x: Field) -> (Field, Field) { (low, high) } +// Assert that (alo > blo && ahi >= bhi) || (alo <= blo && ahi > bhi) +fn assert_gt_limbs(a: (Field, Field), b: (Field, Field)) { + let (alo, ahi) = a; + let (blo, bhi) = b; + let borrow = lte_unsafe(alo, blo, 16); + + let rlo = alo - blo - 1 + (borrow as Field) * TWO_POW_128; + let rhi = ahi - bhi - (borrow as Field); + + rlo.assert_max_bit_size(128); + rhi.assert_max_bit_size(128); +} + /// Decompose a single field into two 16 byte fields. pub fn decompose(x: Field) -> (Field, Field) { // Take hints of the decomposition let (xlo, xhi) = decompose_unsafe(x); - let borrow = lt_unsafe(PLO, xlo, 16); // Range check the limbs xlo.assert_max_bit_size(128); @@ -34,13 +46,8 @@ pub fn decompose(x: Field) -> (Field, Field) { // Check that the decomposition is correct assert_eq(x, xlo + TWO_POW_128 * xhi); - // Check that (xlo < plo && xhi <= phi) || (xlo >= plo && xhi < phi) - let rlo = PLO - xlo + (borrow as Field) * TWO_POW_128; - let rhi = PHI - xhi - (borrow as Field); - - rlo.assert_max_bit_size(128); - rhi.assert_max_bit_size(128); - + // Assert that the decomposition of P is greater than the decomposition of x + assert_gt_limbs((PLO, PHI), (xlo, xhi)); (xlo, xhi) } @@ -69,17 +76,11 @@ unconstrained fn lte_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { pub fn assert_gt(a: Field, b: Field) { // Decompose a and b - let (alo, ahi) = decompose(a); - let (blo, bhi) = decompose(b); - - let borrow = lte_unsafe(alo, blo, 16); + let a_limbs = decompose(a); + let b_limbs = decompose(b); - // Assert that (alo > blo && ahi >= bhi) || (alo <= blo && ahi > bhi) - let rlo = alo - blo - 1 + (borrow as Field) * TWO_POW_128; - let rhi = ahi - bhi - (borrow as Field); - - rlo.assert_max_bit_size(128); - rhi.assert_max_bit_size(128); + // Assert that a_limbs is greater than b_limbs + assert_gt_limbs(a_limbs, b_limbs) } pub fn assert_lt(a: Field, b: Field) { diff --git a/package.json b/package.json index 049c1634dd2..8abaced7bdd 100644 --- a/package.json +++ b/package.json @@ -13,23 +13,16 @@ "docs" ], "scripts": { - "build": "yarn workspaces foreach --parallel --topological-dev --verbose run build", + "build": "yarn workspaces foreach -vp --topological-dev --exclude \"{docs,@noir-lang/root}\" run build", "test": "yarn workspaces 
foreach --parallel --verbose run test", "test:integration": "yarn workspace integration-tests test", "clean:workspaces": "yarn workspaces foreach --exclude @noir-lang/root run clean", - "clean:root": "rm -rf ./result ./target ./packages", + "clean:root": "rm -rf ./target ./packages", "clean": "yarn clean:workspaces && yarn clean:root", "lint": "yarn workspaces foreach --verbose run lint", "spellcheck": "cspell '**/*.{md,rs}' -c ./cspell.json", - "install:acvm_js": "yarn workspace @noir-lang/acvm_js run install:from:nix", - "install:noir_wasm": "yarn workspace @noir-lang/noir_wasm run install:from:nix", - "install:noirc_abi_wasm": "yarn workspace @noir-lang/noirc_abi run install:from:nix", - "install:from:nix": "yarn install:acvm_js && yarn install:noir_wasm && yarn install:noirc_abi_wasm", - "build:types": "yarn workspace @noir-lang/types run build", - "build:backend_barretenberg": "yarn workspace @noir-lang/backend_barretenberg run build", - "build:noir_js": "yarn workspace @noir-lang/noir_js run build", - "build:js:only": "yarn workspaces foreach -vtp --from \"{@noir-lang/types,@noir-lang/backend_barretenberg,@noir-lang/noir_js,@noir-lang/noir_codegen}\" run build", - "prepare:publish": "yarn clean && yarn install:from:nix && yarn build:js:only", + "prepare:publish": "yarn clean && yarn build", + "build:js:only": "yarn workspaces foreach -vtp --exclude \"{@noir-lang/acvm_js,@noir-lang/noirc_abi,@noir-lang/noir_wasm,docs,@noir-lang/root}\" run build", "nightly:version": "yarn workspaces foreach run nightly:version", "publish:all": "yarn install && yarn workspaces foreach run publish" }, diff --git a/release-please-config.json b/release-please-config.json index 217a86303a1..0ba192754a0 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -13,7 +13,6 @@ "include-component-in-tag": false, "extra-files": [ "Cargo.toml", - "flake.nix", { "type": "json", "path": "compiler/wasm/package.json", @@ -82,4 +81,4 @@ "sentence-case" ], "bootstrap-sha": "690cfc0468de0b9aee53ccfe832c71c16e61e5fc" -} \ No newline at end of file +} diff --git a/shell.nix b/shell.nix deleted file mode 100644 index b72d4a4697b..00000000000 --- a/shell.nix +++ /dev/null @@ -1,13 +0,0 @@ -let - lock = builtins.fromJSON (builtins.readFile ./flake.lock); - flakeCompatRev = lock.nodes.flake-compat.locked.rev; - flakeCompatHash = lock.nodes.flake-compat.locked.narHash; - flakeCompat = fetchTarball { - url = "https://github.com/edolstra/flake-compat/archive/${flakeCompatRev}.tar.gz"; - sha256 = flakeCompatHash; - }; - compat = import flakeCompat { - src = ./.; - }; -in -compat.shellNix diff --git a/test_programs/compile_success_contract/fold_non_contract_method/Nargo.toml b/test_programs/compile_success_contract/fold_non_contract_method/Nargo.toml new file mode 100644 index 00000000000..ff64bbb6f1b --- /dev/null +++ b/test_programs/compile_success_contract/fold_non_contract_method/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "fold_non_contract_method" +type = "contract" +authors = [""] +compiler_version = ">=0.26.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_contract/fold_non_contract_method/src/main.nr b/test_programs/compile_success_contract/fold_non_contract_method/src/main.nr new file mode 100644 index 00000000000..2e00ffa1381 --- /dev/null +++ b/test_programs/compile_success_contract/fold_non_contract_method/src/main.nr @@ -0,0 +1,18 @@ +contract Foo { + use crate::times_10; + + fn double(x: Field) -> pub Field { + x * 2 + } + fn triple(x: Field) -> 
pub Field { + x * 3 + } + fn times_40(x: Field) -> pub Field { + times_10(x) * 4 + } +} + +#[fold] +fn times_10(x: Field) -> Field { + x * 10 +} diff --git a/test_programs/execution_success/array_to_slice_constant_length/Nargo.toml b/test_programs/execution_success/array_to_slice_constant_length/Nargo.toml new file mode 100644 index 00000000000..b338cf9b6ae --- /dev/null +++ b/test_programs/execution_success/array_to_slice_constant_length/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "array_to_slice_constant_length" +type = "bin" +authors = [""] +compiler_version = ">=0.26.0" + +[dependencies] diff --git a/test_programs/execution_success/array_to_slice_constant_length/Prover.toml b/test_programs/execution_success/array_to_slice_constant_length/Prover.toml new file mode 100644 index 00000000000..a52e9d3c46a --- /dev/null +++ b/test_programs/execution_success/array_to_slice_constant_length/Prover.toml @@ -0,0 +1 @@ +val = "42" diff --git a/test_programs/execution_success/array_to_slice_constant_length/src/main.nr b/test_programs/execution_success/array_to_slice_constant_length/src/main.nr new file mode 100644 index 00000000000..e81dd4a0c5f --- /dev/null +++ b/test_programs/execution_success/array_to_slice_constant_length/src/main.nr @@ -0,0 +1,10 @@ +// Regression test for https://github.com/noir-lang/noir/issues/4722 + +unconstrained fn return_array(val: Field) -> [Field; 1] { + [val; 1] +} + +fn main(val: Field) { + let array = return_array(val); + assert_constant(array.as_slice().len()); +} diff --git a/test_programs/execution_success/brillig_slice_input/Nargo.toml b/test_programs/execution_success/brillig_slice_input/Nargo.toml new file mode 100644 index 00000000000..a1c8cc3242b --- /dev/null +++ b/test_programs/execution_success/brillig_slice_input/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_slice_input" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/brillig_slice_input/src/main.nr b/test_programs/execution_success/brillig_slice_input/src/main.nr new file mode 100644 index 00000000000..09a9d9aef9d --- /dev/null +++ b/test_programs/execution_success/brillig_slice_input/src/main.nr @@ -0,0 +1,40 @@ +struct Point { + x: Field, + y: Field, +} + +unconstrained fn sum_slice(slice: [[Point; 2]]) -> Field { + let mut sum = 0; + for i in 0..slice.len() { + for j in 0..slice[i].len() { + sum += slice[i][j].x + slice[i][j].y; + } + } + sum +} + +fn main() { + let mut slice = &[]; + slice = slice.push_back([ + Point { + x: 13, + y: 14, + }, + Point { + x: 20, + y: 8, + } + ]); + slice = slice.push_back([ + Point { + x: 15, + y: 5, + }, + Point { + x: 12, + y: 13, + } + ]); + let brillig_sum = sum_slice(slice); + assert_eq(brillig_sum, 100); +} diff --git a/test_programs/execution_success/fold_call_witness_condition/Nargo.toml b/test_programs/execution_success/fold_call_witness_condition/Nargo.toml new file mode 100644 index 00000000000..cedaea348c5 --- /dev/null +++ b/test_programs/execution_success/fold_call_witness_condition/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "fold_call_witness_condition" +type = "bin" +authors = [""] +compiler_version = ">=0.26.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/fold_call_witness_condition/Prover.toml b/test_programs/execution_success/fold_call_witness_condition/Prover.toml new file mode 100644 index 00000000000..a4d6339b661 --- /dev/null +++ b/test_programs/execution_success/fold_call_witness_condition/Prover.toml @@ -0,0 +1,3 @@ +x = "10" +y = 
"10" +enable = false diff --git a/test_programs/execution_success/fold_call_witness_condition/src/main.nr b/test_programs/execution_success/fold_call_witness_condition/src/main.nr new file mode 100644 index 00000000000..5dc75e4a99f --- /dev/null +++ b/test_programs/execution_success/fold_call_witness_condition/src/main.nr @@ -0,0 +1,16 @@ +global NUM_RESULTS = 2; +fn main(x: Field, y: pub Field, enable: bool) -> pub [Field; NUM_RESULTS] { + let mut result = [0; NUM_RESULTS]; + for i in 0..NUM_RESULTS { + if enable { + result[i] = return_value(x, y); + } + } + result +} + +#[fold] +fn return_value(x: Field, y: Field) -> Field { + assert(x != y); + x +} diff --git a/tooling/debugger/build.rs b/tooling/debugger/build.rs index 26a8bc64b0e..ebdf2036894 100644 --- a/tooling/debugger/build.rs +++ b/tooling/debugger/build.rs @@ -7,8 +7,7 @@ use std::{env, fs}; const GIT_COMMIT: &&str = &"GIT_COMMIT"; fn main() { - // Only use build_data if the environment variable isn't set - // The environment variable is always set when working via Nix + // Only use build_data if the environment variable isn't set. if std::env::var(GIT_COMMIT).is_err() { build_data::set_GIT_COMMIT(); build_data::set_GIT_DIRTY(); diff --git a/tooling/debugger/ignored-tests.txt b/tooling/debugger/ignored-tests.txt index 5a42ef36e7e..4507aeb8545 100644 --- a/tooling/debugger/ignored-tests.txt +++ b/tooling/debugger/ignored-tests.txt @@ -14,3 +14,4 @@ signed_comparison to_bytes_integration fold_basic fold_basic_nested_call +fold_call_witness_condition diff --git a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index ba12a20460d..1acd581b2be 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -128,9 +128,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { line: i64, ) -> Option { let line = line as usize; - let Some(line_to_opcodes) = self.source_to_opcodes.get(file_id) else { - return None; - }; + let line_to_opcodes = self.source_to_opcodes.get(file_id)?; let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { Ok(index) => { // move backwards to find the first opcode which matches the line @@ -631,7 +629,6 @@ fn build_source_to_opcode_debug_mappings( #[cfg(test)] mod tests { use super::*; - use crate::context::{DebugCommandResult, DebugContext}; use crate::foreign_calls::DefaultDebugForeignCallExecutor; use acvm::{ @@ -647,8 +644,6 @@ mod tests { BinaryFieldOp, HeapValueType, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray, }, }; - use nargo::artifacts::debug::DebugArtifact; - use std::collections::BTreeMap; #[test] fn test_resolve_foreign_calls_stepping_into_brillig() { diff --git a/tooling/debugger/src/foreign_calls.rs b/tooling/debugger/src/foreign_calls.rs index aae2212fd54..f11ac22cd75 100644 --- a/tooling/debugger/src/foreign_calls.rs +++ b/tooling/debugger/src/foreign_calls.rs @@ -65,7 +65,7 @@ impl DefaultDebugForeignCallExecutor { pub fn load_artifact(&mut self, artifact: &DebugArtifact) { // TODO: handle loading from the correct DebugInfo when we support // debugging contracts - let Some(info) = artifact.debug_symbols.get(0) else { + let Some(info) = artifact.debug_symbols.first() else { return; }; self.debug_vars.insert_debug_info(info); diff --git a/tooling/nargo/src/artifacts/contract.rs b/tooling/nargo/src/artifacts/contract.rs index c0316a6d1a2..868fb4404fd 100644 --- a/tooling/nargo/src/artifacts/contract.rs +++ b/tooling/nargo/src/artifacts/contract.rs @@ -1,14 +1,26 @@ use acvm::acir::circuit::Program; -use noirc_abi::{Abi, 
ContractEvent}; -use noirc_driver::{CompiledContract, ContractFunction}; +use noirc_abi::{Abi, AbiType, AbiValue}; +use noirc_driver::{CompiledContract, CompiledContractOutputs, ContractFunction}; use serde::{Deserialize, Serialize}; use noirc_driver::DebugFile; use noirc_errors::debug_info::DebugInfo; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap}; use fm::FileId; +#[derive(Serialize, Deserialize)] +pub struct ContractOutputsArtifact { + pub structs: HashMap>, + pub globals: HashMap>, +} + +impl From for ContractOutputsArtifact { + fn from(outputs: CompiledContractOutputs) -> Self { + ContractOutputsArtifact { structs: outputs.structs, globals: outputs.globals } + } +} + #[derive(Serialize, Deserialize)] pub struct ContractArtifact { /// Version of noir used to compile this contract @@ -17,8 +29,8 @@ pub struct ContractArtifact { pub name: String, /// Each of the contract's functions are compiled into a separate program stored in this `Vec`. pub functions: Vec, - /// All the events defined inside the contract scope. - pub events: Vec, + + pub outputs: ContractOutputsArtifact, /// Map of file Id to the source code so locations in debug info can be mapped to source code they point to. pub file_map: BTreeMap, } @@ -29,7 +41,7 @@ impl From for ContractArtifact { noir_version: contract.noir_version, name: contract.name, functions: contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(), - events: contract.events, + outputs: contract.outputs.into(), file_map: contract.file_map, } } diff --git a/tooling/nargo/src/artifacts/debug_vars.rs b/tooling/nargo/src/artifacts/debug_vars.rs index 8e5c2bc46a4..6a42a4c3311 100644 --- a/tooling/nargo/src/artifacts/debug_vars.rs +++ b/tooling/nargo/src/artifacts/debug_vars.rs @@ -36,9 +36,7 @@ impl DebugVars { fn lookup_var(&self, var_id: DebugVarId) -> Option<(&str, &PrintableType)> { self.variables.get(&var_id).and_then(|debug_var| { - let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { - return None; - }; + let ptype = self.types.get(&debug_var.debug_type_id)?; Some((debug_var.name.as_str(), ptype)) }) } diff --git a/tooling/nargo/src/artifacts/program.rs b/tooling/nargo/src/artifacts/program.rs index 046db1cd8fa..9e660cbd359 100644 --- a/tooling/nargo/src/artifacts/program.rs +++ b/tooling/nargo/src/artifacts/program.rs @@ -34,6 +34,8 @@ pub struct ProgramArtifact { /// Map of file Id to the source code so locations in debug info can be mapped to source code they point to. pub file_map: BTreeMap, + + pub names: Vec, } impl From for ProgramArtifact { @@ -45,6 +47,7 @@ impl From for ProgramArtifact { bytecode: compiled_program.program, debug_symbols: compiled_program.debug, file_map: compiled_program.file_map, + names: compiled_program.names, } } } @@ -59,6 +62,7 @@ impl From for CompiledProgram { debug: program.debug_symbols, file_map: program.file_map, warnings: vec![], + names: program.names, } } } diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index a796eeac326..bf97dfb3e96 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -16,8 +16,7 @@ const GIT_COMMIT: &&str = &"GIT_COMMIT"; fn main() { check_rustc_version(); - // Only use build_data if the environment variable isn't set - // The environment variable is always set when working via Nix + // Only use build_data if the environment variable isn't set. 
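+    // (`build_data` shells out to `git` at compile time; pre-setting GIT_COMMIT
+    // lets builds that run outside a git checkout skip that lookup.)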
if std::env::var(GIT_COMMIT).is_err() { build_data::set_GIT_COMMIT(); build_data::set_GIT_DIRTY(); @@ -245,7 +244,7 @@ fn compile_success_empty_{test_name}() {{ let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {{ panic!("JSON was not well-formatted {{:?}}\n\n{{:?}}", e, std::str::from_utf8(&output.stdout)) }}); - let num_opcodes = &json["programs"][0]["acir_opcodes"]; + let num_opcodes = &json["programs"][0]["functions"][0]["acir_opcodes"]; assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); }} "#, diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index 897073f4e20..2b729e44b8a 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -34,6 +34,10 @@ pub(crate) struct CheckCommand { #[clap(long, conflicts_with = "package")] workspace: bool, + /// Force overwrite of existing files + #[clap(long = "overwrite")] + allow_overwrite: bool, + #[clap(flatten)] compile_options: CompileOptions, } @@ -58,18 +62,29 @@ pub(crate) fn run( let parsed_files = parse_all(&workspace_file_manager); for package in &workspace { - check_package(&workspace_file_manager, &parsed_files, package, &args.compile_options)?; - println!("[{}] Constraint system successfully built!", package.name); + let any_file_written = check_package( + &workspace_file_manager, + &parsed_files, + package, + &args.compile_options, + args.allow_overwrite, + )?; + if any_file_written { + println!("[{}] Constraint system successfully built!", package.name); + } } Ok(()) } +/// Evaluates the necessity to create or update Prover.toml and Verifier.toml based on the allow_overwrite flag and files' existence. +/// Returns `true` if any file was generated or updated, `false` otherwise. fn check_package( file_manager: &FileManager, parsed_files: &ParsedFiles, package: &Package, compile_options: &CompileOptions, -) -> Result<(), CompileError> { + allow_overwrite: bool, +) -> Result { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); check_crate_and_report_errors( &mut context, @@ -81,27 +96,39 @@ fn check_package( if package.is_library() || package.is_contract() { // Libraries do not have ABIs while contracts have many, so we cannot generate a `Prover.toml` file. - Ok(()) + Ok(false) } else { // XXX: We can have a --overwrite flag to determine if you want to overwrite the Prover/Verifier.toml files if let Some((parameters, return_type)) = compute_function_abi(&context, &crate_id) { let path_to_prover_input = package.prover_input_path(); let path_to_verifier_input = package.verifier_input_path(); - // If they are not available, then create them and populate them based on the ABI - if !path_to_prover_input.exists() { + // Before writing the file, check if it exists and whether overwrite is set + let should_write_prover = !path_to_prover_input.exists() || allow_overwrite; + let should_write_verifier = !path_to_verifier_input.exists() || allow_overwrite; + + if should_write_prover { let prover_toml = create_input_toml_template(parameters.clone(), None); write_to_file(prover_toml.as_bytes(), &path_to_prover_input); + } else { + eprintln!("Note: Prover.toml already exists. 
Use --overwrite to force overwrite."); } - if !path_to_verifier_input.exists() { + + if should_write_verifier { let public_inputs = parameters.into_iter().filter(|param| param.is_public()).collect(); let verifier_toml = create_input_toml_template(public_inputs, return_type); write_to_file(verifier_toml.as_bytes(), &path_to_verifier_input); + } else { + eprintln!( + "Note: Verifier.toml already exists. Use --overwrite to force overwrite." + ); } - Ok(()) + let any_file_written = should_write_prover || should_write_verifier; + + Ok(any_file_written) } else { Err(CompileError::MissingMainFunction(package.name.clone())) } diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index 49924622392..72784013e17 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,6 +1,6 @@ use std::collections::HashMap; -use acvm::acir::circuit::ExpressionWidth; +use acvm::acir::circuit::{ExpressionWidth, Program}; use backend_interface::BackendError; use clap::Args; use iter_extended::vecmap; @@ -24,9 +24,9 @@ use crate::errors::CliError; use super::{compile_cmd::compile_workspace, NargoConfig}; -/// Provides detailed information on a circuit +/// Provides detailed information on each of a program's function (represented by a single circuit) /// -/// Current information provided: +/// Current information provided per circuit: /// 1. The number of ACIR opcodes /// 2. Counts the final number gates in the circuit used by a backend #[derive(Debug, Clone, Args)] @@ -114,6 +114,7 @@ pub(crate) fn run( let binary_packages = workspace.into_iter().filter(|package| package.is_binary()).zip(compiled_programs); + let program_info = binary_packages .par_bridge() .map(|(package, program)| { @@ -134,10 +135,13 @@ pub(crate) fn run( } else { // Otherwise print human-readable table. 
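        // Each program can now contain several functions (circuits), so one
        // `ProgramInfo` expands into a table row per function below.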
if !info_report.programs.is_empty() { - let mut program_table = table!([Fm->"Package", Fm->"Expression Width", Fm->"ACIR Opcodes", Fm->"Backend Circuit Size"]); + let mut program_table = table!([Fm->"Package", Fm->"Function", Fm->"Expression Width", Fm->"ACIR Opcodes", Fm->"Backend Circuit Size"]); - for program in info_report.programs { - program_table.add_row(program.into()); + for program_info in info_report.programs { + let program_rows: Vec = program_info.into(); + for row in program_rows { + program_table.add_row(row); + } } program_table.printstd(); } @@ -223,18 +227,20 @@ struct ProgramInfo { name: String, #[serde(skip)] expression_width: ExpressionWidth, - acir_opcodes: usize, - circuit_size: u32, + functions: Vec, } -impl From for Row { +impl From for Vec { fn from(program_info: ProgramInfo) -> Self { - row![ - Fm->format!("{}", program_info.name), - format!("{:?}", program_info.expression_width), - Fc->format!("{}", program_info.acir_opcodes), - Fc->format!("{}", program_info.circuit_size), - ] + vecmap(program_info.functions, |function| { + row![ + Fm->format!("{}", program_info.name), + Fc->format!("{}", function.name), + format!("{:?}", program_info.expression_width), + Fc->format!("{}", function.acir_opcodes), + Fc->format!("{}", function.circuit_size), + ] + }) } } @@ -243,6 +249,7 @@ struct ContractInfo { name: String, #[serde(skip)] expression_width: ExpressionWidth, + // TODO(https://github.com/noir-lang/noir/issues/4720): Settle on how to display contract functions with non-inlined Acir calls functions: Vec, } @@ -273,13 +280,22 @@ fn count_opcodes_and_gates_in_program( package: &Package, expression_width: ExpressionWidth, ) -> Result { - Ok(ProgramInfo { - name: package.name.to_string(), - expression_width, - // TODO(https://github.com/noir-lang/noir/issues/4428) - acir_opcodes: compiled_program.program.functions[0].opcodes.len(), - circuit_size: backend.get_exact_circuit_size(&compiled_program.program)?, - }) + let functions = compiled_program + .program + .functions + .into_par_iter() + .enumerate() + .map(|(i, function)| -> Result<_, BackendError> { + Ok(FunctionInfo { + name: compiled_program.names[i].clone(), + acir_opcodes: function.opcodes.len(), + circuit_size: backend + .get_exact_circuit_size(&Program { functions: vec![function] })?, + }) + }) + .collect::>()?; + + Ok(ProgramInfo { name: package.name.to_string(), expression_width, functions }) } fn count_opcodes_and_gates_in_contract( @@ -293,7 +309,7 @@ fn count_opcodes_and_gates_in_contract( .map(|function| -> Result<_, BackendError> { Ok(FunctionInfo { name: function.name, - // TODO(https://github.com/noir-lang/noir/issues/4428) + // TODO(https://github.com/noir-lang/noir/issues/4720) acir_opcodes: function.bytecode.functions[0].opcodes.len(), circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, }) diff --git a/tooling/nargo_fmt/build.rs b/tooling/nargo_fmt/build.rs index c356b403ae5..6f41768c1dc 100644 --- a/tooling/nargo_fmt/build.rs +++ b/tooling/nargo_fmt/build.rs @@ -58,10 +58,10 @@ fn format_{test_name}() {{ let expected_output = r#"{output_source}"#; - let (parsed_module, _errors) = noirc_frontend::parse_program(&input); + let (parsed_module, _errors) = noirc_frontend::parse_program(input); let config = nargo_fmt::Config::of("{config}").unwrap(); - let fmt_text = nargo_fmt::format(&input, parsed_module, &config); + let fmt_text = nargo_fmt::format(input, parsed_module, &config); if std::env::var("UPDATE_EXPECT").is_ok() {{ 
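        // Refresh the checked-in expected output when UPDATE_EXPECT is set.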
std::fs::write("{output_source_path}", fmt_text.clone()).unwrap(); diff --git a/tooling/noirc_abi/src/lib.rs b/tooling/noirc_abi/src/lib.rs index d0dcb373963..89a60b0ed26 100644 --- a/tooling/noirc_abi/src/lib.rs +++ b/tooling/noirc_abi/src/lib.rs @@ -10,9 +10,7 @@ use acvm::{ use errors::AbiError; use input_parser::InputValue; use iter_extended::{try_btree_map, try_vecmap, vecmap}; -use noirc_frontend::{ - hir::Context, Signedness, StructType, Type, TypeBinding, TypeVariableKind, Visibility, -}; +use noirc_frontend::{hir::Context, Signedness, Type, TypeBinding, TypeVariableKind, Visibility}; use serde::{Deserialize, Serialize}; use std::ops::Range; use std::{collections::BTreeMap, str}; @@ -515,31 +513,35 @@ fn decode_string_value(field_elements: &[FieldElement]) -> String { final_string.to_owned() } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ContractEvent { - /// Event name - name: String, - /// The fully qualified path to the event definition - path: String, - - /// Fields of the event - #[serde( - serialize_with = "serialization::serialize_struct_fields", - deserialize_with = "serialization::deserialize_struct_fields" - )] - fields: Vec<(String, AbiType)>, -} - -impl ContractEvent { - pub fn from_struct_type(context: &Context, struct_type: &StructType) -> Self { - let fields = vecmap(struct_type.get_fields(&[]), |(name, typ)| { - (name, AbiType::from_type(context, &typ)) - }); - // For the ABI, we always want to resolve the struct paths from the root crate - let path = context.fully_qualified_struct_path(context.root_crate_id(), struct_type.id); - - Self { name: struct_type.name.0.contents.clone(), path, fields } - } +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "lowercase")] +pub enum AbiValue { + Field { + value: FieldElement, + }, + Integer { + sign: bool, + value: String, + }, + Boolean { + value: bool, + }, + String { + value: String, + }, + Array { + value: Vec, + }, + Struct { + #[serde( + serialize_with = "serialization::serialize_struct_field_values", + deserialize_with = "serialization::deserialize_struct_field_values" + )] + fields: Vec<(String, AbiValue)>, + }, + Tuple { + fields: Vec, + }, } fn range_to_vec(ranges: &[Range]) -> Vec { diff --git a/tooling/noirc_abi/src/serialization.rs b/tooling/noirc_abi/src/serialization.rs index ed838803fab..4f91d9b7dfd 100644 --- a/tooling/noirc_abi/src/serialization.rs +++ b/tooling/noirc_abi/src/serialization.rs @@ -1,8 +1,7 @@ +use crate::{AbiType, AbiValue}; use iter_extended::vecmap; use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use crate::AbiType; - // This module exposes a custom serializer and deserializer for `BTreeMap` // (representing the fields of a struct) to serialize it as a `Vec`. 
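// Serializing the fields as an ordered `Vec` keeps their declaration order explicit,
// which a `BTreeMap` rendered as a JSON object would not preserve.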
// @@ -41,6 +40,37 @@ where Ok(vecmap(fields_vector, |StructField { name, typ }| (name, typ))) } +#[derive(Serialize, Deserialize)] +struct StructFieldValue { + name: String, + value: AbiValue, +} + +pub(crate) fn serialize_struct_field_values( + fields: &[(String, AbiValue)], + s: S, +) -> Result +where + S: Serializer, +{ + let fields_vector = vecmap(fields, |(name, value)| StructFieldValue { + name: name.to_owned(), + value: value.to_owned(), + }); + + fields_vector.serialize(s) +} + +pub(crate) fn deserialize_struct_field_values<'de, D>( + deserializer: D, +) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let fields_vector = Vec::::deserialize(deserializer)?; + Ok(vecmap(fields_vector, |StructFieldValue { name, value }| (name, value))) +} + #[cfg(test)] mod tests { use crate::{AbiParameter, AbiType, AbiVisibility, Sign}; diff --git a/tooling/noirc_abi_wasm/README.md b/tooling/noirc_abi_wasm/README.md index 77bc1f5fae2..2b0cf9b74d4 100644 --- a/tooling/noirc_abi_wasm/README.md +++ b/tooling/noirc_abi_wasm/README.md @@ -1,17 +1,3 @@ # Noir Lang ABI JavaScript Package This JavaScript package enables users to ABI encode inputs to a Noir program, i.e. generating an initial witness. - -## Building from source - -Outside of the [noir repo](https://github.com/noir-lang/noir), this package can be built using the command below: - -```bash -nix build -L github:noir-lang/noir/master#abi_wasm -``` - -If you are within the noir repo and would like to build local changes, you can use: - -```bash -nix build -L #abi_wasm -``` diff --git a/tooling/noirc_abi_wasm/build.rs b/tooling/noirc_abi_wasm/build.rs index 3b96be74ef3..7a6eb861de2 100644 --- a/tooling/noirc_abi_wasm/build.rs +++ b/tooling/noirc_abi_wasm/build.rs @@ -1,8 +1,7 @@ const GIT_COMMIT: &&str = &"GIT_COMMIT"; fn main() { - // Only use build_data if the environment variable isn't set - // The environment variable is always set when working via Nix + // Only use build_data if the environment variable isn't set. if std::env::var(GIT_COMMIT).is_err() { build_data::set_GIT_COMMIT(); build_data::set_GIT_DIRTY(); diff --git a/tooling/noirc_abi_wasm/build.sh b/tooling/noirc_abi_wasm/build.sh index 24af149bcea..58724dee02c 100755 --- a/tooling/noirc_abi_wasm/build.sh +++ b/tooling/noirc_abi_wasm/build.sh @@ -6,13 +6,6 @@ function require_command { exit 1 fi } -function check_installed { - if ! command -v "$1" >/dev/null 2>&1; then - echo "$1 is not installed. Please install it." >&2 - return 1 - fi - return 0 -} function run_or_fail { "$@" local status=$? @@ -21,27 +14,39 @@ function run_or_fail { exit $status fi } +function run_if_available { + if command -v "$1" >/dev/null 2>&1; then + "$@" + else + echo "$1 is not installed. Please install it to use this feature." >&2 + fi +} require_command jq require_command cargo require_command wasm-bindgen -check_installed wasm-opt self_path=$(dirname "$(readlink -f "$0")") -export pname=$(cargo read-manifest | jq -r '.name') -export CARGO_TARGET_DIR=$self_path/target +pname=$(cargo read-manifest | jq -r '.name') -rm -rf $self_path/outputs >/dev/null 2>&1 -rm -rf $self_path/result >/dev/null 2>&1 +NODE_DIR=$self_path/nodejs +BROWSER_DIR=$self_path/web -if [ -n "$out" ]; then - echo "Will install package to $out (defined outside installPhase.sh script)" -else - export out="$self_path/outputs/out" - echo "Will install package to $out" +# Clear out the existing build artifacts as these aren't automatically removed by wasm-bindgen. 
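+# wasm-bindgen recreates $NODE_DIR and $BROWSER_DIR on the next run, so it is
+# safe to delete them outright here.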
+if [ -d ./pkg/ ]; then + rm -r $NODE_DIR + rm -r $BROWSER_DIR fi -run_or_fail $self_path/buildPhaseCargoCommand.sh -run_or_fail $self_path/installPhase.sh +TARGET=wasm32-unknown-unknown +WASM_BINARY=${self_path}/../../target/$TARGET/release/${pname}.wasm + +NODE_WASM=${NODE_DIR}/${pname}_bg.wasm +BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm -ln -s $out $self_path/result +# Build the new wasm package +run_or_fail cargo build --lib --release --target $TARGET --package ${pname} +run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs +run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/tooling/noirc_abi_wasm/buildPhaseCargoCommand.sh b/tooling/noirc_abi_wasm/buildPhaseCargoCommand.sh deleted file mode 100755 index 1188d00953e..00000000000 --- a/tooling/noirc_abi_wasm/buildPhaseCargoCommand.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash - -function run_or_fail { - "$@" - local status=$? - if [ $status -ne 0 ]; then - echo "Command '$*' failed with exit code $status" >&2 - exit $status - fi -} -function run_if_available { - if command -v "$1" >/dev/null 2>&1; then - "$@" - else - echo "$1 is not installed. Please install it to use this feature." >&2 - fi -} - -export self_path=$(dirname "$(readlink -f "$0")") - -# Clear out the existing build artifacts as these aren't automatically removed by wasm-pack. -if [ -d ./pkg/ ]; then - rm -rf $self_path/pkg/ -fi - -TARGET=wasm32-unknown-unknown -WASM_BINARY=$CARGO_TARGET_DIR/$TARGET/release/${pname}.wasm - -NODE_DIR=$self_path/nodejs/ -BROWSER_DIR=$self_path/web/ -NODE_WASM=${NODE_DIR}/${pname}_bg.wasm -BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm - -# Build the new wasm package -run_or_fail cargo build --lib --release --target $TARGET --package ${pname} -run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs -run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O -run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O \ No newline at end of file diff --git a/tooling/noirc_abi_wasm/installPhase.sh b/tooling/noirc_abi_wasm/installPhase.sh deleted file mode 100755 index d9b94f2d171..00000000000 --- a/tooling/noirc_abi_wasm/installPhase.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash -export self_path=$(dirname "$(readlink -f "$0")") - -export out_path=$out/noirc_abi_wasm - -mkdir -p $out_path -cp $self_path/README.md $out_path/ -cp $self_path/package.json $out_path/ -cp -r $self_path/nodejs $out_path/ -cp -r $self_path/web $out_path/ diff --git a/tooling/noirc_abi_wasm/package.json b/tooling/noirc_abi_wasm/package.json index e93a8e6a5e3..14e528c3b15 100644 --- a/tooling/noirc_abi_wasm/package.json +++ b/tooling/noirc_abi_wasm/package.json @@ -33,9 +33,7 @@ "clean": "chmod u+w web nodejs || true && rm -rf ./nodejs ./web ./target ./result", "nightly:version": "jq --arg new_version \"-$(git rev-parse --short HEAD)$1\" '.version = .version + $new_version' package.json > package-tmp.json && mv package-tmp.json package.json", "publish": "echo 📡 publishing `$npm_package_name` && yarn npm publish", - "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0", - "build:nix": "nix build -L .#noirc_abi_wasm", - "install:from:nix": "yarn clean && yarn build:nix && cp -rL ./result/noirc_abi_wasm/nodejs ./ && cp -rL ./result/noirc_abi_wasm/web ./" + "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { "@noir-lang/types": "workspace:*" diff --git a/wasm-bindgen-cli.nix b/wasm-bindgen-cli.nix deleted file mode 100644 index 7c3910f032e..00000000000 --- a/wasm-bindgen-cli.nix +++ /dev/null @@ -1,43 +0,0 @@ -{ lib -, rustPlatform -, fetchCrate -, nodejs -, pkg-config -, openssl -, stdenv -, curl -, darwin -, libiconv -, runCommand -}: - -rustPlatform.buildRustPackage rec { - pname = "wasm-bindgen-cli"; - version = "0.2.86"; - - src = fetchCrate { - inherit pname version; - sha256 = "sha256-56EOiLbdgAcoTrkyvB3t9TjtLaRvGxFUXx4haLwE2QY="; - }; - - cargoSha256 = "sha256-4CPBmz92PuPN6KeGDTdYPAf5+vTFk9EN5Cmx4QJy6yI="; - - nativeBuildInputs = [ pkg-config ]; - - buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ - curl - # Need libiconv and apple Security on Darwin. See https://github.com/ipetkov/crane/issues/156 - libiconv - darwin.apple_sdk.frameworks.Security - ]; - - doCheck = false; - - meta = with lib; { - homepage = "https://rustwasm.github.io/docs/wasm-bindgen/"; - license = with licenses; [ asl20 /* or */ mit ]; - description = "Facilitating high-level interactions between wasm modules and JavaScript"; - maintainers = with maintainers; [ nitsky rizary ]; - mainProgram = "wasm-bindgen"; - }; -} From 061ba3934678fc0e9dcbf533c57ff897b30220ab Mon Sep 17 00:00:00 2001 From: vezenovm Date: Wed, 10 Apr 2024 15:42:03 +0000 Subject: [PATCH 2/2] update to 0.33.0 --- tooling/bb_abstraction_leaks/build.rs | 2 +- tooling/noir_js_backend_barretenberg/package.json | 2 +- yarn.lock | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs index 52f7783851a..e055d7a3a5f 100644 --- a/tooling/bb_abstraction_leaks/build.rs +++ b/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.32.0"; +const VERSION: &str = "0.33.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 251dd80c2f4..e71bf296fbf 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 +42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.32.0", + "@aztec/bb.js": "0.33.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/yarn.lock b/yarn.lock index a39ae9921da..0fdad4ad2ad 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,9 +221,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.32.0": - version: 0.32.0 - resolution: "@aztec/bb.js@npm:0.32.0" +"@aztec/bb.js@npm:0.33.0": + version: 0.33.0 + resolution: "@aztec/bb.js@npm:0.33.0" dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -231,7 +231,7 @@ __metadata: tslib: ^2.4.0 bin: bb.js: dest/node/main.js - checksum: 0919957e141ae0a65cfab961dce122fa06de628a10b7cb661d31d8ed4793ce80980fcf315620ceffffa45581db941bad43c392f4b2aa9becaaf7d2faaba01ffc + checksum: 16244a52ef1cb5efca582e863a3521d04f0fb66b02cd584b904e6e65f684e392eec56679439d1a831127e126d117bf0e116166fc4b24efdd6e1ebe9097efed06 languageName: node linkType: hard @@ -4396,7 +4396,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.32.0 + "@aztec/bb.js": 0.33.0 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3