diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6d6db808cf3d..d696c0d41bf3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: with: runner_label: ${{ inputs.username || github.actor }}-x86 ebs_cache_size_gb: 256 - runner_concurrency: 20 + runner_concurrency: 50 subaction: ${{ inputs.runner_action || 'start' }} # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/ ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge @@ -52,11 +52,14 @@ jobs: run: earthly-ci ./yarn-project+export-e2e-test-images # We base our e2e list used in e2e-x86 off the targets in ./yarn-project/end-to-end # (Note ARM uses just 2 tests as a smoketest) - - name: Create list of end-to-end jobs + - name: Create list of non-bench end-to-end jobs id: e2e_list - run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep -v '+base' | sed 's/+//' | jq -R . | jq -cs .)" >> $GITHUB_OUTPUT + run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep -v '+base' | grep -v '+bench' | sed 's/+//' | jq -R . | jq -cs .)" >> $GITHUB_OUTPUT + - name: Create list of bench end-to-end jobs + id: bench_list + run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep '+bench' | sed 's/+//' | jq -R . 
| jq -cs .)" >> $GITHUB_OUTPUT - # all the end-to-end integration tests for aztec + # all the non-bench end-to-end integration tests for aztec e2e: needs: build runs-on: ${{ inputs.username || github.actor }}-x86 @@ -81,6 +84,31 @@ jobs: --no-output \ +${{ matrix.test }} --skip_build=true + # all the benchmarking end-to-end integration tests for aztec (not required to merge) + bench-e2e: + needs: build + runs-on: ${{ inputs.username || github.actor }}-x86 + strategy: + fail-fast: false + matrix: + test: ${{ fromJson( needs.build.outputs.bench_list )}} + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ github.event.pull_request.head.sha }}" } + - uses: ./.github/ci-setup-action + with: + dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" + # must be globally unique for build x runner + concurrency_key: e2e-${{ inputs.username || github.actor }}-x86-${{ matrix.test }} + - name: Test + working-directory: ./yarn-project/end-to-end/ + timeout-minutes: 25 + run: earthly-ci -P \ + --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ + --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ + --no-output \ + +${{ matrix.test }} --skip_build=true + # bench-summary: # needs: e2e # runs-on: ${{ inputs.username || github.actor }}-x86 @@ -109,10 +137,31 @@ jobs: # working-directory: ./yarn-project/scripts # run: earthly-ci -P --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} +bench-comment + noir-format: + needs: setup + runs-on: ${{ inputs.username || github.actor }}-x86 + steps: + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } + - uses: ./.github/ci-setup-action + with: + dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" + concurrency_key: noir-format-${{ inputs.username || github.actor }}-x86 + - name: "Format Noir" + 
working-directory: ./noir/ + timeout-minutes: 25 + run: earthly-ci --no-output ./+format + - name: "Format noir-projects" + working-directory: ./noir-projects/ + timeout-minutes: 25 + run: earthly-ci --no-output ./+format + # barretenberg (prover) native and AVM (public VM) tests # only ran on x86 for resource reasons (memory intensive) bb-native-tests: - needs: build + needs: setup runs-on: ${{ inputs.username || github.actor }}-x86 steps: - { @@ -244,6 +293,22 @@ jobs: timeout-minutes: 25 run: earthly-ci --no-output ./yarn-project/+prover-client-test + client-proof-tests: + needs: build + runs-on: ${{ github.actor }}-x86 + steps: + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } + - uses: ./.github/ci-setup-action + with: + dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" + concurrency_key: client-proof-tests-${{ github.actor }}-x86 + - name: "Client Proof Tests" + timeout-minutes: 25 + run: earthly-ci --no-output ./yarn-project/+run-e2e --test=client_prover_integration/client_prover_integration.test.ts + build-acir-tests: needs: build runs-on: ${{ github.actor }}-x86 @@ -419,30 +484,6 @@ jobs: merge-check: runs-on: ubuntu-latest - needs: - - e2e - - bb-native-tests - - bb-bench - - yarn-project-formatting - - yarn-project-test - - prover-client-test - - noir-packages-test - - noir-test - if: always() - steps: - - run: | - echo "e2e status: ${{ needs.e2e.result }}" - echo "bb-native-tests status: ${{ needs.bb-native-tests.result }}" - echo "bb-bench status: ${{ needs.bb-bench.result }}" - echo "yarn-project-formatting status: ${{ needs.yarn-project-formatting.result }}" - echo "yarn-project-test status: ${{ needs.yarn-project-test.result }}" - if [[ "${{ needs.e2e.result }}" != 'success' || "${{ needs.bb-native-tests.result }}" != 'success' || "${{ needs.bb-bench.result }}" != 'success' || "${{ needs.yarn-project-formatting.result }}" != 'success' || "${{ needs.yarn-project-test.result }}" != 'success' ]]; 
then - echo "Pull request merging not allowed due to failures." - exit 1 - fi - echo "Pull request merging now allowed." - - notify: needs: [ e2e, @@ -451,7 +492,28 @@ jobs: yarn-project-formatting, yarn-project-test, prover-client-test, + bb-js-test, + barretenberg-acir-tests-bb-js, + barretenberg-acir-tests-bb, + barretenberg-acir-tests-sol, + noir-test, + noir-packages-test, + client-proof-tests, ] + if: always() + steps: + - run: | + failed=${{ contains(needs.*.result, 'failure') }} + if $failed + then + echo "At least one job failed, merging not allowed." + exit 1 + fi + echo "All jobs succeeded, merge allowed." + + notify: + needs: + - merge-check runs-on: ubuntu-latest if: ${{ github.ref == 'refs/heads/master' && failure() }} steps: diff --git a/.github/workflows/setup-runner.yml b/.github/workflows/setup-runner.yml index dff8f10cff3e..658e5b581ba2 100644 --- a/.github/workflows/setup-runner.yml +++ b/.github/workflows/setup-runner.yml @@ -147,4 +147,6 @@ jobs: fi - name: Run Earthly Bootstrap - run: earthly bootstrap + run: | + earthly bootstrap + touch /run/.earthly-bootstrap # Used in `earthly-ci` wrapper to check that earthly has been bootstrapped ok diff --git a/.github/workflows/start-spot.yml b/.github/workflows/start-spot.yml index b4ed1f3ca1b6..eb13f205cb41 100644 --- a/.github/workflows/start-spot.yml +++ b/.github/workflows/start-spot.yml @@ -16,7 +16,7 @@ jobs: with: runner_label: ${{ inputs.username || github.actor }}-x86 ebs_cache_size_gb: 256 - runner_concurrency: 20 + runner_concurrency: 50 subaction: ${{ inputs.action }} # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/ ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge diff --git a/.noir-sync-commit b/.noir-sync-commit index caa81d0f7be8..5fe0fbedd165 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -a87c655c6c8c077c71e3372cc9181b7870348a3d +95d4d133d1eb5e0eb44cd928d8183d890e970a13 diff 
--git a/.release-please-manifest.json b/.release-please-manifest.json index bcd45cb551ad..1bd543620ee8 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,7 +1,7 @@ { - ".": "0.37.0", + ".": "0.38.0", "yarn-project/cli": "0.35.1", - "yarn-project/aztec": "0.37.0", - "barretenberg": "0.37.0", - "barretenberg/ts": "0.37.0" + "yarn-project/aztec": "0.38.0", + "barretenberg": "0.38.0", + "barretenberg/ts": "0.38.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index ff67eb0ae7ff..f4848f7f584b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,112 @@ # Changelog +## [0.38.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.37.0...aztec-packages-v0.38.0) (2024-05-07) + + +### ⚠ BREAKING CHANGES + +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) + +### Features + +* `multi_scalar_mul` blackbox func ([#6097](https://github.com/AztecProtocol/aztec-packages/issues/6097)) ([f6b1ba6](https://github.com/AztecProtocol/aztec-packages/commit/f6b1ba60daf37a5a6466ca1e5ee7be70354af485)) +* Add `Neg` trait to stdlib (https://github.com/noir-lang/noir/pull/4983) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02)) +* Add ciphertext computation for log header ([#6175](https://github.com/AztecProtocol/aztec-packages/issues/6175)) ([3e05534](https://github.com/AztecProtocol/aztec-packages/commit/3e0553456535cd32743f7cf33e51ffd8a36ff75d)) +* Add proving retries ([#6145](https://github.com/AztecProtocol/aztec-packages/issues/6145)) ([39ab99c](https://github.com/AztecProtocol/aztec-packages/commit/39ab99c3d0c819094b7eb39edd22c81322ca4627)) +* Add public teardown to circuit structs ([#6191](https://github.com/AztecProtocol/aztec-packages/issues/6191)) 
([03e1b93](https://github.com/AztecProtocol/aztec-packages/commit/03e1b937db09dc64ac73960285849c4dd88e1f01)) +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) ([e4b97a8](https://github.com/AztecProtocol/aztec-packages/commit/e4b97a8cd7574a828c2a54b4a93b5ced79df6abf)) +* Always including debug data in a function artifact ([#6223](https://github.com/AztecProtocol/aztec-packages/issues/6223)) ([5d6d22c](https://github.com/AztecProtocol/aztec-packages/commit/5d6d22ca416c6471428b56a55968e859334caa6a)) +* **avm-simulator:** Consider previous pending nullifiers across enqueued calls ([#6188](https://github.com/AztecProtocol/aztec-packages/issues/6188)) ([4676431](https://github.com/AztecProtocol/aztec-packages/commit/4676431ecf18003c6648e914effb1c3087108f0f)) +* **avm-simulator:** Make storage work across enqueued calls ([#6181](https://github.com/AztecProtocol/aztec-packages/issues/6181)) ([8e218a2](https://github.com/AztecProtocol/aztec-packages/commit/8e218a22c1f85e7b0de4afc4219a860e6bbab7fb)) +* **avm:** Add TransactionFee opcode to simulator ([#6210](https://github.com/AztecProtocol/aztec-packages/issues/6210)) ([fcac844](https://github.com/AztecProtocol/aztec-packages/commit/fcac84451f657bb4a70c496538b443dda5bc961e)) +* Complex outputs from acir call (https://github.com/noir-lang/noir/pull/4952) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0)) +* Expose set_public_teardown_function in private context ([#6199](https://github.com/AztecProtocol/aztec-packages/issues/6199)) ([4d8b51c](https://github.com/AztecProtocol/aztec-packages/commit/4d8b51caf477ff83390ec6b40f11b0768e57903f)) +* Handle empty response foreign calls without an external resolver (https://github.com/noir-lang/noir/pull/4959) 
([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0)) +* Hash logs inside circuit ([#5934](https://github.com/AztecProtocol/aztec-packages/issues/5934)) ([6b99527](https://github.com/AztecProtocol/aztec-packages/commit/6b99527881345d7aa0dc90cfc61832432d817587)) +* Honk flows exposed through wasm ([#6096](https://github.com/AztecProtocol/aztec-packages/issues/6096)) ([c9b3206](https://github.com/AztecProtocol/aztec-packages/commit/c9b32061b2849442516ff0395b69d9a230191234)) +* Implement `From` array trait for `BoundedVec` (https://github.com/noir-lang/noir/pull/4927) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02)) +* Include transaction fee in txreceipt ([#6139](https://github.com/AztecProtocol/aztec-packages/issues/6139)) ([6785512](https://github.com/AztecProtocol/aztec-packages/commit/6785512fff9dfec77bec5ce1580880c7ae21dce8)) +* Making keys getters complete ([#6171](https://github.com/AztecProtocol/aztec-packages/issues/6171)) ([e85dde9](https://github.com/AztecProtocol/aztec-packages/commit/e85dde9743c4e2e6c2f0dfd7bf487a2b4234d2b5)) +* Move noir-tests to earthly ([#6185](https://github.com/AztecProtocol/aztec-packages/issues/6185)) ([4daea40](https://github.com/AztecProtocol/aztec-packages/commit/4daea40fc8d994f25321ee6359ad37321ccd99dd)) +* Note hash read requests fixes and refactoring ([#6125](https://github.com/AztecProtocol/aztec-packages/issues/6125)) ([9d03f34](https://github.com/AztecProtocol/aztec-packages/commit/9d03f34ca023c954832889ee8eef65aca60f1b1b)) +* Optimize array sets in if conditions (alternate version) (https://github.com/noir-lang/noir/pull/4716) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0)) +* Osxcross 
([#6099](https://github.com/AztecProtocol/aztec-packages/issues/6099)) ([6cc924d](https://github.com/AztecProtocol/aztec-packages/commit/6cc924dc44a36d9ef2aeda05ea69a120898fc272)) +* Parsing non-string assertion payloads in noir js ([#6079](https://github.com/AztecProtocol/aztec-packages/issues/6079)) ([fbd78fd](https://github.com/AztecProtocol/aztec-packages/commit/fbd78fdc53071f3548971dfb4832a440512f4687)) +* Proving benchmark ([#6051](https://github.com/AztecProtocol/aztec-packages/issues/6051)) ([644bd85](https://github.com/AztecProtocol/aztec-packages/commit/644bd8525f6de8b71d6cc299baf3fda94b68abbb)) +* Proving the private kernels and app circuits ([#6112](https://github.com/AztecProtocol/aztec-packages/issues/6112)) ([4a43fab](https://github.com/AztecProtocol/aztec-packages/commit/4a43fab043d9974a80c259703ebe2e0027e8ae57)) +* Publish transaction_fee ([#6126](https://github.com/AztecProtocol/aztec-packages/issues/6126)) ([6f3a036](https://github.com/AztecProtocol/aztec-packages/commit/6f3a036585da589e04eb35b823ed2aaa7135bae5)) +* Recursive folding verifier and decider as ultra circuits and circuit simulator ([#6150](https://github.com/AztecProtocol/aztec-packages/issues/6150)) ([acc8641](https://github.com/AztecProtocol/aztec-packages/commit/acc86416668ccfd6425ee3af4a898f2e8513168b)) +* Reproducible ClientIVC proofs ([#6227](https://github.com/AztecProtocol/aztec-packages/issues/6227)) ([c145757](https://github.com/AztecProtocol/aztec-packages/commit/c145757a13ba4ff881c4bb05c4caaee7351053b3)) +* Run noir-packages-test in Earthly ([#6174](https://github.com/AztecProtocol/aztec-packages/issues/6174)) ([58e40c9](https://github.com/AztecProtocol/aztec-packages/commit/58e40c9125e6d7b30abf7a4cbb170bbfc15e2037)) +* Set aztec private functions to be recursive ([#6192](https://github.com/AztecProtocol/aztec-packages/issues/6192)) 
([22625f8](https://github.com/AztecProtocol/aztec-packages/commit/22625f845f22703dc0d6e661fa36a0f67e6c719e)) +* Use actual tx fee in gas token when charging fee ([#6166](https://github.com/AztecProtocol/aztec-packages/issues/6166)) ([8418eac](https://github.com/AztecProtocol/aztec-packages/commit/8418eac301fc9761cc29efd901ca5f719c3dfa09)) + + +### Bug Fixes + +* **abstract-phase-manager:** Get available gas from latest kernel output ([#6102](https://github.com/AztecProtocol/aztec-packages/issues/6102)) ([0fa509b](https://github.com/AztecProtocol/aztec-packages/commit/0fa509b68da7a8ab1b5865d17a7cf4cb197eb8b3)) +* Aztec-run not exposing port for builder ([#6241](https://github.com/AztecProtocol/aztec-packages/issues/6241)) ([a80c091](https://github.com/AztecProtocol/aztec-packages/commit/a80c0911c629852d72bbff48b22af3b178b191b2)) +* Boxes use base image ([#6120](https://github.com/AztecProtocol/aztec-packages/issues/6120)) ([ef2589a](https://github.com/AztecProtocol/aztec-packages/commit/ef2589a41f72981e5245f294695c5da8d4f04d0e)) +* Compute the correct slice length when coercing from a literal array of complex types (https://github.com/noir-lang/noir/pull/4986) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02)) +* Correct circuit size estimation for UltraHonk ([#6164](https://github.com/AztecProtocol/aztec-packages/issues/6164)) ([ed84fe3](https://github.com/AztecProtocol/aztec-packages/commit/ed84fe3bcc29c69b1e9d9caafd2c2c2134a67dce)) +* Docs release ci setup ([#6159](https://github.com/AztecProtocol/aztec-packages/issues/6159)) ([6d5cfe6](https://github.com/AztecProtocol/aztec-packages/commit/6d5cfe65dadf56b3f9094a2662b32792dd1a9520)) +* **docs:** Fix broken link in tree implementations page ([#6143](https://github.com/AztecProtocol/aztec-packages/issues/6143)) 
([b39f1db](https://github.com/AztecProtocol/aztec-packages/commit/b39f1db91942096eb1768a37ba9ecfb94d4e1313)) +* **docs:** Update sandbox reference ([#6094](https://github.com/AztecProtocol/aztec-packages/issues/6094)) ([0641085](https://github.com/AztecProtocol/aztec-packages/commit/06410858fd1b6d0d8a1c225a08b8c6628ad9ddcc)) +* Increase default number of proving agents ([#6146](https://github.com/AztecProtocol/aztec-packages/issues/6146)) ([5ade36e](https://github.com/AztecProtocol/aztec-packages/commit/5ade36e63ad9d521efe62e889836de5e891e6d0b)) +* Install aztec-builder ([#6149](https://github.com/AztecProtocol/aztec-packages/issues/6149)) ([0497dcf](https://github.com/AztecProtocol/aztec-packages/commit/0497dcf4876b9e7bd7e7459f8d49a6167fd57323)) +* Move remove_if_else pass after second inlining (https://github.com/noir-lang/noir/pull/4976) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02)) +* **public-kernel:** Only validate start-gas for execution requests ([#6100](https://github.com/AztecProtocol/aztec-packages/issues/6100)) ([3ec9303](https://github.com/AztecProtocol/aztec-packages/commit/3ec9303c4fe25eb8bf5b81e58dcf989acc8ac7e6)) +* Registering PublicDataWitness in JsonRpcServer ([#6243](https://github.com/AztecProtocol/aztec-packages/issues/6243)) ([e8c4455](https://github.com/AztecProtocol/aztec-packages/commit/e8c4455339ac0b4c7444aba7ff1308c10af4d139)) +* Scope netlify to yarn bin ([#6162](https://github.com/AztecProtocol/aztec-packages/issues/6162)) ([be8e3c0](https://github.com/AztecProtocol/aztec-packages/commit/be8e3c00837f7b823b74dfad7ef0875265ae35fe)) +* Set index and value to 0 for array_get with predicate (https://github.com/noir-lang/noir/pull/4971) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02)) 
+* Set up the ci runner for doc deployment ([#6160](https://github.com/AztecProtocol/aztec-packages/issues/6160)) ([e295900](https://github.com/AztecProtocol/aztec-packages/commit/e2959004c132f87b876e7b08ed3b2c3eb99622bf)) +* Sporadic failure of GoblinRecursionTests.Vanilla ([#6218](https://github.com/AztecProtocol/aztec-packages/issues/6218)) ([f4ecea5](https://github.com/AztecProtocol/aztec-packages/commit/f4ecea5a83bcc88fd11698ac5c8e174c2461a74b)) +* Use annotated type when checking declaration (https://github.com/noir-lang/noir/pull/4966) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0)) +* Use pushed build images. ([#6154](https://github.com/AztecProtocol/aztec-packages/issues/6154)) ([426f7a7](https://github.com/AztecProtocol/aztec-packages/commit/426f7a7c0911512058d5d5d49a3ed9f2ab5ed4e0)) +* Use random id for proving jobs ([#6084](https://github.com/AztecProtocol/aztec-packages/issues/6084)) ([0e0fc58](https://github.com/AztecProtocol/aztec-packages/commit/0e0fc585b9329371e5f89accf10ff1b7a08749c0)) +* Various aztec-builder issues ([#6233](https://github.com/AztecProtocol/aztec-packages/issues/6233)) ([9a644ba](https://github.com/AztecProtocol/aztec-packages/commit/9a644baeae7c46250ced9942ce30f3f8694efe7f)) + + +### Miscellaneous + +* Add avm team as codeowners for public context ([#6247](https://github.com/AztecProtocol/aztec-packages/issues/6247)) ([c571ff0](https://github.com/AztecProtocol/aztec-packages/commit/c571ff0545d54819dd5b386e1bbd932dbe603819)) +* **avm-simulator:** Avm's nested calls now stay internal and properly track PublicExecutionResult ([#6165](https://github.com/AztecProtocol/aztec-packages/issues/6165)) ([9fd4f39](https://github.com/AztecProtocol/aztec-packages/commit/9fd4f39e48793262d8d84e4ac0990c80072dcca3)) +* **avm-simulator:** Make shifts take u8 
([#5905](https://github.com/AztecProtocol/aztec-packages/issues/5905)) ([4719ff1](https://github.com/AztecProtocol/aztec-packages/commit/4719ff19e71e27965a3ccf75b7356a27389ee766)) +* **avm-simulator:** Track recursive public execution result in avm-simulator for integration with old kernel ([#6106](https://github.com/AztecProtocol/aztec-packages/issues/6106)) ([df3bcc6](https://github.com/AztecProtocol/aztec-packages/commit/df3bcc6315ba6ded3a352f7374888504ecc48eb9)) +* **aztec-macros:** Avm function return types are auto tagged as `pub` ([#6250](https://github.com/AztecProtocol/aztec-packages/issues/6250)) ([0e828f3](https://github.com/AztecProtocol/aztec-packages/commit/0e828f3914078850b9a8e1e928c886c59cfab64e)) +* **aztec-nr:** Create a 'with_selector' version of `emit_unencrypted_log` in avm context ([#6248](https://github.com/AztecProtocol/aztec-packages/issues/6248)) ([fda6442](https://github.com/AztecProtocol/aztec-packages/commit/fda64425ed673e2f4f4f7edc231b7a563ec5b0cc)) +* Bump bb.js timeouts ([#6196](https://github.com/AztecProtocol/aztec-packages/issues/6196)) ([acab3de](https://github.com/AztecProtocol/aztec-packages/commit/acab3de86aae9ce5078795ba1ed0626d0c018565)) +* Check root parity is only enqueued once its deps are ready ([#6015](https://github.com/AztecProtocol/aztec-packages/issues/6015)) ([c1120d1](https://github.com/AztecProtocol/aztec-packages/commit/c1120d16a68550934ab6744f8759b41f3dcdf4eb)) +* **ci:** Fix restarts with fresh spot, acir test fixes, non-mandatory benches ([#6226](https://github.com/AztecProtocol/aztec-packages/issues/6226)) ([adb7f37](https://github.com/AztecProtocol/aztec-packages/commit/adb7f37a4ad01acf1ef197189a1e78323cae8f0b)) +* **ci:** Force earthly prune if corrupted cache ([#6152](https://github.com/AztecProtocol/aztec-packages/issues/6152)) 
([3910314](https://github.com/AztecProtocol/aztec-packages/commit/39103141a56f7f71fffb2d4164f0c4f432704a81)) +* **ci:** Improve dependency structure ([#6200](https://github.com/AztecProtocol/aztec-packages/issues/6200)) ([3abc862](https://github.com/AztecProtocol/aztec-packages/commit/3abc862f77b883382e6f03ec66c5fd93efef9989)) +* **ci:** Migrate `protocol-circuits-gate-diff` to earthly ([#6204](https://github.com/AztecProtocol/aztec-packages/issues/6204)) ([4b43295](https://github.com/AztecProtocol/aztec-packages/commit/4b432951a9fe46ca1b0e0d38ebafe523bebf04eb)) +* **ci:** More stable spot request ([#6212](https://github.com/AztecProtocol/aztec-packages/issues/6212)) ([00156b5](https://github.com/AztecProtocol/aztec-packages/commit/00156b566dbc2973ddc8a61550000e980f9c3454)) +* **ci:** Optimize e2e build ([#6202](https://github.com/AztecProtocol/aztec-packages/issues/6202)) ([4614059](https://github.com/AztecProtocol/aztec-packages/commit/4614059c9667d4b42063d47a2b4cc5b24d54db9b)) +* **ci:** Rollback earthly prune ([#6208](https://github.com/AztecProtocol/aztec-packages/issues/6208)) ([3ccc6ac](https://github.com/AztecProtocol/aztec-packages/commit/3ccc6acae834f9add0548c0ca044e65a2e13b08b)) +* **ci:** Try to make base image more stable ([#6144](https://github.com/AztecProtocol/aztec-packages/issues/6144)) ([979a22d](https://github.com/AztecProtocol/aztec-packages/commit/979a22d5668f5b46c350f2355b60da8bd59e2cda)) +* Debug log oracle calls return nothing ([#6209](https://github.com/AztecProtocol/aztec-packages/issues/6209)) ([151d3a3](https://github.com/AztecProtocol/aztec-packages/commit/151d3a3feaad5cf59041eac1b47f2bc31d1dbcf2)) +* **docs:** Fix some typos in specs of private kernel initial ([#6224](https://github.com/AztecProtocol/aztec-packages/issues/6224)) 
([ead54c4](https://github.com/AztecProtocol/aztec-packages/commit/ead54c479ce221f6eed2b31fe37db82e615897ea)) +* E2e workaround ([#6158](https://github.com/AztecProtocol/aztec-packages/issues/6158)) ([7794d78](https://github.com/AztecProtocol/aztec-packages/commit/7794d788cb9675dbb4714f850e3a39d6dd3ce990)) +* Migrate acir tests to earthly ([#6142](https://github.com/AztecProtocol/aztec-packages/issues/6142)) ([18c8ea8](https://github.com/AztecProtocol/aztec-packages/commit/18c8ea8eb5f9fd1cb51c116d6d1976c774d51bc1)) +* Misc AVM migration prep changes ([#6253](https://github.com/AztecProtocol/aztec-packages/issues/6253)) ([fe19404](https://github.com/AztecProtocol/aztec-packages/commit/fe194043b6a7b7256b39b1db786b4df754b14890)) +* Nuking `GrumpkinScalar` ([#6240](https://github.com/AztecProtocol/aztec-packages/issues/6240)) ([d2df10d](https://github.com/AztecProtocol/aztec-packages/commit/d2df10d78036f6fb4e0dae5c7287e4523bd8b47d)) +* Release Noir(0.29.0) (https://github.com/noir-lang/noir/pull/4905) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02)) +* Rename instruction checks for side effects (https://github.com/noir-lang/noir/pull/4945) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0)) +* Replace relative paths to noir-protocol-circuits ([cf543a6](https://github.com/AztecProtocol/aztec-packages/commit/cf543a6ea944e49e9fff71e52620718385456428)) +* Replace relative paths to noir-protocol-circuits ([53cf7bb](https://github.com/AztecProtocol/aztec-packages/commit/53cf7bbc008fc1dae4c295901153d6751bf9eacd)) +* Replace relative paths to noir-protocol-circuits ([ca29cea](https://github.com/AztecProtocol/aztec-packages/commit/ca29cea33adda120adc90b3a32163625271af319)) +* Replace relative paths to noir-protocol-circuits 
([08e538b](https://github.com/AztecProtocol/aztec-packages/commit/08e538b3ef0805270c498b3d65443378cf720985)) +* Speedup static_call test ([#6157](https://github.com/AztecProtocol/aztec-packages/issues/6157)) ([abe8875](https://github.com/AztecProtocol/aztec-packages/commit/abe8875fe40703419fcf12653a21d734e8028b4d)) +* Switch Noir JS to use execute program instead of circuit (https://github.com/noir-lang/noir/pull/4965) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0)) +* Use correct call type ([#6064](https://github.com/AztecProtocol/aztec-packages/issues/6064)) ([b3ae289](https://github.com/AztecProtocol/aztec-packages/commit/b3ae289748954229aac7ae2e1fe72483ede79a52)) + + +### Documentation + +* Add GlobalVariables to CombinedConstantData ([#6071](https://github.com/AztecProtocol/aztec-packages/issues/6071)) ([cf026d2](https://github.com/AztecProtocol/aztec-packages/commit/cf026d2c5928ce081bfac1e0d85260075b06f418)) +* Update fees kernel tracking docs ([#6151](https://github.com/AztecProtocol/aztec-packages/issues/6151)) ([7d80428](https://github.com/AztecProtocol/aztec-packages/commit/7d804287889164873c5fdec452a9af0144bbe183)) + ## [0.37.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.36.0...aztec-packages-v0.37.0) (2024-05-02) diff --git a/CODEOWNERS b/CODEOWNERS index efa2632379ca..cdd57834a49f 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -2,7 +2,15 @@ /build_manifest.yml @charlielye # Notify the AVM team of any changes to public oracle. 
-/yarn-project/simulator/src/public/public_execution_context.ts @Maddiaa0 @fcarreiro +/yarn-project/simulator/src/public/public_execution_context.ts @Maddiaa0 @fcarreiro @dbanks12 # Notify the AVM team of changes to generated PIL code barretenberg/cpp/src/barretenberg/**/generated/* @Maddiaa0 @jeanmon @IlyasRidhuan + +# Notify the AVM team of any changes to public context or avm context. +/noir-projects/aztec-nr/aztec/src/context/inputs/public_context_inputs.nr @fcarreiro @dbanks12 +/noir-projects/aztec-nr/aztec/src/context/inputs/avm_context_inputs.nr @fcarreiro @dbanks12 +/noir-projects/aztec-nr/aztec/src/context/public_context.nr @fcarreiro @dbanks12 +/noir-projects/aztec-nr/aztec/src/context/avm_context.nr @fcarreiro @dbanks12 +/noir-projects/aztec-nr/aztec/src/context/interface.nr @fcarreiro @dbanks12 + diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index 7e530d6cc7b4..e0b11bd742a8 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -4,7 +4,7 @@ version = 3 [[package]] name = "acir" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir_field", "base64 0.21.7", @@ -18,7 +18,7 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.44.0" +version = "0.45.0" dependencies = [ "ark-bn254", "ark-ff", @@ -31,7 +31,7 @@ dependencies = [ [[package]] name = "acvm" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -44,13 +44,14 @@ dependencies = [ [[package]] name = "acvm_blackbox_solver" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir", "blake2", "blake3", "k256", "keccak", + "libaes", "num-bigint", "p256", "sha2", @@ -319,7 +320,7 @@ dependencies = [ [[package]] name = "aztec_macros" -version = "0.28.0" +version = "0.29.0" dependencies = [ "convert_case", "iter-extended", @@ -430,7 +431,7 @@ dependencies = [ [[package]] name = "brillig" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir_field", "serde", @@ -438,7 +439,7 @@ dependencies = [ 
[[package]] name = "brillig_vm" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -850,7 +851,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.28.0" +version = "0.29.0" dependencies = [ "codespan-reporting", "serde", @@ -1028,7 +1029,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.28.0" +version = "0.29.0" [[package]] name = "itertools" @@ -1127,6 +1128,12 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "libaes" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82903360c009b816f5ab72a9b68158c27c301ee2c3f20655b55c5e589e7d3bb7" + [[package]] name = "libc" version = "0.2.153" @@ -1193,7 +1200,7 @@ checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "noirc_abi" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "iter-extended", @@ -1209,11 +1216,11 @@ dependencies = [ [[package]] name = "noirc_arena" -version = "0.28.0" +version = "0.29.0" [[package]] name = "noirc_driver" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "aztec_macros", @@ -1234,7 +1241,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "base64 0.21.7", @@ -1252,7 +1259,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "chrono", @@ -1269,7 +1276,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "chumsky", @@ -1294,7 +1301,7 @@ dependencies = [ [[package]] name = "noirc_printable_type" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "iter-extended", diff --git a/avm-transpiler/src/transpile.rs b/avm-transpiler/src/transpile.rs index ecbc8f16f0c7..314dc77414ba 100644 --- a/avm-transpiler/src/transpile.rs +++ b/avm-transpiler/src/transpile.rs @@ -507,28 
+507,42 @@ fn handle_emit_unencrypted_log( inputs.len() ); } - let (event_offset, message_array) = match &inputs[..] { - [ValueOrArray::MemoryAddress(offset), ValueOrArray::HeapArray(array)] => { - (offset.to_usize() as u32, array) + let event_offset = match &inputs[0] { + ValueOrArray::MemoryAddress(offset) => offset.to_usize() as u32, + _ => panic!( + "Unexpected inputs[0] (event) for ForeignCall::EMITUNENCRYPTEDLOG: {:?}", + inputs[0] + ), + }; + let (message_offset, message_size, message_offset_indirect) = match &inputs[1] { + ValueOrArray::HeapArray(array) => { + // Heap array, so offset to array is an indirect memory offset + (array.pointer.to_usize() as u32, array.size as u32, true) } + ValueOrArray::MemoryAddress(single_val) => (single_val.to_usize() as u32, 1 as u32, false), _ => panic!( "Unexpected inputs for ForeignCall::EMITUNENCRYPTEDLOG: {:?}", inputs ), }; + let indirect_flag = if message_offset_indirect { + FIRST_OPERAND_INDIRECT + } else { + 0 + }; avm_instrs.push(AvmInstruction { opcode: AvmOpcode::EMITUNENCRYPTEDLOG, // The message array from Brillig is indirect. 
- indirect: Some(FIRST_OPERAND_INDIRECT), + indirect: Some(indirect_flag), operands: vec![ AvmOperand::U32 { value: event_offset, }, AvmOperand::U32 { - value: message_array.pointer.to_usize() as u32, + value: message_offset, }, AvmOperand::U32 { - value: message_array.size as u32, + value: message_size, }, ], ..Default::default() diff --git a/aztec-up/bin/.aztec-run b/aztec-up/bin/.aztec-run index cb72171601e8..77cfbcf3c258 100755 --- a/aztec-up/bin/.aztec-run +++ b/aztec-up/bin/.aztec-run @@ -98,9 +98,9 @@ while [[ "$#" -gt 0 ]]; do esac done -# Dynamic port assignment based on IMAGE containing '/aztec' +# Dynamic port assignment based on IMAGE containing '/aztec' and not containing 'builder' (to exclude aztec-builder) port_assignment="" -if [[ "$IMAGE" == *"/aztec"* ]]; then +if [[ "$IMAGE" == *"/aztec"* ]] && [[ "$IMAGE" != *"builder"* ]]; then port_assignment="-p $AZTEC_PORT:$AZTEC_PORT" fi diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 0835cb4328a8..3b68178f0910 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 8b31aae4675961c2f7c7f27d2f8b79edf8f68f12 - parent = 4daea40fc8d994f25321ee6359ad37321ccd99dd + commit = a0f30c4760a4fe7db9680377d97cd7a75b048fdb + parent = b2c019b6b11c3aaa98d8bbb79b77b42a5f87f0d0 method = merge cmdver = 0.4.6 diff --git a/barretenberg/CHANGELOG.md b/barretenberg/CHANGELOG.md index fa599e687dc1..08ba7134b53e 100644 --- a/barretenberg/CHANGELOG.md +++ b/barretenberg/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## [0.38.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.37.0...barretenberg-v0.38.0) (2024-05-07) + + +### ⚠ BREAKING CHANGES + +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) + +### Features + +* `multi_scalar_mul` blackbox func ([#6097](https://github.com/AztecProtocol/aztec-packages/issues/6097)) 
([f6b1ba6](https://github.com/AztecProtocol/aztec-packages/commit/f6b1ba60daf37a5a6466ca1e5ee7be70354af485)) +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) ([e4b97a8](https://github.com/AztecProtocol/aztec-packages/commit/e4b97a8cd7574a828c2a54b4a93b5ced79df6abf)) +* **avm:** Add TransactionFee opcode to simulator ([#6210](https://github.com/AztecProtocol/aztec-packages/issues/6210)) ([fcac844](https://github.com/AztecProtocol/aztec-packages/commit/fcac84451f657bb4a70c496538b443dda5bc961e)) +* Honk flows exposed through wasm ([#6096](https://github.com/AztecProtocol/aztec-packages/issues/6096)) ([c9b3206](https://github.com/AztecProtocol/aztec-packages/commit/c9b32061b2849442516ff0395b69d9a230191234)) +* Osxcross ([#6099](https://github.com/AztecProtocol/aztec-packages/issues/6099)) ([6cc924d](https://github.com/AztecProtocol/aztec-packages/commit/6cc924dc44a36d9ef2aeda05ea69a120898fc272)) +* Recursive folding verifier and decider as ultra circuits and circuit simulator ([#6150](https://github.com/AztecProtocol/aztec-packages/issues/6150)) ([acc8641](https://github.com/AztecProtocol/aztec-packages/commit/acc86416668ccfd6425ee3af4a898f2e8513168b)) +* Reproducible ClientIVC proofs ([#6227](https://github.com/AztecProtocol/aztec-packages/issues/6227)) ([c145757](https://github.com/AztecProtocol/aztec-packages/commit/c145757a13ba4ff881c4bb05c4caaee7351053b3)) + + +### Bug Fixes + +* Correct circuit size estimation for UltraHonk ([#6164](https://github.com/AztecProtocol/aztec-packages/issues/6164)) ([ed84fe3](https://github.com/AztecProtocol/aztec-packages/commit/ed84fe3bcc29c69b1e9d9caafd2c2c2134a67dce)) +* Sporadic failure of GoblinRecursionTests.Vanilla ([#6218](https://github.com/AztecProtocol/aztec-packages/issues/6218)) 
([f4ecea5](https://github.com/AztecProtocol/aztec-packages/commit/f4ecea5a83bcc88fd11698ac5c8e174c2461a74b)) + + +### Miscellaneous + +* **ci:** Fix restarts with fresh spot, acir test fixes, non-mandatory benches ([#6226](https://github.com/AztecProtocol/aztec-packages/issues/6226)) ([adb7f37](https://github.com/AztecProtocol/aztec-packages/commit/adb7f37a4ad01acf1ef197189a1e78323cae8f0b)) +* Migrate acir tests to earthly ([#6142](https://github.com/AztecProtocol/aztec-packages/issues/6142)) ([18c8ea8](https://github.com/AztecProtocol/aztec-packages/commit/18c8ea8eb5f9fd1cb51c116d6d1976c774d51bc1)) + ## [0.37.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.36.0...barretenberg-v0.37.0) (2024-05-02) diff --git a/barretenberg/acir_tests/Dockerfile.bb b/barretenberg/acir_tests/Dockerfile.bb deleted file mode 100644 index 20cc12846dfc..000000000000 --- a/barretenberg/acir_tests/Dockerfile.bb +++ /dev/null @@ -1,26 +0,0 @@ -FROM aztecprotocol/barretenberg-x86_64-linux-clang-assert -FROM aztecprotocol/noir-compile-acir-tests as noir-acir-tests - -FROM node:18.19.0 -RUN apt update && apt install git bash curl jq coreutils -y -COPY --from=0 /usr/src/barretenberg/cpp/build /usr/src/barretenberg/cpp/build -COPY --from=noir-acir-tests /usr/src/noir/noir-repo/test_programs /usr/src/noir/noir-repo/test_programs -WORKDIR /usr/src/barretenberg/acir_tests -COPY . . -# Run every acir test through native bb build prove_then_verify flow for UltraPlonk. -# This ensures we test independent pk construction through real/garbage witness data paths. 
-RUN FLOW=prove_then_verify ./run_acir_tests.sh -# Construct and separately verify a UltraHonk proof for a single program -RUN FLOW=prove_then_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof -# Construct and separately verify a GoblinUltraHonk proof for all acir programs -RUN FLOW=prove_then_verify_goblin_ultra_honk ./run_acir_tests.sh -# Construct and verify a UltraHonk proof for a single program -RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof -# Construct and verify a Goblin UltraHonk (GUH) proof for a single arbitrary program -RUN FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array -# Construct and verify a UltraHonk proof for all ACIR programs using the new witness stack workflow -RUN FLOW=prove_and_verify_ultra_honk_program ./run_acir_tests.sh -# This is a "full" Goblin flow. It constructs and verifies four proofs: GoblinUltraHonk, ECCVM, Translator, and merge -RUN FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array -# Run 1_mul through native bb build, all_cmds flow, to test all cli args. 
-RUN VERBOSE=1 FLOW=all_cmds ./run_acir_tests.sh 1_mul diff --git a/barretenberg/cpp/CMakeLists.txt b/barretenberg/cpp/CMakeLists.txt index 24b3836c41ee..5a63e8b373f8 100644 --- a/barretenberg/cpp/CMakeLists.txt +++ b/barretenberg/cpp/CMakeLists.txt @@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.24 FATAL_ERROR) project( Barretenberg DESCRIPTION "BN254 elliptic curve library, and PLONK SNARK prover" - VERSION 0.37.0 # x-release-please-version + VERSION 0.38.0 # x-release-please-version LANGUAGES CXX C ) # Insert version into `bb` config file diff --git a/barretenberg/cpp/Earthfile b/barretenberg/cpp/Earthfile index 7d3c42a6e64c..7d97dbfd1918 100644 --- a/barretenberg/cpp/Earthfile +++ b/barretenberg/cpp/Earthfile @@ -180,6 +180,9 @@ test-clang-format: test: ARG hardware_concurrency="" + # prefetch + BUILD +test-binaries + BUILD +preset-release-assert-test BUILD +test-clang-format BUILD ./srs_db/+build # prefetch FROM +source diff --git a/barretenberg/cpp/scripts/analyze_client_ivc_bench.py b/barretenberg/cpp/scripts/analyze_client_ivc_bench.py index 46a37826efae..07809f1f1cc7 100644 --- a/barretenberg/cpp/scripts/analyze_client_ivc_bench.py +++ b/barretenberg/cpp/scripts/analyze_client_ivc_bench.py @@ -71,3 +71,39 @@ print(f"{key:<{max_label_length}}{time_ms:>8.0f} {time_ms/total_time_ms:>8.2%}") +# Relations breakdown +# Note: The timings here are off likely because the tracking is occuring in a hot loop but +# they should be meaningful relative to one another +print('\nRelation contributions (times to be interpreted relatively):') +relations = [ + "Arithmetic::accumulate(t)", + "Permutation::accumulate(t)", + "Lookup::accumulate(t)", + "DeltaRange::accumulate(t)", + "Elliptic::accumulate(t)", + "Auxiliary::accumulate(t)", + "EccOp::accumulate(t)", + "DatabusRead::accumulate(t)", + "PoseidonExt::accumulate(t)", + "PoseidonInt::accumulate(t)", +] +with open(PREFIX/IVC_BENCH_JSON, "r") as read_file: + read_result = json.load(read_file) + for _bench in 
read_result["benchmarks"]: + if _bench["name"] == BENCHMARK: + bench = _bench +bench_components = dict(filter(lambda x: x[0] in relations, bench.items())) + +# For each kept time, get the proportion over all kept times. +sum_of_kept_times_ms = sum(float(time) + for _, time in bench_components.items())/1e6 +max_label_length = max(len(label) for label in relations) +column = {"function": "function", "ms": "ms", "%": "% sum"} +print( + f"{column['function']:<{max_label_length}}{column['ms']:>8} {column['%']:>8}") +for key in relations: + if key not in bench: + time_ms = 0 + else: + time_ms = bench[key]/1e6 + print(f"{key:<{max_label_length}}{time_ms:>8.0f} {time_ms/sum_of_kept_times_ms:>8.2%}") \ No newline at end of file diff --git a/barretenberg/cpp/scripts/benchmark_client_ivc.sh b/barretenberg/cpp/scripts/benchmark_client_ivc.sh index 17a193c6d829..7991ef879407 100755 --- a/barretenberg/cpp/scripts/benchmark_client_ivc.sh +++ b/barretenberg/cpp/scripts/benchmark_client_ivc.sh @@ -2,6 +2,7 @@ set -eu TARGET="client_ivc_bench" +# Note: to run structured trace version, change "Full" to "FullStructured" here and in analyze script FILTER="ClientIVCBench/Full/6$" BUILD_DIR=build-op-count-time diff --git a/barretenberg/cpp/scripts/compare_branch_vs_baseline.sh b/barretenberg/cpp/scripts/compare_branch_vs_baseline.sh new file mode 100755 index 000000000000..f798893781e7 --- /dev/null +++ b/barretenberg/cpp/scripts/compare_branch_vs_baseline.sh @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +# Install requirements (numpy + scipy) for comparison script if necessary. +# Note: By default, installation will occur in $HOME/.local/bin. +# pip3 install --user -r $BUILD_DIR/_deps/benchmark-src/requirements.txt + + +# This script is used to compare a suite of benchmarks between baseline (default: master) and +# the branch from which the script is run. Simply check out the branch of interest, ensure +# it is up to date with local master, and run the script. 
+ +# Specify the benchmark suite and the "baseline" branch against which to compare +BENCHMARK=${1:-goblin_bench} +FILTER=${2:-""} +PRESET=${3:-clang16} +BUILD_DIR=${4:-build} +HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} + +BASELINE_BRANCH="master" +BENCH_TOOLS_DIR="$BUILD_DIR/_deps/benchmark-src/tools" + +echo -e "\nComparing $BENCHMARK between $BASELINE_BRANCH and current branch:" + +# Move above script dir. +cd $(dirname $0)/.. + +# Configure and build benchmark in feature branch +echo -e "\nConfiguring and building $BENCHMARK in current feature branch...\n" +cmake --preset $PRESET +cmake --build --preset $PRESET --target $BENCHMARK + +# Run bench in current branch +echo -e "\nRunning benchmark in feature branch.." +./scripts/benchmark.sh $BENCHMARK\ + "./$BENCHMARK --benchmark_filter=$FILTER\ + --benchmark_out=results_after.json\ + --benchmark_out_format=json"\ + $PRESET + $BUILD_DIR + +# Configure and build benchmark in $BASELINE branch +echo -e "\nConfiguring and building $BENCHMARK in $BASELINE_BRANCH...\n" +git checkout $BASELINE_BRANCH +cmake --preset $PRESET +cmake --build --preset $PRESET --target $BENCHMARK + +# Run bench in current branch +echo -e "\nRunning benchmark in feature branch.." +./scripts/benchmark.sh $BENCHMARK\ + "./$BENCHMARK --benchmark_filter=$FILTER\ + --benchmark_out=results_before.json\ + --benchmark_out_format=json"\ + $PRESET + $BUILD_DIR + +# Call compare.py on the results (json) to get high level statistics. +# See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. 
+$BENCH_TOOLS_DIR/compare.py benchmarks $BUILD_DIR/results_before.json $BUILD_DIR/results_after.json + +# Return to branch from which the script was called +git checkout - \ No newline at end of file diff --git a/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote.sh b/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote.sh new file mode 100755 index 000000000000..edd23d05119a --- /dev/null +++ b/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote.sh @@ -0,0 +1,55 @@ +#!/usr/bin/env bash + +# Install requirements (numpy + scipy) for comparison script if necessary. +# Note: By default, installation will occur in $HOME/.local/bin. +# pip3 install --user -r $BUILD_DIR/_deps/benchmark-src/requirements.txt + + +# This script is used to compare a suite of benchmarks between baseline (default: master) and +# the branch from which the script is run. Simply check out the branch of interest, ensure +# it is up to date with local master, and run the script. + +# Specify the benchmark suite and the "baseline" branch against which to compare +BENCHMARK=${1:-goblin_bench} +FILTER=${2:-"*."} +PRESET=${3:-clang16} +BUILD_DIR=${4:-build} +HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} + +BASELINE_BRANCH="master" +BENCH_TOOLS_DIR="$BUILD_DIR/_deps/benchmark-src/tools" + +echo -e "\nComparing $BENCHMARK between $BASELINE_BRANCH and current branch:" + +# Move above script dir. +cd $(dirname $0)/.. + +# Run benchmark in current branch +echo -e "\nRunning benchmark in feature branch.." +./scripts/benchmark_remote.sh $BENCHMARK\ + "./$BENCHMARK --benchmark_filter=$FILTER\ + --benchmark_out=results_after.json\ + --benchmark_out_format=json"\ + $PRESET\ + $BUILD_DIR + +scp $BB_SSH_KEY $BB_SSH_INSTANCE:$BB_SSH_CPP_PATH/build/results_after.json $BUILD_DIR/ + +# Run benchmark in baseline branch +git checkout $BASELINE_BRANCH +echo -e "\nRunning benchmark in feature branch.." 
+./scripts/benchmark_remote.sh $BENCHMARK\ + "./$BENCHMARK --benchmark_filter=$FILTER\ + --benchmark_out=results_before.json\ + --benchmark_out_format=json"\ + $PRESET\ + $BUILD_DIR + +scp $BB_SSH_KEY $BB_SSH_INSTANCE:$BB_SSH_CPP_PATH/build/results_before.json $BUILD_DIR/ + +# Call compare.py on the results (json) to get high level statistics. +# See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. +$BENCH_TOOLS_DIR/compare.py benchmarks $BUILD_DIR/results_before.json $BUILD_DIR/results_after.json + +# Return to branch from which the script was called +git checkout - \ No newline at end of file diff --git a/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote_wasm.sh b/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote_wasm.sh new file mode 100755 index 000000000000..d7732ffc41a8 --- /dev/null +++ b/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote_wasm.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash + +# Install requirements (numpy + scipy) for comparison script if necessary. +# Note: By default, installation will occur in $HOME/.local/bin. +# pip3 install --user -r $BUILD_DIR/_deps/benchmark-src/requirements.txt + + +# This script is used to compare a suite of benchmarks between baseline (default: master) and +# the branch from which the script is run. Simply check out the branch of interest, ensure +# it is up to date with local master, and run the script. + +# Specify the benchmark suite and the "baseline" branch against which to compare +BENCHMARK=${1:-goblin_bench} +FILTER=${2:-"*."} +PRESET=${3:-wasm-threads} +BUILD_DIR=${4:-build-wasm-threads} +HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} + +BASELINE_BRANCH="master" +BENCH_TOOLS_DIR="$BUILD_DIR/_deps/benchmark-src/tools" + +echo -e "\nComparing $BENCHMARK between $BASELINE_BRANCH and current branch:" + +# Move above script dir. +cd $(dirname $0)/.. + +# Run benchmark in feature branch +echo -e "\nRunning benchmark in feature branch.." 
+./scripts/benchmark_wasm_remote.sh $BENCHMARK\ + "./$BENCHMARK --benchmark_filter=$FILTER\ + --benchmark_out=../results_after.json\ + --benchmark_out_format=json" + +scp $BB_SSH_KEY $BB_SSH_INSTANCE:$BB_SSH_CPP_PATH/results_after.json $BUILD_DIR/ + +# Run benchmark in $BASELINE branch + +echo -e "\nRunning benchmark in baseline branch.." +git checkout $BASELINE_BRANCH +./scripts/benchmark_wasm_remote.sh $BENCHMARK\ + "./$BENCHMARK --benchmark_filter=$FILTER\ + --benchmark_out=../results_before.json\ + --benchmark_out_format=json" + +scp $BB_SSH_KEY $BB_SSH_INSTANCE:$BB_SSH_CPP_PATH/results_before.json $BUILD_DIR/ + +# Call compare.py on the results (json) to get high level statistics. +# See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. +$BENCH_TOOLS_DIR/compare.py benchmarks $BUILD_DIR/results_before.json $BUILD_DIR/results_after.json + +# Return to branch from which the script was called +git checkout - \ No newline at end of file diff --git a/barretenberg/cpp/scripts/compare_client_ivc_bench.sh b/barretenberg/cpp/scripts/compare_client_ivc_bench.sh new file mode 100755 index 000000000000..aa4179d6df07 --- /dev/null +++ b/barretenberg/cpp/scripts/compare_client_ivc_bench.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +set -eu + +./scripts/compare_branch_vs_baseline_remote_wasm.sh client_ivc_bench 'Full/6$' \ No newline at end of file diff --git a/barretenberg/cpp/src/CMakeLists.txt b/barretenberg/cpp/src/CMakeLists.txt index 7f7b588c58de..a9809f12c616 100644 --- a/barretenberg/cpp/src/CMakeLists.txt +++ b/barretenberg/cpp/src/CMakeLists.txt @@ -179,6 +179,7 @@ if(WASM) $ $ $ + $ $ $ $ diff --git a/barretenberg/cpp/src/barretenberg/benchmark/client_ivc_bench/client_ivc.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/client_ivc_bench/client_ivc.bench.cpp index dcbcd0c39a5f..ded7acc08f14 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/client_ivc_bench/client_ivc.bench.cpp +++ 
b/barretenberg/cpp/src/barretenberg/benchmark/client_ivc_bench/client_ivc.bench.cpp @@ -173,6 +173,25 @@ BENCHMARK_DEFINE_F(ClientIVCBench, Full)(benchmark::State& state) } } +/** + * @brief Benchmark the prover work for the full PG-Goblin IVC protocol + * + */ +BENCHMARK_DEFINE_F(ClientIVCBench, FullStructured)(benchmark::State& state) +{ + ClientIVC ivc; + ivc.structured_flag = true; + ivc.precompute_folding_verification_keys(); + for (auto _ : state) { + BB_REPORT_OP_COUNT_IN_BENCH(state); + // Perform a specified number of iterations of function/kernel accumulation + perform_ivc_accumulation_rounds(state, ivc); + + // Construct IVC scheme proof (fold, decider, merge, eccvm, translator) + ivc.prove(); + } +} + /** * @brief Benchmark only the accumulation rounds * @@ -252,6 +271,7 @@ BENCHMARK_DEFINE_F(ClientIVCBench, Translator)(benchmark::State& state) ->Arg(1 << 6) BENCHMARK_REGISTER_F(ClientIVCBench, Full)->Unit(benchmark::kMillisecond)->ARGS; +BENCHMARK_REGISTER_F(ClientIVCBench, FullStructured)->Unit(benchmark::kMillisecond)->ARGS; BENCHMARK_REGISTER_F(ClientIVCBench, Accumulate)->Unit(benchmark::kMillisecond)->ARGS; BENCHMARK_REGISTER_F(ClientIVCBench, Decide)->Unit(benchmark::kMillisecond)->ARGS; BENCHMARK_REGISTER_F(ClientIVCBench, ECCVM)->Unit(benchmark::kMillisecond)->ARGS; diff --git a/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh b/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh deleted file mode 100755 index 34ee2ce171dc..000000000000 --- a/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash - -# This script is used to compare a suite of benchmarks between baseline (default: master) and -# the branch from which the script is run. Simply check out the branch of interest, ensure -# it is up to date with local master, and run the script. 
- -# Specify the benchmark suite and the "baseline" branch against which to compare -BENCH_TARGET=${1:?"Please provide the name of a benchmark target."} -BASELINE_BRANCH="master" - -echo -e "\nComparing $BENCH_TARGET between $BASELINE_BRANCH and current branch:" -# Set some directories -BASE_DIR="$HOME/aztec-packages/barretenberg/cpp" -BUILD_DIR="$BASE_DIR/build-bench" # matches build dir specified in bench preset -BENCH_RESULTS_DIR="$BASE_DIR/tmp_bench_results" -BENCH_TOOLS_DIR="$BUILD_DIR/_deps/benchmark-src/tools" - -# Install requirements (numpy + scipy) for comparison script if necessary. -# Note: By default, installation will occur in $HOME/.local/bin. -pip3 install --user -r $BUILD_DIR/_deps/benchmark-src/requirements.txt - -# Create temporary directory for benchmark results (json) -cd $BASE_DIR -mkdir $BENCH_RESULTS_DIR - -# Build and run bench in current branch -echo -e "\nConfiguring and building $BENCH_TARGET in current feature branch..\n" -rm -rf $BUILD_DIR -cmake --preset bench > /dev/null && cmake --build --preset bench --target $BENCH_TARGET -cd build-bench -BRANCH_RESULTS="$BENCH_RESULTS_DIR/results_branch.json" -echo -e "\nRunning $BENCH_TARGET in feature branch.." -bin/$BENCH_TARGET --benchmark_format=json > $BRANCH_RESULTS - -# Checkout baseline branch, run benchmarks, save results in json format -echo -e "\nConfiguring and building $BENCH_TARGET in $BASELINE_BRANCH branch..\n" -git checkout master > /dev/null -cd $BASE_DIR -rm -rf $BUILD_DIR -cmake --preset bench > /dev/null && cmake --build --preset bench --target $BENCH_TARGET -cd build-bench -BASELINE_RESULTS="$BENCH_RESULTS_DIR/results_baseline.json" -echo -e "\nRunning $BENCH_TARGET in master.." -bin/$BENCH_TARGET --benchmark_format=json > $BASELINE_RESULTS - -# Call compare.py on the results (json) to get high level statistics. -# See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. 
-$BENCH_TOOLS_DIR/compare.py benchmarks $BASELINE_RESULTS $BRANCH_RESULTS - -# Return to branch from which the script was called -git checkout - - -# Delete the temporary results directory and its contents -rm -r $BENCH_RESULTS_DIR diff --git a/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp index 48959e431e9b..af040b8da4b8 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp @@ -1,4 +1,5 @@ #include "barretenberg/eccvm/eccvm_flavor.hpp" +#include "barretenberg/protogalaxy/protogalaxy_prover.hpp" #include "barretenberg/stdlib_circuit_builders/goblin_ultra_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_flavor.hpp" #include "barretenberg/translator_vm/goblin_translator_flavor.hpp" @@ -13,46 +14,109 @@ namespace bb::benchmark::relations { using Fr = bb::fr; using Fq = grumpkin::fr; -template void execute_relation(::benchmark::State& state) +// Generic helper for executing Relation::accumulate for the template specified input type +template +void execute_relation(::benchmark::State& state) { using FF = typename Flavor::FF; - using AllValues = typename Flavor::AllValues; - using SumcheckArrayOfValuesOverSubrelations = typename Relation::SumcheckArrayOfValuesOverSubrelations; auto params = bb::RelationParameters::get_random(); - // Extract an array containing all the polynomial evaluations at a given row i - AllValues new_value{}; - // Define the appropriate SumcheckArrayOfValuesOverSubrelations type for this relation and initialize to zero - SumcheckArrayOfValuesOverSubrelations accumulator; - // Evaluate each constraint in the relation and check that each is satisfied + // Instantiate zero-initialized inputs and accumulator + Input input{}; + Accumulator accumulator; for (auto _ : state) { - 
Relation::accumulate(accumulator, new_value, params, 1); + Relation::accumulate(accumulator, input, params, 1); } } -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); - -BENCHMARK(execute_relation>); - -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); - -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); + +// Single execution of relation on values (FF), e.g. Sumcheck verifier / PG perturbator work +template void execute_relation_for_values(::benchmark::State& state) +{ + using Input = typename Flavor::AllValues; + using Accumulator = typename Relation::SumcheckArrayOfValuesOverSubrelations; + + execute_relation(state); +} + +// Single execution of relation on Sumcheck univariates, i.e. Sumcheck/Decider prover work +template void execute_relation_for_univariates(::benchmark::State& state) +{ + using Input = typename Flavor::ExtendedEdges; + using Accumulator = typename Relation::SumcheckTupleOfUnivariatesOverSubrelations; + + execute_relation(state); +} + +// Single execution of relation on PG univariates, i.e. 
PG combiner work +template void execute_relation_for_pg_univariates(::benchmark::State& state) +{ + using ProverInstances = ProverInstances_; + using ProtoGalaxyProver = ProtoGalaxyProver_; + using Input = ProtoGalaxyProver::ExtendedUnivariates; + using Accumulator = typename Relation::template ProtogalaxyTupleOfUnivariatesOverSubrelations; + + execute_relation(state); +} + +// Ultra relations (PG prover combiner work) +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); + +// Goblin-Ultra only relations (PG prover combiner work) +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); + +// Ultra relations (Sumcheck prover work) +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); + +// Goblin-Ultra only relations (Sumcheck prover work) +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); + +// Ultra relations (verifier work) +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); + +// Goblin-Ultra only relations (verifier work) +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); 
+BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); + +// Translator VM +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); + +// ECCVM +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); } // namespace bb::benchmark::relations diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp index eef0001e0c2a..758a313658be 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp @@ -11,7 +11,7 @@ namespace bb { void ClientIVC::initialize(ClientCircuit& circuit) { goblin.merge(circuit); // Construct new merge proof - prover_fold_output.accumulator = std::make_shared(circuit); + prover_fold_output.accumulator = std::make_shared(circuit, structured_flag); } /** @@ -24,7 +24,7 @@ void ClientIVC::initialize(ClientCircuit& circuit) ClientIVC::FoldProof ClientIVC::accumulate(ClientCircuit& circuit) { goblin.merge(circuit); // Add recursive merge verifier and construct new merge proof - prover_instance = std::make_shared(circuit); + prover_instance = std::make_shared(circuit, structured_flag); FoldingProver folding_prover({ prover_fold_output.accumulator, prover_instance }); prover_fold_output = folding_prover.fold_instances(); return prover_fold_output.folding_data; @@ -53,7 +53,7 @@ bool ClientIVC::verify(Proof& proof, const std::vector& ver // Decider verification ClientIVC::FoldingVerifier folding_verifier({ verifier_instances[0], verifier_instances[1] }); - auto verifier_accumulator = 
folding_verifier.verify_folding_proof(proof.fold_proof); + auto verifier_accumulator = folding_verifier.verify_folding_proof(proof.folding_proof); ClientIVC::DeciderVerifier decider_verifier(verifier_accumulator); bool decision = decider_verifier.verify_proof(proof.decider_proof); diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp index f2e0ee309c3b..54c6a7991ff5 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp @@ -37,9 +37,24 @@ class ClientIVC { // A full proof for the IVC scheme struct Proof { - FoldProof fold_proof; // final fold proof + FoldProof folding_proof; // final fold proof HonkProof decider_proof; Goblin::Proof goblin_proof; + + std::vector to_buffer() const + { + size_t proof_size = folding_proof.size() + decider_proof.size() + goblin_proof.size(); + + std::vector result; + result.reserve(proof_size); + const auto insert = [&result](const std::vector& buf) { + result.insert(result.end(), buf.begin(), buf.end()); + }; + insert(folding_proof); + insert(decider_proof); + insert(goblin_proof.to_buffer()); + return result; + } }; struct PrecomputedVerificationKeys { @@ -63,6 +78,9 @@ class ClientIVC { // be needed in the real IVC as they are provided as inputs std::shared_ptr prover_instance; + // A flag indicating whether or not to construct a structured trace in the ProverInstance + bool structured_flag = false; + void initialize(ClientCircuit& circuit); FoldProof accumulate(ClientCircuit& circuit); diff --git a/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt index 9a62acd63560..158c5752774a 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt @@ -2,6 +2,7 @@ barretenberg_module( dsl plonk stdlib_sha256 + stdlib_aes128 stdlib_keccak stdlib_poseidon2 
crypto_merkle_tree diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp index 32972a287810..f74228a115f0 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp @@ -31,6 +31,11 @@ void build_constraints(Builder& builder, AcirFormat const& constraint_system, bo builder.create_range_constraint(constraint.witness, constraint.num_bits, ""); } + // Add aes128 constraints + for (const auto& constraint : constraint_system.aes128_constraints) { + create_aes128_constraints(builder, constraint); + } + // Add sha256 constraints for (const auto& constraint : constraint_system.sha256_constraints) { create_sha256_constraints(builder, constraint); @@ -84,14 +89,10 @@ void build_constraints(Builder& builder, AcirFormat const& constraint_system, bo for (const auto& constraint : constraint_system.poseidon2_constraints) { create_poseidon2_permutations(builder, constraint); } - // Add fixed base scalar mul constraints - for (const auto& constraint : constraint_system.fixed_base_scalar_mul_constraints) { - create_fixed_base_constraint(builder, constraint); - } - // Add variable base scalar mul constraints - for (const auto& constraint : constraint_system.variable_base_scalar_mul_constraints) { - create_variable_base_constraint(builder, constraint); + // Add multi scalar mul constraints + for (const auto& constraint : constraint_system.multi_scalar_mul_constraints) { + create_multi_scalar_mul_constraint(builder, constraint); } // Add ec add constraints diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp index a7f5b4757375..8b7823260d09 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp @@ -1,4 +1,5 @@ #pragma 
once +#include "aes128_constraint.hpp" #include "barretenberg/common/slab_allocator.hpp" #include "barretenberg/serialize/msgpack.hpp" #include "bigint_constraint.hpp" @@ -8,16 +9,15 @@ #include "ec_operations.hpp" #include "ecdsa_secp256k1.hpp" #include "ecdsa_secp256r1.hpp" -#include "fixed_base_scalar_mul.hpp" #include "keccak_constraint.hpp" #include "logic_constraint.hpp" +#include "multi_scalar_mul.hpp" #include "pedersen.hpp" #include "poseidon2_constraint.hpp" #include "range_constraint.hpp" #include "recursion_constraint.hpp" #include "schnorr_verify.hpp" #include "sha256_constraint.hpp" -#include "variable_base_scalar_mul.hpp" #include namespace acir_format { @@ -36,6 +36,7 @@ struct AcirFormat { std::vector logic_constraints; std::vector range_constraints; + std::vector aes128_constraints; std::vector sha256_constraints; std::vector sha256_compression; std::vector schnorr_constraints; @@ -48,8 +49,7 @@ struct AcirFormat { std::vector pedersen_constraints; std::vector pedersen_hash_constraints; std::vector poseidon2_constraints; - std::vector fixed_base_scalar_mul_constraints; - std::vector variable_base_scalar_mul_constraints; + std::vector multi_scalar_mul_constraints; std::vector ec_add_constraints; std::vector recursion_constraints; std::vector bigint_from_le_bytes_constraints; @@ -71,6 +71,7 @@ struct AcirFormat { public_inputs, logic_constraints, range_constraints, + aes128_constraints, sha256_constraints, sha256_compression, schnorr_constraints, @@ -83,8 +84,7 @@ struct AcirFormat { pedersen_constraints, pedersen_hash_constraints, poseidon2_constraints, - fixed_base_scalar_mul_constraints, - variable_base_scalar_mul_constraints, + multi_scalar_mul_constraints, ec_add_constraints, recursion_constraints, poly_triple_constraints, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp index 7de0f847b1f7..2d23b057c640 100644 --- 
a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp @@ -35,6 +35,7 @@ TEST_F(AcirFormatTests, TestASingleConstraintNoPubInputs) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -47,8 +48,7 @@ TEST_F(AcirFormatTests, TestASingleConstraintNoPubInputs) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -152,6 +152,7 @@ TEST_F(AcirFormatTests, TestLogicGateFromNoirCircuit) .public_inputs = { 1 }, .logic_constraints = { logic_constraint }, .range_constraints = { range_a, range_b }, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -164,8 +165,7 @@ TEST_F(AcirFormatTests, TestLogicGateFromNoirCircuit) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -221,6 +221,7 @@ TEST_F(AcirFormatTests, TestSchnorrVerifyPass) .public_inputs = {}, .logic_constraints = {}, .range_constraints = range_constraints, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = { schnorr_constraint }, @@ -233,8 +234,7 @@ TEST_F(AcirFormatTests, TestSchnorrVerifyPass) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + 
.multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -317,6 +317,7 @@ TEST_F(AcirFormatTests, TestSchnorrVerifySmallRange) .public_inputs = {}, .logic_constraints = {}, .range_constraints = range_constraints, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = { schnorr_constraint }, @@ -329,8 +330,7 @@ TEST_F(AcirFormatTests, TestSchnorrVerifySmallRange) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -432,6 +432,7 @@ TEST_F(AcirFormatTests, TestVarKeccak) .public_inputs = {}, .logic_constraints = {}, .range_constraints = { range_a, range_b, range_c, range_d }, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -444,8 +445,7 @@ TEST_F(AcirFormatTests, TestVarKeccak) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -480,6 +480,7 @@ TEST_F(AcirFormatTests, TestKeccakPermutation) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -492,8 +493,7 @@ TEST_F(AcirFormatTests, TestKeccakPermutation) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = 
{}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp index f31fc73c806a..110087d40af4 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp @@ -2,6 +2,7 @@ #include "acir_format.hpp" #include "barretenberg/common/container.hpp" #include "barretenberg/common/throw_or_abort.hpp" +#include "barretenberg/dsl/acir_format/aes128_constraint.hpp" #include "barretenberg/dsl/acir_format/bigint_constraint.hpp" #include "barretenberg/dsl/acir_format/blake2s_constraint.hpp" #include "barretenberg/dsl/acir_format/blake3_constraint.hpp" @@ -222,6 +223,31 @@ void handle_blackbox_func_call(Program::Opcode::BlackBoxFuncCall const& arg, Aci .witness = arg.input.witness.value, .num_bits = arg.input.num_bits, }); + } else if constexpr (std::is_same_v) { + af.aes128_constraints.push_back(AES128Constraint{ + .inputs = map(arg.inputs, + [](auto& e) { + return AES128Input{ + .witness = e.witness.value, + .num_bits = e.num_bits, + }; + }), + .iv = map(arg.iv, + [](auto& e) { + return AES128Input{ + .witness = e.witness.value, + .num_bits = e.num_bits, + }; + }), + .key = map(arg.key, + [](auto& e) { + return AES128Input{ + .witness = e.witness.value, + .num_bits = e.num_bits, + }; + }), + .outputs = map(arg.outputs, [](auto& e) { return e.value; }), + }); } else if constexpr (std::is_same_v) { af.sha256_constraints.push_back(Sha256Constraint{ .inputs = map(arg.inputs, @@ -310,19 +336,10 @@ void handle_blackbox_func_call(Program::Opcode::BlackBoxFuncCall const& arg, Aci .result = arg.output.value, .signature = map(arg.signature, [](auto& e) { return e.witness.value; }), }); - } else if constexpr (std::is_same_v) { - af.fixed_base_scalar_mul_constraints.push_back(FixedBaseScalarMul{ - .low 
= arg.low.witness.value, - .high = arg.high.witness.value, - .pub_key_x = arg.outputs[0].value, - .pub_key_y = arg.outputs[1].value, - }); - } else if constexpr (std::is_same_v) { - af.variable_base_scalar_mul_constraints.push_back(VariableBaseScalarMul{ - .point_x = arg.point_x.witness.value, - .point_y = arg.point_y.witness.value, - .scalar_low = arg.scalar_low.witness.value, - .scalar_high = arg.scalar_high.witness.value, + } else if constexpr (std::is_same_v) { + af.multi_scalar_mul_constraints.push_back(MultiScalarMul{ + .points = map(arg.points, [](auto& e) { return e.witness.value; }), + .scalars = map(arg.scalars, [](auto& e) { return e.witness.value; }), .out_point_x = arg.outputs[0].value, .out_point_y = arg.outputs[1].value, }); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.cpp new file mode 100644 index 000000000000..48777aa8136c --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.cpp @@ -0,0 +1,77 @@ +#include "aes128_constraint.hpp" +#include "barretenberg/stdlib/encryption/aes128/aes128.hpp" +#include +#include +#include + +namespace acir_format { + +template void create_aes128_constraints(Builder& builder, const AES128Constraint& constraint) +{ + + using field_ct = bb::stdlib::field_t; + + // Packs 16 bytes from the inputs (plaintext, iv, key) into a field element + const auto convert_input = [&](std::span inputs, size_t padding) { + field_ct converted = 0; + for (size_t i = 0; i < 16 - padding; ++i) { + converted *= 256; + field_ct byte = field_ct::from_witness_index(&builder, inputs[i].witness); + converted += byte; + } + for (size_t i = 0; i < padding; ++i) { + converted *= 256; + field_ct byte = padding; + converted += byte; + } + return converted; + }; + + // Packs 16 bytes from the outputs (witness indexes) into a field element for comparison + const auto convert_output = [&](std::span outputs) { + 
field_ct converted = 0; + for (const auto& output : outputs) { + converted *= 256; + field_ct byte = field_ct::from_witness_index(&builder, output); + converted += byte; + } + return converted; + }; + + const size_t padding_size = 16 - constraint.inputs.size() % 16; + + // Perform the conversions from array of bytes to field elements + std::vector converted_inputs; + for (size_t i = 0; i < constraint.inputs.size(); i += 16) { + field_ct to_add; + if (i + 16 > constraint.inputs.size()) { + to_add = convert_input( + std::span{ &constraint.inputs[i], 16 - padding_size }, + padding_size); + } else { + to_add = convert_input(std::span{ &constraint.inputs[i], 16 }, 0); + } + converted_inputs.emplace_back(to_add); + } + + std::vector converted_outputs; + for (size_t i = 0; i < constraint.outputs.size(); i += 16) { + std::span outputs{ &constraint.outputs[i], 16 }; + converted_outputs.emplace_back(convert_output(outputs)); + } + + const std::vector output_bytes = bb::stdlib::aes128::encrypt_buffer_cbc( + converted_inputs, convert_input(constraint.iv, 0), convert_input(constraint.key, 0)); + + for (size_t i = 0; i < output_bytes.size(); ++i) { + builder.assert_equal(output_bytes[i].normalize().witness_index, converted_outputs[i].normalize().witness_index); + } +} + +template void create_aes128_constraints(UltraCircuitBuilder& builder, + const AES128Constraint& constraint); + +template void create_aes128_constraints(GoblinUltraCircuitBuilder& builder, + const AES128Constraint& constraint); + +} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.hpp new file mode 100644 index 000000000000..b0833c1e4473 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.hpp @@ -0,0 +1,31 @@ +#pragma once +#include "barretenberg/dsl/types.hpp" +#include "barretenberg/serialize/msgpack.hpp" +#include +#include + +namespace 
acir_format { + +struct AES128Input { + uint32_t witness; + uint32_t num_bits; + + // For serialization, update with any new fields + MSGPACK_FIELDS(witness, num_bits); + friend bool operator==(AES128Input const& lhs, AES128Input const& rhs) = default; +}; + +struct AES128Constraint { + std::vector inputs; + std::array iv; + std::array key; + std::vector outputs; + + // For serialization, update with any new fields + MSGPACK_FIELDS(inputs, iv, key, outputs); + friend bool operator==(AES128Constraint const& lhs, AES128Constraint const& rhs) = default; +}; + +template void create_aes128_constraints(Builder& builder, const AES128Constraint& constraint); + +} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp index 3b32c7f26950..863737703ef3 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp @@ -172,6 +172,7 @@ TEST_F(BigIntTests, TestBigIntConstraintMultiple) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -184,8 +185,7 @@ TEST_F(BigIntTests, TestBigIntConstraintMultiple) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -241,6 +241,7 @@ TEST_F(BigIntTests, TestBigIntConstraintSimple) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -253,8 +254,7 @@ TEST_F(BigIntTests, 
TestBigIntConstraintSimple) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = { from_le_bytes_constraint_bigint1 }, @@ -295,6 +295,7 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -307,8 +308,7 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -353,6 +353,7 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse2) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -365,8 +366,7 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse2) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -432,6 +432,7 @@ TEST_F(BigIntTests, TestBigIntDIV) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -444,8 +445,7 @@ TEST_F(BigIntTests, TestBigIntDIV) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = 
{}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = { from_le_bytes_constraint_bigint1, from_le_bytes_constraint_bigint2 }, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp index 75b9150d335c..7cb3e5955bdc 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp @@ -114,6 +114,7 @@ TEST_F(UltraPlonkRAM, TestBlockConstraint) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -126,8 +127,7 @@ TEST_F(UltraPlonkRAM, TestBlockConstraint) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp index bdda21409a17..fb676af0a8bf 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp @@ -54,6 +54,7 @@ TEST_F(EcOperations, TestECOperations) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -66,8 +67,7 @@ TEST_F(EcOperations, TestECOperations) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, 
.poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = { ec_add_constraint }, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp index c494cc13e798..20dddfe4abe3 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp @@ -94,6 +94,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintSucceed) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -106,8 +107,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintSucceed) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -144,6 +144,7 @@ TEST_F(ECDSASecp256k1, TestECDSACompilesForVerifier) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -156,8 +157,7 @@ TEST_F(ECDSASecp256k1, TestECDSACompilesForVerifier) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -189,6 +189,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintFail) .public_inputs = {}, 
.logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -201,8 +202,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintFail) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp index 6728445d2371..6217149fdf05 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp @@ -128,6 +128,7 @@ TEST(ECDSASecp256r1, test_hardcoded) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -140,8 +141,7 @@ TEST(ECDSASecp256r1, test_hardcoded) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -180,6 +180,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -192,8 +193,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = 
{}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -230,6 +230,7 @@ TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -242,8 +243,7 @@ TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -275,6 +275,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintFail) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -287,8 +288,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintFail) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/fixed_base_scalar_mul.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/fixed_base_scalar_mul.cpp deleted file mode 100644 index 517a2baf6eee..000000000000 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/fixed_base_scalar_mul.cpp +++ /dev/null @@ -1,33 +0,0 @@ -#include "fixed_base_scalar_mul.hpp" -#include "barretenberg/dsl/types.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" -#include "barretenberg/plonk_honk_shared/arithmetization/gate_data.hpp" - -namespace 
acir_format { - -template void create_fixed_base_constraint(Builder& builder, const FixedBaseScalarMul& input) -{ - using cycle_group_ct = bb::stdlib::cycle_group; - using cycle_scalar_ct = typename bb::stdlib::cycle_group::cycle_scalar; - using field_ct = bb::stdlib::field_t; - - // We reconstruct the scalar from the low and high limbs - field_ct low_as_field = field_ct::from_witness_index(&builder, input.low); - field_ct high_as_field = field_ct::from_witness_index(&builder, input.high); - cycle_scalar_ct scalar(low_as_field, high_as_field); - - // We multiply the scalar with G1 to get the result - auto result = cycle_group_ct(grumpkin::g1::affine_one) * scalar; - - // Finally we add the constraints - builder.assert_equal(result.x.get_witness_index(), input.pub_key_x); - builder.assert_equal(result.y.get_witness_index(), input.pub_key_y); -} - -template void create_fixed_base_constraint(UltraCircuitBuilder& builder, - const FixedBaseScalarMul& input); -template void create_fixed_base_constraint(GoblinUltraCircuitBuilder& builder, - const FixedBaseScalarMul& input); - -} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/fixed_base_scalar_mul.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/fixed_base_scalar_mul.hpp deleted file mode 100644 index ef7d634870bc..000000000000 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/fixed_base_scalar_mul.hpp +++ /dev/null @@ -1,21 +0,0 @@ -#pragma once -#include "barretenberg/dsl/types.hpp" -#include "barretenberg/serialize/msgpack.hpp" -#include - -namespace acir_format { - -struct FixedBaseScalarMul { - uint32_t low; - uint32_t high; - uint32_t pub_key_x; - uint32_t pub_key_y; - - // for serialization, update with any new fields - MSGPACK_FIELDS(low, high, pub_key_x, pub_key_y); - friend bool operator==(FixedBaseScalarMul const& lhs, FixedBaseScalarMul const& rhs) = default; -}; - -template void create_fixed_base_constraint(Builder& builder, const FixedBaseScalarMul& 
input); - -} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.cpp new file mode 100644 index 000000000000..83354d97c767 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.cpp @@ -0,0 +1,48 @@ +#include "multi_scalar_mul.hpp" +#include "barretenberg/dsl/types.hpp" +#include "barretenberg/ecc/curves/bn254/fr.hpp" +#include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" +#include "barretenberg/plonk_honk_shared/arithmetization/gate_data.hpp" +#include "barretenberg/stdlib/primitives/biggroup/biggroup.hpp" + +namespace acir_format { + +template void create_multi_scalar_mul_constraint(Builder& builder, const MultiScalarMul& input) +{ + using cycle_group_ct = bb::stdlib::cycle_group; + using cycle_scalar_ct = typename bb::stdlib::cycle_group::cycle_scalar; + using field_ct = bb::stdlib::field_t; + + std::vector points; + std::vector scalars; + + for (size_t i = 0; i < input.points.size(); i += 2) { + // Instantiate the input point/variable base as `cycle_group_ct` + auto point_x = field_ct::from_witness_index(&builder, input.points[i]); + auto point_y = field_ct::from_witness_index(&builder, input.points[i + 1]); + cycle_group_ct input_point(point_x, point_y, false); + + // Reconstruct the scalar from the low and high limbs + field_ct scalar_low_as_field = field_ct::from_witness_index(&builder, input.scalars[i]); + field_ct scalar_high_as_field = field_ct::from_witness_index(&builder, input.scalars[i + 1]); + cycle_scalar_ct scalar(scalar_low_as_field, scalar_high_as_field); + + // Add the point and scalar to the vectors + points.push_back(input_point); + scalars.push_back(scalar); + } + + // Call batch_mul to multiply the points and scalars and sum the results + auto output_point = cycle_group_ct::batch_mul(scalars, points); + + // Add the constraints + 
builder.assert_equal(output_point.x.get_witness_index(), input.out_point_x); + builder.assert_equal(output_point.y.get_witness_index(), input.out_point_y); +} + +template void create_multi_scalar_mul_constraint(UltraCircuitBuilder& builder, + const MultiScalarMul& input); +template void create_multi_scalar_mul_constraint(GoblinUltraCircuitBuilder& builder, + const MultiScalarMul& input); + +} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.hpp new file mode 100644 index 000000000000..12b070076f9f --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.hpp @@ -0,0 +1,21 @@ +#pragma once +#include "barretenberg/dsl/types.hpp" +#include "barretenberg/serialize/msgpack.hpp" +#include + +namespace acir_format { + +struct MultiScalarMul { + std::vector points; + std::vector scalars; + uint32_t out_point_x; + uint32_t out_point_y; + + // for serialization, update with any new fields + MSGPACK_FIELDS(points, scalars, out_point_x, out_point_y); + friend bool operator==(MultiScalarMul const& lhs, MultiScalarMul const& rhs) = default; +}; + +template void create_multi_scalar_mul_constraint(Builder& builder, const MultiScalarMul& input); + +} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp index f672505b4d70..d35a9d369746 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp @@ -34,6 +34,7 @@ TEST_F(Poseidon2Tests, TestPoseidon2Permutation) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -46,8 +47,7 @@ 
TEST_F(Poseidon2Tests, TestPoseidon2Permutation) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = { poseidon2_constraint }, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp index 97e53d30c627..0b12a4119512 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp @@ -86,6 +86,7 @@ Builder create_inner_circuit() .public_inputs = { 1, 2 }, .logic_constraints = { logic_constraint }, .range_constraints = { range_a, range_b }, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -98,8 +99,7 @@ Builder create_inner_circuit() .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, @@ -244,6 +244,7 @@ Builder create_outer_circuit(std::vector& inner_circuits) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -256,8 +257,7 @@ Builder create_outer_circuit(std::vector& inner_circuits) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, .poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints 
= recursion_constraints, .bigint_from_le_bytes_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp index bdfb6605ad24..9fb0e2b3a35c 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp @@ -24,6 +24,17 @@ struct FunctionInput { struct BlackBoxFuncCall { + struct AES128Encrypt { + std::vector inputs; + std::array iv; + std::array key; + std::vector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + std::vector bincodeSerialize() const; + static AES128Encrypt bincodeDeserialize(std::vector); + }; + struct AND { Program::FunctionInput lhs; Program::FunctionInput rhs; @@ -135,26 +146,14 @@ struct BlackBoxFuncCall { static EcdsaSecp256r1 bincodeDeserialize(std::vector); }; - struct FixedBaseScalarMul { - Program::FunctionInput low; - Program::FunctionInput high; + struct MultiScalarMul { + std::vector points; + std::vector scalars; std::array outputs; - friend bool operator==(const FixedBaseScalarMul&, const FixedBaseScalarMul&); + friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); std::vector bincodeSerialize() const; - static FixedBaseScalarMul bincodeDeserialize(std::vector); - }; - - struct VariableBaseScalarMul { - Program::FunctionInput point_x; - Program::FunctionInput point_y; - Program::FunctionInput scalar_low; - Program::FunctionInput scalar_high; - std::array outputs; - - friend bool operator==(const VariableBaseScalarMul&, const VariableBaseScalarMul&); - std::vector bincodeSerialize() const; - static VariableBaseScalarMul bincodeDeserialize(std::vector); + static MultiScalarMul bincodeDeserialize(std::vector); }; struct EmbeddedCurveAdd { @@ -278,7 +277,8 @@ struct BlackBoxFuncCall { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant); }; +struct BlockType { + + struct Memory { + 
friend bool operator==(const Memory&, const Memory&); + std::vector bincodeSerialize() const; + static Memory bincodeDeserialize(std::vector); + }; + + struct CallData { + friend bool operator==(const CallData&, const CallData&); + std::vector bincodeSerialize() const; + static CallData bincodeDeserialize(std::vector); + }; + + struct ReturnData { + friend bool operator==(const ReturnData&, const ReturnData&); + std::vector bincodeSerialize() const; + static ReturnData bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const BlockType&, const BlockType&); + std::vector bincodeSerialize() const; + static BlockType bincodeDeserialize(std::vector); +}; + struct Expression { std::vector> mul_terms; std::vector> linear_combinations; @@ -454,6 +480,7 @@ struct Opcode { struct MemoryInit { Program::BlockId block_id; std::vector init; + Program::BlockType block_type; friend bool operator==(const MemoryInit&, const MemoryInit&); std::vector bincodeSerialize() const; @@ -655,6 +682,18 @@ struct HeapVector { struct BlackBoxOp { + struct AES128Encrypt { + Program::HeapVector inputs; + Program::HeapArray iv; + Program::HeapArray key; + Program::MemoryAddress length; + Program::HeapVector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + std::vector bincodeSerialize() const; + static AES128Encrypt bincodeDeserialize(std::vector); + }; + struct Sha256 { Program::HeapVector message; Program::HeapArray output; @@ -756,26 +795,14 @@ struct BlackBoxOp { static PedersenHash bincodeDeserialize(std::vector); }; - struct FixedBaseScalarMul { - Program::MemoryAddress low; - Program::MemoryAddress high; - Program::HeapArray result; - - friend bool operator==(const FixedBaseScalarMul&, const FixedBaseScalarMul&); - std::vector bincodeSerialize() const; - static FixedBaseScalarMul bincodeDeserialize(std::vector); - }; - - struct VariableBaseScalarMul { - Program::MemoryAddress point_x; - Program::MemoryAddress point_y; - 
Program::MemoryAddress scalar_low; - Program::MemoryAddress scalar_high; - Program::HeapArray result; + struct MultiScalarMul { + Program::HeapVector points; + Program::HeapVector scalars; + Program::HeapArray outputs; - friend bool operator==(const VariableBaseScalarMul&, const VariableBaseScalarMul&); + friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); std::vector bincodeSerialize() const; - static VariableBaseScalarMul bincodeDeserialize(std::vector); + static MultiScalarMul bincodeDeserialize(std::vector); }; struct EmbeddedCurveAdd { @@ -869,7 +896,8 @@ struct BlackBoxOp { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant::dese namespace Program { +inline bool operator==(const BlackBoxFuncCall::AES128Encrypt& lhs, const BlackBoxFuncCall::AES128Encrypt& rhs) +{ + if (!(lhs.inputs == rhs.inputs)) { + return false; + } + if (!(lhs.iv == rhs.iv)) { + return false; + } + if (!(lhs.key == rhs.key)) { + return false; + } + if (!(lhs.outputs == rhs.outputs)) { + return false; + } + return true; +} + +inline std::vector BlackBoxFuncCall::AES128Encrypt::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlackBoxFuncCall::AES128Encrypt BlackBoxFuncCall::AES128Encrypt::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize( + const Program::BlackBoxFuncCall::AES128Encrypt& obj, Serializer& serializer) +{ + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.iv, serializer); + serde::Serializable::serialize(obj.key, serializer); + 
serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Program::BlackBoxFuncCall::AES128Encrypt serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlackBoxFuncCall::AES128Encrypt obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.iv = serde::Deserializable::deserialize(deserializer); + obj.key = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Program { + inline bool operator==(const BlackBoxFuncCall::AND& lhs, const BlackBoxFuncCall::AND& rhs) { if (!(lhs.lhs == rhs.lhs)) { @@ -3036,12 +3125,12 @@ Program::BlackBoxFuncCall::EcdsaSecp256r1 serde::Deserializable BlackBoxFuncCall::FixedBaseScalarMul::bincodeSerialize() const +inline std::vector BlackBoxFuncCall::MultiScalarMul::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } -inline BlackBoxFuncCall::FixedBaseScalarMul BlackBoxFuncCall::FixedBaseScalarMul::bincodeDeserialize( - std::vector input) +inline BlackBoxFuncCall::MultiScalarMul BlackBoxFuncCall::MultiScalarMul::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw_or_abort("Some input bytes were not read"); } @@ -3072,91 +3160,22 @@ inline BlackBoxFuncCall::FixedBaseScalarMul BlackBoxFuncCall::FixedBaseScalarMul template <> template -void serde::Serializable::serialize( - const Program::BlackBoxFuncCall::FixedBaseScalarMul& obj, Serializer& serializer) +void serde::Serializable::serialize( + const Program::BlackBoxFuncCall::MultiScalarMul& obj, Serializer& serializer) { - 
serde::Serializable::serialize(obj.low, serializer); - serde::Serializable::serialize(obj.high, serializer); + serde::Serializable::serialize(obj.points, serializer); + serde::Serializable::serialize(obj.scalars, serializer); serde::Serializable::serialize(obj.outputs, serializer); } template <> template -Program::BlackBoxFuncCall::FixedBaseScalarMul serde::Deserializable< - Program::BlackBoxFuncCall::FixedBaseScalarMul>::deserialize(Deserializer& deserializer) -{ - Program::BlackBoxFuncCall::FixedBaseScalarMul obj; - obj.low = serde::Deserializable::deserialize(deserializer); - obj.high = serde::Deserializable::deserialize(deserializer); - obj.outputs = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - -inline bool operator==(const BlackBoxFuncCall::VariableBaseScalarMul& lhs, - const BlackBoxFuncCall::VariableBaseScalarMul& rhs) -{ - if (!(lhs.point_x == rhs.point_x)) { - return false; - } - if (!(lhs.point_y == rhs.point_y)) { - return false; - } - if (!(lhs.scalar_low == rhs.scalar_low)) { - return false; - } - if (!(lhs.scalar_high == rhs.scalar_high)) { - return false; - } - if (!(lhs.outputs == rhs.outputs)) { - return false; - } - return true; -} - -inline std::vector BlackBoxFuncCall::VariableBaseScalarMul::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); -} - -inline BlackBoxFuncCall::VariableBaseScalarMul BlackBoxFuncCall::VariableBaseScalarMul::bincodeDeserialize( - std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize( - const Program::BlackBoxFuncCall::VariableBaseScalarMul& obj, 
Serializer& serializer) -{ - serde::Serializable::serialize(obj.point_x, serializer); - serde::Serializable::serialize(obj.point_y, serializer); - serde::Serializable::serialize(obj.scalar_low, serializer); - serde::Serializable::serialize(obj.scalar_high, serializer); - serde::Serializable::serialize(obj.outputs, serializer); -} - -template <> -template -Program::BlackBoxFuncCall::VariableBaseScalarMul serde::Deserializable< - Program::BlackBoxFuncCall::VariableBaseScalarMul>::deserialize(Deserializer& deserializer) +Program::BlackBoxFuncCall::MultiScalarMul serde::Deserializable::deserialize( + Deserializer& deserializer) { - Program::BlackBoxFuncCall::VariableBaseScalarMul obj; - obj.point_x = serde::Deserializable::deserialize(deserializer); - obj.point_y = serde::Deserializable::deserialize(deserializer); - obj.scalar_low = serde::Deserializable::deserialize(deserializer); - obj.scalar_high = serde::Deserializable::deserialize(deserializer); + Program::BlackBoxFuncCall::MultiScalarMul obj; + obj.points = serde::Deserializable::deserialize(deserializer); + obj.scalars = serde::Deserializable::deserialize(deserializer); obj.outputs = serde::Deserializable::deserialize(deserializer); return obj; } @@ -3909,6 +3928,71 @@ Program::BlackBoxOp serde::Deserializable::deserialize(Dese namespace Program { +inline bool operator==(const BlackBoxOp::AES128Encrypt& lhs, const BlackBoxOp::AES128Encrypt& rhs) +{ + if (!(lhs.inputs == rhs.inputs)) { + return false; + } + if (!(lhs.iv == rhs.iv)) { + return false; + } + if (!(lhs.key == rhs.key)) { + return false; + } + if (!(lhs.length == rhs.length)) { + return false; + } + if (!(lhs.outputs == rhs.outputs)) { + return false; + } + return true; +} + +inline std::vector BlackBoxOp::AES128Encrypt::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlackBoxOp::AES128Encrypt 
BlackBoxOp::AES128Encrypt::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxOp::AES128Encrypt& obj, + Serializer& serializer) +{ + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.iv, serializer); + serde::Serializable::serialize(obj.key, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Program::BlackBoxOp::AES128Encrypt serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlackBoxOp::AES128Encrypt obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.iv = serde::Deserializable::deserialize(deserializer); + obj.key = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Program { + inline bool operator==(const BlackBoxOp::Sha256& lhs, const BlackBoxOp::Sha256& rhs) { if (!(lhs.message == rhs.message)) { @@ -4482,31 +4566,31 @@ Program::BlackBoxOp::PedersenHash serde::Deserializable BlackBoxOp::FixedBaseScalarMul::bincodeSerialize() const +inline std::vector BlackBoxOp::MultiScalarMul::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } -inline BlackBoxOp::FixedBaseScalarMul BlackBoxOp::FixedBaseScalarMul::bincodeDeserialize(std::vector input) +inline BlackBoxOp::MultiScalarMul BlackBoxOp::MultiScalarMul::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = 
serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw_or_abort("Some input bytes were not read"); } @@ -4517,91 +4601,23 @@ inline BlackBoxOp::FixedBaseScalarMul BlackBoxOp::FixedBaseScalarMul::bincodeDes template <> template -void serde::Serializable::serialize( - const Program::BlackBoxOp::FixedBaseScalarMul& obj, Serializer& serializer) +void serde::Serializable::serialize(const Program::BlackBoxOp::MultiScalarMul& obj, + Serializer& serializer) { - serde::Serializable::serialize(obj.low, serializer); - serde::Serializable::serialize(obj.high, serializer); - serde::Serializable::serialize(obj.result, serializer); + serde::Serializable::serialize(obj.points, serializer); + serde::Serializable::serialize(obj.scalars, serializer); + serde::Serializable::serialize(obj.outputs, serializer); } template <> template -Program::BlackBoxOp::FixedBaseScalarMul serde::Deserializable::deserialize( +Program::BlackBoxOp::MultiScalarMul serde::Deserializable::deserialize( Deserializer& deserializer) { - Program::BlackBoxOp::FixedBaseScalarMul obj; - obj.low = serde::Deserializable::deserialize(deserializer); - obj.high = serde::Deserializable::deserialize(deserializer); - obj.result = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - -inline bool operator==(const BlackBoxOp::VariableBaseScalarMul& lhs, const BlackBoxOp::VariableBaseScalarMul& rhs) -{ - if (!(lhs.point_x == rhs.point_x)) { - return false; - } - if (!(lhs.point_y == rhs.point_y)) { - return false; - } - if (!(lhs.scalar_low == rhs.scalar_low)) { - return false; - } - if (!(lhs.scalar_high == rhs.scalar_high)) { - return false; - } - if (!(lhs.result == rhs.result)) { - return false; - } - return true; -} - -inline std::vector BlackBoxOp::VariableBaseScalarMul::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - 
serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); -} - -inline BlackBoxOp::VariableBaseScalarMul BlackBoxOp::VariableBaseScalarMul::bincodeDeserialize( - std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize( - const Program::BlackBoxOp::VariableBaseScalarMul& obj, Serializer& serializer) -{ - serde::Serializable::serialize(obj.point_x, serializer); - serde::Serializable::serialize(obj.point_y, serializer); - serde::Serializable::serialize(obj.scalar_low, serializer); - serde::Serializable::serialize(obj.scalar_high, serializer); - serde::Serializable::serialize(obj.result, serializer); -} - -template <> -template -Program::BlackBoxOp::VariableBaseScalarMul serde::Deserializable< - Program::BlackBoxOp::VariableBaseScalarMul>::deserialize(Deserializer& deserializer) -{ - Program::BlackBoxOp::VariableBaseScalarMul obj; - obj.point_x = serde::Deserializable::deserialize(deserializer); - obj.point_y = serde::Deserializable::deserialize(deserializer); - obj.scalar_low = serde::Deserializable::deserialize(deserializer); - obj.scalar_high = serde::Deserializable::deserialize(deserializer); - obj.result = serde::Deserializable::deserialize(deserializer); + Program::BlackBoxOp::MultiScalarMul obj; + obj.points = serde::Deserializable::deserialize(deserializer); + obj.scalars = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); return obj; } @@ -5174,6 +5190,177 @@ Program::BlockId serde::Deserializable::deserialize(Deserializ namespace Program { +inline bool operator==(const BlockType& lhs, const BlockType& rhs) +{ + if (!(lhs.value == rhs.value)) { + return 
false; + } + return true; +} + +inline std::vector BlockType::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlockType BlockType::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType& obj, Serializer& serializer) +{ + serializer.increase_container_depth(); + serde::Serializable::serialize(obj.value, serializer); + serializer.decrease_container_depth(); +} + +template <> +template +Program::BlockType serde::Deserializable::deserialize(Deserializer& deserializer) +{ + deserializer.increase_container_depth(); + Program::BlockType obj; + obj.value = serde::Deserializable::deserialize(deserializer); + deserializer.decrease_container_depth(); + return obj; +} + +namespace Program { + +inline bool operator==(const BlockType::Memory& lhs, const BlockType::Memory& rhs) +{ + return true; +} + +inline std::vector BlockType::Memory::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlockType::Memory BlockType::Memory::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::Memory& obj, + Serializer& 
serializer) +{} + +template <> +template +Program::BlockType::Memory serde::Deserializable::deserialize(Deserializer& deserializer) +{ + Program::BlockType::Memory obj; + return obj; +} + +namespace Program { + +inline bool operator==(const BlockType::CallData& lhs, const BlockType::CallData& rhs) +{ + return true; +} + +inline std::vector BlockType::CallData::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlockType::CallData BlockType::CallData::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::CallData& obj, + Serializer& serializer) +{} + +template <> +template +Program::BlockType::CallData serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlockType::CallData obj; + return obj; +} + +namespace Program { + +inline bool operator==(const BlockType::ReturnData& lhs, const BlockType::ReturnData& rhs) +{ + return true; +} + +inline std::vector BlockType::ReturnData::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlockType::ReturnData BlockType::ReturnData::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void 
serde::Serializable::serialize(const Program::BlockType::ReturnData& obj, + Serializer& serializer) +{} + +template <> +template +Program::BlockType::ReturnData serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlockType::ReturnData obj; + return obj; +} + +namespace Program { + inline bool operator==(const BrilligBytecode& lhs, const BrilligBytecode& rhs) { if (!(lhs.bytecode == rhs.bytecode)) { @@ -7824,6 +8011,9 @@ inline bool operator==(const Opcode::MemoryInit& lhs, const Opcode::MemoryInit& if (!(lhs.init == rhs.init)) { return false; } + if (!(lhs.block_type == rhs.block_type)) { + return false; + } return true; } @@ -7853,6 +8043,7 @@ void serde::Serializable::serialize(const Program:: { serde::Serializable::serialize(obj.block_id, serializer); serde::Serializable::serialize(obj.init, serializer); + serde::Serializable::serialize(obj.block_type, serializer); } template <> @@ -7862,6 +8053,7 @@ Program::Opcode::MemoryInit serde::Deserializable:: Program::Opcode::MemoryInit obj; obj.block_id = serde::Deserializable::deserialize(deserializer); obj.init = serde::Deserializable::deserialize(deserializer); + obj.block_type = serde::Deserializable::deserialize(deserializer); return obj; } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp index aa520806b2b7..4b78a9550e71 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp @@ -36,6 +36,7 @@ TEST_F(Sha256Tests, TestSha256Compression) .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = { sha256_compression }, .schnorr_constraints = {}, @@ -48,8 +49,7 @@ TEST_F(Sha256Tests, TestSha256Compression) .pedersen_constraints = {}, .pedersen_hash_constraints = {}, 
.poseidon2_constraints = {}, - .fixed_base_scalar_mul_constraints = {}, - .variable_base_scalar_mul_constraints = {}, + .multi_scalar_mul_constraints = {}, .ec_add_constraints = {}, .recursion_constraints = {}, .bigint_from_le_bytes_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/variable_base_scalar_mul.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/variable_base_scalar_mul.cpp deleted file mode 100644 index 6446b68158c4..000000000000 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/variable_base_scalar_mul.cpp +++ /dev/null @@ -1,38 +0,0 @@ -#include "variable_base_scalar_mul.hpp" -#include "barretenberg/dsl/types.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" -#include "barretenberg/plonk_honk_shared/arithmetization/gate_data.hpp" - -namespace acir_format { - -template void create_variable_base_constraint(Builder& builder, const VariableBaseScalarMul& input) -{ - using cycle_group_ct = bb::stdlib::cycle_group; - using cycle_scalar_ct = typename bb::stdlib::cycle_group::cycle_scalar; - using field_ct = bb::stdlib::field_t; - - // We instantiate the input point/variable base as `cycle_group_ct` - auto point_x = field_ct::from_witness_index(&builder, input.point_x); - auto point_y = field_ct::from_witness_index(&builder, input.point_y); - cycle_group_ct input_point(point_x, point_y, false); - - // We reconstruct the scalar from the low and high limbs - field_ct scalar_low_as_field = field_ct::from_witness_index(&builder, input.scalar_low); - field_ct scalar_high_as_field = field_ct::from_witness_index(&builder, input.scalar_high); - cycle_scalar_ct scalar(scalar_low_as_field, scalar_high_as_field); - - // We multiply the scalar with input point/variable base to get the result - auto result = input_point * scalar; - - // Finally we add the constraints - builder.assert_equal(result.x.get_witness_index(), input.out_point_x); - 
builder.assert_equal(result.y.get_witness_index(), input.out_point_y); -} - -template void create_variable_base_constraint(UltraCircuitBuilder& builder, - const VariableBaseScalarMul& input); -template void create_variable_base_constraint(GoblinUltraCircuitBuilder& builder, - const VariableBaseScalarMul& input); - -} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/variable_base_scalar_mul.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/variable_base_scalar_mul.hpp deleted file mode 100644 index d903df2cb322..000000000000 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/variable_base_scalar_mul.hpp +++ /dev/null @@ -1,23 +0,0 @@ -#pragma once -#include "barretenberg/dsl/types.hpp" -#include "barretenberg/serialize/msgpack.hpp" -#include - -namespace acir_format { - -struct VariableBaseScalarMul { - uint32_t point_x; - uint32_t point_y; - uint32_t scalar_low; - uint32_t scalar_high; - uint32_t out_point_x; - uint32_t out_point_y; - - // for serialization, update with any new fields - MSGPACK_FIELDS(point_x, point_y, scalar_low, scalar_high, out_point_x, out_point_y); - friend bool operator==(VariableBaseScalarMul const& lhs, VariableBaseScalarMul const& rhs) = default; -}; - -template void create_variable_base_constraint(Builder& builder, const VariableBaseScalarMul& input); - -} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp index 95abffe5120e..2db0d13abf28 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp @@ -4,13 +4,12 @@ #include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" namespace bb::eccvm { - -static constexpr size_t NUM_SCALAR_BITS = 128; -static constexpr size_t WNAF_SLICE_BITS = 4; -static constexpr size_t NUM_WNAF_SLICES = (NUM_SCALAR_BITS + WNAF_SLICE_BITS - 1) / WNAF_SLICE_BITS; 
-static constexpr uint64_t WNAF_MASK = static_cast((1ULL << WNAF_SLICE_BITS) - 1ULL); -static constexpr size_t POINT_TABLE_SIZE = 1ULL << (WNAF_SLICE_BITS); -static constexpr size_t WNAF_SLICES_PER_ROW = 4; +static constexpr size_t NUM_SCALAR_BITS = 128; // The length of scalars handled by the ECCVVM +static constexpr size_t NUM_WNAF_DIGIT_BITS = 4; // Scalars are decompose into base 16 in wNAF form +static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = NUM_SCALAR_BITS / NUM_WNAF_DIGIT_BITS; // 32 +static constexpr uint64_t WNAF_MASK = static_cast((1ULL << NUM_WNAF_DIGIT_BITS) - 1ULL); +static constexpr size_t POINT_TABLE_SIZE = 1ULL << (NUM_WNAF_DIGIT_BITS); +static constexpr size_t WNAF_DIGITS_PER_ROW = 4; static constexpr size_t ADDITIONS_PER_ROW = 4; template struct VMOperation { @@ -39,7 +38,7 @@ template struct ScalarMul { uint32_t pc; uint256_t scalar; typename CycleGroup::affine_element base_point; - std::array wnaf_slices; + std::array wnaf_digits; bool wnaf_skew; // size bumped by 1 to record base_point.dbl() std::array precomputed_table; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp index b295133b12a2..7f49af86030b 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp @@ -24,11 +24,11 @@ class ECCVMCircuitBuilder { using AffineElement = typename CycleGroup::affine_element; static constexpr size_t NUM_SCALAR_BITS = bb::eccvm::NUM_SCALAR_BITS; - static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS; - static constexpr size_t NUM_WNAF_SLICES = bb::eccvm::NUM_WNAF_SLICES; + static constexpr size_t NUM_WNAF_DIGIT_BITS = bb::eccvm::NUM_WNAF_DIGIT_BITS; + static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR; static constexpr uint64_t WNAF_MASK = bb::eccvm::WNAF_MASK; static constexpr size_t POINT_TABLE_SIZE = 
bb::eccvm::POINT_TABLE_SIZE; - static constexpr size_t WNAF_SLICES_PER_ROW = bb::eccvm::WNAF_SLICES_PER_ROW; + static constexpr size_t WNAF_DIGITS_PER_ROW = bb::eccvm::WNAF_DIGITS_PER_ROW; static constexpr size_t ADDITIONS_PER_ROW = bb::eccvm::ADDITIONS_PER_ROW; using MSM = bb::eccvm::MSM; @@ -50,7 +50,8 @@ class ECCVMCircuitBuilder { /** * For input point [P], return { -15[P], -13[P], ..., -[P], [P], ..., 13[P], 15[P] } */ - const auto compute_precomputed_table = [](const AffineElement& base_point) { + const auto compute_precomputed_table = + [](const AffineElement& base_point) -> std::array { const auto d2 = Element(base_point).dbl(); std::array table; table[POINT_TABLE_SIZE] = d2; // need this for later @@ -69,10 +70,10 @@ class ECCVMCircuitBuilder { } return result; }; - const auto compute_wnaf_slices = [](uint256_t scalar) { - std::array output; + const auto compute_wnaf_digits = [](uint256_t scalar) -> std::array { + std::array output; int previous_slice = 0; - for (size_t i = 0; i < NUM_WNAF_SLICES; ++i) { + for (size_t i = 0; i < NUM_WNAF_DIGITS_PER_SCALAR; ++i) { // slice the scalar into 4-bit chunks, starting with the least significant bits uint64_t raw_slice = static_cast(scalar) & WNAF_MASK; @@ -86,19 +87,19 @@ class ECCVMCircuitBuilder { } else if (is_even) { // for other slices, if it's even, we add 1 to the slice value // and subtract 16 from the previous slice to preserve the total scalar sum - static constexpr int borrow_constant = static_cast(1ULL << WNAF_SLICE_BITS); + static constexpr int borrow_constant = static_cast(1ULL << NUM_WNAF_DIGIT_BITS); previous_slice -= borrow_constant; wnaf_slice += 1; } if (i > 0) { const size_t idx = i - 1; - output[NUM_WNAF_SLICES - idx - 1] = previous_slice; + output[NUM_WNAF_DIGITS_PER_SCALAR - idx - 1] = previous_slice; } previous_slice = wnaf_slice; // downshift raw_slice by 4 bits - scalar = scalar >> WNAF_SLICE_BITS; + scalar = scalar >> NUM_WNAF_DIGIT_BITS; } ASSERT(scalar == 0); @@ -108,8 +109,6 @@ class 
ECCVMCircuitBuilder { return output; }; - // a vector of MSMs = a vector of a vector of scalar muls - // each mul size_t msm_count = 0; size_t active_mul_count = 0; std::vector msm_opqueue_index; @@ -118,6 +117,7 @@ class ECCVMCircuitBuilder { const auto& raw_ops = op_queue->get_raw_ops(); size_t op_idx = 0; + // populate opqueue and mul indices for (const auto& op : raw_ops) { if (op.mul) { if (op.z1 != 0 || op.z2 != 0) { @@ -142,39 +142,38 @@ class ECCVMCircuitBuilder { msm_sizes.push_back(active_mul_count); msm_count++; } - std::vector msms_test(msm_count); + std::vector result(msm_count); for (size_t i = 0; i < msm_count; ++i) { - auto& msm = msms_test[i]; + auto& msm = result[i]; msm.resize(msm_sizes[i]); } run_loop_in_parallel(msm_opqueue_index.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - const size_t opqueue_index = msm_opqueue_index[i]; - const auto& op = raw_ops[opqueue_index]; + const auto& op = raw_ops[msm_opqueue_index[i]]; auto [msm_index, mul_index] = msm_mul_index[i]; if (op.z1 != 0) { - ASSERT(msms_test.size() > msm_index); - ASSERT(msms_test[msm_index].size() > mul_index); - msms_test[msm_index][mul_index] = (ScalarMul{ + ASSERT(result.size() > msm_index); + ASSERT(result[msm_index].size() > mul_index); + result[msm_index][mul_index] = (ScalarMul{ .pc = 0, .scalar = op.z1, .base_point = op.base_point, - .wnaf_slices = compute_wnaf_slices(op.z1), + .wnaf_digits = compute_wnaf_digits(op.z1), .wnaf_skew = (op.z1 & 1) == 0, .precomputed_table = compute_precomputed_table(op.base_point), }); mul_index++; } if (op.z2 != 0) { - ASSERT(msms_test.size() > msm_index); - ASSERT(msms_test[msm_index].size() > mul_index); + ASSERT(result.size() > msm_index); + ASSERT(result[msm_index].size() > mul_index); auto endo_point = AffineElement{ op.base_point.x * FF::cube_root_of_unity(), -op.base_point.y }; - msms_test[msm_index][mul_index] = (ScalarMul{ + result[msm_index][mul_index] = (ScalarMul{ .pc = 0, .scalar = op.z2, 
.base_point = endo_point, - .wnaf_slices = compute_wnaf_slices(op.z2), + .wnaf_digits = compute_wnaf_digits(op.z2), .wnaf_skew = (op.z2 & 1) == 0, .precomputed_table = compute_precomputed_table(endo_point), }); @@ -191,7 +190,7 @@ class ECCVMCircuitBuilder { // sumcheck relations that involve pc (if we did the other way around, starting at 1 and ending at num_muls, // we create a discontinuity in pc values between the last transcript row and the following empty row) uint32_t pc = num_muls; - for (auto& msm : msms_test) { + for (auto& msm : result) { for (auto& mul : msm) { mul.pc = pc; pc--; @@ -199,7 +198,7 @@ class ECCVMCircuitBuilder { } ASSERT(pc == 0); - return msms_test; + return result; } static std::vector get_flattened_scalar_muls(const std::vector& msms) diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.test.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.test.cpp index 253c1af019ce..b5cb7698360d 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.test.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.test.cpp @@ -39,7 +39,7 @@ TEST(ECCVMCircuitBuilderTests, BaseCase) op_queue->mul_accumulate(c, x); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -53,7 +53,7 @@ TEST(ECCVMCircuitBuilderTests, Add) op_queue->add_accumulate(a); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -68,7 +68,7 @@ TEST(ECCVMCircuitBuilderTests, Mul) op_queue->mul_accumulate(a, x); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -89,7 +89,7 @@ TEST(ECCVMCircuitBuilderTests, ShortMul) 
op_queue->eq_and_reset(); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -106,7 +106,7 @@ TEST(ECCVMCircuitBuilderTests, EqFails) op_queue->add_erroneous_equality_op_for_testing(); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, false); } @@ -117,7 +117,7 @@ TEST(ECCVMCircuitBuilderTests, EmptyRow) op_queue->empty_row_for_testing(); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -134,7 +134,7 @@ TEST(ECCVMCircuitBuilderTests, EmptyRowBetweenOps) op_queue->eq_and_reset(); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -150,7 +150,7 @@ TEST(ECCVMCircuitBuilderTests, EndWithEq) op_queue->eq_and_reset(); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -167,7 +167,7 @@ TEST(ECCVMCircuitBuilderTests, EndWithAdd) op_queue->add_accumulate(a); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -184,7 +184,7 @@ TEST(ECCVMCircuitBuilderTests, EndWithMul) op_queue->mul_accumulate(a, x); ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -202,7 +202,7 @@ TEST(ECCVMCircuitBuilderTests, EndWithNoop) op_queue->empty_row_for_testing(); ECCVMCircuitBuilder circuit{ op_queue }; 
- bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } @@ -240,6 +240,6 @@ TEST(ECCVMCircuitBuilderTests, MSM) compute_msms(j, op_queue); } ECCVMCircuitBuilder circuit{ op_queue }; - bool result = ECCVMTraceChecker::check(circuit); + bool result = ECCVMTraceChecker::check(circuit, &engine); EXPECT_EQ(result, true); } diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index e1828ca8fe4d..759353edb0a8 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -34,6 +34,7 @@ class ECCVMFlavor { using CommitmentKey = bb::CommitmentKey; using VerifierCommitmentKey = bb::VerifierCommitmentKey; using RelationSeparator = FF; + using MSM = bb::eccvm::MSM; static constexpr size_t NUM_WIRES = 74; @@ -358,6 +359,7 @@ class ECCVMFlavor { ProverPolynomials& operator=(ProverPolynomials&& o) noexcept = default; ~ProverPolynomials() = default; [[nodiscard]] size_t get_polynomial_size() const { return this->lagrange_first.size(); } + /** * @brief Returns the evaluations of all prover polynomials at one point on the boolean hypercube, which * represents one row in the execution trace. 
@@ -460,33 +462,28 @@ class ECCVMFlavor { */ ProverPolynomials(const CircuitBuilder& builder) { - const auto msms = builder.get_msms(); - const auto flattened_muls = builder.get_flattened_scalar_muls(msms); - - std::array, 2> point_table_read_counts; - const auto transcript_state = ECCVMTranscriptBuilder::compute_transcript_state( - builder.op_queue->get_raw_ops(), builder.get_number_of_muls()); - const auto precompute_table_state = ECCVMPrecomputedTablesBuilder::compute_precompute_state(flattened_muls); - const auto msm_state = ECCVMMSMMBuilder::compute_msm_state( - msms, point_table_read_counts, builder.get_number_of_muls(), builder.op_queue->get_num_msm_rows()); - - const size_t msm_size = msm_state.size(); - const size_t transcript_size = transcript_state.size(); - const size_t precompute_table_size = precompute_table_state.size(); - - const size_t num_rows = std::max(precompute_table_size, std::max(msm_size, transcript_size)); - - const auto num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); - size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 0 : 1)); + // compute rows for the three different sections of the ECCVM execution trace + const auto transcript_rows = + ECCVMTranscriptBuilder::compute_rows(builder.op_queue->get_raw_ops(), builder.get_number_of_muls()); + const std::vector msms = builder.get_msms(); + const auto point_table_rows = + ECCVMPointTablePrecomputationBuilder::compute_rows(CircuitBuilder::get_flattened_scalar_muls(msms)); + const auto [msm_rows, point_table_read_counts] = ECCVMMSMMBuilder::compute_rows( + msms, builder.get_number_of_muls(), builder.op_queue->get_num_msm_rows()); + + const size_t num_rows = std::max({ point_table_rows.size(), msm_rows.size(), transcript_rows.size() }); + const auto log_num_rows = static_cast(numeric::get_msb64(num_rows)); + const size_t dyadic_num_rows = 1UL << (log_num_rows + (1UL << log_num_rows == num_rows ? 
0 : 1)); + + // allocate polynomials; define lagrange and lookup read count polynomials for (auto& poly : get_all()) { - poly = Polynomial(num_rows_pow2); + poly = Polynomial(dyadic_num_rows); } lagrange_first[0] = 1; lagrange_second[1] = 1; lagrange_last[lagrange_last.size() - 1] = 1; - for (size_t i = 0; i < point_table_read_counts[0].size(); ++i) { - // Explanation of off-by-one offset + // Explanation of off-by-one offset: // When computing the WNAF slice for a point at point counter value `pc` and a round index `round`, the // row number that computes the slice can be derived. This row number is then mapped to the index of // `lookup_read_counts`. We do this mapping in `ecc_msm_relation`. We are off-by-one because we add an @@ -495,106 +492,109 @@ class ECCVMFlavor { lookup_read_counts_0[i + 1] = point_table_read_counts[0][i]; lookup_read_counts_1[i + 1] = point_table_read_counts[1][i]; } - run_loop_in_parallel(transcript_state.size(), [&](size_t start, size_t end) { + + // compute polynomials for transcript columns + run_loop_in_parallel(transcript_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - transcript_accumulator_empty[i] = transcript_state[i].accumulator_empty; - transcript_add[i] = transcript_state[i].q_add; - transcript_mul[i] = transcript_state[i].q_mul; - transcript_eq[i] = transcript_state[i].q_eq; - transcript_reset_accumulator[i] = transcript_state[i].q_reset_accumulator; - transcript_msm_transition[i] = transcript_state[i].msm_transition; - transcript_pc[i] = transcript_state[i].pc; - transcript_msm_count[i] = transcript_state[i].msm_count; - transcript_Px[i] = transcript_state[i].base_x; - transcript_Py[i] = transcript_state[i].base_y; - transcript_z1[i] = transcript_state[i].z1; - transcript_z2[i] = transcript_state[i].z2; - transcript_z1zero[i] = transcript_state[i].z1_zero; - transcript_z2zero[i] = transcript_state[i].z2_zero; - transcript_op[i] = transcript_state[i].opcode; - transcript_accumulator_x[i] 
= transcript_state[i].accumulator_x; - transcript_accumulator_y[i] = transcript_state[i].accumulator_y; - transcript_msm_x[i] = transcript_state[i].msm_output_x; - transcript_msm_y[i] = transcript_state[i].msm_output_y; - transcript_collision_check[i] = transcript_state[i].collision_check; + transcript_accumulator_empty[i] = transcript_rows[i].accumulator_empty; + transcript_add[i] = transcript_rows[i].q_add; + transcript_mul[i] = transcript_rows[i].q_mul; + transcript_eq[i] = transcript_rows[i].q_eq; + transcript_reset_accumulator[i] = transcript_rows[i].q_reset_accumulator; + transcript_msm_transition[i] = transcript_rows[i].msm_transition; + transcript_pc[i] = transcript_rows[i].pc; + transcript_msm_count[i] = transcript_rows[i].msm_count; + transcript_Px[i] = transcript_rows[i].base_x; + transcript_Py[i] = transcript_rows[i].base_y; + transcript_z1[i] = transcript_rows[i].z1; + transcript_z2[i] = transcript_rows[i].z2; + transcript_z1zero[i] = transcript_rows[i].z1_zero; + transcript_z2zero[i] = transcript_rows[i].z2_zero; + transcript_op[i] = transcript_rows[i].opcode; + transcript_accumulator_x[i] = transcript_rows[i].accumulator_x; + transcript_accumulator_y[i] = transcript_rows[i].accumulator_y; + transcript_msm_x[i] = transcript_rows[i].msm_output_x; + transcript_msm_y[i] = transcript_rows[i].msm_output_y; + transcript_collision_check[i] = transcript_rows[i].collision_check; } }); // TODO(@zac-williamson) if final opcode resets accumulator, all subsequent "is_accumulator_empty" row // values must be 1. 
Ideally we find a way to tweak this so that empty rows that do nothing have column // values that are all zero (issue #2217) - if (transcript_state[transcript_state.size() - 1].accumulator_empty == 1) { - for (size_t i = transcript_state.size(); i < num_rows_pow2; ++i) { + if (transcript_rows[transcript_rows.size() - 1].accumulator_empty) { + for (size_t i = transcript_rows.size(); i < dyadic_num_rows; ++i) { transcript_accumulator_empty[i] = 1; } } - run_loop_in_parallel(precompute_table_state.size(), [&](size_t start, size_t end) { + + // compute polynomials for point table columns + run_loop_in_parallel(point_table_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { // first row is always an empty row (to accommodate shifted polynomials which must have 0 as 1st - // coefficient). All other rows in the precompute_table_state represent active wnaf gates (i.e. + // coefficient). All other rows in the point_table_rows represent active wnaf gates (i.e. // precompute_select = 1) precompute_select[i] = (i != 0) ? 
1 : 0; - precompute_pc[i] = precompute_table_state[i].pc; - precompute_point_transition[i] = static_cast(precompute_table_state[i].point_transition); - precompute_round[i] = precompute_table_state[i].round; - precompute_scalar_sum[i] = precompute_table_state[i].scalar_sum; - - precompute_s1hi[i] = precompute_table_state[i].s1; - precompute_s1lo[i] = precompute_table_state[i].s2; - precompute_s2hi[i] = precompute_table_state[i].s3; - precompute_s2lo[i] = precompute_table_state[i].s4; - precompute_s3hi[i] = precompute_table_state[i].s5; - precompute_s3lo[i] = precompute_table_state[i].s6; - precompute_s4hi[i] = precompute_table_state[i].s7; - precompute_s4lo[i] = precompute_table_state[i].s8; + precompute_pc[i] = point_table_rows[i].pc; + precompute_point_transition[i] = static_cast(point_table_rows[i].point_transition); + precompute_round[i] = point_table_rows[i].round; + precompute_scalar_sum[i] = point_table_rows[i].scalar_sum; + precompute_s1hi[i] = point_table_rows[i].s1; + precompute_s1lo[i] = point_table_rows[i].s2; + precompute_s2hi[i] = point_table_rows[i].s3; + precompute_s2lo[i] = point_table_rows[i].s4; + precompute_s3hi[i] = point_table_rows[i].s5; + precompute_s3lo[i] = point_table_rows[i].s6; + precompute_s4hi[i] = point_table_rows[i].s7; + precompute_s4lo[i] = point_table_rows[i].s8; // If skew is active (i.e. we need to subtract a base point from the msm result), // write `7` into rows.precompute_skew. `7`, in binary representation, equals `-1` when converted // into WNAF form - precompute_skew[i] = precompute_table_state[i].skew ? 7 : 0; - - precompute_dx[i] = precompute_table_state[i].precompute_double.x; - precompute_dy[i] = precompute_table_state[i].precompute_double.y; - precompute_tx[i] = precompute_table_state[i].precompute_accumulator.x; - precompute_ty[i] = precompute_table_state[i].precompute_accumulator.y; + precompute_skew[i] = point_table_rows[i].skew ? 
7 : 0; + precompute_dx[i] = point_table_rows[i].precompute_double.x; + precompute_dy[i] = point_table_rows[i].precompute_double.y; + precompute_tx[i] = point_table_rows[i].precompute_accumulator.x; + precompute_ty[i] = point_table_rows[i].precompute_accumulator.y; } }); - run_loop_in_parallel(msm_state.size(), [&](size_t start, size_t end) { + // compute polynomials for the msm columns + run_loop_in_parallel(msm_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - msm_transition[i] = static_cast(msm_state[i].msm_transition); - msm_add[i] = static_cast(msm_state[i].q_add); - msm_double[i] = static_cast(msm_state[i].q_double); - msm_skew[i] = static_cast(msm_state[i].q_skew); - msm_accumulator_x[i] = msm_state[i].accumulator_x; - msm_accumulator_y[i] = msm_state[i].accumulator_y; - msm_pc[i] = msm_state[i].pc; - msm_size_of_msm[i] = msm_state[i].msm_size; - msm_count[i] = msm_state[i].msm_count; - msm_round[i] = msm_state[i].msm_round; - msm_add1[i] = static_cast(msm_state[i].add_state[0].add); - msm_add2[i] = static_cast(msm_state[i].add_state[1].add); - msm_add3[i] = static_cast(msm_state[i].add_state[2].add); - msm_add4[i] = static_cast(msm_state[i].add_state[3].add); - msm_x1[i] = msm_state[i].add_state[0].point.x; - msm_y1[i] = msm_state[i].add_state[0].point.y; - msm_x2[i] = msm_state[i].add_state[1].point.x; - msm_y2[i] = msm_state[i].add_state[1].point.y; - msm_x3[i] = msm_state[i].add_state[2].point.x; - msm_y3[i] = msm_state[i].add_state[2].point.y; - msm_x4[i] = msm_state[i].add_state[3].point.x; - msm_y4[i] = msm_state[i].add_state[3].point.y; - msm_collision_x1[i] = msm_state[i].add_state[0].collision_inverse; - msm_collision_x2[i] = msm_state[i].add_state[1].collision_inverse; - msm_collision_x3[i] = msm_state[i].add_state[2].collision_inverse; - msm_collision_x4[i] = msm_state[i].add_state[3].collision_inverse; - msm_lambda1[i] = msm_state[i].add_state[0].lambda; - msm_lambda2[i] = msm_state[i].add_state[1].lambda; - 
msm_lambda3[i] = msm_state[i].add_state[2].lambda; - msm_lambda4[i] = msm_state[i].add_state[3].lambda; - msm_slice1[i] = msm_state[i].add_state[0].slice; - msm_slice2[i] = msm_state[i].add_state[1].slice; - msm_slice3[i] = msm_state[i].add_state[2].slice; - msm_slice4[i] = msm_state[i].add_state[3].slice; + msm_transition[i] = static_cast(msm_rows[i].msm_transition); + msm_add[i] = static_cast(msm_rows[i].q_add); + msm_double[i] = static_cast(msm_rows[i].q_double); + msm_skew[i] = static_cast(msm_rows[i].q_skew); + msm_accumulator_x[i] = msm_rows[i].accumulator_x; + msm_accumulator_y[i] = msm_rows[i].accumulator_y; + msm_pc[i] = msm_rows[i].pc; + msm_size_of_msm[i] = msm_rows[i].msm_size; + msm_count[i] = msm_rows[i].msm_count; + msm_round[i] = msm_rows[i].msm_round; + msm_add1[i] = static_cast(msm_rows[i].add_state[0].add); + msm_add2[i] = static_cast(msm_rows[i].add_state[1].add); + msm_add3[i] = static_cast(msm_rows[i].add_state[2].add); + msm_add4[i] = static_cast(msm_rows[i].add_state[3].add); + msm_x1[i] = msm_rows[i].add_state[0].point.x; + msm_y1[i] = msm_rows[i].add_state[0].point.y; + msm_x2[i] = msm_rows[i].add_state[1].point.x; + msm_y2[i] = msm_rows[i].add_state[1].point.y; + msm_x3[i] = msm_rows[i].add_state[2].point.x; + msm_y3[i] = msm_rows[i].add_state[2].point.y; + msm_x4[i] = msm_rows[i].add_state[3].point.x; + msm_y4[i] = msm_rows[i].add_state[3].point.y; + msm_collision_x1[i] = msm_rows[i].add_state[0].collision_inverse; + msm_collision_x2[i] = msm_rows[i].add_state[1].collision_inverse; + msm_collision_x3[i] = msm_rows[i].add_state[2].collision_inverse; + msm_collision_x4[i] = msm_rows[i].add_state[3].collision_inverse; + msm_lambda1[i] = msm_rows[i].add_state[0].lambda; + msm_lambda2[i] = msm_rows[i].add_state[1].lambda; + msm_lambda3[i] = msm_rows[i].add_state[2].lambda; + msm_lambda4[i] = msm_rows[i].add_state[3].lambda; + msm_slice1[i] = msm_rows[i].add_state[0].slice; + msm_slice2[i] = msm_rows[i].add_state[1].slice; + msm_slice3[i] = 
msm_rows[i].add_state[2].slice; + msm_slice4[i] = msm_rows[i].add_state[3].slice; } }); this->set_shifted(); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_trace_checker.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_trace_checker.cpp index c8acc2400bea..24c440f33037 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_trace_checker.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_trace_checker.cpp @@ -9,10 +9,10 @@ using Builder = typename ECCVMFlavor::CircuitBuilder; using FF = typename ECCVMFlavor::FF; using ProverPolynomials = typename ECCVMFlavor::ProverPolynomials; -bool ECCVMTraceChecker::check(Builder& builder) +bool ECCVMTraceChecker::check(Builder& builder, numeric::RNG* engine_ptr) { - const FF gamma = FF::random_element(); - const FF beta = FF::random_element(); + const FF gamma = FF::random_element(engine_ptr); + const FF beta = FF::random_element(engine_ptr); const FF beta_sqr = beta.sqr(); const FF beta_cube = beta_sqr * beta; auto eccvm_set_permutation_delta = diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_trace_checker.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_trace_checker.hpp index 1dc84fb33e88..fdb918dee41b 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_trace_checker.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_trace_checker.hpp @@ -4,6 +4,6 @@ namespace bb { class ECCVMTraceChecker { public: - static bool check(ECCVMCircuitBuilder&); + static bool check(ECCVMCircuitBuilder&, numeric::RNG* engine_ptr = nullptr); }; } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp index 3be74f357aa1..69f4871eb91d 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp @@ -13,13 +13,15 @@ class ECCVMMSMMBuilder { using FF = curve::Grumpkin::ScalarField; using Element = typename 
CycleGroup::element; using AffineElement = typename CycleGroup::affine_element; + using MSM = bb::eccvm::MSM; static constexpr size_t ADDITIONS_PER_ROW = bb::eccvm::ADDITIONS_PER_ROW; - static constexpr size_t NUM_SCALAR_BITS = bb::eccvm::NUM_SCALAR_BITS; - static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS; + static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR; - struct alignas(64) MSMState { + struct alignas(64) MSMRow { + // counter over all half-length scalar muls used to compute the required MSMs uint32_t pc = 0; + // the number of points that will be scaled and summed uint32_t msm_size = 0; uint32_t msm_count = 0; uint32_t msm_round = 0; @@ -43,138 +45,138 @@ class ECCVMMSMMBuilder { FF accumulator_y = 0; }; - struct alignas(64) MSMRowTranscript { - std::array lambda_numerator; - std::array lambda_denominator; - Element accumulator_in; - Element accumulator_out; - }; - - struct alignas(64) AdditionTrace { - Element p1; - Element p2; - Element p3; - bool predicate; - bool is_double; - }; - /** * @brief Computes the row values for the Straus MSM columns of the ECCVM. * * For a detailed description of the Straus algorithm and its relation to the ECCVM, please see * https://hackmd.io/@aztec-network/rJ5xhuCsn * - * @param msms - * @param point_table_read_counts - * @param total_number_of_muls - * @return std::vector + * @param msms A vector of vectors of ScalarMuls. + * @param point_table_read_counts Table of read counts to be populated. + * @param total_number_of_muls A mul op in the OpQueue adds up to two muls, one for each nonzero z_i (i=1,2). 
+ * @param num_msm_rows + * @return std::vector */ - static std::vector compute_msm_state(const std::vector>& msms, - std::array, 2>& point_table_read_counts, - const uint32_t total_number_of_muls, - const size_t num_msm_rows) + static std::tuple, std::array, 2>> compute_rows( + const std::vector& msms, const uint32_t total_number_of_muls, const size_t num_msm_rows) { - // N.B. the following comments refer to a "point lookup table" frequently. - // To perform a scalar multiplicaiton of a point [P] by a scalar x, we compute multiples of [P] and store in a - // table: specifically: -15[P], -13[P], ..., -3[P], -[P], [P], 3[P], ..., 15[P] when we define our point lookup - // table, we have 2 write columns and 4 read columns when we perform a read on a given row, we need to increment - // the read count on the respective write column by 1 we can define the following struture: 1st write column = - // positive 2nd write column = negative the row number is a function of pc and slice value row = pc_delta * - // rows_per_point_table + some function of the slice value pc_delta = total_number_of_muls - pc - // std::vector point_table_read_counts; - const size_t table_rows = static_cast(total_number_of_muls) * 8; - point_table_read_counts[0].reserve(table_rows); - point_table_read_counts[1].reserve(table_rows); - for (size_t i = 0; i < table_rows; ++i) { + // To perform a scalar multiplication of a point P by a scalar x, we precompute a table of points + // -15P, -13P, ..., -3P, -P, P, 3P, ..., 15P + // When we perform a scalar multiplication, we decompose x into base-16 wNAF digits then look these precomputed + // values up with digit-by-digit. 
We record read counts in a table with the following structure: + // 1st write column = positive wNAF digits + // 2nd write column = negative wNAF digits + // the row number is a function of pc and wnaf digit: + // point_idx = total_number_of_muls - pc + // row = point_idx * rows_per_point_table + (some function of the slice value) + // + // Illustration: + // Block Structure Table structure: + // | 0 | 1 | | Block_{0} | <-- pc = total_number_of_muls + // | - | - | | Block_{1} | <-- pc = total_number_of_muls-(num muls in msm 0) + // 1 | # | # | -1 | ... | ... + // 3 | # | # | -3 | Block_{total_number_of_muls-1} | <-- pc = num muls in last msm + // 5 | # | # | -5 + // 7 | # | # | -7 + // 9 | # | # | -9 + // 11 | # | # | -11 + // 13 | # | # | -13 + // 15 | # | # | -15 + + const size_t num_rows_in_read_counts_table = + static_cast(total_number_of_muls) * (eccvm::POINT_TABLE_SIZE >> 1); + std::array, 2> point_table_read_counts; + point_table_read_counts[0].reserve(num_rows_in_read_counts_table); + point_table_read_counts[1].reserve(num_rows_in_read_counts_table); + for (size_t i = 0; i < num_rows_in_read_counts_table; ++i) { point_table_read_counts[0].emplace_back(0); point_table_read_counts[1].emplace_back(0); } - const auto update_read_counts = [&](const size_t pc, const int slice) { - // When we compute our wnaf/point tables, we start with the point with the largest pc value. - // i.e. if we are reading a slice for point with a point counter value `pc`, - // its position in the wnaf/point table (relative to other points) will be `total_number_of_muls - pc` - const size_t pc_delta = total_number_of_muls - pc; - const size_t pc_offset = pc_delta * 8; - bool slice_negative = slice < 0; - const int slice_row = (slice + 15) / 2; - - const size_t column_index = slice_negative ? 
1 : 0; + const auto update_read_count = [&point_table_read_counts](const size_t point_idx, const int slice) { /** - * When computing `point_table_read_counts`, we need the *table index* that a given point belongs to. - * the slice value is in *compressed* windowed-non-adjacent-form format: - * A non-compressed WNAF slice is in the range: `-15, -13, ..., 15` - * In compressed form, tney become `0, ..., 15` + * The wNAF digits for base 16 lie in the range -15, -13, ..., 13, 15. * The *point table* format is the following: - * (for positive point table) T[0] = P, T[1] = PT, ..., T[7] = 15P + * (for positive point table) T[0] = P, T[1] = 3P, ..., T[7] = 15P * (for negative point table) T[0] = -P, T[1] = -3P, ..., T[15] = -15P * i.e. if the slice value is negative, we can use the compressed WNAF directly as the table index - * if the slice value is positive, we must take `15 - compressedWNAF` to get the table index + * if the slice value is positive, we must take 15 - (compressed wNAF) to get the table index */ - if (slice_negative) { - point_table_read_counts[column_index][pc_offset + static_cast(slice_row)]++; + const size_t row_index_offset = point_idx * 8; + const bool digit_is_negative = slice < 0; + const auto relative_row_idx = static_cast((slice + 15) / 2); + const size_t column_index = digit_is_negative ? 1 : 0; + + if (digit_is_negative) { + point_table_read_counts[column_index][row_index_offset + relative_row_idx]++; } else { - point_table_read_counts[column_index][pc_offset + 15 - static_cast(slice_row)]++; + point_table_read_counts[column_index][row_index_offset + 15 - relative_row_idx]++; } }; // compute which row index each multiscalar multiplication will start at. - // also compute the program counter index that each multiscalar multiplication will start at. 
- // we use this information to populate the MSM row data across multiple threads - std::vector msm_row_indices; - std::vector pc_indices; - msm_row_indices.reserve(msms.size() + 1); - pc_indices.reserve(msms.size() + 1); - - msm_row_indices.push_back(1); - pc_indices.push_back(total_number_of_muls); + std::vector msm_row_counts; + msm_row_counts.reserve(msms.size() + 1); + msm_row_counts.push_back(1); + // compute the program counter (i.e. the index among all single scalar muls) that each multiscalar + // multiplication will start at. + std::vector pc_values; + pc_values.reserve(msms.size() + 1); + pc_values.push_back(total_number_of_muls); for (const auto& msm : msms) { - const size_t rows = ECCOpQueue::get_msm_row_count_for_single_msm(msm.size()); - msm_row_indices.push_back(msm_row_indices.back() + rows); - pc_indices.push_back(pc_indices.back() - msm.size()); + const size_t num_rows_required = ECCOpQueue::num_eccvm_msm_rows(msm.size()); + msm_row_counts.push_back(msm_row_counts.back() + num_rows_required); + pc_values.push_back(pc_values.back() - msm.size()); } + ASSERT(pc_values.back() == 0); - static constexpr size_t num_rounds = NUM_SCALAR_BITS / WNAF_SLICE_BITS; - std::vector msm_state(num_msm_rows); - // start with empty row (shiftable polynomials must have 0 as first coefficient) - msm_state[0] = (MSMState{}); + // compute the MSM rows + std::vector msm_rows(num_msm_rows); + // start with empty row (shiftable polynomials must have 0 as first coefficient) + msm_rows[0] = (MSMRow{}); // compute "read counts" so that we can determine the number of times entries in our log-derivative lookup // tables are called. - // Note: this part is single-threaded. THe amount of compute is low, however, so this is likely not a big + // Note: this part is single-threaded. The amount of compute is low, however, so this is likely not a big // concern. 
- for (size_t i = 0; i < msms.size(); ++i) { - - for (size_t j = 0; j < num_rounds; ++j) { - uint32_t pc = static_cast(pc_indices[i]); - const auto& msm = msms[i]; + for (size_t msm_idx = 0; msm_idx < msms.size(); ++msm_idx) { + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + auto pc = static_cast(pc_values[msm_idx]); + const auto& msm = msms[msm_idx]; const size_t msm_size = msm.size(); - const size_t rows_per_round = - (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 1 : 0); - - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - bool add = points_per_row > m; + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 1 : 0); + + for (size_t relative_row_idx = 0; relative_row_idx < num_rows_per_digit; ++relative_row_idx) { + const size_t num_points_in_row = (relative_row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + const size_t offset = relative_row_idx * ADDITIONS_PER_ROW; + for (size_t relative_point_idx = 0; relative_point_idx < ADDITIONS_PER_ROW; ++relative_point_idx) { + const size_t point_idx = offset + relative_point_idx; + const bool add = num_points_in_row > relative_point_idx; if (add) { - int slice = add ? msm[idx + m].wnaf_slices[j] : 0; - update_read_counts(pc - idx - m, slice); + int slice = msm[point_idx].wnaf_digits[digit_idx]; + // pc starts at total_number_of_muls and decreses non-uniformly to 0 + update_read_count((total_number_of_muls - pc) + point_idx, slice); } } } - if (j == num_rounds - 1) { - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? 
msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; - for (size_t m = 0; m < 4; ++m) { - bool add = points_per_row > m; - + if (digit_idx == NUM_WNAF_DIGITS_PER_SCALAR - 1) { + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + const size_t offset = row_idx * ADDITIONS_PER_ROW; + for (size_t relative_point_idx = 0; relative_point_idx < ADDITIONS_PER_ROW; + ++relative_point_idx) { + bool add = num_points_in_row > relative_point_idx; + const size_t point_idx = offset + relative_point_idx; if (add) { - update_read_counts(pc - idx - m, msm[idx + m].wnaf_skew ? -1 : -15); + // pc starts at total_number_of_muls and decreses non-uniformly to 0 + int slice = msm[point_idx].wnaf_skew ? -1 : -15; + update_read_count((total_number_of_muls - pc) + point_idx, slice); } } } @@ -184,183 +186,183 @@ class ECCVMMSMMBuilder { // The execution trace data for the MSM columns requires knowledge of intermediate values from *affine* point // addition. The naive solution to compute this data requires 2 field inversions per in-circuit group addition - // evaluation. This is bad! To avoid this, we split the witness computation algorithm into 3 steps. Step 1: - // compute the execution trace group operations in *projective* coordinates Step 2: use batch inversion trick to - // convert all point traces into affine coordinates Step 3: populate the full execution trace, including the - // intermediate values from affine group operations This section sets up the data structures we need to store - // all intermediate ECC operations in projective form + // evaluation. This is bad! To avoid this, we split the witness computation algorithm into 3 steps. 
+ // Step 1: compute the execution trace group operations in *projective* coordinates + // Step 2: use batch inversion trick to convert all points into affine coordinates + // Step 3: populate the full execution trace, including the intermediate values from affine group operations + // This section sets up the data structures we need to store all intermediate ECC operations in projective form const size_t num_point_adds_and_doubles = (num_msm_rows - 2) * 4; const size_t num_accumulators = num_msm_rows - 1; - const size_t num_points_in_trace = (num_point_adds_and_doubles * 3) + num_accumulators; + // In what fallows, either p1 + p2 = p3, or p1.dbl() = p3 // We create 1 vector to store the entire point trace. We split into multiple containers using std::span // (we want 1 vector object to more efficiently batch normalize points) - std::vector point_trace(num_points_in_trace); - // the point traces record group operations. Either p1 + p2 = p3, or p1.dbl() = p3 - std::span p1_trace(&point_trace[0], num_point_adds_and_doubles); - std::span p2_trace(&point_trace[num_point_adds_and_doubles], num_point_adds_and_doubles); - std::span p3_trace(&point_trace[num_point_adds_and_doubles * 2], num_point_adds_and_doubles); + static constexpr size_t NUM_POINTS_IN_ADDITION_RELATION = 3; + const size_t num_points_to_normalize = + (num_point_adds_and_doubles * NUM_POINTS_IN_ADDITION_RELATION) + num_accumulators; + std::vector points_to_normalize(num_points_to_normalize); + std::span p1_trace(&points_to_normalize[0], num_point_adds_and_doubles); + std::span p2_trace(&points_to_normalize[num_point_adds_and_doubles], num_point_adds_and_doubles); + std::span p3_trace(&points_to_normalize[num_point_adds_and_doubles * 2], num_point_adds_and_doubles); // operation_trace records whether an entry in the p1/p2/p3 trace represents a point addition or doubling std::vector operation_trace(num_point_adds_and_doubles); // accumulator_trace tracks the value of the ECCVM accumulator for each row - 
std::span accumulator_trace(&point_trace[num_point_adds_and_doubles * 3], num_accumulators); + std::span accumulator_trace(&points_to_normalize[num_point_adds_and_doubles * 3], num_accumulators); // we start the accumulator at the point at infinity accumulator_trace[0] = (CycleGroup::affine_point_at_infinity); - // populate point trace data, and the components of the MSM execution trace that do not relate to affine point + // TODO(https://github.com/AztecProtocol/barretenberg/issues/973): Reinstate multitreading? + // populate point trace, and the components of the MSM execution trace that do not relate to affine point // operations - run_loop_in_parallel(msms.size(), [&](size_t start, size_t end) { - for (size_t i = start; i < end; i++) { - Element accumulator = CycleGroup::affine_point_at_infinity; - const auto& msm = msms[i]; - size_t msm_row_index = msm_row_indices[i]; - const size_t msm_size = msm.size(); - const size_t rows_per_round = - (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 1 : 0); - size_t trace_index = (msm_row_indices[i] - 1) * 4; - - for (size_t j = 0; j < num_rounds; ++j) { - const uint32_t pc = static_cast(pc_indices[i]); - - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - auto& row = msm_state[msm_row_index]; - const size_t idx = k * ADDITIONS_PER_ROW; - row.msm_transition = (j == 0) && (k == 0); - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - - auto& add_state = row.add_state[m]; - add_state.add = points_per_row > m; - int slice = add_state.add ? msm[idx + m].wnaf_slices[j] : 0; - // In the MSM columns in the ECCVM circuit, we can add up to 4 points per row. 
- // if `row.add_state[m].add = 1`, this indicates that we want to add the `m`'th point in - // the MSM columns into the MSM accumulator `add_state.slice` = A 4-bit WNAF slice of - // the scalar multiplier associated with the point we are adding (the specific slice - // chosen depends on the value of msm_round) (WNAF = windowed-non-adjacent-form. Value - // range is `-15, -13, - // ..., 15`) If `add_state.add = 1`, we want `add_state.slice` to be the *compressed* - // form of the WNAF slice value. (compressed = no gaps in the value range. i.e. -15, - // -13, ..., 15 maps to 0, ... , 15) - add_state.slice = add_state.add ? (slice + 15) / 2 : 0; - add_state.point = add_state.add - ? msm[idx + m].precomputed_table[static_cast(add_state.slice)] - : AffineElement{ 0, 0 }; - - // predicate logic: - // add_predicate should normally equal add_state.add - // However! if j == 0 AND k == 0 AND m == 0 this implies we are examing the 1st point - // addition of a new MSM In this case, we do NOT add the 1st point into the accumulator, - // instead we SET the accumulator to equal the 1st point. add_predicate is used to - // determine whether we add the output of a point addition into the accumulator, - // therefore if j == 0 AND k == 0 AND m == 0, add_predicate = 0 even if add_state.add = - // true - bool add_predicate = (m == 0 ? (j != 0 || k != 0) : add_state.add); - - Element p1 = (m == 0) ? Element(add_state.point) : accumulator; - Element p2 = (m == 0) ? accumulator : Element(add_state.point); - - accumulator = add_predicate ? (accumulator + add_state.point) : Element(p1); + for (size_t msm_idx = 0; msm_idx < msms.size(); msm_idx++) { + Element accumulator = CycleGroup::affine_point_at_infinity; + const auto& msm = msms[msm_idx]; + size_t msm_row_index = msm_row_counts[msm_idx]; + const size_t msm_size = msm.size(); + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 
1 : 0); + size_t trace_index = (msm_row_counts[msm_idx] - 1) * 4; + + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + const auto pc = static_cast(pc_values[msm_idx]); + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + auto& row = msm_rows[msm_row_index]; + const size_t offset = row_idx * ADDITIONS_PER_ROW; + row.msm_transition = (digit_idx == 0) && (row_idx == 0); + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + + auto& add_state = row.add_state[point_idx]; + add_state.add = num_points_in_row > point_idx; + int slice = add_state.add ? msm[offset + point_idx].wnaf_digits[digit_idx] : 0; + // In the MSM columns in the ECCVM circuit, we can add up to 4 points per row. + // if `row.add_state[point_idx].add = 1`, this indicates that we want to add the + // `point_idx`'th point in the MSM columns into the MSM accumulator `add_state.slice` = A + // 4-bit WNAF slice of the scalar multiplier associated with the point we are adding (the + // specific slice chosen depends on the value of msm_round) (WNAF = + // windowed-non-adjacent-form. Value range is `-15, -13, + // ..., 15`) If `add_state.add = 1`, we want `add_state.slice` to be the *compressed* + // form of the WNAF slice value. (compressed = no gaps in the value range. i.e. -15, + // -13, ..., 15 maps to 0, ... , 15) + add_state.slice = add_state.add ? (slice + 15) / 2 : 0; + add_state.point = + add_state.add + ? msm[offset + point_idx].precomputed_table[static_cast(add_state.slice)] + : AffineElement{ 0, 0 }; + + // predicate logic: + // add_predicate should normally equal add_state.add + // However! if digit_idx == 0 AND row_idx == 0 AND point_idx == 0 this implies we are + // examing the 1st point addition of a new MSM. 
In this case, we do NOT add the 1st point + // into the accumulator, instead we SET the accumulator to equal the 1st point. + // add_predicate is used to determine whether we add the output of a point addition into the + // accumulator, therefore if digit_idx == 0 AND row_idx == 0 AND point_idx == 0, + // add_predicate = 0 even if add_state.add = true + bool add_predicate = (point_idx == 0 ? (digit_idx != 0 || row_idx != 0) : add_state.add); + + Element p1 = (point_idx == 0) ? Element(add_state.point) : accumulator; + Element p2 = (point_idx == 0) ? accumulator : Element(add_state.point); + + accumulator = add_predicate ? (accumulator + add_state.point) : Element(p1); + p1_trace[trace_index] = p1; + p2_trace[trace_index] = p2; + p3_trace[trace_index] = accumulator; + operation_trace[trace_index] = false; + trace_index++; + } + accumulator_trace[msm_row_index] = accumulator; + row.q_add = true; + row.q_double = false; + row.q_skew = false; + row.msm_round = static_cast(digit_idx); + row.msm_size = static_cast(msm_size); + row.msm_count = static_cast(offset); + row.pc = pc; + msm_row_index++; + } + // doubling + if (digit_idx < NUM_WNAF_DIGITS_PER_SCALAR - 1) { + auto& row = msm_rows[msm_row_index]; + row.msm_transition = false; + row.msm_round = static_cast(digit_idx + 1); + row.msm_size = static_cast(msm_size); + row.msm_count = static_cast(0); + row.q_add = false; + row.q_double = true; + row.q_skew = false; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + add_state.add = false; + add_state.slice = 0; + add_state.point = { 0, 0 }; + add_state.collision_inverse = 0; + + p1_trace[trace_index] = accumulator; + p2_trace[trace_index] = accumulator; + accumulator = accumulator.dbl(); + p3_trace[trace_index] = accumulator; + operation_trace[trace_index] = true; + trace_index++; + } + accumulator_trace[msm_row_index] = accumulator; + msm_row_index++; + } else { + for (size_t row_idx = 0; row_idx < 
num_rows_per_digit; ++row_idx) { + auto& row = msm_rows[msm_row_index]; + + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? msm_size % ADDITIONS_PER_ROW + : ADDITIONS_PER_ROW; + const size_t offset = row_idx * ADDITIONS_PER_ROW; + row.msm_transition = false; + Element acc_expected = accumulator; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + add_state.add = num_points_in_row > point_idx; + add_state.slice = add_state.add ? msm[offset + point_idx].wnaf_skew ? 7 : 0 : 0; + + add_state.point = + add_state.add + ? msm[offset + point_idx].precomputed_table[static_cast(add_state.slice)] + : AffineElement{ 0, 0 }; + bool add_predicate = add_state.add ? msm[offset + point_idx].wnaf_skew : false; + auto p1 = accumulator; + accumulator = add_predicate ? accumulator + add_state.point : accumulator; p1_trace[trace_index] = p1; - p2_trace[trace_index] = p2; + p2_trace[trace_index] = add_state.point; p3_trace[trace_index] = accumulator; operation_trace[trace_index] = false; trace_index++; } - accumulator_trace[msm_row_index] = accumulator; - row.q_add = true; + row.q_add = false; row.q_double = false; - row.q_skew = false; - row.msm_round = static_cast(j); + row.q_skew = true; + row.msm_round = static_cast(digit_idx + 1); row.msm_size = static_cast(msm_size); - row.msm_count = static_cast(idx); + row.msm_count = static_cast(offset); row.pc = pc; - msm_row_index++; - } - // doubling - if (j < num_rounds - 1) { - auto& row = msm_state[msm_row_index]; - row.msm_transition = false; - row.msm_round = static_cast(j + 1); - row.msm_size = static_cast(msm_size); - row.msm_count = static_cast(0); - row.q_add = false; - row.q_double = true; - row.q_skew = false; - for (size_t m = 0; m < 4; ++m) { - - auto& add_state = row.add_state[m]; - add_state.add = false; - add_state.slice = 0; - add_state.point = { 0, 0 }; - add_state.collision_inverse = 0; - - p1_trace[trace_index] = 
accumulator; - p2_trace[trace_index] = accumulator; - accumulator = accumulator.dbl(); - p3_trace[trace_index] = accumulator; - operation_trace[trace_index] = true; - trace_index++; - } accumulator_trace[msm_row_index] = accumulator; msm_row_index++; - } else { - for (size_t k = 0; k < rows_per_round; ++k) { - auto& row = msm_state[msm_row_index]; - - const size_t points_per_row = (k + 1) * ADDITIONS_PER_ROW > msm_size - ? msm_size % ADDITIONS_PER_ROW - : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; - row.msm_transition = false; - - Element acc_expected = accumulator; - - for (size_t m = 0; m < 4; ++m) { - auto& add_state = row.add_state[m]; - add_state.add = points_per_row > m; - add_state.slice = add_state.add ? msm[idx + m].wnaf_skew ? 7 : 0 : 0; - - add_state.point = - add_state.add ? msm[idx + m].precomputed_table[static_cast(add_state.slice)] - : AffineElement{ 0, 0 }; - bool add_predicate = add_state.add ? msm[idx + m].wnaf_skew : false; - auto p1 = accumulator; - accumulator = add_predicate ? accumulator + add_state.point : accumulator; - p1_trace[trace_index] = p1; - p2_trace[trace_index] = add_state.point; - p3_trace[trace_index] = accumulator; - operation_trace[trace_index] = false; - trace_index++; - } - row.q_add = false; - row.q_double = false; - row.q_skew = true; - row.msm_round = static_cast(j + 1); - row.msm_size = static_cast(msm_size); - row.msm_count = static_cast(idx); - row.pc = pc; - accumulator_trace[msm_row_index] = accumulator; - msm_row_index++; - } } } } - }); + } // Normalize the points in the point trace - run_loop_in_parallel(point_trace.size(), [&](size_t start, size_t end) { - Element::batch_normalize(&point_trace[start], end - start); + run_loop_in_parallel(points_to_normalize.size(), [&](size_t start, size_t end) { + Element::batch_normalize(&points_to_normalize[start], end - start); }); // inverse_trace is used to compute the value of the `collision_inverse` column in the ECCVM. 
std::vector inverse_trace(num_point_adds_and_doubles); run_loop_in_parallel(num_point_adds_and_doubles, [&](size_t start, size_t end) { - for (size_t i = start; i < end; ++i) { - if (operation_trace[i]) { - inverse_trace[i] = (p1_trace[i].y + p1_trace[i].y); + for (size_t operation_idx = start; operation_idx < end; ++operation_idx) { + if (operation_trace[operation_idx]) { + inverse_trace[operation_idx] = (p1_trace[operation_idx].y + p1_trace[operation_idx].y); } else { - inverse_trace[i] = (p2_trace[i].x - p1_trace[i].x); + inverse_trace[operation_idx] = (p2_trace[operation_idx].x - p1_trace[operation_idx].x); } } FF::batch_invert(&inverse_trace[start], end - start); @@ -369,30 +371,70 @@ class ECCVMMSMMBuilder { // complete the computation of the ECCVM execution trace, by adding the affine intermediate point data // i.e. row.accumulator_x, row.accumulator_y, row.add_state[0...3].collision_inverse, // row.add_state[0...3].lambda - run_loop_in_parallel(msms.size(), [&](size_t start, size_t end) { - for (size_t i = start; i < end; i++) { - const auto& msm = msms[i]; - size_t trace_index = ((msm_row_indices[i] - 1) * ADDITIONS_PER_ROW); - size_t msm_row_index = msm_row_indices[i]; - // 1st MSM row will have accumulator equal to the previous MSM output - // (or point at infinity for 1st MSM) - size_t accumulator_index = msm_row_indices[i] - 1; - const size_t msm_size = msm.size(); - const size_t rows_per_round = - (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 
1 : 0); + for (size_t msm_idx = 0; msm_idx < msms.size(); msm_idx++) { + const auto& msm = msms[msm_idx]; + size_t trace_index = ((msm_row_counts[msm_idx] - 1) * ADDITIONS_PER_ROW); + size_t msm_row_index = msm_row_counts[msm_idx]; + // 1st MSM row will have accumulator equal to the previous MSM output + // (or point at infinity for 1st MSM) + size_t accumulator_index = msm_row_counts[msm_idx] - 1; + const size_t msm_size = msm.size(); + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 1 : 0); + + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + auto& row = msm_rows[msm_row_index]; + const Element& normalized_accumulator = accumulator_trace[accumulator_index]; + const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; + const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; + row.accumulator_x = acc_x; + row.accumulator_y = acc_y; + + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + bool add_predicate = (point_idx == 0 ? (digit_idx != 0 || row_idx != 0) : add_state.add); + + const auto& inverse = inverse_trace[trace_index]; + const auto& p1 = p1_trace[trace_index]; + const auto& p2 = p2_trace[trace_index]; + add_state.collision_inverse = add_predicate ? inverse : 0; + add_state.lambda = add_predicate ? 
(p2.y - p1.y) * inverse : 0; + trace_index++; + } + accumulator_index++; + msm_row_index++; + } - for (size_t j = 0; j < num_rounds; ++j) { - for (size_t k = 0; k < rows_per_round; ++k) { - auto& row = msm_state[msm_row_index]; + if (digit_idx < NUM_WNAF_DIGITS_PER_SCALAR - 1) { + MSMRow& row = msm_rows[msm_row_index]; + const Element& normalized_accumulator = accumulator_trace[accumulator_index]; + const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; + const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; + row.accumulator_x = acc_x; + row.accumulator_y = acc_y; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + add_state.collision_inverse = 0; + const FF& dx = p1_trace[trace_index].x; + const FF& inverse = inverse_trace[trace_index]; + add_state.lambda = ((dx + dx + dx) * dx) * inverse; + trace_index++; + } + accumulator_index++; + msm_row_index++; + } else { + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + MSMRow& row = msm_rows[msm_row_index]; const Element& normalized_accumulator = accumulator_trace[accumulator_index]; + const size_t offset = row_idx * ADDITIONS_PER_ROW; const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; row.accumulator_x = acc_x; row.accumulator_y = acc_y; - - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - auto& add_state = row.add_state[m]; - bool add_predicate = (m == 0 ? (j != 0 || k != 0) : add_state.add); + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + bool add_predicate = add_state.add ? 
msm[offset + point_idx].wnaf_skew : false; const auto& inverse = inverse_trace[trace_index]; const auto& p1 = p1_trace[trace_index]; @@ -404,64 +446,16 @@ class ECCVMMSMMBuilder { accumulator_index++; msm_row_index++; } - - if (j < num_rounds - 1) { - MSMState& row = msm_state[msm_row_index]; - const Element& normalized_accumulator = accumulator_trace[accumulator_index]; - const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; - const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; - row.accumulator_x = acc_x; - row.accumulator_y = acc_y; - - for (size_t m = 0; m < 4; ++m) { - auto& add_state = row.add_state[m]; - add_state.collision_inverse = 0; - const FF& dx = p1_trace[trace_index].x; - const FF& inverse = inverse_trace[trace_index]; - add_state.lambda = ((dx + dx + dx) * dx) * inverse; - trace_index++; - } - accumulator_index++; - msm_row_index++; - } else { - for (size_t k = 0; k < rows_per_round; ++k) { - MSMState& row = msm_state[msm_row_index]; - const Element& normalized_accumulator = accumulator_trace[accumulator_index]; - - const size_t idx = k * ADDITIONS_PER_ROW; - - const FF& acc_x = - normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; - const FF& acc_y = - normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; - row.accumulator_x = acc_x; - row.accumulator_y = acc_y; - - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - auto& add_state = row.add_state[m]; - bool add_predicate = add_state.add ? msm[idx + m].wnaf_skew : false; - - const auto& inverse = inverse_trace[trace_index]; - const auto& p1 = p1_trace[trace_index]; - const auto& p2 = p2_trace[trace_index]; - add_state.collision_inverse = add_predicate ? inverse : 0; - add_state.lambda = add_predicate ? 
(p2.y - p1.y) * inverse : 0; - trace_index++; - } - accumulator_index++; - msm_row_index++; - } - } } } - }); + } // populate the final row in the MSM execution trace. // we always require 1 extra row at the end of the trace, because the accumulator x/y coordinates for row `i` // are present at row `i+1` Element final_accumulator(accumulator_trace.back()); - MSMState& final_row = msm_state.back(); - final_row.pc = static_cast(pc_indices.back()); + MSMRow& final_row = msm_rows.back(); + final_row.pc = static_cast(pc_values.back()); final_row.msm_transition = true; final_row.accumulator_x = final_accumulator.is_point_at_infinity() ? 0 : final_accumulator.x; final_row.accumulator_y = final_accumulator.is_point_at_infinity() ? 0 : final_accumulator.y; @@ -470,12 +464,12 @@ class ECCVMMSMMBuilder { final_row.q_add = false; final_row.q_double = false; final_row.q_skew = false; - final_row.add_state = { typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, - typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, - typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, - typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 } }; + final_row.add_state = { typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, + typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, + typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, + typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 } }; - return msm_state; + return { msm_rows, point_table_read_counts }; } }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp index ed77be8f6a6f..c98e1d56b8b0 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp @@ -4,18 +4,18 @@ namespace bb { -class 
ECCVMPrecomputedTablesBuilder { +class ECCVMPointTablePrecomputationBuilder { public: using CycleGroup = bb::g1; using FF = grumpkin::fr; using Element = typename CycleGroup::element; using AffineElement = typename CycleGroup::affine_element; - static constexpr size_t NUM_WNAF_SLICES = bb::eccvm::NUM_WNAF_SLICES; - static constexpr size_t WNAF_SLICES_PER_ROW = bb::eccvm::WNAF_SLICES_PER_ROW; - static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS; + static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR; + static constexpr size_t WNAF_DIGITS_PER_ROW = bb::eccvm::WNAF_DIGITS_PER_ROW; + static constexpr size_t NUM_WNAF_DIGIT_BITS = bb::eccvm::NUM_WNAF_DIGIT_BITS; - struct PrecomputeState { + struct PointTablePrecoputationRow { int s1 = 0; int s2 = 0; int s3 = 0; @@ -33,31 +33,31 @@ class ECCVMPrecomputedTablesBuilder { AffineElement precompute_double{ 0, 0 }; }; - static std::vector compute_precompute_state( + static std::vector compute_rows( const std::vector>& ecc_muls) { - static constexpr size_t num_rows_per_scalar = NUM_WNAF_SLICES / WNAF_SLICES_PER_ROW; + static constexpr size_t num_rows_per_scalar = NUM_WNAF_DIGITS_PER_SCALAR / WNAF_DIGITS_PER_ROW; const size_t num_precompute_rows = num_rows_per_scalar * ecc_muls.size() + 1; - std::vector precompute_state(num_precompute_rows); + std::vector precompute_state(num_precompute_rows); // start with empty row (shiftable polynomials must have 0 as first coefficient) - precompute_state[0] = PrecomputeState{}; + precompute_state[0] = PointTablePrecoputationRow{}; // current impl doesn't work if not 4 - static_assert(WNAF_SLICES_PER_ROW == 4); + static_assert(WNAF_DIGITS_PER_ROW == 4); run_loop_in_parallel(ecc_muls.size(), [&](size_t start, size_t end) { for (size_t j = start; j < end; j++) { const auto& entry = ecc_muls[j]; - const auto& slices = entry.wnaf_slices; + const auto& slices = entry.wnaf_digits; uint256_t scalar_sum = 0; for (size_t i = 0; i < 
num_rows_per_scalar; ++i) { - PrecomputeState row; - const int slice0 = slices[i * WNAF_SLICES_PER_ROW]; - const int slice1 = slices[i * WNAF_SLICES_PER_ROW + 1]; - const int slice2 = slices[i * WNAF_SLICES_PER_ROW + 2]; - const int slice3 = slices[i * WNAF_SLICES_PER_ROW + 3]; + PointTablePrecoputationRow row; + const int slice0 = slices[i * WNAF_DIGITS_PER_ROW]; + const int slice1 = slices[i * WNAF_DIGITS_PER_ROW + 1]; + const int slice2 = slices[i * WNAF_DIGITS_PER_ROW + 2]; + const int slice3 = slices[i * WNAF_DIGITS_PER_ROW + 3]; const int slice0base2 = (slice0 + 15) / 2; const int slice1base2 = (slice1 + 15) / 2; @@ -85,7 +85,7 @@ class ECCVMPrecomputedTablesBuilder { bool chunk_negative = row_chunk < 0; - scalar_sum = scalar_sum << (WNAF_SLICE_BITS * WNAF_SLICES_PER_ROW); + scalar_sum = scalar_sum << (NUM_WNAF_DIGIT_BITS * WNAF_DIGITS_PER_ROW); if (chunk_negative) { scalar_sum -= static_cast(-row_chunk); } else { diff --git a/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp index 106d83b5d4b6..b3d93d3d1f82 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp @@ -11,7 +11,7 @@ class ECCVMTranscriptBuilder { using Element = typename CycleGroup::element; using AffineElement = typename CycleGroup::affine_element; - struct TranscriptState { + struct TranscriptRow { bool accumulator_empty = false; bool q_add = false; bool q_mul = false; @@ -57,12 +57,12 @@ class ECCVMTranscriptBuilder { return res; } }; - static std::vector compute_transcript_state( - const std::vector>& vm_operations, const uint32_t total_number_of_muls) + static std::vector compute_rows(const std::vector>& vm_operations, + const uint32_t total_number_of_muls) { const size_t num_transcript_entries = vm_operations.size() + 2; - std::vector transcript_state(num_transcript_entries); + std::vector 
transcript_state(num_transcript_entries); std::vector inverse_trace(num_transcript_entries - 2); VMState state{ .pc = total_number_of_muls, @@ -73,9 +73,9 @@ class ECCVMTranscriptBuilder { }; VMState updated_state; // add an empty row. 1st row all zeroes because of our shiftable polynomials - transcript_state[0] = (TranscriptState{}); + transcript_state[0] = (TranscriptRow{}); for (size_t i = 0; i < vm_operations.size(); ++i) { - TranscriptState& row = transcript_state[i + 1]; + TranscriptRow& row = transcript_state[i + 1]; const bb::eccvm::VMOperation& entry = vm_operations[i]; const bool is_mul = entry.mul; @@ -180,7 +180,7 @@ class ECCVMTranscriptBuilder { for (size_t i = 0; i < inverse_trace.size(); ++i) { transcript_state[i + 1].collision_check = inverse_trace[i]; } - TranscriptState& final_row = transcript_state.back(); + TranscriptRow& final_row = transcript_state.back(); final_row.pc = updated_state.pc; final_row.accumulator_x = (updated_state.accumulator.is_point_at_infinity()) ? 0 : updated_state.accumulator.x; final_row.accumulator_y = (updated_state.accumulator.is_point_at_infinity()) ? 
0 : updated_state.accumulator.y; diff --git a/barretenberg/cpp/src/barretenberg/execution_trace/execution_trace.cpp b/barretenberg/cpp/src/barretenberg/execution_trace/execution_trace.cpp index d582089921ec..286728ca1468 100644 --- a/barretenberg/cpp/src/barretenberg/execution_trace/execution_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/execution_trace/execution_trace.cpp @@ -118,7 +118,7 @@ typename ExecutionTrace_::TraceData ExecutionTrace_::construct_t // If the trace is structured, we populate the data from the next block at a fixed block size offset if (is_structured) { - offset += builder.FIXED_BLOCK_SIZE; + offset += block.get_fixed_size(); } else { // otherwise, the next block starts immediately following the previous one offset += block_size; } diff --git a/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp b/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp index f8b6adc61dd3..3823a5100598 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp +++ b/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp @@ -52,21 +52,24 @@ class Goblin { HonkProof eccvm_proof; HonkProof translator_proof; TranslationEvaluations translation_evaluations; - std::vector to_buffer() + + size_t size() const { - // ACIRHACK: so much copying and duplication added here and elsewhere - std::vector translation_evaluations_buf; // = translation_evaluations.to_buffer(); - size_t proof_size = - merge_proof.size() + eccvm_proof.size() + translator_proof.size() + translation_evaluations_buf.size(); + return merge_proof.size() + eccvm_proof.size() + translator_proof.size() + TranslationEvaluations::size(); + }; - std::vector result(proof_size); + std::vector to_buffer() const + { + // ACIRHACK: so much copying and duplication added here and elsewhere + std::vector result; + result.reserve(size()); const auto insert = [&result](const std::vector& buf) { result.insert(result.end(), buf.begin(), buf.end()); }; insert(merge_proof); insert(eccvm_proof); insert(translator_proof); 
- insert(translation_evaluations_buf); + insert(translation_evaluations.to_buffer()); return result; } }; diff --git a/barretenberg/cpp/src/barretenberg/goblin/goblin_recursion.test.cpp b/barretenberg/cpp/src/barretenberg/goblin/goblin_recursion.test.cpp index 43280a800edd..814dfecd9c3d 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/goblin_recursion.test.cpp +++ b/barretenberg/cpp/src/barretenberg/goblin/goblin_recursion.test.cpp @@ -33,12 +33,10 @@ class GoblinRecursionTests : public ::testing::Test { }; /** - * @brief A full Goblin test that mimicks the basic aztec client architecture - * @details + * @brief Test illustrating a Goblin-based IVC scheme + * @details Goblin is usd to accumulate recursive verifications of the GoblinUltraHonk proving system. */ -// TODO fix with https://github.com/AztecProtocol/barretenberg/issues/930 -// intermittent failures, presumably due to uninitialized memory -TEST_F(GoblinRecursionTests, DISABLED_Vanilla) +TEST_F(GoblinRecursionTests, Vanilla) { Goblin goblin; diff --git a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp index e6457060f6f2..46586ff58da8 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp +++ b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp @@ -17,6 +17,7 @@ #include "barretenberg/stdlib_circuit_builders/mock_circuits.hpp" namespace bb { + class GoblinMockCircuits { public: using Curve = curve::BN254; @@ -120,8 +121,8 @@ class GoblinMockCircuits { { // Add some arbitrary ecc op gates for (size_t i = 0; i < 3; ++i) { - auto point = Point::random_element(); - auto scalar = FF::random_element(); + auto point = Point::random_element(&engine); + auto scalar = FF::random_element(&engine); builder.queue_ecc_add_accum(point); builder.queue_ecc_mul_accum(point, scalar); } diff --git a/barretenberg/cpp/src/barretenberg/goblin/translation_evaluations.hpp 
b/barretenberg/cpp/src/barretenberg/goblin/translation_evaluations.hpp index 6dcee0e24fd3..5495504296db 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/translation_evaluations.hpp +++ b/barretenberg/cpp/src/barretenberg/goblin/translation_evaluations.hpp @@ -1,14 +1,18 @@ #pragma once #include "barretenberg/ecc/curves/bn254/fq.hpp" +#include "barretenberg/ecc/fields/field_conversion.hpp" namespace bb { struct TranslationEvaluations { fq op, Px, Py, z1, z2; - std::vector to_buffer() + static constexpr uint32_t NUM_EVALUATIONS = 5; + static size_t size() { return field_conversion::calc_num_bn254_frs() * NUM_EVALUATIONS; } + std::vector to_buffer() const { - std::vector result(5 * sizeof(fq)); + std::vector result; + result.reserve(size()); const auto insert = [&result](const fq& elt) { - std::vector buf = elt.to_buffer(); + std::vector buf = field_conversion::convert_to_bn254_frs(elt); result.insert(result.end(), buf.begin(), buf.end()); }; insert(op); diff --git a/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp b/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp index e8b5c4a940a8..0c3604cb92a6 100644 --- a/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp +++ b/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp @@ -79,7 +79,7 @@ class DebugEngine : public RNG { : engine(std::mt19937_64(12345)) {} - DebugEngine(std::seed_seq& seed) + DebugEngine(std::uint_fast64_t seed) : engine(std::mt19937_64(seed)) {} @@ -116,12 +116,12 @@ class DebugEngine : public RNG { /** * Used by tests to ensure consistent behavior. 
*/ -RNG& get_debug_randomness(bool reset) +RNG& get_debug_randomness(bool reset, std::uint_fast64_t seed) { // static std::seed_seq seed({ 1, 2, 3, 4, 5 }); - static DebugEngine debug_engine; + static DebugEngine debug_engine = DebugEngine(); if (reset) { - debug_engine = DebugEngine(); + debug_engine = DebugEngine(seed); } return debug_engine; } diff --git a/barretenberg/cpp/src/barretenberg/numeric/random/engine.hpp b/barretenberg/cpp/src/barretenberg/numeric/random/engine.hpp index aad7932bbbfb..0e54341ea91a 100644 --- a/barretenberg/cpp/src/barretenberg/numeric/random/engine.hpp +++ b/barretenberg/cpp/src/barretenberg/numeric/random/engine.hpp @@ -4,6 +4,7 @@ #include "../uintx/uintx.hpp" #include "unistd.h" #include +#include namespace bb::numeric { @@ -45,7 +46,7 @@ class RNG { } }; -RNG& get_debug_randomness(bool reset = false); +RNG& get_debug_randomness(bool reset = false, std::uint_fast64_t seed = 12345); RNG& get_randomness(); } // namespace bb::numeric diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/arithmetization.hpp b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/arithmetization.hpp index c864af55e7c9..f71b77021eed 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/arithmetization.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/arithmetization.hpp @@ -51,6 +51,8 @@ template class ExecutionTr bool has_ram_rom = false; // does the block contain RAM/ROM gates bool is_pub_inputs = false; // is this the public inputs block + uint32_t fixed_size; // Fixed size for use in structured trace + bool operator==(const ExecutionTraceBlock& other) const = default; size_t size() const { return std::get<0>(this->wires).size(); } @@ -64,6 +66,9 @@ template class ExecutionTr p.reserve(size_hint); } } + + uint32_t get_fixed_size() const { return fixed_size; } + void set_fixed_size(uint32_t size_in) { fixed_size = size_in; } }; // These are not magic numbers 
and they should not be written with global constants. These parameters are not @@ -119,7 +124,6 @@ template class UltraArith { public: static constexpr size_t NUM_WIRES = 4; static constexpr size_t NUM_SELECTORS = 11; - static constexpr size_t FIXED_BLOCK_SIZE = 1 << 10; // Size of each block in a structured trace (arbitrary for now) using FF = FF_; class UltraTraceBlock : public ExecutionTraceBlock { @@ -158,10 +162,24 @@ template class UltraArith { UltraTraceBlock aux; UltraTraceBlock lookup; + static constexpr uint32_t FIXED_BLOCK_SIZE = 1 << 10; // (Arbitrary for now) + std::array fixed_block_sizes{ + 1 << 3, // pub_inputs; + FIXED_BLOCK_SIZE, // arithmetic; + FIXED_BLOCK_SIZE, // delta_range; + FIXED_BLOCK_SIZE, // elliptic; + FIXED_BLOCK_SIZE, // aux; + FIXED_BLOCK_SIZE // lookup; + }; + TraceBlocks() { aux.has_ram_rom = true; pub_inputs.is_pub_inputs = true; + // Set fixed block sizes for use in structured trace + for (auto [block, size] : zip_view(this->get(), fixed_block_sizes)) { + block.set_fixed_size(size); + } } auto get() { return RefArray{ pub_inputs, arithmetic, delta_range, elliptic, aux, lookup }; } @@ -178,6 +196,31 @@ template class UltraArith { info(""); } + size_t get_total_structured_size() + { + size_t total_size = 0; + for (auto block : this->get()) { + total_size += block.get_fixed_size(); + } + return total_size; + } + + /** + * @brief Check that the number of rows populated in each block does not exceed the specified fixed size + * @note This check is only applicable when utilizing a structured trace + * + */ + void check_within_fixed_sizes() + { + for (auto block : this->get()) { + if (block.size() > block.get_fixed_size()) { + info("WARNING: Num gates in circuit block exceeds the specified fixed size - execution trace will " + "not be constructed correctly!"); + ASSERT(false); + } + } + } + bool operator==(const TraceBlocks& other) const = default; }; @@ -197,7 +240,6 @@ template class UltraHonkArith { public: static constexpr size_t 
NUM_WIRES = 4; static constexpr size_t NUM_SELECTORS = 14; - static constexpr size_t FIXED_BLOCK_SIZE = 1 << 10; // Size of each block in a structured trace (arbitrary for now) using FF = FF_; @@ -270,10 +312,32 @@ template class UltraHonkArith { UltraHonkTraceBlock poseidon_external; UltraHonkTraceBlock poseidon_internal; + // This is a set of fixed block sizes that accomodates the circuits currently processed in the ClientIvc bench. + // Note 1: The individual block sizes do NOT need to be powers of 2, this is just for conciseness. + // Note 2: Current sizes result in a full trace size of 2^18. It's not possible to define a fixed structure + // that accomdates both the kernel and the function circuit while remaining under 2^17. This is because the + // circuits differ in structure but are also both designed to be "full" within the 2^17 size. + std::array fixed_block_sizes{ + 1 << 10, // ecc_op; + 1 << 7, // pub_inputs; + 1 << 16, // arithmetic; + 1 << 15, // delta_range; + 1 << 14, // elliptic; + 1 << 16, // aux; + 1 << 15, // lookup; + 1 << 7, // busread; + 1 << 11, // poseidon_external; + 1 << 14 // poseidon_internal; + }; + TraceBlocks() { aux.has_ram_rom = true; pub_inputs.is_pub_inputs = true; + // Set fixed block sizes for use in structured trace + for (auto [block, size] : zip_view(this->get(), fixed_block_sizes)) { + block.set_fixed_size(size); + } } auto get() @@ -284,20 +348,40 @@ template class UltraHonkArith { void summarize() const { - info("Gate blocks summary:"); - info("goblin ecc op:\t", ecc_op.size()); - info("pub inputs:\t", pub_inputs.size()); - info("arithmetic:\t", arithmetic.size()); - info("delta range:\t", delta_range.size()); - info("elliptic:\t", elliptic.size()); - info("auxiliary:\t", aux.size()); - info("lookups:\t", lookup.size()); - info("busread:\t", busread.size()); - info("poseidon ext:\t", poseidon_external.size()); - info("poseidon int:\t", poseidon_internal.size()); + info("Gate blocks summary: (actual gates / fixed 
capacity)"); + info("goblin ecc op:\t", ecc_op.size(), "/", ecc_op.get_fixed_size()); + info("pub inputs:\t", pub_inputs.size(), "/", pub_inputs.get_fixed_size()); + info("arithmetic:\t", arithmetic.size(), "/", arithmetic.get_fixed_size()); + info("delta range:\t", delta_range.size(), "/", delta_range.get_fixed_size()); + info("elliptic:\t", elliptic.size(), "/", elliptic.get_fixed_size()); + info("auxiliary:\t", aux.size(), "/", aux.get_fixed_size()); + info("lookups:\t", lookup.size(), "/", lookup.get_fixed_size()); + info("busread:\t", busread.size(), "/", busread.get_fixed_size()); + info("poseidon ext:\t", poseidon_external.size(), "/", poseidon_external.get_fixed_size()); + info("poseidon int:\t", poseidon_internal.size(), "/", poseidon_internal.get_fixed_size()); info(""); } + size_t get_total_structured_size() + { + size_t total_size = 0; + for (auto block : this->get()) { + total_size += block.get_fixed_size(); + } + return total_size; + } + + void check_within_fixed_sizes() + { + for (auto block : this->get()) { + if (block.size() > block.get_fixed_size()) { + info("WARNING: Num gates in circuit block exceeds the specified fixed size - execution trace will " + "not be constructed correctly!"); + ASSERT(false); + } + } + } + bool operator==(const TraceBlocks& other) const = default; }; diff --git a/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp b/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp index 6471ba85b56e..e4821a242954 100644 --- a/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp +++ b/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp @@ -86,6 +86,20 @@ template to_buffer() const { return ::to_buffer(evaluations); } diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp index c03af2e5333e..6629142a5188 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp +++ 
b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp @@ -323,8 +323,23 @@ template class ProtoGalaxyProver_ { const FF& scaling_factor) { using Relation = std::tuple_element_t; - Relation::accumulate( - std::get(univariate_accumulators), extended_univariates, relation_parameters, scaling_factor); + + // Check if the relation is skippable to speed up accumulation + if constexpr (!isSkippable) { + // If not, accumulate normally + Relation::accumulate(std::get(univariate_accumulators), + extended_univariates, + relation_parameters, + scaling_factor); + } else { + // If so, only compute the contribution if the relation is active + if (!Relation::skip(extended_univariates)) { + Relation::accumulate(std::get(univariate_accumulators), + extended_univariates, + relation_parameters, + scaling_factor); + } + } // Repeat for the next relation. if constexpr (relation_idx + 1 < Flavor::NUM_RELATIONS) { @@ -349,9 +364,23 @@ template class ProtoGalaxyProver_ { const FF& scaling_factor) { using Relation = std::tuple_element_t; - Relation::accumulate( - std::get(univariate_accumulators), extended_univariates, relation_parameters, scaling_factor); - + // WORKTODO: disable skipping for the combiner for now.. + // Check if the relation is skippable to speed up accumulation + if constexpr (!isSkippable) { + // If not, accumulate normally + Relation::accumulate(std::get(univariate_accumulators), + extended_univariates, + relation_parameters, + scaling_factor); + } else { + // If so, only compute the contribution if the relation is active + if (!Relation::skip(extended_univariates)) { + Relation::accumulate(std::get(univariate_accumulators), + extended_univariates, + relation_parameters, + scaling_factor); + } + } // Repeat for the next relation. 
if constexpr (relation_idx + 1 < Flavor::NUM_RELATIONS) { accumulate_relation_univariates< diff --git a/barretenberg/cpp/src/barretenberg/relations/auxiliary_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/auxiliary_relation.hpp index ea8e4b40e73b..5bb956d8ac48 100644 --- a/barretenberg/cpp/src/barretenberg/relations/auxiliary_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/auxiliary_relation.hpp @@ -53,10 +53,7 @@ template class AuxiliaryRelationImpl { * @brief Returns true if the contribution from all subrelations for the provided inputs is identically zero * */ - template inline static bool skip(const AllEntities& in) - { - return (in.q_aux.value_at(0).is_zero() && in.q_aux.value_at(1).is_zero()); - } + template inline static bool skip(const AllEntities& in) { return in.q_aux.is_zero(); } /** * @brief Expression for the generalized permutation sort gate. @@ -98,7 +95,7 @@ template class AuxiliaryRelationImpl { const Parameters& params, const FF& scaling_factor) { - + BB_OP_COUNT_TIME_NAME("Auxiliary::accumulate"); // All subrelations have the same length so we use the same length view for all calculations using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp index 7a68d156b38d..3c897ce39090 100644 --- a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp @@ -63,6 +63,12 @@ template class DatabusLookupRelationImpl { true, false, true, false }; + template inline static bool skip([[maybe_unused]] const AllEntities& in) + { + // Ensure the input does not contain a read gate or data that is being read + return in.q_busread.is_zero() && in.calldata_read_counts.is_zero() && in.return_data_read_counts.is_zero(); + } + // 
Interface for easy access of databus components by column (bus_idx) template struct BusData; @@ -231,6 +237,7 @@ template class DatabusLookupRelationImpl { const Parameters& params, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("DatabusRead::accumulate"); using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/delta_range_constraint_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/delta_range_constraint_relation.hpp index 25429fbc0028..b2bef8ea7910 100644 --- a/barretenberg/cpp/src/barretenberg/relations/delta_range_constraint_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/delta_range_constraint_relation.hpp @@ -20,7 +20,7 @@ template class DeltaRangeConstraintRelationImpl { */ template inline static bool skip(const AllEntities& in) { - return (in.q_delta_range.value_at(0).is_zero() && in.q_delta_range.value_at(1).is_zero()); + return in.q_delta_range.is_zero(); } /** @@ -44,6 +44,7 @@ template class DeltaRangeConstraintRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("DeltaRange::accumulate"); using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; auto w_1 = View(in.w_l); diff --git a/barretenberg/cpp/src/barretenberg/relations/ecc_op_queue_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/ecc_op_queue_relation.hpp index faf2f0da162d..29d23be4e651 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ecc_op_queue_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/ecc_op_queue_relation.hpp @@ -18,6 +18,13 @@ template class EccOpQueueRelationImpl { 3 // op-queue-wire vanishes sub-relation 4 }; + template inline static bool skip([[maybe_unused]] const AllEntities& in) + { + // The prover can skip execution of this relation altogether since an honest input will lead to a zero + // contribution at 
every row, even when the selector lagrange_ecc_op is on + return true; + } + /** * @brief Expression for the generalized permutation sort gate. * @details The relation is defined as C(in(X)...) = @@ -43,6 +50,7 @@ template class EccOpQueueRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("EccOp::accumulate"); using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/elliptic_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/elliptic_relation.hpp index 2c0b2a850627..7fcd8df4b56f 100644 --- a/barretenberg/cpp/src/barretenberg/relations/elliptic_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/elliptic_relation.hpp @@ -18,10 +18,7 @@ template class EllipticRelationImpl { * @brief Returns true if the contribution from all subrelations for the provided inputs is identically zero * */ - template inline static bool skip(const AllEntities& in) - { - return (in.q_elliptic.value_at(0).is_zero() && in.q_elliptic.value_at(1).is_zero()); - } + template inline static bool skip(const AllEntities& in) { return in.q_elliptic.is_zero(); } // TODO(@zac-williamson #2609 find more generic way of doing this) static constexpr FF get_curve_b() @@ -51,6 +48,7 @@ template class EllipticRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("Elliptic::accumulate"); // TODO(@zac - williamson #2608 when Pedersen refactor is completed, // replace old addition relations with these ones and // remove endomorphism coefficient in ecc add gate(not used)) diff --git a/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp index 337469bd5c80..46b70df7cabf 100644 --- a/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp @@ -184,7 +184,7 @@ template 
class LookupRelationImpl { const Parameters& params, const FF& scaling_factor) { - + BB_OP_COUNT_TIME_NAME("Lookup::accumulate"); { using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/permutation_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/permutation_relation.hpp index 8ff08be35d2c..b904fabeb956 100644 --- a/barretenberg/cpp/src/barretenberg/relations/permutation_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/permutation_relation.hpp @@ -25,8 +25,7 @@ template class UltraPermutationRelationImpl { { // If z_perm == z_perm_shift, this implies that none of the wire values for the present input are involved in // non-trivial copy constraints. - return (in.z_perm.value_at(0) == in.z_perm_shift.value_at(0) && - in.z_perm.value_at(1) == in.z_perm_shift.value_at(1)); + return (in.z_perm - in.z_perm_shift).is_zero(); } inline static auto& get_grand_product_polynomial(auto& in) { return in.z_perm; } @@ -96,6 +95,7 @@ template class UltraPermutationRelationImpl { const Parameters& params, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("Permutation::accumulate"); // Contribution (1) [&]() { using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; diff --git a/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp index 29d082b4a4e0..11de33a20796 100644 --- a/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp @@ -19,7 +19,7 @@ template class Poseidon2ExternalRelationImpl { */ template inline static bool skip(const AllEntities& in) { - return (in.q_poseidon2_external.value_at(0).is_zero() && in.q_poseidon2_external.value_at(1).is_zero()); + return in.q_poseidon2_external.is_zero(); } /** @@ -52,6 +52,7 @@ 
template class Poseidon2ExternalRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("PoseidonExt::accumulate"); using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; auto w_l = View(in.w_l); diff --git a/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp index e21999358682..0014db76971e 100644 --- a/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp @@ -21,7 +21,7 @@ template class Poseidon2InternalRelationImpl { */ template inline static bool skip(const AllEntities& in) { - return (in.q_poseidon2_internal.value_at(0).is_zero() && in.q_poseidon2_internal.value_at(1).is_zero()); + return in.q_poseidon2_internal.is_zero(); } /** @@ -49,6 +49,7 @@ template class Poseidon2InternalRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("PoseidonInt::accumulate"); using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; auto w_l = View(in.w_l); diff --git a/barretenberg/cpp/src/barretenberg/relations/ultra_arithmetic_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/ultra_arithmetic_relation.hpp index 7a5a1e0d9178..d99b57fb7165 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ultra_arithmetic_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/ultra_arithmetic_relation.hpp @@ -16,10 +16,7 @@ template class UltraArithmeticRelationImpl { * @brief Returns true if the contribution from all subrelations for the provided inputs is identically zero * */ - template inline static bool skip(const AllEntities& in) - { - return (in.q_arith.value_at(0).is_zero() && in.q_arith.value_at(1).is_zero()); - } + template inline static bool skip(const AllEntities& in) { return 
in.q_arith.is_zero(); } /** * @brief Expression for the Ultra Arithmetic gate. @@ -78,6 +75,7 @@ template class UltraArithmeticRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("Arithmetic::accumulate"); { using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/utils.hpp b/barretenberg/cpp/src/barretenberg/relations/utils.hpp index 1a777af8fdcd..680f9190427b 100644 --- a/barretenberg/cpp/src/barretenberg/relations/utils.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/utils.hpp @@ -140,6 +140,34 @@ template class RelationUtils { } } + /** + * @brief Calculate the contribution of each relation to the expected value of the full Honk relation. + * + * @details For each relation, use the purported values (supplied by the prover) of the multivariates to + * calculate a contribution to the purported value of the full Honk relation. These are stored in `evaluations`. + * Adding these together, with appropriate scaling factors, produces the expected value of the full Honk + * relation. This value is checked against the final value of the target total sum (called sigma_0 in the + * thesis). + */ + template + // TODO(#224)(Cody): Input should be an array? + inline static void accumulate_relation_evaluations_without_skipping(PolynomialEvaluations evaluations, + RelationEvaluations& relation_evaluations, + const Parameters& relation_parameters, + const FF& partial_evaluation_result) + { + using Relation = std::tuple_element_t; + + Relation::accumulate( + std::get(relation_evaluations), evaluations, relation_parameters, partial_evaluation_result); + + // Repeat for the next relation. 
+ if constexpr (relation_idx + 1 < NUM_RELATIONS) { + accumulate_relation_evaluations( + evaluations, relation_evaluations, relation_parameters, partial_evaluation_result); + } + } + /** * @brief Calculate the contribution of each relation to the expected value of the full Honk relation. * @@ -157,8 +185,23 @@ template class RelationUtils { const FF& partial_evaluation_result) { using Relation = std::tuple_element_t; - Relation::accumulate( - std::get(relation_evaluations), evaluations, relation_parameters, partial_evaluation_result); + + // Check if the relation is skippable to speed up accumulation + if constexpr (!isSkippable || !std::is_same_v) { + // If not, accumulate normally + Relation::accumulate(std::get(relation_evaluations), + evaluations, + relation_parameters, + partial_evaluation_result); + } else { + // If so, only compute the contribution if the relation is active + if (!Relation::skip(evaluations)) { + Relation::accumulate(std::get(relation_evaluations), + evaluations, + relation_parameters, + partial_evaluation_result); + } + } // Repeat for the next relation. if constexpr (relation_idx + 1 < NUM_RELATIONS) { diff --git a/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp b/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp index ae6731c06a7e..a40ba91f45da 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp @@ -7,6 +7,10 @@ namespace bb::stdlib { +namespace { +auto& engine = numeric::get_debug_randomness(); +} + /** * @brief Verify ECDSA signature. 
Produces unsatisfiable constraints if signature fails * @@ -241,7 +245,7 @@ template void generate_ecdsa_verification_test_circuit(Builde crypto::ecdsa_key_pair account; for (size_t i = 0; i < num_iterations; i++) { // Generate unique signature for each iteration - account.private_key = curve::fr::random_element(); + account.private_key = curve::fr::random_element(&engine); account.public_key = curve::g1::one * account.private_key; crypto::ecdsa_signature signature = diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_circuit_builder.cpp index 58542f3e151f..dd12ae511095 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/goblin_ultra_circuit_builder.cpp @@ -92,7 +92,7 @@ template void GoblinUltraCircuitBuilder_::add_goblin_gates_to_ this->blocks.poseidon_internal, this->zero_idx, this->zero_idx, this->zero_idx, this->zero_idx); // add dummy mul accum op and an equality op - this->queue_ecc_mul_accum(bb::g1::affine_element::one() * FF::random_element(), FF::random_element()); + this->queue_ecc_mul_accum(bb::g1::affine_element::one(), 2); this->queue_ecc_eq(); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp index 36937d8c3476..a7d72d7e53e4 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mock_circuits.hpp @@ -3,6 +3,9 @@ namespace bb { +namespace { +auto& engine = numeric::get_debug_randomness(); +} class MockCircuits { public: using Curve = curve::BN254; @@ -20,9 +23,9 @@ class MockCircuits { { // For good measure, include a gate with some public inputs for (size_t i = 0; i < num_gates; ++i) { - FF a = 
FF::random_element(); - FF b = FF::random_element(); - FF c = FF::random_element(); + FF a = FF::random_element(&engine); + FF b = FF::random_element(&engine); + FF c = FF::random_element(&engine); FF d = a + b + c; uint32_t a_idx = builder.add_public_variable(a); uint32_t b_idx = builder.add_variable(b); @@ -43,9 +46,9 @@ class MockCircuits { { // For good measure, include a gate with some public inputs for (size_t i = 0; i < num_gates; ++i) { - FF a = FF::random_element(); - FF b = FF::random_element(); - FF c = FF::random_element(); + FF a = FF::random_element(&engine); + FF b = FF::random_element(&engine); + FF c = FF::random_element(&engine); FF d = a + b + c; uint32_t a_idx = builder.add_variable(a); uint32_t b_idx = builder.add_variable(b); @@ -98,8 +101,8 @@ class MockCircuits { static void construct_goblin_ecc_op_circuit(GoblinUltraCircuitBuilder& builder) { // Add a mul accum op, an add accum op and an equality op - builder.queue_ecc_add_accum(Point::one() * FF::random_element()); - builder.queue_ecc_mul_accum(Point::one() * FF::random_element(), FF::random_element()); + builder.queue_ecc_add_accum(Point::one() * FF::random_element(&engine)); + builder.queue_ecc_mul_accum(Point::one() * FF::random_element(&engine), FF::random_element(&engine)); builder.queue_ecc_eq(); } }; diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp index 4ef2ef12ef85..c3f04728cd35 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp @@ -261,18 +261,19 @@ class ECCOpQueue { } /** - * @brief Get the number of rows in the 'msm' column section o the ECCVM, associated with a single multiscalar mul + * @brief Get the number of rows in the 'msm' column section of the ECCVM associated with a single multiscalar + * 
multiplication. * - * @param msm_count + * @param msm_size * @return uint32_t */ - static uint32_t get_msm_row_count_for_single_msm(const size_t msm_count) + static uint32_t num_eccvm_msm_rows(const size_t msm_size) { - const size_t rows_per_round = - (msm_count / eccvm::ADDITIONS_PER_ROW) + (msm_count % eccvm::ADDITIONS_PER_ROW != 0 ? 1 : 0); - constexpr size_t num_rounds = eccvm::NUM_SCALAR_BITS / eccvm::WNAF_SLICE_BITS; - const size_t num_rows_for_all_rounds = (num_rounds + 1) * rows_per_round; // + 1 round for skew - const size_t num_double_rounds = num_rounds - 1; + const size_t rows_per_wnaf_digit = + (msm_size / eccvm::ADDITIONS_PER_ROW) + ((msm_size % eccvm::ADDITIONS_PER_ROW != 0) ? 1 : 0); + const size_t num_rows_for_all_rounds = + (eccvm::NUM_WNAF_DIGITS_PER_SCALAR + 1) * rows_per_wnaf_digit; // + 1 round for skew + const size_t num_double_rounds = eccvm::NUM_WNAF_DIGITS_PER_SCALAR - 1; const size_t num_rows_for_msm = num_rows_for_all_rounds + num_double_rounds; return static_cast(num_rows_for_msm); @@ -287,7 +288,7 @@ class ECCOpQueue { { size_t msm_rows = num_msm_rows + 2; if (cached_active_msm_count > 0) { - msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + msm_rows += num_eccvm_msm_rows(cached_active_msm_count); } return msm_rows; } @@ -305,7 +306,7 @@ class ECCOpQueue { // add 1 row to start of precompute table section size_t precompute_rows = num_precompute_table_rows + 1; if (cached_active_msm_count > 0) { - msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + msm_rows += num_eccvm_msm_rows(cached_active_msm_count); precompute_rows += get_precompute_table_row_count_for_single_msm(cached_active_msm_count); } @@ -323,7 +324,7 @@ class ECCOpQueue { accumulator = accumulator + to_add; // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(ADD_ACCUM, to_add); + UltraOp ultra_op = construct_and_populate_ultra_ops(ADD_ACCUM, to_add); // Store the raw 
operation raw_ops.emplace_back(ECCVMOperation{ @@ -353,7 +354,7 @@ class ECCOpQueue { accumulator = accumulator + to_mul * scalar; // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(MUL_ACCUM, to_mul, scalar); + UltraOp ultra_op = construct_and_populate_ultra_ops(MUL_ACCUM, to_mul, scalar); // Store the raw operation raw_ops.emplace_back(ECCVMOperation{ @@ -383,7 +384,7 @@ class ECCOpQueue { accumulator.self_set_infinity(); // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(EQUALITY, expected); + UltraOp ultra_op = construct_and_populate_ultra_ops(EQUALITY, expected); // Store raw operation raw_ops.emplace_back(ECCVMOperation{ @@ -404,7 +405,9 @@ class ECCOpQueue { private: /** - * @brief when inserting operations, update the number of multiplications in the latest scalar mul + * @brief Update cached_active_msm_count or update other row counts and reset cached_active_msm_count. + * @details To the OpQueue, an MSM is a sequence of successive mul opcodes (note that mul might better be called + * mul_add--its effect on the accumulator is += scalar * point). 
* * @param op */ @@ -418,7 +421,7 @@ class ECCOpQueue { cached_active_msm_count++; } } else if (cached_active_msm_count != 0) { - num_msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + num_msm_rows += num_eccvm_msm_rows(cached_active_msm_count); num_precompute_table_rows += get_precompute_table_row_count_for_single_msm(cached_active_msm_count); cached_num_muls += cached_active_msm_count; cached_active_msm_count = 0; @@ -433,7 +436,8 @@ class ECCOpQueue { */ static uint32_t get_precompute_table_row_count_for_single_msm(const size_t msm_count) { - constexpr size_t num_precompute_rows_per_scalar = eccvm::NUM_WNAF_SLICES / eccvm::WNAF_SLICES_PER_ROW; + constexpr size_t num_precompute_rows_per_scalar = + eccvm::NUM_WNAF_DIGITS_PER_SCALAR / eccvm::WNAF_DIGITS_PER_ROW; const size_t num_rows_for_precompute_table = msm_count * num_precompute_rows_per_scalar; return static_cast(num_rows_for_precompute_table); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp index a421a10adaa1..e6ede171ffa8 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp @@ -15,7 +15,6 @@ template class StandardCircuitBuilder_ : public CircuitBuilderBase using Arithmetization = StandardArith; using GateBlocks = typename Arithmetization::TraceBlocks; static constexpr size_t NUM_WIRES = Arithmetization::NUM_WIRES; - static constexpr size_t FIXED_BLOCK_SIZE = 0; // not used, for compatibility only // Keeping NUM_WIRES, at least temporarily, for backward compatibility static constexpr size_t program_width = Arithmetization::NUM_WIRES; static constexpr size_t num_selectors = Arithmetization::NUM_SELECTORS; diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp 
b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp index 90dde82d76c0..feecbf0938d4 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp @@ -33,7 +33,6 @@ class UltraCircuitBuilder_ : public CircuitBuilderBase class ProverInstance_ { // If using a structured trace, ensure that no block exceeds the fixed size if (is_structured) { - for (auto& block : circuit.blocks.get()) { - ASSERT(block.size() <= circuit.FIXED_BLOCK_SIZE); - } + circuit.blocks.check_within_fixed_sizes(); } // TODO(https://github.com/AztecProtocol/barretenberg/issues/905): This is adding ops to the op queue but NOT to @@ -109,8 +107,7 @@ template class ProverInstance_ { */ size_t compute_structured_dyadic_size(Circuit& builder) { - size_t num_blocks = builder.blocks.get().size(); - size_t minimum_size = num_blocks * builder.FIXED_BLOCK_SIZE; + size_t minimum_size = builder.blocks.get_total_structured_size(); return builder.get_circuit_subgroup_size(minimum_size); } diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp index c1ac763379ab..51bdf4466058 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp @@ -336,7 +336,8 @@ template class SumcheckVerifierRound { const bb::PowPolynomial& pow_polynomial, const RelationSeparator alpha) { - Utils::template accumulate_relation_evaluations<>( + // The verifier should never skip computation of contributions from any relation + Utils::template accumulate_relation_evaluations_without_skipping<>( purported_evaluations, relation_evaluations, relation_parameters, pow_polynomial.partial_evaluation_result); auto running_challenge = FF(1); diff --git 
a/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp index dc5f8e76dc08..d33607a6bdbc 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp @@ -88,7 +88,6 @@ TEST_F(GoblinUltraHonkComposerTests, BasicStructured) // Construct and verify Honk proof using a structured trace bool structured = true; auto instance = std::make_shared>(builder, structured); - builder.blocks.summarize(); GoblinUltraProver prover(instance); auto verification_key = std::make_shared(instance->proving_key); GoblinUltraVerifier verifier(verification_key); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.test.cpp index 564afd23c4a7..330a2ab8c144 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.test.cpp @@ -16,10 +16,6 @@ using namespace bb; -namespace { -auto& engine = numeric::get_debug_randomness(); -} - using ProverInstance = ProverInstance_; using VerificationKey = UltraFlavor::VerificationKey; diff --git a/barretenberg/ts/CHANGELOG.md b/barretenberg/ts/CHANGELOG.md index 6da5a887ff7d..ef2feb322d56 100644 --- a/barretenberg/ts/CHANGELOG.md +++ b/barretenberg/ts/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## [0.38.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.37.0...barretenberg.js-v0.38.0) (2024-05-07) + + +### ⚠ BREAKING CHANGES + +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) + +### Features + +* `multi_scalar_mul` blackbox func ([#6097](https://github.com/AztecProtocol/aztec-packages/issues/6097)) 
([f6b1ba6](https://github.com/AztecProtocol/aztec-packages/commit/f6b1ba60daf37a5a6466ca1e5ee7be70354af485)) +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) ([e4b97a8](https://github.com/AztecProtocol/aztec-packages/commit/e4b97a8cd7574a828c2a54b4a93b5ced79df6abf)) +* Honk flows exposed through wasm ([#6096](https://github.com/AztecProtocol/aztec-packages/issues/6096)) ([c9b3206](https://github.com/AztecProtocol/aztec-packages/commit/c9b32061b2849442516ff0395b69d9a230191234)) +* Run noir-packages-test in Earthly ([#6174](https://github.com/AztecProtocol/aztec-packages/issues/6174)) ([58e40c9](https://github.com/AztecProtocol/aztec-packages/commit/58e40c9125e6d7b30abf7a4cbb170bbfc15e2037)) + + +### Miscellaneous + +* Bump bb.js timeouts ([#6196](https://github.com/AztecProtocol/aztec-packages/issues/6196)) ([acab3de](https://github.com/AztecProtocol/aztec-packages/commit/acab3de86aae9ce5078795ba1ed0626d0c018565)) +* Migrate acir tests to earthly ([#6142](https://github.com/AztecProtocol/aztec-packages/issues/6142)) ([18c8ea8](https://github.com/AztecProtocol/aztec-packages/commit/18c8ea8eb5f9fd1cb51c116d6d1976c774d51bc1)) + ## [0.37.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.36.0...barretenberg.js-v0.37.0) (2024-05-02) diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index e5a546e963c4..ad49f2f53a4a 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -1,6 +1,6 @@ { "name": "@aztec/bb.js", - "version": "0.37.0", + "version": "0.38.0", "homepage": "https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/ts", "license": "MIT", "type": "module", diff --git a/barretenberg/ts/src/info.ts b/barretenberg/ts/src/info.ts index 6032427bdd5a..f5ad9980591b 100644 --- a/barretenberg/ts/src/info.ts +++ b/barretenberg/ts/src/info.ts @@ 
-9,6 +9,7 @@ export const acvmInfoJson = { 'and', 'xor', 'range', + 'aes128_encrypt', 'sha256', 'blake2s', 'keccak256', @@ -17,7 +18,7 @@ export const acvmInfoJson = { 'pedersen_hash', 'ecdsa_secp256k1', 'ecdsa_secp256r1', - 'fixed_base_scalar_mul', + 'multi_scalar_mul', 'recursive_aggregation', ], }; diff --git a/cspell.json b/cspell.json index 6c2ce17a4077..6e0ff2962646 100644 --- a/cspell.json +++ b/cspell.json @@ -66,6 +66,12 @@ "defi", "delegatecall", "delegatecalls", + "demonomorphization", + "demonomorphize", + "demonomorphized", + "demonomorphizer", + "demonomorphizes", + "demonomorphizing", "deregistration", "devex", "devnet", diff --git a/docs/docs/misc/glossary/call_types.md b/docs/docs/misc/glossary/call_types.md new file mode 100644 index 000000000000..3de6d61d8344 --- /dev/null +++ b/docs/docs/misc/glossary/call_types.md @@ -0,0 +1,177 @@ +--- +title: Call Types +--- + +# Understanding Call Types + +## What is a Call + +We say that a smart contract is called when one of its functions is invoked and its code is run. This means there'll be: + +- a caller +- arguments +- return values +- a call status (successful or failed) + +There are multiple types of calls, and some of the naming can make things **very** confusing. This page lists the different call types and execution modes, pointing out key differences between them. + +## Ethereum Call Types + +Even though we're discussing Aztec, its design is heavily influenced by Ethereum and many of the APIs and concepts are quite similar. It is therefore worthwhile to briefly review how things work there and what naming conventions are used to provide context to the Aztec-specific concepts. + +Broadly speaking, Ethereum contracts can be thought of as executing as a result of three different things: running certain EVM opcodes, running Solidity code (which compiles to EVM opcodes), or via the node JSON-RPC interface (e.g. when executing transactions). 
+ +### EVM + +Certain opcodes allow contracts to make calls to other contracts, each with different semantics. We're particularly interested in `CALL` and `STATICCALL`, and how those relate to contract programming languages and client APIs. + +#### `CALL` + +This is the most common and basic type of call. It grants execution control to the caller until it eventually returns. No special semantics are in play here. Most Ethereum transactions spend the majority of their time in `CALL` contexts. + +#### `STATICCALL` + +This behaves almost exactly the same as `CALL`, with one key difference: any state-changing operations are forbidden and will immediately cause the call to fail. This includes writing to storage, emitting logs, or deploying new contracts. This call is used to query state on an external contract, e.g. to get data from a price oracle, check for access control permissions, etc. + +#### Others + +The `CREATE` and `CREATE2` opcodes (for contract deployment) also result in something similar to a `CALL` context, but all that's special about them has to do with how deployments work. `DELEGATECALL` (and `CALLCODE`) are somewhat complicated to understand but don't have any Aztec equivalents, so they are not worth covering. + +### Solidity + +Solidity (and other contract programming languages such as Vyper) compile down to EVM opcodes, but it is useful to understand how they map language concepts to the different call types. + +#### Mutating External Functions + +These are functions marked `payable` (which can receive ETH, which is a state change) or with no mutability declaration (sometimes called `nonpayable`). When one of these functions is called on a contract, the `CALL` opcode is emitted, meaning the callee can perform state changes, make further `CALL`s, etc. + +It is also possible to call such a function with `STATICCALL` manually (e.g. using assembly), but the execution will fail as soon as a state-changing opcode is executed. 
+ +#### `view` + +An external function marked `view` will not be able to mutate state (write to storage, etc.), it can only _view_ the state. Solidity will emit the `STATICCALL` opcode when calling these functions, since its restrictions provide added safety to the caller (e.g. no risk of reentrancy). + +Note that it is entirely possible to use `CALL` to call a `view` function, and the result will be the exact same as if `STATICCALL` had been used. The reason why `STATICCALL` exists is so that _untrusted or unknown_ contracts can be called while still being able to reason about correctness. From the [EIP](https://eips.ethereum.org/EIPS/eip-214): + +> '`STATICCALL` adds a way to call other contracts and restrict what they can do in the simplest way. It can be safely assumed that the state of all accounts is the same before and after a static call.' + +### JSON-RPC + +From outside the EVM, calls to contracts are made via [JSON-RPC](https://ethereum.org/en/developers/docs/apis/json-rpc/) methods, typically from some client library that is aware of contract ABIs, such as [ethers.js](https://docs.ethers.org/v5) or [viem](https://viem.sh/). + +#### `eth_sendTransaction` + +This method is how transactions are sent to a node to get them to be broadcast and eventually included in a block. The specified `to` address will be called in a `CALL` context, with some notable properties: + +- there are no return values, even if the contract function invoked does return some data +- there is no explicit caller: it is instead derived from a provided signature + +Some client libraries choose to automatically issue `eth_sendTransaction` when calling functions from a contract ABI that are not marked as `view` - [ethers is a good example](https://docs.ethers.org/v5/getting-started/#getting-started--writing). Notably, this means that any return value is lost and not available to the calling client - the library typically returns a transaction receipt instead. 
If the return value is required, then the only option is to simulate the call `eth_call`. + +Note that it is possible to call non state-changing functions (i.e. `view`) with `eth_sendTransaction` - this is always meaningless. What transactions do is change the blockchain state, so all calling such a function achieves is for the caller to lose funds by paying for gas fees. The sole purpose of a `view` function is to return data, and `eth_sendTransaction` does not make the return value available. + +#### `eth_call` + +This method is the largest culprit of confusion around calls, but unfortunately requires understanding of all previous concepts in order to be explained. Its name is also quite unhelpful. + +What `eth_call` does is simulate a transaction (a call to a contract) given the current blockchain state. The behavior will be the exact same as `eth_sendTransaction`, except: + +- no actual transaction will be created +- while gas _will_ be measured, there'll be no transaction fees of any kind +- no signature is required: the `from` address is passed directly, and can be set to any value (even if the private key is unknown, or if they are contract addresses!) +- the return value of the called contract is available + +`eth_call` is typically used for one of the following: + +- query blockchain data, e.g. read token balances +- preview the state changes produced by a transaction, e.g. the transaction cost, token balance changes, etc + +Because some libraries ([such as ethers](https://docs.ethers.org/v5/getting-started/#getting-started--reading)) automatically use `eth_call` for `view` functions (which when called via Solidity result in the `STATICCALL` opcode), these concepts can be hard to tell apart. 
The following bears repeating: **an `eth_call`'s call context is the same as `eth_sendTransaction`, and it is a `CALL` context, not `STATICCALL`.** + +## Aztec Call Types + +Large parts of the Aztec Network's design are still not finalized, and the nitty-gritty of contract calls is no exception. This section won't therefore contain a thorough review of these, but rather list some of the main ways contracts can currently be interacted with, with analogies to Ethereum call types when applicable. + +While Ethereum contracts are defined by bytecode that runs on the EVM, Aztec contracts have multiple modes of execution depending on the function that is invoked. + +### Private Execution + +Contract functions marked with `#[aztec(private)]` can only be called privately, and as such 'run' in the user's device. Since they're circuits, their 'execution' is actually the generation of a zk-SNARK proof that'll later be sent to the sequencer for verification. + +#### Private Calls + +Private functions from other contracts can be called either regularly or statically by using the `.call()` and `.static_call` functions. They will also be 'executed' (i.e. proved) in the user's device, and `static_call` will fail if any state changes are attempted (like the EVM's `STATICCALL`). + +#include_code private_call /noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust + +Unlike the EVM however, private execution doesn't revert in the traditional way: in case of error (e.g. a failed assertion, a state changing operation in a static context, etc.) the proof generation simply fails and no transaction request is generated, spending no network gas or user funds. + +#### Public Calls + +Since public execution can only be performed by the sequencer, public functions cannot be executed in a private context. It is possible however to _enqueue_ a public function call during private execution, requesting the sequencer to run it during inclusion of the transaction. 
It will be [executed in public](#public-execution) normally, including the possibility to enqueue static public calls. + +Since the public call is made asynchronously, any return values or side effects are not available during private execution. If the public function fails once executed, the entire transaction is reverted including state changes caused by the private part, such as new notes or nullifiers. Note that this does result in gas being spent, like in the case of the EVM. + +#include_code enqueue_public /noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr rust + +It is also possible to create public functions that can _only_ be invoked by privately enqueuing a call from the same contract, which can be very useful to update public state after private execution (e.g. update a token's supply after privately minting). This is achieved by annotating functions with `#[aztec(internal)]`. + +A common pattern is to enqueue public calls to check some validity condition on public state, e.g. that a deadline has not expired or that some public value is set. + +#include_code call-check-deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust + +#include_code deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust + +:::warning +Calling public functions privately leaks some privacy! The caller of the function and all arguments will be revealed, so exercise care when mixing the private and public domains. To learn about alternative ways to access public state privately, look into [Shared State](../../developers/contracts/references/storage/shared_state.md). +::: + +### Public Execution + +Contract functions marked with `#[aztec(public)]` can only be called publicly, and are executed by the sequencer. The computation model is very similar to the EVM: all state, parameters, etc. are known to the entire network, and no data is private. 
Static execution like the EVM's `STATICCALL` is possible too, with similar semantics (state can be accessed but not modified, etc.). + +Since private calls are always run in a user's device, it is not possible to perform any private execution from a public context. A reasonably good mental model for public execution is that of an EVM in which some work has already been done privately, and all that is known about it is its correctness and side-effects (new notes and nullifiers, enqueued public calls, etc.). A reverted public execution will also revert the private side-effects. + +Public functions in other contracts can be called both regularly and statically, just like on the EVM. + +#include_code public_call /noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr rust + +:::note +This is the same function that was called by privately enqueuing a call to it! Public functions can be called either directly in a public context, or asynchronously by enqueuing in a private context. +::: + +### Top-level Unconstrained + +Contract functions with the `unconstrained` Noir keyword are a special type of function still under development, and their semantics will likely change in the near future. They are used to perform state queries from an off-chain client, and are never included in any transaction. No guarantees are made on the correctness of the result since they rely exclusively on unconstrained oracle calls. + +A reasonable mental model for them is that of a `view` Solidity function that is never called in any transaction, and is only ever invoked via `eth_call`. Note that in these the caller assumes that the node is acting honestly by executing the true contract bytecode with correct blockchain state, the same way the Aztec version assumes the oracles are returning legitimate data. 
+ +### aztec.js + +There are three different ways to execute an Aztec contract function using the `aztec.js` library, with close similarities to their [JSON-RPC counterparts](#json-rpc). + +#### `simulate` + +This is used to get a result out of an execution, either private or public. It creates no transaction and spends no gas. The mental model is fairly close to that of [`eth_call`](#eth_call), in that it can be used to call any type of function, simulate its execution and get a result out of it. `simulate` is also the only way to run [top-level unconstrained functions](#top-level-unconstrained). + +#include_code public_getter /noir-projects/noir-contracts/contracts/auth_contract/src/main.nr rust + +#include_code simulate_public_getter yarn-project/end-to-end/src/e2e_auth_contract.test.ts typescript + +:::warning +No correctness is guaranteed on the result of `simulate`! Correct execution is entirely optional and left up to the client that handles this request. +::: + +#### `prove` + +This creates and returns a transaction request, which includes proof of correct private execution and side-effects. The request is not broadcast however, and no gas is spent. It is typically used in testing contexts to inspect transaction parameters or to check for execution failure. + +#include_code local-tx-fails /yarn-project/end-to-end/src/guides/dapp_testing.test.ts typescript + +Like most Ethereum libraries, `prove` also simulates public execution to try to detect runtime errors that would only occur once the transaction is picked up by the sequencer. This makes `prove` very useful in testing environments, but users should be wary of both false positives and negatives in production environments, particularly if the node's data is stale. Public simulation can be skipped by setting the `skipPublicSimulation` flag. 
This is how transactions are sent, getting them to be included in blocks and spending gas. It is similar to [`eth_sendTransaction`](#eth_sendtransaction), except it also performs some work on the user's device, namely the production of the proof for the private part of the transaction. + +#include_code send_tx yarn-project/end-to-end/src/e2e_card_game.test.ts typescript diff --git a/docs/docs/misc/glossary.md b/docs/docs/misc/glossary/main.md similarity index 100% rename from docs/docs/misc/glossary.md rename to docs/docs/misc/glossary/main.md diff --git a/docs/docs/protocol-specs/circuits/private-kernel-initial.mdx b/docs/docs/protocol-specs/circuits/private-kernel-initial.mdx index 13d188829c6a..843f516772e0 100644 --- a/docs/docs/protocol-specs/circuits/private-kernel-initial.mdx +++ b/docs/docs/protocol-specs/circuits/private-kernel-initial.mdx @@ -168,8 +168,8 @@ The circuit undergoes the following validations for data within [`private_inputs For each `note_hash` at index `i` in `note_hashes`: - Find the `request_index` at [`hints`](#hints).`note_hash_range_hints[i]`, which is the index of the `private_call_requests` with the smallest `counter_start` that was emitted after the `note_hash`. - - If `request_index` equals `NM`, indicating no request was emitted after the `note_hash`, its counter must be greater the `counter_end` of the last request. - - If `request_index` equals `0`, indicating no request was emitted before the `note_hash`. Its counter must be less the `counter_start` of the first request. + - If `request_index` equals `NE`, indicating no request was emitted after the `note_hash`, its counter must be greater than the `counter_end` of the last request. + - If `request_index` equals `0`, indicating no request was emitted before the `note_hash`. Its counter must be less than the `counter_start` of the first request. - Otherwise, the request was emitted after the `note_hash`, and its immediate previous request was emitted before the `note_hash`. 
Its counter must fall between those two requests. The code simplifies as: diff --git a/docs/sidebars.js b/docs/sidebars.js index b2272172ad87..65a63a31870b 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -584,7 +584,15 @@ const sidebars = { defaultStyle: true, }, "misc/migration_notes", - "misc/glossary", + { + label: "Glossary", + type: "category", + link: { + type: "doc", + id: "misc/glossary/main", + }, + items: ["misc/glossary/call_types"], + }, { label: "Roadmap", type: "category", diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 8a9200aaee69..a026721c12f9 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -89,7 +89,7 @@ library Constants { uint256 internal constant DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631; uint256 internal constant DEPLOYER_CONTRACT_ADDRESS = - 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165; + 0x2e9c386f07e22a1d24e677ab70407b2dd0adbc7cafb9c822bf249685d6a2e4cc; uint256 internal constant DEFAULT_GAS_LIMIT = 1_000_000_000; uint256 internal constant DEFAULT_TEARDOWN_GAS_LIMIT = 100_000_000; uint256 internal constant DEFAULT_MAX_FEE_PER_GAS = 10; @@ -115,15 +115,18 @@ library Constants { uint256 internal constant GLOBAL_VARIABLES_LENGTH = 6 + GAS_FEES_LENGTH; uint256 internal constant APPEND_ONLY_TREE_SNAPSHOT_LENGTH = 2; uint256 internal constant L1_TO_L2_MESSAGE_LENGTH = 6; - uint256 internal constant L2_TO_L1_MESSAGE_LENGTH = 2; + uint256 internal constant L2_TO_L1_MESSAGE_LENGTH = 3; + uint256 internal constant SCOPED_L2_TO_L1_MESSAGE_LENGTH = L2_TO_L1_MESSAGE_LENGTH + 1; uint256 internal constant MAX_BLOCK_NUMBER_LENGTH = 2; uint256 internal constant NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; - uint256 internal constant NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH = 4; + uint256 internal constant 
SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = + NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; uint256 internal constant PARTIAL_STATE_REFERENCE_LENGTH = 6; uint256 internal constant READ_REQUEST_LENGTH = 2; uint256 internal constant NOTE_HASH_LENGTH = 2; - uint256 internal constant NOTE_HASH_CONTEXT_LENGTH = 3; + uint256 internal constant SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; uint256 internal constant NULLIFIER_LENGTH = 3; + uint256 internal constant SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; uint256 internal constant SIDE_EFFECT_LENGTH = 2; uint256 internal constant STATE_REFERENCE_LENGTH = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index 645eddc4973f..907f009151dd 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -30,6 +30,19 @@ test: RUN cd aztec-nr && nargo test --silence-warnings RUN cd noir-contracts && nargo test --silence-warnings +format: + FROM +build + + # TODO: https://github.com/noir-lang/noir/issues/4980 + # WORKDIR /usr/src/noir-projects/noir-protocol-circuits + # RUN nargo fmt --check + + WORKDIR /usr/src/noir-projects/noir-contracts + RUN nargo fmt --check + + WORKDIR /usr/src/noir-projects/aztec-nr + RUN nargo fmt --check + gates-report: FROM +build WORKDIR /usr/src/noir-projects/noir-protocol-circuits @@ -38,4 +51,4 @@ gates-report: RUN NARGO_BACKEND_PATH=/usr/src/barretenberg/cpp/build/bin/bb nargo info --json > gates_report.json - SAVE ARTIFACT gates_report.json gates_report.json \ No newline at end of file + SAVE ARTIFACT gates_report.json gates_report.json diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 3469ce45505c..5aa17568bc3a 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = d26d7b585f785c65cc788ba8ce94dbb2dc23b07c + commit = 
440d97fb931948aa90fcd6a1ee0206abdc468745 method = merge cmdver = 0.4.6 - parent = cf7076eecae98b6c66bcec809b1677ff2c348ab2 + parent = 7a81f4568348ceee1dde52ec2c93c5245420f880 diff --git a/noir-projects/aztec-nr/aztec/src/context/avm_context.nr b/noir-projects/aztec-nr/aztec/src/context/avm_context.nr index 30097e522f12..d7180bd83387 100644 --- a/noir-projects/aztec-nr/aztec/src/context/avm_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/avm_context.nr @@ -38,7 +38,7 @@ impl AvmContext { * Should be automatically convertible to [Field; N]. For example str works with * one char per field. Otherwise you can use CompressedString. */ - pub fn emit_unencrypted_log(&mut self, event_selector: Field, log: T) { + pub fn emit_unencrypted_log_with_selector(&mut self, event_selector: Field, log: T) { emit_unencrypted_log(event_selector, log); } pub fn note_hash_exists(self, note_hash: Field, leaf_index: Field) -> bool { @@ -88,7 +88,7 @@ impl PublicContextInterface for AvmContext { fn emit_unencrypted_log(&mut self, log: T) { let event_selector = 5; // Matches current PublicContext. 
- self.emit_unencrypted_log(event_selector, log); + self.emit_unencrypted_log_with_selector(event_selector, log); } fn consume_l1_to_l2_message(&mut self, content: Field, secret: Field, sender: EthAddress, leaf_index: Field) { diff --git a/noir-projects/aztec-nr/aztec/src/context/interface.nr b/noir-projects/aztec-nr/aztec/src/context/interface.nr index b0fa94a211ec..7f72656252b3 100644 --- a/noir-projects/aztec-nr/aztec/src/context/interface.nr +++ b/noir-projects/aztec-nr/aztec/src/context/interface.nr @@ -1,8 +1,6 @@ -use dep::protocol_types::{ - abis::function_selector::FunctionSelector, address::{AztecAddress, EthAddress}, header::Header, - traits::Deserialize -}; +use dep::protocol_types::{abis::function_selector::FunctionSelector, address::{AztecAddress, EthAddress}, traits::Deserialize}; +use crate::hash::hash_args; use crate::context::private_context::PrivateContext; use crate::context::public_context::PublicContext; use crate::context::avm_context::AvmContext; @@ -209,20 +207,22 @@ struct AvmCallInterface { target_contract: AztecAddress, selector: FunctionSelector, args: [Field], + gas_opts: GasOpts, } impl AvmCallInterface { - pub fn call(self, context: &mut AvmContext, gas_opts: GasOpts) -> T where T: Deserialize { - let returns = context.call_public_function(self.target_contract, self.selector, self.args, gas_opts); + pub fn with_gas(self: &mut Self, gas_opts: GasOpts) -> &mut Self { + self.gas_opts = gas_opts; + self + } + + pub fn call(self, context: &mut AvmContext) -> T where T: Deserialize { + let returns = context.call_public_function(self.target_contract, self.selector, self.args, self.gas_opts); returns.deserialize_into() } - pub fn static_call( - self, - context: &mut AvmContext, - gas_opts: GasOpts - ) -> T where T: Deserialize { - let returns = context.static_call_public_function(self.target_contract, self.selector, self.args, gas_opts); + pub fn static_call(self, context: &mut AvmContext) -> T where T: Deserialize { + let returns = 
context.static_call_public_function(self.target_contract, self.selector, self.args, self.gas_opts); returns.deserialize_into() } @@ -230,22 +230,43 @@ impl AvmCallInterface { let returns = context.delegate_call_public_function(self.target_contract, self.selector, self.args); returns.deserialize_into() } + + pub fn enqueue(self, context: &mut PrivateContext) { + let args_hash = hash_args(self.args); + context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, false, false) + } + + pub fn static_enqueue(self, context: &mut PrivateContext) { + let args_hash = hash_args(self.args); + context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, true, false) + } + + pub fn delegate_enqueue(self, context: &mut PrivateContext) { + let args_hash = hash_args(self.args); + context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, false, true) + } } struct AvmVoidCallInterface { target_contract: AztecAddress, selector: FunctionSelector, args: [Field], + gas_opts: GasOpts, } impl AvmVoidCallInterface { - pub fn call(self, context: &mut AvmContext, gas_opts: GasOpts) { - let returns = context.call_public_function(self.target_contract, self.selector, self.args, gas_opts); + pub fn with_gas(self: &mut Self, gas_opts: GasOpts) -> &mut Self { + self.gas_opts = gas_opts; + self + } + + pub fn call(self, context: &mut AvmContext) { + let returns = context.call_public_function(self.target_contract, self.selector, self.args, self.gas_opts); returns.assert_empty() } - pub fn static_call(self, context: &mut AvmContext, gas_opts: GasOpts) { - let returns = context.static_call_public_function(self.target_contract, self.selector, self.args, gas_opts); + pub fn static_call(self, context: &mut AvmContext) { + let returns = context.static_call_public_function(self.target_contract, self.selector, self.args, self.gas_opts); returns.assert_empty() } @@ -253,4 +274,19 @@ impl AvmVoidCallInterface 
{ let returns = context.delegate_call_public_function(self.target_contract, self.selector, self.args); returns.assert_empty() } + + pub fn enqueue(self, context: &mut PrivateContext) { + let args_hash = hash_args(self.args); + context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, false, false) + } + + pub fn static_enqueue(self, context: &mut PrivateContext) { + let args_hash = hash_args(self.args); + context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, true, false) + } + + pub fn delegate_enqueue(self, context: &mut PrivateContext) { + let args_hash = hash_args(self.args); + context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, false, true) + } } diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index 0411ba566a01..9d7010e3107a 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -96,13 +96,11 @@ impl ContextInterface for PrivateContext { } fn push_new_note_hash(&mut self, note_hash: Field) { - self.new_note_hashes.push(NoteHash { value: note_hash, counter: self.side_effect_counter }); - self.side_effect_counter = self.side_effect_counter + 1; + self.new_note_hashes.push(NoteHash { value: note_hash, counter: self.next_counter() }); } fn push_new_nullifier(&mut self, nullifier: Field, nullified_note_hash: Field) { - self.new_nullifiers.push(Nullifier { value: nullifier, note_hash: nullified_note_hash, counter: self.side_effect_counter }); - self.side_effect_counter = self.side_effect_counter + 1; + self.new_nullifiers.push(Nullifier { value: nullifier, note_hash: nullified_note_hash, counter: self.next_counter() }); } } @@ -193,15 +191,13 @@ impl PrivateContext { } pub fn push_note_hash_read_request(&mut self, note_hash: Field) { - let side_effect = ReadRequest { value: 
note_hash, counter: self.side_effect_counter }; + let side_effect = ReadRequest { value: note_hash, counter: self.next_counter() }; self.note_hash_read_requests.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; } pub fn push_nullifier_read_request(&mut self, nullifier: Field) { - let request = ReadRequest { value: nullifier, counter: self.side_effect_counter }; + let request = ReadRequest { value: nullifier, counter: self.next_counter() }; self.nullifier_read_requests.push(request); - self.side_effect_counter = self.side_effect_counter + 1; } pub fn request_app_nullifier_secret_key(&mut self, account: AztecAddress) -> Field { @@ -227,7 +223,7 @@ impl PrivateContext { // docs:start:context_message_portal pub fn message_portal(&mut self, recipient: EthAddress, content: Field) { // docs:end:context_message_portal - let message = L2ToL1Message { recipient, content }; + let message = L2ToL1Message { recipient, content, counter: self.next_counter() }; self.new_l2_to_l1_msgs.push(message); } @@ -259,9 +255,8 @@ impl PrivateContext { let contract_address = self.this_address(); let log_slice = log.to_be_bytes_arr(); let log_hash = compute_unencrypted_log_hash(contract_address, event_selector, log); - let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; + let side_effect = SideEffect { value: log_hash, counter: self.next_counter() }; self.unencrypted_logs_hashes.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) self.unencrypted_log_preimages_length += 44 + log_slice.len().to_field(); // call oracle @@ -278,10 +273,10 @@ impl PrivateContext { pub fn emit_contract_class_unencrypted_log(&mut self, log: [Field; N]) { let event_selector = 5; // TODO: compute actual event selector. 
let contract_address = self.this_address(); - let log_hash = emit_contract_class_unencrypted_log_private_internal(contract_address, event_selector, log, self.side_effect_counter); - let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; + let counter = self.next_counter(); + let log_hash = emit_contract_class_unencrypted_log_private_internal(contract_address, event_selector, log, counter); + let side_effect = SideEffect { value: log_hash, counter }; self.unencrypted_logs_hashes.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) self.unencrypted_log_preimages_length += 44 + N*32; } @@ -296,18 +291,18 @@ impl PrivateContext { ) where [Field; N]: LensForEncryptedLog { // TODO(1139): perform encryption in the circuit // The oracle call should come last, but we require the encrypted value for now + let counter = self.next_counter(); let encrypted_log: [Field; M] = emit_encrypted_log( contract_address, storage_slot, note_type_id, encryption_pub_key, preimage, - self.side_effect_counter + counter ); let log_hash = compute_encrypted_log_hash(encrypted_log); - let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; + let side_effect = SideEffect { value: log_hash, counter }; self.encrypted_logs_hashes.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; let encrypted_log_byte_len = 112 + 32 * (N + 3); // + processed log len (4) self.encrypted_log_preimages_length += encrypted_log_byte_len + 4; @@ -600,6 +595,12 @@ impl PrivateContext { ); } } + + fn next_counter(&mut self) -> u32 { + let counter = self.side_effect_counter; + self.side_effect_counter += 1; + counter + } } impl Empty for PrivateContext { diff --git a/noir-projects/aztec-nr/aztec/src/context/public_context.nr b/noir-projects/aztec-nr/aztec/src/context/public_context.nr index a410a4accb17..ef0fff635bd4 100644 --- 
a/noir-projects/aztec-nr/aztec/src/context/public_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/public_context.nr @@ -131,16 +131,14 @@ impl PublicContext { // Keep private or ask the AVM team if you want to change it. fn push_nullifier_read_request(&mut self, nullifier: Field) { - let request = ReadRequest { value: nullifier, counter: self.side_effect_counter }; + let request = ReadRequest { value: nullifier, counter: self.next_counter() }; self.nullifier_read_requests.push(request); - self.side_effect_counter = self.side_effect_counter + 1; } // Keep private or ask the AVM team if you want to change it. fn push_nullifier_non_existent_read_request(&mut self, nullifier: Field) { - let request = ReadRequest { value: nullifier, counter: self.side_effect_counter }; + let request = ReadRequest { value: nullifier, counter: self.next_counter() }; self.nullifier_non_existent_read_requests.push(request); - self.side_effect_counter = self.side_effect_counter + 1; } pub fn finish(self) -> PublicCircuitPublicInputs { @@ -171,6 +169,12 @@ impl PublicContext { }; pub_circuit_pub_inputs } + + fn next_counter(&mut self) -> u32 { + let counter = self.side_effect_counter; + self.side_effect_counter += 1; + counter + } } impl ContextInterface for PublicContext { @@ -199,17 +203,15 @@ impl ContextInterface for PublicContext { } fn push_new_note_hash(&mut self, note_hash: Field) { - self.new_note_hashes.push(NoteHash { value: note_hash, counter: self.side_effect_counter }); - self.side_effect_counter = self.side_effect_counter + 1; + self.new_note_hashes.push(NoteHash { value: note_hash, counter: self.next_counter() }); } fn push_new_nullifier(&mut self, nullifier: Field, _nullified_note_hash: Field) { self.new_nullifiers.push(Nullifier { value: nullifier, note_hash: 0, // cannot nullify pending notes in public context - counter: self.side_effect_counter + counter: self.next_counter() }); - self.side_effect_counter = self.side_effect_counter + 1; } } @@ -249,7 +251,7 @@ 
impl PublicContextInterface for PublicContext { } fn message_portal(&mut self, recipient: EthAddress, content: Field) { - let message = L2ToL1Message { recipient, content }; + let message = L2ToL1Message { recipient, content, counter: self.next_counter() }; self.new_l2_to_l1_msgs.push(message); } @@ -281,9 +283,8 @@ impl PublicContextInterface for PublicContext { event_selector, log ); - let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; + let side_effect = SideEffect { value: log_hash, counter: self.next_counter() }; self.unencrypted_logs_hashes.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) self.unencrypted_log_preimages_length = self.unencrypted_log_preimages_length + 44 + log_slice.len().to_field(); // Call oracle to broadcast log diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr new file mode 100644 index 000000000000..a81b22fd87b4 --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr @@ -0,0 +1,2 @@ +mod header; +mod body; diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr new file mode 100644 index 000000000000..07bd08b46e37 --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr @@ -0,0 +1,147 @@ +use crate::note::{note_interface::NoteInterface}; +use dep::protocol_types::{grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint}; + +use crate::oracle::encryption::aes128_encrypt; +use crate::keys::point_to_symmetric_key::point_to_symmetric_key; + +struct EncryptedLogBody { + storage_slot: Field, + note_type_id: Field, + note: Note, +} + +impl EncryptedLogBody { + pub fn new( + storage_slot: Field, + note_type_id: Field, + note: Note + ) -> Self where Note: NoteInterface { + Self { storage_slot, note_type_id, note } + } + + pub 
fn compute_ciphertext( + self, + secret: GrumpkinPrivateKey, + point: GrumpkinPoint + ) -> [u8; M] where Note: NoteInterface { + // We need 32 bytes for every field in the note, and then we have 2 extra fields (storage_slot and note_type_id) + let serialized_note: [Field; N] = Note::serialize_content(self.note); + + // Work around not being able to use N directly beyond the size of the array above. + let N_ = serialized_note.len(); + + assert(N_ * 32 + 64 == M, "Invalid size of encrypted log body"); + + let mut buffer: [u8; M] = [0; M]; + + let storage_slot_bytes = self.storage_slot.to_be_bytes(32); + let note_type_id_bytes = self.note_type_id.to_be_bytes(32); + for i in 0..32 { + buffer[i] = storage_slot_bytes[i]; + buffer[32 + i] = note_type_id_bytes[i]; + } + + for i in 0..N_ { + let bytes = serialized_note[i].to_be_bytes(32); + for j in 0..32 { + buffer[64 + i * 32 + j] = bytes[j]; + } + } + + let full_key = point_to_symmetric_key(secret, point); + let mut sym_key = [0; 16]; + let mut iv = [0; 16]; + + for i in 0..16 { + sym_key[i] = full_key[i]; + iv[i] = full_key[i + 16]; + } + + aes128_encrypt(buffer, iv, sym_key) + } +} + +/* +// Test is semi broken, needs to be fixed along with #6172 +mod test { + use crate::encrypted_logs::body::EncryptedLogBody; + use dep::protocol_types::{address::AztecAddress, traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER}; + + use crate::{ + note::{note_header::NoteHeader, note_interface::NoteInterface, utils::compute_note_hash_for_consumption}, + oracle::{unsafe_rand::unsafe_rand, nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, + context::PrivateContext, hash::poseidon2_hash + }; + + use dep::protocol_types::{address::AztecAddress, grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint}; + + struct AddressNote { + address: AztecAddress, + owner: AztecAddress, + randomness: Field, + header: NoteHeader, + } + + global BIB_BOB_ADDRESS_NOTE_LEN: Field = 3; + + impl NoteInterface 
for AddressNote { + fn compute_note_content_hash(self) -> Field {1} + + fn get_note_type_id() -> Field {2} + + fn get_header(self) -> NoteHeader { self.header} + + fn set_header(&mut self, header: NoteHeader) {self.header = header; } + + fn compute_nullifier(self, context: &mut PrivateContext) -> Field {1} + + fn compute_nullifier_without_context(self) -> Field {1} + + fn broadcast(self, context: &mut PrivateContext, slot: Field) {} + + fn serialize_content(self) -> [Field; BIB_BOB_ADDRESS_NOTE_LEN] { [self.address.to_field(), self.owner.to_field(), self.randomness]} + + fn deserialize_content(fields: [Field; BIB_BOB_ADDRESS_NOTE_LEN]) -> Self { + AddressNote { address: AztecAddress::from_field(fields[0]), owner: AztecAddress::from_field(fields[1]), randomness: fields[2], header: NoteHeader::empty() } + } + } + + impl AddressNote { + pub fn new(address: AztecAddress, owner: AztecAddress, randomness: Field) -> Self { + AddressNote { address, owner, randomness, header: NoteHeader::empty() } + } + // docs:end:address_note_def + } + + // @todo Issue(#6172) This is to be run as a test. But it is currently using the AES oracle so will fail there. 
+ fn test_encrypted_log_body() { + let note = AddressNote::new( + AztecAddress::from_field(0x1), + AztecAddress::from_field(0x2), + 3 + ); + + let note_type_id = 1; + let storage_slot = 2; + let body = EncryptedLogBody::new(storage_slot, note_type_id, note); + + let secret = GrumpkinPrivateKey::new( + 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06, + 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd + ); + let point = GrumpkinPoint::new( + 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, + 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e + ); + + let ciphertext = body.compute_ciphertext(secret, point); + + let expected_body_ciphertext = [ + 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 36, 194, 14, 168, 0, 137, 126, 59, 151, 177, 136, 254, 153, 190, 92, 33, 40, 151, 178, 54, 34, 166, 124, 96, 117, 108, 168, 7, 147, 222, 81, 201, 254, 170, 244, 151, 60, 64, 226, 45, 156, 185, 53, 23, 121, 63, 243, 101, 134, 21, 167, 39, 226, 203, 162, 223, 28, 74, 244, 159, 54, 201, 192, 168, 19, 85, 103, 82, 148, 3, 153, 210, 89, 245, 171, 171, 12, 248, 40, 74, 199, 65, 96, 42, 84, 83, 48, 21, 188, 134, 45, 247, 134, 166, 109, 170, 68, 212, 99, 235, 74, 202, 162, 108, 130, 128, 122, 16, 79, 242, 30, 157, 26, 75, 57, 24, 18, 124, 217, 74, 155, 13, 171, 205, 194, 193, 103, 134, 224, 204, 46, 105, 135, 166, 192, 163, 186, 42, 71, 51, 156, 161, 8, 131 + ]; + + assert_eq(ciphertext, expected_body_ciphertext); + } +} + +*/ diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr new file mode 100644 index 000000000000..03b5a33e3d1a --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr @@ -0,0 +1,57 @@ +use dep::protocol_types::{address::AztecAddress, grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint}; + +use crate::oracle::encryption::aes128_encrypt; +use 
crate::keys::point_to_symmetric_key::point_to_symmetric_key; + +struct EncryptedLogHeader { + address: AztecAddress, +} + +impl EncryptedLogHeader { + fn new(address: AztecAddress) -> Self { + EncryptedLogHeader { address } + } + + // @todo Issue(#5901) Figure out if we return the bytes or fields for the log + fn compute_ciphertext(self, secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 32] { + let full_key = point_to_symmetric_key(secret, point); + let mut sym_key = [0; 16]; + let mut iv = [0; 16]; + let mut input = [0; 32]; + let input_slice = self.address.to_field().to_be_bytes(32); + + for i in 0..16 { + sym_key[i] = full_key[i]; + iv[i] = full_key[i + 16]; + + // We copy address on the following 2 lines in order to avoid having 2 loops + input[i] = input_slice[i]; + input[i + 16] = input_slice[i + 16]; + } + + // @todo Issue(#6172) This encryption is currently using an oracle. It is not actually constrained atm. + aes128_encrypt(input, iv, sym_key) + } +} + +// @todo Issue(#6172) This is to be run as a test. But it is currently using the AES oracle so will fail there. 
+fn test_encrypted_log_header() { + let address = AztecAddress::from_field(0xdeadbeef); + let header = EncryptedLogHeader::new(address); + let secret = GrumpkinPrivateKey::new( + 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06, + 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd + ); + let point = GrumpkinPoint::new( + 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, + 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e + ); + + let ciphertext = header.compute_ciphertext(secret, point); + + let expected_header_ciphertext = [ + 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 179, 36, 250, 95, 56, 167, 171, 16, 195, 164, 223, 57, 75, 5, 24, 119 + ]; + + assert_eq(ciphertext, expected_header_ciphertext); +} diff --git a/noir-projects/aztec-nr/aztec/src/hash.nr b/noir-projects/aztec-nr/aztec/src/hash.nr index db600f4f542c..989ecfb0f314 100644 --- a/noir-projects/aztec-nr/aztec/src/hash.nr +++ b/noir-projects/aztec-nr/aztec/src/hash.nr @@ -12,22 +12,20 @@ pub fn compute_secret_hash(secret: Field) -> Field { pedersen_hash([secret], GENERATOR_INDEX__SECRET_HASH) } -pub fn compute_encrypted_log_hash( - encrypted_log: [Field; M] -) -> Field where [Field; N]: LensForEncryptedLog { +pub fn compute_encrypted_log_hash(encrypted_log: [Field; M]) -> Field where [Field; N]: LensForEncryptedLog { let mut bytes = [0; L]; // Note that bytes.append(encrypted_log[i].to_be_bytes(31)) results in bound error - for i in 0..M-1 { + for i in 0..M - 1 { let to_add = encrypted_log[i].to_be_bytes(31); for j in 0..31 { bytes[i*31 + j] = to_add[j]; } } // can't assign as L - not in scope error for: L-31*(M-1) - let num_bytes = bytes.len() as u32 - 31*(M-1); - let to_add_final = encrypted_log[M-1].to_be_bytes(num_bytes); + let num_bytes = bytes.len() as u32 - 31 * (M - 1); + let to_add_final = encrypted_log[M - 1].to_be_bytes(num_bytes); for j in 0..num_bytes { - bytes[(M-1)*31 + j] = to_add_final[j]; + 
bytes[(M-1)*31 + j] = to_add_final[j]; } sha256_to_field(bytes) } @@ -35,7 +33,7 @@ pub fn compute_encrypted_log_hash( pub fn compute_unencrypted_log_hash( contract_address: AztecAddress, event_selector: Field, - log: T, + log: T ) -> Field where T: ToBytesForUnencryptedLog { let message_bytes: [u8; N] = log.to_be_bytes_arr(); // can't use N - not in scope error @@ -182,7 +180,7 @@ fn compute_enc_log_hash_304() { 0x00b938289e563b0fe01982cd9b8d9e33e3069046768ad01c0fb05e429e7b7909, 0x00fbcc257a3211f705b471eee763b0f43876a2b2178fab6d2b09bd2b7e086584, 0x000000000000008c3289b5793b7448f4d45ecde039d004b6f037cad10b5c2336 - ]; + ]; let hash = compute_encrypted_log_hash(input); assert(hash == 0x001e3c013994947fe28957a876bf1b2c3a69ac69cc92909efd4f2ae9b972f893); } @@ -190,34 +188,34 @@ fn compute_enc_log_hash_304() { #[test] fn compute_enc_log_hash_368() { let input = [ - 0x0000000000000000000000000000000000000000000000000000000000000000, - 0x002190697d2a50e229a7a077e0951073f7d51e46679f10466153c308b63b1ea9, - 0x00543e346facc6799b94514c9d461bcc836c04b083b13c2e4544a39130473c1e, - 0x000df76d59526f8f953bcc7d9f77cdaefd36435931f0d7348f794bc275b42ded, - 0x00a6d390ee1723af7f7ac1ae4fc81a266b2370fe07040a36d06dbe242e02413e, - 0x00acbce15b6af1fbe94bd0f7b70f11768265dff77bfe63398f2a053efdfdf26d, - 0x00b8b131b9f42c689beb095ba4f4a836d4d15c9068d0422e9add6ca82b786329, - 0x00661a6a654b38f0f97d404ef5553e0efea9ed670561ae86685b31bbb2824fac, - 0x00113a6b58edfaec0065b365f66ba8d8aa68254b8690035e8d671a17a843f0a1, - 0x0023f2d2eae8c4449bac8f268a3e62a3faace1fe1401f0efdc8b0ccfbc8fb271, - 0x00cf6603f8c61993dd2f662c719671c61727a2f4e925fb988b23d31feccd77d9, - 0x0000000000a402a84b7294671799c38dd805f6a827a3a12633fdf91a57debe1f - ]; + 0x0000000000000000000000000000000000000000000000000000000000000000, + 0x002190697d2a50e229a7a077e0951073f7d51e46679f10466153c308b63b1ea9, + 0x00543e346facc6799b94514c9d461bcc836c04b083b13c2e4544a39130473c1e, + 0x000df76d59526f8f953bcc7d9f77cdaefd36435931f0d7348f794bc275b42ded, + 
0x00a6d390ee1723af7f7ac1ae4fc81a266b2370fe07040a36d06dbe242e02413e, + 0x00acbce15b6af1fbe94bd0f7b70f11768265dff77bfe63398f2a053efdfdf26d, + 0x00b8b131b9f42c689beb095ba4f4a836d4d15c9068d0422e9add6ca82b786329, + 0x00661a6a654b38f0f97d404ef5553e0efea9ed670561ae86685b31bbb2824fac, + 0x00113a6b58edfaec0065b365f66ba8d8aa68254b8690035e8d671a17a843f0a1, + 0x0023f2d2eae8c4449bac8f268a3e62a3faace1fe1401f0efdc8b0ccfbc8fb271, + 0x00cf6603f8c61993dd2f662c719671c61727a2f4e925fb988b23d31feccd77d9, + 0x0000000000a402a84b7294671799c38dd805f6a827a3a12633fdf91a57debe1f + ]; let hash = compute_encrypted_log_hash(input); - assert(hash == 0x00a0d651ac0cbc01b72430fa6a05d91738595af6e0229347b4c9968223387aeb); + assert(hash == 0x00a0d651ac0cbc01b72430fa6a05d91738595af6e0229347b4c9968223387aeb); } #[test] fn compute_unenc_log_hash_array() { let contract_address = AztecAddress::from_field(0x233a3e0df23b2b15b324194cb4a151f26c0b7333250781d34cc269d85dc334c6); let event_selector = 5; - let log = [ + let log = [ 0x20660de09f35f876e3e69d227b2a35166ad05f09d82d06366ec9b6f65a51fec2, 0x1b52bfe3b8689761916f76dc3d38aa8810860db325cd39ca611eed980091f01c, 0x2e559c4045c378a56ad13b9edb1e8de4e7ad3b3aa35cc7ba9ec77f7a68fa43a4, 0x25d0f689c4a4178a29d59306f2675824d19be6d25e44fa03b03f49c263053dd2, 0x2d513a722d6f352dc0961f156afdc5e31495b9f0e35cb069261a8e55e2df67fd - ]; + ]; let hash = compute_unencrypted_log_hash(contract_address, event_selector, log); assert(hash == 0x00846d6969c8c2f61d39cd2762efcb0abb14f88d59c2675910251ef2bcffe9a7); } diff --git a/noir-projects/aztec-nr/aztec/src/keys.nr b/noir-projects/aztec-nr/aztec/src/keys.nr index 3bef24ec2bff..dd41b77ea5f4 100644 --- a/noir-projects/aztec-nr/aztec/src/keys.nr +++ b/noir-projects/aztec-nr/aztec/src/keys.nr @@ -1,8 +1,7 @@ mod getters; mod point_to_symmetric_key; -use crate::keys::getters::{get_npk_m, get_ivpk_m, -// Commented out as it's currently not enabled in key registry +// Add once enabled in key registry: // get_ovpk_m, // get_tpk_m -}; +use 
crate::keys::getters::{get_npk_m, get_ivpk_m}; diff --git a/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr b/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr index b708d00e8bc1..e2f0edfcd70b 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr @@ -1,10 +1,13 @@ -use dep::protocol_types::{constants::GENERATOR_INDEX__SYMMETRIC_KEY, grumpkin_point::GrumpkinPoint, utils::arr_copy_slice}; -use dep::std::{hash::sha256, grumpkin_scalar::GrumpkinScalar, scalar_mul::variable_base_embedded_curve}; +use dep::protocol_types::{ + constants::GENERATOR_INDEX__SYMMETRIC_KEY, grumpkin_private_key::GrumpkinPrivateKey, + grumpkin_point::GrumpkinPoint, utils::arr_copy_slice +}; +use dep::std::{hash::sha256, embedded_curve_ops::multi_scalar_mul}; // TODO(#5726): This function is called deriveAESSecret in TS. I don't like point_to_symmetric_key name much since // point is not the only input of the function. Unify naming with TS once we have a better name. 
-pub fn point_to_symmetric_key(secret: GrumpkinScalar, point: GrumpkinPoint) -> [u8; 32] { - let shared_secret_fields = variable_base_embedded_curve(point.x, point.y, secret.low, secret.high); +pub fn point_to_symmetric_key(secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 32] { + let shared_secret_fields = multi_scalar_mul([point.x, point.y], [secret.low, secret.high]); // TODO(https://github.com/AztecProtocol/aztec-packages/issues/6061): make the func return Point struct directly let shared_secret = GrumpkinPoint::new(shared_secret_fields[0], shared_secret_fields[1]); let mut shared_secret_bytes_with_separator = [0 as u8; 65]; @@ -16,9 +19,9 @@ pub fn point_to_symmetric_key(secret: GrumpkinScalar, point: GrumpkinPoint) -> [ #[test] fn check_point_to_symmetric_key() { // Value taken from "derive shared secret" test in encrypt_buffer.test.ts - let secret = GrumpkinScalar::new( - 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, - 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 + let secret = GrumpkinPrivateKey::new( + 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06, + 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd ); let point = GrumpkinPoint::new( 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, diff --git a/noir-projects/aztec-nr/aztec/src/lib.nr b/noir-projects/aztec-nr/aztec/src/lib.nr index 5043c7e51358..ce6504e1f743 100644 --- a/noir-projects/aztec-nr/aztec/src/lib.nr +++ b/noir-projects/aztec-nr/aztec/src/lib.nr @@ -10,4 +10,5 @@ mod oracle; mod state_vars; mod prelude; mod public_storage; +mod encrypted_logs; use dep::protocol_types; diff --git a/noir-projects/aztec-nr/aztec/src/note/constants.nr b/noir-projects/aztec-nr/aztec/src/note/constants.nr index 66a404c1b3d7..3b238572e903 100644 --- a/noir-projects/aztec-nr/aztec/src/note/constants.nr +++ b/noir-projects/aztec-nr/aztec/src/note/constants.nr @@ -3,4 +3,4 @@ global MAX_NOTE_FIELDS_LENGTH: 
u64 = 20; // + 2 for EXTRA_DATA: [number_of_return_notes, contract_address] global GET_NOTE_ORACLE_RETURN_LENGTH: u64 = MAX_NOTE_FIELDS_LENGTH + 1 + 2; global MAX_NOTES_PER_PAGE: u64 = 10; -global VIEW_NOTE_ORACLE_RETURN_LENGTH: u64 = MAX_NOTES_PER_PAGE * (MAX_NOTE_FIELDS_LENGTH + 1) + 2; \ No newline at end of file +global VIEW_NOTE_ORACLE_RETURN_LENGTH: u64 = MAX_NOTES_PER_PAGE * (MAX_NOTE_FIELDS_LENGTH + 1) + 2; diff --git a/noir-projects/aztec-nr/aztec/src/note/note_getter.nr b/noir-projects/aztec-nr/aztec/src/note/note_getter.nr index 21fa5ad15306..24f1ba45dd9b 100644 --- a/noir-projects/aztec-nr/aztec/src/note/note_getter.nr +++ b/noir-projects/aztec-nr/aztec/src/note/note_getter.nr @@ -8,7 +8,10 @@ use crate::note::{ }; use crate::oracle; -fn extract_property_value_from_selector(serialized_note: [Field; N], selector: PropertySelector) -> Field { +fn extract_property_value_from_selector( + serialized_note: [Field; N], + selector: PropertySelector +) -> Field { // Selectors use PropertySelectors in order to locate note properties inside the serialized note. // This allows easier packing and custom (de)serialization schemas. A note property is located // inside the serialized note using the index inside the array, a byte offset and a length. 
@@ -26,7 +29,11 @@ fn extract_property_value_from_selector(serialized_note: [Field; N], selector value_field } -fn check_note_header(context: PrivateContext, storage_slot: Field, note: Note) where Note: NoteInterface { +fn check_note_header( + context: PrivateContext, + storage_slot: Field, + note: Note +) where Note: NoteInterface { let header = note.get_header(); let contract_address = context.this_address(); assert(header.contract_address.eq(contract_address)); diff --git a/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr b/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr index c80844324376..3532baec2230 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr @@ -1,4 +1,3 @@ - #[oracle(aes128Encrypt)] pub fn aes128_encrypt_oracle(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8; M] {} diff --git a/noir-projects/aztec-nr/aztec/src/oracle/keys.nr b/noir-projects/aztec-nr/aztec/src/oracle/keys.nr index d8737a0dd06e..a985e385e811 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/keys.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/keys.nr @@ -1,10 +1,4 @@ -use dep::protocol_types::{ - address::{ - AztecAddress, - PartialAddress, - }, - grumpkin_point::GrumpkinPoint, -}; +use dep::protocol_types::{address::{AztecAddress, PartialAddress}, grumpkin_point::GrumpkinPoint}; use crate::hash::poseidon2_hash; @@ -12,17 +6,17 @@ use crate::hash::poseidon2_hash; fn get_public_keys_and_partial_address_oracle(_address: AztecAddress) -> [Field; 9] {} unconstrained fn get_public_keys_and_partial_address_oracle_wrapper(address: AztecAddress) -> [Field; 9] { - get_public_keys_and_partial_address_oracle(address) + get_public_keys_and_partial_address_oracle(address) } fn get_public_keys_and_partial_address(address: AztecAddress) -> ([GrumpkinPoint; 4], PartialAddress) { - let result = get_public_keys_and_partial_address_oracle_wrapper(address); + let result = 
get_public_keys_and_partial_address_oracle_wrapper(address); - let nullifier_pub_key = GrumpkinPoint::new(result[0], result[1]); - let incoming_pub_key = GrumpkinPoint::new(result[2], result[3]); - let outgoing_pub_key = GrumpkinPoint::new(result[4], result[5]); - let tagging_pub_key = GrumpkinPoint::new(result[6], result[7]); - let partial_address = PartialAddress::from_field(result[8]); + let nullifier_pub_key = GrumpkinPoint::new(result[0], result[1]); + let incoming_pub_key = GrumpkinPoint::new(result[2], result[3]); + let outgoing_pub_key = GrumpkinPoint::new(result[4], result[5]); + let tagging_pub_key = GrumpkinPoint::new(result[6], result[7]); + let partial_address = PartialAddress::from_field(result[8]); - ([nullifier_pub_key, incoming_pub_key, outgoing_pub_key, tagging_pub_key], partial_address) -} \ No newline at end of file + ([nullifier_pub_key, incoming_pub_key, outgoing_pub_key, tagging_pub_key], partial_address) +} diff --git a/noir-projects/aztec-nr/aztec/src/oracle/logs.nr b/noir-projects/aztec-nr/aztec/src/oracle/logs.nr index 48df110c32a7..d692329a82f5 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/logs.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/logs.nr @@ -10,7 +10,7 @@ fn emit_encrypted_log_oracle( _note_type_id: Field, _encryption_pub_key: GrumpkinPoint, _preimage: [Field; N], - _counter: u32, + _counter: u32 ) -> [Field; M] {} unconstrained pub fn emit_encrypted_log( diff --git a/noir-projects/aztec-nr/aztec/src/oracle/nullifier_key.nr b/noir-projects/aztec-nr/aztec/src/oracle/nullifier_key.nr index 0926fca65e62..39282a12e2a4 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/nullifier_key.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/nullifier_key.nr @@ -15,7 +15,7 @@ unconstrained fn get_nullifier_keys_internal(account: AztecAddress) -> Nullifier NullifierKeys { account, master_nullifier_public_key: GrumpkinPoint { x: result[0], y: result[1] }, - app_nullifier_secret_key: result[2], + app_nullifier_secret_key: result[2] } 
} diff --git a/noir-projects/aztec-nr/tests/src/mock/test_note.nr b/noir-projects/aztec-nr/tests/src/mock/test_note.nr index bc4f262550f3..17f6d968d369 100644 --- a/noir-projects/aztec-nr/tests/src/mock/test_note.nr +++ b/noir-projects/aztec-nr/tests/src/mock/test_note.nr @@ -1,8 +1,5 @@ use dep::aztec::context::PrivateContext; -use dep::aztec::note::{ - note_header::NoteHeader, - note_interface::NoteInterface, -}; +use dep::aztec::note::{note_header::NoteHeader, note_interface::NoteInterface}; global TEST_NOTE_LENGTH = 1; diff --git a/noir-projects/aztec-nr/value-note/src/value_note.nr b/noir-projects/aztec-nr/value-note/src/value_note.nr index d6597caa3524..019ea4bf543b 100644 --- a/noir-projects/aztec-nr/value-note/src/value_note.nr +++ b/noir-projects/aztec-nr/value-note/src/value_note.nr @@ -1,8 +1,5 @@ use dep::aztec::{ - protocol_types::{ - address::AztecAddress, traits::{Deserialize, Serialize}, - constants::GENERATOR_INDEX__NOTE_NULLIFIER -}, + protocol_types::{address::AztecAddress, traits::{Deserialize, Serialize}, constants::GENERATOR_INDEX__NOTE_NULLIFIER}, note::{note_header::NoteHeader, note_interface::NoteInterface, utils::compute_note_hash_for_consumption}, oracle::{unsafe_rand::unsafe_rand, nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, hash::poseidon2_hash, context::PrivateContext diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index f049473ea572..2bf04c8628c9 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -48,8 +48,10 @@ contract AppSubscription { note.remaining_txs -= 1; storage.subscriptions.at(user_address).replace(&mut note, true); + // docs:start:enqueue_public let gas_limit = storage.gas_token_limit_per_tx.read_private(); 
GasToken::at(storage.gas_token_address.read_private()).pay_fee(gas_limit).enqueue(&mut context); + // docs:end:enqueue_public context.end_setup(); diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr index 18a9123e1d4c..c2543a14707f 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr @@ -1,7 +1,7 @@ use dep::aztec::prelude::{AztecAddress, PrivateContext, NoteHeader, NoteInterface}; use dep::aztec::{ - protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, note::utils::compute_note_hash_for_consumption, - hash::poseidon2_hash, + protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, + note::utils::compute_note_hash_for_consumption, hash::poseidon2_hash, oracle::{nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key} }; diff --git a/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr b/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr index 836e01bb41d9..0de4f7c20937 100644 --- a/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr @@ -33,12 +33,14 @@ contract Auth { // docs:end:shared_mutable_schedule } + // docs:start:public_getter #[aztec(public)] fn get_authorized() -> AztecAddress { // docs:start:shared_mutable_get_current_public storage.authorized.get_current_value_in_public() // docs:end:shared_mutable_get_current_public } + // docs:end:public_getter #[aztec(public)] fn get_scheduled_authorized() -> AztecAddress { diff --git a/noir-projects/noir-contracts/contracts/avm_acvm_interop_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_acvm_interop_test_contract/src/main.nr index bece62fc8dce..fa459a62255a 100644 --- 
a/noir-projects/noir-contracts/contracts/avm_acvm_interop_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_acvm_interop_test_contract/src/main.nr @@ -28,7 +28,7 @@ contract AvmAcvmInteropTest { } #[aztec(public)] - fn new_nullifier_acvm(nullifier: Field) -> pub Field { + fn new_nullifier_acvm(nullifier: Field) { context.push_new_nullifier(nullifier, 0); } diff --git a/noir-projects/noir-contracts/contracts/avm_nested_calls_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_nested_calls_test_contract/src/main.nr index 4a0611968ed3..a21791aaee41 100644 --- a/noir-projects/noir-contracts/contracts/avm_nested_calls_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_nested_calls_test_contract/src/main.nr @@ -26,6 +26,12 @@ contract AvmNestedCallsTest { arg_a + arg_b } + #[aztec(public-vm)] + fn assert_same(arg_a: Field, arg_b: Field) -> pub Field { + assert(arg_a == arg_b, "Values are not equal"); + 1 + } + // Use the standard context interface to emit a new nullifier #[aztec(public-vm)] fn new_nullifier(nullifier: Field) { @@ -40,36 +46,36 @@ contract AvmNestedCallsTest { l2_gas: Field, da_gas: Field ) -> pub Field { - AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).call(&mut context, GasOpts::new(l2_gas, da_gas)) + AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).with_gas(GasOpts::new(l2_gas, da_gas)).call(&mut context) } // Use the `call_public_function` wrapper to initiate a nested call to the add function #[aztec(public-vm)] fn nested_call_to_add(arg_a: Field, arg_b: Field) -> pub Field { - AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).call(&mut context, GasOpts::default()) + AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).call(&mut context) } // Indirectly call_static the external call opcode to initiate a nested call to the add function #[aztec(public-vm)] fn 
nested_static_call_to_add(arg_a: Field, arg_b: Field) -> pub Field { - AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).static_call(&mut context, GasOpts::default()) + AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).static_call(&mut context) } // Indirectly call_static `set_storage_single`. Should revert since it's accessing storage. #[aztec(public-vm)] fn nested_static_call_to_set_storage() { - AvmNestedCallsTest::at(context.this_address()).set_storage_single(20).static_call(&mut context, GasOpts::default()); + AvmNestedCallsTest::at(context.this_address()).set_storage_single(20).static_call(&mut context); } #[aztec(public-vm)] fn create_same_nullifier_in_nested_call(nestedAddress: AztecAddress, nullifier: Field) { context.push_new_nullifier(nullifier, 0); - AvmNestedCallsTest::at(nestedAddress).new_nullifier(nullifier).call(&mut context, GasOpts::default()); + AvmNestedCallsTest::at(nestedAddress).new_nullifier(nullifier).call(&mut context); } #[aztec(public-vm)] fn create_different_nullifier_in_nested_call(nestedAddress: AztecAddress, nullifier: Field) { context.push_new_nullifier(nullifier, 0); - AvmNestedCallsTest::at(nestedAddress).new_nullifier(nullifier + 1).call(&mut context, GasOpts::default()); + AvmNestedCallsTest::at(nestedAddress).new_nullifier(nullifier + 1).call(&mut context); } } diff --git a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr index 03b9f8912dab..c26c4f2551a7 100644 --- a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr @@ -60,10 +60,11 @@ contract AvmTest { } #[aztec(public-vm)] - fn read_storage_single() -> pub Field { + fn read_storage_single() -> Field { storage.single.read() } + // should still be able to use ` -> pub *` for return type even though macro forces `pub` #[aztec(public-vm)] fn 
set_read_storage_single(a: Field) -> pub Field { storage.single.write(a); @@ -76,20 +77,20 @@ contract AvmTest { } #[aztec(public-vm)] - fn read_storage_list() -> pub [Field; 2] { + fn read_storage_list() -> [Field; 2] { let note: Note = storage.list.read(); note.serialize() } #[aztec(public-vm)] - fn set_storage_map(to: AztecAddress, amount: u32) -> pub Field { + fn set_storage_map(to: AztecAddress, amount: u32) -> Field { storage.map.at(to).write(amount); // returns storage slot for key dep::std::hash::pedersen_hash([storage.map.storage_slot, to.to_field()]) } #[aztec(public-vm)] - fn add_storage_map(to: AztecAddress, amount: u32) -> pub Field { + fn add_storage_map(to: AztecAddress, amount: u32) -> Field { let new_balance = storage.map.at(to).read().add(amount); storage.map.at(to).write(new_balance); // returns storage slot for key @@ -97,12 +98,12 @@ contract AvmTest { } #[aztec(public-vm)] - fn read_storage_map(address: AztecAddress) -> pub u32 { + fn read_storage_map(address: AztecAddress) -> u32 { storage.map.at(address).read() } #[aztec(public-vm)] - fn add_args_return(arg_a: Field, arg_b: Field) -> pub Field { + fn add_args_return(arg_a: Field, arg_b: Field) -> Field { arg_a + arg_b } @@ -110,32 +111,32 @@ contract AvmTest { * General Opcodes ************************************************************************/ #[aztec(public-vm)] - fn set_opcode_u8() -> pub u8 { + fn set_opcode_u8() -> u8 { 8 as u8 } #[aztec(public-vm)] - fn set_opcode_u32() -> pub u32 { + fn set_opcode_u32() -> u32 { 1 << 30 as u8 } #[aztec(public-vm)] - fn set_opcode_u64() -> pub u64 { + fn set_opcode_u64() -> u64 { 1 << 60 as u8 } #[aztec(public-vm)] - fn set_opcode_small_field() -> pub Field { + fn set_opcode_small_field() -> Field { big_field_128_bits } #[aztec(public-vm)] - fn set_opcode_big_field() -> pub Field { + fn set_opcode_big_field() -> Field { big_field_136_bits } #[aztec(public-vm)] - fn add_u128(a: U128, b: U128) -> pub U128 { + fn add_u128(a: U128, b: U128) -> U128 { 
a + b } @@ -143,27 +144,27 @@ contract AvmTest { * Hashing functions ************************************************************************/ #[aztec(public-vm)] - fn keccak_hash(data: [u8; 10]) -> pub [u8; 32] { + fn keccak_hash(data: [u8; 10]) -> [u8; 32] { dep::std::hash::keccak256(data, data.len() as u32) } #[aztec(public-vm)] - fn poseidon2_hash(data: [Field; 10]) -> pub Field { + fn poseidon2_hash(data: [Field; 10]) -> Field { dep::std::hash::poseidon2::Poseidon2::hash(data, data.len()) } #[aztec(public-vm)] - fn sha256_hash(data: [u8; 10]) -> pub [u8; 32] { + fn sha256_hash(data: [u8; 10]) -> [u8; 32] { dep::std::hash::sha256(data) } #[aztec(public-vm)] - fn pedersen_hash(data: [Field; 10]) -> pub Field { + fn pedersen_hash(data: [Field; 10]) -> Field { dep::std::hash::pedersen_hash(data) } #[aztec(public-vm)] - fn pedersen_hash_with_index(data: [Field; 10]) -> pub Field { + fn pedersen_hash_with_index(data: [Field; 10]) -> Field { dep::std::hash::pedersen_hash_with_separator(data, /*index=*/ 20) } @@ -193,57 +194,57 @@ contract AvmTest { * AvmContext functions ************************************************************************/ #[aztec(public-vm)] - fn get_address() -> pub AztecAddress { + fn get_address() -> AztecAddress { context.this_address() } #[aztec(public-vm)] - fn get_storage_address() -> pub AztecAddress { + fn get_storage_address() -> AztecAddress { context.storage_address() } #[aztec(public-vm)] - fn get_sender() -> pub AztecAddress { + fn get_sender() -> AztecAddress { context.msg_sender() } #[aztec(public-vm)] - fn get_fee_per_l2_gas() -> pub Field { + fn get_fee_per_l2_gas() -> Field { context.fee_per_l2_gas() } #[aztec(public-vm)] - fn get_fee_per_da_gas() -> pub Field { + fn get_fee_per_da_gas() -> Field { context.fee_per_da_gas() } #[aztec(public-vm)] - fn get_transaction_fee() -> pub Field { + fn get_transaction_fee() -> Field { context.transaction_fee() } #[aztec(public-vm)] - fn get_chain_id() -> pub Field { + fn get_chain_id() -> 
Field { context.chain_id() } #[aztec(public-vm)] - fn get_version() -> pub Field { + fn get_version() -> Field { context.version() } #[aztec(public-vm)] - fn get_block_number() -> pub Field { + fn get_block_number() -> Field { context.block_number() } #[aztec(public-vm)] - fn get_timestamp() -> pub u64 { + fn get_timestamp() -> u64 { context.timestamp() } // #[aztec(public-vm)] - // fn get_contract_call_depth() -> pub Field { + // fn get_contract_call_depth() -> Field { // context.contract_call_depth() // } @@ -255,20 +256,20 @@ contract AvmTest { } #[aztec(public-vm)] - fn get_args_hash(_a: u8, _fields: [Field; 3]) -> pub Field { + fn get_args_hash(_a: u8, _fields: [Field; 3]) -> Field { context.get_args_hash() } #[aztec(public-vm)] fn emit_unencrypted_log() { - context.emit_unencrypted_log(/*event_selector=*/ 5, /*message=*/ [10, 20, 30]); - context.emit_unencrypted_log(/*event_selector=*/ 8, /*message=*/ "Hello, world!"); + context.emit_unencrypted_log_with_selector(/*event_selector=*/ 5, /*message=*/ [10, 20, 30]); + context.emit_unencrypted_log_with_selector(/*event_selector=*/ 8, /*message=*/ "Hello, world!"); let s: CompressedString<2,44> = CompressedString::from_string("A long time ago, in a galaxy far far away..."); - context.emit_unencrypted_log(/*event_selector=*/ 10, /*message=*/ s); + context.emit_unencrypted_log_with_selector(/*event_selector=*/ 10, /*message=*/ s); } #[aztec(public-vm)] - fn note_hash_exists(note_hash: Field, leaf_index: Field) -> pub bool { + fn note_hash_exists(note_hash: Field, leaf_index: Field) -> bool { context.note_hash_exists(note_hash, leaf_index) } @@ -286,7 +287,7 @@ contract AvmTest { // Use the standard context interface to check for a nullifier #[aztec(public-vm)] - fn nullifier_exists(nullifier: Field) -> pub bool { + fn nullifier_exists(nullifier: Field) -> bool { context.nullifier_exists(nullifier, context.this_address()) } @@ -312,7 +313,7 @@ contract AvmTest { } #[aztec(public-vm)] - fn 
l1_to_l2_msg_exists(msg_hash: Field, msg_leaf_index: Field) -> pub bool { + fn l1_to_l2_msg_exists(msg_hash: Field, msg_leaf_index: Field) -> bool { context.l1_to_l2_msg_exists(msg_hash, msg_leaf_index) } diff --git a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr index 7ae2edbda8b0..c43ba634b10f 100644 --- a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr +++ b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr @@ -1,12 +1,9 @@ use dep::aztec::prelude::{AztecAddress, FunctionSelector, PrivateContext, NoteHeader, NoteGetterOptions, NoteViewerOptions}; use dep::aztec::{ - protocol_types::{ - traits::{ToField, Serialize, FromField}, - constants::MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, -}, + protocol_types::{traits::{ToField, Serialize, FromField}, constants::MAX_NOTE_HASH_READ_REQUESTS_PER_CALL}, context::{PublicContext, Context}, note::note_getter::view_notes, state_vars::PrivateSet, - note::constants::MAX_NOTES_PER_PAGE, + note::constants::MAX_NOTES_PER_PAGE }; use dep::std; use dep::std::{option::Option}; diff --git a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr index c2b88b4726b2..d863ecaaaebf 100644 --- a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr @@ -26,6 +26,7 @@ contract Counter { // docs:start:increment #[aztec(private)] fn increment(owner: AztecAddress) { + dep::aztec::oracle::debug_log::debug_log_format("Incrementing counter for owner {0}", [owner.to_field()]); let counters = storage.counters; counters.at(owner).add(1, owner); } diff --git a/noir-projects/noir-contracts/contracts/docs_example_contract/src/types/card_note.nr b/noir-projects/noir-contracts/contracts/docs_example_contract/src/types/card_note.nr index 
684314291fb4..3f952146c2bf 100644 --- a/noir-projects/noir-contracts/contracts/docs_example_contract/src/types/card_note.nr +++ b/noir-projects/noir-contracts/contracts/docs_example_contract/src/types/card_note.nr @@ -2,7 +2,7 @@ use dep::aztec::prelude::{AztecAddress, NoteInterface, NoteHeader, PrivateContex use dep::aztec::{ note::{utils::compute_note_hash_for_consumption}, oracle::{nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, - hash::poseidon2_hash, protocol_types::{traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER}, + hash::poseidon2_hash, protocol_types::{traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER} }; // Shows how to create a custom note diff --git a/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/ecdsa_public_key_note.nr b/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/ecdsa_public_key_note.nr index 6804c0f483a1..20fd400e9679 100644 --- a/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/ecdsa_public_key_note.nr +++ b/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/ecdsa_public_key_note.nr @@ -1,11 +1,9 @@ -use dep::aztec::prelude::{ - AztecAddress, FunctionSelector, NoteHeader, NoteInterface, NoteGetterOptions, PrivateContext -}; +use dep::aztec::prelude::{AztecAddress, FunctionSelector, NoteHeader, NoteInterface, NoteGetterOptions, PrivateContext}; use dep::aztec::{ note::utils::compute_note_hash_for_consumption, oracle::{nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, - hash::poseidon2_hash, protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, + hash::poseidon2_hash, protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER }; global ECDSA_PUBLIC_KEY_NOTE_LEN: Field = 5; diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index f7636711d0e3..c877e8c7ff07 100644 --- 
a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -40,7 +40,9 @@ contract FPC { #[aztec(public)] #[aztec(internal)] fn pay_fee(refund_address: AztecAddress, amount: Field, asset: AztecAddress) { + // docs:start:public_call let refund = GasToken::at(storage.gas_token_address.read_public()).pay_fee(amount).call(&mut context); + // docs:end:public_call // Just do public refunds for the present Token::at(asset).transfer_public(context.this_address(), refund_address, refund, 0).call(&mut context); } diff --git a/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr index 3b46f9b53fad..7ba446c54ed9 100644 --- a/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr @@ -55,10 +55,11 @@ contract GasToken { fn pay_fee(fee_limit: Field) -> Field { let fee_limit_u128 = U128::from_integer(fee_limit); let fee = U128::from_integer(calculate_fee(context)); - dep::aztec::oracle::debug_log::debug_log_format( - "Gas token: paying fee {0} (limit {1})", - [fee.to_field(), fee_limit] - ); + // TODO(6252): implement debug logging in AVM + //dep::aztec::oracle::debug_log::debug_log_format( + // "Gas token: paying fee {0} (limit {1})", + // [fee.to_field(), fee_limit] + //); assert(fee <= fee_limit_u128, "Fee too high"); let sender_new_balance = storage.balances.at(context.msg_sender()).read() - fee; diff --git a/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr b/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr index f8e62b1e4d71..b985c829d269 100644 --- a/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr @@ -3,9 +3,7 @@ contract KeyRegistry { use dep::aztec::{ 
state_vars::{SharedMutable, Map}, - protocol_types::{ - grumpkin_point::GrumpkinPoint, address::{AztecAddress, PartialAddress}, - } + protocol_types::{grumpkin_point::GrumpkinPoint, address::{AztecAddress, PartialAddress}} }; global KEY_ROTATION_DELAY = 5; diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr index 80d693340c6c..909f0417849e 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr @@ -236,7 +236,9 @@ contract Lending { stable_coin: AztecAddress ) { let on_behalf_of = compute_identifier(secret, on_behalf_of, context.msg_sender().to_field()); + // docs:start:private_call let _ = Token::at(stable_coin).burn(from, amount, nonce).call(&mut context); + // docs:end:private_call let _ = Lending::at(context.this_address())._repay(AztecAddress::from_field(on_behalf_of), amount, stable_coin).enqueue(&mut context); } diff --git a/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr b/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr index f49828140bfd..9cce1d75274f 100644 --- a/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr @@ -4,9 +4,7 @@ // be read/nullified before their creation etc. 
contract PendingNoteHashes { // Libs - use dep::aztec::prelude::{ - AztecAddress, FunctionSelector, NoteHeader, NoteGetterOptions, PrivateContext, Map, PrivateSet - }; + use dep::aztec::prelude::{AztecAddress, FunctionSelector, NoteHeader, NoteGetterOptions, PrivateContext, Map, PrivateSet}; use dep::value_note::{balance_utils, filter::filter_notes_min_sum, value_note::{VALUE_NOTE_LEN, ValueNote}}; use dep::aztec::context::{PublicContext, Context}; diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/public_key_note.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/public_key_note.nr index c7061cac9980..95fbe422f78e 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/public_key_note.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/public_key_note.nr @@ -2,7 +2,7 @@ use dep::aztec::prelude::{AztecAddress, NoteHeader, NoteInterface, PrivateContex use dep::aztec::{ note::utils::compute_note_hash_for_consumption, hash::poseidon2_hash, oracle::{nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, - protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, + protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER }; global PUBLIC_KEY_NOTE_LEN: Field = 3; diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 1a92cce5d600..5ab03eabf18a 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -10,10 +10,14 @@ contract Test { use dep::aztec::protocol_types::{ abis::private_circuit_public_inputs::PrivateCircuitPublicInputs, - constants::{MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, CANONICAL_KEY_REGISTRY_ADDRESS}, traits::{Serialize, ToField, FromField}, - grumpkin_point::GrumpkinPoint + constants::{MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, 
CANONICAL_KEY_REGISTRY_ADDRESS}, + traits::{Serialize, ToField, FromField}, grumpkin_point::GrumpkinPoint, + grumpkin_private_key::GrumpkinPrivateKey }; + use dep::aztec::encrypted_logs::header::EncryptedLogHeader; + use dep::aztec::encrypted_logs::body::EncryptedLogBody; + use dep::aztec::note::constants::MAX_NOTES_PER_PAGE; use dep::aztec::state_vars::{shared_mutable::SharedMutablePrivateGetter, map::derive_storage_slot_in_map}; @@ -342,6 +346,23 @@ contract Test { aes128_encrypt(input, iv, key) } + #[aztec(private)] + fn compute_note_header_ciphertext(secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 32] { + EncryptedLogHeader::new(context.this_address()).compute_ciphertext(secret, point) + } + + // 64 bytes + 32 * #fields = 96 bytes + #[aztec(private)] + fn compute_note_body_ciphertext( + secret: GrumpkinPrivateKey, + point: GrumpkinPoint, + storage_slot: Field, + value: Field + ) -> [u8; 96] { + let note = TestNote::new(value); + EncryptedLogBody::new(storage_slot, TestNote::get_note_type_id(), note).compute_ciphertext(secret, point) + } + #[aztec(public)] fn assert_public_global_vars( chain_id: Field, @@ -368,10 +389,11 @@ contract Test { assert(context.historical_header.hash() == header_hash, "Invalid header hash"); } - #[aztec(public)] - fn assert_header_public(header_hash: Field) { - assert(context.historical_header.hash() == header_hash, "Invalid header hash"); - } + // TODO(4840): add AVM opcodes for getting header (members) + //#[aztec(public)] + //fn assert_header_public(header_hash: Field) { + // assert(context.historical_header.hash() == header_hash, "Invalid header hash"); + //} #[aztec(private)] fn deploy_contract(target: AztecAddress) { @@ -424,15 +446,16 @@ contract Test { let derived_slot = derive_storage_slot_in_map(storage_slot_of_shared_mutable, address_to_get_in_registry); // It's a bit wonky because we need to know the delay for get_current_value_in_private to work correctly - let registry_private_getter: SharedMutablePrivateGetter 
= SharedMutablePrivateGetter::new(context, AztecAddress::from_field(CANONICAL_KEY_REGISTRY_ADDRESS), derived_slot); + let registry_private_getter: SharedMutablePrivateGetter = SharedMutablePrivateGetter::new( + context, + AztecAddress::from_field(CANONICAL_KEY_REGISTRY_ADDRESS), + derived_slot + ); registry_private_getter.get_current_value_in_private() } #[aztec(private)] - fn test_nullifier_key_freshness( - address: AztecAddress, - public_nullifying_key: GrumpkinPoint, - ) { + fn test_nullifier_key_freshness(address: AztecAddress, public_nullifying_key: GrumpkinPoint) { assert_eq(get_npk_m(&mut context, address), public_nullifying_key); } diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/types/transparent_note.nr b/noir-projects/noir-contracts/contracts/token_contract/src/types/transparent_note.nr index 9fc1d0737fc5..d5cf7197cef6 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/types/transparent_note.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/types/transparent_note.nr @@ -2,7 +2,7 @@ use dep::aztec::{ note::{note_getter_options::PropertySelector, utils::compute_note_hash_for_consumption}, hash::poseidon2_hash, prelude::{NoteHeader, NoteInterface, PrivateContext}, - protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, + protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER }; global TRANSPARENT_NOTE_LEN: Field = 2; diff --git a/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr b/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr index 9ab011d9e0e4..afacb2205685 100644 --- a/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr @@ -223,7 +223,7 @@ contract Uniswap { token.eq(TokenBridge::at(token_bridge).get_token().static_call(&mut context)), "input_asset address is not the same as seen in the bridge contract" ); } - + // /// Unconstrained /// // this method exists 
solely for e2e tests to test that nonce gets incremented each time. @@ -231,5 +231,4 @@ contract Uniswap { storage.nonce_for_burn_approval.read() } // docs:end:assert_token_is_same - } diff --git a/noir-projects/noir-contracts/scripts/transpile.sh b/noir-projects/noir-contracts/scripts/transpile.sh index 9bea61f5ffaa..934f8982d557 100755 --- a/noir-projects/noir-contracts/scripts/transpile.sh +++ b/noir-projects/noir-contracts/scripts/transpile.sh @@ -2,4 +2,5 @@ set -eu TRANSPILER=${TRANSPILER:-../../avm-transpiler/target/release/avm-transpiler} -ls target/avm_*.json | parallel "$TRANSPILER {} {}" \ No newline at end of file +ls target/*.json | parallel "$TRANSPILER {} {}" + diff --git a/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr b/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr index 6f704d260f0f..7356387215ff 100644 --- a/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr @@ -1,6 +1,6 @@ use dep::parity_lib::{RootParityInputs, ParityPublicInputs}; #[recursive] -fn main(inputs: RootParityInputs) -> pub ParityPublicInputs { +fn main(inputs: RootParityInputs) -> pub ParityPublicInputs { inputs.root_parity_circuit() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr index 618741fd50b1..940e0230db25 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr @@ -142,7 +142,9 @@ fn contract_logic(private_call: PrivateCallData) { } pub fn validate_previous_kernel_values(end: PrivateAccumulatedData) { - assert(end.new_nullifiers[0].value != 0, "The 0th nullifier in the accumulated nullifier array is zero"); + assert( + end.new_nullifiers[0].value() != 0, "The 0th nullifier in the accumulated nullifier array 
is zero" + ); } pub fn validate_call_against_request(private_call: PrivateCallData, request: CallRequest) { diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr index fee2d1293acb..5abc9c8f4f4a 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr @@ -3,13 +3,16 @@ use dep::types::{ abis::{ kernel_data::PrivateKernelData, kernel_circuit_public_inputs::{KernelCircuitPublicInputs, PrivateKernelCircuitPublicInputsBuilder, PublicKernelCircuitPublicInputs}, - note_hash::NoteHashContext, nullifier::Nullifier, side_effect::{SideEffect, Ordered}, gas::Gas + note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::{SideEffect, Ordered}, gas::Gas }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX }, - hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash}, + hash::{ + compute_l2_to_l1_hash, compute_note_hash_nonce, compute_unique_siloed_note_hash, silo_note_hash, + silo_nullifier +}, utils::arrays::{array_length, array_to_bounded_vec, assert_sorted_array} }; @@ -24,14 +27,14 @@ struct KernelCircuitPublicInputsComposer { public_inputs: PrivateKernelCircuitPublicInputsBuilder, previous_kernel: PrivateKernelData, // Final data - note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], // Hints transient_nullifier_indexes_for_note_hashes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], transient_note_hash_indexes_for_nullifiers: [u64; MAX_NEW_NULLIFIERS_PER_TX], - 
sorted_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + sorted_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], sorted_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], - sorted_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + sorted_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], sorted_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], @@ -42,13 +45,13 @@ struct KernelCircuitPublicInputsComposer { impl KernelCircuitPublicInputsComposer { pub fn new( previous_kernel: PrivateKernelData, - note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], transient_nullifier_indexes_for_note_hashes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], transient_note_hash_indexes_for_nullifiers: [u64; MAX_NEW_NULLIFIERS_PER_TX], - sorted_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + sorted_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], sorted_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], - sorted_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + sorted_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], sorted_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], @@ -122,20 +125,50 @@ impl KernelCircuitPublicInputsComposer { fn silo_values(&mut self) { self.silo_note_hashes(); - // TODO: Move siloing from init/inner circuits to here. 
+ self.silo_nullifiers(); + self.silo_l2_to_l1_messages(); } fn silo_note_hashes(&mut self) { - let first_nullifier = self.public_inputs.end.new_nullifiers.get_unchecked(0); - assert(first_nullifier.value != 0, "The 0th nullifier in the accumulated nullifier array is zero"); + let first_nullifier = self.public_inputs.end.new_nullifiers.get_unchecked(0).value(); + assert(first_nullifier != 0, "The 0th nullifier in the accumulated nullifier array is zero"); let note_hashes = self.public_inputs.end.new_note_hashes.storage; for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { let note_hash = note_hashes[i]; - if note_hash.value != 0 { - let nonce = compute_note_hash_nonce(first_nullifier.value, i); - let unique_note_hash = compute_unique_siloed_note_hash(nonce, note_hash.value); - self.public_inputs.end.new_note_hashes.storage[i].value = unique_note_hash; + if note_hash.value() != 0 { + let siloed = silo_note_hash(note_hash.contract_address, note_hash.value()); + let nonce = compute_note_hash_nonce(first_nullifier, i); + let unique_note_hash = compute_unique_siloed_note_hash(nonce, siloed); + self.public_inputs.end.new_note_hashes.storage[i].note_hash.value = unique_note_hash; + } + } + } + + fn silo_nullifiers(&mut self) { + let nullifiers = self.public_inputs.end.new_nullifiers.storage; + for i in 1..MAX_NEW_NOTE_HASHES_PER_TX { // i starts from 1 to skip the first nullifier. 
+ let nullifier = nullifiers[i]; + if nullifier.value() != 0 { + let siloed = silo_nullifier(nullifier.contract_address, nullifier.value()); + self.public_inputs.end.new_nullifiers.storage[i].nullifier.value = siloed; + } + } + } + + fn silo_l2_to_l1_messages(&mut self) { + let l2_to_l1_msgs = self.public_inputs.end.new_l2_to_l1_msgs.storage; + let tx_context = self.previous_kernel.public_inputs.constants.tx_context; + for i in 0..l2_to_l1_msgs.len() { + let msg = l2_to_l1_msgs[i]; + if !msg.contract_address.is_zero() { + let siloed = compute_l2_to_l1_hash( + msg.contract_address, + tx_context.version, + tx_context.chain_id, + msg.message + ); + self.public_inputs.end.new_l2_to_l1_msgs.storage[i].message.content = siloed; } } } @@ -209,7 +242,7 @@ impl KernelCircuitPublicInputsComposer { assert(self.note_hashes[i].nullifier_counter == 0, "Unresolved transient note hash"); } for i in 0..self.nullifiers.len() { - assert(self.nullifiers[i].note_hash == 0, "Unresolved transient nullifier"); + assert(self.nullifiers[i].nullified_note_hash() == 0, "Unresolved transient nullifier"); } self.public_inputs.end.new_note_hashes = array_to_bounded_vec(self.note_hashes); diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr index f50048579aa1..dda3224b07d0 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr @@ -10,8 +10,7 @@ use dep::types::{ MAX_NEW_NOTE_HASHES_PER_CALL, MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL }, - hash::{compute_l2_to_l1_hash, silo_note_hash, silo_nullifier}, traits::is_empty, - 
transaction::tx_request::TxRequest, utils::arrays::array_to_bounded_vec + traits::is_empty, transaction::tx_request::TxRequest, utils::arrays::array_to_bounded_vec }; struct DataSource { @@ -38,7 +37,7 @@ impl PrivateKernelCircuitPublicInputsComposer { public_inputs.min_revertible_side_effect_counter = private_call_public_inputs.min_revertible_side_effect_counter; // Since it's the first iteration, we need to push the the tx hash nullifier into the `new_nullifiers` array - public_inputs.end.new_nullifiers.push(Nullifier { value: tx_request.hash(), note_hash: 0, counter: 0 }); + public_inputs.end.new_nullifiers.push(Nullifier { value: tx_request.hash(), note_hash: 0, counter: 0 }.scope(AztecAddress::zero())); // Note that we do not need to nullify the transaction request nonce anymore. // Should an account want to additionally use nonces for replay protection or handling cancellations, // they will be able to do so in the account contract logic: @@ -118,7 +117,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..read_requests.len() { let request = read_requests[i]; if !is_empty(request) { - self.public_inputs.validation_requests.note_hash_read_requests.push(request.to_context(source.storage_contract_address)); + self.public_inputs.validation_requests.note_hash_read_requests.push(request.scope(source.storage_contract_address)); } } } @@ -128,7 +127,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..nullifier_read_requests.len() { let request = nullifier_read_requests[i]; if !is_empty(request) { - self.public_inputs.validation_requests.nullifier_read_requests.push(request.to_context(source.storage_contract_address)); + self.public_inputs.validation_requests.nullifier_read_requests.push(request.scope(source.storage_contract_address)); } } } @@ -138,7 +137,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..nullifier_key_validation_requests.len() { let request = nullifier_key_validation_requests[i]; if !is_empty(request) { - 
self.public_inputs.validation_requests.nullifier_key_validation_requests.push(request.to_context(source.storage_contract_address)); + self.public_inputs.validation_requests.nullifier_key_validation_requests.push(request.scope(source.storage_contract_address)); } } } @@ -150,12 +149,9 @@ impl PrivateKernelCircuitPublicInputsComposer { if note_hash.value != 0 { let nullifier_counter = source.note_hash_nullifier_counters[i]; assert( - (nullifier_counter == 0) | (nullifier_counter > note_hash.counter), "invalid nullifier counter" + (nullifier_counter == 0) | (nullifier_counter > note_hash.counter), "Invalid nullifier counter" ); - - // TODO: Silo values in the tail circuit. - note_hash.value = silo_note_hash(source.storage_contract_address, note_hash.value); - self.public_inputs.end.new_note_hashes.push(note_hash.to_context(nullifier_counter)); + self.public_inputs.end.new_note_hashes.push(note_hash.scope(nullifier_counter, source.storage_contract_address)); } } } @@ -165,18 +161,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..nullifiers.len() { let nullifier = nullifiers[i]; if nullifier.value != 0 { - let siloed_note_hash = if nullifier.note_hash == 0 { - 0 - } else { - silo_note_hash(source.storage_contract_address, nullifier.note_hash) - }; - self.public_inputs.end.new_nullifiers.push( - Nullifier { - value: silo_nullifier(source.storage_contract_address, nullifier.value), - counter: nullifier.counter, - note_hash: siloed_note_hash - } - ); + self.public_inputs.end.new_nullifiers.push(nullifier.scope(source.storage_contract_address)); } } } @@ -186,13 +171,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..l2_to_l1_msgs.len() { let msg = l2_to_l1_msgs[i]; if !is_empty(msg) { - let hash = compute_l2_to_l1_hash( - source.storage_contract_address, - source.private_call_public_inputs.tx_context.version, - source.private_call_public_inputs.tx_context.chain_id, - msg - ); - self.public_inputs.end.new_l2_to_l1_msgs.push(hash); + 
self.public_inputs.end.new_l2_to_l1_msgs.push(msg.scope(source.storage_contract_address)); } } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr index 769e5021dff4..64a08cdc7b11 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr @@ -150,7 +150,7 @@ mod tests { // Check the first nullifier is hash of the signed tx request let tx_hash = builder.tx_request.hash(); - assert_eq(public_inputs.end.new_nullifiers[0].value, tx_hash); + assert_eq(public_inputs.end.new_nullifiers[0].value(), tx_hash); // Log preimages length should increase by `(un)encrypted_log_preimages_length` from private input assert_eq( @@ -232,7 +232,7 @@ mod tests { builder.private_call.public_inputs.new_l2_to_l1_msgs.extend_from_array( [ L2ToL1Message::empty(), - L2ToL1Message { recipient: EthAddress::from_field(6), content: 9123 } + L2ToL1Message { recipient: EthAddress::from_field(6), content: 9123, counter: 0 } ] ); @@ -368,15 +368,13 @@ mod tests { let end_note_hash_read_requests = public_inputs.validation_requests.note_hash_read_requests; assert_eq(array_length(end_note_hash_read_requests), 2); - let request_context = end_note_hash_read_requests[0]; - assert_eq(request_context.value, request_0.value); - assert_eq(request_context.counter, request_0.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_note_hash_read_requests[0]; + assert_eq(request.read_request, request_0); + assert_eq(request.contract_address, storage_contract_address); - let request_context = end_note_hash_read_requests[1]; - assert_eq(request_context.value, request_1.value); - assert_eq(request_context.counter, request_1.counter); - assert_eq(request_context.contract_address, 
storage_contract_address); + let request = end_note_hash_read_requests[1]; + assert_eq(request.read_request, request_1); + assert_eq(request.contract_address, storage_contract_address); } #[test] @@ -394,33 +392,30 @@ mod tests { let end_nullifier_read_requests = public_inputs.validation_requests.nullifier_read_requests; assert_eq(array_length(end_nullifier_read_requests), 2); - let request_context = end_nullifier_read_requests[0]; - assert_eq(request_context.value, request_0.value); - assert_eq(request_context.counter, request_0.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_nullifier_read_requests[0]; + assert_eq(request.read_request, request_0); + assert_eq(request.contract_address, storage_contract_address); - let request_context = end_nullifier_read_requests[1]; - assert_eq(request_context.value, request_1.value); - assert_eq(request_context.counter, request_1.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_nullifier_read_requests[1]; + assert_eq(request.read_request, request_1); + assert_eq(request.contract_address, storage_contract_address); } #[test] fn propagate_nullifier_key_validation_requests() { let mut builder = PrivateKernelInitInputsBuilder::new(); - let request = NullifierKeyValidationRequest { master_nullifier_public_key: GrumpkinPoint { x: 1, y: 2 }, app_nullifier_secret_key: 3 }; - builder.private_call.public_inputs.nullifier_key_validation_requests.push(request); + let request_0 = NullifierKeyValidationRequest { master_nullifier_public_key: GrumpkinPoint { x: 1, y: 2 }, app_nullifier_secret_key: 3 }; + builder.private_call.public_inputs.nullifier_key_validation_requests.push(request_0); let public_inputs = builder.execute(); assert_eq(array_length(public_inputs.validation_requests.nullifier_key_validation_requests), 1); - let request_context = public_inputs.validation_requests.nullifier_key_validation_requests[0]; - 
assert_eq(request_context.master_nullifier_public_key, request.master_nullifier_public_key); - assert_eq(request_context.app_nullifier_secret_key, request.app_nullifier_secret_key); + let request = public_inputs.validation_requests.nullifier_key_validation_requests[0]; + assert_eq(request.request, request_0); assert_eq( - request_context.contract_address, builder.private_call.public_inputs.call_context.storage_contract_address + request.contract_address, builder.private_call.public_inputs.call_context.storage_contract_address ); } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr index e7671e53d0b3..6a291bafbfab 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr @@ -451,7 +451,7 @@ mod tests { builder.private_call.public_inputs.new_l2_to_l1_msgs.extend_from_array( [ L2ToL1Message::empty(), - L2ToL1Message { recipient: EthAddress::from_field(6), content: 888 } + L2ToL1Message { recipient: EthAddress::from_field(6), content: 888, counter: 0 } ] ); @@ -504,7 +504,7 @@ mod tests { assert_eq(public_inputs.end.new_note_hashes[2].nullifier_counter, 20); } - #[test(should_fail_with="invalid nullifier counter")] + #[test(should_fail_with="Invalid nullifier counter")] fn propagate_note_hashes_with_incorrect_nullifier_counters_fails() { let mut builder = PrivateKernelInnerInputsBuilder::new(); builder.private_call.public_inputs.new_note_hashes.push(NoteHash { value: 12, counter: 3 }); @@ -571,15 +571,13 @@ mod tests { assert_eq(end_note_hash_read_requests[0], prev_requests.storage[0]); assert_eq(end_note_hash_read_requests[1], prev_requests.storage[1]); - let request_context = end_note_hash_read_requests[2]; - assert_eq(request_context.value, cur_requests[0].value); - 
assert_eq(request_context.counter, cur_requests[0].counter); - assert_eq(request_context.contract_address, cur_storage_contract_address); + let request = end_note_hash_read_requests[2]; + assert_eq(request.read_request, cur_requests[0]); + assert_eq(request.contract_address, cur_storage_contract_address); - let request_context = end_note_hash_read_requests[3]; - assert_eq(request_context.value, cur_requests[1].value); - assert_eq(request_context.counter, cur_requests[1].counter); - assert_eq(request_context.contract_address, cur_storage_contract_address); + let request = end_note_hash_read_requests[3]; + assert_eq(request.read_request, cur_requests[1]); + assert_eq(request.contract_address, cur_storage_contract_address); } #[test] diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr index f38d494395b9..2471caad0bea 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr @@ -3,7 +3,7 @@ use dep::reset_kernel_lib::{NoteHashReadRequestHints, NullifierReadRequestHints, use dep::types::{ abis::{ kernel_data::PrivateKernelData, kernel_circuit_public_inputs::KernelCircuitPublicInputs, - note_hash::NoteHashContext, nullifier::Nullifier, side_effect::SideEffect + note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, @@ -14,8 +14,8 @@ use dep::types::{ // Can just be KernelCircuitPublicInputs. 
struct PrivateKernelTailOutputs { - note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], } struct PrivateKernelTailHints { @@ -24,9 +24,9 @@ struct PrivateKernelTailHints { note_hash_read_request_hints: NoteHashReadRequestHints, nullifier_read_request_hints: NullifierReadRequestHints, master_nullifier_secret_keys: [GrumpkinPrivateKey; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], - sorted_new_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + sorted_new_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], sorted_new_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], - sorted_new_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + sorted_new_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], sorted_new_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], @@ -49,7 +49,7 @@ impl PrivateKernelTailCircuitPrivateInputs { let note_hash_tree_root = previous_public_inputs.constants.historical_header.state.partial.note_hash_tree.root; let nullifier_tree_root = previous_public_inputs.constants.historical_header.state.partial.nullifier_tree.root; - let request_processor = PrivateValidationRequestProcessor { + PrivateValidationRequestProcessor { validation_requests: previous_public_inputs.validation_requests, note_hash_read_request_hints: self.hints.note_hash_read_request_hints, pending_note_hashes: previous_public_inputs.end.new_note_hashes, @@ -58,10 +58,9 @@ impl PrivateKernelTailCircuitPrivateInputs { pending_nullifiers: previous_public_inputs.end.new_nullifiers, nullifier_tree_root, master_nullifier_secret_keys: self.hints.master_nullifier_secret_keys - }; - request_processor.validate(); + }.validate(); - let mut composer = 
KernelCircuitPublicInputsComposer::new( + KernelCircuitPublicInputsComposer::new( self.previous_kernel, self.outputs.note_hashes, self.outputs.nullifiers, @@ -75,8 +74,7 @@ impl PrivateKernelTailCircuitPrivateInputs { self.hints.sorted_encrypted_log_hashes_indexes, self.hints.sorted_unencrypted_log_hashes, self.hints.sorted_unencrypted_log_hashes_indexes - ); - composer.compose().finish() + ).compose().finish() } } @@ -99,10 +97,10 @@ mod tests { use dep::types::{ abis::{ kernel_circuit_public_inputs::KernelCircuitPublicInputs, max_block_number::MaxBlockNumber, - note_hash::NoteHashContext, nullifier::Nullifier, side_effect::{SideEffect, Ordered}, gas::Gas + note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, side_effect::SideEffect, gas::Gas }, grumpkin_private_key::GrumpkinPrivateKey, - hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash, sha256_to_field}, + hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash, sha256_to_field, silo_note_hash, silo_nullifier}, tests::{fixture_builder::FixtureBuilder, sort::sort_get_sorted_hints}, utils::{arrays::{array_eq, array_length}}, traits::{Empty, is_empty, is_empty_array} }; @@ -134,21 +132,32 @@ mod tests { // A helper function that uses the first nullifer in the previous kernel to compute the unique siloed // note_hashes for the given note_hashes. 
- pub fn compute_unique_siloed_note_hashes( + pub fn compute_output_note_hashes( self, - note_hashes: [NoteHashContext; N] + note_hashes: [ScopedNoteHash; N] ) -> [Field; N] { let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0); let mut unique_siloed_note_hashes = [0; N]; for i in 0..N { - if note_hashes[i].value != 0 { - let nonce = compute_note_hash_nonce(first_nullifier.value, i); - unique_siloed_note_hashes[i] = compute_unique_siloed_note_hash(nonce, note_hashes[i].value); + let note_hash = note_hashes[i]; + if note_hash.value() != 0 { + let siloed = silo_note_hash(note_hash.contract_address, note_hash.value()); + let nonce = compute_note_hash_nonce(first_nullifier.value(), i); + unique_siloed_note_hashes[i] = compute_unique_siloed_note_hash(nonce, siloed); } } unique_siloed_note_hashes } + pub fn compute_output_nullifiers(_self: Self, nullifiers: [ScopedNullifier; N]) -> [Field; N] { + let mut output = [0; N]; + output[0] = nullifiers[0].value(); + for i in 1..N { + output[i] = silo_nullifier(nullifiers[i].contract_address, nullifiers[i].value()); + } + output + } + pub fn add_pending_note_hash_read_request(&mut self, note_hash_index: u64) { let read_request_index = self.previous_kernel.add_read_request_for_pending_note_hash(note_hash_index); let hint_index = self.note_hash_read_request_hints_builder.pending_read_hints.len(); @@ -167,8 +176,8 @@ mod tests { } pub fn nullify_pending_note_hash(&mut self, nullifier_index: u64, note_hash_index: u64) { - self.previous_kernel.new_note_hashes.storage[note_hash_index].nullifier_counter = self.previous_kernel.new_nullifiers.get(nullifier_index).counter; - self.previous_kernel.new_nullifiers.storage[nullifier_index].note_hash = self.previous_kernel.new_note_hashes.get(note_hash_index).value; + self.previous_kernel.new_note_hashes.storage[note_hash_index].nullifier_counter = self.previous_kernel.new_nullifiers.get(nullifier_index).counter(); + 
self.previous_kernel.new_nullifiers.storage[nullifier_index].nullifier.note_hash = self.previous_kernel.new_note_hashes.get(note_hash_index).note_hash.value; self.transient_nullifier_indexes_for_note_hashes[note_hash_index] = nullifier_index; self.transient_note_hash_indexes_for_nullifiers[nullifier_index] = note_hash_index; } @@ -176,14 +185,14 @@ mod tests { pub fn execute(&mut self) -> KernelCircuitPublicInputs { let sorted = sort_get_sorted_hints( self.previous_kernel.new_note_hashes.storage, - |a: NoteHashContext, b: NoteHashContext| a.counter < b.counter + |a: ScopedNoteHash, b: ScopedNoteHash| a.counter() < b.counter() ); let sorted_new_note_hashes = sorted.sorted_array; let sorted_new_note_hashes_indexes = sorted.sorted_index_hints; let sorted = sort_get_sorted_hints( self.previous_kernel.new_nullifiers.storage, - |a: Nullifier, b: Nullifier| a.counter < b.counter + |a: ScopedNullifier, b: ScopedNullifier| a.counter() < b.counter() ); let sorted_new_nullifiers = sorted.sorted_array; let sorted_new_nullifiers_indexes = sorted.sorted_index_hints; @@ -379,7 +388,7 @@ mod tests { builder.add_pending_nullifier_read_request(1); let nullifier_being_read = builder.previous_kernel.new_nullifiers.storage[2]; let mut read_request = builder.previous_kernel.nullifier_read_requests.pop(); - read_request.counter = nullifier_being_read.counter - 1; + read_request.read_request.counter = nullifier_being_read.counter() - 1; builder.previous_kernel.nullifier_read_requests.push(read_request); builder.failed(); @@ -394,15 +403,12 @@ mod tests { builder.nullify_pending_note_hash(1, 0); let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); + assert(is_empty_array(public_inputs.end.new_note_hashes)); // The nullifier at index 1 is chopped. 
- assert( - array_eq( - public_inputs.end.new_nullifiers, - [new_nullifiers[0].value, new_nullifiers[2].value] - ) - ); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0], new_nullifiers[2]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -413,23 +419,16 @@ mod tests { // The nullifier at index 1 is nullifying the hash at index 0; builder.nullify_pending_note_hash(1, 0); let new_note_hashes = builder.previous_kernel.new_note_hashes.storage; - // The 0th hash will be chopped. - let unique_siloed_note_hashes = builder.compute_unique_siloed_note_hashes([new_note_hashes[1]]); let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); - assert( - array_eq( - public_inputs.end.new_note_hashes, - [unique_siloed_note_hashes[0]] - ) - ); + + // The 0th hash is chopped. + let expected_note_hashes = builder.compute_output_note_hashes([new_note_hashes[1]]); + assert(array_eq(public_inputs.end.new_note_hashes, expected_note_hashes)); + // The nullifier at index 1 is chopped. - assert( - array_eq( - public_inputs.end.new_nullifiers, - [new_nullifiers[0].value, new_nullifiers[2].value] - ) - ); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0], new_nullifiers[2]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -444,9 +443,11 @@ mod tests { let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); - // Only the first nullifier is left after squashing. assert(is_empty_array(public_inputs.end.new_note_hashes)); - assert(array_eq(public_inputs.end.new_nullifiers, [new_nullifiers[0].value])); + + // Only the first nullifier is left after squashing. 
+ let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -477,9 +478,11 @@ mod tests { let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); - // Only the first nullifier is left after squashing. assert(is_empty_array(public_inputs.end.new_note_hashes)); - assert(array_eq(public_inputs.end.new_nullifiers, [new_nullifiers[0].value])); + + // Only the first nullifier is left after squashing. + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -492,8 +495,8 @@ mod tests { let sorted_note_hashes = builder.previous_kernel.new_note_hashes.storage; let sorted_nullifiers = builder.previous_kernel.new_nullifiers.storage; - let mut reversed_note_hashes = [NoteHashContext::empty(); 10]; - let mut reversed_nullifiers = [Nullifier::empty(); 10]; + let mut reversed_note_hashes = [ScopedNoteHash::empty(); 10]; + let mut reversed_nullifiers = [ScopedNullifier::empty(); 10]; for i in 0..10 { reversed_note_hashes[9 - i] = builder.previous_kernel.new_note_hashes.pop(); @@ -503,13 +506,13 @@ mod tests { builder.previous_kernel.new_note_hashes.extend_from_array(reversed_note_hashes); builder.previous_kernel.new_nullifiers.extend_from_array(reversed_nullifiers); - let sorted_unique_note_hashes = builder.compute_unique_siloed_note_hashes(sorted_note_hashes); - let public_inputs = builder.execute(); + let expected_note_hashes = builder.compute_output_note_hashes(sorted_note_hashes); + let expected_nullifiers = builder.compute_output_nullifiers(sorted_nullifiers); for i in 0..10 { - assert(public_inputs.end.new_note_hashes[i].eq(sorted_unique_note_hashes[i])); - assert(public_inputs.end.new_nullifiers[i].eq(sorted_nullifiers[i].value)); + 
assert(public_inputs.end.new_note_hashes[i].eq(expected_note_hashes[i])); + assert(public_inputs.end.new_nullifiers[i].eq(expected_nullifiers[i])); } } @@ -525,7 +528,7 @@ mod tests { assert_eq(public_inputs.end.gas_used, expected_gas); } - #[test(should_fail_with="Hinted note hash does not match")] + #[test(should_fail_with="Value of the hinted transient note hash does not match")] unconstrained fn wrong_transient_nullifier_index_for_note_hash_fails() { let mut builder = PrivateKernelTailInputsBuilder::new(); builder.previous_kernel.append_new_note_hashes(1); diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index aa1726db1482..a16c3ea41ba6 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -3,7 +3,7 @@ use dep::reset_kernel_lib::{NoteHashReadRequestHints, NullifierReadRequestHints, use dep::types::{ abis::{ kernel_data::PrivateKernelData, kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, - note_hash::NoteHashContext, nullifier::Nullifier, side_effect::SideEffect + note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, @@ -14,8 +14,8 @@ use dep::types::{ // Can just be PublicKernelCircuitPublicInputs. 
struct PrivateKernelTailToPublicOutputs { - note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], } struct PrivateKernelTailToPublicHints { @@ -24,9 +24,9 @@ struct PrivateKernelTailToPublicHints { note_hash_read_request_hints: NoteHashReadRequestHints, nullifier_read_request_hints: NullifierReadRequestHints, master_nullifier_secret_keys: [GrumpkinPrivateKey; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], - sorted_new_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + sorted_new_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], sorted_new_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], - sorted_new_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + sorted_new_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], sorted_new_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], @@ -49,7 +49,7 @@ impl PrivateKernelTailToPublicCircuitPrivateInputs { let note_hash_tree_root = previous_public_inputs.constants.historical_header.state.partial.note_hash_tree.root; let nullifier_tree_root = previous_public_inputs.constants.historical_header.state.partial.nullifier_tree.root; - let request_processor = PrivateValidationRequestProcessor { + PrivateValidationRequestProcessor { validation_requests: previous_public_inputs.validation_requests, note_hash_read_request_hints: self.hints.note_hash_read_request_hints, pending_note_hashes: previous_public_inputs.end.new_note_hashes, @@ -58,10 +58,9 @@ impl PrivateKernelTailToPublicCircuitPrivateInputs { pending_nullifiers: previous_public_inputs.end.new_nullifiers, nullifier_tree_root, master_nullifier_secret_keys: self.hints.master_nullifier_secret_keys - }; - 
request_processor.validate(); + }.validate(); - let mut composer = KernelCircuitPublicInputsComposer::new( + KernelCircuitPublicInputsComposer::new( self.previous_kernel, self.outputs.note_hashes, self.outputs.nullifiers, @@ -75,8 +74,7 @@ impl PrivateKernelTailToPublicCircuitPrivateInputs { self.hints.sorted_encrypted_log_hashes_indexes, self.hints.sorted_unencrypted_log_hashes, self.hints.sorted_unencrypted_log_hashes_indexes - ); - composer.compose_public().finish_to_public() + ).compose_public().finish_to_public() } } @@ -101,10 +99,11 @@ mod tests { use dep::types::{ abis::{ kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, gas::Gas, - note_hash::{NoteHash, NoteHashContext}, nullifier::Nullifier, side_effect::{SideEffect, Ordered} + note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, + side_effect::SideEffect }, grumpkin_private_key::GrumpkinPrivateKey, - hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash}, + hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash, silo_note_hash, silo_nullifier}, tests::{fixture_builder::FixtureBuilder, sort::sort_get_sorted_hints}, utils::{arrays::{array_eq, array_length}}, traits::is_empty_array }; @@ -136,17 +135,16 @@ mod tests { // A helper function that uses the first nullifer in the previous kernel to compute the unique siloed // note_hashes for the given note_hashes. 
- pub fn compute_unique_siloed_note_hashes( - self, - note_hashes: [NoteHashContext; N] - ) -> [NoteHash; N] { - let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0); + pub fn compute_output_note_hashes(self, note_hashes: [ScopedNoteHash; N]) -> [NoteHash; N] { + let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0).value(); let mut unique_siloed_note_hashes = [NoteHash::empty(); N]; for i in 0..N { - if note_hashes[i].value != 0 { - let nonce = compute_note_hash_nonce(first_nullifier.value, i); + let note_hash = note_hashes[i]; + if note_hash.value() != 0 { + let siloed = silo_note_hash(note_hash.contract_address, note_hash.value()); + let nonce = compute_note_hash_nonce(first_nullifier, i); unique_siloed_note_hashes[i] = NoteHash { - value: compute_unique_siloed_note_hash(nonce, note_hashes[i].value), + value: compute_unique_siloed_note_hash(nonce, siloed), counter: 0, // Counter is cleared so it's not exposed to the public. }; } @@ -154,6 +152,18 @@ mod tests { unique_siloed_note_hashes } + pub fn compute_output_nullifiers( + _self: Self, + nullifiers: [ScopedNullifier; N] + ) -> [Nullifier; N] { + let mut output = [Nullifier::empty(); N]; + output[0].value = nullifiers[0].value(); + for i in 1..N { + output[i] = Nullifier { value: silo_nullifier(nullifiers[i].contract_address, nullifiers[i].value()), counter: 0, note_hash: 0 }; + } + output + } + pub fn add_pending_note_hash_read_request(&mut self, note_hash_index: u64) { let read_request_index = self.previous_kernel.add_read_request_for_pending_note_hash(note_hash_index); let hint_index = self.note_hash_read_request_hints_builder.pending_read_hints.len(); @@ -172,8 +182,8 @@ mod tests { } pub fn nullify_pending_note_hash(&mut self, nullifier_index: u64, note_hash_index: u64) { - self.previous_kernel.new_note_hashes.storage[note_hash_index].nullifier_counter = self.previous_kernel.new_nullifiers.get(nullifier_index).counter; - 
self.previous_kernel.new_nullifiers.storage[nullifier_index].note_hash = self.previous_kernel.new_note_hashes.get(note_hash_index).value; + self.previous_kernel.new_note_hashes.storage[note_hash_index].nullifier_counter = self.previous_kernel.new_nullifiers.get(nullifier_index).counter(); + self.previous_kernel.new_nullifiers.storage[nullifier_index].nullifier.note_hash = self.previous_kernel.new_note_hashes.get(note_hash_index).value(); self.transient_nullifier_indexes_for_note_hashes[note_hash_index] = nullifier_index; self.transient_note_hash_indexes_for_nullifiers[nullifier_index] = note_hash_index; } @@ -181,14 +191,14 @@ mod tests { pub fn execute(&mut self) -> PublicKernelCircuitPublicInputs { let sorted = sort_get_sorted_hints( self.previous_kernel.new_note_hashes.storage, - |a: NoteHashContext, b: NoteHashContext| a.counter < b.counter + |a: ScopedNoteHash, b: ScopedNoteHash| a.counter() < b.counter() ); let sorted_new_note_hashes = sorted.sorted_array; let sorted_new_note_hashes_indexes = sorted.sorted_index_hints; let sorted = sort_get_sorted_hints( self.previous_kernel.new_nullifiers.storage, - |a: Nullifier, b: Nullifier| a.counter < b.counter + |a: ScopedNullifier, b: ScopedNullifier| a.counter() < b.counter() ); let sorted_new_nullifiers = sorted.sorted_array; let sorted_new_nullifiers_indexes = sorted.sorted_index_hints; @@ -326,7 +336,7 @@ mod tests { builder.add_pending_nullifier_read_request(1); let nullifier_being_read = builder.previous_kernel.new_nullifiers.storage[2]; let mut read_request = builder.previous_kernel.nullifier_read_requests.pop(); - read_request.counter = nullifier_being_read.counter - 1; + read_request.read_request.counter = nullifier_being_read.counter() - 1; builder.previous_kernel.nullifier_read_requests.push(read_request); builder.failed(); @@ -341,15 +351,12 @@ mod tests { builder.nullify_pending_note_hash(1, 0); let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); + 
assert(is_empty_array(public_inputs.end.new_note_hashes)); // The nullifier at index 1 is chopped. - assert( - array_eq( - public_inputs.end.new_nullifiers, - [new_nullifiers[0], new_nullifiers[2]] - ) - ); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0], new_nullifiers[2]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -360,23 +367,16 @@ mod tests { // The nullifier at index 1 is nullifying the hash at index 0; builder.nullify_pending_note_hash(1, 0); let new_note_hashes = builder.previous_kernel.new_note_hashes.storage; - // The 0th hash will be chopped. - let unique_siloed_note_hashes = builder.compute_unique_siloed_note_hashes([new_note_hashes[1]]); let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); - assert( - array_eq( - public_inputs.end.new_note_hashes, - [unique_siloed_note_hashes[0]] - ) - ); + + // The 0th hash will be chopped. + let expected_note_hashes = builder.compute_output_note_hashes([new_note_hashes[1]]); + assert(array_eq(public_inputs.end.new_note_hashes, expected_note_hashes)); + // The nullifier at index 1 is chopped. - assert( - array_eq( - public_inputs.end.new_nullifiers, - [new_nullifiers[0], new_nullifiers[2]] - ) - ); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0], new_nullifiers[2]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -393,7 +393,8 @@ mod tests { // Only the first nullifier is left after squashing. 
assert(is_empty_array(public_inputs.end.new_note_hashes)); - assert(array_eq(public_inputs.end.new_nullifiers, [new_nullifiers[0]])); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -406,8 +407,8 @@ mod tests { let sorted_note_hashes = builder.previous_kernel.new_note_hashes.storage; let sorted_nullifiers = builder.previous_kernel.new_nullifiers.storage; - let mut reversed_note_hashes = [NoteHashContext::empty(); 10]; - let mut reversed_nullifiers = [Nullifier::empty(); 10]; + let mut reversed_note_hashes = [ScopedNoteHash::empty(); 10]; + let mut reversed_nullifiers = [ScopedNullifier::empty(); 10]; for i in 0..10 { reversed_note_hashes[9 - i] = builder.previous_kernel.new_note_hashes.pop(); @@ -417,17 +418,17 @@ mod tests { builder.previous_kernel.new_note_hashes.extend_from_array(reversed_note_hashes); builder.previous_kernel.new_nullifiers.extend_from_array(reversed_nullifiers); - let sorted_unique_note_hashes = builder.compute_unique_siloed_note_hashes(sorted_note_hashes); - let public_inputs = builder.execute(); + let output_note_hashes = builder.compute_output_note_hashes(sorted_note_hashes); + let output_nullifiers = builder.compute_output_nullifiers(sorted_nullifiers); for i in 0..10 { - assert(public_inputs.end.new_note_hashes[i].eq(sorted_unique_note_hashes[i])); - assert(public_inputs.end.new_nullifiers[i].eq(sorted_nullifiers[i])); + assert(public_inputs.end.new_note_hashes[i].eq(output_note_hashes[i])); + assert(public_inputs.end.new_nullifiers[i].eq(output_nullifiers[i])); } } - #[test(should_fail_with="Hinted note hash does not match")] + #[test(should_fail_with="Value of the hinted transient note hash does not match")] unconstrained fn wrong_transient_nullifier_index_for_note_hash_fails() { let mut builder = PrivateKernelTailToPublicInputsBuilder::new(); builder.previous_kernel.append_new_note_hashes(1); @@ -487,17 +488,19 @@ 
mod tests { let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); + let output_nullifiers = builder.compute_output_nullifiers(new_nullifiers); + assert( array_eq( public_inputs.end_non_revertible.new_nullifiers, - [new_nullifiers[0], new_nullifiers[1], new_nullifiers[2]] + [output_nullifiers[0], output_nullifiers[1], output_nullifiers[2]] ) ); assert( array_eq( public_inputs.end.new_nullifiers, - [new_nullifiers[3], new_nullifiers[4]] + [output_nullifiers[3], output_nullifiers[4]] ) ); @@ -521,7 +524,7 @@ mod tests { let new_note_hashes = builder.previous_kernel.new_note_hashes.storage; let public_inputs = builder.execute(); - let siloed_note_hashes = builder.compute_unique_siloed_note_hashes(new_note_hashes); + let siloed_note_hashes = builder.compute_output_note_hashes(new_note_hashes); assert( array_eq( diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr index adf8f2a1952a..83d5770a8066 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr @@ -4,7 +4,7 @@ use dep::types::{ kernel_circuit_public_inputs::PublicKernelCircuitPublicInputsBuilder, kernel_data::PublicKernelData, note_hash::NoteHash, nullifier::Nullifier, public_call_data::PublicCallData, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, - read_request::ReadRequestContext, side_effect::SideEffect, global_variables::GlobalVariables, + side_effect::SideEffect, global_variables::GlobalVariables, combined_constant_data::CombinedConstantData }, address::AztecAddress, @@ -324,7 +324,7 @@ fn propagate_nullifier_read_requests( for i in 0..MAX_NULLIFIER_READ_REQUESTS_PER_CALL { let request = nullifier_read_requests[i]; if !is_empty(request) { - 
circuit_outputs.validation_requests.nullifier_read_requests.push(request.to_context(storage_contract_address)); + circuit_outputs.validation_requests.nullifier_read_requests.push(request.scope(storage_contract_address)); } } } @@ -340,7 +340,7 @@ fn propagate_nullifier_non_existent_read_requests( for i in 0..MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL { let request = nullifier_non_existent_read_requests[i]; if !is_empty(request) { - circuit_outputs.validation_requests.nullifier_non_existent_read_requests.push(request.to_context(storage_contract_address)); + circuit_outputs.validation_requests.nullifier_non_existent_read_requests.push(request.scope(storage_contract_address)); } } } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr index ddfd090a5466..6fd4e359211e 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr @@ -78,8 +78,9 @@ mod tests { use dep::types::{ abis::{ gas::Gas, kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, - note_hash::{NoteHash, NoteHashContext}, nullifier::Nullifier, public_data_read::PublicDataRead, - public_data_update_request::PublicDataUpdateRequest, read_request::ReadRequest + note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, + public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, + read_request::ReadRequest }, address::{AztecAddress, EthAddress}, contract_class_id::ContractClassId, hash::{compute_l2_to_l1_hash, silo_note_hash, silo_nullifier}, @@ -186,7 +187,7 @@ mod tests { let contract_address = builder.public_call.contract_address; // Setup 2 new note hashes on the previous kernel. 
builder.previous_kernel.append_new_note_hashes(2); - let previous = builder.previous_kernel.new_note_hashes.storage.map(|n: NoteHashContext| n.to_note_hash()); + let previous = builder.previous_kernel.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash); // Setup 2 new note hashes on the current public inputs. let current = [ NoteHash { value: previous[1].value + 1, counter: 3 }, @@ -247,19 +248,19 @@ mod tests { // Setup 2 new nullifiers on the previous kernel. builder.previous_kernel.append_new_nullifiers(2); - let previous = builder.previous_kernel.new_nullifiers.storage; + let previous = builder.previous_kernel.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier); // Setup 2 new note hashes on the current public inputs. let current = [ Nullifier { value: previous[1].value + 1, note_hash: 0, counter: 4 }, Nullifier { value: previous[1].value + 2, note_hash: 0, counter: 5 } ]; + builder.public_call.public_inputs.new_nullifiers.extend_from_array(current); let siloed = current.map( |current: Nullifier| Nullifier { value: silo_nullifier(contract_address, current.value), note_hash: current.note_hash, counter: current.counter } ); - builder.public_call.public_inputs.new_nullifiers.extend_from_array(current); // There are 2 revertible nullifiers in the previous kernel. // The tx nullifier is part of the non-revertible nullifiers. let new_nullifiers = [previous[0], previous[1], siloed[0], siloed[1]]; @@ -278,9 +279,9 @@ mod tests { // Setup 1 new l2 to l1 message on the previous kernel. let previous = [12345]; - builder.previous_kernel.new_l2_to_l1_msgs.extend_from_array(previous); + builder.previous_kernel.add_l2_to_l1_message(previous[0], portal_contract_address); // Setup 1 new l2 to l1 message on the current public inputs. 
- let current = [L2ToL1Message { recipient: portal_contract_address, content: 67890 }]; + let current = [L2ToL1Message { recipient: portal_contract_address, content: 67890, counter: 0 }]; builder.public_call.public_inputs.new_l2_to_l1_msgs.extend_from_array(current); let tx_context = builder.previous_kernel.tx_context; let version = tx_context.version; @@ -438,15 +439,13 @@ mod tests { let end_requests = public_inputs.validation_requests.nullifier_non_existent_read_requests; assert_eq(array_length(end_requests), 2); - let request_context = end_requests[0]; - assert_eq(request_context.value, request_0.value); - assert_eq(request_context.counter, request_0.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_requests[0]; + assert_eq(request.read_request, request_0); + assert_eq(request.contract_address, storage_contract_address); - let request_context = end_requests[1]; - assert_eq(request_context.value, request_1.value); - assert_eq(request_context.counter, request_1.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_requests[1]; + assert_eq(request.read_request, request_1); + assert_eq(request.contract_address, storage_contract_address); } #[test] diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr index 57baeb5d8513..248c89c0b7a5 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr @@ -483,15 +483,13 @@ mod tests { let end_requests = public_inputs.validation_requests.nullifier_non_existent_read_requests; assert_eq(array_length(end_requests), 2); - let request_context = end_requests[0]; - assert_eq(request_context.value, request_0.value); - assert_eq(request_context.counter, 
request_0.counter); - assert_eq(request_context.contract_address, storage_contract_address); - - let request_context = end_requests[1]; - assert_eq(request_context.value, request_1.value); - assert_eq(request_context.counter, request_1.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_requests[0]; + assert_eq(request.read_request, request_0); + assert_eq(request.contract_address, storage_contract_address); + + let request = end_requests[1]; + assert_eq(request.read_request, request_1); + assert_eq(request.contract_address, storage_contract_address); } #[test(should_fail_with="Public call cannot be reverted")] diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr index ffe93c8dfd04..f8bc620c1004 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr @@ -57,7 +57,7 @@ impl PublicKernelTailCircuitPrivateInputs { hint.leaf_slot, exists_in_tree, hint.leaf_preimage, - MembershipWitness { leaf_index: hint.membership_witness.leaf_index, sibling_path: hint.membership_witness.sibling_path }, + hint.membership_witness, public_data_tree_root ); } @@ -115,19 +115,20 @@ mod tests { use dep::types::{ abis::{ kernel_circuit_public_inputs::KernelCircuitPublicInputs, kernel_data::PublicKernelData, - nullifier_leaf_preimage::NullifierLeafPreimage, membership_witness::PublicDataMembershipWitness + nullifier::ScopedNullifier, nullifier_leaf_preimage::NullifierLeafPreimage }, constants::{ MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_HINTS, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NULLIFIER_TREE_HEIGHT, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, 
PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_TREE_HEIGHT, MAX_ENCRYPTED_LOGS_PER_TX, - MAX_UNENCRYPTED_LOGS_PER_TX, + MAX_UNENCRYPTED_LOGS_PER_TX }, hash::{silo_nullifier, sha256_to_field}, public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage, tests::{fixture_builder::FixtureBuilder, merkle_tree_utils::NonEmptyMerkleTree}, - partial_state_reference::PartialStateReference, utils::arrays::array_merge + partial_state_reference::PartialStateReference, utils::arrays::array_merge, + merkle_tree::MembershipWitness }; fn build_nullifier_tree() -> NonEmptyMerkleTree { @@ -208,19 +209,19 @@ mod tests { } pub fn add_nullifier(&mut self, unsiloed_nullifier: Field) { - self.previous_kernel.add_nullifier(unsiloed_nullifier); + self.previous_kernel.add_siloed_nullifier(unsiloed_nullifier); self.sync_counters(); self.set_nullifiers_for_non_existent_read_request_hints(); } pub fn append_nullifiers_revertible(&mut self, num_nullifiers: u64) { - self.previous_revertible.append_new_nullifiers(num_nullifiers); + self.previous_revertible.append_siloed_nullifiers(num_nullifiers); self.sync_counters(); self.set_nullifiers_for_non_existent_read_request_hints(); } pub fn append_nullifiers_non_revertible(&mut self, num_nullifiers: u64) { - self.previous_kernel.append_new_nullifiers(num_nullifiers); + self.previous_kernel.append_siloed_nullifiers(num_nullifiers); self.sync_counters(); self.set_nullifiers_for_non_existent_read_request_hints(); } @@ -229,7 +230,7 @@ mod tests { let nullifiers = array_merge( self.previous_kernel.new_nullifiers.storage, self.previous_revertible.new_nullifiers.storage - ); + ).map(|n: ScopedNullifier| n.nullifier); self.nullifier_non_existent_read_request_hints_builder.set_nullifiers(nullifiers); } @@ -264,7 +265,7 @@ mod tests { pub fn add_public_data_hint_for_settled_public_data(&mut self, leaf_index: u64) { let leaf_preimage = get_settled_public_data_leaves()[leaf_index]; - let membership_witness = PublicDataMembershipWitness { leaf_index: 
leaf_index as Field, sibling_path: self.public_data_tree.get_sibling_path(leaf_index) }; + let membership_witness = MembershipWitness { leaf_index: leaf_index as Field, sibling_path: self.public_data_tree.get_sibling_path(leaf_index) }; let hint = PublicDataHint { leaf_slot: leaf_preimage.slot, value: leaf_preimage.value, @@ -277,7 +278,7 @@ mod tests { pub fn add_public_data_hint_for_non_existent_public_data(&mut self, leaf_slot: Field, low_leaf_index: u64) { let leaf_preimage = get_settled_public_data_leaves()[low_leaf_index]; - let membership_witness = PublicDataMembershipWitness { + let membership_witness = MembershipWitness { leaf_index: low_leaf_index as Field, sibling_path: self.public_data_tree.get_sibling_path(low_leaf_index) }; @@ -360,18 +361,11 @@ mod tests { public_inputs.end.unencrypted_log_preimages_length, unencrypted_log_preimages_length + prev_unencrypted_log_preimages_length ); - let hash_bytes: [u8; MAX_ENCRYPTED_LOGS_PER_TX * 32] = prev_encrypted_logs_hash - .to_be_bytes(32) - .append(&[0; MAX_ENCRYPTED_LOGS_PER_TX * 32 - 32]) - .as_array(); + let hash_bytes: [u8; MAX_ENCRYPTED_LOGS_PER_TX * 32] = prev_encrypted_logs_hash.to_be_bytes(32).append(&[0; MAX_ENCRYPTED_LOGS_PER_TX * 32 - 32]).as_array(); let expected_encrypted_logs_hash = sha256_to_field(hash_bytes); assert_eq(public_inputs.end.encrypted_logs_hash, expected_encrypted_logs_hash); - let hash_bytes: [u8; MAX_UNENCRYPTED_LOGS_PER_TX * 32] = prev_unencrypted_logs_hash - .to_be_bytes(32) - .append(unencrypted_logs_hash.to_be_bytes(32)) - .append(&[0; MAX_UNENCRYPTED_LOGS_PER_TX * 32 - 64]) - .as_array(); + let hash_bytes: [u8; MAX_UNENCRYPTED_LOGS_PER_TX * 32] = prev_unencrypted_logs_hash.to_be_bytes(32).append(unencrypted_logs_hash.to_be_bytes(32)).append(&[0; MAX_UNENCRYPTED_LOGS_PER_TX * 32 - 64]).as_array(); let expected_unencrypted_logs_hash = sha256_to_field(hash_bytes); assert_eq(public_inputs.end.unencrypted_logs_hash, expected_unencrypted_logs_hash); } @@ -426,7 +420,7 @@ mod 
tests { builder.add_pending_revertible_nullifier_read_request(1); let nullifier_being_read = builder.previous_revertible.new_nullifiers.get(1); let mut read_request = builder.previous_kernel.nullifier_read_requests.pop(); - read_request.counter = nullifier_being_read.counter - 1; + read_request.read_request.counter = nullifier_being_read.counter() - 1; builder.previous_kernel.nullifier_read_requests.push(read_request); builder.failed(); diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/note_hash_read_request_reset.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/note_hash_read_request_reset.nr index 8c40d7c2cf34..a7f53d46d623 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/note_hash_read_request_reset.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/note_hash_read_request_reset.nr @@ -1,14 +1,14 @@ // This will be moved to a separate Read Request Reset Circuit. use crate::reset::read_request::{PendingReadHint, ReadRequestStatus, ReadValueHint, SettledReadHint}; use dep::types::{ - abis::{membership_witness::NoteHashMembershipWitness, note_hash_leaf_preimage::NoteHashLeafPreimage}, + abis::{note_hash_leaf_preimage::NoteHashLeafPreimage}, constants::{MAX_NOTE_HASH_READ_REQUESTS_PER_TX, NOTE_HASH_TREE_HEIGHT}, merkle_tree::MembershipWitness }; struct NoteHashSettledReadHint { read_request_index: u64, - membership_witness: NoteHashMembershipWitness, // Should be MembershipWitness when we can handle generics when converting to ts types. 
+ membership_witness: MembershipWitness, leaf_preimage: NoteHashLeafPreimage, } @@ -20,7 +20,7 @@ impl ReadValueHint for NoteHashSettledReadHint { impl SettledReadHint for NoteHashSettledReadHint { fn membership_witness(self) -> MembershipWitness { - MembershipWitness { leaf_index: self.membership_witness.leaf_index, sibling_path: self.membership_witness.sibling_path } + self.membership_witness } fn leaf_preimage(self) -> NoteHashLeafPreimage { @@ -30,7 +30,7 @@ impl SettledReadHint for NoteHashSe fn nada(read_request_len: u64) -> Self { NoteHashSettledReadHint { read_request_index: read_request_len, - membership_witness: NoteHashMembershipWitness::empty(), + membership_witness: MembershipWitness::empty(), leaf_preimage: NoteHashLeafPreimage::empty() } } @@ -46,11 +46,8 @@ mod tests { use crate::note_hash_read_request_reset::NoteHashSettledReadHint; use crate::reset::read_request::{PendingReadHint, ReadRequestState, ReadRequestStatus, reset_read_requests}; use dep::types::{ - address::AztecAddress, - abis::{ - membership_witness::NoteHashMembershipWitness, note_hash::NoteHashContext, - note_hash_leaf_preimage::NoteHashLeafPreimage, read_request::ReadRequestContext - }, + address::AztecAddress, merkle_tree::MembershipWitness, + abis::{note_hash::NoteHash, note_hash_leaf_preimage::NoteHashLeafPreimage, read_request::ReadRequest}, constants::NOTE_HASH_TREE_HEIGHT, hash::silo_note_hash, tests::merkle_tree_utils::NonEmptyMerkleTree }; @@ -64,15 +61,17 @@ mod tests { // Create 5 read requests. 0 and 3 are reading settled note hashes. 1, 2 and 4 are reading pending note hashes. // TODO(#2847): Read request values for settled note hashes shouldn't have been siloed by apps. 
global read_requests = [ - ReadRequestContext { value: note_hashes[1], counter: 11, contract_address }, // settled - ReadRequestContext { value: inner_note_hashes[3], counter: 13, contract_address }, // pending - ReadRequestContext { value: inner_note_hashes[2], counter: 39, contract_address }, // pending - ReadRequestContext { value: note_hashes[0], counter: 46, contract_address }, // settled - ReadRequestContext { value: inner_note_hashes[3], counter: 78, contract_address }, // pending + ReadRequest { value: note_hashes[1], counter: 11 }.scope(contract_address), // settled + ReadRequest { value: inner_note_hashes[3], counter: 13 }.scope(contract_address), // pending + ReadRequest { value: inner_note_hashes[2], counter: 39 }.scope(contract_address), // pending + ReadRequest { value: note_hashes[0], counter: 46 }.scope(contract_address), // settled + ReadRequest { value: inner_note_hashes[3], counter: 78 }.scope(contract_address), // pending ]; - // TODO(#6122): Pending values shouldn't have been siloed at this point. 
- global pending_values = [NoteHashContext { value: note_hashes[2], counter: 2, nullifier_counter: 0 }, NoteHashContext { value: note_hashes[3], counter: 8, nullifier_counter: 0 }]; + global pending_values = [ + NoteHash { value: inner_note_hashes[2], counter: 2, }.scope(0, contract_address), + NoteHash { value: inner_note_hashes[3], counter: 8, }.scope(0, contract_address), + ]; global pending_read_hints = [ PendingReadHint { read_request_index: 1, pending_value_index: 1 }, PendingReadHint { read_request_index: 2, pending_value_index: 0 }, @@ -108,12 +107,12 @@ mod tests { let hints = [ NoteHashSettledReadHint { read_request_index: 0, - membership_witness: NoteHashMembershipWitness { leaf_index: 1, sibling_path: tree.get_sibling_path(1) }, + membership_witness: MembershipWitness { leaf_index: 1, sibling_path: tree.get_sibling_path(1) }, leaf_preimage: leaf_preimages[1] }, NoteHashSettledReadHint { read_request_index: 3, - membership_witness: NoteHashMembershipWitness { leaf_index: 0, sibling_path: tree.get_sibling_path(0) }, + membership_witness: MembershipWitness { leaf_index: 0, sibling_path: tree.get_sibling_path(0) }, leaf_preimage: leaf_preimages[0] } ]; @@ -159,7 +158,7 @@ mod tests { fn test_reset_note_hash_read_requests_wrong_hinted_value() { let mut tainted_pending_values = pending_values; // Tweak the value to be something different. - tainted_pending_values[0].value += 1; + tainted_pending_values[0].note_hash.value += 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -178,7 +177,7 @@ mod tests { let pending_read = read_requests[hint.read_request_index]; let mut tainted_pending_values = pending_values; // Tweak the counter of the value to be greater than the read request. 
- tainted_pending_values[hint.pending_value_index].counter = pending_read.counter + 1; + tainted_pending_values[hint.pending_value_index].note_hash.counter = pending_read.counter() + 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -197,7 +196,7 @@ mod tests { let pending_read = read_requests[hint.read_request_index]; let mut tainted_pending_values = pending_values; // Tweak the nullifier counter to be less than the read request. - tainted_pending_values[hint.pending_value_index].nullifier_counter = pending_read.counter - 1; + tainted_pending_values[hint.pending_value_index].nullifier_counter = pending_read.counter() - 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -217,7 +216,7 @@ mod tests { let mut tained_read_requests = read_requests; let hint = settled_read_hints[0]; // Tweak the value of the first settled read to be something different. - tained_read_requests[hint.read_request_index].value += 1; + tained_read_requests[hint.read_request_index].read_request.value += 1; let _ = reset_read_requests( tained_read_requests, diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/nullifier_read_request_reset.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/nullifier_read_request_reset.nr index ba2c15edc39d..e7363f828c41 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/nullifier_read_request_reset.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/nullifier_read_request_reset.nr @@ -47,7 +47,7 @@ mod tests { use crate::reset::read_request::{PendingReadHint, ReadRequestState, ReadRequestStatus, reset_read_requests}; use dep::types::{ address::AztecAddress, - abis::{nullifier::Nullifier, nullifier_leaf_preimage::NullifierLeafPreimage, read_request::ReadRequestContext}, + abis::{nullifier::Nullifier, nullifier_leaf_preimage::NullifierLeafPreimage, read_request::ReadRequest}, 
constants::NULLIFIER_TREE_HEIGHT, hash::silo_nullifier, merkle_tree::MembershipWitness, tests::merkle_tree_utils::NonEmptyMerkleTree }; @@ -60,14 +60,17 @@ mod tests { // Create 5 read requests. 0 and 3 are reading settled nullifiers. 1, 2 and 4 are reading pending nullifiers. global read_requests = [ - ReadRequestContext { value: inner_nullifiers[1], counter: 11, contract_address }, // settled - ReadRequestContext { value: inner_nullifiers[3], counter: 13, contract_address }, // pending - ReadRequestContext { value: inner_nullifiers[2], counter: 39, contract_address }, // pending - ReadRequestContext { value: inner_nullifiers[0], counter: 46, contract_address }, // settled - ReadRequestContext { value: inner_nullifiers[3], counter: 78, contract_address }, // pending + ReadRequest { value: inner_nullifiers[1], counter: 11 }.scope(contract_address), // settled + ReadRequest { value: inner_nullifiers[3], counter: 13 }.scope(contract_address), // pending + ReadRequest { value: inner_nullifiers[2], counter: 39 }.scope(contract_address), // pending + ReadRequest { value: inner_nullifiers[0], counter: 46 }.scope(contract_address), // settled + ReadRequest { value: inner_nullifiers[3], counter: 78 }.scope(contract_address), // pending ]; - global pending_values = [Nullifier { value: nullifiers[2], counter: 2, note_hash: 0 }, Nullifier { value: nullifiers[3], counter: 8, note_hash: 0 }]; + global pending_values = [ + Nullifier { value: inner_nullifiers[2], counter: 2, note_hash: 0 }.scope(contract_address), + Nullifier { value: inner_nullifiers[3], counter: 8, note_hash: 0 }.scope(contract_address), + ]; global pending_read_hints = [ PendingReadHint { read_request_index: 1, pending_value_index: 1 }, PendingReadHint { read_request_index: 2, pending_value_index: 0 }, @@ -156,7 +159,7 @@ mod tests { fn test_reset_nullifier_read_requests_wrong_hinted_value() { let mut tainted_pending_values = pending_values; // Tweak the value to be something different. 
- tainted_pending_values[0].value += 1; + tainted_pending_values[0].nullifier.value += 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -175,7 +178,7 @@ mod tests { let pending_read = read_requests[hint.read_request_index]; let mut tainted_pending_values = pending_values; // Tweak the counter of the value to be greater than the read request. - tainted_pending_values[hint.pending_value_index].counter = pending_read.counter + 1; + tainted_pending_values[hint.pending_value_index].nullifier.counter = pending_read.counter() + 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -195,7 +198,7 @@ mod tests { let mut tained_read_requests = read_requests; let hint = settled_read_hints[0]; // Tweak the value of the first settled read to be something different. - tained_read_requests[hint.read_request_index].value += 1; + tained_read_requests[hint.read_request_index].read_request.value += 1; let _ = reset_read_requests( tained_read_requests, diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/private_validation_request_processor.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/private_validation_request_processor.nr index 2d5adcd31cd2..08d63cb14d87 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/private_validation_request_processor.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/private_validation_request_processor.nr @@ -3,7 +3,7 @@ use crate::{ nullifier_read_request_reset::NullifierReadRequestHints, reset::read_request::reset_read_requests }; use dep::types::{ - abis::{note_hash::NoteHashContext, nullifier::Nullifier, validation_requests::ValidationRequests}, + abis::{note_hash::ScopedNoteHash, nullifier::ScopedNullifier, validation_requests::ValidationRequests}, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, 
GENERATOR_INDEX__NSK_M @@ -14,10 +14,10 @@ use dep::types::{ struct PrivateValidationRequestProcessor { validation_requests: ValidationRequests, note_hash_read_request_hints: NoteHashReadRequestHints, - pending_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + pending_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], note_hash_tree_root: Field, nullifier_read_request_hints: NullifierReadRequestHints, - pending_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + pending_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], nullifier_tree_root: Field, master_nullifier_secret_keys: [GrumpkinPrivateKey; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], } @@ -62,8 +62,9 @@ impl PrivateValidationRequestProcessor { fn validate_nullifier_keys(self) { let requests = self.validation_requests.nullifier_key_validation_requests; for i in 0..MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX { - let request = requests[i]; + let request = requests[i].request; if !is_empty(request) { + let contract_address = requests[i].contract_address; let master_nullifier_secret_key = self.master_nullifier_secret_keys[i]; // First we check that derived public key matches master nullifier public key from request let master_nullifier_public_key = master_nullifier_secret_key.derive_public_key(); @@ -75,7 +76,7 @@ impl PrivateValidationRequestProcessor { let app_nullifier_secret_key = poseidon2_hash( [ - master_nullifier_secret_key.high, master_nullifier_secret_key.low, request.contract_address.to_field(), GENERATOR_INDEX__NSK_M + master_nullifier_secret_key.high, master_nullifier_secret_key.low, contract_address.to_field(), GENERATOR_INDEX__NSK_M ] ); assert( diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr index a3fd6a84cce1..a25b760e1d49 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr @@ -88,7 +88,7 @@ impl PublicValidationRequestProcessor { for i in 0..read_requests.len() { let read_request = read_requests[i]; if !is_empty(read_request) { - read_requests[i].value = silo_nullifier(read_request.contract_address, read_request.value); + read_requests[i].read_request.value = silo_nullifier(read_request.contract_address, read_request.value()); } } diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/non_existent_read_request.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/non_existent_read_request.nr index afb50e68ce53..ec1c8afde448 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/non_existent_read_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/non_existent_read_request.nr @@ -1,5 +1,5 @@ use dep::types::{ - abis::{side_effect::OrderedValue, read_request::ReadRequestContext}, + abis::{side_effect::OrderedValue, read_request::ScopedReadRequest}, merkle_tree::{assert_check_non_membership, IndexedTreeLeafPreimage, MembershipWitness}, traits::{Empty, is_empty} }; @@ -10,28 +10,28 @@ trait NonMembershipHint where LEAF_PREIMAGE: Indexed } fn check_no_matching_pending_value( - read_request: ReadRequestContext, + read_request: ScopedReadRequest, sorted_pending_values: BoundedVec, next_value_index: u64 ) -> bool where T: OrderedValue { if next_value_index == sorted_pending_values.len() { let highest_value = sorted_pending_values.get_unchecked(sorted_pending_values.len() - 1).value(); - highest_value.lt(read_request.value) + highest_value.lt(read_request.value()) } else { let next_value = sorted_pending_values.get_unchecked(next_value_index).value(); - let is_less_than_next = read_request.value.lt(next_value); + let 
is_less_than_next = read_request.value().lt(next_value); let is_greater_than_prev = if next_value_index == 0 { true } else { let prev_value = sorted_pending_values.get_unchecked(next_value_index - 1).value(); - prev_value.lt(read_request.value) + prev_value.lt(read_request.value()) }; is_less_than_next & is_greater_than_prev } } fn check_is_read_before_pending_value( - read_request: ReadRequestContext, + read_request: ScopedReadRequest, sorted_pending_values: BoundedVec, next_value_index: u64 ) -> bool where T: OrderedValue { @@ -39,8 +39,8 @@ fn check_is_read_before_pending_value( false } else { let pending = sorted_pending_values.get_unchecked(next_value_index); - if pending.value() == read_request.value { - assert(read_request.counter < pending.counter(), "Value exists in pending set"); + if pending.value() == read_request.value() { + assert(read_request.counter() < pending.counter(), "Value exists in pending set"); true } else { false @@ -52,7 +52,7 @@ fn check_is_read_before_pending_value( // Non existent read requests can only be verified at the end, after all pending values are present. // The values in read_requests and in sorted_pending_values should've been siloed before calling this. 
pub fn reset_non_existent_read_requests( - siloed_read_requests: [ReadRequestContext; N], + siloed_read_requests: [ScopedReadRequest; N], non_membership_hints: [NON_MEMBERSHIP_HINT; N], tree_root: Field, sorted_pending_values: BoundedVec, @@ -67,7 +67,7 @@ pub fn reset_non_existent_read_requests where LEAF_PREIMAGE: LeafPreim // - https://discourse.aztec.network/t/to-read-or-not-to-read/178 // - https://discourse.aztec.network/t/spending-notes-which-havent-yet-been-inserted/180 fn validate_pending_read_requests( - read_requests: [ReadRequestContext; READ_REQUEST_LEN], + read_requests: [ScopedReadRequest; READ_REQUEST_LEN], pending_values: [T; PENDING_VALUE_LEN], hints: [PendingReadHint; NUM_PENDING_READS] ) where T: Readable { @@ -76,7 +76,7 @@ fn validate_pending_read_requests( - read_requests: [ReadRequestContext; READ_REQUEST_LEN], + read_requests: [ScopedReadRequest; READ_REQUEST_LEN], hints: [H; NUM_SETTLED_READS], tree_root: Field ) where @@ -97,11 +97,11 @@ fn validate_settled_read_requests( - read_requests: [ReadRequestContext; READ_REQUEST_LEN], + read_requests: [ScopedReadRequest; READ_REQUEST_LEN], read_request_statuses: [ReadRequestStatus; READ_REQUEST_LEN], pending_read_hints: [T; NUM_PENDING_READS], settled_read_hints: [S; NUM_SETTLED_READS] -) -> BoundedVec where T: ReadValueHint, S: ReadValueHint { +) -> BoundedVec where T: ReadValueHint, S: ReadValueHint { let mut propagated_read_requests = BoundedVec::new(); for i in 0..READ_REQUEST_LEN { let read_request = read_requests[i]; @@ -124,13 +124,13 @@ fn propagate_unverified_read_requests( - read_requests: [ReadRequestContext; READ_REQUEST_LEN], + read_requests: [ScopedReadRequest; READ_REQUEST_LEN], pending_values: [P; PENDING_VALUE_LEN], read_request_statuses: [ReadRequestStatus; READ_REQUEST_LEN], pending_read_hints: [PendingReadHint; NUM_PENDING_READS], settled_read_hints: [H; NUM_SETTLED_READS], tree_root: Field -) -> BoundedVec where +) -> BoundedVec where P: Readable, H: SettledReadHint + 
ReadValueHint, LEAF_PREIMAGE: LeafPreimage + Readable { @@ -153,7 +153,8 @@ mod tests { validate_settled_read_requests }; use dep::types::{ - address::AztecAddress, abis::{read_request::ReadRequestContext, side_effect::Readable}, + address::AztecAddress, + abis::{read_request::{ReadRequest, ScopedReadRequest}, side_effect::Readable}, merkle_tree::{LeafPreimage, MembershipWitness}, tests::merkle_tree_utils::NonEmptyMerkleTree, traits::Empty }; @@ -168,8 +169,8 @@ mod tests { } impl Readable for TestValue { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - let siloed_value = silo_test_value(read_request.value); + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + let siloed_value = silo_test_value(read_request.value()); assert_eq(self.value, siloed_value, "Hinted test value does not match"); } } @@ -197,8 +198,8 @@ mod tests { } impl Readable for TestLeafPreimage { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - let siloed_value = silo_test_value(read_request.value); + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + let siloed_value = silo_test_value(read_request.value()); assert_eq(siloed_value, self.value, "Provided leaf preimage is not for target value"); } } @@ -241,10 +242,10 @@ mod tests { // Create 4 read requests. 0 and 3 are reading settled values. 1 and 2 are reading pending values. 
global read_requests = [ - ReadRequestContext { value: values[1], counter: 11, contract_address }, // settled - ReadRequestContext { value: values[3], counter: 13, contract_address }, // pending - ReadRequestContext { value: values[2], counter: 39, contract_address }, // pending - ReadRequestContext { value: values[0], counter: 46, contract_address }, // settled + ReadRequest { value: values[1], counter: 11 }.scope(contract_address), // settled + ReadRequest { value: values[3], counter: 13, }.scope(contract_address), // pending + ReadRequest { value: values[2], counter: 39, }.scope(contract_address), // pending + ReadRequest { value: values[0], counter: 46, }.scope(contract_address), // settled ]; global pending_values = [ diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/transient_data.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/transient_data.nr index 56ef524f2dd8..a109bdbeb786 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/transient_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/transient_data.nr @@ -1,10 +1,10 @@ -use dep::types::{abis::{note_hash::NoteHashContext, nullifier::Nullifier}, traits::is_empty}; +use dep::types::{abis::{note_hash::ScopedNoteHash, nullifier::ScopedNullifier}, traits::is_empty}; pub fn verify_squashed_transient_note_hashes_and_nullifiers( - note_hashes: [NoteHashContext; NUM_NOTE_HASHES], - nullifiers: [Nullifier; NUM_NULLIFIERS], - expected_note_hashes: [NoteHashContext; NUM_NOTE_HASHES], - expected_nullifiers: [Nullifier; NUM_NULLIFIERS], + note_hashes: [ScopedNoteHash; NUM_NOTE_HASHES], + nullifiers: [ScopedNullifier; NUM_NULLIFIERS], + expected_note_hashes: [ScopedNoteHash; NUM_NOTE_HASHES], + expected_nullifiers: [ScopedNullifier; NUM_NULLIFIERS], transient_nullifier_indexes_for_note_hashes: [u64; NUM_NOTE_HASHES], transient_note_hash_indexes_for_nullifiers: [u64; NUM_NULLIFIERS] ) { @@ 
-18,14 +18,19 @@ pub fn verify_squashed_transient_note_hashes_and_nullifiers note_hash.counter); + // assert(nullifier.counter > note_hash.counter()); note_hashes_removed += 1; @@ -62,35 +67,40 @@ pub fn verify_squashed_transient_note_hashes_and_nullifiers { num_note_hashes: u64, num_nullifiers: u64, - note_hashes: [NoteHashContext; NUM_NOTE_HASHES], - nullifiers: [Nullifier; NUM_NULLIFIERS], - expected_note_hashes: [NoteHashContext; NUM_NOTE_HASHES], - expected_nullifiers: [Nullifier; NUM_NULLIFIERS], + note_hashes: [ScopedNoteHash; NUM_NOTE_HASHES], + nullifiers: [ScopedNullifier; NUM_NULLIFIERS], + expected_note_hashes: [ScopedNoteHash; NUM_NOTE_HASHES], + expected_nullifiers: [ScopedNullifier; NUM_NULLIFIERS], transient_nullifier_indexes_for_note_hashes: [u64; NUM_NOTE_HASHES], transient_note_hash_indexes_for_nullifiers: [u64; NUM_NULLIFIERS], } impl TestDataBuilder { pub fn default() -> TestDataBuilder<3, 3> { + let contract_address = AztecAddress::from_field(987654); + let note_hashes = [ - NoteHashContext { value: 11, counter: 100, nullifier_counter: 500 }, - NoteHashContext { value: 22, counter: 200, nullifier_counter: 0 }, - NoteHashContext { value: 33, counter: 300, nullifier_counter: 400 } + NoteHash { value: 11, counter: 100 }.scope(500, contract_address), + NoteHash { value: 22, counter: 200 }.scope(0, contract_address), + NoteHash { value: 33, counter: 300 }.scope(400, contract_address) ]; let nullifiers = [ - Nullifier { value: 44, counter: 400, note_hash: 33 }, - Nullifier { value: 55, counter: 500, note_hash: 11 }, - Nullifier { value: 66, counter: 600, note_hash: 0 } + Nullifier { value: 44, counter: 400, note_hash: 33 }.scope(contract_address), + Nullifier { value: 55, counter: 500, note_hash: 11 }.scope(contract_address), + Nullifier { value: 66, counter: 600, note_hash: 0 }.scope(contract_address) ]; - let expected_note_hashes = [note_hashes[1], NoteHashContext::empty(), NoteHashContext::empty()]; - let expected_nullifiers = [nullifiers[2], 
Nullifier::empty(), Nullifier::empty()]; + let expected_note_hashes = [note_hashes[1], ScopedNoteHash::empty(), ScopedNoteHash::empty()]; + let expected_nullifiers = [nullifiers[2], ScopedNullifier::empty(), ScopedNullifier::empty()]; let transient_nullifier_indexes_for_note_hashes = [1, 3, 0]; let transient_note_hash_indexes_for_nullifiers = [2, 0, 3]; @@ -108,20 +118,22 @@ mod tests { } pub fn default_all_clear() -> TestDataBuilder<3, 3> { + let contract_address = AztecAddress::from_field(987654); + let note_hashes = [ - NoteHashContext { value: 11, counter: 100, nullifier_counter: 500 }, - NoteHashContext { value: 22, counter: 200, nullifier_counter: 600 }, - NoteHashContext { value: 33, counter: 300, nullifier_counter: 400 } + NoteHash { value: 11, counter: 100 }.scope(500, contract_address), + NoteHash { value: 22, counter: 200 }.scope(600, contract_address), + NoteHash { value: 33, counter: 300 }.scope(400, contract_address) ]; let nullifiers = [ - Nullifier { value: 44, counter: 400, note_hash: 33 }, - Nullifier { value: 55, counter: 500, note_hash: 11 }, - Nullifier { value: 66, counter: 600, note_hash: 22 } + Nullifier { value: 44, counter: 400, note_hash: 33 }.scope(contract_address), + Nullifier { value: 55, counter: 500, note_hash: 11 }.scope(contract_address), + Nullifier { value: 66, counter: 600, note_hash: 22 }.scope(contract_address) ]; - let expected_note_hashes = [NoteHashContext::empty(); 3]; - let expected_nullifiers = [Nullifier::empty(); 3]; + let expected_note_hashes = [ScopedNoteHash::empty(); 3]; + let expected_nullifiers = [ScopedNullifier::empty(); 3]; let transient_nullifier_indexes_for_note_hashes = [1, 2, 0]; let transient_note_hash_indexes_for_nullifiers = [2, 0, 1]; @@ -175,16 +187,25 @@ mod tests { builder.verify(); } - #[test(should_fail_with="Hinted note hash does not match")] + #[test(should_fail_with="Value of the hinted transient note hash does not match")] fn mismatch_note_hash_value() { let mut builder = 
TestDataBuilder::default_all_clear(); - builder.note_hashes[1].value += 1; + builder.note_hashes[1].note_hash.value += 1; + + builder.verify(); + } + + #[test(should_fail_with="Contract address of the hinted transient note hash does not match")] + fn mismatch_contract_address() { + let mut builder = TestDataBuilder::default_all_clear(); + + builder.note_hashes[1].contract_address.inner += 1; builder.verify(); } - #[test(should_fail_with="Hinted nullifier counter does not match")] + #[test(should_fail_with="Nullifier counter of the hinted transient note hash does not match")] fn mismatch_nullifier_counter() { let mut builder = TestDataBuilder::default_all_clear(); @@ -197,7 +218,7 @@ mod tests { fn unexpected_note_hash_value() { let mut builder = TestDataBuilder::default_all_clear(); - builder.expected_note_hashes[2].value = 11; + builder.expected_note_hashes[2].note_hash.value = 11; builder.verify(); } @@ -206,7 +227,7 @@ mod tests { fn wrong_expected_note_hash_value() { let mut builder = TestDataBuilder::default(); - builder.expected_note_hashes[0].value += 1; + builder.expected_note_hashes[0].note_hash.value += 1; builder.verify(); } @@ -215,7 +236,7 @@ mod tests { fn wrong_expected_note_hash_counter() { let mut builder = TestDataBuilder::default(); - builder.expected_note_hashes[0].counter += 1; + builder.expected_note_hashes[0].note_hash.counter += 1; builder.verify(); } @@ -233,7 +254,7 @@ mod tests { fn unexpected_nullifier_value() { let mut builder = TestDataBuilder::default_all_clear(); - builder.expected_nullifiers[2].value = 11; + builder.expected_nullifiers[2].nullifier.value = 11; builder.verify(); } @@ -242,7 +263,7 @@ mod tests { fn wrong_expected_nullifier_value() { let mut builder = TestDataBuilder::default(); - builder.expected_nullifiers[0].value += 1; + builder.expected_nullifiers[0].nullifier.value += 1; builder.verify(); } @@ -251,7 +272,7 @@ mod tests { fn wrong_expected_nullifier_counter() { let mut builder = TestDataBuilder::default(); - 
builder.expected_nullifiers[0].counter += 1; + builder.expected_nullifiers[0].nullifier.counter += 1; builder.verify(); } diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/tests/squash_transient_data.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/tests/squash_transient_data.nr index 48446480718e..264ff0af1674 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/tests/squash_transient_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/tests/squash_transient_data.nr @@ -1,7 +1,7 @@ -use dep::types::abis::{note_hash::NoteHashContext, nullifier::Nullifier}; +use dep::types::abis::{note_hash::ScopedNoteHash, nullifier::ScopedNullifier}; -pub fn squash_transient_note_hashes(note_hashes: [NoteHashContext; N]) -> [NoteHashContext; N] { - let mut final_note_hashes = [NoteHashContext::empty(); N]; +pub fn squash_transient_note_hashes(note_hashes: [ScopedNoteHash; N]) -> [ScopedNoteHash; N] { + let mut final_note_hashes = [ScopedNoteHash::empty(); N]; let mut num_note_hashes = 0; for i in 0..N { @@ -15,13 +15,13 @@ pub fn squash_transient_note_hashes(note_hashes: [NoteHashContext; N]) -> [No final_note_hashes } -pub fn squash_transient_nullifiers(nullifiers: [Nullifier; N]) -> [Nullifier; N] { - let mut final_nullifiers = [Nullifier::empty(); N]; +pub fn squash_transient_nullifiers(nullifiers: [ScopedNullifier; N]) -> [ScopedNullifier; N] { + let mut final_nullifiers = [ScopedNullifier::empty(); N]; let mut num_nullifiers = 0; for i in 0..N { let nullifier = nullifiers[i]; - if nullifier.note_hash == 0 { + if nullifier.nullified_note_hash() == 0 { final_nullifiers[num_nullifiers] = nullifier; num_nullifiers += 1; } diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/types/public_data_hint.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/types/public_data_hint.nr index 28a3eb74cb0c..2bd7316a9a9f 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/types/public_data_hint.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/types/public_data_hint.nr @@ -1,14 +1,14 @@ use crate::reset::{mutable_data_read_request::LeafDataHint}; use dep::types::{ - abis::membership_witness::PublicDataMembershipWitness, - public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage + public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage, merkle_tree::MembershipWitness, + constants::PUBLIC_DATA_TREE_HEIGHT }; struct PublicDataHint { leaf_slot: Field, value: Field, override_counter: u32, - membership_witness: PublicDataMembershipWitness, // Should be MembershipWitness when we can handle generics when converting to ts types. + membership_witness: MembershipWitness, // Should be MembershipWitness when we can handle generics when converting to ts types. leaf_preimage: PublicDataTreeLeafPreimage, } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_data.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_data.nr index aecee269f9e8..07abf8e362cd 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_data.nr @@ -1,9 +1,7 @@ use crate::abis::base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInputs; use dep::types::{ - abis::membership_witness::VKMembershipWitness, - constants::ROLLUP_VK_TREE_HEIGHT, - mocked::{Proof, VerificationKey}, - traits::Empty + constants::ROLLUP_VK_TREE_HEIGHT, mocked::{Proof, VerificationKey}, traits::Empty, + merkle_tree::MembershipWitness }; struct PreviousRollupData{ @@ -11,7 +9,7 @@ struct PreviousRollupData{ proof : Proof, vk : VerificationKey, vk_index : u32, - vk_sibling_path : VKMembershipWitness, + vk_sibling_path : MembershipWitness, } impl Empty for PreviousRollupData { @@ -21,7 +19,7 @@ impl Empty 
for PreviousRollupData { proof : Proof::empty(), vk : VerificationKey::empty(), vk_index : 0 as u32, - vk_sibling_path : VKMembershipWitness::empty(), + vk_sibling_path : MembershipWitness::empty(), } } -} \ No newline at end of file +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr index 3df978857add..238cc1dbd131 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr @@ -9,10 +9,9 @@ use crate::{ use dep::types::{ hash::sha256_to_field, abis::{ - append_only_tree_snapshot::AppendOnlyTreeSnapshot, - membership_witness::{ArchiveRootMembershipWitness, NullifierMembershipWitness, PublicDataMembershipWitness}, - nullifier_leaf_preimage::NullifierLeafPreimage, public_data_update_request::PublicDataUpdateRequest, - public_data_read::PublicDataRead, kernel_data::KernelData + append_only_tree_snapshot::AppendOnlyTreeSnapshot, nullifier_leaf_preimage::NullifierLeafPreimage, + public_data_update_request::PublicDataUpdateRequest, public_data_read::PublicDataRead, + kernel_data::KernelData }, constants::{ NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, @@ -20,7 +19,7 @@ use dep::types::{ MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, NUM_ENCRYPTED_LOGS_HASHES_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, NUM_UNENCRYPTED_LOGS_HASHES_PER_TX, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_TREE_HEIGHT, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, - PUBLIC_DATA_SUBTREE_HEIGHT + PUBLIC_DATA_SUBTREE_HEIGHT, ARCHIVE_HEIGHT }, merkle_tree::{ append_only_tree, assert_check_membership, calculate_empty_tree_root, calculate_subtree_root, @@ -43,9 +42,9 @@ struct BaseRollupInputs { sorted_public_data_writes: [PublicDataTreeLeaf; 
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], sorted_public_data_writes_indexes: [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], low_public_data_writes_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], - low_public_data_writes_witnesses: [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], + low_public_data_writes_witnesses: [MembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], - archive_root_membership_witness: ArchiveRootMembershipWitness, + archive_root_membership_witness: MembershipWitness, constants: ConstantRollupData, } @@ -148,7 +147,7 @@ impl BaseRollupInputs { self.state_diff_hints.nullifier_subtree_sibling_path, self.state_diff_hints.nullifier_predecessor_preimages, self.state_diff_hints.nullifier_predecessor_membership_witnesses.map( - |witness: NullifierMembershipWitness| { + |witness: MembershipWitness| { MembershipWitness { leaf_index: witness.leaf_index, sibling_path: witness.sibling_path, @@ -266,7 +265,7 @@ fn insert_public_data_update_requests( sorted_public_data_writes: [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], sorted_public_data_writes_indexes: [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], low_public_data_writes_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], - low_public_data_writes_witnesses: [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], + low_public_data_writes_witnesses: [MembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], public_data_writes_subtree_sibling_path: [Field; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH] ) -> AppendOnlyTreeSnapshot { indexed_tree::batch_insert( @@ -277,7 +276,7 @@ fn insert_public_data_update_requests( public_data_writes_subtree_sibling_path, low_public_data_writes_preimages, low_public_data_writes_witnesses.map( - |witness: PublicDataMembershipWitness| { + |witness: MembershipWitness| { MembershipWitness { leaf_index: witness.leaf_index, sibling_path: witness.sibling_path, @@ 
-369,12 +368,11 @@ mod tests { use dep::types::{ abis::{ append_only_tree_snapshot::AppendOnlyTreeSnapshot, - membership_witness::{ArchiveRootMembershipWitness, NullifierMembershipWitness, PublicDataMembershipWitness}, nullifier_leaf_preimage::NullifierLeafPreimage, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, kernel_data::KernelData, side_effect::SideEffect, accumulated_data::CombinedAccumulatedData }, - address::{AztecAddress, EthAddress}, + merkle_tree::MembershipWitness, address::{AztecAddress, EthAddress}, constants::{ ARCHIVE_HEIGHT, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, @@ -409,12 +407,12 @@ mod tests { snapshot: AppendOnlyTreeSnapshot, public_data_writes: BoundedVec<(u64, PublicDataTreeLeaf), 2>, mut pre_existing_public_data: [PublicDataTreeLeafPreimage; EXISTING_LEAVES] - ) -> ([Field; 35], [PublicDataTreeLeaf; 32], [u64; 32], [PublicDataTreeLeafPreimage; 32], [PublicDataMembershipWitness; 32], [PublicDataTreeLeafPreimage; EXISTING_LEAVES]) { + ) -> ([Field; 35], [PublicDataTreeLeaf; 32], [u64; 32], [PublicDataTreeLeafPreimage; 32], [MembershipWitness; 32], [PublicDataTreeLeafPreimage; EXISTING_LEAVES]) { let mut subtree_path = [0; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH]; let mut sorted_public_data_writes = [PublicDataTreeLeaf::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; let mut sorted_public_data_writes_indexes = [0 as u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; let mut low_public_data_writes_preimages = [PublicDataTreeLeafPreimage::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; - let mut low_public_data_writes_witnesses = [PublicDataMembershipWitness::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; + let mut low_public_data_writes_witnesses = [MembershipWitness::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; let mut new_subtree = [PublicDataTreeLeafPreimage::empty(); 
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; for i in 0..MAX_PUBLIC_DATA_WRITES_PER_TEST { @@ -458,7 +456,7 @@ mod tests { }; } low_public_data_writes_preimages[i] = low_leaf; - low_public_data_writes_witnesses[i] = PublicDataMembershipWitness { + low_public_data_writes_witnesses[i] = MembershipWitness { leaf_index: low_leaf_index as Field, sibling_path: public_data_tree.get_sibling_path(low_leaf_index) }; @@ -526,9 +524,9 @@ mod tests { nullifier_tree: &mut NonEmptyMerkleTree, kernel_data: &mut KernelData, start_nullifier_tree_snapshot: AppendOnlyTreeSnapshot - ) -> ([NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX], [NullifierMembershipWitness; MAX_NEW_NULLIFIERS_PER_TX], [Field; MAX_NEW_NULLIFIERS_PER_TX], [u64; MAX_NEW_NULLIFIERS_PER_TX]) { + ) -> ([NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX], [MembershipWitness; MAX_NEW_NULLIFIERS_PER_TX], [Field; MAX_NEW_NULLIFIERS_PER_TX], [u64; MAX_NEW_NULLIFIERS_PER_TX]) { let mut nullifier_predecessor_preimages = [NullifierLeafPreimage::empty(); MAX_NEW_NULLIFIERS_PER_TX]; - let mut low_nullifier_membership_witness = [NullifierMembershipWitness::empty(); MAX_NEW_NULLIFIERS_PER_TX]; + let mut low_nullifier_membership_witness = [MembershipWitness::empty(); MAX_NEW_NULLIFIERS_PER_TX]; let sorted_new_nullifier_tuples = sort_high_to_low( self.new_nullifiers.storage.map(|insertion: NullifierInsertion| insertion.value), @@ -562,7 +560,7 @@ mod tests { let mut low_preimage = pre_existing_nullifiers[low_index]; nullifier_predecessor_preimages[i] = low_preimage; - low_nullifier_membership_witness[i] = NullifierMembershipWitness { + low_nullifier_membership_witness[i] = MembershipWitness { leaf_index: low_index as Field, sibling_path: nullifier_tree.get_sibling_path(low_index) }; @@ -687,7 +685,7 @@ mod tests { sorted_public_data_writes_indexes, low_public_data_writes_preimages, low_public_data_writes_witnesses, - archive_root_membership_witness: ArchiveRootMembershipWitness { leaf_index: 0, sibling_path: 
start_archive.get_sibling_path(0) }, + archive_root_membership_witness: MembershipWitness { leaf_index: 0, sibling_path: start_archive.get_sibling_path(0) }, constants: self.constants } } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/state_diff_hints.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/state_diff_hints.nr index 9c7dd03d70d1..6af5eb698afa 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/state_diff_hints.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/state_diff_hints.nr @@ -1,14 +1,16 @@ use dep::types::{ - abis::{membership_witness::NullifierMembershipWitness, nullifier_leaf_preimage::NullifierLeafPreimage}, + abis::{nullifier_leaf_preimage::NullifierLeafPreimage}, constants::{ MAX_NEW_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, - NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH -} + NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, + NULLIFIER_TREE_HEIGHT +}, + merkle_tree::MembershipWitness }; struct StateDiffHints { nullifier_predecessor_preimages: [NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX], - nullifier_predecessor_membership_witnesses: [NullifierMembershipWitness; MAX_NEW_NULLIFIERS_PER_TX], + nullifier_predecessor_membership_witnesses: [MembershipWitness; MAX_NEW_NULLIFIERS_PER_TX], sorted_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_TX], sorted_nullifier_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr index 2d0566b9c0d7..2c2a9325bcb1 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr @@ -6,9 +6,6 @@ mod function_selector; mod function_data; mod global_variables; - -mod membership_witness; - mod note_hash_leaf_preimage; mod 
nullifier_leaf_preimage; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr index e6907de06f49..1a49b8de968d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr @@ -48,7 +48,10 @@ impl CombinedAccumulatedData { CombinedAccumulatedData { new_note_hashes: array_merge(non_revertible.new_note_hashes, revertible.new_note_hashes).map(|n: NoteHash| n.value), new_nullifiers: array_merge(non_revertible.new_nullifiers, revertible.new_nullifiers).map(|n: Nullifier| n.value), - new_l2_to_l1_msgs: revertible.new_l2_to_l1_msgs, + new_l2_to_l1_msgs: array_merge( + non_revertible.new_l2_to_l1_msgs, + revertible.new_l2_to_l1_msgs + ), encrypted_logs_hash, unencrypted_logs_hash, encrypted_log_preimages_length: non_revertible.encrypted_log_preimages_length diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr index 31c73652ee0a..12c19d640b1a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr @@ -1,8 +1,9 @@ use crate::{ abis::{ - call_request::CallRequest, gas::Gas, note_hash::NoteHashContext, nullifier::Nullifier, + call_request::CallRequest, gas::Gas, note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect -} +}, + messaging::l2_to_l1_message::ScopedL2ToL1Message }; use crate::constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, 
MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, @@ -11,9 +12,9 @@ use crate::constants::{ }; struct PrivateAccumulatedData { - new_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - new_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], - new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_TX], + new_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + new_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], + new_l2_to_l1_msgs: [ScopedL2ToL1Message; MAX_NEW_L2_TO_L1_MSGS_PER_TX], encrypted_logs_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], unencrypted_logs_hashes: [SideEffect; MAX_UNENCRYPTED_LOGS_PER_TX], diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr index 984a1a292a11..085971032792 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr @@ -7,7 +7,7 @@ use crate::{ private_accumulated_data::PrivateAccumulatedData, public_accumulated_data::PublicAccumulatedData, public_accumulated_data_builder::PublicAccumulatedDataBuilder }, - call_request::CallRequest, note_hash::{NoteHash, NoteHashContext}, nullifier::Nullifier, + call_request::CallRequest, note_hash::{NoteHash, ScopedNoteHash}, nullifier::ScopedNullifier, public_data_update_request::PublicDataUpdateRequest, side_effect::SideEffect }, constants::{ @@ -16,7 +16,7 @@ use crate::{ MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE }, - traits::{Empty, is_empty} + messaging::l2_to_l1_message::ScopedL2ToL1Message, traits::{Empty, is_empty} }; // Builds via PrivateKernelCircuitPublicInputsBuilder: @@ -24,9 +24,9 @@ use crate::{ // .to_combined: 
KernelCircuitPublicInputs.end // .split_to_public: PublicKernelCircuitPublicInputs.(end,end_non_revertible) struct PrivateAccumulatedDataBuilder { - new_note_hashes: BoundedVec, - new_nullifiers: BoundedVec, - new_l2_to_l1_msgs: BoundedVec, + new_note_hashes: BoundedVec, + new_nullifiers: BoundedVec, + new_l2_to_l1_msgs: BoundedVec, encrypted_logs_hashes: BoundedVec, unencrypted_logs_hashes: BoundedVec, @@ -63,9 +63,9 @@ impl PrivateAccumulatedDataBuilder { let gas_used = self.to_metered_gas_used() + Gas::tx_overhead() + teardown_gas; CombinedAccumulatedData { - new_note_hashes: self.new_note_hashes.storage.map(|n: NoteHashContext| n.value), - new_nullifiers: self.new_nullifiers.storage.map(|n: Nullifier| n.value), - new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, + new_note_hashes: self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash.value), + new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier.value), + new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), encrypted_logs_hash, unencrypted_logs_hash, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -124,7 +124,7 @@ impl PrivateAccumulatedDataBuilder { for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { let note_hash = self.new_note_hashes.storage[i]; let public_note_hash = note_hash.expose_to_public(); - if note_hash.counter < min_revertible_side_effect_counter { + if note_hash.counter() < min_revertible_side_effect_counter { non_revertible_builder.new_note_hashes.push(public_note_hash); if !is_empty(public_note_hash) { non_revertible_da_gas_used += DA_GAS_PER_FIELD ; @@ -139,19 +139,29 @@ impl PrivateAccumulatedDataBuilder { for i in 0..MAX_NEW_NULLIFIERS_PER_TX { let nullifier = self.new_nullifiers.storage[i]; - if nullifier.counter < min_revertible_side_effect_counter { - non_revertible_builder.new_nullifiers.push(nullifier); - if !is_empty(nullifier) { + let public_nullifier = nullifier.expose_to_public(); + if 
nullifier.counter() < min_revertible_side_effect_counter { + non_revertible_builder.new_nullifiers.push(public_nullifier); + if !is_empty(public_nullifier) { non_revertible_da_gas_used += DA_GAS_PER_FIELD; } } else { - revertible_builder.new_nullifiers.push(nullifier); - if !is_empty(nullifier) { + revertible_builder.new_nullifiers.push(public_nullifier); + if !is_empty(public_nullifier) { revertible_da_gas_used += DA_GAS_PER_FIELD; } } } + for i in 0..MAX_NEW_L2_TO_L1_MSGS_PER_TX { + let msg = self.new_l2_to_l1_msgs.storage[i]; + if msg.counter() < min_revertible_side_effect_counter { + non_revertible_builder.new_l2_to_l1_msgs.push(msg.message.content); + } else { + revertible_builder.new_l2_to_l1_msgs.push(msg.message.content); + } + } + // TODO(gas): add AVM_STARTUP_L2_GAS here for i in 0..MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX { let call_stack_item = self.public_call_stack.storage[i]; @@ -180,7 +190,6 @@ impl PrivateAccumulatedDataBuilder { } } - revertible_builder.new_l2_to_l1_msgs = self.new_l2_to_l1_msgs; // TODO(1641) & TODO(4712): Once we track logs with more info, including individual lens, split here revertible_builder.encrypted_log_preimages_length = self.encrypted_log_preimages_length; revertible_builder.unencrypted_log_preimages_length = self.unencrypted_log_preimages_length; @@ -197,25 +206,35 @@ mod tests { use crate::{ abis::{ accumulated_data::private_accumulated_data_builder::PrivateAccumulatedDataBuilder, gas::Gas, - call_request::CallRequest, caller_context::CallerContext, - note_hash::{NoteHash, NoteHashContext}, nullifier::Nullifier, - public_data_update_request::PublicDataUpdateRequest, side_effect::SideEffect + call_request::CallRequest, caller_context::CallerContext, note_hash::NoteHash, + nullifier::Nullifier, public_data_update_request::PublicDataUpdateRequest, + side_effect::SideEffect }, - address::AztecAddress, utils::arrays::array_eq, constants::{DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE} + address::{AztecAddress, EthAddress}, 
messaging::l2_to_l1_message::L2ToL1Message, + utils::arrays::array_eq, constants::{DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE} }; #[test] unconstrained fn splits_revertible_and_non_revertible() { let mut builder = PrivateAccumulatedDataBuilder::empty(); + let contract_address = AztecAddress::from_field(8989); + + let min_revertible_side_effect_counter = 7; + + // Non revertible: counter < 7 let non_revertible_note_hashes = [ - NoteHashContext { value: 1, counter: 1, nullifier_counter: 20 }, - NoteHashContext { value: 2, counter: 3, nullifier_counter: 5 } + NoteHash { value: 1, counter: 1 }.scope(20, contract_address), + NoteHash { value: 2, counter: 3 }.scope(5, contract_address) ]; let non_revertible_nullifiers = [ - Nullifier { value: 10, note_hash: 1, counter: 2 }, - Nullifier { value: 20, note_hash: 2, counter: 4 } + Nullifier { value: 10, note_hash: 1, counter: 2 }.scope(contract_address), + Nullifier { value: 20, note_hash: 2, counter: 4 }.scope(contract_address) + ]; + + let non_revertible_l2_to_l1_messages = [ + L2ToL1Message { recipient: EthAddress::from_field(3030), content: 333333, counter: 5 }.scope(AztecAddress::from_field(9900)) ]; let non_revertible_public_stack = [ @@ -235,14 +254,20 @@ mod tests { } ]; + // Revertible: counter >= 7 + let revertible_note_hashes = [ - NoteHashContext { value: 3, counter: 7, nullifier_counter: 15 }, - NoteHashContext { value: 4, counter: 10, nullifier_counter: 0 } + NoteHash { value: 3, counter: 7 }.scope(15, contract_address), + NoteHash { value: 4, counter: 10 }.scope(0, contract_address) ]; let revertible_nullifiers = [ - Nullifier { value: 30, note_hash: 3, counter: 8 }, - Nullifier { value: 40, note_hash: 4, counter: 11 } + Nullifier { value: 30, note_hash: 3, counter: 8 }.scope(contract_address), + Nullifier { value: 40, note_hash: 4, counter: 11 }.scope(contract_address) + ]; + + let revertible_l2_to_l1_messages = [ + L2ToL1Message { recipient: EthAddress::from_field(3030), content: 444444, counter: 13 
}.scope(AztecAddress::from_field(7788)) ]; let revertible_public_call_stack = [ @@ -261,10 +286,13 @@ mod tests { builder.new_nullifiers.extend_from_array(non_revertible_nullifiers); builder.new_nullifiers.extend_from_array(revertible_nullifiers); + builder.new_l2_to_l1_msgs.extend_from_array(non_revertible_l2_to_l1_messages); + builder.new_l2_to_l1_msgs.extend_from_array(revertible_l2_to_l1_messages); + builder.public_call_stack.extend_from_array(non_revertible_public_stack); builder.public_call_stack.extend_from_array(revertible_public_call_stack); - let (non_revertible, revertible) = builder.split_to_public(7, Gas::new(42, 17)); + let (non_revertible, revertible) = builder.split_to_public(min_revertible_side_effect_counter, Gas::new(42, 17)); assert( array_eq( @@ -275,7 +303,16 @@ mod tests { ] ) ); - assert(array_eq(non_revertible.new_nullifiers, non_revertible_nullifiers)); + assert( + array_eq( + non_revertible.new_nullifiers, + [ + Nullifier { value: 10, note_hash: 0, counter: 0 }, + Nullifier { value: 20, note_hash: 0, counter: 0 } + ] + ) + ); + assert(array_eq(non_revertible.new_l2_to_l1_msgs, [333333])); assert(array_eq(non_revertible.public_call_stack, non_revertible_public_stack)); assert( @@ -287,7 +324,16 @@ mod tests { ] ) ); - assert(array_eq(revertible.new_nullifiers, revertible_nullifiers)); + assert( + array_eq( + revertible.new_nullifiers, + [ + Nullifier { value: 30, note_hash: 0, counter: 0 }, + Nullifier { value: 40, note_hash: 0, counter: 0 } + ] + ) + ); + assert(array_eq(revertible.new_l2_to_l1_msgs, [444444])); assert(array_eq(revertible.public_call_stack, revertible_public_call_stack)); assert_eq( diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/append_only_tree_snapshot.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/append_only_tree_snapshot.nr index 12c74237a569..83925d95897f 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/append_only_tree_snapshot.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/types/src/abis/append_only_tree_snapshot.nr @@ -26,4 +26,4 @@ impl Eq for AppendOnlyTreeSnapshot { fn eq(self, other : AppendOnlyTreeSnapshot) -> bool { (self.root == other.root) & (self.next_available_leaf_index == other.next_available_leaf_index) } -} \ No newline at end of file +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/membership_witness.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/membership_witness.nr deleted file mode 100644 index e0dfc960f086..000000000000 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/membership_witness.nr +++ /dev/null @@ -1,77 +0,0 @@ -use crate::{ - constants::{ - FUNCTION_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, ROLLUP_VK_TREE_HEIGHT, - ARCHIVE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT -}, - traits::Empty -}; - -// TODO(Kev): Instead of doing `MembershipWitness` we are forced -// to do this new struct because the typescript bindings generator -// does not have logic to monomorphize these properly. 
See the file named -// `typechain-type-alias` in the folder `bug-collecting-crate` -struct FunctionLeafMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; FUNCTION_TREE_HEIGHT] -} - -struct VKMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; ROLLUP_VK_TREE_HEIGHT] -} - -struct NullifierMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; NULLIFIER_TREE_HEIGHT] -} - -struct PublicDataMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; PUBLIC_DATA_TREE_HEIGHT] -} - -struct ArchiveRootMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; ARCHIVE_HEIGHT] -} - -struct NoteHashMembershipWitness { - leaf_index: Field, - sibling_path: [Field; NOTE_HASH_TREE_HEIGHT], -} - -impl Empty for VKMembershipWitness { - fn empty() -> Self { - VKMembershipWitness { - leaf_index: 0, - sibling_path: [0; ROLLUP_VK_TREE_HEIGHT] - } - } -} - -impl Empty for NullifierMembershipWitness { - fn empty() -> Self { - NullifierMembershipWitness { - leaf_index: 0, - sibling_path: [0; NULLIFIER_TREE_HEIGHT] - } - } -} - -impl Empty for PublicDataMembershipWitness { - fn empty() -> Self { - PublicDataMembershipWitness { - leaf_index: 0, - sibling_path: [0; PUBLIC_DATA_TREE_HEIGHT] - } - } -} - -impl Empty for NoteHashMembershipWitness { - fn empty() -> Self { - NoteHashMembershipWitness { - leaf_index: 0, - sibling_path: [0; NOTE_HASH_TREE_HEIGHT] - } - } -} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr index 64c95058f834..53a248718c20 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr @@ -1,8 +1,8 @@ use crate::{ - abis::read_request::ReadRequestContext, address::AztecAddress, + abis::read_request::ScopedReadRequest, address::AztecAddress, abis::side_effect::{Ordered, OrderedValue, Readable}, - 
constants::{NOTE_HASH_LENGTH, NOTE_HASH_CONTEXT_LENGTH}, hash::silo_note_hash, - traits::{Empty, Serialize, Deserialize} + constants::{NOTE_HASH_LENGTH, SCOPED_NOTE_HASH_LENGTH}, traits::{Empty, Serialize, Deserialize}, + utils::{arrays::array_concat, reader::Reader} }; use dep::std::cmp::Eq; @@ -11,21 +11,6 @@ struct NoteHash { counter: u32, } -impl Ordered for NoteHash { - fn counter(self) -> u32 { - self.counter - } -} - -impl OrderedValue for NoteHash { - fn value(self) -> Field { - self.value - } - fn counter(self) -> u32 { - self.counter - } -} - impl Eq for NoteHash { fn eq(self, other: NoteHash) -> bool { (self.value == other.value) @@ -58,87 +43,85 @@ impl Deserialize for NoteHash { } impl NoteHash { - pub fn to_context(self, nullifier_counter: u32) -> NoteHashContext { - NoteHashContext { value: self.value, counter: self.counter, nullifier_counter } + pub fn scope(self, nullifier_counter: u32, contract_address: AztecAddress) -> ScopedNoteHash { + ScopedNoteHash { note_hash: self, nullifier_counter, contract_address } } } -struct NoteHashContext { - value: Field, - counter: u32, +struct ScopedNoteHash { + note_hash: NoteHash, nullifier_counter: u32, + contract_address: AztecAddress, } -impl Ordered for NoteHashContext { +impl Ordered for ScopedNoteHash { fn counter(self) -> u32 { - self.counter + self.note_hash.counter } } -impl OrderedValue for NoteHashContext { +impl OrderedValue for ScopedNoteHash { fn value(self) -> Field { - self.value + self.note_hash.value } fn counter(self) -> u32 { - self.counter + self.note_hash.counter } } -impl Eq for NoteHashContext { - fn eq(self, other: NoteHashContext) -> bool { - (self.value == other.value) - & (self.counter == other.counter) +impl Eq for ScopedNoteHash { + fn eq(self, other: ScopedNoteHash) -> bool { + (self.note_hash == other.note_hash) & (self.nullifier_counter == other.nullifier_counter) + & (self.contract_address == other.contract_address) } } -impl Empty for NoteHashContext { +impl Empty for 
ScopedNoteHash { fn empty() -> Self { - NoteHashContext { - value: 0, - counter: 0, + ScopedNoteHash { + note_hash: NoteHash::empty(), nullifier_counter: 0, + contract_address: AztecAddress::zero(), } } } -impl Serialize for NoteHashContext { - fn serialize(self) -> [Field; NOTE_HASH_CONTEXT_LENGTH] { - [self.value, self.counter as Field, self.nullifier_counter as Field] +impl Serialize for ScopedNoteHash { + fn serialize(self) -> [Field; SCOPED_NOTE_HASH_LENGTH] { + array_concat(self.note_hash.serialize(), [self.nullifier_counter as Field, self.contract_address.to_field()]) } } -impl Deserialize for NoteHashContext { - fn deserialize(values: [Field; NOTE_HASH_CONTEXT_LENGTH]) -> Self { - Self { - value: values[0], - counter: values[1] as u32, - nullifier_counter: values[2] as u32, - } +impl Deserialize for ScopedNoteHash { + fn deserialize(values: [Field; SCOPED_NOTE_HASH_LENGTH]) -> Self { + let mut reader = Reader::new(values); + let res = Self { + note_hash: reader.read_struct(NoteHash::deserialize), + nullifier_counter: reader.read_u32(), + contract_address: reader.read_struct(AztecAddress::deserialize), + }; + reader.finish(); + res } } -impl Readable for NoteHashContext { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - // TODO(#6122) - let siloed_value = silo_note_hash(read_request.contract_address, read_request.value); - assert_eq(self.value, siloed_value, "Value of the note hash does not match read request"); +impl Readable for ScopedNoteHash { + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + assert_eq(self.note_hash.value, read_request.value(), "Value of the note hash does not match read request"); + assert_eq(self.contract_address, read_request.contract_address, "Contract address of the note hash does not match read request"); assert( - read_request.counter > self.counter, "Read request counter must be greater than the counter of the note hash" + read_request.counter() > self.note_hash.counter, "Read 
request counter must be greater than the counter of the note hash" ); assert( - (self.nullifier_counter == 0) | (read_request.counter < self.nullifier_counter), "Read request counter must be less than the nullifier counter of the note hash" + (self.nullifier_counter == 0) | (read_request.counter() < self.nullifier_counter), "Read request counter must be less than the nullifier counter of the note hash" ); } } -impl NoteHashContext { - pub fn to_note_hash(self) -> NoteHash { - NoteHash { value: self.value, counter: self.counter } - } - +impl ScopedNoteHash { pub fn expose_to_public(self) -> NoteHash { // Hide the actual counter when exposing it to the public kernel. - NoteHash { value: self.value, counter: 0 } + NoteHash { value: self.note_hash.value, counter: 0 } } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash_leaf_preimage.nr index 5c9cf6ad4878..031325e6430a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash_leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash_leaf_preimage.nr @@ -1,7 +1,7 @@ global NOTE_HASH_LEAF_PREIMAGE_LENGTH: u64 = 1; use crate::{ - abis::{read_request::ReadRequestContext, side_effect::Readable}, hash::silo_note_hash, + abis::{read_request::ScopedReadRequest, side_effect::Readable}, hash::silo_note_hash, merkle_tree::leaf_preimage::LeafPreimage, traits::{Empty, Hash} }; @@ -28,11 +28,11 @@ impl LeafPreimage for NoteHashLeafPreimage { } impl Readable for NoteHashLeafPreimage { - fn assert_match_read_request(self, read_request: ReadRequestContext) { + fn assert_match_read_request(self, read_request: ScopedReadRequest) { // TODO(#2847): Read request value shouldn't have been siloed by apps. 
// let siloed_value = silo_note_hash(read_request.contract_address, read_request.value); // assert_eq(self.value, siloed_value, "Value of the note hash leaf does not match read request"); - assert_eq(self.value, read_request.value, "Value of the note hash leaf does not match read request"); + assert_eq(self.value, read_request.value(), "Value of the note hash leaf does not match read request"); } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr index b32a81ee264d..da4c140d43d5 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr @@ -1,7 +1,7 @@ use crate::{ - abis::{side_effect::{Ordered, OrderedValue, Readable}, read_request::ReadRequestContext}, - address::AztecAddress, constants::NULLIFIER_LENGTH, hash::silo_nullifier, - traits::{Empty, Hash, Serialize, Deserialize} + abis::{side_effect::{Ordered, OrderedValue, Readable}, read_request::ScopedReadRequest}, + address::AztecAddress, constants::{NULLIFIER_LENGTH, SCOPED_NULLIFIER_LENGTH}, hash::silo_nullifier, + traits::{Empty, Hash, Serialize, Deserialize}, utils::{arrays::array_concat, reader::Reader} }; struct Nullifier { @@ -10,12 +10,6 @@ struct Nullifier { note_hash: Field, } -impl Ordered for Nullifier { - fn counter(self) -> u32 { - self.counter - } -} - impl OrderedValue for Nullifier { fn value(self) -> Field { self.value @@ -60,11 +54,94 @@ impl Deserialize for Nullifier { } impl Readable for Nullifier { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - let siloed_value = silo_nullifier(read_request.contract_address, read_request.value); - assert_eq(self.value, siloed_value, "Value of the nullifier does not match read request"); + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + // Public kernels output Nullifier instead of ScopedNullifier. 
+ // The nullifier value has been siloed. + let siloed_request_value = silo_nullifier(read_request.contract_address, read_request.value()); + assert_eq(self.value, siloed_request_value, "Value of the nullifier does not match read request"); + assert( + read_request.counter() > self.counter, "Read request counter must be greater than the counter of the nullifier" + ); + } +} + +impl Nullifier { + pub fn scope(self, contract_address: AztecAddress) -> ScopedNullifier { + ScopedNullifier { nullifier: self, contract_address } + } +} + +struct ScopedNullifier { + nullifier: Nullifier, + contract_address: AztecAddress, +} + +impl Ordered for ScopedNullifier { + fn counter(self) -> u32 { + self.nullifier.counter + } +} + +impl OrderedValue for ScopedNullifier { + fn value(self) -> Field { + self.nullifier.value + } + fn counter(self) -> u32 { + self.nullifier.counter + } +} + +impl Eq for ScopedNullifier { + fn eq(self, other: ScopedNullifier) -> bool { + (self.nullifier == other.nullifier) + & (self.contract_address == other.contract_address) + } +} + +impl Empty for ScopedNullifier { + fn empty() -> Self { + ScopedNullifier { + nullifier: Nullifier::empty(), + contract_address: AztecAddress::empty(), + } + } +} + +impl Serialize for ScopedNullifier { + fn serialize(self) -> [Field; SCOPED_NULLIFIER_LENGTH] { + array_concat(self.nullifier.serialize(), [self.contract_address.to_field()]) + } +} + +impl Deserialize for ScopedNullifier { + fn deserialize(values: [Field; SCOPED_NULLIFIER_LENGTH]) -> Self { + let mut reader = Reader::new(values); + let res = Self { + nullifier: reader.read_struct(Nullifier::deserialize), + contract_address: AztecAddress::from_field(values[3]), + }; + reader.finish(); + res + } +} + +impl Readable for ScopedNullifier { + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + assert_eq(self.nullifier.value, read_request.value(), "Value of the nullifier does not match read request"); + assert_eq(self.contract_address, 
read_request.contract_address, "Contract address of the nullifier does not match read request"); assert( - read_request.counter > self.counter, "Read request counter must be greater than the counter of the nullifier" + read_request.counter() > self.nullifier.counter, "Read request counter must be greater than the counter of the nullifier" ); } } + +impl ScopedNullifier { + pub fn nullified_note_hash(self) -> Field { + self.nullifier.note_hash + } + + pub fn expose_to_public(self) -> Nullifier { + // Hide the actual counter and note hash when exposing it to the public kernel. + Nullifier { value: self.nullifier.value, counter: 0, note_hash: 0 } + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_key_validation_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_key_validation_request.nr index 1a08e9f7a2fb..19789c938a6f 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_key_validation_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_key_validation_request.nr @@ -1,14 +1,14 @@ use dep::std::cmp::Eq; use crate::{ address::AztecAddress, - constants::{NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH, NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH}, + constants::{SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH, NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH}, traits::{Empty, Serialize, Deserialize}, grumpkin_point::GrumpkinPoint, - grumpkin_private_key::GrumpkinPrivateKey + utils::{arrays::array_concat, reader::Reader} }; struct NullifierKeyValidationRequest { master_nullifier_public_key: GrumpkinPoint, - app_nullifier_secret_key: Field, // not a GrumpkinScalar because it's output of poseidon2 + app_nullifier_secret_key: Field, // not a grumpkin scalar because it's output of poseidon2 } impl Eq for NullifierKeyValidationRequest { @@ -47,57 +47,47 @@ impl Deserialize for NullifierKeyValida } impl NullifierKeyValidationRequest { - pub fn to_context(self, 
contract_address: AztecAddress) -> NullifierKeyValidationRequestContext { - NullifierKeyValidationRequestContext { - master_nullifier_public_key: self.master_nullifier_public_key, - app_nullifier_secret_key: self.app_nullifier_secret_key, - contract_address - } + pub fn scope(self, contract_address: AztecAddress) -> ScopedNullifierKeyValidationRequest { + ScopedNullifierKeyValidationRequest { request: self, contract_address } } } -struct NullifierKeyValidationRequestContext { - master_nullifier_public_key: GrumpkinPoint, - app_nullifier_secret_key: Field, +struct ScopedNullifierKeyValidationRequest { + request: NullifierKeyValidationRequest, contract_address: AztecAddress, } -impl Eq for NullifierKeyValidationRequestContext { - fn eq(self, request: NullifierKeyValidationRequestContext) -> bool { - (request.master_nullifier_public_key.eq(self.master_nullifier_public_key)) - & (request.app_nullifier_secret_key.eq(self.app_nullifier_secret_key)) - & (request.contract_address.eq(self.contract_address)) +impl Eq for ScopedNullifierKeyValidationRequest { + fn eq(self, other: ScopedNullifierKeyValidationRequest) -> bool { + (self.request.eq(other.request)) + & (self.contract_address.eq(other.contract_address)) } } -impl Empty for NullifierKeyValidationRequestContext { +impl Empty for ScopedNullifierKeyValidationRequest { fn empty() -> Self { - NullifierKeyValidationRequestContext { - master_nullifier_public_key: GrumpkinPoint::zero(), - app_nullifier_secret_key: 0, + ScopedNullifierKeyValidationRequest { + request: NullifierKeyValidationRequest::empty(), contract_address: AztecAddress::zero(), } } } -impl Serialize for NullifierKeyValidationRequestContext { - fn serialize(self) -> [Field; NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH] { - [ - self.master_nullifier_public_key.x, - self.master_nullifier_public_key.y, - self.app_nullifier_secret_key, - self.contract_address.to_field(), - ] +impl Serialize for ScopedNullifierKeyValidationRequest { + fn serialize(self) -> 
[Field; SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH] { + array_concat(self.request.serialize(), [self.contract_address.to_field()]) } } -impl Deserialize for NullifierKeyValidationRequestContext { - fn deserialize(fields: [Field; NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH]) -> Self { - Self { - master_nullifier_public_key: GrumpkinPoint::new(fields[0], fields[1]), - app_nullifier_secret_key: fields[2], +impl Deserialize for ScopedNullifierKeyValidationRequest { + fn deserialize(fields: [Field; SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH]) -> Self { + let mut reader = Reader::new(fields); + let res = Self { + request: reader.read_struct(NullifierKeyValidationRequest::deserialize), contract_address: AztecAddress::from_field(fields[3]), - } + }; + reader.finish(); + res } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr index 0dcba717633a..2eaf66edc90e 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr @@ -1,7 +1,7 @@ global NULLIFIER_LEAF_PREIMAGE_LENGTH: u64 = 3; use crate::{ - abis::{read_request::ReadRequestContext, side_effect::Readable}, hash::silo_nullifier, + abis::{read_request::ScopedReadRequest, side_effect::Readable}, hash::silo_nullifier, merkle_tree::leaf_preimage::{LeafPreimage, IndexedTreeLeafPreimage}, traits::{Empty, Hash} }; @@ -56,8 +56,8 @@ impl IndexedTreeLeafPreimage for NullifierLeafPreimage { } impl Readable for NullifierLeafPreimage { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - let siloed_value = silo_nullifier(read_request.contract_address, read_request.value); + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + let siloed_value = silo_nullifier(read_request.contract_address, read_request.value()); 
assert_eq(self.nullifier, siloed_value, "Value of the nullifier leaf does not match read request"); } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr index 652bceb0fe19..a24fd6e98818 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr @@ -85,6 +85,6 @@ fn empty_hash() { let hash = item.hash(); // Value from private_call_stack_item.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x2485b8cfe671417410382ba6dfc803de70d9d45008a1b30c31b34d7c4de92106; + let test_data_empty_hash = 0x2a1bab3d40feb5234df51a7a6665998920119fd60f5c1e4d9ff3f1128a5f8f81; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr index e48226fa1f44..fe7429aef2af 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr @@ -215,6 +215,6 @@ fn empty_hash() { let inputs = PrivateCircuitPublicInputs::empty(); let hash = inputs.hash(); // Value from private_circuit_public_inputs.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x249d46b5a3e35f6489e793cd604e375634d4bfdac762ec06b5f8f03016bb4257; + let test_data_empty_hash = 0x09cc3ed80b2171f093828087431d66777514912b4e7baddb418ab5f1ddbbfd5a; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr index e00c6c79a47c..760189375dd7 
100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr @@ -1,11 +1,9 @@ -use crate::abis::{ - call_request::CallRequest, private_call_stack_item::PrivateCallStackItem, - membership_witness::FunctionLeafMembershipWitness -}; +use crate::abis::{call_request::CallRequest, private_call_stack_item::PrivateCallStackItem}; use crate::address::{SaltedInitializationHash, PublicKeysHash, EthAddress}; use crate::contract_class_id::ContractClassId; use crate::mocked::{Proof, VerificationKey}; -use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL}; +use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, FUNCTION_TREE_HEIGHT}; +use crate::merkle_tree::membership::MembershipWitness; struct PrivateCallData { call_stack_item: PrivateCallStackItem, @@ -20,7 +18,7 @@ struct PrivateCallData { public_keys_hash: PublicKeysHash, contract_class_artifact_hash: Field, contract_class_public_bytecode_commitment: Field, - function_leaf_membership_witness: FunctionLeafMembershipWitness, + function_leaf_membership_witness: MembershipWitness, acir_hash: Field, } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr index bdd3aa1c570b..8c24533dd6b1 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr @@ -69,7 +69,7 @@ mod tests { let call_stack_item = PublicCallStackItem { contract_address, public_inputs, is_execution_request: true, function_data }; // Value from public_call_stack_item.test.ts "Computes a callstack item request hash" test - let test_data_call_stack_item_request_hash = 
0x1595b195f0faa3a492109039dc807b291d0edd81a5e3a380866d5098ffd505dd; + let test_data_call_stack_item_request_hash = 0x1177a69fbc37f0ebdf290025414ff72504497840f174896bd427d0f30ec21c55; assert_eq(call_stack_item.hash(), test_data_call_stack_item_request_hash); } @@ -87,7 +87,7 @@ mod tests { let call_stack_item = PublicCallStackItem { contract_address, public_inputs, is_execution_request: false, function_data }; // Value from public_call_stack_item.test.ts "Computes a callstack item hash" test - let test_data_call_stack_item_hash = 0x1122a7d7e6174b7e5d111c8eb0233564d3a1ffd755afc7ce4b594d738e2770d7; + let test_data_call_stack_item_hash = 0x0f7624c0d5ea65fcec318c4d34cb3fcbf9c67435aebbf1548b3c90ef641424f8; assert_eq(call_stack_item.hash(), test_data_call_stack_item_hash); } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr index 41fadb37de33..aafdd024ec80 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr @@ -195,6 +195,6 @@ fn empty_hash() { let hash = inputs.hash(); // Value from public_circuit_public_inputs.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x1a2da219bb2e3ac24519fd844365c4f656fc3ba8c58f2960706d25bceb4d1769; + let test_data_empty_hash = 0x132559f41b7adc7388e0cd52b91fd6837c296b2f9ec1b6d2ed046f7a56db18f8; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr index 7bb0e0ffa428..9cac3dc5c889 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr @@ -1,25 +1,16 @@ use crate::{ - 
abis::side_effect::OrderedValue, traits::{Empty, Serialize, Deserialize}, address::AztecAddress, - constants::READ_REQUEST_LENGTH + traits::{Empty, Serialize, Deserialize}, address::AztecAddress, constants::READ_REQUEST_LENGTH, + utils::{arrays::array_concat, reader::Reader} }; use dep::std::cmp::Eq; -global READ_REQUEST_CONTEXT_SERIALIZED_LEN = 3; +global SCOPED_READ_REQUEST_SERIALIZED_LEN = READ_REQUEST_LENGTH + 1; struct ReadRequest { value: Field, counter: u32, } -impl OrderedValue for ReadRequest { - fn value(self) -> Field { - self.value - } - fn counter(self) -> u32 { - self.counter - } -} - impl Eq for ReadRequest { fn eq(self, read_request: ReadRequest) -> bool { (self.value == read_request.value) @@ -52,57 +43,55 @@ impl Deserialize for ReadRequest { } impl ReadRequest { - pub fn to_context(self, contract_address: AztecAddress) -> ReadRequestContext { - ReadRequestContext { value: self.value, counter: self.counter, contract_address } + pub fn scope(self, contract_address: AztecAddress) -> ScopedReadRequest { + ScopedReadRequest { read_request: self, contract_address } } } -struct ReadRequestContext { - value: Field, - counter: u32, +struct ScopedReadRequest { + read_request: ReadRequest, contract_address: AztecAddress, } -impl OrderedValue for ReadRequestContext { - fn value(self) -> Field { - self.value - } - fn counter(self) -> u32 { - self.counter +impl Eq for ScopedReadRequest { + fn eq(self, other: ScopedReadRequest) -> bool { + (self.read_request == other.read_request) + & (self.contract_address.eq(other.contract_address)) } } -impl Eq for ReadRequestContext { - fn eq(self, read_request: ReadRequestContext) -> bool { - (self.value == read_request.value) - & (self.counter == read_request.counter) - & (self.contract_address.eq(read_request.contract_address)) - } -} - -impl Empty for ReadRequestContext { +impl Empty for ScopedReadRequest { fn empty() -> Self { - ReadRequestContext { - value: 0, - counter: 0, + ScopedReadRequest { + read_request: 
ReadRequest::empty(), contract_address: AztecAddress::empty(), } } } -impl Serialize for ReadRequestContext { - fn serialize(self) -> [Field; READ_REQUEST_CONTEXT_SERIALIZED_LEN] { - [self.value, self.counter as Field, self.contract_address.to_field()] +impl Serialize for ScopedReadRequest { + fn serialize(self) -> [Field; SCOPED_READ_REQUEST_SERIALIZED_LEN] { + array_concat(self.read_request.serialize(), [self.contract_address.to_field()]) } } -impl Deserialize for ReadRequestContext { - fn deserialize(values: [Field; READ_REQUEST_CONTEXT_SERIALIZED_LEN]) -> Self { - Self { - value: values[0], - counter: values[1] as u32, +impl Deserialize for ScopedReadRequest { + fn deserialize(values: [Field; SCOPED_READ_REQUEST_SERIALIZED_LEN]) -> Self { + let mut reader = Reader::new(values); + let res = Self { + read_request: reader.read_struct(ReadRequest::deserialize), contract_address: AztecAddress::from_field(values[2]), - } + }; + reader.finish(); + res } } +impl ScopedReadRequest { + pub fn value(self) -> Field { + self.read_request.value + } + pub fn counter(self) -> u32 { + self.read_request.counter + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect.nr index 2f1de297ac49..78b54a59a0bd 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect.nr @@ -1,5 +1,5 @@ use crate::{ - abis::read_request::ReadRequestContext, address::AztecAddress, + abis::read_request::ScopedReadRequest, address::AztecAddress, constants::{GENERATOR_INDEX__SIDE_EFFECT, SIDE_EFFECT_LENGTH}, traits::{Empty, Hash, Serialize, Deserialize} }; @@ -15,7 +15,7 @@ trait OrderedValue where T: Eq { } trait Readable { - fn assert_match_read_request(self, read_request: ReadRequestContext); + fn assert_match_read_request(self, read_request: ScopedReadRequest); } struct SideEffect { diff 
--git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr index c49acd2e9121..850afddbade2 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr @@ -1,8 +1,8 @@ use crate::{ abis::{ max_block_number::MaxBlockNumber, - nullifier_key_validation_request::NullifierKeyValidationRequestContext, - public_data_read::PublicDataRead, read_request::ReadRequestContext, + nullifier_key_validation_request::ScopedNullifierKeyValidationRequest, + public_data_read::PublicDataRead, read_request::ScopedReadRequest, validation_requests::rollup_validation_requests::RollupValidationRequests }, constants::{ @@ -15,9 +15,9 @@ use crate::{ // TODO - Use specific structs for private and public: PrivateValidationRequests vs PublicValidationRequests struct ValidationRequests { for_rollup: RollupValidationRequests, - note_hash_read_requests: [ReadRequestContext; MAX_NOTE_HASH_READ_REQUESTS_PER_TX], - nullifier_read_requests: [ReadRequestContext; MAX_NULLIFIER_READ_REQUESTS_PER_TX], - nullifier_non_existent_read_requests: [ReadRequestContext; MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX], - nullifier_key_validation_requests: [NullifierKeyValidationRequestContext; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], + note_hash_read_requests: [ScopedReadRequest; MAX_NOTE_HASH_READ_REQUESTS_PER_TX], + nullifier_read_requests: [ScopedReadRequest; MAX_NULLIFIER_READ_REQUESTS_PER_TX], + nullifier_non_existent_read_requests: [ScopedReadRequest; MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX], + nullifier_key_validation_requests: [ScopedNullifierKeyValidationRequest; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], public_data_reads: [PublicDataRead; MAX_PUBLIC_DATA_READS_PER_TX], } 
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests_builder.nr index 7aa661a9defd..6fe9d71310a9 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests_builder.nr @@ -1,8 +1,8 @@ use crate::{ abis::{ max_block_number::MaxBlockNumber, - nullifier_key_validation_request::NullifierKeyValidationRequestContext, - public_data_read::PublicDataRead, read_request::ReadRequestContext, + nullifier_key_validation_request::ScopedNullifierKeyValidationRequest, + public_data_read::PublicDataRead, read_request::ScopedReadRequest, validation_requests::validation_requests::ValidationRequests, validation_requests::rollup_validation_requests::RollupValidationRequests }, @@ -16,10 +16,10 @@ use crate::{ struct ValidationRequestsBuilder { max_block_number: MaxBlockNumber, - note_hash_read_requests: BoundedVec, - nullifier_read_requests: BoundedVec, - nullifier_non_existent_read_requests: BoundedVec, - nullifier_key_validation_requests: BoundedVec, + note_hash_read_requests: BoundedVec, + nullifier_read_requests: BoundedVec, + nullifier_non_existent_read_requests: BoundedVec, + nullifier_key_validation_requests: BoundedVec, public_data_reads: BoundedVec, } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr b/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr index 06463ce268c7..6413bedf15ee 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr @@ -55,9 +55,7 @@ impl AztecAddress { pub fn compute(pub_keys_hash: PublicKeysHash, partial_address: PartialAddress) -> 
AztecAddress { AztecAddress::from_field( - poseidon2_hash( - [pub_keys_hash.to_field(), partial_address.to_field(), GENERATOR_INDEX__CONTRACT_ADDRESS_V1] - ) + poseidon2_hash([pub_keys_hash.to_field(), partial_address.to_field(), GENERATOR_INDEX__CONTRACT_ADDRESS_V1]) ) } @@ -66,13 +64,13 @@ impl AztecAddress { incoming_public_key: GrumpkinPoint, outgoing_public_key: GrumpkinPoint, tagging_public_key: GrumpkinPoint, - partial_address: PartialAddress, + partial_address: PartialAddress ) -> AztecAddress { let public_keys_hash = PublicKeysHash::compute_new( nullifier_public_key, incoming_public_key, outgoing_public_key, - tagging_public_key, + tagging_public_key ); let computed_address = AztecAddress::compute(public_keys_hash, partial_address); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr index bff82cc1644c..f91d1383a19f 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr @@ -1,7 +1,7 @@ use crate::{ - constants::{GENERATOR_INDEX__PARTIAL_ADDRESS, GENERATOR_INDEX__PUBLIC_KEYS_HASH}, hash::pedersen_hash, grumpkin_point::GrumpkinPoint, - traits::{ToField, Serialize, Deserialize}, - hash::poseidon2_hash, + constants::{GENERATOR_INDEX__PARTIAL_ADDRESS, GENERATOR_INDEX__PUBLIC_KEYS_HASH}, + hash::pedersen_hash, grumpkin_point::GrumpkinPoint, traits::{ToField, Serialize, Deserialize}, + hash::poseidon2_hash }; // Public keys hash. Used in the computation of an address. 
@@ -59,17 +59,19 @@ impl PublicKeysHash { tagging_public_key: GrumpkinPoint ) -> Self { PublicKeysHash::from_field( - poseidon2_hash([ - nullifier_public_key.x, - nullifier_public_key.y, - incoming_public_key.x, - incoming_public_key.y, - outgoing_public_key.x, - outgoing_public_key.y, - tagging_public_key.x, - tagging_public_key.y, - GENERATOR_INDEX__PUBLIC_KEYS_HASH, - ]) + poseidon2_hash( + [ + nullifier_public_key.x, + nullifier_public_key.y, + incoming_public_key.x, + incoming_public_key.y, + outgoing_public_key.x, + outgoing_public_key.y, + tagging_public_key.x, + tagging_public_key.y, + GENERATOR_INDEX__PUBLIC_KEYS_HASH + ] + ) ) } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index ad82df9c822f..5e1631cc24d5 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -126,7 +126,7 @@ global REGISTERER_UNCONSTRAINED_FUNCTION_BROADCASTED_MAGIC_VALUE = 0xe7af8166354 // CONTRACT INSTANCE CONSTANTS // sha224sum 'struct ContractInstanceDeployed' global DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631; -global DEPLOYER_CONTRACT_ADDRESS = 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165; +global DEPLOYER_CONTRACT_ADDRESS = 0x2e9c386f07e22a1d24e677ab70407b2dd0adbc7cafb9c822bf249685d6a2e4cc; // GAS DEFAULTS global DEFAULT_GAS_LIMIT: u32 = 1_000_000_000; @@ -155,15 +155,17 @@ global FUNCTION_LEAF_PREIMAGE_LENGTH: u64 = 5; global GLOBAL_VARIABLES_LENGTH: u64 = 6 + GAS_FEES_LENGTH; global APPEND_ONLY_TREE_SNAPSHOT_LENGTH = 2; global L1_TO_L2_MESSAGE_LENGTH: u64 = 6; -global L2_TO_L1_MESSAGE_LENGTH: u64 = 2; +global L2_TO_L1_MESSAGE_LENGTH: u64 = 3; +global SCOPED_L2_TO_L1_MESSAGE_LENGTH = L2_TO_L1_MESSAGE_LENGTH + 1; global MAX_BLOCK_NUMBER_LENGTH: u64 = 2; // 1 for the option flag, 1 for the value 
global NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; -global NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH = 4; +global SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; global PARTIAL_STATE_REFERENCE_LENGTH: u64 = 6; global READ_REQUEST_LENGTH = 2; global NOTE_HASH_LENGTH = 2; -global NOTE_HASH_CONTEXT_LENGTH = 3; +global SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; global NULLIFIER_LENGTH = 3; +global SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; global SIDE_EFFECT_LENGTH = 2; global STATE_REFERENCE_LENGTH: u64 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; global TX_CONTEXT_LENGTH: u64 = 2 + GAS_SETTINGS_LENGTH; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/debug_log.nr b/noir-projects/noir-protocol-circuits/crates/types/src/debug_log.nr index a701336d9267..d9892f1617ef 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/debug_log.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/debug_log.nr @@ -2,21 +2,21 @@ // WARNING: sometimes when using debug logs the ACVM errors with: `thrown: "solver opcode resolution error: cannot solve opcode: expression has too many unknowns x155"` #[oracle(debugLog)] -fn debug_log_oracle(_msg: T, _num_args: Field) -> Field {} +fn debug_log_oracle(_msg: T, _num_args: Field) {} #[oracle(debugLog)] -fn debug_log_format_oracle(_msg: T, _args: [Field; N], _num_args: Field) -> Field {} +fn debug_log_format_oracle(_msg: T, _args: [Field; N], _num_args: Field) {} #[oracle(debugLog)] -fn debug_log_field_oracle(_field: Field) -> Field {} +fn debug_log_field_oracle(_field: Field) {} #[oracle(debugLog)] -fn debug_log_array_oracle(_arbitrary_array: [T; N]) -> Field {} +fn debug_log_array_oracle(_arbitrary_array: [T; N]) {} #[oracle(debugLogWithPrefix)] -fn debug_log_array_with_prefix_oracle(_prefix: S, _arbitrary_array: [T; N]) -> Field {} +fn debug_log_array_with_prefix_oracle(_prefix: S, _arbitrary_array: [T; N]) {} /// NOTE: 
call this with a str msg of length > 1 /// Example: /// `debug_log("blah blah this is a debug string");` unconstrained pub fn debug_log(msg: T) { - assert(debug_log_oracle(msg, 0) == 0); + debug_log_oracle(msg, 0); } /// NOTE: call this with a str msg of form @@ -26,23 +26,23 @@ unconstrained pub fn debug_log(msg: T) { /// Example: /// debug_log_format("get_2(slot:{0}) =>\n\t0:{1}\n\t1:{2}", [storage_slot, note0_hash, note1_hash]); unconstrained pub fn debug_log_format(msg: T, args: [Field; N]) { - assert(debug_log_format_oracle(msg, args, args.len() as Field) == 0); + debug_log_format_oracle(msg, args, args.len() as Field); } /// Example: /// `debug_log_field(my_field);` unconstrained pub fn debug_log_field(field: Field) { - assert(debug_log_field_oracle(field) == 0); + debug_log_field_oracle(field); } /// Example: /// `debug_log_array(my_array);` unconstrained fn debug_log_array(arbitrary_array: [T; N]) { - assert(debug_log_array_oracle(arbitrary_array) == 0); + debug_log_array_oracle(arbitrary_array); } /// Example: /// `debug_log_array_with_prefix("Prefix", my_array);` unconstrained pub fn debug_log_array_with_prefix(prefix: S, arbitrary_array: [T; N]) { - assert(debug_log_array_with_prefix_oracle(prefix, arbitrary_array) == 0); + debug_log_array_with_prefix_oracle(prefix, arbitrary_array); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_private_key.nr b/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_private_key.nr index 23b6dddd143f..f9d8526ddb89 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_private_key.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_private_key.nr @@ -1,8 +1,5 @@ -use dep::std::{cmp::Eq, grumpkin_scalar::GrumpkinScalar, grumpkin_scalar_mul::grumpkin_fixed_base}; -use crate::{ - grumpkin_point::GrumpkinPoint, - traits::Empty -}; +use dep::std::{cmp::Eq, embedded_curve_ops::fixed_base_scalar_mul}; +use crate::{grumpkin_point::GrumpkinPoint, 
traits::Empty}; global GRUMPKIN_PRIVATE_KEY_SERIALIZED_LEN: Field = 2; @@ -41,7 +38,7 @@ impl GrumpkinPrivateKey { } pub fn derive_public_key(self) -> GrumpkinPoint { - let public_key = grumpkin_fixed_base(GrumpkinScalar { high: self.high, low: self.low }); + let public_key = fixed_base_scalar_mul(self.low, self.high); GrumpkinPoint { x: public_key[0], y: public_key[1] } } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr index 31740a66be75..efb7f6b38c33 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr @@ -202,7 +202,7 @@ fn compute_l2_l1_hash() { assert(hash_result == 0xb393978842a0fa3d3e1470196f098f473f9678e72463cb65ec4ab5581856c2); // Non-zero case - let message = L2ToL1Message { recipient: EthAddress::from_field(3), content: 5 }; + let message = L2ToL1Message { recipient: EthAddress::from_field(3), content: 5, counter: 1234 }; let hash_result = compute_l2_to_l1_hash(AztecAddress::from_field(1), 2, 4, message); assert(hash_result == 0x3f88c1044a05e5340ed20466276500f6d45ca5603913b9091e957161734e16); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/messaging/l2_to_l1_message.nr b/noir-projects/noir-protocol-circuits/crates/types/src/messaging/l2_to_l1_message.nr index 928239bf935a..8f21f8e2c77f 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/messaging/l2_to_l1_message.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/messaging/l2_to_l1_message.nr @@ -1,9 +1,14 @@ -use crate::{address::EthAddress, constants::L2_TO_L1_MESSAGE_LENGTH, traits::{Deserialize, Empty, Serialize}}; +use crate::{ + address::{AztecAddress, EthAddress}, + constants::{L2_TO_L1_MESSAGE_LENGTH, SCOPED_L2_TO_L1_MESSAGE_LENGTH}, abis::side_effect::Ordered, + traits::{Deserialize, Empty, Serialize}, utils::{arrays::array_concat, reader::Reader} +}; // Note: Not to 
be confused with L2ToL1Msg in Solidity struct L2ToL1Message { recipient: EthAddress, content: Field, + counter: u32, } impl Empty for L2ToL1Message { @@ -11,19 +16,20 @@ impl Empty for L2ToL1Message { Self { recipient: EthAddress::empty(), content: 0, + counter: 0, } } } impl Eq for L2ToL1Message { fn eq(self, other: Self) -> bool { - (self.recipient == other.recipient) & (self.content == other.content) + (self.recipient == other.recipient) & (self.content == other.content) & (self.counter == other.counter) } } impl Serialize for L2ToL1Message { fn serialize(self) -> [Field; L2_TO_L1_MESSAGE_LENGTH] { - [self.recipient.to_field(), self.content] + [self.recipient.to_field(), self.content, self.counter as Field] } } @@ -32,6 +38,58 @@ impl Deserialize for L2ToL1Message { Self { recipient: EthAddress::from_field(values[0]), content: values[1], + counter: values[2] as u32, } } } + +impl L2ToL1Message { + pub fn scope(self, contract_address: AztecAddress) -> ScopedL2ToL1Message { + ScopedL2ToL1Message { message: self, contract_address } + } +} + +struct ScopedL2ToL1Message { + message: L2ToL1Message, + contract_address: AztecAddress, +} + +impl Ordered for ScopedL2ToL1Message { + fn counter(self) -> u32 { + self.message.counter + } +} + +impl Eq for ScopedL2ToL1Message { + fn eq(self, other: ScopedL2ToL1Message) -> bool { + (self.message == other.message) + & (self.contract_address == other.contract_address) + } +} + +impl Empty for ScopedL2ToL1Message { + fn empty() -> Self { + ScopedL2ToL1Message { + message: L2ToL1Message::empty(), + contract_address: AztecAddress::empty(), + } + } +} + +impl Serialize for ScopedL2ToL1Message { + fn serialize(self) -> [Field; SCOPED_L2_TO_L1_MESSAGE_LENGTH] { + array_concat(self.message.serialize(), [self.contract_address.to_field()]) + } +} + +impl Deserialize for ScopedL2ToL1Message { + fn deserialize(values: [Field; SCOPED_L2_TO_L1_MESSAGE_LENGTH]) -> Self { + let mut reader = Reader::new(values); + let res = Self { + message: 
reader.read_struct(L2ToL1Message::deserialize), + contract_address: reader.read_struct(AztecAddress::deserialize), + }; + reader.finish(); + res + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/recursion.nr b/noir-projects/noir-protocol-circuits/crates/types/src/recursion.nr index f2e54c0a05b6..80f02c240e9d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/recursion.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/recursion.nr @@ -1,2 +1,2 @@ mod verification_key; -mod proof; \ No newline at end of file +mod proof; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/recursion/verification_key.nr b/noir-projects/noir-protocol-circuits/crates/types/src/recursion/verification_key.nr index 3412fc4294b0..c4cc9b2230fb 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/recursion/verification_key.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/recursion/verification_key.nr @@ -1,8 +1,4 @@ - -use crate::{ - traits::{Serialize, Deserialize, Empty}, - constants::{ VERIFICATION_KEY_LENGTH_IN_FIELDS }, -}; +use crate::{traits::{Serialize, Deserialize, Empty}, constants::VERIFICATION_KEY_LENGTH_IN_FIELDS}; global SERIALIZED_VERIFICATION_KEY_LENGTH = VERIFICATION_KEY_LENGTH_IN_FIELDS + 1; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index 189e3aeecfef..077007c2b5ae 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -6,13 +6,13 @@ use crate::{ global_variables::GlobalVariables, combined_constant_data::CombinedConstantData, kernel_circuit_public_inputs::{KernelCircuitPublicInputs, PrivateKernelCircuitPublicInputs, PublicKernelCircuitPublicInputs}, kernel_data::{PrivateKernelData, PublicKernelData, KernelData}, 
max_block_number::MaxBlockNumber, - note_hash::NoteHashContext, nullifier::Nullifier, - nullifier_key_validation_request::NullifierKeyValidationRequestContext, + note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, + nullifier_key_validation_request::ScopedNullifierKeyValidationRequest, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, - read_request::ReadRequestContext, side_effect::SideEffect, + read_request::{ReadRequest, ScopedReadRequest}, side_effect::SideEffect, validation_requests::{ValidationRequests, ValidationRequestsBuilder} }, - address::AztecAddress, + address::{AztecAddress, EthAddress}, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, @@ -21,7 +21,8 @@ use crate::{ MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, VK_TREE_HEIGHT, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX }, - hash::{silo_note_hash, silo_nullifier}, header::Header, + hash::silo_nullifier, header::Header, + messaging::l2_to_l1_message::{L2ToL1Message, ScopedL2ToL1Message}, mocked::{AggregationObject, Proof, VerificationKey}, partial_state_reference::PartialStateReference, tests::fixtures, transaction::tx_context::TxContext, traits::Empty }; @@ -37,9 +38,9 @@ struct FixtureBuilder { public_teardown_call_request: CallRequest, // Accumulated data. - new_note_hashes: BoundedVec, - new_nullifiers: BoundedVec, - new_l2_to_l1_msgs: BoundedVec, + new_note_hashes: BoundedVec, + new_nullifiers: BoundedVec, + new_l2_to_l1_msgs: BoundedVec, encrypted_logs_hashes: BoundedVec, unencrypted_logs_hashes: BoundedVec, encrypted_logs_hash: Field, @@ -54,10 +55,10 @@ struct FixtureBuilder { // Validation requests. 
max_block_number: MaxBlockNumber, - note_hash_read_requests: BoundedVec, - nullifier_read_requests: BoundedVec, - nullifier_non_existent_read_requests: BoundedVec, - nullifier_key_validation_requests: BoundedVec, + note_hash_read_requests: BoundedVec, + nullifier_read_requests: BoundedVec, + nullifier_non_existent_read_requests: BoundedVec, + nullifier_key_validation_requests: BoundedVec, public_data_reads: BoundedVec, // Proof. @@ -142,9 +143,9 @@ impl FixtureBuilder { pub fn to_public_accumulated_data(self) -> PublicAccumulatedData { PublicAccumulatedData { - new_note_hashes: self.new_note_hashes.storage.map(|n: NoteHashContext| n.to_note_hash()), - new_nullifiers: self.new_nullifiers.storage, - new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, + new_note_hashes: self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash), + new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier), + new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), encrypted_logs_hashes: self.encrypted_logs_hashes.storage, unencrypted_logs_hashes: self.unencrypted_logs_hashes.storage, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -157,9 +158,9 @@ impl FixtureBuilder { pub fn to_combined_accumulated_data(self) -> CombinedAccumulatedData { CombinedAccumulatedData { - new_note_hashes: self.new_note_hashes.storage.map(|n: NoteHashContext| n.value), - new_nullifiers: self.new_nullifiers.storage.map(|n: Nullifier| n.value), - new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, + new_note_hashes: self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash.value), + new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier.value), + new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), encrypted_logs_hash: self.encrypted_logs_hash, unencrypted_logs_hash: self.unencrypted_logs_hash, encrypted_log_preimages_length: 
self.encrypted_log_preimages_length, @@ -253,7 +254,7 @@ impl FixtureBuilder { } pub fn add_new_note_hash(&mut self, value: Field) { - self.new_note_hashes.push(NoteHashContext { value, counter: self.next_counter(), nullifier_counter: 0 }); + self.new_note_hashes.push(NoteHash { value, counter: self.next_counter() }.scope(0, self.storage_contract_address)); } pub fn append_new_note_hashes(&mut self, num_new_note_hashes: u64) { @@ -261,15 +262,20 @@ impl FixtureBuilder { for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { if i < num_new_note_hashes { let mocked_value = self.get_mocked_note_hash_value(index_offset + i); - let value = silo_note_hash(self.storage_contract_address, mocked_value); - self.add_new_note_hash(value); + self.add_new_note_hash(mocked_value); } } } - pub fn add_nullifier(&mut self, unsiloed_nullifier: Field) { - let value = silo_nullifier(self.storage_contract_address, unsiloed_nullifier); - self.new_nullifiers.push(Nullifier { value, note_hash: 0, counter: self.next_counter() }); + pub fn add_nullifier(&mut self, value: Field) { + self.new_nullifiers.push( + Nullifier { value, counter: self.next_counter(), note_hash: 0 }.scope(self.storage_contract_address) + ); + } + + pub fn add_siloed_nullifier(&mut self, value: Field) { + let siloed_value = silo_nullifier(self.storage_contract_address, value); + self.add_nullifier(siloed_value); } pub fn append_new_nullifiers(&mut self, num_extra_nullifier: u64) { @@ -282,6 +288,22 @@ impl FixtureBuilder { } } + pub fn append_siloed_nullifiers(&mut self, num_extra_nullifier: u64) { + let index_offset = self.new_nullifiers.len(); + for i in 0..MAX_NEW_NULLIFIERS_PER_TX { + if i < num_extra_nullifier { + let mocked_value = self.get_mocked_nullifier_value(index_offset + i); + self.add_siloed_nullifier(mocked_value); + } + } + } + + pub fn add_l2_to_l1_message(&mut self, content: Field, recipient: EthAddress) { + self.new_l2_to_l1_msgs.push( + L2ToL1Message { recipient, content, counter: self.next_counter() 
}.scope(self.storage_contract_address) + ); + } + pub fn add_public_data_update_request(&mut self, leaf_slot: Field, value: Field) { let update_request = PublicDataUpdateRequest { leaf_slot, new_value: value }; self.public_data_update_requests.push(update_request); @@ -319,7 +341,7 @@ impl FixtureBuilder { pub fn add_read_request_for_pending_note_hash(&mut self, note_hash_index: u64) -> u64 { let read_request_index = self.note_hash_read_requests.len(); let value = self.get_mocked_note_hash_value(note_hash_index); - let read_request = ReadRequestContext { value, counter: self.next_counter(), contract_address: self.storage_contract_address }; + let read_request = ReadRequest { value, counter: self.next_counter() }.scope(self.storage_contract_address); self.note_hash_read_requests.push(read_request); read_request_index } @@ -328,11 +350,7 @@ impl FixtureBuilder { let value_offset = self.note_hash_read_requests.len(); for i in 0..MAX_NOTE_HASH_READ_REQUESTS_PER_TX { if i < num_reads { - let read_request = ReadRequestContext { - value: (value_offset + i + 789) as Field, - counter: self.next_counter(), - contract_address: self.storage_contract_address - }; + let read_request = ReadRequest { value: (value_offset + i + 789) as Field, counter: self.next_counter() }.scope(self.storage_contract_address); self.note_hash_read_requests.push(read_request); } } @@ -340,22 +358,14 @@ impl FixtureBuilder { pub fn add_read_request_for_pending_nullifier(&mut self, nullifier_index: u64) -> u64 { let read_request_index = self.nullifier_read_requests.len(); - let unsiloed_nullifier = self.get_mocked_nullifier_value(nullifier_index); - let read_request = ReadRequestContext { - value: unsiloed_nullifier, - counter: self.next_counter(), - contract_address: self.storage_contract_address - }; + let nullifier = self.get_mocked_nullifier_value(nullifier_index); + let read_request = ReadRequest { value: nullifier, counter: self.next_counter() }.scope(self.storage_contract_address); 
self.nullifier_read_requests.push(read_request); read_request_index } - pub fn add_non_existent_read_request_for_nullifier(&mut self, unsiloed_nullifier: Field) { - let read_request = ReadRequestContext { - value: unsiloed_nullifier, - counter: self.next_counter(), - contract_address: self.storage_contract_address - }; + pub fn add_non_existent_read_request_for_nullifier(&mut self, nullifier: Field) { + let read_request = ReadRequest { value: nullifier, counter: self.next_counter() }.scope(self.storage_contract_address); self.nullifier_non_existent_read_requests.push(read_request); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contract_functions.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contract_functions.nr index a5f7642811a0..836e673b5e14 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contract_functions.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contract_functions.nr @@ -1,13 +1,12 @@ -use crate::abis::{ - function_data::FunctionData, function_selector::FunctionSelector, - membership_witness::FunctionLeafMembershipWitness -}; +use crate::abis::{function_data::FunctionData, function_selector::FunctionSelector}; +use crate::merkle_tree::membership::MembershipWitness; +use crate::constants::FUNCTION_TREE_HEIGHT; struct ContractFunction { data: FunctionData, vk_hash: Field, acir_hash: Field, - membership_witness: FunctionLeafMembershipWitness, + membership_witness: MembershipWitness, } // sibling_path taken from __snapshots__/noir_test_gen.test.ts.snap @@ -18,7 +17,7 @@ global default_private_function = ContractFunction { }, vk_hash: 0, acir_hash: 1111, - membership_witness: FunctionLeafMembershipWitness { + membership_witness: MembershipWitness { leaf_index: 0, sibling_path: [ 0x1e5cebe7a50c5c8fd1ebe19ed6bbf80f77819b12a2a28f334e895501e1cda574, @@ -37,7 +36,7 @@ global default_public_function = ContractFunction { }, vk_hash: 0, 
acir_hash: 3333, - membership_witness: FunctionLeafMembershipWitness { + membership_witness: MembershipWitness { leaf_index: 2, sibling_path: [ 0x2d72ef5ebb7c974e1f5a8bed092f1cf1bf0a0cb1eda28516221ca7e5811ecf15, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr index c1b266fd6a05..a4c6a52930ef 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr @@ -2,16 +2,16 @@ use crate::{ abis::{ gas_settings::GasSettings, call_request::{CallerContext, CallRequest}, private_call_stack_item::PrivateCallStackItem, function_data::FunctionData, - max_block_number::MaxBlockNumber, membership_witness::FunctionLeafMembershipWitness, - private_circuit_public_inputs::PrivateCircuitPublicInputs, + max_block_number::MaxBlockNumber, private_circuit_public_inputs::PrivateCircuitPublicInputs, private_kernel::private_call_data::PrivateCallData, side_effect::SideEffect }, + merkle_tree::membership::MembershipWitness, address::{AztecAddress, EthAddress, SaltedInitializationHash, PublicKeysHash}, mocked::{Proof, VerificationKey}, tests::{fixtures, private_circuit_public_inputs_builder::PrivateCircuitPublicInputsBuilder}, transaction::{tx_request::TxRequest, tx_context::TxContext} }; -use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL}; +use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, FUNCTION_TREE_HEIGHT}; struct PrivateCallDataBuilder { // Values of PrivateCallStackItem. 
@@ -28,7 +28,7 @@ struct PrivateCallDataBuilder { public_keys_hash: PublicKeysHash, contract_class_artifact_hash: Field, contract_class_public_bytecode_commitment: Field, - function_leaf_membership_witness: FunctionLeafMembershipWitness, + function_leaf_membership_witness: MembershipWitness, acir_hash: Field, gas_settings: GasSettings, } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr b/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr index 4de0a3385251..52c277355c80 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr @@ -75,6 +75,18 @@ pub fn array_cp(array: [T; N]) -> [T; S] where T: Empty { result } +pub fn array_concat(array1: [T; N], array2: [T; M]) -> [T; S] { + assert_eq(N + M, S, "combined array length does not match return array length"); + let mut result = [array1[0]; S]; + for i in 1..N { + result[i] = array1[i]; + } + for i in 0..M { + result[i + N] = array2[i]; + } + result +} + pub fn array_merge(array1: [T; N], array2: [T; N]) -> [T; N] where T: Empty + Eq { let mut result: [T; N] = [T::empty(); N]; let mut i = 0; @@ -195,6 +207,21 @@ fn find_index_not_found() { assert_eq(index, 4); } +#[test] +fn test_array_concat() { + let array0 = [1, 2, 3]; + let array1 = [4, 5]; + let concated = array_concat(array0, array1); + assert_eq(concated, [1, 2, 3, 4, 5]); +} + +#[test(should_fail_with="combined array length does not match return array length")] +fn array_concat_fails_inconsistent_lengths() { + let array0 = [1, 2, 3]; + let array1 = [4, 5]; + let _concated: [Field; 4] = array_concat(array0, array1); +} + #[test] fn check_permutation_basic_test() { let original_array = [1, 2, 3]; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/utils/reader.nr b/noir-projects/noir-protocol-circuits/crates/types/src/utils/reader.nr index dffc118e7d8e..4f1b5ab95a18 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/types/src/utils/reader.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/utils/reader.nr @@ -14,6 +14,10 @@ impl Reader { result } + pub fn read_u32(&mut self) -> u32 { + self.read() as u32 + } + pub fn read_array(&mut self, mut result: [Field; K]) -> [Field; K] { for i in 0..K { result[i] = self.data[self.offset + i]; diff --git a/noir/.gitignore b/noir/.gitignore index 781ea857ba6b..b211695f37c5 100644 --- a/noir/.gitignore +++ b/noir/.gitignore @@ -1,2 +1,3 @@ **/package.tgz -packages \ No newline at end of file +packages +.earthly-staging \ No newline at end of file diff --git a/noir/Earthfile b/noir/Earthfile index 3fb400700b5c..435fab39481f 100644 --- a/noir/Earthfile +++ b/noir/Earthfile @@ -44,6 +44,19 @@ test: COPY noir-repo/.rustfmt.toml noir-repo/.rustfmt.toml RUN ./scripts/test_native.sh +format: + FROM +nargo + ENV PATH=$PATH:/usr/src/noir-repo/target/release + + COPY ./noir-repo/test_programs ./noir-repo/test_programs + COPY ./noir-repo/noir_stdlib ./noir-repo/noir_stdlib + + WORKDIR ./noir-repo/test_programs + RUN ./format.sh check + + WORKDIR ../noir_stdlib + RUN nargo fmt --check + packages-deps: BUILD ../barretenberg/ts/+build # prefetch @@ -106,64 +119,63 @@ packages: SAVE IMAGE --cache-hint packages-test-build: - FROM +packages-deps + FROM +packages-deps - COPY +nargo/nargo /usr/src/noir/noir-repo/target/release/nargo - COPY +nargo/acvm /usr/src/noir/noir-repo/target/release/acvm + COPY +nargo/nargo /usr/src/noir/noir-repo/target/release/nargo + COPY +nargo/acvm /usr/src/noir/noir-repo/target/release/acvm - ENV NARGO_BACKEND_PATH=/usr/src/barretenberg/ts/dest/node/main.js - ENV PATH=$PATH:/usr/src/noir/noir-repo/target/release + ENV NARGO_BACKEND_PATH=/usr/src/barretenberg/ts/dest/node/main.js + ENV PATH=$PATH:/usr/src/noir/noir-repo/target/release - WORKDIR /usr/src/barretenberg/ts - RUN yarn --immutable + WORKDIR /usr/src/barretenberg/ts + RUN yarn --immutable - WORKDIR 
/usr/src/noir/noir-repo - COPY --dir noir-repo/.github/scripts/wasm-bindgen-install.sh ./.github/scripts/wasm-bindgen-install.sh - RUN ./.github/scripts/wasm-bindgen-install.sh + WORKDIR /usr/src/noir/noir-repo + COPY --dir noir-repo/.github/scripts/wasm-bindgen-install.sh ./.github/scripts/wasm-bindgen-install.sh + RUN ./.github/scripts/wasm-bindgen-install.sh - ENV SOURCE_DATE_EPOCH=$(date +%s) - ENV GIT_DIRTY=false - ENV GIT_COMMIT=$COMMIT_HASH - RUN yarn build - # this builds text fixtures to be used in tests - RUN yarn workspace @noir-lang/noir_wasm run test:build_fixtures + ENV SOURCE_DATE_EPOCH=$(date +%s) + ENV GIT_DIRTY=false + ENV GIT_COMMIT=$COMMIT_HASH + RUN yarn build + # this builds text fixtures to be used in tests + RUN yarn workspace @noir-lang/noir_wasm run test:build_fixtures - SAVE ARTIFACT /usr/src /usr/src + SAVE ARTIFACT /usr/src /usr/src packages-test-node: - FROM +packages-test-build - ENV NODE_OPTIONS=--max_old_space_size=8192 - WORKDIR /usr/src/noir/noir-repo - RUN yarn workspaces foreach \ - --parallel \ - --verbose \ - --exclude @noir-lang/root \ # foreach includes the root workspace, ignore it - --exclude @noir-lang/noir_js \ # noir_js OOMs - --exclude integration-tests \ # separate node and browser tests - --exclude @noir-lang/noir_wasm \ - run test - RUN yarn workspaces foreach \ - --parallel \ - --verbose \ - --include integration-tests \ - --include @noir-lang/noir_wasm \ - run test:node + FROM +packages-test-build + ENV NODE_OPTIONS=--max_old_space_size=8192 + WORKDIR /usr/src/noir/noir-repo + RUN yarn workspaces foreach \ + --parallel \ + --verbose \ + --exclude @noir-lang/root \ # foreach includes the root workspace, ignore it + --exclude @noir-lang/noir_js \ # noir_js OOMs + --exclude integration-tests \ # separate node and browser tests + --exclude @noir-lang/noir_wasm \ + run test + RUN yarn workspaces foreach \ + --parallel \ + --verbose \ + --include integration-tests \ + --include @noir-lang/noir_wasm \ + run test:node 
packages-test-browser: - FROM node:18 - COPY --dir +packages-test-build/usr/src /usr - WORKDIR /usr/src/noir/noir-repo - RUN ./.github/scripts/playwright-install.sh - RUN yarn workspaces foreach \ - --parallel \ - --verbose \ - --include integration-tests \ - --include @noir-lang/noir_wasm \ - run test:browser + FROM node:18 + COPY --dir +packages-test-build/usr/src /usr + WORKDIR /usr/src/noir/noir-repo + RUN ./.github/scripts/playwright-install.sh + RUN yarn workspaces foreach \ + --verbose \ + --include integration-tests \ + --include @noir-lang/noir_wasm \ + run test:browser packages-test: - BUILD +packages-test-node - BUILD +packages-test-browser + BUILD +packages-test-node + BUILD +packages-test-browser run: # When running the container, mount the users home directory to same location. @@ -174,141 +186,101 @@ run: COPY +build/. /usr/src ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/nargo"] -build: - BUILD +nargo - BUILD +packages - build-acir-tests: - FROM ../build-images/+build - COPY +nargo/ /usr/src/noir-repo/target/release - ENV PATH="/usr/src/noir-repo/target/release:${PATH}" - WORKDIR /usr/src/noir-repo/test_programs - COPY ./noir-repo/test_programs/ /usr/src/noir-repo/test_programs/ - RUN /usr/src/noir-repo/target/release/nargo --version - # We run this with parallel compilation switched off, which isn't ideal. 
- # There seems to be problems with this when running under Earthly, see bottom of this file* - RUN ./rebuild.sh true - SAVE ARTIFACT /usr/src/noir-repo/test_programs/acir_artifacts/* + LOCALLY + # Prepare our exact dependency formula, this avoids problems with copied empty folders or build artifacts + RUN rm -rf .earthly-staging && mkdir -p .earthly-staging + RUN cp --parents $(git ls-files "noir-repo/test_programs/*.toml" "noir-repo/test_programs/*.nr" "noir-repo/test_programs/rebuild.sh") .earthly-staging + FROM ../build-images/+build + COPY +nargo/ /usr/src/noir-repo/target/release + ENV PATH="/usr/src/noir-repo/target/release:${PATH}" + WORKDIR /usr/src/noir-repo/test_programs + COPY .earthly-staging/noir-repo/test_programs /usr/src/noir-repo/test_programs/ + RUN /usr/src/noir-repo/target/release/nargo --version + # TODO(#6225): We have trouble with concurrency and pass 'true' to build in serial, see #6225 for details + RUN ./rebuild.sh true + SAVE ARTIFACT /usr/src/noir-repo/test_programs/acir_artifacts/* barretenberg-acir-tests-bb: - FROM ../build-images/+build - - COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb - COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests - COPY +build-acir-tests/ /usr/src/acir_artifacts - - WORKDIR /usr/src/barretenberg/acir_tests - RUN rm -rf ./acir_tests - - ENV TEST_SRC /usr/src/acir_artifacts - ENV VERBOSE=1 - # Run every acir test through native bb build prove_then_verify flow for UltraPlonk. - # This ensures we test independent pk construction through real/garbage witness data paths. 
- RUN FLOW=prove_then_verify ./run_acir_tests.sh - # Construct and separately verify a UltraHonk proof for a single program - RUN FLOW=prove_then_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof - # Construct and separately verify a GoblinUltraHonk proof for all acir programs - RUN FLOW=prove_then_verify_goblin_ultra_honk ./run_acir_tests.sh - # Construct and verify a UltraHonk proof for a single program - RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof - # Construct and verify a Goblin UltraHonk (GUH) proof for a single arbitrary program - RUN FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array - # Construct and verify a UltraHonk proof for all ACIR programs using the new witness stack workflow - RUN FLOW=prove_and_verify_ultra_honk_program ./run_acir_tests.sh - # This is a "full" Goblin flow. It constructs and verifies four proofs: GoblinUltraHonk, ECCVM, Translator, and merge - RUN FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array - # Run 1_mul through native bb build, all_cmds flow, to test all cli args. - RUN FLOW=all_cmds ./run_acir_tests.sh 1_mul + FROM ../build-images/+build + COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb + COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests + COPY +build-acir-tests/ /usr/src/acir_artifacts + + WORKDIR /usr/src/barretenberg/acir_tests + RUN rm -rf ./acir_tests + + ENV TEST_SRC /usr/src/acir_artifacts + ENV VERBOSE=1 + # Run every acir test through native bb build prove_then_verify flow for UltraPlonk. + # This ensures we test independent pk construction through real/garbage witness data paths. 
+ RUN FLOW=prove_then_verify ./run_acir_tests.sh + # Construct and separately verify a UltraHonk proof for a single program + RUN FLOW=prove_then_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof + # Construct and separately verify a GoblinUltraHonk proof for all acir programs + RUN FLOW=prove_then_verify_goblin_ultra_honk ./run_acir_tests.sh + # Construct and verify a UltraHonk proof for a single program + RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof + # Construct and verify a Goblin UltraHonk (GUH) proof for a single arbitrary program + RUN FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array + # Construct and verify a UltraHonk proof for all ACIR programs using the new witness stack workflow + RUN FLOW=prove_and_verify_ultra_honk_program ./run_acir_tests.sh + # This is a "full" Goblin flow. It constructs and verifies four proofs: GoblinUltraHonk, ECCVM, Translator, and merge + RUN FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array + # Run 1_mul through native bb build, all_cmds flow, to test all cli args. 
+ RUN FLOW=all_cmds ./run_acir_tests.sh 1_mul barretenberg-acir-tests-sol: - FROM ../build-images/+build + FROM ../build-images/+build - COPY ../barretenberg/cpp/+preset-sol/ /usr/src/barretenberg/cpp/build - COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb - COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests - COPY ../barretenberg/+sol/ /usr/src/barretenberg/sol - COPY +build-acir-tests/ /usr/src/acir_artifacts + COPY ../barretenberg/cpp/+preset-sol/ /usr/src/barretenberg/cpp/build + COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb + COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests + COPY ../barretenberg/+sol/ /usr/src/barretenberg/sol + COPY +build-acir-tests/ /usr/src/acir_artifacts - WORKDIR /usr/src/barretenberg/acir_tests + WORKDIR /usr/src/barretenberg/acir_tests - ENV TEST_SRC /usr/src/acir_artifacts - ENV VERBOSE=1 + ENV TEST_SRC /usr/src/acir_artifacts + ENV VERBOSE=1 - RUN (cd sol-test && yarn) - RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof + RUN (cd sol-test && yarn) + RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof barretenberg-acir-tests-bb.js: - # Playwright not supported on base image ubuntu:noble, results in unmet dependencies - FROM node:18.19.0 - RUN apt update && apt install -y curl jq lsof - - COPY ../barretenberg/ts/+build/build/ /usr/src/barretenberg/ts - COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests - COPY +build-acir-tests/ /usr/src/acir_artifacts - - WORKDIR /usr/src/barretenberg/acir_tests - - # Build/install ts apps. 
- RUN cd browser-test-app && yarn && yarn build - RUN cd headless-test && yarn && npx playwright install && npx playwright install-deps - RUN cd ../ts && yarn - ENV VERBOSE=1 - ENV TEST_SRC /usr/src/acir_artifacts - - # Run double_verify_proof through bb.js on node to check 512k support. - RUN BIN=../ts/dest/node/main.js FLOW=prove_then_verify ./run_acir_tests.sh double_verify_proof - # Run a single arbitrary test not involving recursion through bb.js for UltraHonk - RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh 6_array - # Run a single arbitrary test not involving recursion through bb.js for GoblinUltraHonk - RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array - # Run a single arbitrary test not involving recursion through bb.js for full Goblin - RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array - # Run 1_mul through bb.js build, all_cmds flow, to test all cli args. - RUN BIN=../ts/dest/node/main.js FLOW=all_cmds ./run_acir_tests.sh 1_mul - # Run double_verify_proof through bb.js on chrome testing multi-threaded browser support. - # TODO: Currently headless webkit doesn't seem to have shared memory so skipping multi-threaded test. - RUN BROWSER=chrome THREAD_MODEL=mt ./run_acir_tests_browser.sh double_verify_proof - # Run 1_mul through bb.js on chrome/webkit testing single threaded browser support. - RUN BROWSER=chrome THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul - # Commenting for now as fails intermittently. Unreproducable on mainframe. - # See https://github.com/AztecProtocol/aztec-packages/issues/2104 - #RUN BROWSER=webkit THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul - -#* Analysis of compiling Acir tests inside/outside Earthly -# Each test run compiles the full suite, either in series or in parallel, either inside or outside Earthly. 
-# Each test prints the contents of the target directory of the eddsa circuit after compilation -# You can see that the 'Inside Earthly Parallel' run has an acir.gz file of a different size -# This results in a proof that fails verification -# -# Outside Earthly Parallel - -# [eddsa] Circuit witness successfully solved -# [eddsa] Witness saved to /mnt/user-data/phil/aztec3-packages/noir/noir-repo/test_programs/execution_success/eddsa/target/witness.gz -# total 2544 -# -rw-rw-r-- 1 phil phil 904034 May 3 10:40 acir.gz -# -rw-rw-r-- 1 phil phil 1696442 May 3 10:40 witness.gz - -# Outside Earthly Series - -# [eddsa] Circuit witness successfully solved -# [eddsa] Witness saved to /mnt/user-data/phil/aztec3-packages/noir/noir-repo/test_programs/execution_success/eddsa/target/witness.gz -# total 2544 -# -rw-rw-r-- 1 phil phil 904034 May 3 10:43 acir.gz -# -rw-rw-r-- 1 phil phil 1696442 May 3 10:43 witness.gz - -# Inside Earthly Parallel - -# +build-acir-tests | [eddsa] Circuit witness successfully solved -# +build-acir-tests | [eddsa] Witness saved to /usr/src/noir-repo/test_programs/execution_success/eddsa/target/witness.gz -# +build-acir-tests | total 2472 -# +build-acir-tests | -rw-r--r-- 1 root root 830340 May 3 10:47 acir.gz -# +build-acir-tests | -rw-r--r-- 1 root root 1696442 May 3 10:47 witness.gz - -# Inside Earthly Series - -# +build-acir-tests | [eddsa] Circuit witness successfully solved -# +build-acir-tests | [eddsa] Witness saved to /usr/src/noir-repo/test_programs/execution_success/eddsa/target/witness.gz -# +build-acir-tests | total 2544 -# +build-acir-tests | -rw-r--r-- 1 root root 904034 May 3 10:50 acir.gz -# +build-acir-tests | -rw-r--r-- 1 root root 1696442 May 3 10:51 witness.gz + # Playwright not supported on base image ubuntu:noble, results in unmet dependencies + FROM node:18.19.0 + RUN apt update && apt install -y curl jq lsof + + COPY ../barretenberg/ts/+build/build/ /usr/src/barretenberg/ts + COPY ../barretenberg/+acir-tests/ 
/usr/src/barretenberg/acir_tests + COPY +build-acir-tests/ /usr/src/acir_artifacts + + WORKDIR /usr/src/barretenberg/acir_tests + + # Build/install ts apps. + RUN cd browser-test-app && yarn && yarn build + RUN cd headless-test && yarn && npx playwright install && npx playwright install-deps + RUN cd ../ts && yarn + ENV VERBOSE=1 + ENV TEST_SRC /usr/src/acir_artifacts + + # Run double_verify_proof through bb.js on node to check 512k support. + RUN BIN=../ts/dest/node/main.js FLOW=prove_then_verify ./run_acir_tests.sh double_verify_proof + # Run a single arbitrary test not involving recursion through bb.js for UltraHonk + RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh 6_array + # Run a single arbitrary test not involving recursion through bb.js for GoblinUltraHonk + RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array + # Run a single arbitrary test not involving recursion through bb.js for full Goblin + RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array + # Run 1_mul through bb.js build, all_cmds flow, to test all cli args. + RUN BIN=../ts/dest/node/main.js FLOW=all_cmds ./run_acir_tests.sh 1_mul + # Run double_verify_proof through bb.js on chrome testing multi-threaded browser support. + # TODO: Currently headless webkit doesn't seem to have shared memory so skipping multi-threaded test. + RUN BROWSER=chrome THREAD_MODEL=mt ./run_acir_tests_browser.sh double_verify_proof + # Run 1_mul through bb.js on chrome/webkit testing single threaded browser support. + RUN BROWSER=chrome THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul + # Commenting for now as fails intermittently. Unreproducable on mainframe. 
+ # See https://github.com/AztecProtocol/aztec-packages/issues/2104 + #RUN BROWSER=webkit THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul diff --git a/noir/noir-repo/.github/workflows/formatting.yml b/noir/noir-repo/.github/workflows/formatting.yml index 8166fb0f7c29..08c02af519f6 100644 --- a/noir/noir-repo/.github/workflows/formatting.yml +++ b/noir/noir-repo/.github/workflows/formatting.yml @@ -44,7 +44,7 @@ jobs: save-if: ${{ github.event_name != 'merge_group' }} - name: Run `cargo clippy` - run: cargo clippy --workspace --locked --release + run: cargo clippy --all-targets --workspace --locked --release - name: Run `cargo fmt` run: cargo fmt --all --check diff --git a/noir/noir-repo/.github/workflows/gates_report.yml b/noir/noir-repo/.github/workflows/gates_report.yml index ba4cb600c596..3d4bef1940e7 100644 --- a/noir/noir-repo/.github/workflows/gates_report.yml +++ b/noir/noir-repo/.github/workflows/gates_report.yml @@ -1,88 +1,88 @@ -name: Report gates diff +# name: Report gates diff -on: - push: - branches: - - master - pull_request: +# on: +# push: +# branches: +# - master +# pull_request: -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] +# jobs: +# build-nargo: +# runs-on: ubuntu-latest +# strategy: +# matrix: +# target: [x86_64-unknown-linux-gnu] - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 +# steps: +# - name: Checkout Noir repo +# uses: actions/checkout@v4 - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 +# - name: Setup toolchain +# uses: dtolnay/rust-toolchain@1.74.1 - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} +# - uses: Swatinem/rust-cache@v2 +# with: +# key: ${{ matrix.target }} +# cache-on-failure: true +# save-if: ${{ github.event_name != 'merge_group' }} - - name: Build Nargo - run: cargo build --package nargo_cli --release +# - name: Build 
Nargo +# run: cargo build --package nargo_cli --release - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz +# - name: Package artifacts +# run: | +# mkdir dist +# cp ./target/release/nargo ./dist/nargo +# 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 +# - name: Upload artifact +# uses: actions/upload-artifact@v4 +# with: +# name: nargo +# path: ./dist/* +# retention-days: 3 - compare_gas_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write +# compare_gas_reports: +# needs: [build-nargo] +# runs-on: ubuntu-latest +# permissions: +# pull-requests: write - steps: - - uses: actions/checkout@v4 +# steps: +# - uses: actions/checkout@v4 - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo +# - name: Download nargo binary +# uses: actions/download-artifact@v4 +# with: +# name: nargo +# path: ./nargo - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V +# - name: Set nargo on PATH +# run: | +# nargo_binary="${{ github.workspace }}/nargo/nargo" +# chmod +x $nargo_binary +# echo "$(dirname $nargo_binary)" >> $GITHUB_PATH +# export PATH="$PATH:$(dirname $nargo_binary)" +# nargo -V - - name: Generate gates report - working-directory: ./test_programs - run: | - ./gates_report.sh - mv gates_report.json ../gates_report.json +# - name: Generate gates report +# working-directory: ./test_programs +# run: | +# ./gates_report.sh +# mv gates_report.json ../gates_report.json - - name: Compare gates reports - id: gates_diff - uses: 
vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 - with: - report: gates_report.json - summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) +# - name: Compare gates reports +# id: gates_diff +# uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 +# with: +# report: gates_report.json +# summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) - - name: Add gates diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - # delete the comment in case changes no longer impact circuit sizes - delete: ${{ !steps.gates_diff.outputs.markdown }} - message: ${{ steps.gates_diff.outputs.markdown }} +# - name: Add gates diff to sticky comment +# if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' +# uses: marocchino/sticky-pull-request-comment@v2 +# with: +# # delete the comment in case changes no longer impact circuit sizes +# delete: ${{ !steps.gates_diff.outputs.markdown }} +# message: ${{ steps.gates_diff.outputs.markdown }} diff --git a/noir/noir-repo/.release-please-manifest.json b/noir/noir-repo/.release-please-manifest.json index e20b12e68ef6..447e12155b8d 100644 --- a/noir/noir-repo/.release-please-manifest.json +++ b/noir/noir-repo/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "0.28.0", - "acvm-repo": "0.44.0" -} \ No newline at end of file + ".": "0.29.0", + "acvm-repo": "0.45.0" +} diff --git a/noir/noir-repo/CHANGELOG.md b/noir/noir-repo/CHANGELOG.md index 148ecfac876f..7be8c387026d 100644 --- a/noir/noir-repo/CHANGELOG.md +++ b/noir/noir-repo/CHANGELOG.md @@ -1,5 +1,38 @@ # Changelog +## [0.29.0](https://github.com/noir-lang/noir/compare/v0.28.0...v0.29.0) (2024-05-03) + + +### ⚠ BREAKING CHANGES + +* use `distinct` return value witnesses by default 
([#4951](https://github.com/noir-lang/noir/issues/4951)) +* Bit shift is restricted to u8 right operand ([#4907](https://github.com/noir-lang/noir/issues/4907)) + +### Features + +* Add `#[inline(tag)]` attribute and codegen ([#4913](https://github.com/noir-lang/noir/issues/4913)) ([1ec9cdc](https://github.com/noir-lang/noir/commit/1ec9cdc7013e867db3672d27e3a6104e4b7e7eef)) +* Add de-sugaring for `impl Trait` in function parameters ([#4919](https://github.com/noir-lang/noir/issues/4919)) ([8aad2e4](https://github.com/noir-lang/noir/commit/8aad2e45acbe08afc3902db95a83324f822c35eb)) +* Add variable size sha256 ([#4920](https://github.com/noir-lang/noir/issues/4920)) ([dbfca58](https://github.com/noir-lang/noir/commit/dbfca58a817ee1f1512e3e02138119f363c3d12b)) +* Bit shift is restricted to u8 right operand ([#4907](https://github.com/noir-lang/noir/issues/4907)) ([c4b0369](https://github.com/noir-lang/noir/commit/c4b03691feca17ef268acab523292f3051f672ea)) +* Complex outputs from acir call ([#4952](https://github.com/noir-lang/noir/issues/4952)) ([2e085b9](https://github.com/noir-lang/noir/commit/2e085b935b143c1305b70cd7ae86907b61a45fc0)) +* **experimental:** `comptime` globals ([#4918](https://github.com/noir-lang/noir/issues/4918)) ([8a3c7f1](https://github.com/noir-lang/noir/commit/8a3c7f1c11666ed5140a63a5aa296ef417c97bfa)) +* Handle `BrilligCall` opcodes in the debugger ([#4897](https://github.com/noir-lang/noir/issues/4897)) ([b380dc4](https://github.com/noir-lang/noir/commit/b380dc44de5c9f8de278ece3d531ebbc2c9238ba)) +* Handle `no_predicates` attribute ([#4942](https://github.com/noir-lang/noir/issues/4942)) ([0ce04d3](https://github.com/noir-lang/noir/commit/0ce04d3ea8734b76d96f5dd0fb2a6cdd4081969e)) +* Handle empty response foreign calls without an external resolver 
([#4959](https://github.com/noir-lang/noir/issues/4959)) ([0154bde](https://github.com/noir-lang/noir/commit/0154bdef9f6dfe45497d77ecbf3904dcc138b8d7)) +* Optimize array sets in if conditions (alternate version) ([#4716](https://github.com/noir-lang/noir/issues/4716)) ([a87c655](https://github.com/noir-lang/noir/commit/a87c655c6c8c077c71e3372cc9181b7870348a3d)) +* Use `distinct` return value witnesses by default ([#4951](https://github.com/noir-lang/noir/issues/4951)) ([5f1b584](https://github.com/noir-lang/noir/commit/5f1b58470779e977293323d10ab9a8f0857ea29e)) + + +### Bug Fixes + +* Ban self-referential structs ([#4883](https://github.com/noir-lang/noir/issues/4883)) ([800f670](https://github.com/noir-lang/noir/commit/800f670b63a5a2ae08f09a86dae767089f7f67af)) +* Discard ref counts during unrolling ([#4923](https://github.com/noir-lang/noir/issues/4923)) ([91062db](https://github.com/noir-lang/noir/commit/91062db84a749bf191eae9ce487a2315cc74bfb2)) +* Ensure where clauses propagated to trait default definitions ([#4894](https://github.com/noir-lang/noir/issues/4894)) ([aaac0f6](https://github.com/noir-lang/noir/commit/aaac0f6bffbe11eb090145354f1b82919bb93cb7)) +* Move remove_if_else pass after second inlining ([#4976](https://github.com/noir-lang/noir/issues/4976)) ([96fb3e9](https://github.com/noir-lang/noir/commit/96fb3e94b3a2f7b586d17ea9445f44267f5d9c6d)) +* Nested array equality ([#4903](https://github.com/noir-lang/noir/issues/4903)) ([0cf2e2a](https://github.com/noir-lang/noir/commit/0cf2e2a1b8d247bed03ba5b7b1be5cd30f0d51b2)) +* Require for all foldable functions to use distinct return ([#4949](https://github.com/noir-lang/noir/issues/4949)) ([d4c6806](https://github.com/noir-lang/noir/commit/d4c68066ab35ce1c52510cf0c038fb627a0677c3)) +* Use annotated type when checking declaration 
([#4966](https://github.com/noir-lang/noir/issues/4966)) ([f7fa696](https://github.com/noir-lang/noir/commit/f7fa69661006e1e10ddeecee1cdf8f024d6bc3e9)) + ## [0.28.0](https://github.com/noir-lang/noir/compare/v0.27.0...v0.28.0) (2024-04-24) diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 2e31dee9a60b..a8c63c032aa2 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -4,7 +4,7 @@ version = 3 [[package]] name = "acir" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir_field", "base64 0.21.2", @@ -26,7 +26,7 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.44.0" +version = "0.45.0" dependencies = [ "ark-bls12-381", "ark-bn254", @@ -40,7 +40,7 @@ dependencies = [ [[package]] name = "acvm" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -56,13 +56,14 @@ dependencies = [ [[package]] name = "acvm_blackbox_solver" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir", "blake2", "blake3", "k256", "keccak", + "libaes", "num-bigint", "p256", "sha2", @@ -92,7 +93,7 @@ dependencies = [ [[package]] name = "acvm_js" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acvm", "bn254_blackbox_solver", @@ -441,7 +442,7 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aztec_macros" -version = "0.28.0" +version = "0.29.0" dependencies = [ "convert_case 0.6.0", "iter-extended", @@ -452,7 +453,7 @@ dependencies = [ [[package]] name = "backend-interface" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "bb_abstraction_leaks", @@ -461,7 +462,6 @@ dependencies = [ "dirs", "flate2", "reqwest", - "serde", "serde_json", "tar", "tempfile", @@ -608,7 +608,7 @@ dependencies = [ [[package]] name = "bn254_blackbox_solver" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -630,7 +630,7 @@ dependencies = [ [[package]] name = "brillig" 
-version = "0.44.0" +version = "0.45.0" dependencies = [ "acir_field", "serde", @@ -638,7 +638,7 @@ dependencies = [ [[package]] name = "brillig_vm" -version = "0.44.0" +version = "0.45.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -1762,7 +1762,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.28.0" +version = "0.29.0" dependencies = [ "codespan-reporting", "iter-extended", @@ -2383,7 +2383,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.28.0" +version = "0.29.0" [[package]] name = "itertools" @@ -2616,6 +2616,12 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" +[[package]] +name = "libaes" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82903360c009b816f5ab72a9b68158c27c301ee2c3f20655b55c5e589e7d3bb7" + [[package]] name = "libc" version = "0.2.151" @@ -2808,7 +2814,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "codespan-reporting", @@ -2834,7 +2840,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "assert_cmd", @@ -2888,7 +2894,7 @@ dependencies = [ [[package]] name = "nargo_fmt" -version = "0.28.0" +version = "0.29.0" dependencies = [ "bytecount", "noirc_frontend", @@ -2900,7 +2906,7 @@ dependencies = [ [[package]] name = "nargo_toml" -version = "0.28.0" +version = "0.29.0" dependencies = [ "dirs", "fm", @@ -2979,7 +2985,7 @@ dependencies = [ [[package]] name = "noir_debugger" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "assert_cmd", @@ -3014,7 +3020,7 @@ dependencies = [ [[package]] name = "noir_lsp" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "async-lsp", @@ -3040,7 +3046,7 @@ dependencies = [ [[package]] 
name = "noir_wasm" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "build-data", @@ -3063,7 +3069,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "iter-extended", @@ -3081,7 +3087,7 @@ dependencies = [ [[package]] name = "noirc_abi_wasm" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "build-data", @@ -3098,11 +3104,11 @@ dependencies = [ [[package]] name = "noirc_arena" -version = "0.28.0" +version = "0.29.0" [[package]] name = "noirc_driver" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "aztec_macros", @@ -3123,7 +3129,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "base64 0.21.2", @@ -3141,7 +3147,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "chrono", @@ -3158,7 +3164,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "base64 0.21.2", @@ -3187,7 +3193,7 @@ dependencies = [ [[package]] name = "noirc_printable_type" -version = "0.28.0" +version = "0.29.0" dependencies = [ "acvm", "iter-extended", diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 6fe7f099e82c..f744d6d0cf59 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -41,7 +41,7 @@ resolver = "2" [workspace.package] # x-release-please-start-version -version = "0.28.0" +version = "0.29.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" @@ -52,13 +52,13 @@ repository = "https://github.com/noir-lang/noir/" [workspace.dependencies] # ACVM workspace dependencies -acir_field = { version = "0.44.0", path = "acvm-repo/acir_field", default-features = false } -acir = { version = "0.44.0", path = "acvm-repo/acir", default-features = false } -acvm = { version = "0.44.0", path = "acvm-repo/acvm" } -brillig 
= { version = "0.44.0", path = "acvm-repo/brillig", default-features = false } -brillig_vm = { version = "0.44.0", path = "acvm-repo/brillig_vm", default-features = false } -acvm_blackbox_solver = { version = "0.44.0", path = "acvm-repo/blackbox_solver", default-features = false } -bn254_blackbox_solver = { version = "0.44.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false } +acir_field = { version = "0.45.0", path = "acvm-repo/acir_field", default-features = false } +acir = { version = "0.45.0", path = "acvm-repo/acir", default-features = false } +acvm = { version = "0.45.0", path = "acvm-repo/acvm" } +brillig = { version = "0.45.0", path = "acvm-repo/brillig", default-features = false } +brillig_vm = { version = "0.45.0", path = "acvm-repo/brillig_vm", default-features = false } +acvm_blackbox_solver = { version = "0.45.0", path = "acvm-repo/blackbox_solver", default-features = false } +bn254_blackbox_solver = { version = "0.45.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false } # Noir compiler workspace dependencies fm = { path = "compiler/fm" } diff --git a/noir/noir-repo/acvm-repo/CHANGELOG.md b/noir/noir-repo/acvm-repo/CHANGELOG.md index b7b8ef3c4745..6ac04f6f83fd 100644 --- a/noir/noir-repo/acvm-repo/CHANGELOG.md +++ b/noir/noir-repo/acvm-repo/CHANGELOG.md @@ -5,6 +5,127 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [0.45.0](https://github.com/noir-lang/noir/compare/v0.44.0...v0.45.0) (2024-05-03) + + +### ⚠ BREAKING CHANGES + +* Bit shift is restricted to u8 right operand ([#4907](https://github.com/noir-lang/noir/issues/4907)) +* contract interfaces and better function calls (https://github.com/AztecProtocol/aztec-packages/pull/5687) +* change backend width to 4 (https://github.com/AztecProtocol/aztec-packages/pull/5374) +* Use fixed size arrays in black box functions where sizes are known (https://github.com/AztecProtocol/aztec-packages/pull/5620) +* trap with revert data (https://github.com/AztecProtocol/aztec-packages/pull/5732) +* **acir:** BrilligCall opcode (https://github.com/AztecProtocol/aztec-packages/pull/5709) +* remove fixed-length keccak256 (https://github.com/AztecProtocol/aztec-packages/pull/5617) +* storage_layout and `#[aztec(storage)]` (https://github.com/AztecProtocol/aztec-packages/pull/5387) +* **acir:** Add predicate to call opcode (https://github.com/AztecProtocol/aztec-packages/pull/5616) +* contract_abi-exports (https://github.com/AztecProtocol/aztec-packages/pull/5386) +* Brillig typed memory (https://github.com/AztecProtocol/aztec-packages/pull/5395) +* **acir:** Program and witness stack structure (https://github.com/AztecProtocol/aztec-packages/pull/5149) +* automatic NoteInterface and NoteGetterOptions auto select (https://github.com/AztecProtocol/aztec-packages/pull/4508) +* Acir call opcode (https://github.com/AztecProtocol/aztec-packages/pull/4773) +* Support contracts with no constructor (https://github.com/AztecProtocol/aztec-packages/pull/5175) +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) +* move noir out of yarn-project (https://github.com/AztecProtocol/aztec-packages/pull/4479) +* note type ids 
(https://github.com/AztecProtocol/aztec-packages/pull/4500) +* rename bigint_neg into bigint_sub (https://github.com/AztecProtocol/aztec-packages/pull/4420) +* Add expression width into acir (https://github.com/AztecProtocol/aztec-packages/pull/4014) +* init storage macro (https://github.com/AztecProtocol/aztec-packages/pull/4200) +* **acir:** Move `is_recursive` flag to be part of the circuit definition (https://github.com/AztecProtocol/aztec-packages/pull/4221) +* Sync commits from `aztec-packages` ([#4144](https://github.com/noir-lang/noir/issues/4144)) +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) + +### Features + +* Acir call opcode (https://github.com/AztecProtocol/aztec-packages/pull/4773) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **acir_gen:** Brillig stdlib ([#4848](https://github.com/noir-lang/noir/issues/4848)) ([0c8175c](https://github.com/noir-lang/noir/commit/0c8175cb539efd9427c73ae5af0d48abe688ebab)) +* **acir_gen:** Fold attribute at compile-time and initial non inlined ACIR (https://github.com/AztecProtocol/aztec-packages/pull/5341) ([a0f7474](https://github.com/noir-lang/noir/commit/a0f7474ae6bd74132efdb945d2eb2383f3913cce)) +* **acir:** Add predicate to call opcode (https://github.com/AztecProtocol/aztec-packages/pull/5616) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* **acir:** BrilligCall opcode (https://github.com/AztecProtocol/aztec-packages/pull/5709) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* **acir:** Program and witness stack structure (https://github.com/AztecProtocol/aztec-packages/pull/5149) 
([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* **acvm_js:** Execute program ([#4694](https://github.com/noir-lang/noir/issues/4694)) ([386f6d0](https://github.com/noir-lang/noir/commit/386f6d0a5822912db878285cb001032a7c0ff622)) +* **acvm:** Execute multiple circuits (https://github.com/AztecProtocol/aztec-packages/pull/5380) ([a0f7474](https://github.com/noir-lang/noir/commit/a0f7474ae6bd74132efdb945d2eb2383f3913cce)) +* Add bit size to const opcode (https://github.com/AztecProtocol/aztec-packages/pull/4385) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Add CMOV instruction to brillig and brillig gen (https://github.com/AztecProtocol/aztec-packages/pull/5308) ([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* Add expression width into acir (https://github.com/AztecProtocol/aztec-packages/pull/4014) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Add instrumentation for tracking variables in debugging ([#4122](https://github.com/noir-lang/noir/issues/4122)) ([c58d691](https://github.com/noir-lang/noir/commit/c58d69141b54a918cd1675400c00bfd48720f896)) +* Add poseidon2 opcode implementation for acvm/brillig, and Noir ([#4398](https://github.com/noir-lang/noir/issues/4398)) ([10e8292](https://github.com/noir-lang/noir/commit/10e82920798380f50046e52db4a20ca205191ab7)) +* Add return values to aztec fns (https://github.com/AztecProtocol/aztec-packages/pull/5389) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Add support for overriding expression width ([#4117](https://github.com/noir-lang/noir/issues/4117)) 
([c8026d5](https://github.com/noir-lang/noir/commit/c8026d557d535b10fe455165d6445076df7a03de)) +* Added cast opcode and cast calldata (https://github.com/AztecProtocol/aztec-packages/pull/4423) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Allow brillig to read arrays directly from memory (https://github.com/AztecProtocol/aztec-packages/pull/4460) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Allow nested arrays and vectors in Brillig foreign calls (https://github.com/AztecProtocol/aztec-packages/pull/4478) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Allow variables and stack trace inspection in the debugger ([#4184](https://github.com/noir-lang/noir/issues/4184)) ([bf263fc](https://github.com/noir-lang/noir/commit/bf263fc8d843940f328a90f6366edd2671fb2682)) +* Automatic NoteInterface and NoteGetterOptions auto select (https://github.com/AztecProtocol/aztec-packages/pull/4508) ([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* **avm:** Back in avm context with macro - refactor context (https://github.com/AztecProtocol/aztec-packages/pull/4438) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* **avm:** Brillig CONST of size > u128 (https://github.com/AztecProtocol/aztec-packages/pull/5217) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **avm:** Integrate AVM with initializers (https://github.com/AztecProtocol/aztec-packages/pull/5469) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* **aztec-nr:** Initial work for aztec public vm macro 
(https://github.com/AztecProtocol/aztec-packages/pull/4400) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Backpropagate constants in ACIR during optimization ([#3926](https://github.com/noir-lang/noir/issues/3926)) ([aad0da0](https://github.com/noir-lang/noir/commit/aad0da024c69663f42e6913e674682d5864b26ae)) +* Bit shift is restricted to u8 right operand ([#4907](https://github.com/noir-lang/noir/issues/4907)) ([c4b0369](https://github.com/noir-lang/noir/commit/c4b03691feca17ef268acab523292f3051f672ea)) +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) ([5be049e](https://github.com/noir-lang/noir/commit/5be049eee6c342649462282ee04f6411e6ea392c)) +* Brillig heterogeneous memory cells (https://github.com/AztecProtocol/aztec-packages/pull/5608) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Brillig IR refactor (https://github.com/AztecProtocol/aztec-packages/pull/5233) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Brillig pointer codegen and execution (https://github.com/AztecProtocol/aztec-packages/pull/5737) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Brillig typed memory (https://github.com/AztecProtocol/aztec-packages/pull/5395) ([0bc18c4](https://github.com/noir-lang/noir/commit/0bc18c4f78171590dd58bded959f68f53a44cc8c)) +* Change backend width to 4 (https://github.com/AztecProtocol/aztec-packages/pull/5374) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Check initializer msg.sender matches deployer from address preimage (https://github.com/AztecProtocol/aztec-packages/pull/5222) 
([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Contract interfaces and better function calls (https://github.com/AztecProtocol/aztec-packages/pull/5687) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Contract_abi-exports (https://github.com/AztecProtocol/aztec-packages/pull/5386) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Evaluation of dynamic assert messages ([#4101](https://github.com/noir-lang/noir/issues/4101)) ([c284e01](https://github.com/noir-lang/noir/commit/c284e01bfe20ceae4414dc123624b5cbb8b66d09)) +* Handle `BrilligCall` opcodes in the debugger ([#4897](https://github.com/noir-lang/noir/issues/4897)) ([b380dc4](https://github.com/noir-lang/noir/commit/b380dc44de5c9f8de278ece3d531ebbc2c9238ba)) +* Impl of missing functionality in new key store (https://github.com/AztecProtocol/aztec-packages/pull/5750) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Init storage macro (https://github.com/AztecProtocol/aztec-packages/pull/4200) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Initial Earthly CI (https://github.com/AztecProtocol/aztec-packages/pull/5069) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* **nargo:** Handle call stacks for multiple Acir calls ([#4711](https://github.com/noir-lang/noir/issues/4711)) ([5b23171](https://github.com/noir-lang/noir/commit/5b231714740447d82cde7cdbe65d4a8b46a31df4)) +* New brillig field 
operations and refactor of binary operations (https://github.com/AztecProtocol/aztec-packages/pull/5208) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Note type ids (https://github.com/AztecProtocol/aztec-packages/pull/4500) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Remove replacement of boolean range opcodes with `AssertZero` opcodes ([#4107](https://github.com/noir-lang/noir/issues/4107)) ([dac0e87](https://github.com/noir-lang/noir/commit/dac0e87ee3be3446b92bbb12ef4832fd493fcee3)) +* Restore hashing args via slice for performance (https://github.com/AztecProtocol/aztec-packages/pull/5539) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Signed integer division and modulus in brillig gen (https://github.com/AztecProtocol/aztec-packages/pull/5279) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **simulator:** Fetch return values at circuit execution (https://github.com/AztecProtocol/aztec-packages/pull/5642) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Storage_layout and `#[aztec(storage)]` (https://github.com/AztecProtocol/aztec-packages/pull/5387) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Support contracts with no constructor (https://github.com/AztecProtocol/aztec-packages/pull/5175) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Sync `aztec-packages` ([#4011](https://github.com/noir-lang/noir/issues/4011)) ([fee2452](https://github.com/noir-lang/noir/commit/fee24523c427c27f0bdaf98ea09a852a2da3e94c)) +* Sync commits from `aztec-packages` 
([#4068](https://github.com/noir-lang/noir/issues/4068)) ([7a8f3a3](https://github.com/noir-lang/noir/commit/7a8f3a33b57875e681e3d81e667e3570a1cdbdcc)) +* Sync commits from `aztec-packages` ([#4144](https://github.com/noir-lang/noir/issues/4144)) ([0205d3b](https://github.com/noir-lang/noir/commit/0205d3b4ad0cf5ffd775a43eb5af273a772cf138)) +* Sync from aztec-packages ([#4483](https://github.com/noir-lang/noir/issues/4483)) ([fe8f277](https://github.com/noir-lang/noir/commit/fe8f2776ccfde29209a2c3fc162311c99e4f59be)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5234) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5286) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5572) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5619) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5697) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5794) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5814) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5935) 
([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5955) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5999) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Trap with revert data (https://github.com/AztecProtocol/aztec-packages/pull/5732) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Use fixed size arrays in black box functions where sizes are known (https://github.com/AztecProtocol/aztec-packages/pull/5620) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Variable length returns (https://github.com/AztecProtocol/aztec-packages/pull/5633) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) + + +### Bug Fixes + +* **acvm:** Mark outputs of Opcode::Call solvable ([#4708](https://github.com/noir-lang/noir/issues/4708)) ([8fea405](https://github.com/noir-lang/noir/commit/8fea40576f262bd5bb588923c0660d8967404e56)) +* Avoid huge unrolling in hash_args (https://github.com/AztecProtocol/aztec-packages/pull/5703) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Catch panics from EC point creation (e.g. 
the point is at infinity) ([#4790](https://github.com/noir-lang/noir/issues/4790)) ([645dba1](https://github.com/noir-lang/noir/commit/645dba192f16ef34018828186ffb297422a8dc73)) +* Don't reuse brillig with slice arguments (https://github.com/AztecProtocol/aztec-packages/pull/5800) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Issue 4682 and add solver for unconstrained bigintegers ([#4729](https://github.com/noir-lang/noir/issues/4729)) ([e4d33c1](https://github.com/noir-lang/noir/commit/e4d33c126a2795d9aaa6048d4e91b64cb4bbe4f2)) +* Noir test incorrect reporting (https://github.com/AztecProtocol/aztec-packages/pull/4925) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* Proper field inversion for bigints ([#4802](https://github.com/noir-lang/noir/issues/4802)) ([b46d0e3](https://github.com/noir-lang/noir/commit/b46d0e39f4252f8bbaa987f88d112e4c233b3d61)) +* Remove panic from `init_log_level` in `acvm_js` ([#4195](https://github.com/noir-lang/noir/issues/4195)) ([2e26530](https://github.com/noir-lang/noir/commit/2e26530bf53006c1ed4fee310bcaa905c95dd95b)) +* Return error rather instead of panicking on invalid circuit ([#3976](https://github.com/noir-lang/noir/issues/3976)) ([67201bf](https://github.com/noir-lang/noir/commit/67201bfc21a9c8858aa86be9cd47d463fb78d925)) + + +### Miscellaneous Chores + +* **acir:** Move `is_recursive` flag to be part of the circuit definition (https://github.com/AztecProtocol/aztec-packages/pull/4221) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Move noir out of yarn-project (https://github.com/AztecProtocol/aztec-packages/pull/4479) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Remove 
fixed-length keccak256 (https://github.com/AztecProtocol/aztec-packages/pull/5617) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Rename bigint_neg into bigint_sub (https://github.com/AztecProtocol/aztec-packages/pull/4420) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) + ## [0.44.0](https://github.com/noir-lang/noir/compare/v0.43.0...v0.44.0) (2024-04-24) diff --git a/noir/noir-repo/acvm-repo/acir/Cargo.toml b/noir/noir-repo/acvm-repo/acir/Cargo.toml index 96ba13e3b3b9..9e1a2f940c4a 100644 --- a/noir/noir-repo/acvm-repo/acir/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acir/Cargo.toml @@ -2,7 +2,7 @@ name = "acir" description = "ACIR is the IR that the VM processes, it is analogous to LLVM IR" # x-release-please-start-version -version = "0.44.0" +version = "0.45.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/acir/README.md b/noir/noir-repo/acvm-repo/acir/README.md index e72f7ea178d8..f7fccad0799c 100644 --- a/noir/noir-repo/acvm-repo/acir/README.md +++ b/noir/noir-repo/acvm-repo/acir/README.md @@ -76,6 +76,12 @@ Some more advanced computations assume that the proving system has an 'embedded The black box functions supported by ACIR are: +**AES128Encrypt**: ciphers the provided plaintext using AES128 in CBC mode, padding the input using PKCS#7. +- inputs: byte array [u8; N] +- iv: initialization vector [u8; 16] +- key: user key [u8; 16] +- outputs: byte vector [u8] of length `input.len() + (16 - input.len() % 16)`` + **AND**: performs the bitwise AND of lhs and rhs. bit_size must be the same for both inputs. - lhs: (witness, bit_size) - rhs: (witness, bit_size) @@ -139,18 +145,11 @@ Inputs and outputs are similar to SchnorrVerify, except that because we use a di **EcdsaSecp256r1**: Same as EcdsaSecp256k1, but done over another curve. 
-**FixedBaseScalarMul**: scalar multiplication with a fixed generator of the embedded curve -- input: low, high are 2 (field , 254), representing the low and high part of the input. For Barretenberg, they must both be less than 128 bits. -- output: x and y coordinates of $low*G+high*2^{128}*G$, where G is a fixed generator - -Because the Grumpkin scalar field is bigger than the ACIR field, we provide 2 ACIR fields representing the low and high parts of the Grumpkin scalar $a$: -$a=low+high*2^{128},$ with $low, high < 2^{128}$ - -**VariableBaseScalarMul**: scalar multiplication with a variable base/input point (P) of the embedded curve +**MultiScalarMul**: scalar multiplication with a variable base/input point (P) of the embedded curve - input: - point_x, point_y representing x and y coordinates of input point P - scalar_low, scalar_high are 2 (field , 254), representing the low and high part of the input scalar. For Barretenberg, they must both be less than 128 bits. -- output: x and y coordinates of $low*P+high*2^{128}*P$, where P is the input point P + points (FieldElement, N) a vector of x and y coordinates of input points [x1, y1, x2, y2,...]. + scalars (FieldElement, N) a vector of low and high limbs of input scalars [s1_low, s1_high, s2_low, s2_high, ...]. (FieldElement, N) For Barretenberg, they must both be less than 128 bits. +- output: (FieldElement, N) a vector of x and y coordinates of output points [op1_x, op1_y, op2_x, op2_y, ...]. 
Points computed as $s_low*P+s_high*2^{128}*P$ Because the Grumpkin scalar field is bigger than the ACIR field, we provide 2 ACIR fields representing the low and high parts of the Grumpkin scalar $a$: $a=low+high*2^{128},$ with $low, high < 2^{128}$ diff --git a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs index e51726e3901c..a7f32b4a4c70 100644 --- a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs @@ -33,7 +33,7 @@ fn sample_program(num_opcodes: usize) -> Program { functions: vec![Circuit { current_witness_index: 4000, opcodes: assert_zero_opcodes.to_vec(), - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]), public_parameters: PublicInputs(BTreeSet::from([Witness(5)])), return_values: PublicInputs(BTreeSet::from([Witness(6)])), diff --git a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp index 1e5207c01cbb..5afcd68e987b 100644 --- a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp +++ b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp @@ -24,6 +24,17 @@ namespace Program { struct BlackBoxFuncCall { + struct AES128Encrypt { + std::vector inputs; + std::array iv; + std::array key; + std::vector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + std::vector bincodeSerialize() const; + static AES128Encrypt bincodeDeserialize(std::vector); + }; + struct AND { Program::FunctionInput lhs; Program::FunctionInput rhs; @@ -135,26 +146,14 @@ namespace Program { static EcdsaSecp256r1 bincodeDeserialize(std::vector); }; - struct FixedBaseScalarMul { - Program::FunctionInput low; - Program::FunctionInput high; - std::array outputs; - - friend bool operator==(const FixedBaseScalarMul&, const FixedBaseScalarMul&); - 
std::vector bincodeSerialize() const; - static FixedBaseScalarMul bincodeDeserialize(std::vector); - }; - - struct VariableBaseScalarMul { - Program::FunctionInput point_x; - Program::FunctionInput point_y; - Program::FunctionInput scalar_low; - Program::FunctionInput scalar_high; + struct MultiScalarMul { + std::vector points; + std::vector scalars; std::array outputs; - friend bool operator==(const VariableBaseScalarMul&, const VariableBaseScalarMul&); + friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); std::vector bincodeSerialize() const; - static VariableBaseScalarMul bincodeDeserialize(std::vector); + static MultiScalarMul bincodeDeserialize(std::vector); }; struct EmbeddedCurveAdd { @@ -278,7 +277,7 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); std::vector bincodeSerialize() const; @@ -293,6 +292,33 @@ namespace Program { static BlockId bincodeDeserialize(std::vector); }; + struct BlockType { + + struct Memory { + friend bool operator==(const Memory&, const Memory&); + std::vector bincodeSerialize() const; + static Memory bincodeDeserialize(std::vector); + }; + + struct CallData { + friend bool operator==(const CallData&, const CallData&); + std::vector bincodeSerialize() const; + static CallData bincodeDeserialize(std::vector); + }; + + struct ReturnData { + friend bool operator==(const ReturnData&, const ReturnData&); + std::vector bincodeSerialize() const; + static ReturnData bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const BlockType&, const BlockType&); + std::vector bincodeSerialize() const; + static BlockType bincodeDeserialize(std::vector); + }; + struct Expression { std::vector> mul_terms; std::vector> linear_combinations; @@ -429,6 +455,7 @@ namespace Program { struct MemoryInit { Program::BlockId block_id; std::vector init; + 
Program::BlockType block_type; friend bool operator==(const MemoryInit&, const MemoryInit&); std::vector bincodeSerialize() const; @@ -630,6 +657,17 @@ namespace Program { struct BlackBoxOp { + struct AES128Encrypt { + Program::HeapVector inputs; + Program::HeapArray iv; + Program::HeapArray key; + Program::HeapVector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + std::vector bincodeSerialize() const; + static AES128Encrypt bincodeDeserialize(std::vector); + }; + struct Sha256 { Program::HeapVector message; Program::HeapArray output; @@ -731,26 +769,14 @@ namespace Program { static PedersenHash bincodeDeserialize(std::vector); }; - struct FixedBaseScalarMul { - Program::MemoryAddress low; - Program::MemoryAddress high; - Program::HeapArray result; - - friend bool operator==(const FixedBaseScalarMul&, const FixedBaseScalarMul&); - std::vector bincodeSerialize() const; - static FixedBaseScalarMul bincodeDeserialize(std::vector); - }; - - struct VariableBaseScalarMul { - Program::MemoryAddress point_x; - Program::MemoryAddress point_y; - Program::MemoryAddress scalar_low; - Program::MemoryAddress scalar_high; - Program::HeapArray result; + struct MultiScalarMul { + Program::HeapVector points; + Program::HeapVector scalars; + Program::HeapArray outputs; - friend bool operator==(const VariableBaseScalarMul&, const VariableBaseScalarMul&); + friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); std::vector bincodeSerialize() const; - static VariableBaseScalarMul bincodeDeserialize(std::vector); + static MultiScalarMul bincodeDeserialize(std::vector); }; struct EmbeddedCurveAdd { @@ -844,7 +870,7 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); std::vector bincodeSerialize() const; @@ -2183,6 +2209,53 @@ Program::BlackBoxFuncCall serde::Deserializable::dese return obj; } +namespace 
Program { + + inline bool operator==(const BlackBoxFuncCall::AES128Encrypt &lhs, const BlackBoxFuncCall::AES128Encrypt &rhs) { + if (!(lhs.inputs == rhs.inputs)) { return false; } + if (!(lhs.iv == rhs.iv)) { return false; } + if (!(lhs.key == rhs.key)) { return false; } + if (!(lhs.outputs == rhs.outputs)) { return false; } + return true; + } + + inline std::vector BlackBoxFuncCall::AES128Encrypt::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxFuncCall::AES128Encrypt BlackBoxFuncCall::AES128Encrypt::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxFuncCall::AES128Encrypt &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.iv, serializer); + serde::Serializable::serialize(obj.key, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Program::BlackBoxFuncCall::AES128Encrypt serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlackBoxFuncCall::AES128Encrypt obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.iv = serde::Deserializable::deserialize(deserializer); + obj.key = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Program { inline bool operator==(const BlackBoxFuncCall::AND &lhs, const BlackBoxFuncCall::AND &rhs) { @@ -2672,22 +2745,22 @@ Program::BlackBoxFuncCall::EcdsaSecp256r1 
serde::Deserializable BlackBoxFuncCall::FixedBaseScalarMul::bincodeSerialize() const { + inline std::vector BlackBoxFuncCall::MultiScalarMul::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline BlackBoxFuncCall::FixedBaseScalarMul BlackBoxFuncCall::FixedBaseScalarMul::bincodeDeserialize(std::vector input) { + inline BlackBoxFuncCall::MultiScalarMul BlackBoxFuncCall::MultiScalarMul::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -2698,68 +2771,18 @@ namespace Program { template <> template -void serde::Serializable::serialize(const Program::BlackBoxFuncCall::FixedBaseScalarMul &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.low, serializer); - serde::Serializable::serialize(obj.high, serializer); +void serde::Serializable::serialize(const Program::BlackBoxFuncCall::MultiScalarMul &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.points, serializer); + serde::Serializable::serialize(obj.scalars, serializer); serde::Serializable::serialize(obj.outputs, serializer); } template <> template -Program::BlackBoxFuncCall::FixedBaseScalarMul serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxFuncCall::FixedBaseScalarMul obj; - obj.low = serde::Deserializable::deserialize(deserializer); - obj.high = serde::Deserializable::deserialize(deserializer); - obj.outputs = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - - inline bool operator==(const BlackBoxFuncCall::VariableBaseScalarMul &lhs, 
const BlackBoxFuncCall::VariableBaseScalarMul &rhs) { - if (!(lhs.point_x == rhs.point_x)) { return false; } - if (!(lhs.point_y == rhs.point_y)) { return false; } - if (!(lhs.scalar_low == rhs.scalar_low)) { return false; } - if (!(lhs.scalar_high == rhs.scalar_high)) { return false; } - if (!(lhs.outputs == rhs.outputs)) { return false; } - return true; - } - - inline std::vector BlackBoxFuncCall::VariableBaseScalarMul::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxFuncCall::VariableBaseScalarMul BlackBoxFuncCall::VariableBaseScalarMul::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxFuncCall::VariableBaseScalarMul &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.point_x, serializer); - serde::Serializable::serialize(obj.point_y, serializer); - serde::Serializable::serialize(obj.scalar_low, serializer); - serde::Serializable::serialize(obj.scalar_high, serializer); - serde::Serializable::serialize(obj.outputs, serializer); -} - -template <> -template -Program::BlackBoxFuncCall::VariableBaseScalarMul serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxFuncCall::VariableBaseScalarMul obj; - obj.point_x = serde::Deserializable::deserialize(deserializer); - obj.point_y = serde::Deserializable::deserialize(deserializer); - obj.scalar_low = serde::Deserializable::deserialize(deserializer); - obj.scalar_high = serde::Deserializable::deserialize(deserializer); 
+Program::BlackBoxFuncCall::MultiScalarMul serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlackBoxFuncCall::MultiScalarMul obj; + obj.points = serde::Deserializable::deserialize(deserializer); + obj.scalars = serde::Deserializable::deserialize(deserializer); obj.outputs = serde::Deserializable::deserialize(deserializer); return obj; } @@ -3337,6 +3360,53 @@ Program::BlackBoxOp serde::Deserializable::deserialize(Dese return obj; } +namespace Program { + + inline bool operator==(const BlackBoxOp::AES128Encrypt &lhs, const BlackBoxOp::AES128Encrypt &rhs) { + if (!(lhs.inputs == rhs.inputs)) { return false; } + if (!(lhs.iv == rhs.iv)) { return false; } + if (!(lhs.key == rhs.key)) { return false; } + if (!(lhs.outputs == rhs.outputs)) { return false; } + return true; + } + + inline std::vector BlackBoxOp::AES128Encrypt::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxOp::AES128Encrypt BlackBoxOp::AES128Encrypt::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxOp::AES128Encrypt &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.iv, serializer); + serde::Serializable::serialize(obj.key, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Program::BlackBoxOp::AES128Encrypt serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlackBoxOp::AES128Encrypt obj; + obj.inputs = 
serde::Deserializable::deserialize(deserializer); + obj.iv = serde::Deserializable::deserialize(deserializer); + obj.key = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Program { inline bool operator==(const BlackBoxOp::Sha256 &lhs, const BlackBoxOp::Sha256 &rhs) { @@ -3782,68 +3852,22 @@ Program::BlackBoxOp::PedersenHash serde::Deserializable BlackBoxOp::FixedBaseScalarMul::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxOp::FixedBaseScalarMul BlackBoxOp::FixedBaseScalarMul::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxOp::FixedBaseScalarMul &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.low, serializer); - serde::Serializable::serialize(obj.high, serializer); - serde::Serializable::serialize(obj.result, serializer); -} - -template <> -template -Program::BlackBoxOp::FixedBaseScalarMul serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxOp::FixedBaseScalarMul obj; - obj.low = serde::Deserializable::deserialize(deserializer); - obj.high = serde::Deserializable::deserialize(deserializer); - obj.result = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - - inline bool operator==(const BlackBoxOp::VariableBaseScalarMul &lhs, const BlackBoxOp::VariableBaseScalarMul &rhs) { - if (!(lhs.point_x == rhs.point_x)) { return false; } - if (!(lhs.point_y == 
rhs.point_y)) { return false; } - if (!(lhs.scalar_low == rhs.scalar_low)) { return false; } - if (!(lhs.scalar_high == rhs.scalar_high)) { return false; } - if (!(lhs.result == rhs.result)) { return false; } + inline bool operator==(const BlackBoxOp::MultiScalarMul &lhs, const BlackBoxOp::MultiScalarMul &rhs) { + if (!(lhs.points == rhs.points)) { return false; } + if (!(lhs.scalars == rhs.scalars)) { return false; } + if (!(lhs.outputs == rhs.outputs)) { return false; } return true; } - inline std::vector BlackBoxOp::VariableBaseScalarMul::bincodeSerialize() const { + inline std::vector BlackBoxOp::MultiScalarMul::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline BlackBoxOp::VariableBaseScalarMul BlackBoxOp::VariableBaseScalarMul::bincodeDeserialize(std::vector input) { + inline BlackBoxOp::MultiScalarMul BlackBoxOp::MultiScalarMul::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -3854,23 +3878,19 @@ namespace Program { template <> template -void serde::Serializable::serialize(const Program::BlackBoxOp::VariableBaseScalarMul &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.point_x, serializer); - serde::Serializable::serialize(obj.point_y, serializer); - serde::Serializable::serialize(obj.scalar_low, serializer); - serde::Serializable::serialize(obj.scalar_high, serializer); - serde::Serializable::serialize(obj.result, serializer); +void serde::Serializable::serialize(const Program::BlackBoxOp::MultiScalarMul &obj, Serializer &serializer) { + 
serde::Serializable::serialize(obj.points, serializer); + serde::Serializable::serialize(obj.scalars, serializer); + serde::Serializable::serialize(obj.outputs, serializer); } template <> template -Program::BlackBoxOp::VariableBaseScalarMul serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxOp::VariableBaseScalarMul obj; - obj.point_x = serde::Deserializable::deserialize(deserializer); - obj.point_y = serde::Deserializable::deserialize(deserializer); - obj.scalar_low = serde::Deserializable::deserialize(deserializer); - obj.scalar_high = serde::Deserializable::deserialize(deserializer); - obj.result = serde::Deserializable::deserialize(deserializer); +Program::BlackBoxOp::MultiScalarMul serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlackBoxOp::MultiScalarMul obj; + obj.points = serde::Deserializable::deserialize(deserializer); + obj.scalars = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); return obj; } @@ -4315,6 +4335,153 @@ Program::BlockId serde::Deserializable::deserialize(Deserializ return obj; } +namespace Program { + + inline bool operator==(const BlockType &lhs, const BlockType &rhs) { + if (!(lhs.value == rhs.value)) { return false; } + return true; + } + + inline std::vector BlockType::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType BlockType::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType &obj, 
Serializer &serializer) { + serializer.increase_container_depth(); + serde::Serializable::serialize(obj.value, serializer); + serializer.decrease_container_depth(); +} + +template <> +template +Program::BlockType serde::Deserializable::deserialize(Deserializer &deserializer) { + deserializer.increase_container_depth(); + Program::BlockType obj; + obj.value = serde::Deserializable::deserialize(deserializer); + deserializer.decrease_container_depth(); + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::Memory &lhs, const BlockType::Memory &rhs) { + return true; + } + + inline std::vector BlockType::Memory::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::Memory BlockType::Memory::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::Memory &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::Memory serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::Memory obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::CallData &lhs, const BlockType::CallData &rhs) { + return true; + } + + inline std::vector BlockType::CallData::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::CallData BlockType::CallData::bincodeDeserialize(std::vector input) { + auto deserializer = 
serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::CallData &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::CallData serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::CallData obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::ReturnData &lhs, const BlockType::ReturnData &rhs) { + return true; + } + + inline std::vector BlockType::ReturnData::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::ReturnData BlockType::ReturnData::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::ReturnData &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::ReturnData serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::ReturnData obj; + return obj; +} + namespace Program { inline bool operator==(const BrilligBytecode &lhs, const BrilligBytecode &rhs) { @@ -6451,6 +6618,7 @@ namespace Program { inline bool operator==(const Opcode::MemoryInit &lhs, const Opcode::MemoryInit &rhs) { if (!(lhs.block_id == rhs.block_id)) { return false; } if (!(lhs.init == rhs.init)) { return false; } + 
if (!(lhs.block_type == rhs.block_type)) { return false; } return true; } @@ -6476,6 +6644,7 @@ template void serde::Serializable::serialize(const Program::Opcode::MemoryInit &obj, Serializer &serializer) { serde::Serializable::serialize(obj.block_id, serializer); serde::Serializable::serialize(obj.init, serializer); + serde::Serializable::serialize(obj.block_type, serializer); } template <> @@ -6484,6 +6653,7 @@ Program::Opcode::MemoryInit serde::Deserializable:: Program::Opcode::MemoryInit obj; obj.block_id = serde::Deserializable::deserialize(deserializer); obj.init = serde::Deserializable::deserialize(deserializer); + obj.block_type = serde::Deserializable::deserialize(deserializer); return obj; } diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs index 9a43702a408c..33c14436c851 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs @@ -9,6 +9,8 @@ use strum_macros::EnumIter; #[derive(Clone, Debug, Hash, Copy, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(test, derive(EnumIter))] pub enum BlackBoxFunc { + /// Encrypts the input using AES128. + AES128Encrypt, /// Bitwise AND. AND, /// Bitwise XOR. @@ -36,10 +38,8 @@ pub enum BlackBoxFunc { EcdsaSecp256k1, /// Verifies a ECDSA signature over the secp256r1 curve. EcdsaSecp256r1, - /// Performs scalar multiplication over the embedded curve on which [`FieldElement`][acir_field::FieldElement] is defined and a fixed base/generator point G1. - FixedBaseScalarMul, - /// Performs scalar multiplication over the embedded curve on which [`FieldElement`][acir_field::FieldElement] is defined and a variable base/input point P. - VariableBaseScalarMul, + /// Performs multi scalar multiplication over the embedded curve. + MultiScalarMul, /// Calculates the Keccak256 hash of the inputs. 
Keccak256, /// Keccak Permutation function of 1600 width @@ -76,6 +76,7 @@ impl std::fmt::Display for BlackBoxFunc { impl BlackBoxFunc { pub fn name(&self) -> &'static str { match self { + BlackBoxFunc::AES128Encrypt => "aes128_encrypt", BlackBoxFunc::SHA256 => "sha256", BlackBoxFunc::SchnorrVerify => "schnorr_verify", BlackBoxFunc::Blake2s => "blake2s", @@ -83,8 +84,7 @@ impl BlackBoxFunc { BlackBoxFunc::PedersenCommitment => "pedersen_commitment", BlackBoxFunc::PedersenHash => "pedersen_hash", BlackBoxFunc::EcdsaSecp256k1 => "ecdsa_secp256k1", - BlackBoxFunc::FixedBaseScalarMul => "fixed_base_scalar_mul", - BlackBoxFunc::VariableBaseScalarMul => "variable_base_scalar_mul", + BlackBoxFunc::MultiScalarMul => "multi_scalar_mul", BlackBoxFunc::EmbeddedCurveAdd => "embedded_curve_add", BlackBoxFunc::AND => "and", BlackBoxFunc::XOR => "xor", @@ -106,6 +106,7 @@ impl BlackBoxFunc { pub fn lookup(op_name: &str) -> Option { match op_name { + "aes128_encrypt" => Some(BlackBoxFunc::AES128Encrypt), "sha256" => Some(BlackBoxFunc::SHA256), "schnorr_verify" => Some(BlackBoxFunc::SchnorrVerify), "blake2s" => Some(BlackBoxFunc::Blake2s), @@ -114,8 +115,7 @@ impl BlackBoxFunc { "pedersen_hash" => Some(BlackBoxFunc::PedersenHash), "ecdsa_secp256k1" => Some(BlackBoxFunc::EcdsaSecp256k1), "ecdsa_secp256r1" => Some(BlackBoxFunc::EcdsaSecp256r1), - "fixed_base_scalar_mul" => Some(BlackBoxFunc::FixedBaseScalarMul), - "variable_base_scalar_mul" => Some(BlackBoxFunc::VariableBaseScalarMul), + "multi_scalar_mul" => Some(BlackBoxFunc::MultiScalarMul), "embedded_curve_add" => Some(BlackBoxFunc::EmbeddedCurveAdd), "and" => Some(BlackBoxFunc::AND), "xor" => Some(BlackBoxFunc::XOR), diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs index 7db317c41ab3..e6dc11dac780 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs @@ -11,6 +11,13 @@ mod 
memory_operation; pub use black_box_function_call::{BlackBoxFuncCall, FunctionInput}; pub use memory_operation::{BlockId, MemOp}; +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum BlockType { + Memory, + CallData, + ReturnData, +} + #[allow(clippy::large_enum_variant)] #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum Opcode { @@ -30,6 +37,7 @@ pub enum Opcode { MemoryInit { block_id: BlockId, init: Vec, + block_type: BlockType, }, /// Calls to unconstrained functions BrilligCall { @@ -103,8 +111,12 @@ impl std::fmt::Display for Opcode { write!(f, "(id: {}, op {} at: {}) ", block_id.0, op.operation, op.index) } } - Opcode::MemoryInit { block_id, init } => { - write!(f, "INIT ")?; + Opcode::MemoryInit { block_id, init, block_type: databus } => { + match databus { + BlockType::Memory => write!(f, "INIT ")?, + BlockType::CallData => write!(f, "INIT CALLDATA ")?, + BlockType::ReturnData => write!(f, "INIT RETURNDATA ")?, + } write!(f, "(id: {}, len: {}) ", block_id.0, init.len()) } // We keep the display for a BrilligCall and circuit Call separate as they diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 5715019937c2..115a33c1c9d2 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -12,6 +12,12 @@ pub struct FunctionInput { #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum BlackBoxFuncCall { + AES128Encrypt { + inputs: Vec, + iv: Box<[FunctionInput; 16]>, + key: Box<[FunctionInput; 16]>, + outputs: Vec, + }, AND { lhs: FunctionInput, rhs: FunctionInput, @@ -80,16 +86,9 @@ pub enum BlackBoxFuncCall { hashed_message: Box<[FunctionInput; 32]>, output: Witness, }, - FixedBaseScalarMul { - low: FunctionInput, - high: FunctionInput, - outputs: (Witness, Witness), - }, - 
VariableBaseScalarMul { - point_x: FunctionInput, - point_y: FunctionInput, - scalar_low: FunctionInput, - scalar_high: FunctionInput, + MultiScalarMul { + points: Vec, + scalars: Vec, outputs: (Witness, Witness), }, EmbeddedCurveAdd { @@ -184,6 +183,7 @@ pub enum BlackBoxFuncCall { impl BlackBoxFuncCall { pub fn get_black_box_func(&self) -> BlackBoxFunc { match self { + BlackBoxFuncCall::AES128Encrypt { .. } => BlackBoxFunc::AES128Encrypt, BlackBoxFuncCall::AND { .. } => BlackBoxFunc::AND, BlackBoxFuncCall::XOR { .. } => BlackBoxFunc::XOR, BlackBoxFuncCall::RANGE { .. } => BlackBoxFunc::RANGE, @@ -195,8 +195,7 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::PedersenHash { .. } => BlackBoxFunc::PedersenHash, BlackBoxFuncCall::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxFuncCall::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, - BlackBoxFuncCall::FixedBaseScalarMul { .. } => BlackBoxFunc::FixedBaseScalarMul, - BlackBoxFuncCall::VariableBaseScalarMul { .. } => BlackBoxFunc::VariableBaseScalarMul, + BlackBoxFuncCall::MultiScalarMul { .. } => BlackBoxFunc::MultiScalarMul, BlackBoxFuncCall::EmbeddedCurveAdd { .. } => BlackBoxFunc::EmbeddedCurveAdd, BlackBoxFuncCall::Keccak256 { .. } => BlackBoxFunc::Keccak256, BlackBoxFuncCall::Keccakf1600 { .. } => BlackBoxFunc::Keccakf1600, @@ -218,7 +217,8 @@ impl BlackBoxFuncCall { pub fn get_inputs_vec(&self) -> Vec { match self { - BlackBoxFuncCall::SHA256 { inputs, .. } + BlackBoxFuncCall::AES128Encrypt { inputs, .. } + | BlackBoxFuncCall::SHA256 { inputs, .. } | BlackBoxFuncCall::Blake2s { inputs, .. } | BlackBoxFuncCall::Blake3 { inputs, .. } | BlackBoxFuncCall::PedersenCommitment { inputs, .. } @@ -239,15 +239,11 @@ impl BlackBoxFuncCall { | BlackBoxFuncCall::BigIntMul { .. } | BlackBoxFuncCall::BigIntDiv { .. } | BlackBoxFuncCall::BigIntToLeBytes { .. } => Vec::new(), - BlackBoxFuncCall::FixedBaseScalarMul { low, high, .. 
} => vec![*low, *high], - BlackBoxFuncCall::VariableBaseScalarMul { - point_x, - point_y, - scalar_low, - scalar_high, - .. - } => { - vec![*point_x, *point_y, *scalar_low, *scalar_high] + BlackBoxFuncCall::MultiScalarMul { points, scalars, .. } => { + let mut inputs: Vec = Vec::with_capacity(points.len() * 2); + inputs.extend(points.iter().copied()); + inputs.extend(scalars.iter().copied()); + inputs } BlackBoxFuncCall::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, .. @@ -260,7 +256,8 @@ impl BlackBoxFuncCall { message, .. } => { - let mut inputs = Vec::with_capacity(2 + signature.len() + message.len()); + let mut inputs: Vec = + Vec::with_capacity(2 + signature.len() + message.len()); inputs.push(*public_key_x); inputs.push(*public_key_y); inputs.extend(signature.iter().copied()); @@ -337,7 +334,8 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::Sha256Compression { outputs, .. } => outputs.to_vec(), - BlackBoxFuncCall::Poseidon2Permutation { outputs, .. } => outputs.to_vec(), + BlackBoxFuncCall::AES128Encrypt { outputs, .. } + | BlackBoxFuncCall::Poseidon2Permutation { outputs, .. } => outputs.to_vec(), BlackBoxFuncCall::AND { output, .. } | BlackBoxFuncCall::XOR { output, .. } @@ -345,8 +343,7 @@ impl BlackBoxFuncCall { | BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } | BlackBoxFuncCall::PedersenHash { output, .. } | BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } => vec![*output], - BlackBoxFuncCall::FixedBaseScalarMul { outputs, .. } - | BlackBoxFuncCall::VariableBaseScalarMul { outputs, .. } + BlackBoxFuncCall::MultiScalarMul { outputs, .. } | BlackBoxFuncCall::PedersenCommitment { outputs, .. } | BlackBoxFuncCall::EmbeddedCurveAdd { outputs, .. } => vec![outputs.0, outputs.1], BlackBoxFuncCall::RANGE { .. 
} diff --git a/noir/noir-repo/acvm-repo/acir/src/lib.rs b/noir/noir-repo/acvm-repo/acir/src/lib.rs index 24f27aae06fa..f60f1b46b6a4 100644 --- a/noir/noir-repo/acvm-repo/acir/src/lib.rs +++ b/noir/noir-repo/acvm-repo/acir/src/lib.rs @@ -41,7 +41,7 @@ mod reflection { circuit::{ brillig::{BrilligInputs, BrilligOutputs}, directives::Directive, - opcodes::BlackBoxFuncCall, + opcodes::{BlackBoxFuncCall, BlockType}, AssertionPayload, Circuit, ExpressionOrMemory, ExpressionWidth, Opcode, OpcodeLocation, Program, }, @@ -60,6 +60,7 @@ mod reflection { }; let mut tracer = Tracer::new(TracerConfig::default()); + tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); diff --git a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs index 2ad082410a1b..ecc1a26e3a44 100644 --- a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs @@ -58,47 +58,22 @@ fn addition_circuit() { } #[test] -fn fixed_base_scalar_mul_circuit() { - let fixed_base_scalar_mul = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::FixedBaseScalarMul { - low: FunctionInput { witness: Witness(1), num_bits: 128 }, - high: FunctionInput { witness: Witness(2), num_bits: 128 }, - outputs: (Witness(3), Witness(4)), +fn multi_scalar_mul_circuit() { + let multi_scalar_mul = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::MultiScalarMul { + points: vec![ + FunctionInput { witness: Witness(1), num_bits: 128 }, + FunctionInput { witness: Witness(2), num_bits: 128 }, + ], + scalars: vec![ + FunctionInput { witness: Witness(3), num_bits: 128 }, + FunctionInput { witness: Witness(4), num_bits: 128 }, + ], + outputs: (Witness(5), Witness(6)), }); - let circuit = Circuit { - current_witness_index: 5, - opcodes: vec![fixed_base_scalar_mul], - private_parameters: 
BTreeSet::from([Witness(1), Witness(2)]), - return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(3), Witness(4)])), - ..Circuit::default() - }; - let program = Program { functions: vec![circuit], unconstrained_functions: vec![] }; - - let bytes = Program::serialize_program(&program); - - let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 138, 81, 10, 0, 48, 8, 66, 87, 219, 190, 118, 233, - 29, 61, 35, 3, 19, 228, 137, 60, 91, 149, 139, 26, 119, 242, 145, 31, 117, 114, 163, 135, - 142, 139, 219, 91, 127, 117, 71, 2, 117, 84, 50, 98, 113, 0, 0, 0, - ]; - - assert_eq!(bytes, expected_serialization) -} - -#[test] -fn variable_base_scalar_mul_circuit() { - let variable_base_scalar_mul = - Opcode::BlackBoxFuncCall(BlackBoxFuncCall::VariableBaseScalarMul { - point_x: FunctionInput { witness: Witness(1), num_bits: 128 }, - point_y: FunctionInput { witness: Witness(2), num_bits: 128 }, - scalar_low: FunctionInput { witness: Witness(3), num_bits: 128 }, - scalar_high: FunctionInput { witness: Witness(4), num_bits: 128 }, - outputs: (Witness(5), Witness(6)), - }); - let circuit = Circuit { current_witness_index: 7, - opcodes: vec![variable_base_scalar_mul], + opcodes: vec![multi_scalar_mul], private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]), return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(5), Witness(6)])), ..Circuit::default() @@ -108,10 +83,10 @@ fn variable_base_scalar_mul_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 139, 65, 10, 0, 32, 8, 4, 213, 172, 46, 61, 186, - 167, 103, 52, 65, 185, 176, 140, 44, 142, 202, 73, 143, 42, 247, 230, 128, 51, 106, 176, - 64, 135, 53, 218, 112, 252, 113, 141, 223, 187, 9, 155, 36, 231, 203, 2, 176, 218, 19, 62, - 137, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 186, 244, 104, 159, + 30, 45, 218, 136, 141, 33, 40, 186, 93, 
76, 208, 57, 31, 93, 96, 136, 47, 250, 146, 188, + 209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101, 38, 63, 180, 243, 97, 3, 125, 173, + 118, 131, 153, 0, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -137,11 +112,10 @@ fn pedersen_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 9, 10, 0, 0, 4, 115, 149, 255, 127, 88, 8, 133, - 213, 218, 137, 80, 144, 32, 182, 79, 213, 151, 173, 61, 5, 121, 245, 91, 103, 255, 191, 3, - 7, 16, 26, 112, 158, 113, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 73, 10, 0, 0, 4, 180, 29, 252, 255, 193, 66, 40, + 76, 77, 179, 34, 20, 36, 136, 237, 83, 245, 101, 107, 79, 65, 94, 253, 214, 217, 255, 239, + 192, 1, 43, 124, 181, 238, 113, 0, 0, 0, ]; - assert_eq!(bytes, expected_serialization) } @@ -184,7 +158,7 @@ fn schnorr_verify_circuit() { let expected_serialization: Vec = vec![ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 210, 85, 78, 67, 81, 24, 133, 209, 226, 238, 238, 238, 238, 238, 165, 148, 82, 102, 193, 252, 135, 64, 232, 78, 87, 147, 114, 147, 147, 5, - 47, 132, 252, 251, 107, 41, 212, 191, 159, 218, 107, 241, 115, 236, 228, 111, 237, 181, + 47, 132, 252, 251, 107, 41, 212, 191, 159, 218, 107, 241, 115, 236, 226, 111, 237, 181, 178, 173, 246, 186, 107, 175, 157, 29, 236, 100, 23, 27, 175, 135, 189, 236, 99, 63, 7, 56, 200, 33, 14, 115, 132, 163, 28, 227, 56, 39, 56, 201, 41, 78, 115, 134, 179, 156, 227, 60, 23, 184, 200, 37, 46, 115, 133, 171, 92, 227, 58, 55, 184, 201, 45, 110, 115, 135, 187, @@ -196,8 +170,8 @@ fn schnorr_verify_circuit() { 180, 144, 14, 210, 64, 246, 95, 46, 212, 119, 207, 230, 217, 59, 91, 103, 231, 108, 156, 125, 183, 237, 186, 107, 207, 125, 59, 30, 218, 239, 216, 110, 167, 246, 58, 183, 211, 165, 125, 174, 237, 114, 107, 143, 123, 59, 60, 186, 255, 179, 187, 191, 186, 115, 209, 125, 75, - 238, 90, 118, 207, 138, 59, 54, 110, 214, 184, 91, 161, 233, 158, 255, 190, 63, 165, 188, - 93, 151, 233, 
3, 0, 0, + 238, 90, 118, 207, 138, 59, 54, 110, 214, 184, 91, 161, 233, 158, 255, 190, 63, 71, 59, 68, + 130, 233, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -373,7 +347,11 @@ fn complex_brillig_foreign_call() { fn memory_op_circuit() { let init = vec![Witness(1), Witness(2)]; - let memory_init = Opcode::MemoryInit { block_id: BlockId(0), init }; + let memory_init = Opcode::MemoryInit { + block_id: BlockId(0), + init, + block_type: acir::circuit::opcodes::BlockType::Memory, + }; let write = Opcode::MemoryOp { block_id: BlockId(0), op: MemOp::write_to_mem_index(FieldElement::from(1u128).into(), Witness(3).into()), @@ -397,11 +375,11 @@ fn memory_op_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, - 255, 171, 10, 154, 16, 210, 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, - 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, 216, 151, 227, 188, 52, 187, 92, - 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, 16, - 35, 3, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, + 255, 171, 10, 82, 176, 232, 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, + 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, 217, 190, 24, 236, 75, 113, 94, 146, + 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, 11, 161, + 73, 39, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) diff --git a/noir/noir-repo/acvm-repo/acir_field/Cargo.toml b/noir/noir-repo/acvm-repo/acir_field/Cargo.toml index fcbe80ded2db..89ee161206e2 100644 --- a/noir/noir-repo/acvm-repo/acir_field/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acir_field/Cargo.toml @@ -2,7 +2,7 @@ name = "acir_field" description = "The field implementation being used by ACIR." 
# x-release-please-start-version -version = "0.44.0" +version = "0.45.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm/Cargo.toml b/noir/noir-repo/acvm-repo/acvm/Cargo.toml index 0061eec5bc80..1b671d493663 100644 --- a/noir/noir-repo/acvm-repo/acvm/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acvm/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm" description = "The virtual machine that processes ACIR given a backend/proof system." # x-release-please-start-version -version = "0.44.0" +version = "0.45.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs index 0e7d28104daf..5b778f63f079 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs @@ -282,7 +282,7 @@ mod tests { fn test_circuit(opcodes: Vec) -> Circuit { Circuit { current_witness_index: 1, - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, opcodes, private_parameters: BTreeSet::new(), public_parameters: PublicInputs::default(), diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index c6ca18d30ae4..0e1629717b36 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -164,7 +164,7 @@ mod tests { Circuit { current_witness_index: 1, - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, opcodes, private_parameters: BTreeSet::new(), public_parameters: PublicInputs::default(), diff --git 
a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/aes128.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/aes128.rs new file mode 100644 index 000000000000..c02c59a174fd --- /dev/null +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/aes128.rs @@ -0,0 +1,32 @@ +use acir::{ + circuit::opcodes::FunctionInput, + native_types::{Witness, WitnessMap}, + FieldElement, +}; +use acvm_blackbox_solver::aes128_encrypt; + +use crate::{pwg::insert_value, OpcodeResolutionError}; + +use super::utils::{to_u8_array, to_u8_vec}; + +pub(super) fn solve_aes128_encryption_opcode( + initial_witness: &mut WitnessMap, + inputs: &[FunctionInput], + iv: &[FunctionInput; 16], + key: &[FunctionInput; 16], + outputs: &[Witness], +) -> Result<(), OpcodeResolutionError> { + let scalars = to_u8_vec(initial_witness, inputs)?; + + let iv = to_u8_array(initial_witness, iv)?; + let key = to_u8_array(initial_witness, key)?; + + let ciphertext = aes128_encrypt(&scalars, iv, key)?; + + // Write witness assignments + for (output_witness, value) in outputs.iter().zip(ciphertext.into_iter()) { + insert_value(output_witness, FieldElement::from(value as u128), initial_witness)?; + } + + Ok(()) +} diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs new file mode 100644 index 000000000000..ee35385fa814 --- /dev/null +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs @@ -0,0 +1,53 @@ +use acir::{ + circuit::opcodes::FunctionInput, + native_types::{Witness, WitnessMap}, +}; +use acvm_blackbox_solver::BlackBoxFunctionSolver; + +use crate::pwg::{insert_value, witness_to_value, OpcodeResolutionError}; + +pub(super) fn multi_scalar_mul( + backend: &impl BlackBoxFunctionSolver, + initial_witness: &mut WitnessMap, + points: &[FunctionInput], + scalars: &[FunctionInput], + outputs: (Witness, Witness), +) -> Result<(), OpcodeResolutionError> { + let points: Result, _> = + 
points.iter().map(|input| witness_to_value(initial_witness, input.witness)).collect(); + let points: Vec<_> = points?.into_iter().cloned().collect(); + + let scalars: Result, _> = + scalars.iter().map(|input| witness_to_value(initial_witness, input.witness)).collect(); + let scalars: Vec<_> = scalars?.into_iter().cloned().collect(); + + // Call the backend's multi-scalar multiplication function + let (res_x, res_y) = backend.multi_scalar_mul(&points, &scalars)?; + + // Insert the resulting point into the witness map + insert_value(&outputs.0, res_x, initial_witness)?; + insert_value(&outputs.1, res_y, initial_witness)?; + + Ok(()) +} + +pub(super) fn embedded_curve_add( + backend: &impl BlackBoxFunctionSolver, + initial_witness: &mut WitnessMap, + input1_x: FunctionInput, + input1_y: FunctionInput, + input2_x: FunctionInput, + input2_y: FunctionInput, + outputs: (Witness, Witness), +) -> Result<(), OpcodeResolutionError> { + let input1_x = witness_to_value(initial_witness, input1_x.witness)?; + let input1_y = witness_to_value(initial_witness, input1_y.witness)?; + let input2_x = witness_to_value(initial_witness, input2_x.witness)?; + let input2_y = witness_to_value(initial_witness, input2_y.witness)?; + let (res_x, res_y) = backend.ec_add(input1_x, input1_y, input2_x, input2_y)?; + + insert_value(&outputs.0, res_x, initial_witness)?; + insert_value(&outputs.1, res_y, initial_witness)?; + + Ok(()) +} diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/fixed_base_scalar_mul.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/fixed_base_scalar_mul.rs deleted file mode 100644 index 79e33ae8de53..000000000000 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/fixed_base_scalar_mul.rs +++ /dev/null @@ -1,70 +0,0 @@ -// TODO(https://github.com/noir-lang/noir/issues/4932): rename this file to something more generic -use acir::{ - circuit::opcodes::FunctionInput, - native_types::{Witness, WitnessMap}, -}; -use acvm_blackbox_solver::BlackBoxFunctionSolver; 
- -use crate::pwg::{insert_value, witness_to_value, OpcodeResolutionError}; - -pub(super) fn fixed_base_scalar_mul( - backend: &impl BlackBoxFunctionSolver, - initial_witness: &mut WitnessMap, - low: FunctionInput, - high: FunctionInput, - outputs: (Witness, Witness), -) -> Result<(), OpcodeResolutionError> { - let low = witness_to_value(initial_witness, low.witness)?; - let high = witness_to_value(initial_witness, high.witness)?; - - let (pub_x, pub_y) = backend.fixed_base_scalar_mul(low, high)?; - - insert_value(&outputs.0, pub_x, initial_witness)?; - insert_value(&outputs.1, pub_y, initial_witness)?; - - Ok(()) -} - -pub(super) fn variable_base_scalar_mul( - backend: &impl BlackBoxFunctionSolver, - initial_witness: &mut WitnessMap, - point_x: FunctionInput, - point_y: FunctionInput, - scalar_low: FunctionInput, - scalar_high: FunctionInput, - outputs: (Witness, Witness), -) -> Result<(), OpcodeResolutionError> { - let point_x = witness_to_value(initial_witness, point_x.witness)?; - let point_y = witness_to_value(initial_witness, point_y.witness)?; - let scalar_low = witness_to_value(initial_witness, scalar_low.witness)?; - let scalar_high = witness_to_value(initial_witness, scalar_high.witness)?; - - let (out_point_x, out_point_y) = - backend.variable_base_scalar_mul(point_x, point_y, scalar_low, scalar_high)?; - - insert_value(&outputs.0, out_point_x, initial_witness)?; - insert_value(&outputs.1, out_point_y, initial_witness)?; - - Ok(()) -} - -pub(super) fn embedded_curve_add( - backend: &impl BlackBoxFunctionSolver, - initial_witness: &mut WitnessMap, - input1_x: FunctionInput, - input1_y: FunctionInput, - input2_x: FunctionInput, - input2_y: FunctionInput, - outputs: (Witness, Witness), -) -> Result<(), OpcodeResolutionError> { - let input1_x = witness_to_value(initial_witness, input1_x.witness)?; - let input1_y = witness_to_value(initial_witness, input1_y.witness)?; - let input2_x = witness_to_value(initial_witness, input2_x.witness)?; - let input2_y = 
witness_to_value(initial_witness, input2_y.witness)?; - let (res_x, res_y) = backend.ec_add(input1_x, input1_y, input2_x, input2_y)?; - - insert_value(&outputs.0, res_x, initial_witness)?; - insert_value(&outputs.1, res_y, initial_witness)?; - - Ok(()) -} diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 2487d511b502..a74f44b79dc2 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -6,21 +6,24 @@ use acir::{ use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; use self::{ - bigint::AcvmBigIntSolver, hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, + aes128::solve_aes128_encryption_opcode, bigint::AcvmBigIntSolver, + hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, }; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; use crate::{pwg::witness_to_value, BlackBoxFunctionSolver}; +mod aes128; pub(crate) mod bigint; -mod fixed_base_scalar_mul; +mod embedded_curve_ops; mod hash; mod logic; mod pedersen; mod range; mod signature; +pub(crate) mod utils; -use fixed_base_scalar_mul::{embedded_curve_add, fixed_base_scalar_mul, variable_base_scalar_mul}; +use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; // Hash functions should eventually be exposed for external consumers. 
use hash::{solve_generic_256_hash_opcode, solve_sha_256_permutation_opcode}; use logic::{and, xor}; @@ -68,6 +71,9 @@ pub(crate) fn solve( } match bb_func { + BlackBoxFuncCall::AES128Encrypt { inputs, iv, key, outputs } => { + solve_aes128_encryption_opcode(initial_witness, inputs, iv, key, outputs) + } BlackBoxFuncCall::AND { lhs, rhs, output } => and(initial_witness, lhs, rhs, output), BlackBoxFuncCall::XOR { lhs, rhs, output } => xor(initial_witness, lhs, rhs, output), BlackBoxFuncCall::RANGE { input } => solve_range_opcode(initial_witness, input), @@ -155,24 +161,9 @@ pub(crate) fn solve( message.as_ref(), *output, ), - BlackBoxFuncCall::FixedBaseScalarMul { low, high, outputs } => { - fixed_base_scalar_mul(backend, initial_witness, *low, *high, *outputs) - } - BlackBoxFuncCall::VariableBaseScalarMul { - point_x, - point_y, - scalar_low, - scalar_high, - outputs, - } => variable_base_scalar_mul( - backend, - initial_witness, - *point_x, - *point_y, - *scalar_low, - *scalar_high, - *outputs, - ), + BlackBoxFuncCall::MultiScalarMul { points, scalars, outputs } => { + multi_scalar_mul(backend, initial_witness, points, scalars, *outputs) + } BlackBoxFuncCall::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, outputs } => { embedded_curve_add( backend, diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/ecdsa.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/ecdsa.rs index b113c8012512..ce2e57e0bd7f 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/ecdsa.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/ecdsa.rs @@ -5,9 +5,13 @@ use acir::{ }; use acvm_blackbox_solver::{ecdsa_secp256k1_verify, ecdsa_secp256r1_verify}; -use crate::{pwg::insert_value, OpcodeResolutionError}; - -use super::{to_u8_array, to_u8_vec}; +use crate::{ + pwg::{ + blackbox::utils::{to_u8_array, to_u8_vec}, + insert_value, + }, + OpcodeResolutionError, +}; pub(crate) fn secp256k1_prehashed( initial_witness: &mut 
WitnessMap, diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs index bd223ecd0c97..0cfb96740b86 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs @@ -1,36 +1,2 @@ -use acir::{circuit::opcodes::FunctionInput, native_types::WitnessMap}; - -use crate::pwg::{witness_to_value, OpcodeResolutionError}; - -fn to_u8_array( - initial_witness: &WitnessMap, - inputs: &[FunctionInput; N], -) -> Result<[u8; N], OpcodeResolutionError> { - let mut result = [0; N]; - for (it, input) in result.iter_mut().zip(inputs) { - let witness_value_bytes = witness_to_value(initial_witness, input.witness)?.to_be_bytes(); - let byte = witness_value_bytes - .last() - .expect("Field element must be represented by non-zero amount of bytes"); - *it = *byte; - } - Ok(result) -} - -fn to_u8_vec( - initial_witness: &WitnessMap, - inputs: &[FunctionInput], -) -> Result, OpcodeResolutionError> { - let mut result = Vec::with_capacity(inputs.len()); - for input in inputs { - let witness_value_bytes = witness_to_value(initial_witness, input.witness)?.to_be_bytes(); - let byte = witness_value_bytes - .last() - .expect("Field element must be represented by non-zero amount of bytes"); - result.push(*byte); - } - Ok(result) -} - pub(super) mod ecdsa; pub(super) mod schnorr; diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs index 3d0216fa2173..7b085d9ff476 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs @@ -1,6 +1,8 @@ -use super::{to_u8_array, to_u8_vec}; use crate::{ - pwg::{insert_value, witness_to_value, OpcodeResolutionError}, + pwg::{ + blackbox::utils::{to_u8_array, to_u8_vec}, + insert_value, witness_to_value, 
OpcodeResolutionError, + }, BlackBoxFunctionSolver, }; use acir::{ diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/utils.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/utils.rs new file mode 100644 index 000000000000..700f30890aeb --- /dev/null +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/utils.rs @@ -0,0 +1,33 @@ +use acir::{circuit::opcodes::FunctionInput, native_types::WitnessMap}; + +use crate::pwg::{witness_to_value, OpcodeResolutionError}; + +pub(crate) fn to_u8_array( + initial_witness: &WitnessMap, + inputs: &[FunctionInput; N], +) -> Result<[u8; N], OpcodeResolutionError> { + let mut result = [0; N]; + for (it, input) in result.iter_mut().zip(inputs) { + let witness_value_bytes = witness_to_value(initial_witness, input.witness)?.to_be_bytes(); + let byte = witness_value_bytes + .last() + .expect("Field element must be represented by non-zero amount of bytes"); + *it = *byte; + } + Ok(result) +} + +pub(crate) fn to_u8_vec( + initial_witness: &WitnessMap, + inputs: &[FunctionInput], +) -> Result, OpcodeResolutionError> { + let mut result = Vec::with_capacity(inputs.len()); + for input in inputs { + let witness_value_bytes = witness_to_value(initial_witness, input.witness)?.to_be_bytes(); + let byte = witness_value_bytes + .last() + .expect("Field element must be represented by non-zero amount of bytes"); + result.push(*byte); + } + Ok(result) +} diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs index a4219adbfa6b..f2649b93991c 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs @@ -335,7 +335,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { &mut self.bigint_solver, ), Opcode::Directive(directive) => solve_directives(&mut self.witness_map, directive), - Opcode::MemoryInit { block_id, init } => { + Opcode::MemoryInit { block_id, init, .. 
} => { let solver = self.block_solvers.entry(*block_id).or_default(); solver.init(init, &self.witness_map) } diff --git a/noir/noir-repo/acvm-repo/acvm/tests/solver.rs b/noir/noir-repo/acvm-repo/acvm/tests/solver.rs index df61083eee4c..495389d7b3e7 100644 --- a/noir/noir-repo/acvm-repo/acvm/tests/solver.rs +++ b/noir/noir-repo/acvm-repo/acvm/tests/solver.rs @@ -4,7 +4,7 @@ use acir::{ brillig::{BinaryFieldOp, HeapArray, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray}, circuit::{ brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs}, - opcodes::{BlockId, MemOp}, + opcodes::{BlockId, BlockType, MemOp}, Opcode, OpcodeLocation, }, native_types::{Expression, Witness, WitnessMap}, @@ -658,7 +658,11 @@ fn memory_operations() { let block_id = BlockId(0); - let init = Opcode::MemoryInit { block_id, init: (1..6).map(Witness).collect() }; + let init = Opcode::MemoryInit { + block_id, + init: (1..6).map(Witness).collect(), + block_type: BlockType::Memory, + }; let read_op = Opcode::MemoryOp { block_id, diff --git a/noir/noir-repo/acvm-repo/acvm_js/Cargo.toml b/noir/noir-repo/acvm-repo/acvm_js/Cargo.toml index b2db54a4e654..1675d6013511 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acvm_js/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_js" description = "Typescript wrapper around the ACVM allowing execution of ACIR code" # x-release-please-start-version -version = "0.44.0" +version = "0.45.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm_js/package.json b/noir/noir-repo/acvm-repo/acvm_js/package.json index 7da8bf84b022..5be2f164ac41 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/package.json +++ b/noir/noir-repo/acvm-repo/acvm_js/package.json @@ -1,6 +1,6 @@ { "name": "@noir-lang/acvm_js", - "version": "0.44.0", + "version": "0.45.0", "publishConfig": { "access": "public" }, diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts 
b/noir/noir-repo/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts index f6287c2ae8a1..625cc91cfe92 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts @@ -93,18 +93,8 @@ it('successfully executes a Pedersen opcode', async function () { expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes a FixedBaseScalarMul opcode', async () => { - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/fixed_base_scalar_mul'); - - const solvedWitness: WitnessMap = await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); -}); - -it('successfully executes a VariableBaseScalarMul opcode', async () => { - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/variable_base_scalar_mul'); +it('successfully executes a MultiScalarMul opcode', async () => { + const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/multi_scalar_mul'); const solvedWitness: WitnessMap = await executeCircuit(bytecode, initialWitnessMap, () => { throw Error('unexpected oracle'); diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/noir/noir-repo/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index f9fd5c10b3ee..3f9bde2898e4 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -90,18 +90,8 @@ it('successfully executes a Pedersen opcode', async function () { expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes a FixedBaseScalarMul opcode', async () => { - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/fixed_base_scalar_mul'); - - const solvedWitness: WitnessMap = await 
executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); -}); - -it('successfully executes a VariableBaseScalarMul opcode', async () => { - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/variable_base_scalar_mul'); +it('successfully executes a MultiScalarMul opcode', async () => { + const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/multi_scalar_mul'); const solvedWitness: WitnessMap = await executeCircuit(bytecode, initialWitnessMap, () => { throw Error('unexpected oracle'); diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/fixed_base_scalar_mul.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/fixed_base_scalar_mul.ts deleted file mode 100644 index 97b5041121a8..000000000000 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/fixed_base_scalar_mul.ts +++ /dev/null @@ -1,17 +0,0 @@ -// See `fixed_base_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
-export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 138, 81, 10, 0, 48, 8, 66, 87, 219, 190, 118, 233, 29, 61, 35, 3, 19, 228, 137, - 60, 91, 149, 139, 26, 119, 242, 145, 31, 117, 114, 163, 135, 142, 139, 219, 91, 127, 117, 71, 2, 117, 84, 50, 98, 113, - 0, 0, 0, -]); -export const initialWitnessMap = new Map([ - [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [2, '0x0000000000000000000000000000000000000000000000000000000000000000'], -]); - -export const expectedWitnessMap = new Map([ - [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [2, '0x0000000000000000000000000000000000000000000000000000000000000000'], - [3, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [4, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'], -]); diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts index 20ea88c71308..f7443c2258b2 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts @@ -1,9 +1,9 @@ // See `memory_op_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, 255, 171, 10, 154, 16, 210, - 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, - 216, 151, 227, 188, 52, 187, 92, 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, - 16, 35, 3, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, 255, 171, 10, 82, 176, 232, + 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, + 217, 190, 24, 236, 75, 113, 94, 146, 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, + 11, 161, 73, 39, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/variable_base_scalar_mul.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts similarity index 69% rename from noir/noir-repo/acvm-repo/acvm_js/test/shared/variable_base_scalar_mul.ts rename to noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts index 400f7bf4e614..8ee0a067a3a5 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/variable_base_scalar_mul.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts @@ -1,8 +1,8 @@ -// See `variable_base_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`. +// See `multi_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 139, 65, 10, 0, 32, 8, 4, 213, 172, 46, 61, 186, 167, 103, 52, 65, 185, 176, - 140, 44, 142, 202, 73, 143, 42, 247, 230, 128, 51, 106, 176, 64, 135, 53, 218, 112, 252, 113, 141, 223, 187, 9, 155, - 36, 231, 203, 2, 176, 218, 19, 62, 137, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 186, 244, 104, 159, 30, 45, 218, 136, 141, 33, + 40, 186, 93, 76, 208, 57, 31, 93, 96, 136, 47, 250, 146, 188, 209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101, + 38, 63, 180, 243, 97, 3, 125, 173, 118, 131, 153, 0, 0, 0, ]); export const initialWitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/pedersen.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/pedersen.ts index e8ddc893d879..6e3ec403d650 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/pedersen.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/pedersen.ts @@ -1,7 +1,7 @@ // See `pedersen_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 9, 10, 0, 0, 4, 115, 149, 255, 127, 88, 8, 133, 213, 218, 137, 80, 144, 32, - 182, 79, 213, 151, 173, 61, 5, 121, 245, 91, 103, 255, 191, 3, 7, 16, 26, 112, 158, 113, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 73, 10, 0, 0, 4, 180, 29, 252, 255, 193, 66, 40, 76, 77, 179, 34, 20, 36, + 136, 237, 83, 245, 101, 107, 79, 65, 94, 253, 214, 217, 255, 239, 192, 1, 43, 124, 181, 238, 113, 0, 0, 0, ]); export const initialWitnessMap = new Map([[1, '0x0000000000000000000000000000000000000000000000000000000000000001']]); diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts index a207aa12b2c1..05fcc47e3aa6 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts @@ -2,7 +2,7 @@ export const bytecode = Uint8Array.from([ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 210, 85, 78, 67, 81, 24, 133, 209, 226, 238, 238, 238, 238, 238, 165, 148, 82, 102, 193, 252, 135, 64, 232, 78, 87, 147, 114, 147, 147, 5, 47, 132, 252, 251, 107, 41, 212, 191, 159, 218, 107, 241, - 115, 236, 228, 111, 237, 181, 178, 173, 246, 186, 107, 175, 157, 29, 236, 100, 23, 27, 175, 135, 189, 236, 99, 63, 7, + 115, 236, 226, 111, 237, 181, 178, 173, 246, 186, 107, 175, 157, 29, 236, 100, 23, 27, 175, 135, 189, 236, 99, 63, 7, 56, 200, 33, 14, 115, 132, 163, 28, 227, 56, 39, 56, 201, 41, 78, 115, 134, 179, 156, 227, 60, 23, 184, 200, 37, 46, 115, 133, 171, 92, 227, 58, 55, 184, 201, 45, 110, 115, 135, 187, 220, 227, 62, 15, 120, 200, 35, 30, 243, 132, 167, 60, 227, 57, 47, 120, 201, 43, 94, 243, 134, 183, 188, 227, 61, 31, 248, 200, 39, 62, 243, 133, 175, 77, 59, 230, 123, @@ -11,7 +11,7 @@ export const bytecode = Uint8Array.from([ 210, 72, 250, 72, 27, 233, 34, 77, 164, 135, 180, 144, 14, 210, 64, 246, 95, 46, 212, 119, 207, 230, 217, 59, 91, 103, 
231, 108, 156, 125, 183, 237, 186, 107, 207, 125, 59, 30, 218, 239, 216, 110, 167, 246, 58, 183, 211, 165, 125, 174, 237, 114, 107, 143, 123, 59, 60, 186, 255, 179, 187, 191, 186, 115, 209, 125, 75, 238, 90, 118, 207, 138, 59, 54, 110, - 214, 184, 91, 161, 233, 158, 255, 190, 63, 165, 188, 93, 151, 233, 3, 0, 0, + 214, 184, 91, 161, 233, 158, 255, 190, 63, 71, 59, 68, 130, 233, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml index 893bed38905c..f40046acad62 100644 --- a/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_blackbox_solver" description = "A solver for the blackbox functions found in ACIR and Brillig" # x-release-please-start-version -version = "0.44.0" +version = "0.45.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -37,6 +37,7 @@ p256 = { version = "0.11.0", features = [ "arithmetic", ] } +libaes = "0.7.0" [features] default = ["bn254"] diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/src/aes128.rs b/noir/noir-repo/acvm-repo/blackbox_solver/src/aes128.rs new file mode 100644 index 000000000000..a4c6a2287443 --- /dev/null +++ b/noir/noir-repo/acvm-repo/blackbox_solver/src/aes128.rs @@ -0,0 +1,12 @@ +use crate::BlackBoxResolutionError; +use libaes::Cipher; + +pub fn aes128_encrypt( + inputs: &[u8], + iv: [u8; 16], + key: [u8; 16], +) -> Result, BlackBoxResolutionError> { + let cipher = Cipher::new_128(&key); + let encrypted = cipher.cbc_encrypt(&iv, inputs); + Ok(encrypted) +} diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index a809e21e2ca9..3403b0fe2325 100644 --- a/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ 
b/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -24,17 +24,10 @@ pub trait BlackBoxFunctionSolver { inputs: &[FieldElement], domain_separator: u32, ) -> Result; - fn fixed_base_scalar_mul( + fn multi_scalar_mul( &self, - low: &FieldElement, - high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; - fn variable_base_scalar_mul( - &self, - point_x: &FieldElement, - point_y: &FieldElement, - scalar_low: &FieldElement, - scalar_high: &FieldElement, + points: &[FieldElement], + scalars: &[FieldElement], ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; fn ec_add( &self, @@ -85,21 +78,12 @@ impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { ) -> Result { Err(Self::fail(BlackBoxFunc::PedersenHash)) } - fn fixed_base_scalar_mul( - &self, - _low: &FieldElement, - _high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Err(Self::fail(BlackBoxFunc::FixedBaseScalarMul)) - } - fn variable_base_scalar_mul( + fn multi_scalar_mul( &self, - _point_x: &FieldElement, - _point_y: &FieldElement, - _scalar_low: &FieldElement, - _scalar_high: &FieldElement, + _points: &[FieldElement], + _scalars: &[FieldElement], ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Err(Self::fail(BlackBoxFunc::VariableBaseScalarMul)) + Err(Self::fail(BlackBoxFunc::MultiScalarMul)) } fn ec_add( &self, diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/src/lib.rs b/noir/noir-repo/acvm-repo/blackbox_solver/src/lib.rs index 0f57f2ce7da3..a68b52a2a620 100644 --- a/noir/noir-repo/acvm-repo/blackbox_solver/src/lib.rs +++ b/noir/noir-repo/acvm-repo/blackbox_solver/src/lib.rs @@ -10,11 +10,13 @@ use acir::BlackBoxFunc; use thiserror::Error; +mod aes128; mod bigint; mod curve_specific_solver; mod ecdsa; mod hash; +pub use aes128::aes128_encrypt; pub use bigint::BigIntSolver; pub use curve_specific_solver::{BlackBoxFunctionSolver, StubbedBlackBoxSolver}; pub use 
ecdsa::{ecdsa_secp256k1_verify, ecdsa_secp256r1_verify}; diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml index d856a57eb9bc..3a6c9b1d55bb 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "bn254_blackbox_solver" description = "Solvers for black box functions which are specific for the bn254 curve" # x-release-please-start-version -version = "0.44.0" +version = "0.45.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs similarity index 53% rename from noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs rename to noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs index 2d7ffe1cf1c8..3f6d2ac86c1a 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs @@ -7,61 +7,59 @@ use acir::{BlackBoxFunc, FieldElement}; use crate::BlackBoxResolutionError; -/// Performs fixed-base scalar multiplication using the curve's generator point. -pub fn fixed_base_scalar_mul( - low: &FieldElement, - high: &FieldElement, +/// Performs multi scalar multiplication of points with scalars. 
+pub fn multi_scalar_mul( + points: &[FieldElement], + scalars: &[FieldElement], ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - let generator = grumpkin::SWAffine::generator(); - let generator_x = FieldElement::from_repr(*generator.x().unwrap()); - let generator_y = FieldElement::from_repr(*generator.y().unwrap()); + if points.len() != scalars.len() { + return Err(BlackBoxResolutionError::Failed( + BlackBoxFunc::MultiScalarMul, + "Points and scalars must have the same length".to_string(), + )); + } - variable_base_scalar_mul(&generator_x, &generator_y, low, high).map_err(|err| match err { - BlackBoxResolutionError::Failed(_, message) => { - BlackBoxResolutionError::Failed(BlackBoxFunc::FixedBaseScalarMul, message) + let mut output_point = grumpkin::SWAffine::zero(); + + for i in (0..points.len()).step_by(2) { + let point = create_point(points[i], points[i + 1]) + .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::MultiScalarMul, e))?; + + let scalar_low: u128 = scalars[i].try_into_u128().ok_or_else(|| { + BlackBoxResolutionError::Failed( + BlackBoxFunc::MultiScalarMul, + format!("Limb {} is not less than 2^128", scalars[i].to_hex()), + ) + })?; + + let scalar_high: u128 = scalars[i + 1].try_into_u128().ok_or_else(|| { + BlackBoxResolutionError::Failed( + BlackBoxFunc::MultiScalarMul, + format!("Limb {} is not less than 2^128", scalars[i + 1].to_hex()), + ) + })?; + + let mut bytes = scalar_high.to_be_bytes().to_vec(); + bytes.extend_from_slice(&scalar_low.to_be_bytes()); + + // Check if this is smaller than the grumpkin modulus + let grumpkin_integer = BigUint::from_bytes_be(&bytes); + + if grumpkin_integer >= grumpkin::FrConfig::MODULUS.into() { + return Err(BlackBoxResolutionError::Failed( + BlackBoxFunc::MultiScalarMul, + format!("{} is not a valid grumpkin scalar", grumpkin_integer.to_str_radix(16)), + )); } - }) -} -pub fn variable_base_scalar_mul( - point_x: &FieldElement, - point_y: &FieldElement, - scalar_low: &FieldElement, 
- scalar_high: &FieldElement, -) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - let point1 = create_point(*point_x, *point_y) - .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::VariableBaseScalarMul, e))?; - - let scalar_low: u128 = scalar_low.try_into_u128().ok_or_else(|| { - BlackBoxResolutionError::Failed( - BlackBoxFunc::VariableBaseScalarMul, - format!("Limb {} is not less than 2^128", scalar_low.to_hex()), - ) - })?; - - let scalar_high: u128 = scalar_high.try_into_u128().ok_or_else(|| { - BlackBoxResolutionError::Failed( - BlackBoxFunc::VariableBaseScalarMul, - format!("Limb {} is not less than 2^128", scalar_high.to_hex()), - ) - })?; - - let mut bytes = scalar_high.to_be_bytes().to_vec(); - bytes.extend_from_slice(&scalar_low.to_be_bytes()); - - // Check if this is smaller than the grumpkin modulus - let grumpkin_integer = BigUint::from_bytes_be(&bytes); - - if grumpkin_integer >= grumpkin::FrConfig::MODULUS.into() { - return Err(BlackBoxResolutionError::Failed( - BlackBoxFunc::VariableBaseScalarMul, - format!("{} is not a valid grumpkin scalar", grumpkin_integer.to_str_radix(16)), - )); + let iteration_output_point = + grumpkin::SWAffine::from(point.mul_bigint(grumpkin_integer.to_u64_digits())); + + output_point = grumpkin::SWAffine::from(output_point + iteration_output_point); } - let result = grumpkin::SWAffine::from(point1.mul_bigint(grumpkin_integer.to_u64_digits())); - if let Some((res_x, res_y)) = result.xy() { - Ok((FieldElement::from_repr(*res_x), FieldElement::from_repr(*res_y))) + if let Some((out_x, out_y)) = output_point.xy() { + Ok((FieldElement::from_repr(*out_x), FieldElement::from_repr(*out_y))) } else { Ok((FieldElement::zero(), FieldElement::zero())) } @@ -100,30 +98,36 @@ fn create_point(x: FieldElement, y: FieldElement) -> Result [FieldElement; 2] { + let generator = grumpkin::SWAffine::generator(); + let generator_x = FieldElement::from_repr(*generator.x().unwrap()); + let generator_y = 
FieldElement::from_repr(*generator.y().unwrap()); + [generator_x, generator_y] + } + #[test] fn smoke_test() -> Result<(), BlackBoxResolutionError> { - let input = FieldElement::one(); + // We check that multiplying 1 by generator results in the generator + let generator = get_generator(); - let res = fixed_base_scalar_mul(&input, &FieldElement::zero())?; - let x = "0000000000000000000000000000000000000000000000000000000000000001"; - let y = "0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c"; + let res = multi_scalar_mul(&generator, &[FieldElement::one(), FieldElement::zero()])?; - assert_eq!(x, res.0.to_hex()); - assert_eq!(y, res.1.to_hex()); + assert_eq!(generator[0], res.0); + assert_eq!(generator[1], res.1); Ok(()) } #[test] fn low_high_smoke_test() -> Result<(), BlackBoxResolutionError> { - let low = FieldElement::one(); - let high = FieldElement::from(2u128); + let points = get_generator(); + let scalars = [FieldElement::one(), FieldElement::from(2u128)]; - let res = fixed_base_scalar_mul(&low, &high)?; + let res = multi_scalar_mul(&points, &scalars)?; let x = "0702ab9c7038eeecc179b4f209991bcb68c7cb05bf4c532d804ccac36199c9a9"; let y = "23f10e9e43a3ae8d75d24154e796aae12ae7af546716e8f81a2564f1b5814130"; @@ -133,19 +137,21 @@ mod grumpkin_fixed_base_scalar_mul { } #[test] - fn rejects_invalid_limbs() { + fn rejects_invalid_scalar_limbs() { + let points = get_generator(); + let max_limb = FieldElement::from(u128::MAX); let invalid_limb = max_limb + FieldElement::one(); let expected_error = Err(BlackBoxResolutionError::Failed( - BlackBoxFunc::FixedBaseScalarMul, + BlackBoxFunc::MultiScalarMul, "Limb 0000000000000000000000000000000100000000000000000000000000000000 is not less than 2^128".into(), )); - let res = fixed_base_scalar_mul(&invalid_limb, &FieldElement::zero()); + let res = multi_scalar_mul(&points, &[FieldElement::one(), invalid_limb]); assert_eq!(res, expected_error); - let res = fixed_base_scalar_mul(&FieldElement::zero(), 
&invalid_limb); + let res = multi_scalar_mul(&points, &[invalid_limb, FieldElement::one()]); assert_eq!(res, expected_error); } @@ -153,60 +159,57 @@ mod grumpkin_fixed_base_scalar_mul { fn rejects_grumpkin_modulus() { let x = grumpkin::FrConfig::MODULUS.to_bytes_be(); - let high = FieldElement::from_be_bytes_reduce(&x[0..16]); let low = FieldElement::from_be_bytes_reduce(&x[16..32]); + let high = FieldElement::from_be_bytes_reduce(&x[0..16]); - let res = fixed_base_scalar_mul(&low, &high); + let res = multi_scalar_mul(&get_generator(), &[low, high]); assert_eq!( res, Err(BlackBoxResolutionError::Failed( - BlackBoxFunc::FixedBaseScalarMul, + BlackBoxFunc::MultiScalarMul, "30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47 is not a valid grumpkin scalar".into(), )) ); } #[test] - fn variable_base_matches_fixed_base_for_generator_on_input( - ) -> Result<(), BlackBoxResolutionError> { - let low = FieldElement::one(); - let high = FieldElement::from(2u128); - - let generator = grumpkin::SWAffine::generator(); - let generator_x = FieldElement::from_repr(*generator.x().unwrap()); - let generator_y = FieldElement::from_repr(*generator.y().unwrap()); - - let fixed_res = fixed_base_scalar_mul(&low, &high)?; - let variable_res = variable_base_scalar_mul(&generator_x, &generator_y, &low, &high)?; - - assert_eq!(fixed_res, variable_res); - Ok(()) - } - - #[test] - fn variable_base_scalar_mul_rejects_invalid_point() { + fn rejects_invalid_point() { let invalid_point_x = FieldElement::one(); let invalid_point_y = FieldElement::one(); let valid_scalar_low = FieldElement::zero(); let valid_scalar_high = FieldElement::zero(); - let res = variable_base_scalar_mul( - &invalid_point_x, - &invalid_point_y, - &valid_scalar_low, - &valid_scalar_high, + let res = multi_scalar_mul( + &[invalid_point_x, invalid_point_y], + &[valid_scalar_low, valid_scalar_high], ); assert_eq!( res, Err(BlackBoxResolutionError::Failed( - BlackBoxFunc::VariableBaseScalarMul, + 
BlackBoxFunc::MultiScalarMul, "Point (0000000000000000000000000000000000000000000000000000000000000001, 0000000000000000000000000000000000000000000000000000000000000001) is not on curve".into(), )) ); } + #[test] + fn throws_on_args_length_mismatch() { + let points = get_generator(); + let scalars = [FieldElement::from(2u128)]; + + let res = multi_scalar_mul(&points, &scalars); + + assert_eq!( + res, + Err(BlackBoxResolutionError::Failed( + BlackBoxFunc::MultiScalarMul, + "Points and scalars must have the same length".into(), + )) + ); + } + #[test] fn rejects_addition_of_points_not_in_curve() { let x = FieldElement::from(1u128); @@ -222,4 +225,17 @@ mod grumpkin_fixed_base_scalar_mul { )) ); } + + #[test] + fn output_of_msm_matches_add() -> Result<(), BlackBoxResolutionError> { + let points = get_generator(); + let scalars = [FieldElement::from(2u128), FieldElement::zero()]; + + let msm_res = multi_scalar_mul(&points, &scalars)?; + let add_res = embedded_curve_add(points[0], points[1], points[0], points[1])?; + + assert_eq!(msm_res.0, add_res.0); + assert_eq!(msm_res.1, add_res.1); + Ok(()) + } } diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs index 9395260fe36a..4cb51b597552 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -5,13 +5,11 @@ use acir::{BlackBoxFunc, FieldElement}; use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError}; -mod fixed_base_scalar_mul; +mod embedded_curve_ops; mod poseidon2; mod wasm; -pub use fixed_base_scalar_mul::{ - embedded_curve_add, fixed_base_scalar_mul, variable_base_scalar_mul, -}; +pub use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; pub use poseidon2::poseidon2_permutation; use wasm::Barretenberg; @@ -91,22 +89,12 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { }) } - fn fixed_base_scalar_mul( + fn 
multi_scalar_mul( &self, - low: &FieldElement, - high: &FieldElement, + points: &[FieldElement], + scalars: &[FieldElement], ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - fixed_base_scalar_mul(low, high) - } - - fn variable_base_scalar_mul( - &self, - point_x: &FieldElement, - point_y: &FieldElement, - low: &FieldElement, - high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - variable_base_scalar_mul(point_x, point_y, low, high) + multi_scalar_mul(points, scalars) } fn ec_add( diff --git a/noir/noir-repo/acvm-repo/brillig/Cargo.toml b/noir/noir-repo/acvm-repo/brillig/Cargo.toml index 41c2cebdad9f..081abe022ae5 100644 --- a/noir/noir-repo/acvm-repo/brillig/Cargo.toml +++ b/noir/noir-repo/acvm-repo/brillig/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig" description = "Brillig is the bytecode ACIR uses for non-determinism." # x-release-please-start-version -version = "0.44.0" +version = "0.45.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs index f31a434c7725..15abc19ed90c 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs @@ -5,6 +5,13 @@ use serde::{Deserialize, Serialize}; /// They are implemented as native functions in the VM. #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum BlackBoxOp { + /// Encrypts a message using AES128. + AES128Encrypt { + inputs: HeapVector, + iv: HeapArray, + key: HeapArray, + outputs: HeapVector, + }, /// Calculates the SHA256 hash of the inputs. Sha256 { message: HeapVector, @@ -66,19 +73,11 @@ pub enum BlackBoxOp { domain_separator: MemoryAddress, output: MemoryAddress, }, - /// Performs scalar multiplication over the embedded curve. 
- FixedBaseScalarMul { - low: MemoryAddress, - high: MemoryAddress, - result: HeapArray, - }, - /// Performs scalar multiplication over the embedded curve with variable base point. - VariableBaseScalarMul { - point_x: MemoryAddress, - point_y: MemoryAddress, - scalar_low: MemoryAddress, - scalar_high: MemoryAddress, - result: HeapArray, + /// Performs multi scalar multiplication over the embedded curve. + MultiScalarMul { + points: HeapVector, + scalars: HeapVector, + outputs: HeapArray, }, /// Performs addition over the embedded curve. EmbeddedCurveAdd { diff --git a/noir/noir-repo/acvm-repo/brillig_vm/Cargo.toml b/noir/noir-repo/acvm-repo/brillig_vm/Cargo.toml index 3dcc05c08421..57cf3be974a5 100644 --- a/noir/noir-repo/acvm-repo/brillig_vm/Cargo.toml +++ b/noir/noir-repo/acvm-repo/brillig_vm/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig_vm" description = "The virtual machine that processes Brillig bytecode, used to introduce non-determinism to the ACVM" # x-release-please-start-version -version = "0.44.0" +version = "0.45.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs index 9557cdae7b91..c999b5bf330e 100644 --- a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs @@ -2,8 +2,8 @@ use acir::brillig::{BlackBoxOp, HeapArray, HeapVector}; use acir::{BlackBoxFunc, FieldElement}; use acvm_blackbox_solver::BigIntSolver; use acvm_blackbox_solver::{ - blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, keccakf1600, - sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError, + aes128_encrypt, blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, + keccakf1600, sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError, }; use crate::memory::MemoryValue; @@ -38,6 +38,25 @@ pub(crate) fn 
evaluate_black_box( bigint_solver: &mut BigIntSolver, ) -> Result<(), BlackBoxResolutionError> { match op { + BlackBoxOp::AES128Encrypt { inputs, iv, key, outputs } => { + let bb_func = black_box_function_from_op(op); + + let inputs = to_u8_vec(read_heap_vector(memory, inputs)); + + let iv: [u8; 16] = to_u8_vec(read_heap_array(memory, iv)).try_into().map_err(|_| { + BlackBoxResolutionError::Failed(bb_func, "Invalid iv length".to_string()) + })?; + let key: [u8; 16] = + to_u8_vec(read_heap_array(memory, key)).try_into().map_err(|_| { + BlackBoxResolutionError::Failed(bb_func, "Invalid ley length".to_string()) + })?; + let ciphertext = aes128_encrypt(&inputs, iv, key)?; + + memory.write(outputs.size, ciphertext.len().into()); + memory.write_slice(memory.read_ref(outputs.pointer), &to_value_vec(&ciphertext)); + + Ok(()) + } BlackBoxOp::Sha256 { message, output } => { let message = to_u8_vec(read_heap_vector(memory, message)); let bytes = sha256(message.as_slice())?; @@ -136,26 +155,16 @@ pub(crate) fn evaluate_black_box( memory.write(*result, verified.into()); Ok(()) } - BlackBoxOp::FixedBaseScalarMul { low, high, result } => { - let low = memory.read(*low).try_into().unwrap(); - let high = memory.read(*high).try_into().unwrap(); - let (x, y) = solver.fixed_base_scalar_mul(&low, &high)?; + BlackBoxOp::MultiScalarMul { points, scalars, outputs: result } => { + let points: Vec = + read_heap_vector(memory, points).iter().map(|x| x.try_into().unwrap()).collect(); + let scalars: Vec = + read_heap_vector(memory, scalars).iter().map(|x| x.try_into().unwrap()).collect(); + + let (x, y) = solver.multi_scalar_mul(&points, &scalars)?; memory.write_slice(memory.read_ref(result.pointer), &[x.into(), y.into()]); Ok(()) } - BlackBoxOp::VariableBaseScalarMul { point_x, point_y, scalar_low, scalar_high, result } => { - let point_x = memory.read(*point_x).try_into().unwrap(); - let point_y = memory.read(*point_y).try_into().unwrap(); - let scalar_low = 
memory.read(*scalar_low).try_into().unwrap(); - let scalar_high = memory.read(*scalar_high).try_into().unwrap(); - let (out_point_x, out_point_y) = - solver.variable_base_scalar_mul(&point_x, &point_y, &scalar_low, &scalar_high)?; - memory.write_slice( - memory.read_ref(result.pointer), - &[out_point_x.into(), out_point_y.into()], - ); - Ok(()) - } BlackBoxOp::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, result } => { let input1_x = memory.read(*input1_x).try_into().unwrap(); let input1_y = memory.read(*input1_y).try_into().unwrap(); @@ -291,6 +300,7 @@ pub(crate) fn evaluate_black_box( fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { match op { + BlackBoxOp::AES128Encrypt { .. } => BlackBoxFunc::AES128Encrypt, BlackBoxOp::Sha256 { .. } => BlackBoxFunc::SHA256, BlackBoxOp::Blake2s { .. } => BlackBoxFunc::Blake2s, BlackBoxOp::Blake3 { .. } => BlackBoxFunc::Blake3, @@ -301,8 +311,7 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, BlackBoxOp::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, BlackBoxOp::PedersenHash { .. } => BlackBoxFunc::PedersenHash, - BlackBoxOp::FixedBaseScalarMul { .. } => BlackBoxFunc::FixedBaseScalarMul, - BlackBoxOp::VariableBaseScalarMul { .. } => BlackBoxFunc::VariableBaseScalarMul, + BlackBoxOp::MultiScalarMul { .. } => BlackBoxFunc::MultiScalarMul, BlackBoxOp::EmbeddedCurveAdd { .. } => BlackBoxFunc::EmbeddedCurveAdd, BlackBoxOp::BigIntAdd { .. } => BlackBoxFunc::BigIntAdd, BlackBoxOp::BigIntSub { .. 
} => BlackBoxFunc::BigIntSub, diff --git a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs index 5f68ce98c8a7..1afe0a30068e 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs @@ -126,6 +126,7 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { target_contract: self.target_contract, selector: {}, args: args_acc, + gas_opts: dep::aztec::context::gas::GasOpts::default(), }}", args, is_void, fn_selector, ); diff --git a/noir/noir-repo/aztec_macros/src/transforms/functions.rs b/noir/noir-repo/aztec_macros/src/transforms/functions.rs index 0a628f473ef5..90563c6085c8 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/functions.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/functions.rs @@ -1,10 +1,10 @@ use convert_case::{Case, Casing}; use noirc_errors::Span; -use noirc_frontend::ast; +use noirc_frontend::ast::{self, FunctionKind}; use noirc_frontend::ast::{ BlockExpression, ConstrainKind, ConstrainStatement, Expression, ExpressionKind, - ForLoopStatement, ForRange, FunctionKind, FunctionReturnType, Ident, Literal, NoirFunction, - NoirStruct, Param, PathKind, Pattern, Signedness, Statement, StatementKind, UnresolvedType, + ForLoopStatement, ForRange, FunctionReturnType, Ident, Literal, NoirFunction, NoirStruct, + Param, PathKind, Pattern, Signedness, Statement, StatementKind, UnresolvedType, UnresolvedTypeData, Visibility, }; @@ -103,6 +103,8 @@ pub fn transform_function( let return_type = create_return_type(&return_type_name); func.def.return_type = return_type; func.def.return_visibility = Visibility::Public; + } else { + func.def.return_visibility = Visibility::Public; } // Public functions should have unconstrained auto-inferred diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index 
ef874d45f888..5f1985b0553f 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -54,8 +54,8 @@ pub const NOIR_ARTIFACT_VERSION_STRING: &str = #[derive(Args, Clone, Debug, Default)] pub struct CompileOptions { /// Override the expression width requested by the backend. - #[arg(long, value_parser = parse_expression_width)] - pub expression_width: Option, + #[arg(long, value_parser = parse_expression_width, default_value = "4")] + pub expression_width: ExpressionWidth, /// Force a full recompilation. #[arg(long = "force")] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 210e56b2ecba..d982d864d060 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -188,39 +188,20 @@ pub(crate) fn convert_black_box_call( unreachable!("ICE: Schnorr verify expects two registers for the public key, an array for signature, an array for the message hash and one result register") } } - BlackBoxFunc::FixedBaseScalarMul => { - if let ( - [BrilligVariable::SingleAddr(low), BrilligVariable::SingleAddr(high)], - [BrilligVariable::BrilligArray(result_array)], - ) = (function_arguments, function_results) - { - brillig_context.black_box_op_instruction(BlackBoxOp::FixedBaseScalarMul { - low: low.address, - high: high.address, - result: result_array.to_heap_array(), - }); - } else { - unreachable!( - "ICE: FixedBaseScalarMul expects two register arguments and one array result" - ) - } - } - BlackBoxFunc::VariableBaseScalarMul => { - if let ( - [BrilligVariable::SingleAddr(point_x), BrilligVariable::SingleAddr(point_y), BrilligVariable::SingleAddr(scalar_low), BrilligVariable::SingleAddr(scalar_high)], - [BrilligVariable::BrilligArray(result_array)], - ) = 
(function_arguments, function_results) + BlackBoxFunc::MultiScalarMul => { + if let ([points, scalars], [BrilligVariable::BrilligArray(outputs)]) = + (function_arguments, function_results) { - brillig_context.black_box_op_instruction(BlackBoxOp::VariableBaseScalarMul { - point_x: point_x.address, - point_y: point_y.address, - scalar_low: scalar_low.address, - scalar_high: scalar_high.address, - result: result_array.to_heap_array(), + let points = convert_array_or_vector(brillig_context, points, bb_func); + let scalars = convert_array_or_vector(brillig_context, scalars, bb_func); + brillig_context.black_box_op_instruction(BlackBoxOp::MultiScalarMul { + points: points.to_heap_vector(), + scalars: scalars.to_heap_vector(), + outputs: outputs.to_heap_array(), }); } else { unreachable!( - "ICE: VariableBaseScalarMul expects four register arguments and one array result" + "ICE: MultiScalarMul expects two register arguments and one array result" ) } } @@ -420,6 +401,28 @@ pub(crate) fn convert_black_box_call( unreachable!("ICE: Sha256Compression expects two array argument, one array result") } } + BlackBoxFunc::AES128Encrypt => { + if let ( + [inputs, BrilligVariable::BrilligArray(iv), BrilligVariable::BrilligArray(key)], + [BrilligVariable::SingleAddr(out_len), outputs], + ) = (function_arguments, function_results) + { + let inputs = convert_array_or_vector(brillig_context, inputs, bb_func); + let outputs = convert_array_or_vector(brillig_context, outputs, bb_func); + let output_vec = outputs.to_heap_vector(); + brillig_context.black_box_op_instruction(BlackBoxOp::AES128Encrypt { + inputs: inputs.to_heap_vector(), + iv: iv.to_heap_array(), + key: key.to_heap_array(), + outputs: output_vec, + }); + brillig_context.mov_instruction(out_len.address, output_vec.size); + // Returns slice, so we need to allocate memory for it after the fact + brillig_context.increase_free_memory_pointer_instruction(output_vec.size); + } else { + unreachable!("ICE: AES128Encrypt expects three 
array arguments, one array result") + } + } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 873ebe51e6f0..f660c8e0b7a5 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1328,7 +1328,15 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.binary_instruction(left, right, result_variable, brillig_binary_op); - self.add_overflow_check(brillig_binary_op, left, right, result_variable, is_signed); + self.add_overflow_check( + brillig_binary_op, + left, + right, + result_variable, + binary, + dfg, + is_signed, + ); } /// Splits a two's complement signed integer in the sign bit and the absolute value. @@ -1481,15 +1489,20 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(bias); } + #[allow(clippy::too_many_arguments)] fn add_overflow_check( &mut self, binary_operation: BrilligBinaryOp, left: SingleAddrVariable, right: SingleAddrVariable, result: SingleAddrVariable, + binary: &Binary, + dfg: &DataFlowGraph, is_signed: bool, ) { let bit_size = left.bit_size; + let max_lhs_bits = dfg.get_value_max_num_bits(binary.lhs); + let max_rhs_bits = dfg.get_value_max_num_bits(binary.rhs); if bit_size == FieldElement::max_num_bits() { return; @@ -1497,6 +1510,11 @@ impl<'block> BrilligBlock<'block> { match (binary_operation, is_signed) { (BrilligBinaryOp::Add, false) => { + if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { + // `left` and `right` have both been casted up from smaller types and so cannot overflow. 
+ return; + } + let condition = SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); // Check that lhs <= result @@ -1511,6 +1529,12 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(condition); } (BrilligBinaryOp::Sub, false) => { + if dfg.is_constant(binary.lhs) && max_lhs_bits > max_rhs_bits { + // `left` is a fixed constant and `right` is restricted such that `left - right > 0` + // Note strict inequality as `right > left` while `max_lhs_bits == max_rhs_bits` is possible. + return; + } + let condition = SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); // Check that rhs <= lhs @@ -1527,39 +1551,36 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(condition); } (BrilligBinaryOp::Mul, false) => { - // Multiplication overflow is only possible for bit sizes > 1 - if bit_size > 1 { - let is_right_zero = - SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); - let zero = - self.brillig_context.make_constant_instruction(0_usize.into(), bit_size); - self.brillig_context.binary_instruction( - zero, - right, - is_right_zero, - BrilligBinaryOp::Equals, - ); - self.brillig_context.codegen_if_not(is_right_zero.address, |ctx| { - let condition = SingleAddrVariable::new(ctx.allocate_register(), 1); - let division = SingleAddrVariable::new(ctx.allocate_register(), bit_size); - // Check that result / rhs == lhs - ctx.binary_instruction( - result, - right, - division, - BrilligBinaryOp::UnsignedDiv, - ); - ctx.binary_instruction(division, left, condition, BrilligBinaryOp::Equals); - ctx.codegen_constrain( - condition, - Some("attempt to multiply with overflow".to_string()), - ); - ctx.deallocate_single_addr(condition); - ctx.deallocate_single_addr(division); - }); - self.brillig_context.deallocate_single_addr(is_right_zero); - self.brillig_context.deallocate_single_addr(zero); + if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { + // Either 
performing boolean multiplication (which cannot overflow), + // or `left` and `right` have both been casted up from smaller types and so cannot overflow. + return; } + + let is_right_zero = + SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); + let zero = self.brillig_context.make_constant_instruction(0_usize.into(), bit_size); + self.brillig_context.binary_instruction( + zero, + right, + is_right_zero, + BrilligBinaryOp::Equals, + ); + self.brillig_context.codegen_if_not(is_right_zero.address, |ctx| { + let condition = SingleAddrVariable::new(ctx.allocate_register(), 1); + let division = SingleAddrVariable::new(ctx.allocate_register(), bit_size); + // Check that result / rhs == lhs + ctx.binary_instruction(result, right, division, BrilligBinaryOp::UnsignedDiv); + ctx.binary_instruction(division, left, condition, BrilligBinaryOp::Equals); + ctx.codegen_constrain( + condition, + Some("attempt to multiply with overflow".to_string()), + ); + ctx.deallocate_single_addr(condition); + ctx.deallocate_single_addr(division); + }); + self.brillig_context.deallocate_single_addr(is_right_zero); + self.brillig_context.deallocate_single_addr(zero); } _ => {} } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index b4ed59de59d9..fadcdb22c158 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -167,24 +167,14 @@ pub(crate) mod tests { ) -> Result { Ok(6_u128.into()) } - fn fixed_base_scalar_mul( + fn multi_scalar_mul( &self, - _low: &FieldElement, - _high: &FieldElement, + _points: &[FieldElement], + _scalars: &[FieldElement], ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Ok((4_u128.into(), 5_u128.into())) } - fn variable_base_scalar_mul( - &self, - _point_x: &FieldElement, - _point_y: &FieldElement, - _scalar_low: &FieldElement, - 
_scalar_high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Ok((7_u128.into(), 8_u128.into())) - } - fn ec_add( &self, _input1_x: &FieldElement, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 8b00939b3a71..667ccf6ddbee 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -266,6 +266,16 @@ impl DebugShow { /// Debug function for black_box_op pub(crate) fn black_box_op_instruction(&self, op: &BlackBoxOp) { match op { + BlackBoxOp::AES128Encrypt { inputs, iv, key, outputs } => { + debug_println!( + self.enable_debug_trace, + " AES128 ENCRYPT {} {} {} -> {}", + inputs, + iv, + key, + outputs + ); + } BlackBoxOp::Sha256 { message, output } => { debug_println!(self.enable_debug_trace, " SHA256 {} -> {}", message, output); } @@ -315,30 +325,13 @@ impl DebugShow { result ); } - BlackBoxOp::FixedBaseScalarMul { low, high, result } => { + BlackBoxOp::MultiScalarMul { points, scalars, outputs } => { debug_println!( self.enable_debug_trace, - " FIXED_BASE_SCALAR_MUL {} {} -> {}", - low, - high, - result - ); - } - BlackBoxOp::VariableBaseScalarMul { - point_x, - point_y, - scalar_low, - scalar_high, - result, - } => { - debug_println!( - self.enable_debug_trace, - " VARIABLE_BASE_SCALAR_MUL ({} {}) ({} {}) -> {}", - point_x, - point_y, - scalar_low, - scalar_high, - result + " MULTI_SCALAR_MUL {} {} -> {}", + points, + scalars, + outputs ); } BlackBoxOp::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, result } => { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index e844bc30354f..69e5f6ddfcc0 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ 
b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -68,6 +68,8 @@ pub(crate) fn optimize_into_acir( .run_pass(Ssa::mem2reg, "After Mem2Reg:") // Run the inlining pass again to handle functions with `InlineType::NoPredicates`. // Before flattening is run, we treat functions marked with the `InlineType::NoPredicates` as an entry point. + // This pass must come immediately following `mem2reg` as the succeeding passes + // may create an SSA which inlining fails to handle. .run_pass(Ssa::inline_functions_with_no_predicates, "After Inlining:") .run_pass(Ssa::remove_if_else, "After Remove IfElse:") .run_pass(Ssa::fold_constants, "After Constant Folding:") diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 2d546bc7d86e..e8f6f7b281a6 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -8,7 +8,7 @@ use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::types::Type as SsaType; use crate::ssa::ir::{instruction::Endian, types::NumericType}; use acvm::acir::circuit::brillig::{BrilligInputs, BrilligOutputs}; -use acvm::acir::circuit::opcodes::{BlockId, MemOp}; +use acvm::acir::circuit::opcodes::{BlockId, BlockType, MemOp}; use acvm::acir::circuit::{AssertionPayload, ExpressionOrMemory, Opcode}; use acvm::blackbox_solver; use acvm::brillig_vm::{MemoryValue, VMStatus, VM}; @@ -1324,6 +1324,21 @@ impl AcirContext { self.big_int_ctx.new_big_int(FieldElement::from(modulus_id as u128)); (modulus, vec![result_id.bigint_id(), result_id.modulus_id()]) } + BlackBoxFunc::AES128Encrypt => { + let invalid_input = "aes128_encrypt - operation requires a plaintext to encrypt"; + let input_size = match inputs.first().expect(invalid_input) { + AcirValue::Array(values) => Ok::(values.len()), + AcirValue::DynamicArray(dyn_array) 
=> Ok::(dyn_array.len), + _ => { + return Err(RuntimeError::InternalError(InternalError::General { + message: "aes128_encrypt requires an array of inputs".to_string(), + call_stack: self.get_call_stack(), + })); + } + }?; + output_count = input_size + (16 - input_size % 16); + (vec![], vec![FieldElement::from(output_count as u128)]) + } _ => (vec![], vec![]), }; @@ -1758,6 +1773,7 @@ impl AcirContext { block_id: BlockId, len: usize, optional_value: Option, + databus: BlockType, ) -> Result<(), InternalError> { let initialized_values = match optional_value { None => { @@ -1772,7 +1788,11 @@ impl AcirContext { } }; - self.acir_ir.push_opcode(Opcode::MemoryInit { block_id, init: initialized_values }); + self.acir_ir.push_opcode(Opcode::MemoryInit { + block_id, + init: initialized_values, + block_type: databus, + }); Ok(()) } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 2f4f4f9f6cc7..c1249ae41c85 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -188,6 +188,18 @@ impl GeneratedAcir { let outputs_clone = outputs.clone(); let black_box_func_call = match func_name { + BlackBoxFunc::AES128Encrypt => BlackBoxFuncCall::AES128Encrypt { + inputs: inputs[0].clone(), + iv: inputs[1] + .clone() + .try_into() + .expect("Compiler should generate correct size inputs"), + key: inputs[2] + .clone() + .try_into() + .expect("Compiler should generate correct size inputs"), + outputs, + }, BlackBoxFunc::AND => { BlackBoxFuncCall::AND { lhs: inputs[0][0], rhs: inputs[1][0], output: outputs[0] } } @@ -278,16 +290,9 @@ impl GeneratedAcir { output: outputs[0], } } - BlackBoxFunc::FixedBaseScalarMul => BlackBoxFuncCall::FixedBaseScalarMul { - low: inputs[0][0], - high: inputs[1][0], - outputs: (outputs[0], outputs[1]), 
- }, - BlackBoxFunc::VariableBaseScalarMul => BlackBoxFuncCall::VariableBaseScalarMul { - point_x: inputs[0][0], - point_y: inputs[1][0], - scalar_low: inputs[2][0], - scalar_high: inputs[3][0], + BlackBoxFunc::MultiScalarMul => BlackBoxFuncCall::MultiScalarMul { + points: inputs[0].clone(), + scalars: inputs[1].clone(), outputs: (outputs[0], outputs[1]), }, BlackBoxFunc::EmbeddedCurveAdd => BlackBoxFuncCall::EmbeddedCurveAdd { @@ -649,7 +654,8 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { // All of the hash/cipher methods will take in a // variable number of inputs. - BlackBoxFunc::Keccak256 + BlackBoxFunc::AES128Encrypt + | BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s | BlackBoxFunc::Blake3 @@ -672,13 +678,8 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { | BlackBoxFunc::EcdsaSecp256k1 | BlackBoxFunc::EcdsaSecp256r1 => None, - // Inputs for fixed based scalar multiplication - // is the low and high limbs of the scalar - BlackBoxFunc::FixedBaseScalarMul => Some(2), - - // Inputs for variable based scalar multiplication are the x and y coordinates of the base point and low - // and high limbs of the scalar - BlackBoxFunc::VariableBaseScalarMul => Some(4), + // Inputs for multi scalar multiplication is an arbitrary number of [point, scalar] pairs. + BlackBoxFunc::MultiScalarMul => None, // Recursive aggregation has a variable number of inputs BlackBoxFunc::RecursiveAggregation => None, @@ -734,9 +735,7 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { // Output of operations over the embedded curve // will be 2 field elements representing the point. 
- BlackBoxFunc::FixedBaseScalarMul - | BlackBoxFunc::VariableBaseScalarMul - | BlackBoxFunc::EmbeddedCurveAdd => Some(2), + BlackBoxFunc::MultiScalarMul | BlackBoxFunc::EmbeddedCurveAdd => Some(2), // Big integer operations return a big integer BlackBoxFunc::BigIntAdd @@ -750,6 +749,9 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { // Recursive aggregation has a variable number of outputs BlackBoxFunc::RecursiveAggregation => None, + + // AES encryption returns a variable number of outputs + BlackBoxFunc::AES128Encrypt => None, } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 9a4d4be1145d..0de0c28be75b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -29,6 +29,7 @@ use crate::brillig::brillig_ir::BrilligContext; use crate::brillig::{brillig_gen::brillig_fn::FunctionContext as BrilligFunctionContext, Brillig}; use crate::errors::{InternalError, InternalWarning, RuntimeError, SsaReport}; pub(crate) use acir_ir::generated_acir::GeneratedAcir; +use acvm::acir::circuit::opcodes::BlockType; use noirc_frontend::monomorphization::ast::InlineType; use acvm::acir::circuit::brillig::BrilligBytecode; @@ -976,13 +977,39 @@ impl<'a> Context<'a> { return Ok(()); } - let (new_index, new_value) = - self.convert_array_operation_inputs(array, dfg, index, store_value)?; + // Get an offset such that the type of the array at the offset is the same as the type at the 'index' + // If we find one, we will use it when computing the index under the enable_side_effect predicate + // If not, array_get(..) will use a fallback costing one multiplication in the worst case. + // cf. 
https://github.com/noir-lang/noir/pull/4971 + let array_id = dfg.resolve(array); + let array_typ = dfg.type_of_value(array_id); + // For simplicity we compute the offset only for simple arrays + let is_simple_array = dfg.instruction_results(instruction).len() == 1 + && can_omit_element_sizes_array(&array_typ); + let offset = if is_simple_array { + let result_type = dfg.type_of_value(dfg.instruction_results(instruction)[0]); + match array_typ { + Type::Array(item_type, _) | Type::Slice(item_type) => item_type + .iter() + .enumerate() + .find_map(|(index, typ)| (result_type == *typ).then_some(index)), + _ => None, + } + } else { + None + }; + let (new_index, new_value) = self.convert_array_operation_inputs( + array, + dfg, + index, + store_value, + offset.unwrap_or_default(), + )?; if let Some(new_value) = new_value { self.array_set(instruction, new_index, new_value, dfg, mutable_array_set)?; } else { - self.array_get(instruction, array, new_index, dfg)?; + self.array_get(instruction, array, new_index, dfg, offset.is_none())?; } Ok(()) @@ -1072,7 +1099,7 @@ impl<'a> Context<'a> { /// - new_index is the index of the array. ACIR memory operations work with a flat memory, so we fully flattened the specified index /// in case we have a nested array. The index for SSA array operations only represents the flattened index of the current array. /// Thus internal array element type sizes need to be computed to accurately transform the index. - /// - predicate_index is 0, or the index if the predicate is true + /// - predicate_index is offset, or the index if the predicate is true /// - new_value is the optional value when the operation is an array_set /// When there is a predicate, it is predicate*value + (1-predicate)*dummy, where dummy is the value of the array at the requested index. /// It is a dummy value because in the case of a false predicate, the value stored at the requested index will be itself. 
@@ -1082,14 +1109,18 @@ impl<'a> Context<'a> { dfg: &DataFlowGraph, index: ValueId, store_value: Option, + offset: usize, ) -> Result<(AcirVar, Option), RuntimeError> { let (array_id, array_typ, block_id) = self.check_array_is_initialized(array, dfg)?; let index_var = self.convert_numeric_value(index, dfg)?; let index_var = self.get_flattened_index(&array_typ, array_id, index_var, dfg)?; - let predicate_index = - self.acir_context.mul_var(index_var, self.current_side_effects_enabled_var)?; + // predicate_index = index*predicate + (1-predicate)*offset + let offset = self.acir_context.add_constant(offset); + let sub = self.acir_context.sub_var(index_var, offset)?; + let pred = self.acir_context.mul_var(sub, self.current_side_effects_enabled_var)?; + let predicate_index = self.acir_context.add_var(pred, offset)?; let new_value = if let Some(store) = store_value { let store_value = self.convert_value(store, dfg); @@ -1190,12 +1221,14 @@ impl<'a> Context<'a> { } /// Generates a read opcode for the array + /// `index_side_effect == false` means that we ensured `var_index` will have a type matching the value in the array fn array_get( &mut self, instruction: InstructionId, array: ValueId, mut var_index: AcirVar, dfg: &DataFlowGraph, + mut index_side_effect: bool, ) -> Result { let (array_id, _, block_id) = self.check_array_is_initialized(array, dfg)?; let results = dfg.instruction_results(instruction); @@ -1214,7 +1247,7 @@ impl<'a> Context<'a> { self.data_bus.call_data_map[&array_id] as i128, )); let new_index = self.acir_context.add_var(offset, bus_index)?; - return self.array_get(instruction, call_data, new_index, dfg); + return self.array_get(instruction, call_data, new_index, dfg, index_side_effect); } } @@ -1223,7 +1256,28 @@ impl<'a> Context<'a> { !res_typ.contains_slice_element(), "ICE: Nested slice result found during ACIR generation" ); - let value = self.array_get_value(&res_typ, block_id, &mut var_index)?; + let mut value = self.array_get_value(&res_typ, 
block_id, &mut var_index)?; + + if let AcirValue::Var(value_var, typ) = &value { + let array_id = dfg.resolve(array_id); + let array_typ = dfg.type_of_value(array_id); + if let (Type::Numeric(numeric_type), AcirType::NumericType(num)) = + (array_typ.first(), typ) + { + if numeric_type.bit_size() <= num.bit_size() { + // first element is compatible + index_side_effect = false; + } + } + // Fallback to multiplication if the index side_effects have not already been handled + if index_side_effect { + // Set the value to 0 if current_side_effects is 0, to ensure it fits in any value type + value = AcirValue::Var( + self.acir_context.mul_var(*value_var, self.current_side_effects_enabled_var)?, + typ.clone(), + ); + } + } self.define_result(dfg, instruction, value.clone()); @@ -1630,7 +1684,18 @@ impl<'a> Context<'a> { len: usize, value: Option, ) -> Result<(), InternalError> { - self.acir_context.initialize_array(array, len, value)?; + let databus = if self.data_bus.call_data.is_some() + && self.block_id(&self.data_bus.call_data.unwrap()) == array + { + BlockType::CallData + } else if self.data_bus.return_data.is_some() + && self.block_id(&self.data_bus.return_data.unwrap()) == array + { + BlockType::ReturnData + } else { + BlockType::Memory + }; + self.acir_context.initialize_array(array, len, value, databus)?; self.initialized_arrays.insert(array); Ok(()) } @@ -1784,15 +1849,15 @@ impl<'a> Context<'a> { let binary_type = AcirType::from(binary_type); let bit_count = binary_type.bit_size(); - - match binary.operator { + let num_type = binary_type.to_numeric_type(); + let result = match binary.operator { BinaryOp::Add => self.acir_context.add_var(lhs, rhs), BinaryOp::Sub => self.acir_context.sub_var(lhs, rhs), BinaryOp::Mul => self.acir_context.mul_var(lhs, rhs), BinaryOp::Div => self.acir_context.div_var( lhs, rhs, - binary_type, + binary_type.clone(), self.current_side_effects_enabled_var, ), // Note: that this produces unnecessary constraints when @@ -1816,7 +1881,71 
@@ impl<'a> Context<'a> { BinaryOp::Shl | BinaryOp::Shr => unreachable!( "ICE - bit shift operators do not exist in ACIR and should have been replaced" ), + }?; + + if let NumericType::Unsigned { bit_size } = &num_type { + // Check for integer overflow + self.check_unsigned_overflow( + result, + *bit_size, + binary.lhs, + binary.rhs, + dfg, + binary.operator, + )?; } + + Ok(result) + } + + /// Adds a range check against the bit size of the result of addition, subtraction or multiplication + fn check_unsigned_overflow( + &mut self, + result: AcirVar, + bit_size: u32, + lhs: ValueId, + rhs: ValueId, + dfg: &DataFlowGraph, + op: BinaryOp, + ) -> Result<(), RuntimeError> { + // We try to optimize away operations that are guaranteed not to overflow + let max_lhs_bits = dfg.get_value_max_num_bits(lhs); + let max_rhs_bits = dfg.get_value_max_num_bits(rhs); + + let msg = match op { + BinaryOp::Add => { + if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { + // `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. + return Ok(()); + } + "attempt to add with overflow".to_string() + } + BinaryOp::Sub => { + if dfg.is_constant(lhs) && max_lhs_bits > max_rhs_bits { + // `lhs` is a fixed constant and `rhs` is restricted such that `lhs - rhs > 0` + // Note strict inequality as `rhs > lhs` while `max_lhs_bits == max_rhs_bits` is possible. + return Ok(()); + } + "attempt to subtract with overflow".to_string() + } + BinaryOp::Mul => { + if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { + // Either performing boolean multiplication (which cannot overflow), + // or `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. 
+ return Ok(()); + } + "attempt to multiply with overflow".to_string() + } + _ => return Ok(()), + }; + + let with_pred = self.acir_context.mul_var(result, self.current_side_effects_enabled_var)?; + self.acir_context.range_constrain_var( + with_pred, + &NumericType::Unsigned { bit_size }, + Some(msg), + )?; + Ok(()) } /// Operands in a binary operation are checked to have the same type. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 3d7cb478f64c..7ad6a625f9c8 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -87,8 +87,18 @@ pub(super) fn simplify_call( Intrinsic::AsSlice => { let array = dfg.get_array_constant(arguments[0]); if let Some((array, array_type)) = array { - let slice_length = dfg.make_constant(array.len().into(), Type::length_type()); + // Compute the resulting slice length by dividing the flattened + // array length by the size of each array element + let elements_size = array_type.element_size(); let inner_element_types = array_type.element_types(); + assert_eq!( + 0, + array.len() % elements_size, + "expected array length to be multiple of its elements size" + ); + let slice_length_value = array.len() / elements_size; + let slice_length = + dfg.make_constant(slice_length_value.into(), Type::length_type()); let new_slice = dfg.make_array(array, Type::Slice(inner_element_types)); SimplifyResult::SimplifiedToMultiple(vec![slice_length, new_slice]) } else { @@ -454,8 +464,7 @@ fn simplify_black_box_func( simplify_signature(dfg, arguments, acvm::blackbox_solver::ecdsa_secp256r1_verify) } - BlackBoxFunc::FixedBaseScalarMul - | BlackBoxFunc::VariableBaseScalarMul + BlackBoxFunc::MultiScalarMul | BlackBoxFunc::SchnorrVerify | BlackBoxFunc::PedersenCommitment | BlackBoxFunc::PedersenHash @@ -483,6 +492,7 @@ fn 
simplify_black_box_func( ) } BlackBoxFunc::Sha256Compression => SimplifyResult::None, //TODO(Guillaume) + BlackBoxFunc::AES128Encrypt => SimplifyResult::None, } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs index 48036580d294..d72ad487f665 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -166,6 +166,14 @@ impl Type { other => panic!("element_types: Expected array or slice, found {other}"), } } + + pub(crate) fn first(&self) -> Type { + match self { + Type::Numeric(_) | Type::Function => self.clone(), + Type::Reference(typ) => typ.first(), + Type::Slice(element_types) | Type::Array(element_types, _) => element_types[0].first(), + } + } } /// Composite Types are essentially flattened struct or tuple types. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index c47d594545c8..80f6529b7b32 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -306,7 +306,7 @@ impl<'a> ValueMerger<'a> { // Arbitrarily limit this to looking at at most 10 past ArraySet operations. // If there are more than that, we assume 2 completely separate arrays are being merged. 
- let max_iters = 1; + let max_iters = 2; let mut seen_then = Vec::with_capacity(max_iters); let mut seen_else = Vec::with_capacity(max_iters); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs index 42727054503b..65a77552c791 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs @@ -109,7 +109,7 @@ impl Context<'_> { return InsertInstructionResult::SimplifiedTo(zero).first(); } } - let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ); + let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ.clone()); let max_lhs_bits = self.function.dfg.get_value_max_num_bits(lhs); @@ -123,15 +123,18 @@ impl Context<'_> { // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); let pow = self.pow(base, rhs_unsigned); - let pow = self.insert_cast(pow, typ); + let pow = self.insert_cast(pow, typ.clone()); (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) }; if max_bit <= bit_size { self.insert_binary(lhs, BinaryOp::Mul, pow) } else { - let result = self.insert_binary(lhs, BinaryOp::Mul, pow); - self.insert_truncate(result, bit_size, max_bit) + let lhs_field = self.insert_cast(lhs, Type::field()); + let pow_field = self.insert_cast(pow, Type::field()); + let result = self.insert_binary(lhs_field, BinaryOp::Mul, pow_field); + let result = self.insert_truncate(result, bit_size, max_bit); + self.insert_cast(result, typ) } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 02b9202b209f..ea37d857e580 100644 --- 
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -108,17 +108,19 @@ impl Context { fn responds_to_side_effects_var(dfg: &DataFlowGraph, instruction: &Instruction) -> bool { use Instruction::*; match instruction { - Binary(binary) => { - if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) { + Binary(binary) => match binary.operator { + BinaryOp::Add | BinaryOp::Sub | BinaryOp::Mul => { + dfg.type_of_value(binary.lhs).is_unsigned() + } + BinaryOp::Div | BinaryOp::Mod => { if let Some(rhs) = dfg.get_numeric_constant(binary.rhs) { rhs == FieldElement::zero() } else { true } - } else { - false } - } + _ => false, + }, Cast(_, _) | Not(_) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index f7ecdc8870de..ebcbfbabe73e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -304,7 +304,7 @@ impl<'a> FunctionContext<'a> { /// Insert constraints ensuring that the operation does not overflow the bit size of the result /// - /// If the result is unsigned, we simply range check against the bit size + /// If the result is unsigned, overflow will be checked during acir-gen (cf. issue #4456), except for bit-shifts, because we will convert them to field multiplication /// /// If the result is signed, we just prepare it for check_signed_overflow() by casting it to /// an unsigned value representing the signed integer. 
@@ -351,51 +351,12 @@ impl<'a> FunctionContext<'a> { } Type::Numeric(NumericType::Unsigned { bit_size }) => { let dfg = &self.builder.current_function.dfg; - - let max_lhs_bits = self.builder.current_function.dfg.get_value_max_num_bits(lhs); - let max_rhs_bits = self.builder.current_function.dfg.get_value_max_num_bits(rhs); + let max_lhs_bits = dfg.get_value_max_num_bits(lhs); match operator { - BinaryOpKind::Add => { - if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { - // `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. - return result; - } - - let message = "attempt to add with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); - } - BinaryOpKind::Subtract => { - if dfg.is_constant(lhs) && max_lhs_bits > max_rhs_bits { - // `lhs` is a fixed constant and `rhs` is restricted such that `lhs - rhs > 0` - // Note strict inequality as `rhs > lhs` while `max_lhs_bits == max_rhs_bits` is possible. - return result; - } - - let message = "attempt to subtract with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); - } - BinaryOpKind::Multiply => { - if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { - // Either performing boolean multiplication (which cannot overflow), - // or `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. 
- return result; - } - - let message = "attempt to multiply with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); + BinaryOpKind::Add | BinaryOpKind::Subtract | BinaryOpKind::Multiply => { + // Overflow check is deferred to acir-gen + return result; } BinaryOpKind::ShiftLeft => { if let Some(rhs_const) = dfg.get_numeric_constant(rhs) { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index 254ec4a75908..1c5a5c610aa7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -32,6 +32,7 @@ use iter_extended::vecmap; pub enum IntegerBitSize { One, Eight, + Sixteen, ThirtyTwo, SixtyFour, } @@ -48,6 +49,7 @@ impl From for u32 { match size { One => 1, Eight => 8, + Sixteen => 16, ThirtyTwo => 32, SixtyFour => 64, } @@ -64,6 +66,7 @@ impl TryFrom for IntegerBitSize { match value { 1 => Ok(One), 8 => Ok(Eight), + 16 => Ok(Sixteen), 32 => Ok(ThirtyTwo), 64 => Ok(SixtyFour), _ => Err(InvalidIntegerBitSizeError(value)), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 26b7c212a30d..84df3a0a2443 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -401,6 +401,14 @@ impl<'a> Interpreter<'a> { let value = if is_negative { 0u8.wrapping_sub(value) } else { value }; Ok(Value::U8(value)) } + (Signedness::Unsigned, IntegerBitSize::Sixteen) => { + let value: u16 = + value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( + InterpreterError::IntegerOutOfRangeForType { value, typ, location }, + )?; + let value = if is_negative { 0u16.wrapping_sub(value) } else { value }; + Ok(Value::U16(value)) + } (Signedness::Unsigned, 
IntegerBitSize::ThirtyTwo) => { let value: u32 = value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( @@ -430,6 +438,14 @@ impl<'a> Interpreter<'a> { let value = if is_negative { -value } else { value }; Ok(Value::I8(value)) } + (Signedness::Signed, IntegerBitSize::Sixteen) => { + let value: i16 = + value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( + InterpreterError::IntegerOutOfRangeForType { value, typ, location }, + )?; + let value = if is_negative { -value } else { value }; + Ok(Value::I16(value)) + } (Signedness::Signed, IntegerBitSize::ThirtyTwo) => { let value: i32 = value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( @@ -509,9 +525,11 @@ impl<'a> Interpreter<'a> { crate::ast::UnaryOp::Minus => match rhs { Value::Field(value) => Ok(Value::Field(FieldElement::zero() - value)), Value::I8(value) => Ok(Value::I8(-value)), + Value::I16(value) => Ok(Value::I16(-value)), Value::I32(value) => Ok(Value::I32(-value)), Value::I64(value) => Ok(Value::I64(-value)), Value::U8(value) => Ok(Value::U8(0 - value)), + Value::U16(value) => Ok(Value::U16(0 - value)), Value::U32(value) => Ok(Value::U32(0 - value)), Value::U64(value) => Ok(Value::U64(0 - value)), value => { @@ -523,9 +541,11 @@ impl<'a> Interpreter<'a> { crate::ast::UnaryOp::Not => match rhs { Value::Bool(value) => Ok(Value::Bool(!value)), Value::I8(value) => Ok(Value::I8(!value)), + Value::I16(value) => Ok(Value::I16(!value)), Value::I32(value) => Ok(Value::I32(!value)), Value::I64(value) => Ok(Value::I64(!value)), Value::U8(value) => Ok(Value::U8(!value)), + Value::U16(value) => Ok(Value::U16(!value)), Value::U32(value) => Ok(Value::U32(!value)), Value::U64(value) => Ok(Value::U64(!value)), value => { @@ -559,9 +579,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Add => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs + rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs + rhs)), + (Value::I16(lhs), Value::I16(rhs)) => 
Ok(Value::I16(lhs + rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs + rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs + rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs + rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs + rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs + rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs + rhs)), (lhs, rhs) => { @@ -572,9 +594,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Subtract => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs - rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs - rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs - rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs - rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs - rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs - rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs - rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs - rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs - rhs)), (lhs, rhs) => { @@ -585,9 +609,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Multiply => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs * rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs * rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs * rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs * rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs * rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs * rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs * rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs * rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs * rhs)), (lhs, rhs) => { @@ -598,9 +624,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Divide => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs / rhs)), 
(Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs / rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs / rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs / rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs / rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs / rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs / rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs / rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs / rhs)), (lhs, rhs) => { @@ -611,9 +639,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Equal => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs == rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs == rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs == rhs)), (lhs, rhs) => { @@ -624,9 +654,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::NotEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs != rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs != rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs != rhs)), (lhs, rhs) => { @@ -637,9 +669,11 @@ impl<'a> 
Interpreter<'a> { BinaryOpKind::Less => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs < rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs < rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs < rhs)), (lhs, rhs) => { @@ -650,9 +684,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::LessEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs <= rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs <= rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs <= rhs)), (lhs, rhs) => { @@ -663,9 +699,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Greater => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs > rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs > rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U32(lhs), Value::U32(rhs)) => 
Ok(Value::Bool(lhs > rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs > rhs)), (lhs, rhs) => { @@ -676,9 +714,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::GreaterEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs >= rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs >= rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs >= rhs)), (lhs, rhs) => { @@ -689,9 +729,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::And => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs & rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs & rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs & rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs & rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs & rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs & rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs & rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs & rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs & rhs)), (lhs, rhs) => { @@ -702,9 +744,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Or => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs | rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs | rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs | rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs | rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs | rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs | rhs)), + 
(Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs | rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs | rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs | rhs)), (lhs, rhs) => { @@ -715,9 +759,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Xor => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs ^ rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs ^ rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs ^ rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs ^ rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs ^ rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs ^ rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs ^ rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs ^ rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs ^ rhs)), (lhs, rhs) => { @@ -727,9 +773,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::ShiftRight => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs >> rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs >> rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs >> rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs >> rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs >> rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs >> rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs >> rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs >> rhs)), (lhs, rhs) => { @@ -739,9 +787,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::ShiftLeft => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs << rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs << rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs << rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs << rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs << rhs)), + (Value::U16(lhs), 
Value::U16(rhs)) => Ok(Value::U16(lhs << rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs << rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs << rhs)), (lhs, rhs) => { @@ -751,9 +801,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::Modulo => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs % rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs % rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs % rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs % rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs % rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs % rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs % rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs % rhs)), (lhs, rhs) => { @@ -795,9 +847,11 @@ impl<'a> Interpreter<'a> { value.try_to_u64().expect("index could not fit into u64") as usize } Value::I8(value) => value as usize, + Value::I16(value) => value as usize, Value::I32(value) => value as usize, Value::I64(value) => value as usize, Value::U8(value) => value as usize, + Value::U16(value) => value as usize, Value::U32(value) => value as usize, Value::U64(value) => value as usize, value => { @@ -908,9 +962,11 @@ impl<'a> Interpreter<'a> { let (mut lhs, lhs_is_negative) = match self.evaluate(cast.lhs)? 
{ Value::Field(value) => (value, false), Value::U8(value) => ((value as u128).into(), false), + Value::U16(value) => ((value as u128).into(), false), Value::U32(value) => ((value as u128).into(), false), Value::U64(value) => ((value as u128).into(), false), Value::I8(value) => signed_int_to_field!(value), + Value::I16(value) => signed_int_to_field!(value), Value::I32(value) => signed_int_to_field!(value), Value::I64(value) => signed_int_to_field!(value), Value::Bool(value) => { @@ -946,6 +1002,9 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::TypeUnsupported { typ: cast.r#type, location }) } (Signedness::Unsigned, IntegerBitSize::Eight) => cast_to_int!(lhs, to_u128, u8, U8), + (Signedness::Unsigned, IntegerBitSize::Sixteen) => { + cast_to_int!(lhs, to_u128, u16, U16) + } (Signedness::Unsigned, IntegerBitSize::ThirtyTwo) => { cast_to_int!(lhs, to_u128, u32, U32) } @@ -957,6 +1016,9 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::TypeUnsupported { typ: cast.r#type, location }) } (Signedness::Signed, IntegerBitSize::Eight) => cast_to_int!(lhs, to_i128, i8, I8), + (Signedness::Signed, IntegerBitSize::Sixteen) => { + cast_to_int!(lhs, to_i128, i16, I16) + } (Signedness::Signed, IntegerBitSize::ThirtyTwo) => { cast_to_int!(lhs, to_i128, i32, I32) } @@ -1149,9 +1211,11 @@ impl<'a> Interpreter<'a> { let get_index = |this: &mut Self, expr| -> IResult<(_, fn(_) -> _)> { match this.evaluate(expr)? 
{ Value::I8(value) => Ok((value as i128, |i| Value::I8(i as i8))), + Value::I16(value) => Ok((value as i128, |i| Value::I16(i as i16))), Value::I32(value) => Ok((value as i128, |i| Value::I32(i as i32))), Value::I64(value) => Ok((value as i128, |i| Value::I64(i as i64))), Value::U8(value) => Ok((value as i128, |i| Value::U8(i as u8))), + Value::U16(value) => Ok((value as i128, |i| Value::U16(i as u16))), Value::U32(value) => Ok((value as i128, |i| Value::U32(i as u32))), Value::U64(value) => Ok((value as i128, |i| Value::U64(i as u64))), value => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs index 5a12eb7292cb..41475d3ccf48 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -103,6 +103,19 @@ fn for_loop() { assert_eq!(result, Value::U8(15)); } +#[test] +fn for_loop_u16() { + let program = "fn main() -> pub u16 { + let mut x = 0; + for i in 0 .. 
6 { + x += i; + } + x + }"; + let result = interpret(program, vec!["main".into()]); + assert_eq!(result, Value::U16(15)); +} + #[test] fn for_loop_with_break() { let program = "unconstrained fn main() -> pub u32 { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs index 6845c6ac5a97..4e4a260871a2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -22,9 +22,11 @@ pub enum Value { Bool(bool), Field(FieldElement), I8(i8), + I16(i16), I32(i32), I64(i64), U8(u8), + U16(u16), U32(u32), U64(u64), String(Rc), @@ -45,9 +47,11 @@ impl Value { Value::Bool(_) => Type::Bool, Value::Field(_) => Type::FieldElement, Value::I8(_) => Type::Integer(Signedness::Signed, IntegerBitSize::Eight), + Value::I16(_) => Type::Integer(Signedness::Signed, IntegerBitSize::Sixteen), Value::I32(_) => Type::Integer(Signedness::Signed, IntegerBitSize::ThirtyTwo), Value::I64(_) => Type::Integer(Signedness::Signed, IntegerBitSize::SixtyFour), Value::U8(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight), + Value::U16(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::Sixteen), Value::U32(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::ThirtyTwo), Value::U64(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::SixtyFour), Value::String(value) => { @@ -87,6 +91,12 @@ impl Value { let value = (value as u128).into(); HirExpression::Literal(HirLiteral::Integer(value, negative)) } + Value::I16(value) => { + let negative = value < 0; + let value = value.abs(); + let value = (value as u128).into(); + HirExpression::Literal(HirLiteral::Integer(value, negative)) + } Value::I32(value) => { let negative = value < 0; let value = value.abs(); @@ -102,6 +112,9 @@ impl Value { Value::U8(value) => { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } + 
Value::U16(value) => { + HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) + } Value::U32(value) => { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs index 637f3c99e89c..f3b2a24c1f05 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs @@ -1787,7 +1787,7 @@ impl From<&Type> for PrintableType { match value { Type::FieldElement => PrintableType::Field, Type::Array(size, typ) => { - let length = size.evaluate_to_u64(); + let length = size.evaluate_to_u64().expect("Cannot print variable sized arrays"); let typ = typ.as_ref(); PrintableType::Array { length, typ: Box::new(typ.into()) } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs index 4bd501c07bab..15c27ee344c2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -5,13 +5,14 @@ use noirc_errors::{ Location, }; -use super::HirType; use crate::hir_def::function::FunctionSignature; use crate::{ ast::{BinaryOpKind, IntegerBitSize, Signedness, Visibility}, token::{Attributes, FunctionAttribute}, }; +use super::HirType; + /// The monomorphized AST is expression-based, all statements are also /// folded into this expression enum. 
Compared to the HIR, the monomorphized /// AST has several differences: diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index b627714d2a6f..b527284d1a9a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -1374,7 +1374,7 @@ mod test { fresh_statement(), true, ), - vec!["x as u8", "0 as Field", "(x + 3) as [Field; 8]"], + vec!["x as u8", "x as u16", "0 as Field", "(x + 3) as [Field; 8]"], ); parse_all_failing( atom_or_right_unary( @@ -1546,7 +1546,10 @@ mod test { // Let statements are not type checked here, so the parser will accept as // long as it is a type. Other statements such as Public are type checked // Because for now, they can only have one type - parse_all(declaration(expression()), vec!["let _ = 42", "let x = y", "let x : u8 = y"]); + parse_all( + declaration(expression()), + vec!["let _ = 42", "let x = y", "let x : u8 = y", "let x: u16 = y"], + ); } #[test] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 5f99e9e347ac..6f7470807be3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -1,1236 +1,1214 @@ +#![cfg(test)] + +#[cfg(test)] +mod name_shadowing; + // XXX: These tests repeat a lot of code // what we should do is have test cases which are passed to a test harness // A test harness will allow for more expressive and readable tests -#[cfg(test)] -mod test { - - use core::panic; - use std::collections::BTreeMap; - - use fm::FileId; - - use iter_extended::vecmap; - use noirc_errors::Location; - - use crate::hir::def_collector::dc_crate::CompilationError; - use crate::hir::def_collector::errors::{DefCollectorErrorKind, DuplicateType}; - use crate::hir::def_map::ModuleData; - use crate::hir::resolution::errors::ResolverError; - use 
crate::hir::resolution::import::PathResolutionError; - use crate::hir::type_check::TypeCheckError; - use crate::hir::Context; - use crate::node_interner::{NodeInterner, StmtId}; - - use crate::hir::def_collector::dc_crate::DefCollector; - use crate::hir_def::expr::HirExpression; - use crate::hir_def::stmt::HirStatement; - use crate::monomorphization::monomorphize; - use crate::parser::ParserErrorReason; - use crate::ParsedModule; - use crate::{ - hir::def_map::{CrateDefMap, LocalModuleId}, - parse_program, - }; - use fm::FileManager; - use noirc_arena::Arena; +use core::panic; +use std::collections::BTreeMap; + +use fm::FileId; + +use iter_extended::vecmap; +use noirc_errors::Location; + +use crate::hir::def_collector::dc_crate::CompilationError; +use crate::hir::def_collector::errors::{DefCollectorErrorKind, DuplicateType}; +use crate::hir::def_map::ModuleData; +use crate::hir::resolution::errors::ResolverError; +use crate::hir::resolution::import::PathResolutionError; +use crate::hir::type_check::TypeCheckError; +use crate::hir::Context; +use crate::node_interner::{NodeInterner, StmtId}; + +use crate::hir::def_collector::dc_crate::DefCollector; +use crate::hir_def::expr::HirExpression; +use crate::hir_def::stmt::HirStatement; +use crate::monomorphization::monomorphize; +use crate::parser::ParserErrorReason; +use crate::ParsedModule; +use crate::{ + hir::def_map::{CrateDefMap, LocalModuleId}, + parse_program, +}; +use fm::FileManager; +use noirc_arena::Arena; + +pub(crate) fn has_parser_error(errors: &[(CompilationError, FileId)]) -> bool { + errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) +} - pub(crate) fn has_parser_error(errors: &[(CompilationError, FileId)]) -> bool { - errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) - } +pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, FileId)>) { + errors.retain(|(error, _)| match error { + CompilationError::ParseError(error) => { + 
!matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) + } + _ => true, + }); +} - pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, FileId)>) { - errors.retain(|(error, _)| match error { - CompilationError::ParseError(error) => { - !matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) - } - _ => true, - }); - } - - pub(crate) fn get_program( - src: &str, - ) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { - let root = std::path::Path::new("/"); - let fm = FileManager::new(root); - - let mut context = Context::new(fm, Default::default()); - context.def_interner.populate_dummy_operator_traits(); - let root_file_id = FileId::dummy(); - let root_crate_id = context.crate_graph.add_crate_root(root_file_id); - - let (program, parser_errors) = parse_program(src); - let mut errors = vecmap(parser_errors, |e| (e.into(), root_file_id)); - remove_experimental_warnings(&mut errors); - - if !has_parser_error(&errors) { - // Allocate a default Module for the root, giving it a ModuleId - let mut modules: Arena = Arena::default(); - let location = Location::new(Default::default(), root_file_id); - let root = modules.insert(ModuleData::new(None, location, false)); - - let def_map = CrateDefMap { - root: LocalModuleId(root), - modules, - krate: root_crate_id, - extern_prelude: BTreeMap::new(), - }; +pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { + let root = std::path::Path::new("/"); + let fm = FileManager::new(root); + + let mut context = Context::new(fm, Default::default()); + context.def_interner.populate_dummy_operator_traits(); + let root_file_id = FileId::dummy(); + let root_crate_id = context.crate_graph.add_crate_root(root_file_id); + + let (program, parser_errors) = parse_program(src); + let mut errors = vecmap(parser_errors, |e| (e.into(), root_file_id)); + remove_experimental_warnings(&mut errors); + + if !has_parser_error(&errors) { + 
// Allocate a default Module for the root, giving it a ModuleId + let mut modules: Arena = Arena::default(); + let location = Location::new(Default::default(), root_file_id); + let root = modules.insert(ModuleData::new(None, location, false)); + + let def_map = CrateDefMap { + root: LocalModuleId(root), + modules, + krate: root_crate_id, + extern_prelude: BTreeMap::new(), + }; - // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect( - def_map, - &mut context, - program.clone().into_sorted(), - root_file_id, - &[], // No macro processors - )); - } - (program, context, errors) + // Now we want to populate the CrateDefMap using the DefCollector + errors.extend(DefCollector::collect( + def_map, + &mut context, + program.clone().into_sorted(), + root_file_id, + &[], // No macro processors + )); } + (program, context, errors) +} - pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { - get_program(src).2 - } +pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { + get_program(src).2 +} - #[test] - fn check_trait_implemented_for_all_t() { - let src = " - trait Default { - fn default() -> Self; - } - - trait Eq { - fn eq(self, other: Self) -> bool; +#[test] +fn check_trait_implemented_for_all_t() { + let src = " + trait Default { + fn default() -> Self; + } + + trait Eq { + fn eq(self, other: Self) -> bool; + } + + trait IsDefault { + fn is_default(self) -> bool; + } + + impl IsDefault for T where T: Default + Eq { + fn is_default(self) -> bool { + self.eq(T::default()) } - - trait IsDefault { - fn is_default(self) -> bool; + } + + struct Foo { + a: u64, + } + + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } + + impl Default for u64 { + fn default() -> Self { + 0 } - - impl IsDefault for T where T: Default + Eq { - fn is_default(self) -> bool { - self.eq(T::default()) - } + } + + impl Default for Foo { + fn default() -> Self { + Foo { a: 
Default::default() } } - - struct Foo { - a: u64, + } + + fn main(a: Foo) -> pub bool { + a.is_default() + }"; + + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} + +#[test] +fn check_trait_implementation_duplicate_method() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Field; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + y + 2 * x } - - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + x + 2 * y } - - impl Default for u64 { - fn default() -> Self { - 0 + } + + fn main() {}"; + + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ, + first_def, + second_def, + }) => { + assert_eq!(typ, &DuplicateType::TraitAssociatedFunction); + assert_eq!(first_def, "default"); + assert_eq!(second_def, "default"); } - } - - impl Default for Foo { - fn default() -> Self { - Foo { a: Default::default() } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); } - } - - fn main(a: Foo) -> pub bool { - a.is_default() - }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + }; } +} - #[test] - fn check_trait_implementation_duplicate_method() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Field; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_method_return_type() { + let src = " + trait Default { + fn default() -> Self; + } + + struct Foo { + } + + impl Default for Foo { + fn default() -> Field { + 0 } - - impl Default for Foo { - // Duplicate trait methods should not compile - fn default(x: Field, y: Field) -> Field { - y + 2 * x + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) => { + assert_eq!(expected_typ, "Foo"); + assert_eq!(expr_typ, "Field"); } - // Duplicate trait methods should not compile - fn default(x: Field, y: Field) -> Field { - x + 2 * y + _ => { + panic!("No other errors are expected! Found = {:?}", err); } - } - - fn main() {}"; - - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { - typ, - first_def, - second_def, - }) => { - assert_eq!(typ, &DuplicateType::TraitAssociatedFunction); - assert_eq!(first_def, "default"); - assert_eq!(second_def, "default"); - } - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; - } + }; } +} - #[test] - fn check_trait_wrong_method_return_type() { - let src = " - trait Default { - fn default() -> Self; - } - - struct Foo { +#[test] +fn check_trait_wrong_method_return_type2() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, _y: Field) -> Field { + x } - - impl Default for Foo { - fn default() -> Field { - 0 + } + + fn main() { + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) => { + assert_eq!(expected_typ, "Foo"); + assert_eq!(expr_typ, "Field"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, - expr_typ, - expr_span: _, - }) => { - assert_eq!(expected_typ, "Foo"); - assert_eq!(expr_typ, "Field"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_method_return_type2() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_missing_implementation() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + + fn method2(x: Field) -> Field; + + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field, _y: Field) -> Field { - x + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::TraitMissingMethod { + trait_name, + method_name, + trait_impl_span: _, + }) => { + assert_eq!(trait_name, "Default"); + assert_eq!(method_name, "method2"); } - } - - fn main() { - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, - expr_typ, - expr_span: _, - }) => { - assert_eq!(expected_typ, "Foo"); - assert_eq!(expr_typ, "Field"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_missing_implementation() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - - fn method2(x: Field) -> Field; - - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_not_in_scope() { + let src = " + struct Foo { + bar: Field, + array: [Field; 2], + } + + // Default trait does not exist + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::TraitNotFound { + trait_path, + }) => { + assert_eq!(trait_path.as_string(), "Default"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::TraitMissingMethod { - trait_name, - method_name, - trait_impl_span: _, - }) => { - assert_eq!(trait_name, "Default"); - assert_eq!(method_name, "method2"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_not_in_scope() { - let src = " - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_method_name() { + let src = " + trait Default { + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + // wrong trait name method should not compile + impl Default for Foo { + fn does_not_exist(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - // Default trait does not exist - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + }"; + let compilation_errors = get_program_errors(src); + assert!(!has_parser_error(&compilation_errors)); + assert!( + compilation_errors.len() == 1, + "Expected 1 compilation error, got: {:?}", + compilation_errors + ); + + for (err, _file_id) in compilation_errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::MethodNotInTrait { + trait_name, + impl_method, + }) => { + assert_eq!(trait_name, "Default"); + assert_eq!(impl_method, "does_not_exist"); } - } - - fn main() { - } - - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::TraitNotFound { - trait_path, - }) => { - assert_eq!(trait_path.as_string(), "Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_method_name() { - let src = " - trait Default { - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_parameter() { + let src = " + trait Default { + fn default(x: Field) -> Self; + } + + struct Foo { + bar: u32, + } + + impl Default for Foo { + fn default(x: u32) -> Self { + Foo {bar: x} } - - // wrong trait name method should not compile - impl Default for Foo { - fn does_not_exist(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name, + expected_typ, + actual_typ, + .. + }) => { + assert_eq!(method_name, "default"); + assert_eq!(expected_typ, "Field"); + assert_eq!(actual_typ, "u32"); } - } - - fn main() { - }"; - let compilation_errors = get_program_errors(src); - assert!(!has_parser_error(&compilation_errors)); - assert!( - compilation_errors.len() == 1, - "Expected 1 compilation error, got: {:?}", - compilation_errors - ); - - for (err, _file_id) in compilation_errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::MethodNotInTrait { - trait_name, - impl_method, - }) => { - assert_eq!(trait_name, "Default"); - assert_eq!(impl_method, "does_not_exist"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter() { - let src = " - trait Default { - fn default(x: Field) -> Self; - } - - struct Foo { - bar: u32, +#[test] +fn check_trait_wrong_parameter2() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, y: Foo) -> Self { + Self { bar: x, array: [x, y.bar] } } - - impl Default for Foo { - fn default(x: u32) -> Self { - Foo {bar: x} + } + + fn main() { + }"; + + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name, + expected_typ, + actual_typ, + .. + }) => { + assert_eq!(method_name, "default"); + assert_eq!(expected_typ, "Field"); + assert_eq!(actual_typ, "Foo"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name, - expected_typ, - actual_typ, - .. - }) => { - assert_eq!(method_name, "default"); - assert_eq!(expected_typ, "Field"); - assert_eq!(actual_typ, "u32"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter2() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], - } - - impl Default for Foo { - fn default(x: Field, y: Foo) -> Self { - Self { bar: x, array: [x, y.bar] } +#[test] +fn check_trait_wrong_parameter_type() { + let src = " + trait Default { + fn default(x: Field, y: NotAType) -> Field; + } + + fn main(x: Field, y: Field) { + assert(y == x); + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::PathResolutionError( + PathResolutionError::Unresolved(ident), + )) => { + assert_eq!(ident, "NotAType"); } - } - - fn main() { - }"; - - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name, - expected_typ, - actual_typ, - .. - }) => { - assert_eq!(method_name, "default"); - assert_eq!(expected_typ, "Field"); - assert_eq!(actual_typ, "Foo"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter_type() { - let src = " - trait Default { - fn default(x: Field, y: NotAType) -> Field; - } - - fn main(x: Field, y: Field) { - assert(y == x); - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::PathResolutionError( - PathResolutionError::Unresolved(ident), - )) => { - assert_eq!(ident, "NotAType"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } +#[test] +fn check_trait_wrong_parameters_count() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; } - - #[test] - fn check_trait_wrong_parameters_count() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field) -> Self { + Self { bar: x, array: [x, x] } } - - impl Default for Foo { - fn default(x: Field) -> Self { - Self { bar: x, array: [x, x] } + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { + actual_num_parameters, + expected_num_parameters, + trait_name, + method_name, + .. 
+ }) => { + assert_eq!(actual_num_parameters, &1_usize); + assert_eq!(expected_num_parameters, &2_usize); + assert_eq!(method_name, "default"); + assert_eq!(trait_name, "Default"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { - actual_num_parameters, - expected_num_parameters, - trait_name, - method_name, - .. - }) => { - assert_eq!(actual_num_parameters, &1_usize); - assert_eq!(expected_num_parameters, &2_usize); - assert_eq!(method_name, "default"); - assert_eq!(trait_name, "Default"); - } - _ => { - panic!("No other errors are expected in this test case! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected in this test case! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_impl_for_non_type() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Field; - } - - impl Default for main { - fn default(x: Field, y: Field) -> Field { - x + y - } - } +#[test] +fn check_trait_impl_for_non_type() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Field; + } - fn main() {} - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::Expected { - expected, got, .. - }) => { - assert_eq!(expected, "type"); - assert_eq!(got, "function"); - } - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; + impl Default for main { + fn default(x: Field, y: Field) -> Field { + x + y } } - #[test] - fn check_impl_struct_not_trait() { - let src = " - struct Foo { - bar: Field, - array: [Field; 2], - } - - struct Default { - x: Field, - z: Field, - } - - // Default is struct not a trait - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + fn main() {} + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::Expected { expected, got, .. }) => { + assert_eq!(expected, "type"); + assert_eq!(got, "function"); } - } - - fn main() { - } - - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::NotATrait { - not_a_trait_name, - }) => { - assert_eq!(not_a_trait_name.to_string(), "plain::Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_duplicate_declaration() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_impl_struct_not_trait() { + let src = " + struct Foo { + bar: Field, + array: [Field; 2], + } + + struct Default { + x: Field, + z: Field, + } + + // Default is struct not a trait + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::NotATrait { + not_a_trait_name, + }) => { + assert_eq!(not_a_trait_name.to_string(), "plain::Default"); } - } - - - trait Default { - fn default(x: Field) -> Self; - } - - fn main() { - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { - typ, - first_def, - second_def, - }) => { - assert_eq!(typ, &DuplicateType::Trait); - assert_eq!(first_def, "Default"); - assert_eq!(second_def, "Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_duplicate_implementation() { - let src = " - trait Default { - } - struct Foo { - bar: Field, - } - - impl Default for Foo { - } - impl Default for Foo { - } - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { - .. - }) => (), - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { - .. - }) => (), - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } +#[test] +fn check_trait_duplicate_declaration() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; } - - #[test] - fn check_trait_duplicate_implementation_with_alias() { - let src = " - trait Default { - } - - struct MyStruct { - } - - type MyType = MyStruct; - - impl Default for MyStruct { - } - - impl Default for MyType { - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { - .. - }) => (), - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { - .. - }) => (), - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field,y: Field) -> Self { + Self { bar: x, array: [x,y] } } } + + + trait Default { + fn default(x: Field) -> Self; + } + + fn main() { + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ, + first_def, + second_def, + }) => { + assert_eq!(typ, &DuplicateType::Trait); + assert_eq!(first_def, "Default"); + assert_eq!(second_def, "Default"); + } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; + } +} - #[test] - fn test_impl_self_within_default_def() { - let src = " - trait Bar { - fn ok(self) -> Self; - - fn ref_ok(self) -> Self { - self.ok() +#[test] +fn check_trait_duplicate_implementation() { + let src = " + trait Default { + } + struct Foo { + bar: Field, + } + + impl Default for Foo { + } + impl Default for Foo { + } + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { + .. + }) => (), + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { + .. + }) => (), + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); } - } + }; + } +} - impl Bar for (T, T) where T: Bar { - fn ok(self) -> Self { - self +#[test] +fn check_trait_duplicate_implementation_with_alias() { + let src = " + trait Default { + } + + struct MyStruct { + } + + type MyType = MyStruct; + + impl Default for MyStruct { + } + + impl Default for MyType { + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { + .. + }) => (), + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { + .. + }) => (), + _ => { + panic!("No other errors are expected! Found = {:?}", err); } - }"; - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + }; } +} - #[test] - fn check_trait_as_type_as_fn_parameter() { - let src = " - trait Eq { - fn eq(self, other: Self) -> bool; - } +#[test] +fn test_impl_self_within_default_def() { + let src = " + trait Bar { + fn ok(self) -> Self; - struct Foo { - a: u64, + fn ref_ok(self) -> Self { + self.ok() } + } - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + impl Bar for (T, T) where T: Bar { + fn ok(self) -> Self { + self } + }"; + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} - fn test_eq(x: impl Eq) -> bool { - x.eq(x) - } +#[test] +fn check_trait_as_type_as_fn_parameter() { + let src = " + trait Eq { + fn eq(self, other: Self) -> bool; + } - fn main(a: Foo) -> pub bool { - test_eq(a) - }"; + struct Foo { + a: u64, + } - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } } - #[test] - 
fn check_trait_as_type_as_two_fn_parameters() { - let src = " - trait Eq { - fn eq(self, other: Self) -> bool; - } + fn test_eq(x: impl Eq) -> bool { + x.eq(x) + } - trait Test { - fn test(self) -> bool; - } + fn main(a: Foo) -> pub bool { + test_eq(a) + }"; - struct Foo { - a: u64, - } + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } - } +#[test] +fn check_trait_as_type_as_two_fn_parameters() { + let src = " + trait Eq { + fn eq(self, other: Self) -> bool; + } - impl Test for u64 { - fn test(self) -> bool { self == self } - } + trait Test { + fn test(self) -> bool; + } - fn test_eq(x: impl Eq, y: impl Test) -> bool { - x.eq(x) == y.test() - } + struct Foo { + a: u64, + } - fn main(a: Foo, b: u64) -> pub bool { - test_eq(a, b) - }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); - } - - fn get_program_captures(src: &str) -> Vec> { - let (program, context, _errors) = get_program(src); - let interner = context.def_interner; - let mut all_captures: Vec> = Vec::new(); - for func in program.into_sorted().functions { - let func_id = interner.find_function(func.name()).unwrap(); - let hir_func = interner.function(&func_id); - // Iterate over function statements and apply filtering function - find_lambda_captures( - hir_func.block(&interner).statements(), - &interner, - &mut all_captures, - ); - } - all_captures - } - - fn find_lambda_captures( - stmts: &[StmtId], - interner: &NodeInterner, - result: &mut Vec>, - ) { - for stmt_id in stmts.iter() { - let hir_stmt = interner.statement(stmt_id); - let expr_id = match hir_stmt { - HirStatement::Expression(expr_id) => expr_id, - HirStatement::Let(let_stmt) => let_stmt.expression, - HirStatement::Assign(assign_stmt) => assign_stmt.expression, - HirStatement::Constrain(constr_stmt) => 
constr_stmt.0, - HirStatement::Semi(semi_expr) => semi_expr, - HirStatement::For(for_loop) => for_loop.block, - HirStatement::Error => panic!("Invalid HirStatement!"), - HirStatement::Break => panic!("Unexpected break"), - HirStatement::Continue => panic!("Unexpected continue"), - HirStatement::Comptime(_) => panic!("Unexpected comptime"), - }; - let expr = interner.expression(&expr_id); + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } - get_lambda_captures(expr, interner, result); // TODO: dyn filter function as parameter - } + impl Test for u64 { + fn test(self) -> bool { self == self } } - fn get_lambda_captures( - expr: HirExpression, - interner: &NodeInterner, - result: &mut Vec>, - ) { - if let HirExpression::Lambda(lambda_expr) = expr { - let mut cur_capture = Vec::new(); + fn test_eq(x: impl Eq, y: impl Test) -> bool { + x.eq(x) == y.test() + } - for capture in lambda_expr.captures.iter() { - cur_capture.push(interner.definition(capture.ident.id).name.clone()); - } - result.push(cur_capture); + fn main(a: Foo, b: u64) -> pub bool { + test_eq(a, b) + }"; - // Check for other captures recursively within the lambda body - let hir_body_expr = interner.expression(&lambda_expr.body); - if let HirExpression::Block(block_expr) = hir_body_expr { - find_lambda_captures(block_expr.statements(), interner, result); - } - } + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} + +fn get_program_captures(src: &str) -> Vec> { + let (program, context, _errors) = get_program(src); + let interner = context.def_interner; + let mut all_captures: Vec> = Vec::new(); + for func in program.into_sorted().functions { + let func_id = interner.find_function(func.name()).unwrap(); + let hir_func = interner.function(&func_id); + // Iterate over function statements and apply filtering function + find_lambda_captures(hir_func.block(&interner).statements(), &interner, &mut all_captures); 
} + all_captures +} - #[test] - fn resolve_empty_function() { - let src = " - fn main() { +fn find_lambda_captures(stmts: &[StmtId], interner: &NodeInterner, result: &mut Vec>) { + for stmt_id in stmts.iter() { + let hir_stmt = interner.statement(stmt_id); + let expr_id = match hir_stmt { + HirStatement::Expression(expr_id) => expr_id, + HirStatement::Let(let_stmt) => let_stmt.expression, + HirStatement::Assign(assign_stmt) => assign_stmt.expression, + HirStatement::Constrain(constr_stmt) => constr_stmt.0, + HirStatement::Semi(semi_expr) => semi_expr, + HirStatement::For(for_loop) => for_loop.block, + HirStatement::Error => panic!("Invalid HirStatement!"), + HirStatement::Break => panic!("Unexpected break"), + HirStatement::Continue => panic!("Unexpected continue"), + HirStatement::Comptime(_) => panic!("Unexpected comptime"), + }; + let expr = interner.expression(&expr_id); - } - "; - assert!(get_program_errors(src).is_empty()); - } - #[test] - fn resolve_basic_function() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(y == x); - } - "#; - assert!(get_program_errors(src).is_empty()); - } - #[test] - fn resolve_unused_var() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(x == x); - } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - // It should be regarding the unused variable - match &errors[0].0 { - CompilationError::ResolverError(ResolverError::UnusedVariable { ident }) => { - assert_eq!(&ident.0.contents, "y"); - } - _ => unreachable!("we should only have an unused var error"), - } + get_lambda_captures(expr, interner, result); // TODO: dyn filter function as parameter } +} - #[test] - fn resolve_unresolved_var() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(y == z); - } - "#; - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - // It should be regarding the unresolved var `z` 
(Maybe change to undeclared and special case) - match &errors[0].0 { - CompilationError::ResolverError(ResolverError::VariableNotDeclared { - name, - span: _, - }) => assert_eq!(name, "z"), - _ => unimplemented!("we should only have an unresolved variable"), +fn get_lambda_captures( + expr: HirExpression, + interner: &NodeInterner, + result: &mut Vec>, +) { + if let HirExpression::Lambda(lambda_expr) = expr { + let mut cur_capture = Vec::new(); + + for capture in lambda_expr.captures.iter() { + cur_capture.push(interner.definition(capture.ident.id).name.clone()); + } + result.push(cur_capture); + + // Check for other captures recursively within the lambda body + let hir_body_expr = interner.expression(&lambda_expr.body); + if let HirExpression::Block(block_expr) = hir_body_expr { + find_lambda_captures(block_expr.statements(), interner, result); } } +} + +#[test] +fn resolve_empty_function() { + let src = " + fn main() { - #[test] - fn unresolved_path() { - let src = " - fn main(x : Field) { - let _z = some::path::to::a::func(x); - } - "; - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (compilation_error, _file_id) in errors { - match compilation_error { - CompilationError::ResolverError(err) => { - match err { - ResolverError::PathResolutionError(PathResolutionError::Unresolved( - name, - )) => { - assert_eq!(name.to_string(), "some"); - } - _ => unimplemented!("we should only have an unresolved function"), - }; - } - _ => unimplemented!(), - } } + "; + assert!(get_program_errors(src).is_empty()); +} +#[test] +fn resolve_basic_function() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(y == x); + } + "#; + assert!(get_program_errors(src).is_empty()); +} +#[test] +fn resolve_unused_var() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(x == x); + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); 
+ // It should be regarding the unused variable + match &errors[0].0 { + CompilationError::ResolverError(ResolverError::UnusedVariable { ident }) => { + assert_eq!(&ident.0.contents, "y"); + } + _ => unreachable!("we should only have an unused var error"), } +} - #[test] - fn resolve_literal_expr() { - let src = r#" - fn main(x : Field) { - let y = 5; - assert(y == x); - } - "#; - assert!(get_program_errors(src).is_empty()); +#[test] +fn resolve_unresolved_var() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(y == z); + } + "#; + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + // It should be regarding the unresolved var `z` (Maybe change to undeclared and special case) + match &errors[0].0 { + CompilationError::ResolverError(ResolverError::VariableNotDeclared { name, span: _ }) => { + assert_eq!(name, "z"); + } + _ => unimplemented!("we should only have an unresolved variable"), } +} - #[test] - fn multiple_resolution_errors() { - let src = r#" - fn main(x : Field) { - let y = foo::bar(x); - let z = y + a; - } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); - - // Errors are: - // `a` is undeclared - // `z` is unused - // `foo::bar` does not exist - for (compilation_error, _file_id) in errors { - match compilation_error { - CompilationError::ResolverError(err) => { - match err { - ResolverError::UnusedVariable { ident } => { - assert_eq!(&ident.0.contents, "z"); - } - ResolverError::VariableNotDeclared { name, .. 
} => { - assert_eq!(name, "a"); - } - ResolverError::PathResolutionError(PathResolutionError::Unresolved( - name, - )) => { - assert_eq!(name.to_string(), "foo"); - } - _ => unimplemented!(), - }; - } - _ => unimplemented!(), +#[test] +fn unresolved_path() { + let src = " + fn main(x : Field) { + let _z = some::path::to::a::func(x); + } + "; + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (compilation_error, _file_id) in errors { + match compilation_error { + CompilationError::ResolverError(err) => { + match err { + ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { + assert_eq!(name.to_string(), "some"); + } + _ => unimplemented!("we should only have an unresolved function"), + }; } + _ => unimplemented!(), } } +} - #[test] - fn resolve_prefix_expr() { - let src = r#" - fn main(x : Field) { - let _y = -x; - } - "#; - assert!(get_program_errors(src).is_empty()); - } +#[test] +fn resolve_literal_expr() { + let src = r#" + fn main(x : Field) { + let y = 5; + assert(y == x); + } + "#; + assert!(get_program_errors(src).is_empty()); +} - #[test] - fn resolve_for_expr() { - let src = r#" - fn main(x : u64) { - for i in 1..20 { - let _z = x + i; +#[test] +fn multiple_resolution_errors() { + let src = r#" + fn main(x : Field) { + let y = foo::bar(x); + let z = y + a; + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); + + // Errors are: + // `a` is undeclared + // `z` is unused + // `foo::bar` does not exist + for (compilation_error, _file_id) in errors { + match compilation_error { + CompilationError::ResolverError(err) => { + match err { + ResolverError::UnusedVariable { ident } => { + assert_eq!(&ident.0.contents, "z"); + } + ResolverError::VariableNotDeclared { name, .. 
} => { + assert_eq!(name, "a"); + } + ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { + assert_eq!(name.to_string(), "foo"); + } + _ => unimplemented!(), }; } - "#; - assert!(get_program_errors(src).is_empty()); + _ => unimplemented!(), + } } +} - #[test] - fn resolve_call_expr() { - let src = r#" - fn main(x : Field) { - let _z = foo(x); - } +#[test] +fn resolve_prefix_expr() { + let src = r#" + fn main(x : Field) { + let _y = -x; + } + "#; + assert!(get_program_errors(src).is_empty()); +} - fn foo(x : Field) -> Field { - x - } - "#; - assert!(get_program_errors(src).is_empty()); - } - - #[test] - fn resolve_shadowing() { - let src = r#" - fn main(x : Field) { - let x = foo(x); - let x = x; - let (x, x) = (x, x); - let _ = x; - } +#[test] +fn resolve_for_expr() { + let src = r#" + fn main(x : u64) { + for i in 1..20 { + let _z = x + i; + }; + } + "#; + assert!(get_program_errors(src).is_empty()); +} - fn foo(x : Field) -> Field { - x - } - "#; - assert!(get_program_errors(src).is_empty()); - } +#[test] +fn resolve_call_expr() { + let src = r#" + fn main(x : Field) { + let _z = foo(x); + } - #[test] - fn resolve_basic_closure() { - let src = r#" - fn main(x : Field) -> pub Field { - let closure = |y| y + x; - closure(x) - } - "#; - assert!(get_program_errors(src).is_empty()); - } + fn foo(x : Field) -> Field { + x + } + "#; + assert!(get_program_errors(src).is_empty()); +} - #[test] - fn resolve_simplified_closure() { - // based on bug https://github.com/noir-lang/noir/issues/1088 +#[test] +fn resolve_shadowing() { + let src = r#" + fn main(x : Field) { + let x = foo(x); + let x = x; + let (x, x) = (x, x); + let _ = x; + } - let src = r#"fn do_closure(x: Field) -> Field { - let y = x; - let ret_capture = || { - y - }; - ret_capture() - } - - fn main(x: Field) { - assert(do_closure(x) == 100); - } - - "#; - let parsed_captures = get_program_captures(src); - let expected_captures = vec![vec!["y".to_string()]]; - 
assert_eq!(expected_captures, parsed_captures); - } - - #[test] - fn resolve_complex_closures() { - let src = r#" - fn main(x: Field) -> pub Field { - let closure_without_captures = |x: Field| -> Field { x + x }; - let a = closure_without_captures(1); - - let closure_capturing_a_param = |y: Field| -> Field { y + x }; - let b = closure_capturing_a_param(2); - - let closure_capturing_a_local_var = |y: Field| -> Field { y + b }; - let c = closure_capturing_a_local_var(3); - - let closure_with_transitive_captures = |y: Field| -> Field { - let d = 5; - let nested_closure = |z: Field| -> Field { - let doubly_nested_closure = |w: Field| -> Field { w + x + b }; - a + z + y + d + x + doubly_nested_closure(4) + x + y - }; - let res = nested_closure(5); - res + fn foo(x : Field) -> Field { + x + } + "#; + assert!(get_program_errors(src).is_empty()); +} + +#[test] +fn resolve_basic_closure() { + let src = r#" + fn main(x : Field) -> pub Field { + let closure = |y| y + x; + closure(x) + } + "#; + assert!(get_program_errors(src).is_empty()); +} + +#[test] +fn resolve_simplified_closure() { + // based on bug https://github.com/noir-lang/noir/issues/1088 + + let src = r#"fn do_closure(x: Field) -> Field { + let y = x; + let ret_capture = || { + y + }; + ret_capture() + } + + fn main(x: Field) { + assert(do_closure(x) == 100); + } + + "#; + let parsed_captures = get_program_captures(src); + let expected_captures = vec![vec!["y".to_string()]]; + assert_eq!(expected_captures, parsed_captures); +} + +#[test] +fn resolve_complex_closures() { + let src = r#" + fn main(x: Field) -> pub Field { + let closure_without_captures = |x: Field| -> Field { x + x }; + let a = closure_without_captures(1); + + let closure_capturing_a_param = |y: Field| -> Field { y + x }; + let b = closure_capturing_a_param(2); + + let closure_capturing_a_local_var = |y: Field| -> Field { y + b }; + let c = closure_capturing_a_local_var(3); + + let closure_with_transitive_captures = |y: Field| -> Field { + 
let d = 5; + let nested_closure = |z: Field| -> Field { + let doubly_nested_closure = |w: Field| -> Field { w + x + b }; + a + z + y + d + x + doubly_nested_closure(4) + x + y }; + let res = nested_closure(5); + res + }; + + a + b + c + closure_with_transitive_captures(6) + } + "#; + assert!(get_program_errors(src).is_empty(), "there should be no errors"); + + let expected_captures = vec![ + vec![], + vec!["x".to_string()], + vec!["b".to_string()], + vec!["x".to_string(), "b".to_string(), "a".to_string()], + vec!["x".to_string(), "b".to_string(), "a".to_string(), "y".to_string(), "d".to_string()], + vec!["x".to_string(), "b".to_string()], + ]; + + let parsed_captures = get_program_captures(src); + + assert_eq!(expected_captures, parsed_captures); +} + +#[test] +fn resolve_fmt_strings() { + let src = r#" + fn main() { + let string = f"this is i: {i}"; + println(string); + + println(f"I want to print {0}"); + + let new_val = 10; + println(f"random_string{new_val}{new_val}"); + } + fn println(x : T) -> T { + x + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); - a + b + c + closure_with_transitive_captures(6) + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::VariableNotDeclared { + name, .. 
+ }) => { + assert_eq!(name, "i"); } - "#; - assert!(get_program_errors(src).is_empty(), "there should be no errors"); - - let expected_captures = vec![ - vec![], - vec!["x".to_string()], - vec!["b".to_string()], - vec!["x".to_string(), "b".to_string(), "a".to_string()], - vec![ - "x".to_string(), - "b".to_string(), - "a".to_string(), - "y".to_string(), - "d".to_string(), - ], - vec!["x".to_string(), "b".to_string()], - ]; - - let parsed_captures = get_program_captures(src); - - assert_eq!(expected_captures, parsed_captures); - } - - #[test] - fn resolve_fmt_strings() { - let src = r#" - fn main() { - let string = f"this is i: {i}"; - println(string); - - println(f"I want to print {0}"); - - let new_val = 10; - println(f"random_string{new_val}{new_val}"); + CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { + name, + .. + }) => { + assert_eq!(name, "0"); } - fn println(x : T) -> T { - x + CompilationError::TypeError(TypeCheckError::UnusedResultError { + expr_type: _, + expr_span, + }) => { + let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); + assert!( + a == "println(string)" + || a == "println(f\"I want to print {0}\")" + || a == "println(f\"random_string{new_val}{new_val}\")" + ); } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::VariableNotDeclared { - name, - .. - }) => { - assert_eq!(name, "i"); - } - CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { - name, - .. 
- }) => { - assert_eq!(name, "0"); - } - CompilationError::TypeError(TypeCheckError::UnusedResultError { - expr_type: _, - expr_span, - }) => { - let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); - assert!( - a == "println(string)" - || a == "println(f\"I want to print {0}\")" - || a == "println(f\"random_string{new_val}{new_val}\")" - ); - } - _ => unimplemented!(), - }; - } + _ => unimplemented!(), + }; } +} - fn check_rewrite(src: &str, expected: &str) { - let (_program, mut context, _errors) = get_program(src); - let main_func_id = context.def_interner.find_function("main").unwrap(); - let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); - assert!(format!("{}", program) == expected); - } +fn check_rewrite(src: &str, expected: &str) { + let (_program, mut context, _errors) = get_program(src); + let main_func_id = context.def_interner.find_function("main").unwrap(); + let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); + assert!(format!("{}", program) == expected); +} - #[test] - fn simple_closure_with_no_captured_variables() { - let src = r#" - fn main() -> pub Field { - let x = 1; - let closure = || x; - closure() - } - "#; +#[test] +fn simple_closure_with_no_captured_variables() { + let src = r#" + fn main() -> pub Field { + let x = 1; + let closure = || x; + closure() + } + "#; - let expected_rewrite = r#"fn main$f0() -> Field { + let expected_rewrite = r#"fn main$f0() -> Field { let x$0 = 1; let closure$3 = { let closure_variable$2 = { @@ -1248,167 +1226,154 @@ fn lambda$f1(mut env$l1: (Field)) -> Field { env$l1.0 } "#; - check_rewrite(src, expected_rewrite); - } - - #[test] - fn deny_mutually_recursive_structs() { - let src = r#" - struct Foo { bar: Bar } - struct Bar { foo: Foo } - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_cyclic_globals() { - let src = r#" - global A = B; - global B = A; - fn main() {} - "#; - 
assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_cyclic_type_aliases() { - let src = r#" - type A = B; - type B = A; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn ensure_nested_type_aliases_type_check() { - let src = r#" - type A = B; - type B = u8; - fn main() { - let _a: A = 0 as u16; - } - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn type_aliases_in_entry_point() { - let src = r#" - type Foo = u8; - fn main(_x: Foo) {} - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - #[test] - fn operators_in_global_used_in_type() { - let src = r#" - global ONE = 1; - global COUNT = ONE + 2; - fn main() { - let _array: [Field; COUNT] = [1, 2, 3]; - } - "#; - assert_eq!(get_program_errors(src).len(), 0); - } + check_rewrite(src, expected_rewrite); +} - #[test] - fn break_and_continue_in_constrained_fn() { - let src = r#" - fn main() { - for i in 0 .. 10 { - if i == 2 { - continue; - } - if i == 5 { - break; - } +#[test] +fn deny_cyclic_globals() { + let src = r#" + global A = B; + global B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn deny_cyclic_type_aliases() { + let src = r#" + type A = B; + type B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn ensure_nested_type_aliases_type_check() { + let src = r#" + type A = B; + type B = u8; + fn main() { + let _a: A = 0 as u16; + } + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn type_aliases_in_entry_point() { + let src = r#" + type Foo = u8; + fn main(_x: Foo) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn operators_in_global_used_in_type() { + let src = r#" + global ONE = 1; + global COUNT = ONE + 2; + fn main() { + let _array: [Field; COUNT] = [1, 2, 3]; + } + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn break_and_continue_in_constrained_fn() { + let src = r#" + 
fn main() { + for i in 0 .. 10 { + if i == 2 { + continue; + } + if i == 5 { + break; } } - "#; - assert_eq!(get_program_errors(src).len(), 2); - } + } + "#; + assert_eq!(get_program_errors(src).len(), 2); +} - #[test] - fn break_and_continue_outside_loop() { - let src = r#" - unconstrained fn main() { - continue; - break; - } - "#; - assert_eq!(get_program_errors(src).len(), 2); - } +#[test] +fn break_and_continue_outside_loop() { + let src = r#" + unconstrained fn main() { + continue; + break; + } + "#; + assert_eq!(get_program_errors(src).len(), 2); +} - // Regression for #2540 - #[test] - fn for_loop_over_array() { - let src = r#" - fn hello(_array: [u1; N]) { - for _ in 0..N {} - } +// Regression for #2540 +#[test] +fn for_loop_over_array() { + let src = r#" + fn hello(_array: [u1; N]) { + for _ in 0..N {} + } - fn main() { - let array: [u1; 2] = [0, 1]; - hello(array); - } - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - // Regression for #4545 - #[test] - fn type_aliases_in_main() { - let src = r#" - type Outer = [u8; N]; - fn main(_arg: Outer<1>) {} - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - #[test] - fn ban_mutable_globals() { - // Mutable globals are only allowed in a comptime context - let src = r#" - mut global FOO: Field = 0; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_inline_attribute_on_unconstrained() { - let src = r#" - #[no_predicates] - unconstrained fn foo(x: Field, y: Field) { - assert(x != y); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError( - ResolverError::NoPredicatesAttributeOnUnconstrained { .. 
} - ) - )); - } + fn main() { + let array: [u1; 2] = [0, 1]; + hello(array); + } + "#; + assert_eq!(get_program_errors(src).len(), 0); +} - #[test] - fn deny_fold_attribute_on_unconstrained() { - let src = r#" - #[fold] - unconstrained fn foo(x: Field, y: Field) { - assert(x != y); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError(ResolverError::FoldAttributeOnUnconstrained { .. }) - )); - } +// Regression for #4545 +#[test] +fn type_aliases_in_main() { + let src = r#" + type Outer = [u8; N]; + fn main(_arg: Outer<1>) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn ban_mutable_globals() { + // Mutable globals are only allowed in a comptime context + let src = r#" + mut global FOO: Field = 0; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn deny_inline_attribute_on_unconstrained() { + let src = r#" + #[no_predicates] + unconstrained fn foo(x: Field, y: Field) { + assert(x != y); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NoPredicatesAttributeOnUnconstrained { .. }) + )); +} + +#[test] +fn deny_fold_attribute_on_unconstrained() { + let src = r#" + #[fold] + unconstrained fn foo(x: Field, y: Field) { + assert(x != y); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::FoldAttributeOnUnconstrained { .. 
}) + )); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs new file mode 100644 index 000000000000..b0d835100398 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs @@ -0,0 +1,419 @@ +#![cfg(test)] +use super::get_program_errors; +use std::collections::HashSet; + +#[test] +fn test_name_shadowing() { + let src = " + trait Default { + fn default() -> Self; + } + + impl Default for bool { + fn default() -> bool { + false + } + } + + impl Default for Field { + fn default() -> Field { + 0 + } + } + + impl Default for [T; N] where T: Default { + fn default() -> [T; N] { + [Default::default(); N] + } + } + + impl Default for (T, U) where T: Default, U: Default { + fn default() -> (T, U) { + (Default::default(), Default::default()) + } + } + + fn drop_var(_x: T, y: U) -> U { y } + + mod local_module { + use crate::{Default, drop_var}; + + global LOCAL_GLOBAL_N: Field = 0; + + global LOCAL_GLOBAL_M: Field = 1; + + struct LocalStruct { + field1: A, + field2: B, + field3: [A; N], + field4: ([A; N], [B; M]), + field5: &mut A, + } + + impl Default for LocalStruct where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + field1: Default::default(), + field2: Default::default(), + field3: Default::default(), + field4: Default::default(), + field5: mut_field, + } + } + } + + trait DefinedInLocalModule1 { + fn trait_fn1(self, x: A); + fn trait_fn2(self, y: B); + fn trait_fn3(&mut self, x: A, y: B); + fn trait_fn4(self, x: [A; 0], y: [B]); + fn trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl DefinedInLocalModule1 for LocalStruct { + fn trait_fn1(self, _x: A) { drop_var(self, ()) } + fn trait_fn2(self, _y: B) { drop_var(self, ()) } + fn 
trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn local_fn4(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(LOCAL_GLOBAL_N != LOCAL_GLOBAL_M); + let x: Field = 0; + assert(x == 0); + let x: Field = 1; + assert(x == 1); + [] + } + } + + mod library { + use crate::{Default, drop_var}; + + mod library2 { + use crate::{Default, drop_var}; + + global IMPORT_GLOBAL_N_2: Field = 4; + + global IMPORT_GLOBAL_M_2: Field = 5; + + // When we re-export this type from another library and then use it in + // main, we get a panic + struct ReExportMeFromAnotherLib1 { + x : Field, + } + + struct PubLibLocalStruct3 { + pub_field1: A, + pub_field2: B, + pub_field3: [A; N], + pub_field4: ([A; N], [B; M]), + pub_field5: &mut A, + } + + impl Default for PubLibLocalStruct3 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + pub_field1: Default::default(), + pub_field2: Default::default(), + pub_field3: Default::default(), + pub_field4: Default::default(), + pub_field5: mut_field, + } + } + } + + trait PubLibDefinedInLocalModule3 { + fn pub_trait_fn1(self, x: A); + fn pub_trait_fn2(self, y: B); + fn pub_trait_fn3(&mut self, x: A, y: B); + fn pub_trait_fn4(self, x: [A; 0], y: [B]); + fn pub_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn pub_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn pub_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl PubLibDefinedInLocalModule3 for PubLibLocalStruct3 { + fn pub_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn pub_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn pub_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn pub_trait_fn4(self, _x: [A; 0], _y: [B]) { 
drop_var(self, ()) } + fn pub_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn pub_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn PubLiblocal_fn3(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_2 != IMPORT_GLOBAL_M_2); + [] + } + } + + // Re-export + use library2::ReExportMeFromAnotherLib1; + + global IMPORT_GLOBAL_N_1: Field = 2; + + global IMPORT_GLOBAL_M_1: Field = 3; + + struct LibLocalStruct1 { + lib_field1: A, + lib_field2: B, + lib_field3: [A; N], + lib_field4: ([A; N], [B; M]), + lib_field5: &mut A, + } + + impl Default for LibLocalStruct1 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + lib_field1: Default::default(), + lib_field2: Default::default(), + lib_field3: Default::default(), + lib_field4: Default::default(), + lib_field5: mut_field, + } + } + } + + trait LibDefinedInLocalModule1 { + fn lib_trait_fn1(self, x: A); + fn lib_trait_fn2(self, y: B); + fn lib_trait_fn3(&mut self, x: A, y: B); + fn lib_trait_fn4(self, x: [A; 0], y: [B]); + fn lib_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn lib_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn lib_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl LibDefinedInLocalModule1 for LibLocalStruct1 { + fn lib_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn lib_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn lib_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn lib_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn lib_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn lib_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn Liblocal_fn1(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_1 != IMPORT_GLOBAL_M_1); + [] + } + } + + mod library3 { + use crate::{Default, 
drop_var}; + + global IMPORT_GLOBAL_N_3: Field = 6; + + global IMPORT_GLOBAL_M_3: Field = 7; + + struct ReExportMeFromAnotherLib2 { + x : Field, + } + + struct PubCrateLibLocalStruct2 { + crate_field1: A, + crate_field2: B, + crate_field3: [A; N], + crate_field4: ([A; N], [B; M]), + crate_field5: &mut A, + } + + impl Default for PubCrateLibLocalStruct2 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + crate_field1: Default::default(), + crate_field2: Default::default(), + crate_field3: Default::default(), + crate_field4: Default::default(), + crate_field5: mut_field, + } + } + } + + trait PubCrateLibDefinedInLocalModule2 { + fn crate_trait_fn1(self, x: A); + fn crate_trait_fn2(self, y: B); + fn crate_trait_fn3(&mut self, x: A, y: B); + fn crate_trait_fn4(self, x: [A; 0], y: [B]); + fn crate_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn crate_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn crate_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl PubCrateLibDefinedInLocalModule2 for PubCrateLibLocalStruct2 { + fn crate_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn crate_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn crate_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn crate_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn crate_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, ()); [] } + fn crate_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, ()); [] } + } + + pub(crate) fn PubCrateLiblocal_fn2(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_3 != IMPORT_GLOBAL_M_3); + [] + } + } + + + use crate::local_module::{local_fn4, LocalStruct, DefinedInLocalModule1, LOCAL_GLOBAL_N, LOCAL_GLOBAL_M}; + + use library::{ReExportMeFromAnotherLib1, LibLocalStruct1, LibDefinedInLocalModule1, Liblocal_fn1, IMPORT_GLOBAL_N_1, IMPORT_GLOBAL_M_1}; + + // 
overlapping + // use library::library2::ReExportMeFromAnotherLib1; + use crate::library::library2::{PubLibLocalStruct3, PubLibDefinedInLocalModule3, PubLiblocal_fn3, IMPORT_GLOBAL_N_2, IMPORT_GLOBAL_M_2}; + + use library3::{ReExportMeFromAnotherLib2, PubCrateLibLocalStruct2, PubCrateLibDefinedInLocalModule2, PubCrateLiblocal_fn2, IMPORT_GLOBAL_N_3, IMPORT_GLOBAL_M_3}; + + + fn main(_x: ReExportMeFromAnotherLib1, _y: ReExportMeFromAnotherLib2) { + assert(LOCAL_GLOBAL_N != LOCAL_GLOBAL_M); + assert(IMPORT_GLOBAL_N_1 != IMPORT_GLOBAL_M_1); + assert(IMPORT_GLOBAL_N_2 != IMPORT_GLOBAL_M_2); + assert(IMPORT_GLOBAL_N_3 != IMPORT_GLOBAL_M_3); + + let x: LocalStruct = Default::default(); + assert(drop_var(x.trait_fn5([0; LOCAL_GLOBAL_N], [false; LOCAL_GLOBAL_M]), true)); + assert(drop_var(x.trait_fn6([0; LOCAL_GLOBAL_N], [false; LOCAL_GLOBAL_M]), true)); + + let x: LibLocalStruct1 = Default::default(); + assert(drop_var(x.lib_trait_fn5([0; IMPORT_GLOBAL_N_1], [false; IMPORT_GLOBAL_M_1]), true)); + assert(drop_var(x.lib_trait_fn6([0; IMPORT_GLOBAL_N_1], [false; IMPORT_GLOBAL_M_1]), true)); + + let x: PubLibLocalStruct3 = Default::default(); + assert(drop_var(x.pub_trait_fn5([0; IMPORT_GLOBAL_N_2], [false; IMPORT_GLOBAL_M_2]), true)); + assert(drop_var(x.pub_trait_fn6([0; IMPORT_GLOBAL_N_2], [false; IMPORT_GLOBAL_M_2]), true)); + + let x: PubCrateLibLocalStruct2 = Default::default(); + assert(drop_var(x.crate_trait_fn5([0; IMPORT_GLOBAL_N_3], [false; IMPORT_GLOBAL_M_3]), true)); + assert(drop_var(x.crate_trait_fn6([0; IMPORT_GLOBAL_N_3], [false; IMPORT_GLOBAL_M_3]), true)); + + assert(drop_var(local_fn2((0, 1), [], []), true)); + assert(drop_var(Liblocal_fn1((0, 1), [], []), true)); + assert(drop_var(PubLiblocal_fn4((0, 1), [], []), true)); + assert(drop_var(PubCrateLiblocal_fn3((0, 1), [], []), true)); + }"; + + // NOTE: these names must be "replacement-unique", i.e. 
+ // replacing one in a discinct name should do nothing + let names_to_collapse = [ + "DefinedInLocalModule1", + "IMPORT_GLOBAL_M_1", + "IMPORT_GLOBAL_M_2", + "IMPORT_GLOBAL_M_3", + "IMPORT_GLOBAL_N_1", + "IMPORT_GLOBAL_N_2", + "IMPORT_GLOBAL_N_3", + "LOCAL_GLOBAL_M", + "LOCAL_GLOBAL_N", + "LibDefinedInLocalModule1", + "LibLocalStruct1", + "Liblocal_fn1", + "LocalStruct", + "PubCrateLibDefinedInLocalModule2", + "PubCrateLibLocalStruct2", + "PubCrateLiblocal_fn2", + "PubLibDefinedInLocalModule3", + "PubLibLocalStruct3", + "PubLiblocal_fn3", + "ReExportMeFromAnotherLib1", + "ReExportMeFromAnotherLib2", + "local_fn4", + "crate_field1", + "crate_field2", + "crate_field3", + "crate_field4", + "crate_field5", + "crate_trait_fn1", + "crate_trait_fn2", + "crate_trait_fn3", + "crate_trait_fn4", + "crate_trait_fn5", + "crate_trait_fn6", + "crate_trait_fn7", + "field1", + "field2", + "field3", + "field4", + "field5", + "lib_field1", + "lib_field2", + "lib_field3", + "lib_field4", + "lib_field5", + "lib_trait_fn1", + "lib_trait_fn2", + "lib_trait_fn3", + "lib_trait_fn4", + "lib_trait_fn5", + "lib_trait_fn6", + "lib_trait_fn7", + "pub_field1", + "pub_field2", + "pub_field3", + "pub_field4", + "pub_field5", + "pub_trait_fn1", + "pub_trait_fn2", + "pub_trait_fn3", + "pub_trait_fn4", + "pub_trait_fn5", + "pub_trait_fn6", + "pub_trait_fn7", + "trait_fn1", + "trait_fn2", + "trait_fn3", + "trait_fn4", + "trait_fn5", + "trait_fn6", + "trait_fn7", + ]; + + // TODO(https://github.com/noir-lang/noir/issues/4973): + // Name resolution panic from name shadowing test + let cases_to_skip = [ + (1, 21), + (2, 11), + (2, 21), + (3, 11), + (3, 18), + (3, 21), + (4, 21), + (5, 11), + (5, 21), + (6, 11), + (6, 18), + (6, 21), + ]; + let cases_to_skip: HashSet<(usize, usize)> = cases_to_skip.into_iter().collect(); + + for (i, x) in names_to_collapse.iter().enumerate() { + for (j, y) in names_to_collapse.iter().enumerate().filter(|(j, _)| i < *j) { + if !cases_to_skip.contains(&(i, j)) { + 
dbg!((i, j)); + + let modified_src = src.replace(x, y); + let errors = get_program_errors(&modified_src); + assert!(!errors.is_empty(), "Expected errors, got: {:?}", errors); + } + } + } +} diff --git a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs index b9240203a5ee..cc0dbca247ec 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs @@ -11,7 +11,7 @@ use thiserror::Error; pub enum PrintableType { Field, Array { - length: Option, + length: u64, #[serde(rename = "type")] typ: Box, }, @@ -186,7 +186,8 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { (_, PrintableType::MutableReference { .. }) => { output.push_str("<>"); } - (PrintableValue::Vec { array_elements, is_slice }, PrintableType::Array { typ, .. }) => { + (PrintableValue::Vec { array_elements, is_slice }, PrintableType::Array { typ, .. }) + | (PrintableValue::Vec { array_elements, is_slice }, PrintableType::Slice { typ }) => { if *is_slice { output.push('&') } @@ -317,19 +318,7 @@ pub fn decode_value( PrintableValue::Field(field_element) } - PrintableType::Array { length: None, typ } => { - let length = field_iterator - .next() - .expect("not enough data to decode variable array length") - .to_u128() as usize; - let mut array_elements = Vec::with_capacity(length); - for _ in 0..length { - array_elements.push(decode_value(field_iterator, typ)); - } - - PrintableValue::Vec { array_elements, is_slice: false } - } - PrintableType::Array { length: Some(length), typ } => { + PrintableType::Array { length, typ } => { let length = *length as usize; let mut array_elements = Vec::with_capacity(length); for _ in 0..length { diff --git a/noir/noir-repo/compiler/wasm/package.json b/noir/noir-repo/compiler/wasm/package.json index e8e7c0ab3c92..bccf937219e8 100644 --- a/noir/noir-repo/compiler/wasm/package.json +++ 
b/noir/noir-repo/compiler/wasm/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.28.0", + "version": "0.29.0", "license": "(MIT OR Apache-2.0)", "main": "dist/main.js", "types": "./dist/types/src/index.d.cts", diff --git a/noir/noir-repo/compiler/wasm/src/compile.rs b/noir/noir-repo/compiler/wasm/src/compile.rs index de157a1fe20b..57b17a6f79eb 100644 --- a/noir/noir-repo/compiler/wasm/src/compile.rs +++ b/noir/noir-repo/compiler/wasm/src/compile.rs @@ -1,3 +1,4 @@ +use acvm::acir::circuit::ExpressionWidth; use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; @@ -169,9 +170,10 @@ pub fn compile_program( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter - let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + let compile_options = CompileOptions { + expression_width: ExpressionWidth::Bounded { width: 4 }, + ..CompileOptions::default() + }; let compiled_program = noirc_driver::compile_main(&mut context, crate_id, &compile_options, None) @@ -184,7 +186,8 @@ pub fn compile_program( })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let optimized_program = + nargo::ops::transform_program(compiled_program, compile_options.expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -199,9 +202,10 @@ pub fn compile_contract( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter - let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + let compile_options = CompileOptions { + expression_width: ExpressionWidth::Bounded { width: 4 }, + ..CompileOptions::default() + }; let compiled_contract = noirc_driver::compile_contract(&mut context, crate_id, &compile_options) @@ -214,7 +218,8 @@ pub fn compile_contract( })? 
.0; - let optimized_contract = nargo::ops::transform_contract(compiled_contract, expression_width); + let optimized_contract = + nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); let functions = optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); diff --git a/noir/noir-repo/compiler/wasm/src/compile_new.rs b/noir/noir-repo/compiler/wasm/src/compile_new.rs index c187fe7f3de5..4f11cafb975e 100644 --- a/noir/noir-repo/compiler/wasm/src/compile_new.rs +++ b/noir/noir-repo/compiler/wasm/src/compile_new.rs @@ -3,6 +3,7 @@ use crate::compile::{ PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; +use acvm::acir::circuit::ExpressionWidth; use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; use nargo::parse_all; use noirc_driver::{ @@ -96,11 +97,14 @@ impl CompilerContext { mut self, program_width: usize, ) -> Result { - let compile_options = CompileOptions::default(); - let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; + let expression_width = if program_width == 0 { + ExpressionWidth::Unbounded + } else { + ExpressionWidth::Bounded { width: 4 } + }; + let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; let root_crate_id = *self.context.root_crate_id(); - let compiled_program = compile_main(&mut self.context, root_crate_id, &compile_options, None) .map_err(|errs| { @@ -112,7 +116,8 @@ impl CompilerContext { })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, np_language); + let optimized_program = + nargo::ops::transform_program(compiled_program, compile_options.expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -122,10 +127,14 @@ impl CompilerContext { mut self, program_width: usize, ) -> Result { - let compile_options = CompileOptions::default(); - let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; - let root_crate_id = *self.context.root_crate_id(); + let expression_width = if program_width == 0 { + ExpressionWidth::Unbounded + } else { + ExpressionWidth::Bounded { width: 4 } + }; + let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; + let root_crate_id = *self.context.root_crate_id(); let compiled_contract = compile_contract(&mut self.context, root_crate_id, &compile_options) .map_err(|errs| { @@ -137,7 +146,8 @@ impl CompilerContext { })? 
.0; - let optimized_contract = nargo::ops::transform_contract(compiled_contract, np_language); + let optimized_contract = + nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); let functions = optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); @@ -166,7 +176,7 @@ pub fn compile_program_( let compiler_context = prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; - let program_width = 3; + let program_width = 4; compiler_context.compile_program(program_width) } @@ -183,7 +193,7 @@ pub fn compile_contract_( let compiler_context = prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; - let program_width = 3; + let program_width = 4; compiler_context.compile_contract(program_width) } diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md index 1c6b375db49e..6b2d37739125 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md @@ -5,7 +5,9 @@ keywords: [noir, integer types, methods, examples, arithmetic] sidebar_position: 1 --- -An integer type is a range constrained field type. The Noir frontend supports both unsigned and signed integer types. The allowed sizes are 1, 8, 32 and 64 bits. +An integer type is a range constrained field type. +The Noir frontend supports both unsigned and signed integer types. +The allowed sizes are 1, 8, 16, 32 and 64 bits. 
:::info diff --git a/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md b/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md index be8c65679c31..eeead5809699 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md @@ -12,6 +12,7 @@ The ACVM spec defines a set of blackbox functions which backends will be expecte Here is a list of the current black box functions: +- [AES128](./cryptographic_primitives/ciphers.mdx#aes128) - [SHA256](./cryptographic_primitives/hashes.mdx#sha256) - [Schnorr signature verification](./cryptographic_primitives/schnorr.mdx) - [Blake2s](./cryptographic_primitives/hashes.mdx#blake2s) @@ -19,7 +20,7 @@ Here is a list of the current black box functions: - [Pedersen Hash](./cryptographic_primitives/hashes.mdx#pedersen_hash) - [Pedersen Commitment](./cryptographic_primitives/hashes.mdx#pedersen_commitment) - [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification.mdx) -- [Fixed base scalar multiplication](./cryptographic_primitives/scalar.mdx) +- [Embedded curve operations (MSM, addition, ...)](./cryptographic_primitives/embedded_curve_ops.mdx) - AND - XOR - RANGE diff --git a/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md b/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md index cd0f725f8708..ccce62562f81 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md @@ -196,6 +196,20 @@ Example: #include_code bounded-vec-extend-from-bounded-vec-example test_programs/noir_test_success/bounded_vec/src/main.nr rust +### from_array + +```rust +pub fn from_array(array: [T; Len]) -> Self +``` + +Creates a new vector, populating it with values derived from an array input. +The maximum length of the vector is determined based on the type signature. 
+ +Example: +```rust +let bounded_vec: BoundedVec = BoundedVec::from_array([1, 2, 3]) +``` + ### any ```rust diff --git a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/ciphers.mdx b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/ciphers.mdx new file mode 100644 index 000000000000..0103791d2e43 --- /dev/null +++ b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/ciphers.mdx @@ -0,0 +1,28 @@ +--- +title: Ciphers +description: + Learn about the implemented ciphers ready to use for any Noir project +keywords: + [ciphers, Noir project, aes128, encrypt] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## aes128 + +Given a plaintext as an array of bytes, returns the corresponding aes128 ciphertext (CBC mode). Input padding is automatically performed using PKCS#7, so that the output length is `input.len() + (16 - input.len() % 16)`. + +#include_code aes128 noir_stdlib/src/aes128.nr rust + +```rust +fn main() { + let input: [u8; 4] = [0, 12, 3, 15] // Random bytes, will be padded to 16 bytes. + let iv: [u8; 16] = [0; 16]; // Initialisation vector + let key: [u8; 16] = [0; 16] // AES key + let ciphertext = std::aes128::aes128_encrypt(inputs.as_bytes(), iv.as_bytes(), key.as_bytes()); // In this case, the output length will be 16 bytes. 
+} +``` + + + \ No newline at end of file diff --git a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/embedded_curve_ops.mdx b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/embedded_curve_ops.mdx new file mode 100644 index 000000000000..f1122fc37d52 --- /dev/null +++ b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/embedded_curve_ops.mdx @@ -0,0 +1,77 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplication in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +sidebar_position: 1 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +The following functions perform operations over the embedded curve whose coordinates are defined by the configured noir field. +For the BN254 scalar field, this is BabyJubJub or Grumpkin. + +:::note +Suffixes `_low` and `_high` denote low and high limbs of a scalar. +::: + +## embedded_curve_ops::multi_scalar_mul + +Performs multi scalar multiplication over the embedded curve. +The function accepts arbitrary amount of point-scalar pairs on the input, it multiplies the individual pairs over +the curve and returns a sum of the resulting points. + +Points represented as x and y coordinates [x1, y1, x2, y2, ...], scalars as low and high limbs [low1, high1, low2, high2, ...]. + +#include_code multi_scalar_mul noir_stdlib/src/embedded_curve_ops.nr rust + +example + +```rust +fn main(point_x: Field, point_y: Field, scalar_low: Field, scalar_high: Field) { + let point = std::embedded_curve_ops::multi_scalar_mul([point_x, point_y], [scalar_low, scalar_high]); + println(point); +} +``` + +## embedded_curve_ops::fixed_base_scalar_mul + +Performs fixed base scalar multiplication over the embedded curve (multiplies input scalar with a generator point). +The function accepts a single scalar on the input represented as 2 fields. 
+ +#include_code fixed_base_scalar_mul noir_stdlib/src/embedded_curve_ops.nr rust + +example + +```rust +fn main(scalar_low: Field, scalar_high: Field) { + let point = std::embedded_curve_ops::fixed_base_scalar_mul(scalar_low, scalar_high); + println(point); +} +``` + +## embedded_curve_ops::embedded_curve_add + +Adds two points on the embedded curve. +This function takes two `EmbeddedCurvePoint` structures as parameters, representing points on the curve, and returns a new `EmbeddedCurvePoint` structure that represents their sum. + +### Parameters: +- `point1` (`EmbeddedCurvePoint`): The first point to add. +- `point2` (`EmbeddedCurvePoint`): The second point to add. + +### Returns: +- `EmbeddedCurvePoint`: The resulting point after the addition of `point1` and `point2`. + +#include_code embedded_curve_add noir_stdlib/src/embedded_curve_ops.nr rust + +example + +```rust +fn main() { + let point1 = EmbeddedCurvePoint { x: 1, y: 2 }; + let point2 = EmbeddedCurvePoint { x: 3, y: 4 }; + let result = std::embedded_curve_ops::embedded_curve_add(point1, point2); + println!("Resulting Point: ({}, {})", result.x, result.y); +} +``` + + diff --git a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx deleted file mode 100644 index b835236a03e4..000000000000 --- a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx +++ /dev/null @@ -1,44 +0,0 @@ ---- -title: Scalar multiplication -description: See how you can perform scalar multiplications over a fixed and variable bases in Noir -keywords: [cryptographic primitives, Noir project, scalar multiplication] -sidebar_position: 1 ---- - -import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; - -## scalar_mul::fixed_base_embedded_curve - -Performs scalar multiplication of a fixed base/generator over the embedded curve whose coordinates are defined -by the configured noir 
field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. Suffixes `_low` and -`_high` denote low and high limbs of the input scalar. - -#include_code fixed_base_embedded_curve noir_stdlib/src/scalar_mul.nr rust - -example - -```rust -fn main(scalar_low: Field, scalar_high: Field) { - let point = std::scalar_mul::fixed_base_embedded_curve(scalar_low, scalar_high); - println(point); -} -``` - -## scalar_mul::variable_base_embedded_curve - -Performs scalar multiplication of a variable base/input point over the embedded curve whose coordinates are defined -by the configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. Suffixes `_low` and -`_high` denote low and high limbs of the input scalar. - -#include_code variable_base_embedded_curve noir_stdlib/src/scalar_mul.nr rust - -example - -```rust -fn main(point_x: Field, point_y: Field, scalar_low: Field, scalar_high: Field) { - let resulting_point = std::scalar_mul::fixed_base_embedded_curve(point_x, point_y, scalar_low, scalar_high); - println(resulting_point); -} -``` - - diff --git a/noir/noir-repo/docs/docs/noir/standard_library/traits.md b/noir/noir-repo/docs/docs/noir/standard_library/traits.md index 2536d9a943fb..96a7b8e2f22d 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/traits.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/traits.md @@ -186,10 +186,10 @@ These traits abstract over addition, subtraction, multiplication, and division r Implementing these traits for a given type will also allow that type to be used with the corresponding operator for that trait (`+` for Add, etc) in addition to the normal method names. 
-#include_code add-trait noir_stdlib/src/ops.nr rust -#include_code sub-trait noir_stdlib/src/ops.nr rust -#include_code mul-trait noir_stdlib/src/ops.nr rust -#include_code div-trait noir_stdlib/src/ops.nr rust +#include_code add-trait noir_stdlib/src/ops/arith.nr rust +#include_code sub-trait noir_stdlib/src/ops/arith.nr rust +#include_code mul-trait noir_stdlib/src/ops/arith.nr rust +#include_code div-trait noir_stdlib/src/ops/arith.nr rust The implementations block below is given for the `Add` trait, but the same types that implement `Add` also implement `Sub`, `Mul`, and `Div`. @@ -211,7 +211,7 @@ impl Add for u64 { .. } ### `std::ops::Rem` -#include_code rem-trait noir_stdlib/src/ops.nr rust +#include_code rem-trait noir_stdlib/src/ops/arith.nr rust `Rem::rem(a, b)` is the remainder function returning the result of what is left after dividing `a` and `b`. Implementing `Rem` allows the `%` operator @@ -232,11 +232,29 @@ impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } ``` +### `std::ops::Neg` + +#include_code neg-trait noir_stdlib/src/ops/arith.nr rust + +`Neg::neg` is equivalent to the unary negation operator `-`. + +Implementations: +#include_code neg-trait-impls noir_stdlib/src/ops/arith.nr rust + +### `std::ops::Not` + +#include_code not-trait noir_stdlib/src/ops/bit.nr rust + +`Not::not` is equivalent to the unary bitwise NOT operator `!`. + +Implementations: +#include_code not-trait-impls noir_stdlib/src/ops/bit.nr rust + ### `std::ops::{ BitOr, BitAnd, BitXor }` -#include_code bitor-trait noir_stdlib/src/ops.nr rust -#include_code bitand-trait noir_stdlib/src/ops.nr rust -#include_code bitxor-trait noir_stdlib/src/ops.nr rust +#include_code bitor-trait noir_stdlib/src/ops/bit.nr rust +#include_code bitand-trait noir_stdlib/src/ops/bit.nr rust +#include_code bitxor-trait noir_stdlib/src/ops/bit.nr rust Traits for the bitwise operations `|`, `&`, and `^`. 
@@ -263,8 +281,8 @@ impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } ### `std::ops::{ Shl, Shr }` -#include_code shl-trait noir_stdlib/src/ops.nr rust -#include_code shr-trait noir_stdlib/src/ops.nr rust +#include_code shl-trait noir_stdlib/src/ops/bit.nr rust +#include_code shr-trait noir_stdlib/src/ops/bit.nr rust Traits for a bit shift left and bit shift right. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/explainers/explainer-oracle.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/explainers/explainer-oracle.md new file mode 100644 index 000000000000..b84ca5dd9861 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/explainers/explainer-oracle.md @@ -0,0 +1,57 @@ +--- +title: Oracles +description: This guide provides an in-depth understanding of how Oracles work in Noir programming. Learn how to use outside calculations in your programs, constrain oracles, and understand their uses and limitations. +keywords: + - Noir Programming + - Oracles + - JSON-RPC + - Foreign Call Handlers + - Constrained Functions + - Blockchain Programming +sidebar_position: 1 +--- + +If you've seen "The Matrix" you may recall "The Oracle" as Gloria Foster smoking cigarettes and baking cookies. While she appears to "know things", she is actually providing a calculation of a pre-determined future. Noir Oracles are similar, in a way. They don't calculate the future (yet), but they allow you to use outside calculations in your programs. + +![matrix oracle prediction](@site/static/img/memes/matrix_oracle.jpeg) + +A Noir program is usually self-contained. You can pass certain inputs to it, and it will generate a deterministic output for those inputs. But what if you wanted to defer some calculation to an outside process or source? + +Oracles are functions that provide this feature. + +## Use cases + +An example usage for Oracles is proving something on-chain. 
For example, proving that the ETH-USDC quote was below a certain target at a certain block time. Or even making more complex proofs like proving the ownership of an NFT as an anonymous login method. + +Another interesting use case is to defer expensive calculations to be made outside of the Noir program, and then constraining the result; similar to the use of [unconstrained functions](../noir/concepts//unconstrained.md). + +In short, anything that can be constrained in a Noir program but needs to be fetched from an external source is a great candidate to be used in oracles. + +## Constraining oracles + +Just like in The Matrix, Oracles are powerful. But with great power, comes great responsibility. Just because you're using them in a Noir program doesn't mean they're true. Noir has no superpowers. If you want to prove that Portugal won the Euro Cup 2016, you're still relying on potentially untrusted information. + +To give a concrete example, Alice wants to login to the [NounsDAO](https://nouns.wtf/) forum with her username "noir_nouner" by proving she owns a noun without revealing her ethereum address. Her Noir program could have a oracle call like this: + +```rust +#[oracle(getNoun)] +unconstrained fn get_noun(address: Field) -> Field +``` + +This oracle could naively resolve with the number of Nouns she possesses. However, it is useless as a trusted source, as the oracle could resolve to anything Alice wants. In order to make this oracle call actually useful, Alice would need to constrain the response from the oracle, by proving her address and the noun count belongs to the state tree of the contract. + +In short, **Oracles don't prove anything. Your Noir program does.** + +:::danger + +If you don't constrain the return of your oracle, you could be clearly opening an attack vector on your Noir program. Make double-triple sure that the return of an oracle call is constrained! 
+ +::: + +## How to use Oracles + +On CLI, Nargo resolves oracles by making JSON RPC calls, which means it would require an RPC node to be running. + +In JavaScript, NoirJS accepts and resolves arbitrary call handlers (that is, not limited to JSON) as long as they matches the expected types the developer defines. Refer to [Foreign Call Handler](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) to learn more about NoirJS's call handling. + +If you want to build using oracles, follow through to the [oracle guide](../how_to/how-to-oracles.md) for a simple example on how to do that. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/explainers/explainer-recursion.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/explainers/explainer-recursion.md new file mode 100644 index 000000000000..18846176ca74 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/explainers/explainer-recursion.md @@ -0,0 +1,176 @@ +--- +title: Recursive proofs +description: Explore the concept of recursive proofs in Zero-Knowledge programming. Understand how recursion works in Noir, a language for writing smart contracts on the EVM blockchain. Learn through practical examples like Alice and Bob's guessing game, Charlie's recursive merkle tree, and Daniel's reusable components. Discover how to use recursive proofs to optimize computational resources and improve efficiency. + +keywords: + [ + "Recursive Proofs", + "Zero-Knowledge Programming", + "Noir", + "EVM Blockchain", + "Smart Contracts", + "Recursion in Noir", + "Alice and Bob Guessing Game", + "Recursive Merkle Tree", + "Reusable Components", + "Optimizing Computational Resources", + "Improving Efficiency", + "Verification Key", + "Aggregation", + "Recursive zkSNARK schemes", + "PLONK", + "Proving and Verification Keys" + ] +sidebar_position: 1 +pagination_next: how_to/how-to-recursion +--- + +In programming, we tend to think of recursion as something calling itself. 
A classic example would be the calculation of the factorial of a number: + +```js +function factorial(n) { + if (n === 0 || n === 1) { + return 1; + } else { + return n * factorial(n - 1); + } +} +``` + +In this case, while `n` is not `1`, this function will keep calling itself until it hits the base case, bubbling up the result on the call stack: + +```md + Is `n` 1? <--------- + /\ / + / \ n = n -1 + / \ / + Yes No -------- +``` + +In Zero-Knowledge, recursion has some similarities. + +It is not a Noir function calling itself, but a proof being used as an input to another circuit. In short, you verify one proof *inside* another proof, returning the proof that both proofs are valid. + +This means that, given enough computational resources, you can prove the correctness of any arbitrary number of proofs in a single proof. This could be useful to design state channels (for which a common example would be [Bitcoin's Lightning Network](https://en.wikipedia.org/wiki/Lightning_Network)), to save on gas costs by settling one proof on-chain, or simply to make business logic less dependent on a consensus mechanism. + +## Examples + +Let us look at some of these examples + +### Alice and Bob - Guessing game + +Alice and Bob are friends, and they like guessing games. They want to play a guessing game online, but for that, they need a trusted third-party that knows both of their secrets and finishes the game once someone wins. + +So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob will generate a ZK proof stating whether she succeeded or failed. + +This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. 
+ +As a solution, Alice proposes the following: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". + +She can then generate a proof that she verified his proof, and so on. + +```md + Did you fail? <-------------------------- + / \ / + / \ n = n -1 + / \ / + Yes No / + | | / + | | / + | You win / + | / + | / +Generate proof of that / + + / + my own guess ---------------- +``` + +### Charlie - Recursive merkle tree + +Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! + +If the vote collector puts all of the votes into a [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree), everyone can prove the verification of two proofs within one proof, as such: + +```md + abcd + __________|______________ + | | + ab cd + _____|_____ ______|______ + | | | | + alice bob charlie daniel +``` + +Doing this recursively allows us to arrive on a final proof `abcd` which if true, verifies the correctness of all the votes. + +### Daniel - Reusable components + +Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. + +He might find it more efficient to generate a proof for that setup phase separately, and verify that proof recursively in the actual business logic section of his circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. 
+ +## What params do I need + +As you can see in the [recursion reference](noir/standard_library/recursion.md), a simple recursive proof requires: + +- The proof to verify +- The Verification Key of the circuit that generated the proof +- A hash of this verification key, as it's needed for some backends +- The public inputs for the proof + +:::info + +Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. + +So, taking the example of Alice and Bob and their guessing game: + +- Alice makes her guess. Her proof is *not* recursive: it doesn't verify any proof within it! It's just a standard `assert(x != y)` circuit +- Bob verifies Alice's proof and makes his own guess. In this circuit, he doesn't exactly *prove* the verification of Alice's proof. Instead, he *aggregates* his proof to Alice's proof. The actual verification is done when the full proof is verified, for example when using `nargo verify` or through the verifier smart contract. + +We can imagine recursive proofs a [relay race](https://en.wikipedia.org/wiki/Relay_race). The first runner doesn't have to receive the baton from anyone else, as he/she already starts with it. But when his/her turn is over, the next runner needs to receive it, run a bit more, and pass it along. Even though every runner could theoretically verify the baton mid-run (why not? 🏃🔍), only at the end of the race does the referee verify that the whole race is valid. + +::: + +## Some architecture + +As with everything in computer science, there's no one-size-fits all. But there are some patterns that could help understanding and implementing them. To give three examples: + +### Adding some logic to a proof verification + +This would be an approach for something like our guessing game, where proofs are sent back and forth and are verified by each opponent. 
This circuit would be divided into two sections:
So you are able to cache the verification key and reuse it later. + +Currently, Noir JS packages don't expose the functionality of loading proving and verification keys, but that feature exists in the underlying `bb.js` package. + +## How can I try it + +Learn more about using recursion in Nargo and NoirJS in the [how-to guide](../how_to/how-to-recursion.md) and see a full example in [noir-examples](https://github.com/noir-lang/noir-examples). diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/_category_.json new file mode 100644 index 000000000000..5d694210bbf3 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/_category_.json new file mode 100644 index 000000000000..23b560f610b8 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/index.md new file mode 100644 index 000000000000..743c4d8d6348 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/index.md @@ -0,0 +1,142 @@ +--- +title: Creating a Project +description: + Learn how to create and verify your first Noir program using Nargo, a programming language for + zero-knowledge proofs. 
+keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +sidebar_position: 1 + +--- + +Now that we have installed Nargo, it is time to make our first hello world program! + +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. + +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ which contain the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../../noir/concepts/data_types/index.md) section. 
+ +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. + +For more Noir syntax, check the [Language Concepts](../../noir/concepts/comments.md) chapter. + +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +Two additional files would be generated in your project directory: + +_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. + +## Prove Our Noir Program + +Now that the project is set up, we can create a proof of correct execution of our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. For example: + +```toml +x = "1" +y = "2" +``` + +Prove the valid execution of your Noir program: + +```sh +nargo prove +``` + +A new folder _proofs_ would then be generated in your project directory, containing the proof file +`.proof`, where the project name is defined in Nargo.toml. + +The _Verifier.toml_ file would also be updated with the public values computed from program +execution (in this case the value of `y`): + +```toml +y = "0x0000000000000000000000000000000000000000000000000000000000000002" +``` + +> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the +proof file. + +Verify your proof by running: + +```sh +nargo verify +``` + +The verification will complete in silence if it is successful. If it fails, it will log the +corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](./project_breakdown.md), we will go into more detail on each step performed. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/project_breakdown.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/project_breakdown.md new file mode 100644 index 000000000000..6160a102c6c9 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/hello_noir/project_breakdown.md @@ -0,0 +1,199 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +sidebar_position: 2 +--- + +This section breaks down our hello world program from the previous section. We elaborate on the project +structure and what the `prove` and `verify` commands did. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Verifier.toml + +_Verifier.toml_ contains public in/output values computed when executing the Noir program. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. 
+ +Example Nargo.toml: + +```toml +[package] +name = "noir_starter" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../../noir/modules_packages_crates/workspaces.md) will look a bit different. For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section defines a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follow's [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../../noir/modules_packages_crates/dependencies.md) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. 
+ +### main.nr + +The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and +public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x`, which holds the value of `1`, and `y`, which holds the value of `2`, + is not equal. This inequality constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. 
+ +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: + +```bash +nargo prove +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: + +```bash +nargo prove -p OtherProver +``` + +## Verifying a Proof + +When the command `nargo verify` is executed, two processes happen: + +1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) + +2. If that file is found, the proof's validity is checked + +> **Note:** The validity of the proof is linked to the current Noir program; if the program is +> changed and the verifier verifies the proof, it will fail because the proof is not valid for the +> _modified_ Noir program. + +In production, the prover and the verifier are usually two separate entities. A prover would +retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the +verifier. The verifier would then retrieve the public inputs, usually from external sources, and +verify the validity of the proof against it. + +Take a private asset transfer as an example: + +A person using a browser as the prover would retrieve private inputs locally (e.g. the user's private key) and +public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof +and submit it to the verifier smart contract. 
+ +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could trigger (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/_category_.json new file mode 100644 index 000000000000..0c02fb5d4d79 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 0, + "label": "Install Nargo", + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/index.md new file mode 100644 index 000000000000..4ef86aa59147 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/index.md @@ -0,0 +1,48 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo through the most common and easy method, noirup +keywords: [ + Nargo + Noir + Rust + Cargo + Noirup + Installation + Terminal Commands + Version Check + Nightlies + Specific Versions + Branches + Noirup Repository +] +pagination_next: getting_started/hello_noir/index +--- + +`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. + +With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. 
+ +Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. + +## Installing Noirup + +Open a terminal on your machine, and write: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done. That's it. You should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches. Check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. + +Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/other_install_methods.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/other_install_methods.md new file mode 100644 index 000000000000..3634723562bf --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/installation/other_install_methods.md @@ -0,0 +1,102 @@ +--- +title: Alternative Installations +description: There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains how to specify which version to install when using noirup, and using WSL for windows. +keywords: [ + Installation + Nargo + Noirup + Binaries + Compiling from Source + WSL for Windows + macOS + Linux + Nix + Direnv + Uninstalling Nargo + ] +sidebar_position: 1 +--- + +## Encouraged Installation Method: Noirup + +Noirup is the endorsed method for installing Nargo, streamlining the process of fetching binaries or compiling from source. It supports a range of options to cater to your specific needs, from nightly builds and specific versions to compiling from various sources. 
+ +### Installing Noirup + +First, ensure you have `noirup` installed: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +### Fetching Binaries + +With `noirup`, you can easily switch between different Nargo versions, including nightly builds: + +- **Nightly Version**: Install the latest nightly build. + + ```sh + noirup --version nightly + ``` + +- **Specific Version**: Install a specific version of Nargo. + ```sh + noirup --version + ``` + +### Compiling from Source + +`noirup` also enables compiling Nargo from various sources: + +- **From a Specific Branch**: Install from the latest commit on a branch. + + ```sh + noirup --branch + ``` + +- **From a Fork**: Install from the main branch of a fork. + + ```sh + noirup --repo + ``` + +- **From a Specific Branch in a Fork**: Install from a specific branch in a fork. + + ```sh + noirup --repo --branch + ``` + +- **From a Specific Pull Request**: Install from a specific PR. + + ```sh + noirup --pr + ``` + +- **From a Specific Commit**: Install from a specific commit. + + ```sh + noirup -C + ``` + +- **From Local Source**: Compile and install from a local directory. + ```sh + noirup --path ./path/to/local/source + ``` + +## Installation on Windows + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](#encouraged-installation-method-noirup). + +## Uninstalling Nargo + +If you installed Nargo with `noirup`, you can uninstall Nargo by removing the files in `~/.nargo`, `~/nargo`, and `~/noir_cache`. This ensures that all installed binaries, configurations, and cache related to Nargo are fully removed from your system. 
+ +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/tooling/noir_codegen.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/tooling/noir_codegen.md new file mode 100644 index 000000000000..d65151da0ab2 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/getting_started/tooling/noir_codegen.md @@ -0,0 +1,113 @@ +--- +title: Noir Codegen for TypeScript +description: Learn how to use Noir codegen to generate TypeScript bindings +keywords: [Nargo, Noir, compile, TypeScript] +sidebar_position: 2 +--- + +When using TypeScript, it is extra work to interpret Noir program outputs in a type-safe way. Third party libraries may exist for popular Noir programs, but they are either hard to find or unmaintained. + +Now you can generate TypeScript bindings for your Noir programs in two steps: +1. Exporting Noir functions using `nargo export` +2. Using the TypeScript module `noir_codegen` to generate TypeScript binding + +**Note:** you can only export functions from a Noir *library* (not binary or contract program types). + +## Installation + +### Your TypeScript project + +If you don't already have a TypeScript project you can add the module with `yarn` (or `npm`), then initialize it: + +```bash +yarn add typescript -D +npx tsc --init +``` + +### Add TypeScript module - `noir_codegen` + +The following command will add the module to your project's devDependencies: + +```bash +yarn add @noir-lang/noir_codegen -D +``` + +### Nargo library +Make sure you have Nargo, v0.25.0 or greater, installed. If you don't, follow the [installation guide](../installation/index.md). 
+ +If you're in a new project, make a `circuits` folder and create a new Noir library: + +```bash +mkdir circuits && cd circuits +nargo new --lib myNoirLib +``` + +## Usage + +### Export ABI of specified functions + +First go to the `.nr` files in your Noir library, and add the `#[export]` macro to each function that you want to use in TypeScript. + +```rust +#[export] +fn your_function(... +``` + +From your Noir library (where `Nargo.toml` is), run the following command: + +```bash +nargo export +``` + +You will now have an `export` directory with a .json file per exported function. + +You can also specify the directory of Noir programs using `--program-dir`, for example: + +```bash +nargo export --program-dir=./circuits/myNoirLib +``` + +### Generate TypeScript bindings from exported functions + +To use the `noir-codegen` package we added to the TypeScript project: + +```bash +yarn noir-codegen ./export/your_function.json +``` + +This creates an `exports` directory with an `index.ts` file containing all exported functions. + +**Note:** adding `--out-dir` allows you to specify an output dir for your TypeScript bindings to go. 
Eg: + +```bash +yarn noir-codegen ./export/*.json --out-dir ./path/to/output/dir +``` + +## Example .nr function to .ts output + +Consider a Noir library with this function: + +```rust +#[export] +fn not_equal(x: Field, y: Field) -> bool { + x != y +} +``` + +After the export and codegen steps, you should have an `index.ts` like: + +```typescript +export type Field = string; + + +export const is_equal_circuit: CompiledCircuit = {"abi":{"parameters":[{"name":"x","type":{"kind":"field"},"visibility":"private"},{"name":"y","type":{"kind":"field"},"visibility":"private"}],"param_witnesses":{"x":[{"start":0,"end":1}],"y":[{"start":1,"end":2}]},"return_type":{"abi_type":{"kind":"boolean"},"visibility":"private"},"return_witnesses":[4]},"bytecode":"H4sIAAAAAAAA/7WUMQ7DIAxFQ0Krrr2JjSGYLVcpKrn/CaqqDQN12WK+hPBgmWd/wEyHbF1SS923uhOs3pfoChI+wKXMAXzIKyNj4PB0TFTYc0w5RUjoqeAeEu1wqK0F54RGkWvW44LPzExnlkbMEs4JNZmN8PxS42uHv82T8a3Jeyn2Ks+VLPcO558HmyLMCDOXAXXtpPt4R/Rt9T36ss6dS9HGPx/eG17nGegKBQAA"}; + +export async function is_equal(x: Field, y: Field, foreignCallHandler?: ForeignCallHandler): Promise { + const program = new Noir(is_equal_circuit); + const args: InputMap = { x, y }; + const { returnValue } = await program.execute(args, foreignCallHandler); + return returnValue as boolean; +} +``` + +Now the `is_equal()` function and relevant types are readily available for use in TypeScript. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/_category_.json new file mode 100644 index 000000000000..23b560f610b8 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/_category_.json new file mode 100644 index 000000000000..cc2cbb1c2533 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Debugging", + "position": 5, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/debugging_with_the_repl.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/debugging_with_the_repl.md new file mode 100644 index 000000000000..09e5bae68ad7 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/debugging_with_the_repl.md @@ -0,0 +1,164 @@ +--- +title: Using the REPL Debugger +description: + Step by step guide on how to debug your Noir circuits with the REPL Debugger. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + REPL, + ] +sidebar_position: 1 +--- + +#### Pre-requisites + +In order to use the REPL debugger, first you need to install recent enough versions of Nargo and vscode-noir. + +## Debugging a simple circuit + +Let's debug a simple circuit: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +To start the REPL debugger, using a terminal, go to a Noir circuit's home directory. 
Then: + +`$ nargo debug` + +You should be seeing this in your terminal: + +``` +[main] Starting debugger +At ~/noir-examples/recursion/circuits/main/src/main.nr:1:9 + 1 -> fn main(x : Field, y : pub Field) { + 2 assert(x != y); + 3 } +> +``` + +The debugger displays the current Noir code location, and it is now waiting for us to drive it. + +Let's first take a look at the available commands. For that we'll use the `help` command. + +``` +> help +Available commands: + + opcodes display ACIR opcodes + into step into to the next opcode + next step until a new source location is reached + out step until a new source location is reached + and the current stack frame is finished + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + over step until a new source location is reached + without diving into function calls + restart restart the debugging session + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + memset index:usize value:String update a memory cell with the given + value + continue continue execution until the end of the + program + vars show variable values available at this point + in execution + stacktrace display the current stack trace + memory show memory (valid when executing unconstrained code) + step step to the next ACIR opcode + +Other commands: + + help Show this help message + quit Quit repl + +``` + +Some commands operate only for unconstrained functions, such as `memory` and `memset`. 
If you try to use them while execution is paused at an ACIR opcode, the debugger will simply inform you that you are not executing unconstrained code: + +``` +> memory +Unconstrained VM memory not available +> +``` + +Before continuing, we can take a look at the initial witness map: + +``` +> witness +_0 = 1 +_1 = 2 +> +``` + +Cool, since `x==1`, `y==2`, and we want to check that `x != y`, our circuit should succeed. At this point we could intervene and use the witness setter command to change one of the witnesses. Let's set `y=3`, then back to 2, so we don't affect the expected result: + +``` +> witness +_0 = 1 +_1 = 2 +> witness 1 3 +_1 = 3 +> witness +_0 = 1 +_1 = 3 +> witness 1 2 +_1 = 2 +> witness +_0 = 1 +_1 = 2 +> +``` + +Now we can inspect the current state of local variables. For that we use the `vars` command. + +``` +> vars +> +``` + +We currently have no vars in context, since we are at the entry point of the program. Let's use `next` to execute until the next point in the program. + +``` +> vars +> next +At ~/noir-examples/recursion/circuits/main/src/main.nr:1:20 + 1 -> fn main(x : Field, y : pub Field) { + 2 assert(x != y); + 3 } +> vars +x:Field = 0x01 +``` + +As a result of stepping, the variable `x`, whose initial value comes from the witness map, is now in context and returned by `vars`. + +``` +> next + 1 fn main(x : Field, y : pub Field) { + 2 -> assert(x != y); + 3 } +> vars +y:Field = 0x02 +x:Field = 0x01 +``` + +Stepping again we can finally see both variables and their values. And now we can see that the next assertion should succeed. + +Let's continue to the end: + +``` +> continue +(Continuing execution...) +Finished execution +> q +[main] Circuit witness successfully solved +``` + +Upon quitting the debugger after a solved circuit, the resulting circuit witness gets saved, equivalent to what would happen if we had run the same circuit with `nargo execute`. + +We just went through the basics of debugging using Noir REPL debugger. 
For a comprehensive reference, check out [the reference page](../../reference/debugger/debugger_repl.md). diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/debugging_with_vs_code.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/debugging_with_vs_code.md new file mode 100644 index 000000000000..a5858c1a5eb5 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/debugger/debugging_with_vs_code.md @@ -0,0 +1,68 @@ +--- +title: Using the VS Code Debugger +description: + Step by step guide on how to debug your Noir circuits with the VS Code Debugger configuration and features. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + VS Code, + IDE, + ] +sidebar_position: 0 +--- + +This guide will show you how to use VS Code with the vscode-noir extension to debug a Noir project. + +#### Pre-requisites + +- Nargo +- vscode-noir +- A Noir project with a `Nargo.toml`, `Prover.toml` and at least one Noir (`.nr`) containing an entry point function (typically `main`). + +## Running the debugger + +The easiest way to start debugging is to open the file you want to debug, and press `F5`. This will cause the debugger to launch, using your `Prover.toml` file as input. + +You should see something like this: + +![Debugger launched](@site/static/img/debugger/1-started.png) + +Let's inspect the state of the program. For that, we open VS Code's _Debug pane_. Look for this icon: + +![Debug pane icon](@site/static/img/debugger/2-icon.png) + +You will now see two categories of variables: Locals and Witness Map. + +![Debug pane expanded](@site/static/img/debugger/3-debug-pane.png) + +1. **Locals**: variables of your program. At this point in execution this section is empty, but as we step through the code it will get populated by `x`, `result`, `digest`, etc. + +2. **Witness map**: these are initially populated from your project's `Prover.toml` file. 
In this example, they will be used to populate `x` and `result` at the beginning of the `main` function. + +Most of the time you will probably be focusing mostly on locals, as they represent the high level state of your program. + +You might be interested in inspecting the witness map in case you are trying to solve a really low level issue in the compiler or runtime itself, so this concerns mostly advanced or niche users. + +Let's step through the program, by using the debugger buttons or their corresponding keyboard shortcuts. + +![Debugger buttons](@site/static/img/debugger/4-debugger-buttons.png) + +Now we can see in the variables pane that there's values for `digest`, `result` and `x`. + +![Inspecting locals](@site/static/img/debugger/5-assert.png) + +We can also inspect the values of variables by directly hovering on them on the code. + +![Hover locals](@site/static/img/debugger/6-hover.png) + +Let's set a break point at the `keccak256` function, so we can continue execution up to the point when it's first invoked without having to go one step at a time. + +We just need to click the to the right of the line number 18. Once the breakpoint appears, we can click the `continue` button or use its corresponding keyboard shortcut (`F5` by default). + +![Breakpoint](@site/static/img/debugger/7-break.png) + +Now we are debugging the `keccak256` function, notice the _Call Stack pane_ at the lower right. This lets us inspect the current call stack of our process. + +That covers most of the current debugger functionalities. Check out [the reference](../../reference/debugger/debugger_vscode.md) for more details on how to configure the debugger. 
\ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-oracles.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-oracles.md new file mode 100644 index 000000000000..8cf8035a5c4f --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-oracles.md @@ -0,0 +1,276 @@ +--- +title: How to use Oracles +description: Learn how to use oracles in your Noir program with examples in both Nargo and NoirJS. This guide also covers writing a JSON RPC server and providing custom foreign call handlers for NoirJS. +keywords: + - Noir Programming + - Oracles + - Nargo + - NoirJS + - JSON RPC Server + - Foreign Call Handlers +sidebar_position: 1 +--- + +This guide shows you how to use oracles in your Noir program. For the sake of clarity, it assumes that: + +- You have read the [explainer on Oracles](../explainers/explainer-oracle.md) and are comfortable with the concept. +- You have a Noir program to add oracles to. You can create one using the [vite-hardhat starter](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) as a boilerplate. +- You understand the concept of a JSON-RPC server. Visit the [JSON-RPC website](https://www.jsonrpc.org/) if you need a refresher. +- You are comfortable with server-side JavaScript (e.g. Node.js, managing packages, etc.). + +For reference, you can find the snippets used in this tutorial on the [Aztec DevRel Repository](https://github.com/AztecProtocol/dev-rel/tree/main/code-snippets/how-to-oracles). + +## Rundown + +This guide has 3 major steps: + +1. How to modify our Noir program to make use of oracle calls as unconstrained functions +2. How to write a JSON RPC Server to resolve these oracle calls with Nargo +3. 
How to use them in Nargo and how to provide a custom resolver in NoirJS + +## Step 1 - Modify your Noir program + +An oracle is defined in a Noir program by defining two methods: + +- An unconstrained method - This tells the compiler that it is executing an [unconstrained functions](../noir/concepts//unconstrained.md). +- A decorated oracle method - This tells the compiler that this method is an RPC call. + +An example of an oracle that returns a `Field` would be: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(number: Field) -> Field { } + +unconstrained fn get_sqrt(number: Field) -> Field { + sqrt(number) +} +``` + +In this example, we're wrapping our oracle function in a unconstrained method, and decorating it with `oracle(getSqrt)`. We can then call the unconstrained function as we would call any other function: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); +} +``` + +In the next section, we will make this `getSqrt` (defined on the `sqrt` decorator) be a method of the RPC server Noir will use. + +:::danger + +As explained in the [Oracle Explainer](../explainers/explainer-oracle.md), this `main` function is unsafe unless you constrain its return value. For example: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); + assert(sqrt.pow_32(2) as u64 == input as u64); // <---- constrain the return of an oracle! +} +``` + +::: + +:::info + +Currently, oracles only work with single params or array params. For example: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt([Field; 2]) -> [Field; 2] { } +``` + +::: + +## Step 2 - Write an RPC server + +Brillig will call *one* RPC server. Most likely you will have to write your own, and you can do it in whatever language you prefer. In this guide, we will do it in Javascript. 
+ +Let's use the above example of an oracle that consumes an array with two `Field` and returns their square roots: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(input: [Field; 2]) -> [Field; 2] { } + +unconstrained fn get_sqrt(input: [Field; 2]) -> [Field; 2] { + sqrt(input) +} + +fn main(input: [Field; 2]) { + let sqrt = get_sqrt(input); + assert(sqrt[0].pow_32(2) as u64 == input[0] as u64); + assert(sqrt[1].pow_32(2) as u64 == input[1] as u64); +} +``` + +:::info + +Why square root? + +In general, computing square roots is computationally more expensive than multiplications, which takes a toll when speaking about ZK applications. In this case, instead of calculating the square root in Noir, we are using our oracle to offload that computation to be made in plain. In our circuit we can simply multiply the two values. + +::: + +Now, we should write the correspondent RPC server, starting with the [default JSON-RPC 2.0 boilerplate](https://www.npmjs.com/package/json-rpc-2.0#example): + +```js +import { JSONRPCServer } from "json-rpc-2.0"; +import express from "express"; +import bodyParser from "body-parser"; + +const app = express(); +app.use(bodyParser.json()); + +const server = new JSONRPCServer(); +app.post("/", (req, res) => { + const jsonRPCRequest = req.body; + server.receive(jsonRPCRequest).then((jsonRPCResponse) => { + if (jsonRPCResponse) { + res.json(jsonRPCResponse); + } else { + res.sendStatus(204); + } + }); +}); + +app.listen(5555); +``` + +Now, we will add our `getSqrt` method, as expected by the `#[oracle(getSqrt)]` decorator in our Noir code. It maps through the params array and returns their square roots: + +```js +server.addMethod("getSqrt", async (params) => { + const values = params[0].Array.map((field) => { + return `${Math.sqrt(parseInt(field, 16))}`; + }); + return { values: [{ Array: values }] }; +}); +``` + +:::tip + +Brillig expects an object with an array of values. 
Each value is an object declaring to be `Single` or `Array` and returning a field element *as a string*. For example: + +```json +{ "values": [{ "Array": ["1", "2"] }]} +{ "values": [{ "Single": "1" }]} +{ "values": [{ "Single": "1" }, { "Array": ["1", "2"] }]} +``` + +If you're using Typescript, the following types may be helpful in understanding the expected return value and making sure they're easy to follow: + +```js +interface SingleForeignCallParam { + Single: string, +} + +interface ArrayForeignCallParam { + Array: string[], +} + +type ForeignCallParam = SingleForeignCallParam | ArrayForeignCallParam; + +interface ForeignCallResult { + values: ForeignCallParam[], +} +``` + +::: + +## Step 3 - Usage with Nargo + +Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example: + +```bash +nargo test --oracle-resolver http://localhost:5555 +``` + +This tells `nargo` to use your RPC Server URL whenever it finds an oracle decorator. + +## Step 4 - Usage with NoirJS + +In a JS environment, an RPC server is not strictly necessary, as you may want to resolve your oracles without needing any JSON call at all. NoirJS simply expects that you pass a callback function when you generate proofs, and that callback function can be anything. + +For example, if your Noir program expects the host machine to provide CPU pseudo-randomness, you could simply pass it as the `foreignCallHandler`. 
You don't strictly need to create an RPC server to serve pseudo-randomness, as you may as well get it directly in your app: + +```js +const foreignCallHandler = (name, inputs) => crypto.randomBytes(16) // etc + +await noir.generateProof(inputs, foreignCallHandler) +``` + +As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) function simply means "a callback function that returns a value of type [`ForeignCallOutput`](../reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md). It doesn't have to be an RPC call like in the case for Nargo. + +:::tip + +Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)? + +You don't technically have to, but then how would you run `nargo test` or `nargo prove`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. + +::: + +In this case, let's make `foreignCallHandler` call the JSON RPC Server we created in [Step #2](#step-2---write-an-rpc-server), by making it a JSON RPC Client. 
+ +For example, using the same `getSqrt` program in [Step #1](#step-1---modify-your-noir-program) (comments in the code): + +```js +import { JSONRPCClient } from "json-rpc-2.0"; + +// declaring the JSONRPCClient +const client = new JSONRPCClient((jsonRPCRequest) => { +// hitting the same JSON RPC Server we coded above + return fetch("http://localhost:5555", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(jsonRPCRequest), + }).then((response) => { + if (response.status === 200) { + return response + .json() + .then((jsonRPCResponse) => client.receive(jsonRPCResponse)); + } else if (jsonRPCRequest.id !== undefined) { + return Promise.reject(new Error(response.statusText)); + } + }); +}); + +// declaring a function that takes the name of the foreign call (getSqrt) and the inputs +const foreignCallHandler = async (name, input) => { + // notice that the "inputs" parameter contains *all* the inputs + // in this case we to make the RPC request with the first parameter "numbers", which would be input[0] + const oracleReturn = await client.request(name, [ + { Array: input[0].map((i) => i.toString("hex")) }, + ]); + return [oracleReturn.values[0].Array]; +}; + +// the rest of your NoirJS code +const input = { input: [4, 16] }; +const { witness } = await noir.execute(numbers, foreignCallHandler); +``` + +:::tip + +If you're in a NoirJS environment running your RPC server together with a frontend app, you'll probably hit a familiar problem in full-stack development: requests being blocked by [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) policy. 
For development only, you can simply install and use the [`cors` npm package](https://www.npmjs.com/package/cors) to get around the problem: + +```bash +yarn add cors +``` + +and use it as a middleware: + +```js +import cors from "cors"; + +const app = express(); +app.use(cors()) +``` + +::: + +## Conclusion + +Hopefully by the end of this guide, you should be able to: + +- Write your own logic around Oracles and how to write a JSON RPC server to make them work with your Nargo commands. +- Provide custom foreign call handlers for NoirJS. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-recursion.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-recursion.md new file mode 100644 index 000000000000..4c45bb87ae20 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-recursion.md @@ -0,0 +1,179 @@ +--- +title: How to use recursion on NoirJS +description: Learn how to implement recursion with NoirJS, a powerful tool for creating smart contracts on the EVM blockchain. This guide assumes familiarity with NoirJS, solidity verifiers, and the Barretenberg proving backend. Discover how to generate both final and intermediate proofs using `noir_js` and `backend_barretenberg`. +keywords: + [ + "NoirJS", + "EVM blockchain", + "smart contracts", + "recursion", + "solidity verifiers", + "Barretenberg backend", + "noir_js", + "backend_barretenberg", + "intermediate proofs", + "final proofs", + "nargo compile", + "json import", + "recursive circuit", + "recursive app" + ] +sidebar_position: 1 +--- + +This guide shows you how to use recursive proofs in your NoirJS app. For the sake of clarity, it is assumed that: + +- You already have a NoirJS app. If you don't, please visit the [NoirJS tutorial](../tutorials/noirjs_app.md) and the [reference](../reference/NoirJS/noir_js/index.md). 
- You are familiar with what recursive proofs are, and you have read the [recursion explainer](../explainers/explainer-recursion.md)
Then you feed it to the `noir_js` interface. + +For recursion, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. + +It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: + +```js +const backend = new Backend(circuit, { threads: 8 }) +``` + +:::tip +You can use the [`os.cpus()`](https://nodejs.org/api/os.html#oscpus) object in `nodejs` or [`navigator.hardwareConcurrency`](https://developer.mozilla.org/en-US/docs/Web/API/Navigator/hardwareConcurrency) on the browser to make the most out of those glorious cpu cores +::: + +## Step 2: Generating the witness and the proof for `main` + +After instantiating the backend, you should also instantiate `noir_js`. We will use it to execute the circuit and get the witness. + +```js +const noir = new Noir(circuit, backend) +const { witness } = noir.execute(input) +``` + +With this witness, you are now able to generate the intermediate proof for the main circuit: + +```js +const { proof, publicInputs } = await backend.generateProof(witness) +``` + +:::warning + +Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit we are actually running and why! + +In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof. + +With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*, so it must be Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. 
+ +::: + +## Step 3 - Verification and proof artifacts + +Optionally, you are able to verify the intermediate proof: + +```js +const verified = await backend.verifyProof({ proof, publicInputs }) +``` + +This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. For that, we need to generate recursive proof artifacts that will be passed to the circuit that is verifying the proof we just generated. Instead of passing the proof and verification key as a byte array, we pass them as fields which makes it cheaper to verify in a circuit: + +```js +const { proofAsFields, vkAsFields, vkHash } = await backend.generateRecursiveProofArtifacts( { publicInputs, proof }, publicInputsCount) +``` + +This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away. + +:::info + +The `proofAsFields` has a constant size `[Field; 93]` and verification keys in Barretenberg are always `[Field; 114]`. + +::: + +:::warning + +One common mistake is to forget *who* makes this call. + +In a situation where Alice is generating the `main` proof, if she generates the proof artifacts and sends them to Bob, which gladly takes them as true, this would mean Alice could prove anything! + +Instead, Bob needs to make sure *he* extracts the proof artifacts, using his own instance of the `main` circuit backend. This way, Alice has to provide a valid proof for the correct `main` circuit. + +::: + +## Step 4 - Recursive proof generation + +With the artifacts, generating a recursive proof is no different from a normal proof. 
You simply use the `backend` (with the recursive circuit) to generate it: + +```js +const recursiveInputs = { + verification_key: vkAsFields, // array of length 114 + proof: proofAsFields, // array of length 93 + size of public inputs + publicInputs: [mainInput.y], // using the example above, where `y` is the only public input + key_hash: vkHash, +} + +const { witness, returnValue } = noir.execute(recursiveInputs) // we're executing the recursive circuit now! +const { proof, publicInputs } = backend.generateProof(witness) +const verified = backend.verifyProof({ proof, publicInputs }) +``` + +You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested of using them this way! + +:::tip + +Managing circuits and "who does what" can be confusing. To make sure your naming is consistent, you can keep them in an object. For example: + +```js +const circuits = { + main: mainJSON, + recursive: recursiveJSON +} +const backends = { + main: new BarretenbergBackend(circuits.main), + recursive: new BarretenbergBackend(circuits.recursive) +} +const noir_programs = { + main: new Noir(circuits.main, backends.main), + recursive: new Noir(circuits.recursive, backends.recursive) +} +``` + +This allows you to neatly call exactly the method you want without conflicting names: + +```js +// Alice runs this 👇 +const { witness: mainWitness } = await noir_programs.main.execute(input) +const proof = await backends.main.generateProof(mainWitness) + +// Bob runs this 👇 +const verified = await backends.main.verifyProof(proof) +const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateRecursiveProofArtifacts( + proof, + numPublicInputs, +); +const recursiveProof = await noir_programs.recursive.generateProof(recursiveInputs) +``` + +::: diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-solidity-verifier.md 
b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-solidity-verifier.md new file mode 100644 index 000000000000..e3c7c1065dad --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/how-to-solidity-verifier.md @@ -0,0 +1,231 @@ +--- +title: Generate a Solidity Verifier +description: + Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier + contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart + contract. Read more to find out +keywords: + [ + solidity verifier, + smart contract, + blockchain, + compiler, + plonk_vk.sol, + EVM blockchain, + verifying Noir programs, + proving backend, + Barretenberg, + ] +sidebar_position: 0 +pagination_next: tutorials/noirjs_app +--- + +Noir has the ability to generate a verifier contract in Solidity, which can be deployed in many EVM-compatible blockchains such as Ethereum. + +This allows for a powerful feature set, as one can make use of the conciseness and the privacy provided by Noir in an immutable ledger. Applications can range from simple P2P guessing games, to complex private DeFi interactions. + +This guide shows you how to generate a Solidity Verifier and deploy it on the [Remix IDE](https://remix.ethereum.org/). It is assumed that: + +- You are comfortable with the Solidity programming language and understand how contracts are deployed on the Ethereum network +- You have Noir installed and you have a Noir program. If you don't, [get started](../getting_started/installation/index.md) with Nargo and the example Hello Noir circuit +- You are comfortable navigating RemixIDE. If you aren't or you need a refresher, you can find some video tutorials [here](https://www.youtube.com/channel/UCjTUPyFEr2xDGN6Cg8nKDaA) that could help you. + +## Rundown + +Generating a Solidity Verifier contract is actually a one-command process. However, compiling it and deploying it can have some caveats. 
Here's the rundown of this guide: + +1. How to generate a solidity smart contract +2. How to compile the smart contract in the RemixIDE +3. How to deploy it to a testnet + +## Step 1 - Generate a contract + +This is by far the most straight-forward step. Just run: + +```sh +nargo codegen-verifier +``` + +A new `contract` folder would then be generated in your project directory, containing the Solidity +file `plonk_vk.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract. + +:::info + +It is possible to generate verifier contracts of Noir programs for other smart contract platforms as long as the proving backend supplies an implementation. + +Barretenberg, the default proving backend for Nargo, supports generation of verifier contracts, for the time being these are only in Solidity. +::: + +## Step 2 - Compiling + +We will mostly skip the details of RemixIDE, as the UI can change from version to version. For now, we can just open +Remix and create a blank workspace. + +![Create Workspace](@site/static/img/how-tos/solidity_verifier_1.png) + +We will create a new file to contain the contract Nargo generated, and copy-paste its content. + +:::warning + +You'll likely see a warning advising you to not trust pasted code. While it is an important warning, it is irrelevant in the context of this guide and can be ignored. We will not be deploying anywhere near a mainnet. + +::: + +To compile our the verifier, we can navigate to the compilation tab: + +![Compilation Tab](@site/static/img/how-tos/solidity_verifier_2.png) + +Remix should automatically match a suitable compiler version. However, hitting the "Compile" button will most likely generate a "Stack too deep" error: + +![Stack too deep](@site/static/img/how-tos/solidity_verifier_3.png) + +This is due to the verify function needing to put many variables on the stack, but enabling the optimizer resolves the issue. 
To do this, let's open the "Advanced Configurations" tab and enable optimization. The default 200 runs will suffice. + +:::info + +This time we will see a warning about an unused function parameter. This is expected, as the `verify` function doesn't use the `_proof` parameter inside a solidity block, it is loaded from calldata and used in assembly. + +::: + +![Compilation success](@site/static/img/how-tos/solidity_verifier_4.png) + +## Step 3 - Deploying + +At this point we should have a compiled contract read to deploy. If we navigate to the deploy section in Remix, we will see many different environments we can deploy to. The steps to deploy on each environment would be out-of-scope for this guide, so we will just use the default Remix VM. + +Looking closely, we will notice that our "Solidity Verifier" is actually three contracts working together: + +- An `UltraVerificationKey` library which simply stores the verification key for our circuit. +- An abstract contract `BaseUltraVerifier` containing most of the verifying logic. +- A main `UltraVerifier` contract that inherits from the Base and uses the Key contract. + +Remix will take care of the dependencies for us so we can simply deploy the UltraVerifier contract by selecting it and hitting "deploy": + +![Deploying UltraVerifier](@site/static/img/how-tos/solidity_verifier_5.png) + +A contract will show up in the "Deployed Contracts" section, where we can retrieve the Verification Key Hash. This is particularly useful for double-checking the deployer contract is the correct one. + +:::note + +Why "UltraVerifier"? + +To be precise, the Noir compiler (`nargo`) doesn't generate the verifier contract directly. It compiles the Noir code into an intermediate language (ACIR), which is then executed by the backend. So it is the backend that returns the verifier smart contract, not Noir. + +In this case, the Barretenberg Backend uses the UltraPlonk proving system, hence the "UltraVerifier" name. 
+ +::: + +## Step 4 - Verifying + +To verify a proof using the Solidity verifier contract, we call the `verify` function in this extended contract: + +```solidity +function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) +``` + +When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. For `_proof`, run `nargo prove` and use the string in `proof/.proof` (adding the hex `0x` prefix). We can also copy the public input from `Verifier.toml`, as it will be properly formatted as 32-byte strings: + +``` +0x...... , [0x0000.....02] +``` + +A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): + +```solidity +function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) { + // ... + bytes32[] memory publicInputs = new bytes32[](4); + publicInputs[0] = merkleRoot; + publicInputs[1] = bytes32(proposalId); + publicInputs[2] = bytes32(vote); + publicInputs[3] = nullifierHash; + require(verifier.verify(proof, publicInputs), "Invalid proof"); +``` + +:::info[Return Values] + +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in +Noir. + +Under the hood, the return value is passed as an input to the circuit and is checked at the end of +the circuit program. 
+ +For example, if you have Noir program like this: + +```rust +fn main( + // Public inputs + pubkey_x: pub Field, + pubkey_y: pub Field, + // Private inputs + priv_key: Field, +) -> pub Field +``` + +the `verify` function will expect the public inputs array (second function parameter) to be of length 3, the two inputs and the return value. Like before, these values are populated in Verifier.toml after running `nargo prove`. + +Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`. + +In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`. + +::: + +:::tip[Structs] + +You can pass structs to the verifier contract. They will be flattened so that the array of inputs is 1-dimensional array. + +For example, consider the following program: + +```rust +struct Type1 { + val1: Field, + val2: Field, +} + +struct Nested { + t1: Type1, + is_true: bool, +} + +fn main(x: pub Field, nested: pub Nested, y: pub Field) { + //... +} +``` + +The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` + +::: + +The other function you can call is our entrypoint `verify` function, as defined above. + +:::tip + +It's worth noticing that the `verify` function is actually a `view` function. A `view` function does not alter the blockchain state, so it doesn't need to be distributed (i.e. it will run only on the executing node), and therefore doesn't cost any gas. + +This can be particularly useful in some situations. If Alice generated a proof and wants Bob to verify its correctness, Bob doesn't need to run Nargo, NoirJS, or any Noir specific infrastructure. He can simply make a call to the blockchain with the proof and verify it is correct without paying any gas. + +It would be incorrect to say that a Noir proof verification costs any gas at all. 
However, most of the time the result of `verify` is used to modify state (for example, to update a balance, a game state, etc). In that case the whole network needs to execute it, which does incur gas costs (calldata and execution, but not storage). + +::: + +## A Note on EVM chains + +ZK-SNARK verification depends on some precompiled cryptographic primitives such as Elliptic Curve Pairings (if you like complex math, you can read about EC Pairings [here](https://medium.com/@VitalikButerin/exploring-elliptic-curve-pairings-c73c1864e627)). Not all EVM chains support EC Pairings, notably some of the ZK-EVMs. This means that you won't be able to use the verifier contract in all of them. + +For example, chains like `zkSync ERA` and `Polygon zkEVM` do not currently support these precompiles, so proof verification via Solidity verifier contracts won't work. Here's a quick list of EVM chains that have been tested and are known to work: + +- Optimism +- Arbitrum +- Polygon PoS +- Scroll +- Celo + +If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. + +## What's next + +Now that you know how to call a Noir Solidity Verifier on a smart contract using Remix, you should be comfortable with using it with some programmatic frameworks, such as [hardhat](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) and [foundry](https://github.com/noir-lang/noir-starter/tree/main/with-foundry). + +You can find other tools, examples, boilerplates and libraries in the [awesome-noir](https://github.com/noir-lang/awesome-noir) repository. + +You should also be ready to write and deploy your first NoirJS app and start generating proofs on websites, phones, and NodeJS environments! 
Head on to the [NoirJS tutorial](../tutorials/noirjs_app.md) to learn how to do that. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/merkle-proof.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/merkle-proof.mdx new file mode 100644 index 000000000000..16c425bed766 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/merkle-proof.mdx @@ -0,0 +1,49 @@ +--- +title: Prove Merkle Tree Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. +keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +sidebar_position: 4 +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. + +```rust +use dep::std; + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message.as_slice()); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message.as_slice()); +``` + +The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree implementation without standard library. 
+> However, for most use cases, it is enough. In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +As an example, the merkle membership proof only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. + +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/using-devcontainers.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/using-devcontainers.mdx new file mode 100644 index 000000000000..727ec6ca6672 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/how_to/using-devcontainers.mdx @@ -0,0 +1,110 @@ +--- +title: Developer Containers and Codespaces +description: "Learn how to set up a devcontainer in your GitHub repository for a seamless coding experience with Codespaces. Follow our easy 8-step guide to create your own Noir environment without installing Nargo locally." +keywords: ["Devcontainer", "Codespaces", "GitHub", "Noir Environment", "Docker Image", "Development Environment", "Remote Coding", "GitHub Codespaces", "Noir Programming", "Nargo", "VSCode Extensions", "Noirup"] +sidebar_position: 1 +--- + +Adding a developer container configuration file to your Noir project is one of the easiest ways to unlock coding in the browser. + +## What's a devcontainer after all? + +A [Developer Container](https://containers.dev/) (devcontainer for short) is a Docker image that comes preloaded with the tools, extensions, and other resources you need to quickly get started or continue a project, without having to install Nargo locally. Think of it as a development environment in a box. 
+ +There are many advantages to this: + +- It's platform and architecture agnostic +- You don't need to have an IDE installed, or Nargo, or use a terminal at all +- It's safer for using on a public machine or public network + +One of the best ways of using devcontainers is... not using your machine at all, for maximum control, performance, and ease of use. +Enter Codespaces. + +## Codespaces + +If a devcontainer is just a Docker image, then what stops you from provisioning a `p3dn.24xlarge` AWS EC2 instance with 92 vCPUs and 768 GiB RAM and using it to prove your 10-gate SNARK proof? + +Nothing! Except perhaps the 30-40$ per hour it will cost you. + +The problem is that provisioning takes time, and I bet you don't want to see the AWS console every time you want to code something real quick. + +Fortunately, there's an easy and free way to get a decent remote machine ready and loaded in less than 2 minutes: Codespaces. [Codespaces is a Github feature](https://github.com/features/codespaces) that allows you to code in a remote machine by using devcontainers, and it's pretty cool: + +- You can start coding Noir in less than a minute +- It uses the resources of a remote machine, so you can code on your grandma's phone if needed be +- It makes it easy to share work with your frens +- It's fully reusable, you can stop and restart whenever you need to + +:::info + +Don't take out your wallet just yet. Free GitHub accounts get about [15-60 hours of coding](https://github.com/features/codespaces) for free per month, depending on the size of your provisioned machine. + +::: + +## Tell me it's _actually_ easy + +It is! + +Github comes with a default codespace and you can use it to code your own devcontainer. That's exactly what we will be doing in this guide. + + + +8 simple steps: + +#### 1. Create a new repository on GitHub. + +#### 2. Click "Start coding with Codespaces". This will use the default image. + +#### 3. 
Create a folder called `.devcontainer` in the root of your repository. + +#### 4. Create a Dockerfile in that folder, and paste the following code: + +```docker +FROM --platform=linux/amd64 node:lts-bookworm-slim +SHELL ["/bin/bash", "-c"] +RUN apt update && apt install -y curl bash git tar gzip libc++-dev +RUN curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +ENV PATH="/root/.nargo/bin:$PATH" +RUN noirup +ENTRYPOINT ["nargo"] +``` +#### 5. Create a file called `devcontainer.json` in the same folder, and paste the following code: + +```json +{ + "name": "Noir on Codespaces", + "build": { + "context": ".", + "dockerfile": "Dockerfile" + }, + "customizations": { + "vscode": { + "extensions": ["noir-lang.vscode-noir"] + } + } +} +``` +#### 6. Commit and push your changes + +#### 7. Wait for the container to build + +This will pull the new image and build it, so it could take a minute or so. + +#### 8. Done! +Just wait for the build to finish, and there's your easy Noir environment. + + +Refer to [noir-starter](https://github.com/noir-lang/noir-starter/) as an example of how devcontainers can be used together with codespaces. + + + +## How do I use it? + +Using the codespace is obviously much easier than setting it up. +Just navigate to your repository and click "Code" -> "Open with Codespaces". It should take a few seconds to load, and you're ready to go. + +:::info + +If you really like the experience, you can add a badge to your readme, links to existing codespaces, and more. +Check out the [official docs](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/facilitating-quick-creation-and-resumption-of-codespaces) for more info. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/index.mdx new file mode 100644 index 000000000000..75086ddcdded --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/index.mdx @@ -0,0 +1,67 @@ +--- +title: Noir Lang +hide_title: true +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by Rust that compiles to + an intermediate language which can be compiled to an arithmetic circuit or a rank-1 constraint system. +keywords: + [Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language] +sidebar_position: 0 +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +Noir Logo + +Noir is a Domain-Specific Language for SNARK proving systems developed by [Aztec Labs](https://aztec.network/). It allows you to generate complex Zero-Knowledge Programs (ZKP) by using simple and flexible syntax, requiring no previous knowledge on the underlying mathematics or cryptography. + +ZK programs are programs that can generate short proofs of a certain statement without revealing some details about it. You can read more about ZKPs [here](https://dev.to/spalladino/a-beginners-intro-to-coding-zero-knowledge-proofs-c56). + +## What's new about Noir? + +Noir works differently from most ZK languages by taking a two-pronged path. First, it compiles the program to an adaptable intermediate language known as ACIR. From there, depending on a given project's needs, ACIR can be further compiled into an arithmetic circuit for integration with the proving backend. + +:::info + +Noir is backend agnostic, which means it makes no assumptions on which proving backend powers the ZK proof. 
Being the language that powers [Aztec Contracts](https://docs.aztec.network/developers/contracts/main), it defaults to Aztec's Barretenberg proving backend. + +However, the ACIR output can be transformed to be compatible with other PLONK-based backends, or into a [rank-1 constraint system](https://www.rareskills.io/post/rank-1-constraint-system) suitable for backends such as Arkworks' Marlin. + +::: + +## Who is Noir for? + +Noir can be used both in complex cloud-based backends and in users' smartphones, requiring no knowledge of the underlying math or cryptography. From authorization systems that keep a password in the user's device, to complex on-chain verification of recursive proofs, Noir is designed to abstract away complexity without any significant overhead. Here are some examples of situations where Noir can be used: + + + + Noir Logo + + Aztec Contracts leverage Noir to allow for the storage and execution of private information. Writing an Aztec Contract is as easy as writing Noir, and Aztec developers can easily interact with the network storage and execution through the [Aztec.nr](https://docs.aztec.network/developers/contracts/main) library. + + + Solidity Verifier Example + Noir can auto-generate Solidity verifier contracts that verify Noir proofs. This allows for non-interactive verification of proofs containing private information in an immutable system. This feature powers a multitude of use-case scenarios, from P2P chess tournaments, to the [Aztec Layer-2 Blockchain](https://docs.aztec.network/). + + + Aztec Labs developed NoirJS, an easy interface to generate and verify Noir proofs in a Javascript environment. This allows for Noir to be used in webpages, mobile apps, games, and any other environment supporting JS execution in a standalone manner. + + + + +## Libraries + +Noir is meant to be easy to extend by simply importing Noir libraries just like in Rust. 
+The [awesome-noir repo](https://github.com/noir-lang/awesome-noir#libraries) is a collection of libraries developed by the Noir community. +Writing a new library is easy and makes code be composable and easy to reuse. See the section on [dependencies](noir/modules_packages_crates/dependencies.md) for more information. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/migration_notes.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/migration_notes.md new file mode 100644 index 000000000000..6bd740024e5b --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/migration_notes.md @@ -0,0 +1,105 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. + +### `backend encountered an error: libc++.so.1` + +Depending on your OS, you may encounter the following error when running `nargo prove` for the first time: + +```text +The backend encountered an error: "/home/codespace/.nargo/backends/acvm-backend-barretenberg/backend_binary: error while loading shared libraries: libc++.so.1: cannot open shared object file: No such file or directory\n" +``` + +Install the `libc++-dev` library with: + +```bash +sudo apt install libc++-dev +``` + +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. + +To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`. + +## ≥0.14 + +The index of the [for loops](noir/concepts/control_flow.md#loops) is now of type `u64` instead of `Field`. 
An example refactor would be: + +```rust +for i in 0..10 { + let i = i as Field; +} +``` + +## ≥v0.11.0 and Nargo backend + +From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading to the versions per usual steps might lead to: + +### `backend encountered an error` + +This is likely due to the existing locally installed version of proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo prove +``` + +with your Noir program. + +This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. + +### `backend encountered an error: illegal instruction` + +On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz +``` + +This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. 
+ +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as Nargo currently expects the backend to be zipped up. + +Then run: + +``` +DESIRED_BINARY_VERSION=0.8.1 nargo info +``` + +This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than the native binary. + +0.8.1 indicates bb.js version 0.8.1, so if you change that, it will update to a different version, or to the script's default version if none was supplied. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/_category_.json new file mode 100644 index 000000000000..7da08f8a8c5d --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Concepts", + "position": 0, + "collapsible": true, + "collapsed": true +} \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/assert.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/assert.md new file mode 100644 index 000000000000..bcff613a6952 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/assert.md @@ -0,0 +1,45 @@ +--- +title: Assert Function +description: + Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or + comparison expression that follows to be true, and what happens if the expression is false at + runtime. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +sidebar_position: 4 +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. 
If this expression is false at runtime, the program will fail to +be proven. Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +Aside from string literals, the optional message can be a format string or any other type supported as input for Noir's [print](../standard_library/logging.md) functions. This feature lets you incorporate runtime variables into your failed assertion logs: + +```rust +assert(x == y, f"Expected x == y, but got {x} == {y}"); +``` + +Using a variable as an assertion message directly: + +```rust +struct myStruct { + myField: Field +} + +let s = myStruct { myField: y }; +assert(s.myField == x, s); +``` + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/comments.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/comments.md new file mode 100644 index 000000000000..b51a85f5c949 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/comments.md @@ -0,0 +1,33 @@ +--- +title: Comments +description: + Learn how to write comments in Noir programming language. A comment is a line of code that is + ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments + are supported in Noir. +keywords: [Noir programming language, comments, single-line comments, multi-line comments] +sidebar_position: 10 +--- + +A comment is a line in your codebase which the compiler ignores; however, it can be read by +programmers. 
+ +Here is a single line comment: + +```rust +// This is a comment and is ignored +``` + +`//` is used to tell the compiler to ignore the rest of the line. + +Noir also supports multi-line block comments. Start a block comment with `/*` and end the block with `*/`. + +Noir does not natively support doc comments. You may be able to use [Rust doc comments](https://doc.rust-lang.org/reference/comments.html) in your code to leverage some Rust documentation build tools with Noir code. + +```rust +/* + This is a block comment describing a complex function. +*/ +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/control_flow.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/control_flow.md new file mode 100644 index 000000000000..045d3c3a5f58 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/control_flow.md @@ -0,0 +1,77 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +sidebar_position: 2 +--- + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. + +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +} +``` + +The index for loops is of type `u64`. 
+ +### Break and Continue + +In unconstrained code, `break` and `continue` are also allowed in `for` loops. These are only allowed +in unconstrained code since normal constrained code requires that Noir knows exactly how many iterations +a loop may have. `break` and `continue` can be used like so: + +```rust +for i in 0 .. 10 { + println("Iteration start") + + if i == 2 { + continue; + } + + if i == 5 { + break; + } + + println(i); +} +println("Loop end") +``` + +When used, `break` will end the current loop early and jump to the statement after the for loop. In the example +above, the `break` will stop the loop and jump to the `println("Loop end")`. + +`continue` will stop the current iteration of the loop, and jump to the start of the next iteration. In the example +above, `continue` will jump to `println("Iteration start")` when used. Note that the loop continues as normal after this. +The iteration variable `i` is still increased by one as normal when `continue` is used. + +`break` and `continue` cannot currently be used to jump out of more than a single loop at a time. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_bus.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_bus.md new file mode 100644 index 000000000000..e54fc861257b --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_bus.md @@ -0,0 +1,21 @@ +--- +title: Data Bus +sidebar_position: 13 +--- +**Disclaimer** this feature is experimental, do not use it! + +The data bus is an optimization that the backend can use to make recursion more efficient. +In order to use it, you must define some inputs of the program entry points (usually the `main()` +function) with the `call_data` modifier, and the return values with the `return_data` modifier. +These modifiers are incompatible with `pub` and `mut` modifiers. 
+ +## Example + +```rust +fn main(mut x: u32, y: call_data u32, z: call_data [u32;4] ) -> return_data u32 { + let a = z[x]; + a+y +} +``` + +As a result, both call_data and return_data will be treated as private inputs and encapsulated into a read-only array each, for the backend to process. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/_category_.json new file mode 100644 index 000000000000..5d694210bbf3 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/arrays.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/arrays.md new file mode 100644 index 000000000000..efce3e95d322 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/arrays.md @@ -0,0 +1,251 @@ +--- +title: Arrays +description: + Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code. +keywords: + [ + noir, + array type, + methods, + examples, + indexing, + ] +sidebar_position: 4 +--- + +An array is one way of grouping together values into one compound type. Array types can be inferred +or explicitly specified via the syntax `[<Type>; <Size>]`: + +```rust +fn main(x : Field, y : Field) { + let my_arr = [x, y]; + let your_arr: [Field; 2] = [x, y]; +} +``` + +Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. + +Array elements can be accessed using indexing: + +```rust +fn main() { + let a = [1, 2, 3, 4, 5]; + + let first = a[0]; + let second = a[1]; +} +``` + +All elements in an array must be of the same type (i.e. homogeneous). 
That is, an array cannot group +a `Field` value and a `u8` value together for example. + +You can write mutable arrays, like: + +```rust +fn main() { + let mut arr = [1, 2, 3, 4, 5]; + assert(arr[0] == 1); + + arr[0] = 42; + assert(arr[0] == 42); +} +``` + +You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0. + +```rust +let array: [Field; 32] = [0; 32]; +``` + +Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array: + +```rust +let array: [Field; 32] = [0; 32]; +let sl = array.as_slice() +``` + +You can define multidimensional arrays: + +```rust +let array : [[Field; 2]; 2]; +let element = array[0][0]; +``` +However, multidimensional slices are not supported. For example, the following code will error at compile time: +```rust +let slice : [[Field]] = &[]; +``` + +## Types + +You can create arrays of primitive types or structs. There is not yet support for nested arrays +(arrays of arrays) or arrays of structs that contain arrays. + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for arrays. +Each of these functions are located within the generic impl `impl [T; N] {`. +So anywhere `self` appears, it refers to the variable `self: [T; N]`. + +### len + +Returns the length of an array + +```rust +fn len(self) -> Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. 
If you need a sort function to +sort any type, you should use the function `sort_via` described below. + +```rust +fn sort(self) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(self, ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. + +```rust +fn map(self, f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(self, mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as starting element. 
+ +```rust +fn reduce(self, f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/booleans.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/booleans.md new file mode 100644 index 000000000000..69826fcd724f --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/booleans.md @@ -0,0 +1,31 @@ +--- +title: Booleans +description: + Delve into the Boolean data type in Noir. Understand its methods, practical examples, and best practices for using Booleans in your Noir programs. +keywords: + [ + noir, + boolean type, + methods, + examples, + logical operations, + ] +sidebar_position: 2 +--- + + +The `bool` type in Noir has two possible values: `true` and `false`: + +```rust +fn main() { + let t = true; + let f: bool = false; +} +``` + +> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for +> `false` in _Verifier.toml_. + +The boolean type is most commonly used in conditionals like `if` expressions and `assert` +statements. More about conditionals is covered in the [Control Flow](../control_flow) and +[Assert Function](../assert) sections. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/fields.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/fields.md new file mode 100644 index 000000000000..a10a48107883 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/fields.md @@ -0,0 +1,192 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +sidebar_position: 0 +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. + +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. `u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. 
+ +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### assert_max_bit_size + +Adds a constraint to specify that the field can be represented with `bit_size` number of bits + +```rust +fn assert_max_bit_size(self, bit_size: u32) +``` + +example: + +```rust +fn main() { + let field = 2 + field.assert_max_bit_size(32); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. 
+ +```rust +fn sgn0(self) -> u1 +``` + + +### lt + +Returns true if the field is less than the other field + +```rust +pub fn lt(self, another: Field) -> bool +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/function_types.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/function_types.md new file mode 100644 index 000000000000..f6121af17e24 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/function_types.md @@ -0,0 +1,26 @@ +--- +title: Function types +sidebar_position: 10 +--- + +Noir supports higher-order functions. The syntax for a function type is as follows: + +```rust +fn(arg1_type, arg2_type, ...) -> return_type +``` + +Example: + +```rust +fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field + assert(f() == 100); +} + +fn main() { + assert_returns_100(|| 100); // ok + assert_returns_100(|| 150); // fails +} +``` + +A function type also has an optional capture environment - this is necessary to support closures. +See [Lambdas](../lambdas.md) for more details. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/index.md new file mode 100644 index 000000000000..357813c147ab --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/index.md @@ -0,0 +1,110 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. +keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. 
+ +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). +> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. 
Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. + +## Type Aliases + +A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](../generics.md): + +```rust +type Id = Size; + +fn main() { + let id: Id = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can even refer to other aliases. An error will be issued if they form a cycle: + +```rust +// Ok! +type A = B; +type B = Field; + +type Bad1 = Bad2; + +// error: Dependency cycle found +type Bad2 = Bad1; +// ^^^^^^^^^^^ 'Bad2' recursively depends on itself: Bad2 -> Bad1 -> Bad2 +``` + +### BigInt + +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/integers.md new file mode 100644 index 000000000000..1c6b375db49e --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/integers.md @@ -0,0 +1,155 @@ +--- +title: Integers +description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. 
+keywords: [noir, integer types, methods, examples, arithmetic] +sidebar_position: 1 +--- + +An integer type is a range constrained field type. The Noir frontend supports both unsigned and signed integer types. The allowed sizes are 1, 8, 32 and 64 bits. + +:::info + +When an integer is defined in Noir without a specific type, it will default to `Field`. + +The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible. + +::: + +## Unsigned Integers + +An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: u8 = 1; + let y: u8 = 1; + let z = x + y; + assert (z == 2); +} +``` + +The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $\\2^{8}-1\\$). + +## Signed Integers + +A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: i8 = -1; + let y: i8 = -1; + let z = x + y; + assert (z == -2); +} +``` + +The bit size determines the maximum and minimum range of value the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $\\-2^{7}\\$ to $\\2^{7}-1\\$). + +## 128 bits Unsigned Integers + +The built-in structure `U128` allows you to use 128-bit unsigned integers almost like a native integer type. However, there are some differences to keep in mind: +- You cannot cast between a native integer and `U128` +- There is a higher performance cost when using `U128`, compared to a native type. + +Conversion between unsigned integer types and U128 are done through the use of `from_integer` and `to_integer` functions. `from_integer` also accepts the `Field` type as input. 
+ +```rust +fn main() { + let x = U128::from_integer(23); + let y = U128::from_hex("0x7"); + let z = x + y; + assert(z.to_integer() == 30); +} +``` + +`U128` is implemented with two 64 bits limbs, representing the low and high bits, which explains the performance cost. You should expect `U128` to be twice more costly for addition and four times more costly for multiplication. +You can construct a U128 from its limbs: +```rust +fn main(x: u64, y: u64) { + let x = U128::from_u64s_be(x,y); + assert(z.hi == x as Field); + assert(z.lo == y as Field); +} +``` + +Note that the limbs are stored as Field elements in order to avoid unnecessary conversions. +Apart from this, most operations will work as usual: + +```rust +fn main(x: U128, y: U128) { + // multiplication + let c = x * y; + // addition and subtraction + let c = c - x + y; + // division + let c = x / y; + // bit operation; + let c = x & y | y; + // bit shift + let c = x << y; + // comparisons; + let c = x < y; + let c = x == y; +} +``` + +## Overflows + +Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove: + +```rust +fn main(x: u8, y: u8) { + let z = x + y; +} +``` + +With: + +```toml +x = "255" +y = "1" +``` + +Would result in: + +``` +$ nargo prove +error: Assertion failed: 'attempt to add with overflow' +┌─ ~/src/main.nr:9:13 +│ +│ let z = x + y; +│ ----- +│ += Call stack: + ... +``` + +A similar error would happen with signed integers: + +```rust +fn main() { + let x: i8 = -118; + let y: i8 = -11; + let z = x + y; +} +``` + +### Wrapping methods + +Although integer overflow is expected to error, some use-cases rely on wrapping. 
For these use-cases, the standard library provides `wrapping` variants of certain common operations: + +```rust +fn wrapping_add(x: T, y: T) -> T; +fn wrapping_sub(x: T, y: T) -> T; +fn wrapping_mul(x: T, y: T) -> T; +``` + +Example of how it is used: + +```rust +use dep::std; + +fn main(x: u8, y: u8) -> pub u8 { + std::wrapping_add(x, y) +} +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/references.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/references.md new file mode 100644 index 000000000000..a5293d11cfb9 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/references.md @@ -0,0 +1,23 @@ +--- +title: References +sidebar_position: 9 +--- + +Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it. + +Example: + +```rust +fn main() { + let mut x = 2; + + // you can reference x as &mut and pass it to multiplyBy2 + multiplyBy2(&mut x); +} + +// you can access &mut here +fn multiplyBy2(x: &mut Field) { + // and dereference it with * + *x = *x * 2; +} +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/slices.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/slices.mdx new file mode 100644 index 000000000000..4eccc677b80b --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/slices.mdx @@ -0,0 +1,195 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. 
+keywords: [noir, slice type, methods, examples, subarrays] +sidebar_position: 5 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. + +```rust +use dep::std::slice; + +fn main() -> pub Field { + let mut slice: [Field] = &[0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +To write a slice literal, use a preceeding ampersand as in: `&[0; 2]` or +`&[1, 2, 3]`. + +It is important to note that slices are not references to arrays. In Noir, +`&[..]` is more similar to an immutable, growable vector. + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. + +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = &[0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. + +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = &[]; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. 
+ +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. + +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. + +### append + +Loops over a slice and adds it to the end of another. + +```rust +fn append(mut self, other: Self) -> Self +``` + +Example: + +```rust +let append = &[1, 2].append(&[3, 4, 5]); +``` + +### insert + +Inserts an element at a specified index and shifts all following elements by 1. + +```rust +fn insert(_self: Self, _index: Field, _elem: T) -> Self +``` + +Example: + +```rust +new_slice = rest_of_slice.insert(2, 100); +assert(new_slice[2] == 100); +``` + +View the corresponding test file [here][test-file]. + +### remove + +Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. + +```rust +fn remove(_self: Self, _index: Field) -> (Self, T) +``` + +Example: + +```rust +let (remove_slice, removed_elem) = slice.remove(3); +``` + +### len + +Returns the length of a slice + +```rust +fn len(self) -> Field +``` + +Example: + +```rust +fn main() { + let slice = &[42, 42]; + assert(slice.len() == 2); +} +``` + +### as_array + +Converts this slice into an array. + +Make sure to specify the size of the resulting array. +Panics if the resulting array length is different than the slice's length. + +```rust +fn as_array(self) -> [T; N] +``` + +Example: + +```rust +fn main() { + let slice = &[5, 6]; + + // Always specify the length of the resulting array! 
+ let array: [Field; 2] = slice.as_array(); + + assert(array[0] == slice[0]); + assert(array[1] == slice[1]); +} +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/strings.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/strings.md new file mode 100644 index 000000000000..311dfd644168 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/strings.md @@ -0,0 +1,80 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +sidebar_position: 3 +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with +`println()`. See more about [Logging](../../standard_library/logging). + +```rust +use dep::std; + +fn main(message : pub str<11>, hex_as_string : str<4>) { + println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. 
+ +```rust +fn main() { + let message = "hello world"; + let message_bytes = message.as_bytes(); + let mut message_vec = message.as_bytes_vec(); + assert(message_bytes.len() == 11); + assert(message_bytes[0] == 104); + assert(message_bytes[0] == message_vec.get(0)); +} +``` + +## Escape characters + +You can use escape characters for your strings: + +| Escape Sequence | Description | +|-----------------|-----------------| +| `\r` | Carriage Return | +| `\n` | Newline | +| `\t` | Tab | +| `\0` | Null Character | +| `\"` | Double Quote | +| `\\` | Backslash | + +Example: + +```rust +let s = "Hello \"world" // prints "Hello "world" +let s = "hey \tyou"; // prints "hey you" +``` + +## Raw strings + +A raw string begins with the letter `r` and is optionally delimited by a number of hashes `#`. + +Escape characters are *not* processed within raw strings. All contents are interpreted literally. + +Example: + +```rust +let s = r"Hello world"; +let s = r#"Simon says "hello world""#; + +// Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes +let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/structs.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/structs.md new file mode 100644 index 000000000000..dbf68c99813c --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/structs.md @@ -0,0 +1,70 @@ +--- +title: Structs +description: + Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs. +keywords: + [ + noir, + struct type, + methods, + examples, + data structures, + ] +sidebar_position: 8 +--- + +A struct also allows for grouping multiple values of different types. Unlike tuples, we can also +name each field. 
+ +> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the +> field type of Noir. + +Defining a struct requires giving it a name and listing each field within as `: ` pairs: + +```rust +struct Animal { + hands: Field, + legs: Field, + eyes: u8, +} +``` + +An instance of a struct can then be created with actual values in `: ` pairs in any +order. Struct fields are accessible using their given names: + +```rust +fn main() { + let legs = 4; + + let dog = Animal { + eyes: 2, + hands: 0, + legs, + }; + + let zero = dog.hands; +} +``` + +Structs can also be destructured in a pattern, binding each field to a new variable: + +```rust +fn main() { + let Animal { hands, legs: feet, eyes } = get_octopus(); + + let ten = hands + feet + eyes as u8; +} + +fn get_octopus() -> Animal { + let octopus = Animal { + hands: 0, + legs: 8, + eyes: 2, + }; + + octopus +} +``` + +The new variables can be bound with names different from the original struct field names, as +showcased in the `legs --> feet` binding in the example above. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/tuples.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/tuples.md new file mode 100644 index 000000000000..2ec5c9c41135 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/data_types/tuples.md @@ -0,0 +1,48 @@ +--- +title: Tuples +description: + Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code. 
+keywords: + [ + noir, + tuple type, + methods, + examples, + multi-value containers, + ] +sidebar_position: 7 +--- + +A tuple collects multiple values like an array, but with the added ability to collect values of +different types: + +```rust +fn main() { + let tup: (u8, u64, Field) = (255, 500, 1000); +} +``` + +One way to access tuple elements is via destructuring using pattern matching: + +```rust +fn main() { + let tup = (1, 2); + + let (one, two) = tup; + + let three = one + two; +} +``` + +Another way to access tuple elements is via direct member access, using a period (`.`) followed by +the index of the element we want to access. Index `0` corresponds to the first tuple element, `1` to +the second and so on: + +```rust +fn main() { + let tup = (5, 6, 7, 8); + + let five = tup.0; + let eight = tup.3; +} +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/functions.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/functions.md new file mode 100644 index 000000000000..f656cdfd97a1 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/functions.md @@ -0,0 +1,226 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +sidebar_position: 1 +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. 
To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../../tooling/testing.md) don't differentiate between `main` and any other function. 
The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main(&[1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. + +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. + +```rust +struct MyStruct { + foo: Field, + bar: Field, +} + +impl MyStruct { + fn new(foo: Field) -> MyStruct { + MyStruct { + foo, + bar: 2, + } + } + + fn sum(self) -> Field { + self.foo + self.bar + } +} + +fn main() { + let s = MyStruct::new(40); + assert(s.sum() == 42); +} +``` + +Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as +follows: + +```rust +assert(MyStruct::sum(s) == 42); +``` + +It is also possible to specialize which method is chosen depending on the [generic](./generics.md) type that is used. In this example, the `foo` function returns different values depending on its type: + +```rust +struct Foo {} + +impl Foo { + fn foo(self) -> Field { 1 } +} + +impl Foo { + fn foo(self) -> Field { 2 } +} + +fn main() { + let f1: Foo = Foo{}; + let f2: Foo = Foo{}; + assert(f1.foo() + f2.foo() == 3); +} +``` + +Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo` and `Foo` like we do above, we cannot also define `foo` in an `impl Foo` since it would be ambiguous which version of `foo` to choose. 
+ +```rust +// Including this impl in the same project as the above snippet would +// cause an overlapping impls error +impl Foo { + fn foo(self) -> Field { 3 } +} +``` + +## Lambdas + +Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +See [Lambdas](./lambdas.md) for more details. + +## Attributes + +Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`. + +Supported attributes include: + +- **builtin**: the function is implemented by the compiler, for efficiency purposes. +- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` +- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details +- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details. +- **test**: mark the function as unit tests. See [Tests](../../tooling/testing.md) for more details + +### Field Attribute + +The field attribute defines which field the function is compatible for. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field. +The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form. +As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve. + +Example: we define the function `foo()` three times below. 
Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve. + +```rust +#[field(bn254)] +fn foo() -> u32 { + 1 +} + +#[field(23)] +fn foo() -> u32 { + 2 +} + +// This commented code would not compile as foo would be defined twice because it is the same field as bn254 +// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)] +// fn foo() -> u32 { +// 2 +// } + +#[field(bls12_381)] +fn foo() -> u32 { + 3 +} +``` + +If the field name is not known to Noir, it will discard the function. Field names are case insensitive. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/generics.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/generics.md new file mode 100644 index 000000000000..ddd42bf1f9b7 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/generics.md @@ -0,0 +1,106 @@ +--- +title: Generics +description: Learn how to use Generics in Noir +keywords: [Noir, Rust, generics, functions, structs] +sidebar_position: 7 +--- + +Generics allow you to use the same functions with multiple different concrete data types. You can +read more about the concept of generics in the Rust documentation +[here](https://doc.rust-lang.org/book/ch10-01-syntax.html). + +Here is a trivial example showing the identity function that supports any type. In Rust, it is +common to refer to the most general type as `T`. We follow the same convention in Noir. + +```rust +fn id(x: T) -> T { + x +} +``` + +## In Structs + +Generics are useful for specifying types in structs. For example, we can specify that a field in a +struct will be of a certain generic type. In this case `value` is of type `T`. + +```rust +struct RepeatedValue { + value: T, + count: Field, +} + +impl RepeatedValue { + fn print(self) { + for _i in 0 .. 
self.count { + println(self.value); + } + } +} + +fn main() { + let repeated = RepeatedValue { value: "Hello!", count: 2 }; + repeated.print(); +} +``` + +The `print` function will print `Hello!` an arbitrary number of times, twice in this case. + +If we want to be generic over array lengths (which are type-level integers), we can use numeric +generics. Using these looks just like using regular generics, but these generics can resolve to +integers at compile-time, rather than resolving to types. Here's an example of a struct that is +generic over the size of the array it contains internally: + +```rust +struct BigInt { + limbs: [u32; N], +} + +impl BigInt { + // `N` is in scope of all methods in the impl + fn first(first: BigInt, second: BigInt) -> Self { + assert(first.limbs != second.limbs); + first + + fn second(first: BigInt, second: Self) -> Self { + assert(first.limbs != second.limbs); + second + } +} +``` + +## Calling functions on generic parameters + +Since a generic type `T` can represent any type, how can we call functions on the underlying type? +In other words, how can we go from "any type `T`" to "any type `T` that has certain methods available?" + +This is what [traits](../concepts/traits) are for in Noir. 
Here's an example of a function generic over +any type `T` that implements the `Eq` trait for equality: + +```rust +fn first_element_is_equal(array1: [T; N], array2: [T; N]) -> bool + where T: Eq +{ + if (array1.len() == 0) | (array2.len() == 0) { + true + } else { + array1[0] == array2[0] + } +} + +fn main() { + assert(first_element_is_equal([1, 2, 3], [1, 5, 6])); + + // We can use first_element_is_equal for arrays of any type + // as long as we have an Eq impl for the types we pass in + let array = [MyStruct::new(), MyStruct::new()]; + assert(array_eq(array, array, MyStruct::eq)); +} + +impl Eq for MyStruct { + fn eq(self, other: MyStruct) -> bool { + self.foo == other.foo + } +} +``` + +You can find more details on traits and trait implementations on the [traits page](../concepts/traits). diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/globals.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/globals.md new file mode 100644 index 000000000000..063a3d89248d --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/globals.md @@ -0,0 +1,72 @@ +--- +title: Global Variables +description: + Learn about global variables in Noir. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, globals, global variables, constants] +sidebar_position: 8 +--- + +## Globals + + +Noir supports global variables. The global's type can be inferred by the compiler entirely: + +```rust +global N = 5; // Same as `global N: Field = 5` + +global TUPLE = (3, 2); + +fn main() { + assert(N == 5); + assert(N == TUPLE.0 + TUPLE.1); +} +``` + +:::info + +Globals can be defined as any expression, so long as they don't depend on themselves - otherwise there would be a dependency cycle! 
For example: + +```rust +global T = foo(T); // dependency error +``` + +::: + + +If they are initialized to a literal integer, globals can be used to specify an array's length: + +```rust +global N: Field = 2; + +fn main(y : [Field; N]) { + assert(y[0] == y[1]) +} +``` + +A global from another module can be imported or referenced externally like any other name: + +```rust +global N = 20; + +fn main() { + assert(my_submodule::N != N); +} + +mod my_submodule { + global N: Field = 10; +} +``` + +When a global is used, Noir replaces the name with its definition on each occurrence. +This means globals defined using function calls will repeat the call each time they're used: + +```rust +global RESULT = foo(); + +fn foo() -> [Field; 100] { ... } +``` + +This is usually fine since Noir will generally optimize any function call that does not +refer to a program input into a constant. It should be kept in mind however, if the called +function performs side-effects like `println`, as these will still occur on each use. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/lambdas.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/lambdas.md new file mode 100644 index 000000000000..be3c7e0b5caa --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/lambdas.md @@ -0,0 +1,81 @@ +--- +title: Lambdas +description: Learn how to use anonymous functions in Noir programming language. +keywords: [Noir programming language, lambda, closure, function, anonymous function] +sidebar_position: 9 +--- + +## Introduction + +Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`. 
+ +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +A block can be used as the body of a lambda, allowing you to declare local variables inside it: + +```rust +let cool = || { + let x = 100; + let y = 100; + x + y +} + +assert(cool() == 200); +``` + +## Closures + +Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**. In this example `x` is defined inside `main` and is accessed from within the lambda: + +```rust +fn main() { + let x = 100; + let closure = || x + 150; + assert(closure() == 250); +} +``` + +## Passing closures to higher-order functions + +It may catch you by surprise that the following code fails to compile: + +```rust +fn foo(f: fn () -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // error :( +} +``` + +The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo` +expects a regular function as an argument - those are incompatible. +:::note + +Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters. + +E.g. in |x| x + y, y would be a captured variable, but x would not be, since it is a parameter of the closure. + +::: +The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure - +in this example that's `(Field, Field)`. 
+ +The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called +with closures with any environment, as well as with regular functions: + +```rust +fn foo(f: fn[Env]() -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // compiles fine + assert(foo(|| 60) == 60); // compiles fine +} +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/mutability.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/mutability.md new file mode 100644 index 000000000000..fdeef6a87c53 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/mutability.md @@ -0,0 +1,121 @@ +--- +title: Mutability +description: + Learn about mutable variables in Noir. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables] +sidebar_position: 8 +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. 
+ +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: i32) { + x = 4; +} +``` + +## Non-local mutability + +Non-local mutability can be achieved through the mutable reference type `&mut T`: + +```rust +fn set_to_zero(x: &mut Field) { + *x = 0; +} + +fn main() { + let mut y = 42; + set_to_zero(&mut y); + assert(*y == 0); +} +``` + +When creating a mutable reference, the original variable being referred to (`y` in this +example) must also be mutable. Since mutable references are a reference type, they must +be explicitly dereferenced via `*` to retrieve the underlying value. Note that this yields +a copy of the value, so mutating this copy will not change the original value behind the +reference: + +```rust +fn main() { + let mut x = 1; + let x_ref = &mut x; + + let mut y = *x_ref; + let y_ref = &mut y; + + x = 2; + *x_ref = 3; + + y = 4; + *y_ref = 5; + + assert(x == 3); + assert(*x_ref == 3); + assert(y == 5); + assert(*y_ref == 5); +} +``` + +Note that types in Noir are actually deeply immutable so the copy that occurs when +dereferencing is only a conceptual copy - no additional constraints will occur. + +Mutable references can also be stored within structs. Note that there is also +no lifetime parameter on these unlike rust. This is because the allocated memory +always lasts the entire program - as if it were an array of one element. + +```rust +struct Foo { + x: &mut Field +} + +impl Foo { + fn incr(mut self) { + *self.x += 1; + } +} + +fn main() { + let foo = Foo { x: &mut 0 }; + foo.incr(); + assert(*foo.x == 1); +} +``` + +In general, you should avoid non-local & shared mutability unless it is needed. Sticking +to only local mutability will improve readability and potentially improve compiler optimizations as well. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/ops.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/ops.md new file mode 100644 index 000000000000..c35c36c38a90 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/ops.md @@ -0,0 +1,98 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +sidebar_position: 3 +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer, shift must be u8 | +| >> | Right shift an integer by another integer amount | Types must be integer, shift must be u8 | +| ! 
| Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. + +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. 
For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/oracles.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/oracles.md new file mode 100644 index 000000000000..aa380b5f7b87 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/oracles.md @@ -0,0 +1,31 @@ +--- +title: Oracles +description: Dive into how Noir supports Oracles via RPC calls, and learn how to declare an Oracle in Noir with our comprehensive guide. +keywords: + - Noir + - Oracles + - RPC Calls + - Unconstrained Functions + - Programming + - Blockchain +sidebar_position: 6 +--- + +:::note + +This is an experimental feature that is not fully documented. If you notice any outdated information or potential improvements to this page, pull request contributions are very welcome: https://github.com/noir-lang/noir + +::: + +Noir has support for Oracles via RPC calls. This means Noir will make an RPC call and use the return value for proof generation. + +Since Oracles are not resolved by Noir, they are [`unconstrained` functions](./unconstrained.md) + +You can declare an Oracle through the `#[oracle()]` flag. Example: + +```rust +#[oracle(get_number_sequence)] +unconstrained fn get_number_sequence(_size: Field) -> [Field] {} +``` + +The timeout for when using an external RPC oracle resolver can be set with the `NARGO_FOREIGN_CALL_TIMEOUT` environment variable. This timeout is in units of milliseconds. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/shadowing.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/shadowing.md new file mode 100644 index 000000000000..5ce6130d2011 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/shadowing.md @@ -0,0 +1,44 @@ +--- +title: Shadowing +sidebar_position: 12 +--- + +Noir allows for inheriting variables' values and re-declaring them with the same name similar to Rust, known as shadowing. + +For example, the following function is valid in Noir: + +```rust +fn main() { + let x = 5; + + { + let x = x * 2; + assert (x == 10); + } + + assert (x == 5); +} +``` + +In this example, a variable x is first defined with the value 5. + +The local scope that follows shadows the original x, i.e. creates a local mutable x based on the value of the original x. It is given a value of 2 times the original x. + +When we return to the main scope, x once again refers to just the original x, which stays at the value of 5. + +## Temporal mutability + +One way that shadowing is useful, in addition to ergonomics across scopes, is for temporarily mutating variables. + +```rust +fn main() { + let age = 30; + // age = age + 5; // Would error as `age` is immutable by default. + + let mut age = age + 5; // Temporarily mutates `age` with a new value. + + let age = age; // Locks `age`'s mutability again. + + assert (age == 35); +} +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/traits.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/traits.md new file mode 100644 index 000000000000..ef1445a59076 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/traits.md @@ -0,0 +1,389 @@ +--- +title: Traits +description: + Traits in Noir can be used to abstract out a common interface for functions across + several data types. 
+keywords: [noir programming language, traits, interfaces, generic, protocol] +sidebar_position: 14 +--- + +## Overview + +Traits in Noir are a useful abstraction similar to interfaces or protocols in other languages. Each trait defines +the interface of several methods contained within the trait. Types can then implement this trait by providing +implementations for these methods. For example in the program: + +```rust +struct Rectangle { + width: Field, + height: Field, +} + +impl Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +fn log_area(r: Rectangle) { + println(r.area()); +} +``` + +We have a function `log_area` to log the area of a `Rectangle`. Now how should we change the program if we want this +function to work on `Triangle`s as well?: + +```rust +struct Triangle { + width: Field, + height: Field, +} + +impl Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Making `log_area` generic over all types `T` would be invalid since not all types have an `area` method. Instead, we can +introduce a new `Area` trait and make `log_area` generic over all types `T` that implement `Area`: + +```rust +trait Area { + fn area(self) -> Field; +} + +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +We also need to explicitly implement `Area` for `Rectangle` and `Triangle`. We can do that by changing their existing +impls slightly. Note that the parameter types and return type of each of our `area` methods must match those defined +by the `Area` trait. + +```rust +impl Area for Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +impl Area for Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Now we have a working program that is generic over any type of Shape that is used! Others can even use this program +as a library with their own types - such as `Circle` - as long as they also implement `Area` for these types. 
+ +## Where Clauses + +As seen in `log_area` above, when we want to create a function or method that is generic over any type that implements +a trait, we can add a where clause to the generic function. + +```rust +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +It is also possible to apply multiple trait constraints on the same variable at once by combining traits with the `+` +operator. Similarly, we can have multiple trait constraints by separating each with a comma: + +```rust +fn foo(elements: [T], thing: U) where + T: Default + Add + Eq, + U: Bar, +{ + let mut sum = T::default(); + + for element in elements { + sum += element; + } + + if sum == T::default() { + thing.bar(); + } +} +``` + +## Generic Implementations + +You can add generics to a trait implementation by adding the generic list after the `impl` keyword: + +```rust +trait Second { + fn second(self) -> Field; +} + +impl Second for (T, Field) { + fn second(self) -> Field { + self.1 + } +} +``` + +You can also implement a trait for every type this way: + +```rust +trait Debug { + fn debug(self); +} + +impl Debug for T { + fn debug(self) { + println(self); + } +} + +fn main() { + 1.debug(); +} +``` + +### Generic Trait Implementations With Where Clauses + +Where clauses can also be placed on trait implementations themselves to restrict generics in a similar way. +For example, while `impl Foo for T` implements the trait `Foo` for every type, `impl Foo for T where T: Bar` +will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types. +For example, here is the implementation for array equality: + +```rust +impl Eq for [T; N] where T: Eq { + // Test if two arrays have the same elements. + // Because both arrays must have length N, we know their lengths already match. + fn eq(self, other: Self) -> bool { + let mut result = true; + + for i in 0 .. 
self.len() { + // The T: Eq constraint is needed to call == on the array elements here + result &= self[i] == other[i]; + } + + result + } +} +``` + +## Generic Traits + +Traits themselves can also be generic by placing the generic arguments after the trait name. These generics are in +scope of every item within the trait. + +```rust +trait Into { + // Convert `self` to type `T` + fn into(self) -> T; +} +``` + +When implementing generic traits the generic arguments of the trait must be specified. This is also true anytime +when referencing a generic trait (e.g. in a `where` clause). + +```rust +struct MyStruct { + array: [Field; 2], +} + +impl Into<[Field; 2]> for MyStruct { + fn into(self) -> [Field; 2] { + self.array + } +} + +fn as_array(x: T) -> [Field; 2] + where T: Into<[Field; 2]> +{ + x.into() +} + +fn main() { + let array = [1, 2]; + let my_struct = MyStruct { array }; + + assert_eq(as_array(my_struct), array); +} +``` + +## Trait Methods With No `self` + +A trait can contain any number of methods, each of which have access to the `Self` type which represents each type +that eventually implements the trait. Similarly, the `self` variable is available as well but is not required to be used. +For example, we can define a trait to create a default value for a type. This trait will need to return the `Self` type +but doesn't need to take any parameters: + +```rust +trait Default { + fn default() -> Self; +} +``` + +Implementing this trait can be done similarly to any other trait: + +```rust +impl Default for Field { + fn default() -> Field { + 0 + } +} + +struct MyType {} + +impl Default for MyType { + fn default() -> Field { + MyType {} + } +} +``` + +However, since there is no `self` parameter, we cannot call it via the method call syntax `object.method()`. +Instead, we'll need to refer to the function directly. This can be done either by referring to the +specific impl `MyType::default()` or referring to the trait itself `Default::default()`. 
In the latter +case, type inference determines the impl that is selected.
+ +```rust +trait Sub { + fn sub(self, other: Self) -> Self; +} + +struct NonZero { + value: T, +} + +impl Sub for NonZero { + fn sub(self, other: Self) -> Self { + let value = self.value - other.value; + assert(value != 0); + NonZero { value } + } +} +``` + +## Overlapping Implementations + +Overlapping implementations are disallowed by Noir to ensure Noir's decision on which impl to select is never ambiguous. +This means if a trait `Foo` is already implemented +by a type `Bar` for all `T`, then we cannot also have a separate impl for `Bar` (or any other +type argument). Similarly, if there is an impl for all `T` such as `impl Debug for T`, we cannot create +any more impls to `Debug` for other types since it would be ambiguous which impl to choose for any given +method call. + +```rust +trait Trait {} + +// Previous impl defined here +impl Trait for (A, B) {} + +// error: Impl for type `(Field, Field)` overlaps with existing impl +impl Trait for (Field, Field) {} +``` + +## Trait Coherence + +Another restriction on trait implementations is coherence. This restriction ensures other crates cannot create +impls that may overlap with other impls, even if several unrelated crates are used as dependencies in the same +program. + +The coherence restriction is: to implement a trait, either the trait itself or the object type must be declared +in the crate the impl is in. + +In practice this often comes up when using types provided by libraries. If a library provides a type `Foo` that does +not implement a trait in the standard library such as `Default`, you may not `impl Default for Foo` in your own crate. +While restrictive, this prevents later issues or silent changes in the program if the `Foo` library later added its +own impl for `Default`. If you are a user of the `Foo` library in this scenario and need a trait not implemented by the +library your choices are to either submit a patch to the library or use the newtype pattern. 
+ +### The Newtype Pattern + +The newtype pattern gets around the coherence restriction by creating a new wrapper type around the library type +that we cannot create `impl`s for. Since the new wrapper type is defined in our current crate, we can create +impls for any trait we need on it. + +```rust +struct Wrapper { + foo: dep::some_library::Foo, +} + +impl Default for Wrapper { + fn default() -> Wrapper { + Wrapper { + foo: dep::some_library::Foo::new(), + } + } +} +``` + +Since we have an impl for our own type, the behavior of this code will not change even if `some_library` is updated +to provide its own `impl Default for Foo`. The downside of this pattern is that it requires extra wrapping and +unwrapping of values when converting to and from the `Wrapper` and `Foo` types. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/unconstrained.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/unconstrained.md new file mode 100644 index 000000000000..b8e71fe65f08 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/concepts/unconstrained.md @@ -0,0 +1,99 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." + +keywords: [Noir programming language, unconstrained, open] +sidebar_position: 5 +--- + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. + +## Why? + +Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. 
Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. + +## Example + +An in depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. + +Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 +Backend circuit size: 3619 +``` + +A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the AND against 0xff. This saves us ~480 gates in total. 
+ +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 +Backend circuit size: 3143 +``` + +Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating out using num, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. + +It turns out that truncating a u72 into a u8 is hard to do inside a snark, each time we do as u8 we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate num from out than the other way around. All we need to do is multiply each element of out by a constant and add them all together, both relatively easy operations inside a snark. + +We can then run u72_to_u8 as unconstrained brillig code in order to calculate out, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back num. This looks a little like the below: + +```rust +fn main(num: u72) -> pub [u8; 8] { + let out = u72_to_u8(num); + + let mut reconstructed_num: u72 = 0; + for i in 0..8 { + reconstructed_num += (out[i] as u72 << (56 - (8 * i))); + } + assert(num == reconstructed_num); + out +} + +unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 +Backend circuit size: 2902 +``` + +This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). + +Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. 
For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. + +## Break and Continue + +In addition to loops over runtime bounds, `break` and `continue` are also available in unconstrained code. See [break and continue](../concepts/control_flow/#break-and-continue) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/_category_.json new file mode 100644 index 000000000000..1debcfe76753 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Modules, Packages and Crates", + "position": 2, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/crates_and_packages.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/crates_and_packages.md new file mode 100644 index 000000000000..95ee9f52ab21 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,43 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +sidebar_position: 0 +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. 
Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/noir-projects/noir-contracts/contracts). + +### Crate Root + +Every crate has a root, which is the source file that the compiler starts from; this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. + +## Packages + +A Nargo _package_ is a collection of one or more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/dependencies.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/dependencies.md new file mode 100644 index 000000000000..04c1703d929b --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/dependencies.md @@ -0,0 +1,124 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. +keywords: [Nargo, dependencies, GitHub, package management, versioning] +sidebar_position: 1 +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag pointing to a specific commit and the git URL of the repository containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag`, there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine.
+ +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use dep::ecrecover; +use dep::lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. 
You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/modules.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/modules.md new file mode 100644 index 000000000000..ae822a1cff4e --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/modules.md @@ -0,0 +1,105 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module 
system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +sidebar_position: 2 +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. + +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. + +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. 
Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr`. + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/workspaces.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/workspaces.md new file mode 100644 index 000000000000..513497f12bf7 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/modules_packages_crates/workspaces.md @@ -0,0 +1,42 @@ +--- +title: Workspaces +sidebar_position: 3 +--- + +Workspaces are a feature of nargo that allows you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. + +Each Noir project (with its own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. + +For a project with the following structure: + +```tree +├── crates +│ ├── a +│ │ ├── Nargo.toml +│ │ └── Prover.toml +│ │ └── src +│ │ └── main.nr +│ └── b +│ ├── Nargo.toml +│ └── Prover.toml +│ └── src +│ └── main.nr +│ +└── Nargo.toml +``` + +You can define a workspace in Nargo.toml like so: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. + +`default-member` indicates which package various commands process by default. + +Libraries can be defined in a workspace.
Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/_category_.json new file mode 100644 index 000000000000..af04c0933fdb --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Standard Library", + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/bigint.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/bigint.md new file mode 100644 index 000000000000..2bfdeec6631d --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/bigint.md @@ -0,0 +1,122 @@ +--- +title: Big Integers +description: How to use big integers from Noir standard library +keywords: + [ + Big Integer, + Noir programming language, + Noir libraries, + ] +--- + +The BigInt module in the standard library exposes some class of integers which do not fit (well) into a Noir native field. It implements modulo arithmetic, modulo a 'big' prime number. + +:::note + +The module can currently be considered as `Field`s with fixed modulo sizes used by a set of elliptic curves, in addition to just the native curve. [More work](https://github.com/noir-lang/noir/issues/510) is needed to achieve arbitrarily sized big integers.
+ +::: + +Currently 6 classes of integers (i.e. 'big' prime numbers) are available in the module, namely: + +- BN254 Fq: Bn254Fq +- BN254 Fr: Bn254Fr +- Secp256k1 Fq: Secpk1Fq +- Secp256k1 Fr: Secpk1Fr +- Secp256r1 Fr: Secpr1Fr +- Secp256r1 Fq: Secpr1Fq + +Where XXX Fq and XXX Fr denote respectively the order of the base and scalar field of the (usual) elliptic curve XXX. +For instance the big integer 'Secpk1Fq' in the standard library refers to integers modulo $2^{256}-2^{32}-977$. + +Feel free to explore the source code for the other primes: + +```rust title="big_int_definition" showLineNumbers +struct BigInt { + pointer: u32, + modulus: u32, +} +``` +> Source code: noir_stdlib/src/bigint.nr#L14-L19 + + +## Example usage + +A common use-case is when constructing a big integer from its bytes representation, and performing arithmetic operations on it: + +```rust title="big_int_example" showLineNumbers +fn big_int_example(x: u8, y: u8) { + let a = Secpk1Fq::from_le_bytes(&[x, y, 0, 45, 2]); + let b = Secpk1Fq::from_le_bytes(&[y, x, 9]); + let c = (a + b) * b / a; + let d = c.to_le_bytes(); + println(d[0]); +} +``` +> Source code: test_programs/execution_success/bigint/src/main.nr#L70-L78 + + +## Methods + +The available operations for each big integer are: + +### from_le_bytes + +Construct a big integer from its little-endian bytes representation. Example: + +```rust + // Construct a big integer from a slice of bytes + let a = Secpk1Fq::from_le_bytes(&[x, y, 0, 45, 2]); + // Construct a big integer from an array of 32 bytes + let a = Secpk1Fq::from_le_bytes_32([1;32]); + ``` + +The remaining methods are documented below: + +### to_le_bytes + +Return the little-endian bytes representation of a big integer. Example: + +```rust +let bytes = a.to_le_bytes(); +``` + +### add + +Add two big integers. Example: + +```rust +let sum = a + b; +``` + +### sub + +Subtract two big integers.
Example: + +```rust +let difference = a - b; +``` + +### mul + +Multiply two big integers. Example: + +```rust +let product = a * b; +``` + +### div + +Divide two big integers. Note that division is field division and not euclidean division. Example: + +```rust +let quotient = a / b; +``` + +### eq + +Compare two big integers. Example: + +```rust +let are_equal = a == b; +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/black_box_fns.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/black_box_fns.md new file mode 100644 index 000000000000..be8c65679c31 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/black_box_fns.md @@ -0,0 +1,31 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. + +The ACVM spec defines a set of blackbox functions which backends will be expected to implement. This allows backends to use optimized implementations of these constraints if they have them, however they may also fallback to less efficient naive implementations if not. 
+ +## Function list + +Here is a list of the current black box functions: + +- [SHA256](./cryptographic_primitives/hashes.mdx#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr.mdx) +- [Blake2s](./cryptographic_primitives/hashes.mdx#blake2s) +- [Blake3](./cryptographic_primitives/hashes.mdx#blake3) +- [Pedersen Hash](./cryptographic_primitives/hashes.mdx#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes.mdx#pedersen_commitment) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification.mdx) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar.mdx) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes.mdx#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/bn254.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/bn254.md new file mode 100644 index 000000000000..3294f005dbb4 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/bn254.md @@ -0,0 +1,46 @@ +--- +title: Bn254 Field Library +--- + +Noir provides a module in standard library with some optimized functions for bn254 Fr in `std::field::bn254`. + +## decompose + +```rust +fn decompose(x: Field) -> (Field, Field) {} +``` + +Decomposes a single field into two fields, low and high. The low field contains the lower 16 bytes of the input field and the high field contains the upper 16 bytes of the input field. 
Both field results are range checked to 128 bits. + + +## assert_gt + +```rust +fn assert_gt(a: Field, b: Field) {} +``` + +Asserts that a > b. This will generate less constraints than using `assert(gt(a, b))`. + +## assert_lt + +```rust +fn assert_lt(a: Field, b: Field) {} +``` + +Asserts that a < b. This will generate less constraints than using `assert(lt(a, b))`. + +## gt + +```rust +fn gt(a: Field, b: Field) -> bool {} +``` + +Returns true if a > b. + +## lt + +```rust +fn lt(a: Field, b: Field) -> bool {} +``` + +Returns true if a < b. \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/boundedvec.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/boundedvec.md new file mode 100644 index 000000000000..ce4529f6e57e --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/boundedvec.md @@ -0,0 +1,326 @@ +--- +title: Bounded Vectors +keywords: [noir, vector, bounded vector, slice] +sidebar_position: 1 +--- + +A `BoundedVec` is a growable storage similar to a `Vec` except that it +is bounded with a maximum possible length. Unlike `Vec`, `BoundedVec` is not implemented +via slices and thus is not subject to the same restrictions slices are (notably, nested +slices - and thus nested vectors as well - are disallowed). + +Since a BoundedVec is backed by a normal array under the hood, growing the BoundedVec by +pushing an additional element is also more efficient - the length only needs to be increased +by one. + +For these reasons `BoundedVec` should generally be preferred over `Vec` when there +is a reasonable maximum bound that can be placed on the vector. 
+ +Example: + +```rust +let mut vector: BoundedVec = BoundedVec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +assert(vector.max_len() == 10); +``` + +## Methods + +### new + +```rust +pub fn new() -> Self +``` + +Creates a new, empty vector of length zero. + +Since this container is backed by an array internally, it still needs an initial value +to give each element. To resolve this, each element is zeroed internally. This value +is guaranteed to be inaccessible unless `get_unchecked` is used. + +Example: + +```rust +let empty_vector: BoundedVec = BoundedVec::new(); +assert(empty_vector.len() == 0); +``` + +Note that whenever calling `new` the maximum length of the vector should always be specified +via a type signature: + +```rust title="new_example" showLineNumbers +fn foo() -> BoundedVec { + // Ok! MaxLen is specified with a type annotation + let v1: BoundedVec = BoundedVec::new(); + let v2 = BoundedVec::new(); + + // Ok! MaxLen is known from the type of foo's return value + v2 +} + +fn bad() { + let mut v3 = BoundedVec::new(); + + // Not Ok! We don't know if v3's MaxLen is at least 1, and the compiler often infers 0 by default. + v3.push(5); +} +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L11-L27 + + +This defaulting of `MaxLen` (and numeric generics in general) to zero may change in future noir versions +but for now make sure to use type annotations when using bounded vectors. Otherwise, you will receive a constraint failure at runtime when the vec is pushed to. + +### get + +```rust +pub fn get(mut self: Self, index: u64) -> T { +``` + +Retrieves an element from the vector at the given index, starting from zero. + +If the given index is equal to or greater than the length of the vector, this +will issue a constraint failure. 
+ +Example: + +```rust +fn foo(v: BoundedVec) { + let first = v.get(0); + let last = v.get(v.len() - 1); + assert(first != last); +} +``` + +### get_unchecked + +```rust +pub fn get_unchecked(mut self: Self, index: u64) -> T { +``` + +Retrieves an element from the vector at the given index, starting from zero, without +performing a bounds check. + +Since this function does not perform a bounds check on length before accessing the element, +it is unsafe! Use at your own risk! + +Example: + +```rust title="get_unchecked_example" showLineNumbers +fn sum_of_first_three(v: BoundedVec) -> u32 { + // Always ensure the length is larger than the largest + // index passed to get_unchecked + assert(v.len() > 2); + let first = v.get_unchecked(0); + let second = v.get_unchecked(1); + let third = v.get_unchecked(2); + first + second + third +} +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L54-L64 + + + +### push + +```rust +pub fn push(&mut self, elem: T) { +``` + +Pushes an element to the end of the vector. This increases the length +of the vector by one. + +Panics if the new length of the vector will be greater than the max length. + +Example: + +```rust title="bounded-vec-push-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + v.push(1); + v.push(2); + + // Panics with failed assertion "push out of bounds" + v.push(3); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L68-L76 + + +### pop + +```rust +pub fn pop(&mut self) -> T +``` + +Pops the element at the end of the vector. This will decrease the length +of the vector by one. + +Panics if the vector is empty. 
+ +Example: + +```rust title="bounded-vec-pop-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + v.push(1); + v.push(2); + + let two = v.pop(); + let one = v.pop(); + + assert(two == 2); + assert(one == 1); + // error: cannot pop from an empty vector + // let _ = v.pop(); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L81-L93 + + +### len + +```rust +pub fn len(self) -> u64 { +``` + +Returns the current length of this vector + +Example: + +```rust title="bounded-vec-len-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + assert(v.len() == 0); + + v.push(100); + assert(v.len() == 1); + + v.push(200); + v.push(300); + v.push(400); + assert(v.len() == 4); + + let _ = v.pop(); + let _ = v.pop(); + assert(v.len() == 2); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L98-L113 + + +### max_len + +```rust +pub fn max_len(_self: BoundedVec) -> u64 { +``` + +Returns the maximum length of this vector. This is always +equal to the `MaxLen` parameter this vector was initialized with. + +Example: + +```rust title="bounded-vec-max-len-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + assert(v.max_len() == 5); + v.push(10); + assert(v.max_len() == 5); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L118-L124 + + +### storage + +```rust +pub fn storage(self) -> [T; MaxLen] { +``` + +Returns the internal array within this vector. +Since arrays in Noir are immutable, mutating the returned storage array will not mutate +the storage held internally by this vector. + +Note that uninitialized elements may be zeroed out! 
+ +Example: + +```rust title="bounded-vec-storage-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + assert(v.storage() == [0, 0, 0, 0, 0]); + + v.push(57); + assert(v.storage() == [57, 0, 0, 0, 0]); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L129-L136 + + +### extend_from_array + +```rust +pub fn extend_from_array(&mut self, array: [T; Len]) +``` + +Pushes each element from the given array to this vector. + +Panics if pushing each element would cause the length of this vector +to exceed the maximum length. + +Example: + +```rust title="bounded-vec-extend-from-array-example" showLineNumbers +let mut vec: BoundedVec = BoundedVec::new(); + vec.extend_from_array([2, 4]); + + assert(vec.len == 2); + assert(vec.get(0) == 2); + assert(vec.get(1) == 4); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L141-L148 + + +### extend_from_bounded_vec + +```rust +pub fn extend_from_bounded_vec(&mut self, vec: BoundedVec) +``` + +Pushes each element from the other vector to this vector. The length of +the other vector is left unchanged. + +Panics if pushing each element would cause the length of this vector +to exceed the maximum length. + +Example: + +```rust title="bounded-vec-extend-from-bounded-vec-example" showLineNumbers +let mut v1: BoundedVec = BoundedVec::new(); + let mut v2: BoundedVec = BoundedVec::new(); + + v2.extend_from_array([1, 2, 3]); + v1.extend_from_bounded_vec(v2); + + assert(v1.storage() == [1, 2, 3, 0, 0]); + assert(v2.storage() == [1, 2, 3, 0, 0, 0, 0]); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L153-L162 + + +### any + +```rust +pub fn any(self, predicate: fn[Env](T) -> bool) -> bool +``` + +Returns true if the given predicate returns true for any element +in this vector. 
+ +Example: + +```rust title="bounded-vec-any-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + v.extend_from_array([2, 4, 6]); + + let all_even = !v.any(|elem: u32| elem % 2 != 0); + assert(all_even); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L229-L235 + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/hashmap.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/hashmap.md new file mode 100644 index 000000000000..47faa99aba69 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/hashmap.md @@ -0,0 +1,570 @@ +--- +title: HashMap +keywords: [noir, map, hash, hashmap] +sidebar_position: 1 +--- + +`HashMap` is used to efficiently store and look up key-value pairs. + +`HashMap` is a bounded type which can store anywhere from zero to `MaxLen` total elements. +Note that due to hash collisions, the actual maximum number of elements stored by any particular +hashmap is likely lower than `MaxLen`. This is true even with cryptographic hash functions since +every hash value will be performed modulo `MaxLen`. + +When creating `HashMap`s, the `MaxLen` generic should always be specified if it is not already +known. Otherwise, the compiler may infer a different value for `MaxLen` (such as zero), which +will likely change the result of the program. This behavior is set to become an error in future +versions instead. 
+ +Example: + +```rust +// Create a mapping from Fields to u32s with a maximum length of 12 +// using a poseidon2 hasher +use dep::std::hash::poseidon2::Poseidon2Hasher; +let mut map: HashMap> = HashMap::default(); + +map.insert(1, 2); +map.insert(3, 4); + +let two = map.get(1).unwrap(); +``` + +## Methods + +### default + +```rust title="default" showLineNumbers +impl Default for HashMap +where + B: BuildHasher + Default, + H: Hasher + Default +{ + fn default() -> Self { +``` +> Source code: noir_stdlib/src/collections/map.nr#L462-L469 + + +Creates a fresh, empty HashMap. + +When using this function, always make sure to specify the maximum size of the hash map. + +This is the same `default` from the `Default` implementation given further below. It is +repeated here for convenience since it is the recommended way to create a hashmap. + +Example: + +```rust title="default_example" showLineNumbers +let hashmap: HashMap> = HashMap::default(); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L202-L205 + + +Because `HashMap` has so many generic arguments that are likely to be the same throughout +your program, it may be helpful to create a type alias: + +```rust title="type_alias" showLineNumbers +type MyMap = HashMap>; +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L196-L198 + + +### with_hasher + +```rust title="with_hasher" showLineNumbers +pub fn with_hasher(_build_hasher: B) -> Self + where + B: BuildHasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L82-L86 + + +Creates a hashmap with an existing `BuildHasher`. This can be used to ensure multiple +hashmaps are created with the same hasher instance. 
+ +Example: + +```rust title="with_hasher_example" showLineNumbers +let my_hasher: BuildHasherDefault = Default::default(); + let hashmap: HashMap> = HashMap::with_hasher(my_hasher); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L207-L211 + + +### get + +```rust title="get" showLineNumbers +pub fn get( + self, + key: K + ) -> Option + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L278-L287 + + +Retrieves a value from the hashmap, returning `Option::none()` if it was not found. + +Example: + +```rust title="get_example" showLineNumbers +fn get_example(map: HashMap>) { + let x = map.get(12); + + if x.is_some() { + assert(x.unwrap() == 42); + } +} +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L299-L307 + + +### insert + +```rust title="insert" showLineNumbers +pub fn insert( + &mut self, + key: K, + value: V + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L313-L323 + + +Inserts a new key-value pair into the map. If the key was already in the map, its +previous value will be overridden with the newly provided one. + +Example: + +```rust title="insert_example" showLineNumbers +let mut map: HashMap> = HashMap::default(); + map.insert(12, 42); + assert(map.len() == 1); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L213-L217 + + +### remove + +```rust title="remove" showLineNumbers +pub fn remove( + &mut self, + key: K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L356-L365 + + +Removes the given key-value pair from the map. If the key was not already present +in the map, this does nothing. 
+ +Example: + +```rust title="remove_example" showLineNumbers +map.remove(12); + assert(map.is_empty()); + + // If a key was not present in the map, remove does nothing + map.remove(12); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L221-L228 + + +### is_empty + +```rust title="is_empty" showLineNumbers +pub fn is_empty(self) -> bool { +``` +> Source code: noir_stdlib/src/collections/map.nr#L115-L117 + + +True if the length of the hash map is empty. + +Example: + +```rust title="is_empty_example" showLineNumbers +assert(map.is_empty()); + + map.insert(1, 2); + assert(!map.is_empty()); + + map.remove(1); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L230-L238 + + +### len + +```rust title="len" showLineNumbers +pub fn len(self) -> u64 { +``` +> Source code: noir_stdlib/src/collections/map.nr#L264-L266 + + +Returns the current length of this hash map. + +Example: + +```rust title="len_example" showLineNumbers +// This is equivalent to checking map.is_empty() + assert(map.len() == 0); + + map.insert(1, 2); + map.insert(3, 4); + map.insert(5, 6); + assert(map.len() == 3); + + // 3 was already present as a key in the hash map, so the length is unchanged + map.insert(3, 7); + assert(map.len() == 3); + + map.remove(1); + assert(map.len() == 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L240-L255 + + +### capacity + +```rust title="capacity" showLineNumbers +pub fn capacity(_self: Self) -> u64 { +``` +> Source code: noir_stdlib/src/collections/map.nr#L271-L273 + + +Returns the maximum capacity of this hashmap. This is always equal to the capacity +specified in the hashmap's type. + +Unlike hashmaps in general purpose programming languages, hashmaps in Noir have a +static capacity that does not increase as the map grows larger. Thus, this capacity +is also the maximum possible element count that can be inserted into the hashmap. 
+Due to hash collisions (modulo the hashmap length), it is likely the actual maximum +element count will be lower than the full capacity. + +Example: + +```rust title="capacity_example" showLineNumbers +let empty_map: HashMap> = HashMap::default(); + assert(empty_map.len() == 0); + assert(empty_map.capacity() == 42); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L257-L261 + + +### clear + +```rust title="clear" showLineNumbers +pub fn clear(&mut self) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L93-L95 + + +Clears the hashmap, removing all key-value pairs from it. + +Example: + +```rust title="clear_example" showLineNumbers +assert(!map.is_empty()); + map.clear(); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L263-L267 + + +### contains_key + +```rust title="contains_key" showLineNumbers +pub fn contains_key( + self, + key: K + ) -> bool + where + K: Hash + Eq, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L101-L110 + + +True if the hashmap contains the given key. Unlike `get`, this will not also return +the value associated with the key. + +Example: + +```rust title="contains_key_example" showLineNumbers +if map.contains_key(7) { + let value = map.get(7); + assert(value.is_some()); + } else { + println("No value for key 7!"); + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L269-L276 + + +### entries + +```rust title="entries" showLineNumbers +pub fn entries(self) -> BoundedVec<(K, V), N> { +``` +> Source code: noir_stdlib/src/collections/map.nr#L123-L125 + + +Returns a vector of each key-value pair present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. 
+ +Example: + +```rust title="entries_example" showLineNumbers +let entries = map.entries(); + + // The length of a hashmap may not be compile-time known, so we + // need to loop over its capacity instead + for i in 0..map.capacity() { + if i < entries.len() { + let (key, value) = entries.get(i); + println(f"{key} -> {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L310-L321 + + +### keys + +```rust title="keys" showLineNumbers +pub fn keys(self) -> BoundedVec { +``` +> Source code: noir_stdlib/src/collections/map.nr#L144-L146 + + +Returns a vector of each key present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +```rust title="keys_example" showLineNumbers +let keys = map.keys(); + + for i in 0..keys.max_len() { + if i < keys.len() { + let key = keys.get_unchecked(i); + let value = map.get(key).unwrap_unchecked(); + println(f"{key} -> {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L323-L333 + + +### values + +```rust title="values" showLineNumbers +pub fn values(self) -> BoundedVec { +``` +> Source code: noir_stdlib/src/collections/map.nr#L164-L166 + + +Returns a vector of each value present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. 
+ +Example: + +```rust title="values_example" showLineNumbers +let values = map.values(); + + for i in 0..values.max_len() { + if i < values.len() { + let value = values.get_unchecked(i); + println(f"Found value {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L335-L344 + + +### iter_mut + +```rust title="iter_mut" showLineNumbers +pub fn iter_mut( + &mut self, + f: fn(K, V) -> (K, V) + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L183-L192 + + +Iterates through each key-value pair of the HashMap, setting each key-value pair to the +result returned from the given function. + +Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated +through. If this is not desired, use `iter_values_mut` if only values need to be mutated, +or `entries` if neither keys nor values need to be mutated. + +The iteration order is left unspecified. As a result, if two keys are mutated to become +equal, which of the two values that will be present for the key in the resulting map is also unspecified. + +Example: + +```rust title="iter_mut_example" showLineNumbers +// Add 1 to each key in the map, and double the value associated with that key. + map.iter_mut(|k, v| (k + 1, v * 2)); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L348-L351 + + +### iter_keys_mut + +```rust title="iter_keys_mut" showLineNumbers +pub fn iter_keys_mut( + &mut self, + f: fn(K) -> K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L208-L217 + + +Iterates through the HashMap, mutating each key to the result returned from +the given function. + +Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated +through. If only iteration is desired and the keys are not intended to be mutated, +prefer using `entries` instead. 
+ +The iteration order is left unspecified. As a result, if two keys are mutated to become +equal, which of the two values that will be present for the key in the resulting map is also unspecified. + +Example: + +```rust title="iter_keys_mut_example" showLineNumbers +// Double each key, leaving the value associated with that key untouched + map.iter_keys_mut(|k| k * 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L353-L356 + + +### iter_values_mut + +```rust title="iter_values_mut" showLineNumbers +pub fn iter_values_mut(&mut self, f: fn(V) -> V) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L233-L235 + + +Iterates through the HashMap, applying the given function to each value and mutating the +value to equal the result. This function is more efficient than `iter_mut` and `iter_keys_mut` +because the keys are untouched and the underlying hashmap thus does not need to be reordered. + +Example: + +```rust title="iter_values_mut_example" showLineNumbers +// Halve each value + map.iter_values_mut(|v| v / 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L358-L361 + + +### retain + +```rust title="retain" showLineNumbers +pub fn retain(&mut self, f: fn(K, V) -> bool) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L247-L249 + + +Retains only the key-value pairs for which the given function returns true. +Any key-value pairs for which the function returns false will be removed from the map. + +Example: + +```rust title="retain_example" showLineNumbers +map.retain(|k, v| (k != 0) & (v != 0)); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L281-L283 + + +## Trait Implementations + +### default + +```rust title="default" showLineNumbers +impl Default for HashMap +where + B: BuildHasher + Default, + H: Hasher + Default +{ + fn default() -> Self { +``` +> Source code: noir_stdlib/src/collections/map.nr#L462-L469 + + +Constructs an empty HashMap. 
+ +Example: + +```rust title="default_example" showLineNumbers +let hashmap: HashMap> = HashMap::default(); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L202-L205 + + +### eq + +```rust title="eq" showLineNumbers +impl Eq for HashMap +where + K: Eq + Hash, + V: Eq, + B: BuildHasher, + H: Hasher +{ + fn eq(self, other: HashMap) -> bool { +``` +> Source code: noir_stdlib/src/collections/map.nr#L426-L435 + + +Checks if two HashMaps are equal. + +Example: + +```rust title="eq_example" showLineNumbers +let mut map1: HashMap> = HashMap::default(); + let mut map2: HashMap> = HashMap::default(); + + map1.insert(1, 2); + map1.insert(3, 4); + + map2.insert(3, 4); + map2.insert(1, 2); + + assert(map1 == map2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L285-L296 + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/index.md new file mode 100644 index 000000000000..ea84c6d5c21e --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/index.md @@ -0,0 +1,5 @@ +--- +title: Containers +description: Container types provided by Noir's standard library for storing and retrieving data +keywords: [containers, data types, vec, hashmap] +--- diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/vec.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/vec.mdx new file mode 100644 index 000000000000..fcfd7e07aa00 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/containers/vec.mdx @@ -0,0 +1,151 @@ +--- +title: Vectors +description: Delve into the Vec data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. 
+keywords: [noir, vector type, methods, examples, dynamic arrays] +sidebar_position: 6 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A vector is a collection type similar to Rust's `Vec` type. In Noir, it is a convenient way to use slices as mutable arrays. + +Example: + +```rust +let mut vector: Vec = Vec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +``` + +## Methods + +### new + +Creates a new, empty vector. + +```rust +pub fn new() -> Self +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### from_slice + +Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. + +```rust +pub fn from_slice(slice: [T]) -> Self +``` + +Example: + +```rust +let slice: [Field] = &[1, 2, 3]; +let vector_from_slice = Vec::from_slice(slice); +assert(vector_from_slice.len() == 3); +``` + +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### get + +Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. + +```rust +pub fn get(self, index: Field) -> T +``` + +Example: + +```rust +let vector: Vec = Vec::from_slice(&[10, 20, 30]); +assert(vector.get(1) == 20); +``` + +### push + +Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. + +```rust +pub fn push(&mut self, elem: T) +``` + +Example: + +```rust +let mut vector: Vec = Vec::new(); +vector.push(10); +assert(vector.len() == 1); +``` + +### pop + +Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. 
+ +```rust +pub fn pop(&mut self) -> T +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. + +```rust +pub fn remove(&mut self, index: Field) -> T +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/_category_.json new file mode 100644 index 000000000000..5d694210bbf3 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/ec_primitives.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/ec_primitives.md new file mode 100644 index 000000000000..d2b42d67b7cb --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -0,0 +1,102 @@ +--- +title: Elliptic Curve Primitives +keywords: [cryptographic primitives, Noir project] +sidebar_position: 4 +--- + +Data structures and methods on them that allow 
you to carry out computations involving elliptic +curves over the (mathematical) field corresponding to `Field`. For the field currently at our +disposal, applications would involve a curve embedded in BN254, e.g. the +[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). + +## Data structures + +### Elliptic curve configurations + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic +curve you want to use, which would be specified using any one of the methods +`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the +defining equation together with a generator point as parameters. You can find more detail in the +comments in +[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but +the gist of it is that the elliptic curves of interest are usually expressed in one of the standard +forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, +you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly +together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates +requiring more coordinates but allowing for more efficient implementations of elliptic curve +operations). Conversions between all of these forms are provided, and under the hood these +conversions are done whenever an operation is more efficient in a different representation (or a +mixed coordinate representation is employed). + +### Points + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the +elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p` +does indeed lie on `c` by calling `c.contains(p1)`. + +## Methods + +(given a choice of curve representation, e.g. 
use `std::ec::tecurve::affine::Curve` and use +`std::ec::tecurve::affine::Point`) + +- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is + zero by calling `p.is_zero()`. +- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling + `p1.eq(p2)`. +- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two + points is accomplished by calling `c.add(p1,p2)`. +- **Negation**: For a point `p: Point`, `p.negate()` is its negation. +- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by + calling `c.subtract(p1,p2)`. +- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`, + scalar multiplication is given by `c.mul(n,p)`. If instead `n :: [u1; N]`, i.e. `n` is a bit + array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)` +- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`, + multi-scalar multiplication is given by `c.msm(n,p)`. +- **Coordinate representation conversions**: The `into_group` method converts a point or curve + configuration in the affine representation to one in the CurveGroup representation, and + `into_affine` goes in the other direction. +- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent + and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their + configurations or points. `swcurve` is more general and a curve c of one of the other two types + may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying + on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling + `c.map_into_swcurve(p)`. 
+- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a + `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. For all of + the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where + `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to + satisfy are specified in the comments + [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)). + +## Examples + +The +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) +illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more +interesting examples in Noir would be: + +Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key +from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub, +for example, this code would do: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; + +fn bjj_pub_key(priv_key: Field) -> Point +{ + + let bjj = Curve::new(168700, 168696, G::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); + + let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203); + + bjj.mul(priv_key,base_pt) +} +``` + +This would come in handy in a Merkle proof. + +- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash + function. See + [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for + the case of Baby Jubjub and the Poseidon hash function. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx new file mode 100644 index 000000000000..4394b48f9073 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx @@ -0,0 +1,98 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +sidebar_position: 3 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. + +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures. +See ecdsa_secp256k1::verify_signature_slice for a version that accepts slices directly. + +```rust title="ecdsa_secp256k1" showLineNumbers +pub fn verify_signature( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256k1.nr#L2-L9 + + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + + +## ecdsa_secp256k1::verify_signature_slice + +Verifier for ECDSA Secp256k1 signatures where the message is a slice. 
+ +```rust title="ecdsa_secp256k1_slice" showLineNumbers +pub fn verify_signature_slice( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256k1.nr#L13-L20 + + + + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures. +See ecdsa_secp256r1::verify_signature_slice for a version that accepts slices directly. + +```rust title="ecdsa_secp256r1" showLineNumbers +pub fn verify_signature( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256r1.nr#L2-L9 + + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures where the message is a slice. 
+ +```rust title="ecdsa_secp256r1_slice" showLineNumbers +pub fn verify_signature_slice( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256r1.nr#L13-L20 + + + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/eddsa.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/eddsa.mdx new file mode 100644 index 000000000000..c2c0624dfadb --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/eddsa.mdx @@ -0,0 +1,37 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +sidebar_position: 5 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + +It is also possible to specify the hash algorithm used for the signature by using the `eddsa_verify_with_hasher` function with a parameter implementing the Hasher trait. For instance, if you want to use Poseidon2 instead, you can do the following: +```rust +use dep::std::hash::poseidon2::Poseidon2Hasher; + +let mut hasher = Poseidon2Hasher::default(); +eddsa_verify_with_hasher(pub_key_a.x, pub_key_a.y, s_a, r8_a.x, r8_a.y, msg, &mut hasher); +``` + + + +## eddsa::eddsa_to_pub + +Private to public key conversion. 
+ +Returns `(pub_key_x, pub_key_y)` + +```rust +fn eddsa_to_pub(secret : Field) -> (Field, Field) +``` + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/hashes.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/hashes.mdx new file mode 100644 index 000000000000..3b83d9ec31a3 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -0,0 +1,257 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. +Specify a message_size to hash only the first `message_size` bytes of the input. 
+ +```rust title="sha256" showLineNumbers +pub fn sha256(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L10-L12 + + +example: +```rust title="sha256_var" showLineNumbers +let digest = std::hash::sha256_var([x as u8], 1); +``` +> Source code: test_programs/execution_success/sha256/src/main.nr#L17-L19 + + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::sha256::sha256_var(x, 4); +} +``` + + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust title="blake2s" showLineNumbers +pub fn blake2s(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L16-L18 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## blake3 + +Given an array of bytes, returns an array with the Blake3 hash + +```rust title="blake3" showLineNumbers +pub fn blake3(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L22-L24 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake3(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust title="pedersen_hash" showLineNumbers +pub fn pedersen_hash(input: [Field; N]) -> Field +``` +> Source code: noir_stdlib/src/hash.nr#L46-L48 + + +example: + +```rust title="pedersen-hash" showLineNumbers +use dep::std; + +fn main(x: Field, y: Field, expected_hash: Field) { + let hash = std::hash::pedersen_hash([x, y]); + assert_eq(hash, expected_hash); +} +``` +> Source code: test_programs/execution_success/pedersen_hash/src/main.nr#L1-L8 + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. 
+ +```rust title="pedersen_commitment" showLineNumbers +struct PedersenPoint { + x : Field, + y : Field, +} + +pub fn pedersen_commitment(input: [Field; N]) -> PedersenPoint { +``` +> Source code: noir_stdlib/src/hash.nr#L27-L34 + + +example: + +```rust title="pedersen-commitment" showLineNumbers +use dep::std; + +fn main(x: Field, y: Field, expected_commitment: std::hash::PedersenPoint) { + let commitment = std::hash::pedersen_commitment([x, y]); + assert_eq(commitment.x, expected_commitment.x); + assert_eq(commitment.y, expected_commitment.y); +} +``` +> Source code: test_programs/execution_success/pedersen_commitment/src/main.nr#L1-L9 + + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of +32 bytes (`[u8; 32]`). Specify a message_size to hash only the first +`message_size` bytes of the input. + +```rust title="keccak256" showLineNumbers +pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L68-L70 + + +example: + +```rust title="keccak256" showLineNumbers +use dep::std; + +fn main(x: Field, result: [u8; 32]) { + // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field + // The padding is taken care of by the program + let digest = std::hash::keccak256([x as u8], 1); + assert(digest == result); + + //#1399: variable message size + let message_size = 4; + let hash_a = std::hash::keccak256([1, 2, 3, 4], message_size); + let hash_b = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size); + + assert(hash_a == hash_b); + + let message_size_big = 8; + let hash_c = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size_big); + + assert(hash_a != hash_c); +} +``` +> Source code: test_programs/execution_success/keccak256/src/main.nr#L1-L22 + + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. 
Mind that you need to specify +how many inputs are there to your Poseidon function. + +```rust +// example for hash_1, hash_2 accepts an array of length 2, etc +fn hash_1(input: [Field; 1]) -> Field +``` + +example: + +```rust title="poseidon" showLineNumbers +use dep::std::hash::poseidon; +use dep::std::hash::poseidon2; + +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field, x3: [Field; 4], y3: Field) { + let hash1 = poseidon::bn254::hash_2(x1); + assert(hash1 == y1); + + let hash2 = poseidon::bn254::hash_4(x2); + assert(hash2 == y2); + + let hash3 = poseidon2::Poseidon2::hash(x3, x3.len()); + assert(hash3 == y3); +} +``` +> Source code: test_programs/execution_success/poseidon_bn254_hash/src/main.nr#L1-L15 + + +## poseidon 2 + +Given an array of Fields, returns a new Field with the Poseidon2 Hash. Contrary to the Poseidon +function, there is only one hash and you can specify a message_size to hash only the first +`message_size` bytes of the input, + +```rust +// example for hashing the first three elements of the input +Poseidon2::hash(input, 3); +``` + +The above example for Poseidon also includes Poseidon2. + +## mimc_bn254 and mimc + +`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by +providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if +you're willing to input your own constants: + +```rust +fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field +``` + +otherwise, use the `mimc_bn254` method: + +```rust +fn mimc_bn254(array: [Field; N]) -> Field +``` + +example: + +```rust + +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::mimc::mimc_bn254(x); +} +``` + +## hash_to_field + +```rust +fn hash_to_field(_input : [Field]) -> Field {} +``` + +Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return +a value which can be represented as a `Field`. 
+ diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/index.md new file mode 100644 index 000000000000..650f30165d56 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/index.md @@ -0,0 +1,14 @@ +--- +title: Cryptographic Primitives +description: + Learn about the cryptographic primitives ready to use for any Noir project +keywords: + [ + cryptographic primitives, + Noir project, + ] +--- + +The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news or if you would be interested in adding more of these calculations in Noir. + +Some methods are available thanks to the Aztec backend, not being performed using Noir. When using other backends, these methods may or may not be supplied. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/scalar.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/scalar.mdx new file mode 100644 index 000000000000..df411ca54433 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/scalar.mdx @@ -0,0 +1,33 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplications over a fixed base in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +sidebar_position: 1 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## scalar_mul::fixed_base_embedded_curve + +Performs scalar multiplication over the embedded curve whose coordinates are defined by the +configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. 
+ +```rust title="fixed_base_embedded_curve" showLineNumbers +pub fn fixed_base_embedded_curve( + low: Field, + high: Field +) -> [Field; 2] +``` +> Source code: noir_stdlib/src/scalar_mul.nr#L27-L32 + + +example + +```rust +fn main(x : Field) { + let scal = std::scalar_mul::fixed_base_embedded_curve(x); + println(scal); +} +``` + + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/schnorr.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/schnorr.mdx new file mode 100644 index 000000000000..b59e69c8f07f --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/cryptographic_primitives/schnorr.mdx @@ -0,0 +1,64 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +sidebar_position: 2 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). +See schnorr::verify_signature_slice for a version that works directly on slices. + +```rust title="schnorr_verify" showLineNumbers +pub fn verify_signature( + public_key_x: Field, + public_key_y: Field, + signature: [u8; 64], + message: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/schnorr.nr#L2-L9 + + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... + +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... 
+const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... +``` + + + +## schnorr::verify_signature_slice + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin) +where the message is a slice. + +```rust title="schnorr_verify_slice" showLineNumbers +pub fn verify_signature_slice( + public_key_x: Field, + public_key_y: Field, + signature: [u8; 64], + message: [u8] +) -> bool +``` +> Source code: noir_stdlib/src/schnorr.nr#L13-L20 + + + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/logging.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/logging.md new file mode 100644 index 000000000000..db75ef9f86fa --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/logging.md @@ -0,0 +1,78 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + print statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides two familiar statements you can use: `println` and `print`. Despite being a limited implementation of rust's `println!` and `print!` macros, these constructs can be useful for debugging. + +You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. 
This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. + +Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. For example: + +```rust +struct Person { + age: Field, + height: Field, +} + +fn main(age: Field, height: Field) { + let person = Person { + age: age, + height: height, + }; + println(person); + println(age + height); + println("Hello world!"); +} +``` + +You can print different types in the same statement (including strings) with a type called `fmtstr`. It can be specified in the same way as a normal string, just prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + println(fmt_str); + + let s = myStruct { y: x, x: y }; + println(s); + + println(f"i: {i}, s: {s}"); + + println(x); + println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + println(f"s: {s}, foo: {foo}"); + + println(15); // prints 0x0f, implicit Field + println(-1 as u8); // prints 255 + println(-1 as i8); // prints -1 +``` + +Examples shown above are interchangeable between the two `print` statements: + +```rust +let person = Person { age : age, height : height }; + +println(person); +print(person); + +println("Hello world!"); // Prints with a newline at the end of the input +print("Hello world!"); // Prints the input and keeps cursor on the same line +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/merkle_trees.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/merkle_trees.md new file mode 100644 index 000000000000..6a9ebf72ada0 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/merkle_trees.md 
@@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. +keywords: + [ + Merkle trees in Noir, + Noir programming language, + check membership, + computing root from leaf, + Noir Merkle tree implementation, + Merkle tree tutorial, + Merkle tree code examples, + Noir libraries, + pedersen hash., + ] +--- + +## compute_merkle_root + +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash). + +```rust +fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field +``` + +example: + +```rust +/** + // these values are for this example only + index = "0" + priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" + secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" + note_hash_path = [ + "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", + "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", + "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + ] + */ +fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { + + let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); + let pubkey_x = pubkey[0]; + let pubkey_y = pubkey[1]; + let note_commitment = std::hash::pedersen(&[pubkey_x, pubkey_y, secret]); + + let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path.as_slice()); + println(root); +} +``` + +To check merkle tree membership: + +1. Include a merkle root as a program input. +2. Compute the merkle root of a given leaf, index and hash path. +3. Assert the merkle roots are equal. + +For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/options.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/options.md
new file mode 100644
index 000000000000..a1bd4e1de5fd
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/options.md
@@ -0,0 +1,101 @@
+---
+title: Option Type
+---
+
+The `Option<T>` type is a way to express that a value might be present (`Some(T)`) or absent (`None`). It's a safer way to handle potential absence of values, compared to using nulls in many other languages.
+
+```rust
+struct Option<T> {
+    None,
+    Some(T),
+}
+```
+
+The `Option` type, already imported into your Noir program, can be used directly:
+
+```rust
+fn main() {
+    let none = Option::none();
+    let some = Option::some(3);
+}
+```
+
+See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below.
+
+## Methods
+
+### none
+
+Constructs a none value.
+
+### some
+
+Constructs a some wrapper around a given value.
+
+### is_none
+
+Returns true if the Option is None.
+
+### is_some
+
+Returns true if the Option is Some.
+
+### unwrap
+
+Asserts `self.is_some()` and returns the wrapped value.
+
+### unwrap_unchecked
+
+Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option<T>`.
+
+### unwrap_or
+
+Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value.
+
+### unwrap_or_else
+
+Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value.
+
+### expect
+
+Asserts `self.is_some()` with a provided custom message and returns the contained `Some` value. The custom message is expected to be a format string.
+
+### map
+
+If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`.
+
+### map_or
+
+If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value.
+
+### map_or_else
+
+If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`.
+
+### and
+
+Returns None if self is None. Otherwise, this returns `other`.
+
+### and_then
+
+If self is None, this returns None. Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`.
+
+### or
+
+If self is Some, return self. Otherwise, return `other`.
+
+### or_else
+
+If self is Some, return self. Otherwise, return `default()`.
+
+### xor
+
+If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned.
+
+### filter
+
+Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`.
+
+### flatten
+
+Flattens an `Option<Option<T>>` into an `Option<T>`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/recursion.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/recursion.md
new file mode 100644
index 000000000000..a93894043dce
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/recursion.md
@@ -0,0 +1,88 @@
+---
+title: Recursive Proofs
+description: Learn about how to write recursive proofs in Noir.
+keywords: [recursion, recursive proofs, verification_key, verify_proof] +--- + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. + +Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) + +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. + +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. + +## Verifying Recursive Proofs + +```rust +#[foreign(recursive_aggregation)] +pub fn verify_proof(verification_key: [Field], proof: [Field], public_inputs: [Field], key_hash: Field) {} +``` + +:::info + +This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. 
+ +::: + +## Example usage + +```rust +use dep::std; + +fn main( + verification_key : [Field; 114], + proof : [Field; 93], + public_inputs : [Field; 1], + key_hash : Field, + proof_b : [Field; 93], +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ); + + std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash + ); +} +``` + +You can see a full example of recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). + +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. + +### `key_hash` + +A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/traits.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/traits.md new file mode 100644 index 000000000000..04dc40f0dac6 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/traits.md @@ -0,0 +1,410 @@ +--- +title: Traits +description: Noir's stdlib provides a few commonly used traits. +keywords: [traits, trait, interface, protocol, default, add, eq] +--- + +## `std::default` + +### `std::default::Default` + +```rust title="default-trait" showLineNumbers +trait Default { + fn default() -> Self; +} +``` +> Source code: noir_stdlib/src/default.nr#L1-L5 + + +Constructs a default value of a type. + +Implementations: +```rust +impl Default for Field { .. } + +impl Default for i8 { .. } +impl Default for i16 { .. 
} +impl Default for i32 { .. } +impl Default for i64 { .. } + +impl Default for u8 { .. } +impl Default for u16 { .. } +impl Default for u32 { .. } +impl Default for u64 { .. } + +impl Default for () { .. } +impl Default for bool { .. } + +impl Default for [T; N] + where T: Default { .. } + +impl Default for [T] { .. } + +impl Default for (A, B) + where A: Default, B: Default { .. } + +impl Default for (A, B, C) + where A: Default, B: Default, C: Default { .. } + +impl Default for (A, B, C, D) + where A: Default, B: Default, C: Default, D: Default { .. } + +impl Default for (A, B, C, D, E) + where A: Default, B: Default, C: Default, D: Default, E: Default { .. } +``` + +For primitive integer types, the return value of `default` is `0`. Container +types such as arrays are filled with default values of their element type, +except slices whose length is unknown and thus defaulted to zero. + + +## `std::convert` + +### `std::convert::From` + +```rust title="from-trait" showLineNumbers +trait From { + fn from(input: T) -> Self; +} +``` +> Source code: noir_stdlib/src/convert.nr#L1-L5 + + +The `From` trait defines how to convert from a given type `T` to the type on which the trait is implemented. + +The Noir standard library provides a number of implementations of `From` between primitive types. 
+```rust title="from-impls" showLineNumbers +// Unsigned integers + +impl From for u32 { fn from(value: u8) -> u32 { value as u32 } } + +impl From for u64 { fn from(value: u8) -> u64 { value as u64 } } +impl From for u64 { fn from(value: u32) -> u64 { value as u64 } } + +impl From for Field { fn from(value: u8) -> Field { value as Field } } +impl From for Field { fn from(value: u32) -> Field { value as Field } } +impl From for Field { fn from(value: u64) -> Field { value as Field } } + +// Signed integers + +impl From for i32 { fn from(value: i8) -> i32 { value as i32 } } + +impl From for i64 { fn from(value: i8) -> i64 { value as i64 } } +impl From for i64 { fn from(value: i32) -> i64 { value as i64 } } + +// Booleans +impl From for u8 { fn from(value: bool) -> u8 { value as u8 } } +impl From for u32 { fn from(value: bool) -> u32 { value as u32 } } +impl From for u64 { fn from(value: bool) -> u64 { value as u64 } } +impl From for i8 { fn from(value: bool) -> i8 { value as i8 } } +impl From for i32 { fn from(value: bool) -> i32 { value as i32 } } +impl From for i64 { fn from(value: bool) -> i64 { value as i64 } } +impl From for Field { fn from(value: bool) -> Field { value as Field } } +``` +> Source code: noir_stdlib/src/convert.nr#L25-L52 + + +#### When to implement `From` + +As a general rule of thumb, `From` may be implemented in the [situations where it would be suitable in Rust](https://doc.rust-lang.org/std/convert/trait.From.html#when-to-implement-from): + +- The conversion is *infallible*: Noir does not provide an equivalent to Rust's `TryFrom`, if the conversion can fail then provide a named method instead. +- The conversion is *lossless*: semantically, it should not lose or discard information. For example, `u32: From` can losslessly convert any `u16` into a valid `u32` such that the original `u16` can be recovered. On the other hand, `u16: From` should not be implemented as `2**16` is a `u32` which cannot be losslessly converted into a `u16`. 
+- The conversion is *value-preserving*: the conceptual kind and meaning of the resulting value is the same, even though the Noir type and technical representation might be different. While it's possible to infallibly and losslessly convert a `u8` into a `str<2>` hex representation, `4u8` and `"04"` are too different for `str<2>: From` to be implemented. +- The conversion is *obvious*: it's the only reasonable conversion between the two types. If there's ambiguity on how to convert between them such that the same input could potentially map to two different values then a named method should be used. For instance rather than implementing `U128: From<[u8; 16]>`, the methods `U128::from_le_bytes` and `U128::from_be_bytes` are used as otherwise the endianness of the array would be ambiguous, resulting in two potential values of `U128` from the same byte array. + +One additional recommendation specific to Noir is: +- The conversion is *efficient*: it's relatively cheap to convert between the two types. Due to being a ZK DSL, it's more important to avoid unnecessary computation compared to Rust. If the implementation of `From` would encourage users to perform unnecessary conversion, resulting in additional proving time, then it may be preferable to expose functionality such that this conversion may be avoided. + +### `std::convert::Into` + +The `Into` trait is defined as the reciprocal of `From`. It should be easy to convince yourself that if we can convert to type `A` from type `B`, then it's possible to convert type `B` into type `A`. + +For this reason, implementing `From` on a type will automatically generate a matching `Into` implementation. One should always prefer implementing `From` over `Into` as implementing `Into` will not generate a matching `From` implementation. 
+ +```rust title="into-trait" showLineNumbers +trait Into { + fn into(self) -> T; +} + +impl Into for U where T: From { + fn into(self) -> T { + T::from(self) + } +} +``` +> Source code: noir_stdlib/src/convert.nr#L13-L23 + + +`Into` is most useful when passing function arguments where the types don't quite match up with what the function expects. In this case, the compiler has enough type information to perform the necessary conversion by just appending `.into()` onto the arguments in question. + + +## `std::cmp` + +### `std::cmp::Eq` + +```rust title="eq-trait" showLineNumbers +trait Eq { + fn eq(self, other: Self) -> bool; +} +``` +> Source code: noir_stdlib/src/cmp.nr#L1-L5 + + +Returns `true` if `self` is equal to `other`. Implementing this trait on a type +allows the type to be used with `==` and `!=`. + +Implementations: +```rust +impl Eq for Field { .. } + +impl Eq for i8 { .. } +impl Eq for i16 { .. } +impl Eq for i32 { .. } +impl Eq for i64 { .. } + +impl Eq for u8 { .. } +impl Eq for u16 { .. } +impl Eq for u32 { .. } +impl Eq for u64 { .. } + +impl Eq for () { .. } +impl Eq for bool { .. } + +impl Eq for [T; N] + where T: Eq { .. } + +impl Eq for [T] + where T: Eq { .. } + +impl Eq for (A, B) + where A: Eq, B: Eq { .. } + +impl Eq for (A, B, C) + where A: Eq, B: Eq, C: Eq { .. } + +impl Eq for (A, B, C, D) + where A: Eq, B: Eq, C: Eq, D: Eq { .. } + +impl Eq for (A, B, C, D, E) + where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. } +``` + +### `std::cmp::Ord` + +```rust title="ord-trait" showLineNumbers +trait Ord { + fn cmp(self, other: Self) -> Ordering; +} +``` +> Source code: noir_stdlib/src/cmp.nr#L102-L106 + + +`a.cmp(b)` compares two values returning `Ordering::less()` if `a < b`, +`Ordering::equal()` if `a == b`, or `Ordering::greater()` if `a > b`. +Implementing this trait on a type allows `<`, `<=`, `>`, and `>=` to be +used on values of the type. + +`std::cmp` also provides `max` and `min` functions for any type which implements the `Ord` trait. 
+ +Implementations: + +```rust +impl Ord for u8 { .. } +impl Ord for u16 { .. } +impl Ord for u32 { .. } +impl Ord for u64 { .. } + +impl Ord for i8 { .. } +impl Ord for i16 { .. } +impl Ord for i32 { .. } + +impl Ord for i64 { .. } + +impl Ord for () { .. } +impl Ord for bool { .. } + +impl Ord for [T; N] + where T: Ord { .. } + +impl Ord for [T] + where T: Ord { .. } + +impl Ord for (A, B) + where A: Ord, B: Ord { .. } + +impl Ord for (A, B, C) + where A: Ord, B: Ord, C: Ord { .. } + +impl Ord for (A, B, C, D) + where A: Ord, B: Ord, C: Ord, D: Ord { .. } + +impl Ord for (A, B, C, D, E) + where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { .. } +``` + +## `std::ops` + +### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` + +These traits abstract over addition, subtraction, multiplication, and division respectively. +Implementing these traits for a given type will also allow that type to be used with the corresponding operator +for that trait (`+` for Add, etc) in addition to the normal method names. + +```rust title="add-trait" showLineNumbers +trait Add { + fn add(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L1-L5 + +```rust title="sub-trait" showLineNumbers +trait Sub { + fn sub(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L17-L21 + +```rust title="mul-trait" showLineNumbers +trait Mul { + fn mul(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L33-L37 + +```rust title="div-trait" showLineNumbers +trait Div { + fn div(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L49-L53 + + +The implementations block below is given for the `Add` trait, but the same types that implement +`Add` also implement `Sub`, `Mul`, and `Div`. + +Implementations: +```rust +impl Add for Field { .. } + +impl Add for i8 { .. } +impl Add for i16 { .. } +impl Add for i32 { .. } +impl Add for i64 { .. } + +impl Add for u8 { .. } +impl Add for u16 { .. 
} +impl Add for u32 { .. } +impl Add for u64 { .. } +``` + +### `std::ops::Rem` + +```rust title="rem-trait" showLineNumbers +trait Rem{ + fn rem(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L65-L69 + + +`Rem::rem(a, b)` is the remainder function returning the result of what is +left after dividing `a` and `b`. Implementing `Rem` allows the `%` operator +to be used with the implementation type. + +Unlike other numeric traits, `Rem` is not implemented for `Field`. + +Implementations: +```rust +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } +``` + +### `std::ops::{ BitOr, BitAnd, BitXor }` + +```rust title="bitor-trait" showLineNumbers +trait BitOr { + fn bitor(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L79-L83 + +```rust title="bitand-trait" showLineNumbers +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L95-L99 + +```rust title="bitxor-trait" showLineNumbers +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L111-L115 + + +Traits for the bitwise operations `|`, `&`, and `^`. + +Implementing `BitOr`, `BitAnd` or `BitXor` for a type allows the `|`, `&`, or `^` operator respectively +to be used with the type. + +The implementations block below is given for the `BitOr` trait, but the same types that implement +`BitOr` also implement `BitAnd` and `BitXor`. 
+ +Implementations: +```rust +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } +``` + +### `std::ops::{ Shl, Shr }` + +```rust title="shl-trait" showLineNumbers +trait Shl { + fn shl(self, other: u8) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L127-L131 + +```rust title="shr-trait" showLineNumbers +trait Shr { + fn shr(self, other: u8) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L142-L146 + + +Traits for a bit shift left and bit shift right. + +Implementing `Shl` for a type allows the left shift operator (`<<`) to be used with the implementation type. +Similarly, implementing `Shr` allows the right shift operator (`>>`) to be used with the type. + +Note that bit shifting is not currently implemented for signed types. + +The implementations block below is given for the `Shl` trait, but the same types that implement +`Shl` also implement `Shr`. 
+ +Implementations: +```rust +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/zeroed.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/zeroed.md new file mode 100644 index 000000000000..f450fecdd364 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/noir/standard_library/zeroed.md @@ -0,0 +1,26 @@ +--- +title: Zeroed Function +description: + The zeroed function returns a zeroed value of any type. +keywords: + [ + zeroed + ] +--- + +Implements `fn zeroed() -> T` to return a zeroed value of any type. This function is generally unsafe to use as the zeroed bit pattern is not guaranteed to be valid for all types. It can however, be useful in cases when the value is guaranteed not to be used such as in a BoundedVec library implementing a growable vector, up to a certain length, backed by an array. The array can be initialized with zeroed values which are guaranteed to be inaccessible until the vector is pushed to. Similarly, enumerations in noir can be implemented using this method by providing zeroed values for the unused variants. + +You can access the function at `std::unsafe::zeroed`. + +This function currently supports the following types: + +- Field +- Bool +- Uint +- Array +- Slice +- String +- Tuple +- Function + +Using it on other types could result in unexpected behavior. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/.nojekyll b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/.nojekyll new file mode 100644 index 000000000000..e2ac6616addc --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 000000000000..d7249d243306 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,160 @@ +# BarretenbergBackend + +## Extends + +- `BarretenbergVerifierBackend` + +## Implements + +- [`Backend`](../index.md#backend) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | `CompiledCircuit` | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +#### Inherited from + +BarretenbergVerifierBackend.constructor + +## Properties + +| Property | Type | Description | Inheritance | +| :------ | :------ | :------ | :------ | +| `acirComposer` | `any` | - | BarretenbergVerifierBackend.acirComposer | +| `acirUncompressedBytecode` | `Uint8Array` | - | BarretenbergVerifierBackend.acirUncompressedBytecode | +| `api` | `Barretenberg` | - | BarretenbergVerifierBackend.api | +| 
`options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | - | BarretenbergVerifierBackend.options | + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Inherited from + +BarretenbergVerifierBackend.destroy + +*** + +### generateProof() + +```ts +generateProof(compressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `compressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<`ProofData`\> + +#### Description + +Generates a proof + +*** + +### generateRecursiveProofArtifacts() + +```ts +generateRecursiveProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +Generates artifacts that will be passed to a circuit that will verify this proof. + +Instead of passing the proof and verification key as a byte array, we pass them +as fields which makes it cheaper to verify in a circuit. + +The proof that is passed here will have been created using a circuit +that has the #[recursive] attribute on its `main` method. + +The number of public inputs denotes how many public inputs are in the inner proof. 
+ +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| `proofData` | `ProofData` | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Example + +```typescript +const artifacts = await backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### getVerificationKey() + +```ts +getVerificationKey(): Promise +``` + +#### Returns + +`Promise`\<`Uint8Array`\> + +#### Inherited from + +BarretenbergVerifierBackend.getVerificationKey + +*** + +### verifyProof() + +```ts +verifyProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | `ProofData` | + +#### Returns + +`Promise`\<`boolean`\> + +#### Inherited from + +BarretenbergVerifierBackend.verifyProof + +#### Description + +Verifies a proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md new file mode 100644 index 000000000000..500276ea7480 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md @@ -0,0 +1,58 @@ +# BarretenbergVerifier + +## Constructors + +### new BarretenbergVerifier(options) + +```ts +new BarretenbergVerifier(options): BarretenbergVerifier +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergVerifier`](BarretenbergVerifier.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +*** + +### verifyProof() + +```ts 
+verifyProof(proofData, verificationKey): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | `ProofData` | +| `verificationKey` | `Uint8Array` | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/index.md new file mode 100644 index 000000000000..649719731960 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/index.md @@ -0,0 +1,59 @@ +# backend_barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | +| [BarretenbergVerifier](classes/BarretenbergVerifier.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | + +## References + +### CompiledCircuit + +Renames and re-exports [Backend](index.md#backend) + +*** + +### ProofData + +Renames and re-exports [Backend](index.md#backend) + +## Variables + +### Backend + +```ts +Backend: any; +``` + +## Functions + +### publicInputsToWitnessMap() + +```ts +publicInputsToWitnessMap(publicInputs, abi): Backend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `publicInputs` | `string`[] | +| `abi` | `Abi` | + +#### Returns + +[`Backend`](index.md#backend) + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md 
b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 000000000000..b49a479f4f46 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,21 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `memory` | `object` | - | +| `memory.maximum` | `number` | - | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 000000000000..d7d5128f9e3c --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier","label":"BarretenbergVerifier"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/.nojekyll b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/.nojekyll new file mode 100644 index 000000000000..e2ac6616addc --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/classes/Noir.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/classes/Noir.md new file mode 100644 index 000000000000..45dd62ee57e7 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/classes/Noir.md @@ -0,0 +1,132 @@ +# Noir + +## Constructors + +### new Noir(circuit, backend) + +```ts +new Noir(circuit, backend?): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | `CompiledCircuit` | +| `backend`? | `any` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the underlying backend instance. + +#### Example + +```typescript +await noir.destroy(); +``` + +*** + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | `InputMap` | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. + +#### Example + +```typescript +async execute(inputs) +``` + +*** + +### generateProof() + +```ts +generateProof(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | `InputMap` | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`ProofData`\> + +#### Description + +Generates a witness and a proof given an object as input. 
+ +#### Example + +```typescript +async generateProof(input) +``` + +*** + +### verifyProof() + +```ts +verifyProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | `ProofData` | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Instantiates the verification key and verifies a proof. + +#### Example + +```typescript +async verifyProof(proof) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/and.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/and.md new file mode 100644 index 000000000000..c783283e3965 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/blake2s256.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/blake2s256.md new file mode 100644 index 000000000000..7882d0da8d50 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | 
| + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md new file mode 100644 index 000000000000..5e3cd53e9d36 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256k1\_verify() + +```ts +ecdsa_secp256k1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256k1 curve. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md new file mode 100644 index 000000000000..0b20ff689575 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256r1\_verify() + +```ts +ecdsa_secp256r1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256r1 curve. 
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/keccak256.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/keccak256.md new file mode 100644 index 000000000000..d10f155ce86f --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/sha256.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/sha256.md new file mode 100644 index 000000000000..6ba4ecac0229 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and 
[TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/xor.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/xor.md new file mode 100644 index 000000000000..8d762b895d30 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/index.md new file mode 100644 index 000000000000..cca6b3ace41f --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/index.md @@ -0,0 +1,54 @@ +# noir_js + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs an foreign call and returns the response. 
| +[ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Verifies an ECDSA signature over the secp256k1 curve. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies an ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +## References + +### CompiledCircuit + +Renames and re-exports [InputMap](index.md#inputmap) + +*** + +### ProofData + +Renames and re-exports [InputMap](index.md#inputmap) + +## Variables + +### InputMap + +```ts +InputMap: any; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 000000000000..812b8b164818 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs a foreign call and returns the response. 
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 000000000000..dd95809186a2 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 000000000000..b71fb78a9469 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git 
a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 000000000000..258c46f9d0c9 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs new file mode 100644 index 000000000000..c6d8125eaada --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type 
Aliases","items":[{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_js/functions/and","label":"and"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/.nojekyll b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/.nojekyll new file mode 100644 index 000000000000..e2ac6616addc --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/compile.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/compile.md new file mode 100644 index 000000000000..6faf763b37f7 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/compile.md @@ -0,0 +1,51 @@ +# compile() + +```ts +compile( + fileManager, + projectPath?, + logFn?, +debugLogFn?): Promise +``` + +Compiles a Noir project + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `fileManager` | `FileManager` | The file manager to use | +| `projectPath`? | `string` | The path to the project inside the file manager. Defaults to the root of the file manager | +| `logFn`? | `LogFn` | A logging function. If not provided, console.log will be used | +| `debugLogFn`? | `LogFn` | A debug logging function. If not provided, logFn will be used | + +## Returns + +`Promise`\<[`ProgramCompilationArtifacts`](../index.md#programcompilationartifacts)\> + +## Example + +```typescript +// Node.js + +import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager(myProjectPath); +const myCompiledCode = await compile_program(fm); +``` + +```typescript +// Browser + +import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager('/'); +for (const path of files) { + await fm.writeFile(path, await getFileAsStream(path)); +} +const myCompiledCode = await compile_program(fm); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/compile_contract.md 
b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/compile_contract.md new file mode 100644 index 000000000000..7d0b39a43ef8 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/compile_contract.md @@ -0,0 +1,51 @@ +# compile\_contract() + +```ts +compile_contract( + fileManager, + projectPath?, + logFn?, +debugLogFn?): Promise +``` + +Compiles a Noir project + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `fileManager` | `FileManager` | The file manager to use | +| `projectPath`? | `string` | The path to the project inside the file manager. Defaults to the root of the file manager | +| `logFn`? | `LogFn` | A logging function. If not provided, console.log will be used | +| `debugLogFn`? | `LogFn` | A debug logging function. If not provided, logFn will be used | + +## Returns + +`Promise`\<[`ContractCompilationArtifacts`](../index.md#contractcompilationartifacts)\> + +## Example + +```typescript +// Node.js + +import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager(myProjectPath); +const myCompiledCode = await compile_contract(fm); +``` + +```typescript +// Browser + +import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager('/'); +for (const path of files) { + await fm.writeFile(path, await getFileAsStream(path)); +} +const myCompiledCode = await compile_contract(fm); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/createFileManager.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/createFileManager.md new file mode 100644 index 000000000000..7e65c1d69c7e --- /dev/null +++ 
b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/createFileManager.md @@ -0,0 +1,21 @@ +# createFileManager() + +```ts +createFileManager(dataDir): FileManager +``` + +Creates a new FileManager instance based on fs in node and memfs in the browser (via webpack alias) + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `dataDir` | `string` | root of the file system | + +## Returns + +`FileManager` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md new file mode 100644 index 000000000000..fcea92753412 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md @@ -0,0 +1,21 @@ +# inflateDebugSymbols() + +```ts +inflateDebugSymbols(debugSymbols): any +``` + +Decompresses and decodes the debug symbols + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `debugSymbols` | `string` | The base64 encoded debug symbols | + +## Returns + +`any` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/index.md new file mode 100644 index 000000000000..b6e0f9d1bc0e --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/index.md @@ -0,0 +1,49 @@ +# noir_wasm + +## Exports + +### Functions + +| Function | Description | +| :------ | :------ | +| 
[compile](functions/compile.md) | Compiles a Noir project | +| [compile\_contract](functions/compile_contract.md) | Compiles a Noir project | +| [createFileManager](functions/createFileManager.md) | Creates a new FileManager instance based on fs in node and memfs in the browser (via webpack alias) | +| [inflateDebugSymbols](functions/inflateDebugSymbols.md) | Decompresses and decodes the debug symbols | + +## References + +### compile\_program + +Renames and re-exports [compile](functions/compile.md) + +## Interfaces + +### ContractCompilationArtifacts + +The compilation artifacts of a given contract. + +#### Properties + +| Property | Type | Description | +| :------ | :------ | :------ | +| `contract` | `ContractArtifact` | The compiled contract. | +| `warnings` | `unknown`[] | Compilation warnings. | + +*** + +### ProgramCompilationArtifacts + +The compilation artifacts of a given program. + +#### Properties + +| Property | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | not part of the compilation output, injected later | +| `program` | `ProgramArtifact` | The compiled program. | +| `warnings` | `unknown`[] | Compilation warnings. 
| + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs new file mode 100644 index 000000000000..e0870710349c --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"doc","id":"reference/NoirJS/noir_wasm/index","label":"API"},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/compile","label":"compile"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/compile_contract","label":"compile_contract"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/createFileManager","label":"createFileManager"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/inflateDebugSymbols","label":"inflateDebugSymbols"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/_category_.json new file mode 100644 index 000000000000..5b6a20a609af --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 4, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/_category_.json new file mode 100644 index 000000000000..27869205ad3c --- /dev/null +++ 
b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Debugger", + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_known_limitations.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_known_limitations.md new file mode 100644 index 000000000000..936d416ac4bc --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_known_limitations.md @@ -0,0 +1,59 @@ +--- +title: Known limitations +description: + An overview of known limitations of the current version of the Noir debugger +keywords: + [ + Nargo, + Noir Debugger, + VS Code, + ] +sidebar_position: 2 +--- + +# Debugger Known Limitations + +There are currently some limits to what the debugger can observe. + +## Mutable references + +The debugger is currently blind to any state mutated via a mutable reference. For example, in: + +``` +let mut x = 1; +let y = &mut x; +*y = 2; +``` + +The update on `x` will not be observed by the debugger. That means, when running `vars` from the debugger REPL, or inspecting the _local variables_ pane in the VS Code debugger, `x` will appear with value 1 despite having executed `*y = 2;`. + +## Variables of type function or mutable references are opaque + +When inspecting variables, any variable of type `Function` or `MutableReference` will render its value as `<<function>>` or `<<mutable ref>>`. + +## Debugger instrumentation affects resulting ACIR + +In order to make the state of local variables observable, the debugger compiles Noir circuits interleaving foreign calls that track any mutations to them. While this works (except in the cases described above) and doesn't introduce any behavior changes, it does as a side effect produce bigger bytecode.
In particular, when running the command `opcodes` on the REPL debugger, you will notice Unconstrained VM blocks that look like this: + +``` +... +5 BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [], q_c: 2 }), Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(2))], q_c: 0 })] + | outputs=[] + 5.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 5.1 | Mov { destination: RegisterIndex(3), source: RegisterIndex(1) } + 5.2 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 5.3 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 5.4 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 5.5 | Mov { destination: RegisterIndex(3), source: RegisterIndex(3) } + 5.6 | Call { location: 8 } + 5.7 | Stop + 5.8 | ForeignCall { function: "__debug_var_assign", destinations: [], inputs: [RegisterIndex(RegisterIndex(2)), RegisterIndex(RegisterIndex(3))] } +... +``` + +If you are interested in debugging/inspecting compiled ACIR without these synthetic changes, you can invoke the REPL debugger with the `--skip-instrumentation` flag or launch the VS Code debugger with the `skipInstrumentation` property set to true in its launch configuration. You can find more details about those in the [Debugger REPL reference](debugger_repl.md) and the [VS Code Debugger reference](debugger_vscode.md). + +:::note +Skipping debugger instrumentation means you won't be able to inspect values of local variables. +::: + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_repl.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_repl.md new file mode 100644 index 000000000000..46e2011304e5 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_repl.md @@ -0,0 +1,360 @@ +--- +title: REPL Debugger +description: + Noir Debugger REPL options and commands.
+keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + REPL, + ] +sidebar_position: 1 +--- + +## Running the REPL debugger + +`nargo debug [OPTIONS] [WITNESS_NAME]` + +Runs the Noir REPL debugger. If a `WITNESS_NAME` is provided the debugger writes the resulting execution witness to a `WITNESS_NAME` file. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover]| +| `--package ` | The name of the package to debug | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +None of these options are required. + +:::note +Since the debugger starts by compiling the target package, all Noir compiler options are also available. Check out the [compiler reference](../nargo_commands.md#nargo-compile) to learn more about the compiler options. +::: + +## REPL commands + +Once the debugger is running, it accepts the following commands. + +#### `help` (h) + +Displays the menu of available commands. 
+ +``` +> help +Available commands: + + opcodes display ACIR opcodes + into step into to the next opcode + next step until a new source location is reached + out step until a new source location is reached + and the current stack frame is finished + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + over step until a new source location is reached + without diving into function calls + restart restart the debugging session + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + memset index:usize value:String update a memory cell with the given + value + continue continue execution until the end of the + program + vars show variable values available at this point + in execution + stacktrace display the current stack trace + memory show memory (valid when executing unconstrained code) value + step step to the next ACIR opcode + +Other commands: + + help Show this help message + quit Quit repl + +``` + +### Stepping through programs + +#### `next` (n) + +Step until the next Noir source code location. While other commands, such as [`into`](#into-i) and [`step`](#step-s), allow for finer grained control of the program's execution at the opcode level, `next` is source code centric. For example: + +``` +3 ... +4 fn main(x: u32) { +5 assert(entry_point(x) == 2); +6 swap_entry_point(x, x + 1); +7 -> assert(deep_entry_point(x) == 4); +8 multiple_values_entry_point(x); +9 } +``` + + +Using `next` here would cause the debugger to jump to the definition of `deep_entry_point` (if available). + +If you want to step over `deep_entry_point` and go straight to line 8, use [the `over` command](#over) instead. + +#### `over` + +Step until the next source code location, without diving into function calls. For example: + +``` +3 ... 
+4 fn main(x: u32) { +5 assert(entry_point(x) == 2); +6 swap_entry_point(x, x + 1); +7 -> assert(deep_entry_point(x) == 4); +8 multiple_values_entry_point(x); +9 } +``` + + +Using `over` here would cause the debugger to execute until line 8 (`multiple_values_entry_point(x);`). + +If you want to step into `deep_entry_point` instead, use [the `next` command](#next-n). + +#### `out` + +Step until the end of the current function call. For example: + +``` + 3 ... + 4 fn main(x: u32) { + 5 assert(entry_point(x) == 2); + 6 swap_entry_point(x, x + 1); + 7 -> assert(deep_entry_point(x) == 4); + 8 multiple_values_entry_point(x); + 9 } + 10 + 11 unconstrained fn returns_multiple_values(x: u32) -> (u32, u32, u32, u32) { + 12 ... + ... + 55 + 56 unconstrained fn deep_entry_point(x: u32) -> u32 { + 57 -> level_1(x + 1) + 58 } + +``` + +Running `out` here will resume execution until line 8. + +#### `step` (s) + +Skips to the next ACIR code. A compiled Noir program is a sequence of ACIR opcodes. However, an unconstrained VM opcode denotes the start of an unconstrained code block, to be executed by the unconstrained VM. For example (redacted for brevity): + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +The `->` here shows the debugger paused at an ACIR opcode: `BRILLIG`, at index 1, which denotes an unconstrained code block is about to start. 
+ +Using the `step` command at this point would result in the debugger stopping at ACIR opcode 2, `EXPR`, skipping unconstrained computation steps. + +Use [the `into` command](#into-i) instead if you want to follow unconstrained computation step by step. + +#### `into` (i) + +Steps into the next opcode. A compiled Noir program is a sequence of ACIR opcodes. However, a BRILLIG opcode denotes the start of an unconstrained code block, to be executed by the unconstrained VM. For example (redacted for brevity): + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +The `->` here shows the debugger paused at an ACIR opcode: `BRILLIG`, at index 1, which denotes an unconstrained code block is about to start. + +Using the `into` command at this point would result in the debugger stopping at opcode 1.0, `Mov ...`, allowing the debugger user to follow unconstrained computation step by step. + +Use [the `step` command](#step-s) instead if you want to skip to the next ACIR code directly. + +#### `continue` (c) + +Continues execution until the next breakpoint, or the end of the program. + +#### `restart` (res) + +Interrupts execution, and restarts a new debugging session from scratch. + +#### `opcodes` (o) + +Display the program's ACIR opcode sequence. 
For example: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +### Breakpoints + +#### `break [Opcode]` (or shorthand `b [Opcode]`) + +Sets a breakpoint on the specified opcode index. To get a list of the program opcode numbers, see [the `opcode` command](#opcodes-o). For example: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +In this example, issuing a `break 1.2` command adds break on opcode 1.2, as denoted by the `*` character: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | * Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... 
+ 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +Running [the `continue` command](#continue-c) at this point would cause the debugger to execute the program until opcode 1.2. + +#### `delete [Opcode]` (or shorthand `d [Opcode]`) + +Deletes a breakpoint at an opcode location. Usage is analogous to [the `break` command](#break-opcode-or-shorthand-b-opcode). + +### Variable inspection + +#### vars + +Show variable values available at this point in execution. + +:::note +The ability to inspect variable values from the debugger depends on compilation to be run in a special debug instrumentation mode. This instrumentation weaves variable tracing code with the original source code. + +So variable value inspection comes at the expense of making the resulting ACIR bytecode bigger and harder to understand and optimize. + +If you find this compromise unacceptable, you can run the debugger with the flag `--skip-instrumentation`. This will compile your circuit without any additional debug information, so the resulting ACIR bytecode will be identical to the one produced by standard Noir compilation. However, if you opt for this, the `vars` command will not be available while debugging. +::: + + +### Stacktrace + +#### `stacktrace` + +Displays the current stack trace. + + +### Witness map + +#### `witness` (w) + +Show witness map. For example: + +``` +_0 = 0 +_1 = 2 +_2 = 1 +``` + +#### `witness [Witness Index]` + +Display a single witness from the witness map. For example: + +``` +> witness 1 +_1 = 2 +``` + +#### `witness [Witness Index] [New value]` + +Overwrite the given index with a new value. For example: + +``` +> witness 1 3 +_1 = 3 +``` + + +### Unconstrained VM memory + +#### `memory` + +Show unconstrained VM memory state. For example: + +``` +> memory +At opcode 1.13: Store { destination_pointer: RegisterIndex(0), source: RegisterIndex(3) } +...
+> registers +0 = 0 +1 = 10 +2 = 0 +3 = 1 +4 = 1 +5 = 2³² +6 = 1 +> into +At opcode 1.14: Const { destination: RegisterIndex(5), value: Value { inner: 1 } } +... +> memory +0 = 1 +> +``` + +In the example above: we start with clean memory, then step through a `Store` opcode which stores the value of register 3 (1) into the memory address stored in register 0 (0). Thus now `memory` shows memory address 0 contains value 1. + +:::note +This command is only functional while the debugger is executing unconstrained code. +::: + +#### `memset [Memory address] [New value]` + +Update a memory cell with the given value. For example: + +``` +> memory +0 = 1 +> memset 0 2 +> memory +0 = 2 +> memset 1 4 +> memory +0 = 2 +1 = 4 +> +``` + +:::note +This command is only functional while the debugger is executing unconstrained code. +::: \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_vscode.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_vscode.md new file mode 100644 index 000000000000..c027332b3b04 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/debugger/debugger_vscode.md @@ -0,0 +1,82 @@ +--- +title: VS Code Debugger +description: + VS Code Debugger configuration and features. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + VS Code, + IDE, + ] +sidebar_position: 0 +--- + +# VS Code Noir Debugger Reference + +The Noir debugger enabled by the vscode-noir extension ships with default settings such that the most common scenario should run without any additional configuration steps. + +These defaults can nevertheless be overridden by defining a launch configuration file. This page provides a reference for the properties you can override via a launch configuration file, as well as documenting the Nargo `dap` command, which is a dependency of the VS Code Noir debugger. 
+ + +## Creating and editing launch configuration files + +To create a launch configuration file from VS Code, open the _debug pane_, and click on _create a launch.json file_. + +![Creating a launch configuration file](@site/static/img/debugger/ref1-create-launch.png) + +A `launch.json` file will be created, populated with basic defaults. + +### Noir Debugger launch.json properties + +#### projectFolder + +_String, optional._ + +Absolute path to the Nargo project to debug. By default, it is dynamically determined by looking for the nearest `Nargo.toml` file to the active file at the moment of launching the debugger. + +#### proverName + +_String, optional._ + +Name of the prover input to use. Defaults to `Prover`, which looks for a file named `Prover.toml` at the `projectFolder`. + +#### generateAcir + +_Boolean, optional._ + +If true, generate ACIR opcodes instead of unconstrained opcodes which will be closer to release binaries but less convenient for debugging. Defaults to `false`. + +#### skipInstrumentation + +_Boolean, optional._ + +Skips variables debugging instrumentation of code, making debugging less convenient but the resulting binary smaller and closer to production. Defaults to `false`. + +:::note +Skipping instrumentation causes the debugger to be unable to inspect local variables. +::: + +## `nargo dap [OPTIONS]` + +When run without any option flags, it starts the Nargo Debug Adapter Protocol server, which acts as the debugging backend for the VS Code Noir Debugger. + +All option flags are related to preflight checks. The Debug Adapter Protocol specifies how errors are to be informed from a running DAP server, but it doesn't specify mechanisms to communicate server initialization errors between the DAP server and its client IDE. + +Thus `nargo dap` ships with a _preflight check_ mode. 
If flag `--preflight-check` and the rest of the `--preflight-*` flags are provided, Nargo will run the same initialization routine except it will not start the DAP server. + +`vscode-noir` will then run `nargo dap` in preflight check mode first before a debugging session starts. If the preflight check ends in error, vscode-noir will present stderr and stdout output from this process through its own Output pane in VS Code. This makes it possible for users to diagnose what pieces of configuration might be wrong or missing in case of initialization errors. + +If the preflight check succeeds, `vscode-noir` proceeds to start the DAP server normally but running `nargo dap` without any additional flags. + +### Options + +| Option | Description | +| --------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | +| `--preflight-check` | If present, dap runs in preflight check mode. | +| `--preflight-project-folder ` | Absolute path to the project to debug for preflight check. | +| `--preflight-prover-name ` | Name of prover file to use for preflight check | +| `--preflight-generate-acir` | Optional. If present, compile in ACIR mode while running preflight check. | +| `--preflight-skip-instrumentation` | Optional. If present, compile without introducing debug instrumentation while running preflight check. | +| `-h, --help` | Print help. 
| diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/nargo_commands.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/nargo_commands.md new file mode 100644 index 000000000000..218fcfb0c8cb --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/reference/nargo_commands.md @@ -0,0 +1,381 @@ +--- +title: Nargo +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +sidebar_position: 0 +--- + +# Command-Line Help for `nargo` + +This document contains the help content for the `nargo` command-line program. + +**Command Overview:** + +* [`nargo`↴](#nargo) +* [`nargo backend`↴](#nargo-backend) +* [`nargo backend current`↴](#nargo-backend-current) +* [`nargo backend ls`↴](#nargo-backend-ls) +* [`nargo backend use`↴](#nargo-backend-use) +* [`nargo backend install`↴](#nargo-backend-install) +* [`nargo backend uninstall`↴](#nargo-backend-uninstall) +* [`nargo check`↴](#nargo-check) +* [`nargo fmt`↴](#nargo-fmt) +* [`nargo codegen-verifier`↴](#nargo-codegen-verifier) +* [`nargo compile`↴](#nargo-compile) +* [`nargo new`↴](#nargo-new) +* [`nargo init`↴](#nargo-init) +* [`nargo execute`↴](#nargo-execute) +* [`nargo prove`↴](#nargo-prove) +* [`nargo verify`↴](#nargo-verify) +* [`nargo test`↴](#nargo-test) +* [`nargo info`↴](#nargo-info) +* [`nargo lsp`↴](#nargo-lsp) + +## `nargo` + +Noir's package manager + +**Usage:** `nargo ` + +###### **Subcommands:** + +* `backend` — Install and select custom backends used to generate and verify proofs +* `check` — Checks the constraint system for errors +* `fmt` — Format the Noir files in a workspace +* `codegen-verifier` — Generates a 
Solidity verifier smart contract for the program +* `compile` — Compile the program and its secret execution trace into ACIR format +* `new` — Create a Noir project in a new directory +* `init` — Create a Noir project in the current directory +* `execute` — Executes a circuit to calculate its return value +* `prove` — Create proof for this program. The proof is returned as a hex encoded string +* `verify` — Given a proof and a program, verify whether the proof is valid +* `test` — Run the tests for this program +* `info` — Provides detailed information on each of a program's function (represented by a single circuit) +* `lsp` — Starts the Noir LSP server + +###### **Options:** + + + + +## `nargo backend` + +Install and select custom backends used to generate and verify proofs + +**Usage:** `nargo backend ` + +###### **Subcommands:** + +* `current` — Prints the name of the currently active backend +* `ls` — Prints the list of currently installed backends +* `use` — Select the backend to use +* `install` — Install a new backend from a URL +* `uninstall` — Uninstalls a backend + + + +## `nargo backend current` + +Prints the name of the currently active backend + +**Usage:** `nargo backend current` + + + +## `nargo backend ls` + +Prints the list of currently installed backends + +**Usage:** `nargo backend ls` + + + +## `nargo backend use` + +Select the backend to use + +**Usage:** `nargo backend use ` + +###### **Arguments:** + +* `` + + + +## `nargo backend install` + +Install a new backend from a URL + +**Usage:** `nargo backend install ` + +###### **Arguments:** + +* `` — The name of the backend to install +* `` — The URL from which to download the backend + + + +## `nargo backend uninstall` + +Uninstalls a backend + +**Usage:** `nargo backend uninstall ` + +###### **Arguments:** + +* `` — The name of the backend to uninstall + + + +## `nargo check` + +Checks the constraint system for errors + +**Usage:** `nargo check [OPTIONS]` + +###### **Options:** + +* 
`--package ` — The name of the package to check +* `--workspace` — Check all packages in the workspace +* `--overwrite` — Force overwrite of existing files +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo fmt` + +Format the Noir files in a workspace + +**Usage:** `nargo fmt [OPTIONS]` + +###### **Options:** + +* `--check` — Run noirfmt in check mode + + + +## `nargo codegen-verifier` + +Generates a Solidity verifier smart contract for the program + +**Usage:** `nargo codegen-verifier [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to codegen +* `--workspace` — Codegen all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo compile` + +Compile the program and its secret execution trace into ACIR format + +**Usage:** `nargo compile [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to compile +* `--workspace` — Compile all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo new` + +Create a Noir project in a new directory + +**Usage:** `nargo new [OPTIONS] ` + +###### **Arguments:** + +* `` — The path to save the new project + +###### **Options:** + +* `--name ` — Name of the package [default: package directory name] +* `--lib` — Use a 
library template +* `--bin` — Use a binary template [default] +* `--contract` — Use a contract template + + + +## `nargo init` + +Create a Noir project in the current directory + +**Usage:** `nargo init [OPTIONS]` + +###### **Options:** + +* `--name ` — Name of the package [default: current directory name] +* `--lib` — Use a library template +* `--bin` — Use a binary template [default] +* `--contract` — Use a contract template + + + +## `nargo execute` + +Executes a circuit to calculate its return value + +**Usage:** `nargo execute [OPTIONS] [WITNESS_NAME]` + +###### **Arguments:** + +* `` — Write the execution witness to named file + +###### **Options:** + +* `-p`, `--prover-name ` — The name of the toml file which contains the inputs for the prover + + Default value: `Prover` +* `--package ` — The name of the package to execute +* `--workspace` — Execute all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo prove` + +Create proof for this program. 
The proof is returned as a hex encoded string + +**Usage:** `nargo prove [OPTIONS]` + +###### **Options:** + +* `-p`, `--prover-name ` — The name of the toml file which contains the inputs for the prover + + Default value: `Prover` +* `-v`, `--verifier-name ` — The name of the toml file which contains the inputs for the verifier + + Default value: `Verifier` +* `--verify` — Verify proof after proving +* `--package ` — The name of the package to prove +* `--workspace` — Prove all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid + +**Usage:** `nargo verify [OPTIONS]` + +###### **Options:** + +* `-v`, `--verifier-name ` — The name of the toml file which contains the inputs for the verifier + + Default value: `Verifier` +* `--package ` — The name of the package verify +* `--workspace` — Verify all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo test` + +Run the tests for this program + +**Usage:** `nargo test [OPTIONS] [TEST_NAME]` + +###### **Arguments:** + +* `` — If given, only tests with names containing this string will be run + +###### **Options:** + +* `--show-output` — Display output of `println` statements +* `--exact` — Only run tests that match exactly +* `--package ` — The name of the package to test +* `--workspace` — Test all packages in the workspace +* `--expression-width ` — Override 
the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo info` + +Provides detailed information on each of a program's function (represented by a single circuit) + +Current information provided per circuit: 1. The number of ACIR opcodes 2. Counts the final number gates in the circuit used by a backend + +**Usage:** `nargo info [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to detail +* `--workspace` — Detail all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo lsp` + +Starts the Noir LSP server + +Starts an LSP server which allows IDEs such as VS Code to display diagnostics in Noir source. + +VS Code Noir Language Support: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir + +**Usage:** `nargo lsp` + + + +
+ + + This document was generated automatically by + clap-markdown. + + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/debugger.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/debugger.md new file mode 100644 index 000000000000..184c436068fc --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/debugger.md @@ -0,0 +1,27 @@ +--- +title: Debugger +description: Learn about the Noir Debugger, in its REPL or VS Code versions. +keywords: [Nargo, VSCode, Visual Studio Code, REPL, Debugger] +sidebar_position: 2 +--- + +# Noir Debugger + +There are currently two ways of debugging Noir programs: + +1. From VS Code, via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). +2. Via the REPL debugger, which ships with Nargo. + +In order to use either version of the debugger, you will need to install recent enough versions of Noir, [Nargo](../getting_started/installation) and vscode-noir: + +- Noir 0.xx +- Nargo 0.xx +- vscode-noir 0.xx + +:::info +At the moment, the debugger supports debugging binary projects, but not contracts. +::: + +We cover the VS Code Noir debugger more in depth in [its VS Code debugger how-to guide](../how_to/debugger/debugging_with_vs_code.md) and [the reference](../reference/debugger/debugger_vscode.md). + +The REPL debugger is discussed at length in [the REPL debugger how-to guide](../how_to/debugger/debugging_with_the_repl.md) and [the reference](../reference/debugger/debugger_repl.md). 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/language_server.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/language_server.md new file mode 100644 index 000000000000..81e0356ef8a1 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/language_server.md @@ -0,0 +1,43 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +sidebar_position: 0 +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. + +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). 
+ +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/testing.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/testing.md new file mode 100644 index 000000000000..d3e0c5224730 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tooling/testing.md @@ -0,0 +1,62 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +sidebar_position: 1 +--- + +You can test your Noir programs using Noir circuits. 
+ +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tutorials/noirjs_app.md b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tutorials/noirjs_app.md new file mode 100644 index 000000000000..6446e0b2a76f --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.29.0/tutorials/noirjs_app.md @@ -0,0 +1,326 @@ +--- +title: Building a web app with NoirJS +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment. 
+keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs, app] +sidebar_position: 0 +pagination_next: noir/concepts/data_types/index +--- + +NoirJS is a set of packages meant to work both in a browser and a server environment. In this tutorial, we will build a simple web app using them. From here, you should get an idea on how to proceed with your own Noir projects! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Setup + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.27.x matches `noir_js@0.27.x`, etc. + +In this guide, we will be pinned to 0.27.0. + +::: + +Before we start, we want to make sure we have Node and Nargo installed. + +We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). + +As for `Nargo`, we can follow the the [Nargo guide](../getting_started/installation/index.md) to install it. If you're lazy, just paste this on a terminal and run `noirup`: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Easy enough. Onwards! + +## Our project + +ZK is a powerful technology. An app that doesn't reveal one of the inputs to _anyone_ is almost unbelievable, yet Noir makes it as easy as a single line of code. + +In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! + +### Nargo + +Run: + +`nargo new circuit` + +And... That's about it. Your program is ready to be compiled and run. + +To compile, let's `cd` into the `circuit` folder to enter our project, and call: + +`nargo compile` + +This compiles our circuit into `json` format and add it to a new `target` folder. 
+ +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit <---- our working directory + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +::: + +### Node and Vite + +If you want to explore Nargo, feel free to go on a side-quest now and follow the steps in the +[getting started](../getting_started/hello_noir/index.md) guide. However, we want our app to run on the browser, so we need Vite. + +Vite is a powerful tool to generate static websites. While it provides all kinds of features, let's just go barebones with some good old vanilla JS. + +To do this this, go back to the previous folder (`cd ..`) and create a new vite project by running `npm create vite` and choosing "Vanilla" and "Javascript". + +A wild `vite-project` directory should now appear in your root folder! Let's not waste any time and dive right in: + +```bash +cd vite-project +``` + +### Setting Up Vite and Configuring the Project + +Before we proceed with any coding, let's get our environment tailored for Noir. We'll start by laying down the foundations with a `vite.config.js` file. This little piece of configuration is our secret sauce for making sure everything meshes well with the NoirJS libraries and other special setups we might need, like handling WebAssembly modules. Here’s how you get that going: + +#### Creating the vite.config.js + +In your freshly minted `vite-project` folder, create a new file named `vite.config.js` and open it in your code editor. 
Paste the following to set the stage: + +```javascript +import { defineConfig } from "vite"; +import copy from "rollup-plugin-copy"; + +export default defineConfig({ + esbuild: { + target: "esnext", + }, + optimizeDeps: { + esbuildOptions: { + target: "esnext", + }, + }, + plugins: [ + copy({ + targets: [ + { src: "node_modules/**/*.wasm", dest: "node_modules/.vite/dist" }, + ], + copySync: true, + hook: "buildStart", + }), + ], + server: { + port: 3000, + }, +}); +``` + +#### Install Dependencies + +Now that our stage is set, install the necessary NoirJS packages along with our other dependencies: + +```bash +npm install && npm install @noir-lang/backend_barretenberg@0.27.0 @noir-lang/noir_js@0.27.0 +npm install rollup-plugin-copy --save-dev +``` + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...etc... +└── vite-project <---- our working directory + └── ...etc... +``` + +::: + +#### Some cleanup + +`npx create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `vite.config.js`, `index.html`, `main.js` and `package.json`. I feel lighter already. + +![my heart is ready for you, noir.js](@site/static/img/memes/titanic.jpeg) + +## HTML + +Our app won't run like this, of course. We need some working HTML, at least. Let's open our broken-hearted `index.html` and replace everything with this code snippet: + +```html + + + + + + +

Noir app

+
+ + +
+
+

Logs

+

Proof

+
+ + +``` + +It _could_ be a beautiful UI... Depending on which universe you live in. + +## Some good old vanilla Javascript + +Our love for Noir needs undivided attention, so let's just open `main.js` and delete everything (this is where the romantic scenery becomes a bit creepy). + +Start by pasting in this boilerplate code: + +```js +const setup = async () => { + await Promise.all([ + import('@noir-lang/noirc_abi').then((module) => + module.default(new URL('@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm', import.meta.url).toString()), + ), + import('@noir-lang/acvm_js').then((module) => + module.default(new URL('@noir-lang/acvm_js/web/acvm_js_bg.wasm', import.meta.url).toString()), + ), + ]); +}; + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} + +document.getElementById('submitGuess').addEventListener('click', async () => { + try { + // here's where love happens + } catch (err) { + display('logs', 'Oh 💔 Wrong guess'); + } +}); +``` + +The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 + +As for the `setup` function, it's just a sad reminder that dealing with `wasm` on the browser is not as easy as it should. Just copy, paste, and forget. + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...same as above +└── vite-project + ├── vite.config.js + ├── main.js + ├── package.json + └── index.html +``` + +You'll see other files and folders showing up (like `package-lock.json`, `node_modules`) but you shouldn't have to care about those. + +::: + +## Some NoirJS + +We're starting with the good stuff now. 
If you've compiled the circuit as described above, you should have a `json` file we want to import at the very top of our `main.js` file: + +```ts +import circuit from '../circuit/target/circuit.json'; +``` + +[Noir is backend-agnostic](../index.mdx#whats-new-about-noir). We write Noir, but we also need a proving backend. That's why we need to import and instantiate the two dependencies we installed above: `BarretenbergBackend` and `Noir`. Let's import them right below: + +```js +import { BarretenbergBackend, BarretenbergVerifier as Verifier } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +And instantiate them inside our try-catch block: + +```ts +// try { +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +// } +``` + +:::note + +For the remainder of the tutorial, everything will be happening inside the `try` block + +::: + +## Our app + +Now for the app itself. We're capturing whatever is in the input when people press the submit button. Just add this: + +```js +const x = parseInt(document.getElementById('guessInput').value); +const input = { x, y: 2 }; +``` + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +await setup(); // let's squeeze our wasm inits here + +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm run dev`. If it doesn't open a browser for you, just visit `localhost:5173`. You should now see the worst UI ever, with an ugly input. + +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +Now, our circuit says `fn main(x: Field, y: pub Field)`. This means only the `y` value is public, and it's hardcoded above: `input = { x, y: 2 }`. 
In other words, you won't need to send your secret`x` to the verifier! + +By inputting any number other than 2 in the input box and clicking "submit", you should get a valid proof. Otherwise the proof won't even generate correctly. By the way, if you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human ❤️. + +## Verifying + +Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add these lines to see our proof being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verificationKey = await backend.getVerificationKey(); +const verifier = new Verifier(); +const isValid = await verifier.verifyProof(proof, verificationKey); +if (isValid) display('logs', 'Verifying proof... ✅'); +``` + +You have successfully generated a client-side Noir web app! + +![coded app without math knowledge](@site/static/img/memes/flextape.jpeg) + +## Further Reading + +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. 
diff --git a/noir/noir-repo/docs/versioned_sidebars/version-v0.29.0-sidebars.json b/noir/noir-repo/docs/versioned_sidebars/version-v0.29.0-sidebars.json new file mode 100644 index 000000000000..b9ad026f69ff --- /dev/null +++ b/noir/noir-repo/docs/versioned_sidebars/version-v0.29.0-sidebars.json @@ -0,0 +1,93 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "The Noir Language", + "items": [ + { + "type": "autogenerated", + "dirName": "noir" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "category", + "label": "How To Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "how_to" + } + ] + }, + { + "type": "category", + "label": "Explainers", + "items": [ + { + "type": "autogenerated", + "dirName": "explainers" + } + ] + }, + { + "type": "category", + "label": "Tutorials", + "items": [ + { + "type": "autogenerated", + "dirName": "tutorials" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "autogenerated", + "dirName": "reference" + } + ] + }, + { + "type": "category", + "label": "Tooling", + "items": [ + { + "type": "autogenerated", + "dirName": "tooling" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/noir/noir-repo/noir_stdlib/src/aes128.nr b/noir/noir-repo/noir_stdlib/src/aes128.nr new file mode 100644 index 000000000000..e6e2a5e49975 --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/aes128.nr @@ -0,0 +1,4 @@ +#[foreign(aes128_encrypt)] +// docs:start:aes128 +pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} +// docs:end:aes128 diff --git a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr index c6a3365a9793..65f3716dd34e 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr @@ -1,4 +1,4 @@ -use crate::cmp::Eq; +use crate::{cmp::Eq, convert::From}; struct BoundedVec { storage: [T; MaxLen], @@ -74,6 +74,13 @@ impl BoundedVec { self.len = new_len; } + pub fn from_array(array: [T; Len]) -> Self { + assert(Len <= MaxLen, "from array out of bounds"); + let mut vec: BoundedVec = BoundedVec::new(); + vec.extend_from_array(array); + vec + } + pub fn pop(&mut self) -> T { assert(self.len > 0); self.len -= 1; @@ -107,6 +114,12 @@ impl Eq for BoundedVec where T: Eq { } } +impl From<[T; Len]> for BoundedVec { + fn from(array: [T; Len]) -> BoundedVec { + BoundedVec::from_array(array) + } +} + mod bounded_vec_tests { // TODO: Allow imports from "super" use crate::collections::bounded_vec::BoundedVec; @@ -128,4 +141,60 @@ mod bounded_vec_tests { assert(bounded_vec1 != bounded_vec2); } + + mod from_array { + use crate::collections::bounded_vec::BoundedVec; + + #[test] + fn empty() { + let empty_array: [Field; 0] = []; + let bounded_vec = BoundedVec::from_array([]); + + assert_eq(bounded_vec.max_len(), 0); + assert_eq(bounded_vec.len(), 0); + assert_eq(bounded_vec.storage(), empty_array); + } + + #[test] + fn equal_len() { + let array = [1, 2, 3]; + let bounded_vec = 
BoundedVec::from_array(array); + + assert_eq(bounded_vec.max_len(), 3); + assert_eq(bounded_vec.len(), 3); + assert_eq(bounded_vec.storage(), array); + } + + #[test] + fn max_len_greater_then_array_len() { + let array = [1, 2, 3]; + let bounded_vec: BoundedVec = BoundedVec::from_array(array); + + assert_eq(bounded_vec.max_len(), 10); + assert_eq(bounded_vec.len(), 3); + assert_eq(bounded_vec.storage()[0], 1); + assert_eq(bounded_vec.storage()[1], 2); + assert_eq(bounded_vec.storage()[2], 3); + } + + #[test(should_fail_with="from array out of bounds")] + fn max_len_lower_then_array_len() { + let _: BoundedVec = BoundedVec::from_array([0; 3]); + } + } + + mod trait_from { + use crate::collections::bounded_vec::BoundedVec; + + #[test] + fn simple() { + let array = [1, 2]; + let bounded_vec: BoundedVec = BoundedVec::from(array); + + assert_eq(bounded_vec.max_len(), 10); + assert_eq(bounded_vec.len(), 2); + assert_eq(bounded_vec.storage()[0], 1); + assert_eq(bounded_vec.storage()[1], 2); + } + } } diff --git a/noir/noir-repo/noir_stdlib/src/scalar_mul.nr b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr similarity index 50% rename from noir/noir-repo/noir_stdlib/src/scalar_mul.nr rename to noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr index 457b7b7791c8..21d658db615b 100644 --- a/noir/noir-repo/noir_stdlib/src/scalar_mul.nr +++ b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr @@ -1,4 +1,4 @@ -use crate::ops::Add; +use crate::ops::arith::{Add, Sub, Neg}; // TODO(https://github.com/noir-lang/noir/issues/4931) struct EmbeddedCurvePoint { @@ -18,42 +18,57 @@ impl Add for EmbeddedCurvePoint { } } -// Computes a fixed base scalar multiplication over the embedded curve. 
+impl Sub for EmbeddedCurvePoint { + fn sub(self, other: EmbeddedCurvePoint) -> EmbeddedCurvePoint { + self + other.neg() + } +} + +impl Neg for EmbeddedCurvePoint { + fn neg(self) -> EmbeddedCurvePoint { + EmbeddedCurvePoint { + x: self.x, + y: -self.y + } + } +} + +// Computes a multi scalar multiplication over the embedded curve. // For bn254, We have Grumpkin and Baby JubJub. // For bls12-381, we have JubJub and Bandersnatch. // // The embedded curve being used is decided by the // underlying proof system. -#[foreign(fixed_base_scalar_mul)] -// docs:start:fixed_base_embedded_curve -pub fn fixed_base_embedded_curve( - low: Field, // low limb of the scalar - high: Field // high limb of the scalar +#[foreign(multi_scalar_mul)] +// docs:start:multi_scalar_mul +pub fn multi_scalar_mul( + points: [Field; N], // points represented as x and y coordinates [x1, y1, x2, y2, ...] + scalars: [Field; N] // scalars represented as low and high limbs [low1, high1, low2, high2, ...] ) -> [Field; 2] -// docs:end:fixed_base_embedded_curve +// docs:end:multi_scalar_mul {} -// Computes a variable base scalar multiplication over the embedded curve. -// For bn254, We have Grumpkin and Baby JubJub. -// For bls12-381, we have JubJub and Bandersnatch. -// -// The embedded curve being used is decided by the -// underlying proof system. 
-// TODO(https://github.com/noir-lang/noir/issues/4931): use a point struct instead of two fields -#[foreign(variable_base_scalar_mul)] -// docs:start:variable_base_embedded_curve -pub fn variable_base_embedded_curve( - point_x: Field, // x coordinate of a point to multiply the scalar with - point_y: Field, // y coordinate of a point to multiply the scalar with - scalar_low: Field, // low limb of the scalar - scalar_high: Field // high limb of the scalar +// docs:start:fixed_base_scalar_mul +pub fn fixed_base_scalar_mul( + scalar_low: Field, + scalar_high: Field ) -> [Field; 2] -// docs:end:variable_base_embedded_curve -{} +// docs:end:fixed_base_scalar_mul +{ + let g1_x = 1; + let g1_y = 17631683881184975370165255887551781615748388533673675138860; + multi_scalar_mul([g1_x, g1_y], [scalar_low, scalar_high]) +} // This is a hack as returning an `EmbeddedCurvePoint` from a foreign function in brillig returns a [BrilligVariable::SingleAddr; 2] rather than BrilligVariable::BrilligArray // as is defined in the brillig bytecode format. This is a workaround which allows us to fix this without modifying the serialization format. 
-fn embedded_curve_add(point1: EmbeddedCurvePoint, point2: EmbeddedCurvePoint) -> EmbeddedCurvePoint { +// docs:start:embedded_curve_add +fn embedded_curve_add( + point1: EmbeddedCurvePoint, + point2: EmbeddedCurvePoint +) -> EmbeddedCurvePoint +// docs:end:embedded_curve_add +{ let point_array = embedded_curve_add_array_return(point1, point2); let x = point_array[0]; let y = point_array[1]; diff --git a/noir/noir-repo/noir_stdlib/src/grumpkin_scalar.nr b/noir/noir-repo/noir_stdlib/src/grumpkin_scalar.nr deleted file mode 100644 index d05158488f49..000000000000 --- a/noir/noir-repo/noir_stdlib/src/grumpkin_scalar.nr +++ /dev/null @@ -1,21 +0,0 @@ -struct GrumpkinScalar { - low: Field, - high: Field, -} - -impl GrumpkinScalar { - pub fn new(low: Field, high: Field) -> Self { - // TODO: check that the low and high value fit within the grumpkin modulus - GrumpkinScalar { low, high } - } -} - -global GRUMPKIN_SCALAR_SERIALIZED_LEN: Field = 2; - -pub fn deserialize_grumpkin_scalar(fields: [Field; GRUMPKIN_SCALAR_SERIALIZED_LEN]) -> GrumpkinScalar { - GrumpkinScalar { low: fields[0], high: fields[1] } -} - -pub fn serialize_grumpkin_scalar(scalar: GrumpkinScalar) -> [Field; GRUMPKIN_SCALAR_SERIALIZED_LEN] { - [scalar.low, scalar.high] -} diff --git a/noir/noir-repo/noir_stdlib/src/grumpkin_scalar_mul.nr b/noir/noir-repo/noir_stdlib/src/grumpkin_scalar_mul.nr deleted file mode 100644 index c1195073ef6d..000000000000 --- a/noir/noir-repo/noir_stdlib/src/grumpkin_scalar_mul.nr +++ /dev/null @@ -1,6 +0,0 @@ -use crate::grumpkin_scalar::GrumpkinScalar; -use crate::scalar_mul::{fixed_base_embedded_curve, variable_base_embedded_curve}; - -pub fn grumpkin_fixed_base(scalar: GrumpkinScalar) -> [Field; 2] { - fixed_base_embedded_curve(scalar.low, scalar.high) -} \ No newline at end of file diff --git a/noir/noir-repo/noir_stdlib/src/lib.nr b/noir/noir-repo/noir_stdlib/src/lib.nr index ebde4b888588..33504be0b9a0 100644 --- a/noir/noir-repo/noir_stdlib/src/lib.nr +++ 
b/noir/noir-repo/noir_stdlib/src/lib.nr @@ -1,4 +1,5 @@ mod hash; +mod aes128; mod array; mod slice; mod merkle; @@ -6,9 +7,7 @@ mod schnorr; mod ecdsa_secp256k1; mod ecdsa_secp256r1; mod eddsa; -mod grumpkin_scalar; -mod grumpkin_scalar_mul; -mod scalar_mul; +mod embedded_curve_ops; mod sha256; mod sha512; mod field; diff --git a/noir/noir-repo/noir_stdlib/src/ops.nr b/noir/noir-repo/noir_stdlib/src/ops.nr index ad65a4e11fe1..8b1903cff0b7 100644 --- a/noir/noir-repo/noir_stdlib/src/ops.nr +++ b/noir/noir-repo/noir_stdlib/src/ops.nr @@ -1,155 +1,5 @@ -// docs:start:add-trait -trait Add { - fn add(self, other: Self) -> Self; -} -// docs:end:add-trait +mod arith; +mod bit; -impl Add for Field { fn add(self, other: Field) -> Field { self + other } } - -impl Add for u64 { fn add(self, other: u64) -> u64 { self + other } } -impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } -impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } - -impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } -impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } -impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } - -// docs:start:sub-trait -trait Sub { - fn sub(self, other: Self) -> Self; -} -// docs:end:sub-trait - -impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } - -impl Sub for u64 { fn sub(self, other: u64) -> u64 { self - other } } -impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } -impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } - -impl Sub for i8 { fn sub(self, other: i8) -> i8 { self - other } } -impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } -impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } - -// docs:start:mul-trait -trait Mul { - fn mul(self, other: Self) -> Self; -} -// docs:end:mul-trait - -impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } - -impl Mul for u64 { fn 
mul(self, other: u64) -> u64 { self * other } } -impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } -impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } - -impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } -impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } -impl Mul for i64 { fn mul(self, other: i64) -> i64 { self * other } } - -// docs:start:div-trait -trait Div { - fn div(self, other: Self) -> Self; -} -// docs:end:div-trait - -impl Div for Field { fn div(self, other: Field) -> Field { self / other } } - -impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } -impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } -impl Div for u8 { fn div(self, other: u8) -> u8 { self / other } } - -impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } -impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } -impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } - -// docs:start:rem-trait -trait Rem{ - fn rem(self, other: Self) -> Self; -} -// docs:end:rem-trait - -impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } -impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } -impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } - -impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } -impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } -impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } - -// docs:start:bitor-trait -trait BitOr { - fn bitor(self, other: Self) -> Self; -} -// docs:end:bitor-trait - -impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } - -impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } -impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } -impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } - -impl BitOr for i8 { fn bitor(self, other: i8) -> 
i8 { self | other } } -impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } -impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } - -// docs:start:bitand-trait -trait BitAnd { - fn bitand(self, other: Self) -> Self; -} -// docs:end:bitand-trait - -impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } - -impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } -impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } -impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } - -impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } -impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } -impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } - -// docs:start:bitxor-trait -trait BitXor { - fn bitxor(self, other: Self) -> Self; -} -// docs:end:bitxor-trait - -impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } - -impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } -impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } -impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } - -impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } -impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } -impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } - -// docs:start:shl-trait -trait Shl { - fn shl(self, other: u8) -> Self; -} -// docs:end:shl-trait - -impl Shl for u32 { fn shl(self, other: u8) -> u32 { self << other } } -impl Shl for u64 { fn shl(self, other: u8) -> u64 { self << other } } -impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } -impl Shl for u1 { fn shl(self, other: u8) -> u1 { self << other } } - -impl Shl for i8 { fn shl(self, other: u8) -> i8 { self << other } } -impl Shl for i32 { fn shl(self, other: u8) -> i32 { self 
<< other } } -impl Shl for i64 { fn shl(self, other: u8) -> i64 { self << other } } - -// docs:start:shr-trait -trait Shr { - fn shr(self, other: u8) -> Self; -} -// docs:end:shr-trait - -impl Shr for u64 { fn shr(self, other: u8) -> u64 { self >> other } } -impl Shr for u32 { fn shr(self, other: u8) -> u32 { self >> other } } -impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } -impl Shr for u1 { fn shr(self, other: u8) -> u1 { self >> other } } - -impl Shr for i8 { fn shr(self, other: u8) -> i8 { self >> other } } -impl Shr for i32 { fn shr(self, other: u8) -> i32 { self >> other } } -impl Shr for i64 { fn shr(self, other: u8) -> i64 { self >> other } } +use arith::{Add, Sub, Mul, Div, Rem, Neg}; +use bit::{Not, BitOr, BitAnd, BitXor, Shl, Shr}; diff --git a/noir/noir-repo/noir_stdlib/src/ops/arith.nr b/noir/noir-repo/noir_stdlib/src/ops/arith.nr new file mode 100644 index 000000000000..df0ff978a7cf --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/ops/arith.nr @@ -0,0 +1,103 @@ +// docs:start:add-trait +trait Add { + fn add(self, other: Self) -> Self; +} +// docs:end:add-trait + +impl Add for Field { fn add(self, other: Field) -> Field { self + other } } + +impl Add for u64 { fn add(self, other: u64) -> u64 { self + other } } +impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } +impl Add for u16 { fn add(self, other: u16) -> u16 { self + other } } +impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } + +impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } +impl Add for i16 { fn add(self, other: i16) -> i16 { self + other } } +impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } +impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } + +// docs:start:sub-trait +trait Sub { + fn sub(self, other: Self) -> Self; +} +// docs:end:sub-trait + +impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } + +impl Sub for u64 { fn sub(self, other: u64) -> u64 { 
self - other } } +impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } +impl Sub for u16 { fn sub(self, other: u16) -> u16 { self - other } } +impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } + +impl Sub for i8 { fn sub(self, other: i8) -> i8 { self - other } } +impl Sub for i16 { fn sub(self, other: i16) -> i16 { self - other } } +impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } +impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } + +// docs:start:mul-trait +trait Mul { + fn mul(self, other: Self) -> Self; +} +// docs:end:mul-trait + +impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } + +impl Mul for u64 { fn mul(self, other: u64) -> u64 { self * other } } +impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } +impl Mul for u16 { fn mul(self, other: u16) -> u16 { self * other } } +impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } + +impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } +impl Mul for i16 { fn mul(self, other: i16) -> i16 { self * other } } +impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } +impl Mul for i64 { fn mul(self, other: i64) -> i64 { self * other } } + +// docs:start:div-trait +trait Div { + fn div(self, other: Self) -> Self; +} +// docs:end:div-trait + +impl Div for Field { fn div(self, other: Field) -> Field { self / other } } + +impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } +impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } +impl Div for u16 { fn div(self, other: u16) -> u16 { self / other } } +impl Div for u8 { fn div(self, other: u8) -> u8 { self / other } } + +impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } +impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } +impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } +impl Div for i64 { fn div(self, other: i64) -> i64 { self / 
other } } + +// docs:start:rem-trait +trait Rem{ + fn rem(self, other: Self) -> Self; +} +// docs:end:rem-trait + +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } + +// docs:start:neg-trait +trait Neg { + fn neg(self) -> Self; +} +// docs:end:neg-trait + +// docs:start:neg-trait-impls +impl Neg for Field { fn neg(self) -> Field { -self } } + +impl Neg for i8 { fn neg(self) -> i8 { -self } } +impl Neg for i16 { fn neg(self) -> i16 { -self } } +impl Neg for i32 { fn neg(self) -> i32 { -self } } +impl Neg for i64 { fn neg(self) -> i64 { -self } } +// docs:end:neg-trait-impls + diff --git a/noir/noir-repo/noir_stdlib/src/ops/bit.nr b/noir/noir-repo/noir_stdlib/src/ops/bit.nr new file mode 100644 index 000000000000..a31cfee878cc --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/ops/bit.nr @@ -0,0 +1,109 @@ +// docs:start:not-trait +trait Not { + fn not(self: Self) -> Self; +} +// docs:end:not-trait + +// docs:start:not-trait-impls +impl Not for bool { fn not(self) -> bool { !self } } + +impl Not for u64 { fn not(self) -> u64 { !self } } +impl Not for u32 { fn not(self) -> u32 { !self } } +impl Not for u16 { fn not(self) -> u16 { !self } } +impl Not for u8 { fn not(self) -> u8 { !self } } +impl Not for u1 { fn not(self) -> u1 { !self } } + +impl Not for i8 { fn not(self) -> i8 { !self } } +impl Not for i16 { fn not(self) -> i16 { !self } } +impl Not for i32 { fn not(self) -> i32 { !self } } +impl Not for i64 { fn not(self) -> i64 { !self } } +// 
docs:end:not-trait-impls + +// docs:start:bitor-trait +trait BitOr { + fn bitor(self, other: Self) -> Self; +} +// docs:end:bitor-trait + +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } + +// docs:start:bitand-trait +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} +// docs:end:bitand-trait + +impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } + +impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } +impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } +impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { self & other } } +impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } + +impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } +impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } +impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } +impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } + +// docs:start:bitxor-trait +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +// docs:end:bitxor-trait + +impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } + +impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } +impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } +impl BitXor for u16 
{ fn bitxor(self, other: u16) -> u16 { self ^ other } } +impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } + +impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } +impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } +impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } +impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } + +// docs:start:shl-trait +trait Shl { + fn shl(self, other: u8) -> Self; +} +// docs:end:shl-trait + +impl Shl for u32 { fn shl(self, other: u8) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u8) -> u64 { self << other } } +impl Shl for u16 { fn shl(self, other: u8) -> u16 { self << other } } +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u1 { fn shl(self, other: u8) -> u1 { self << other } } + +impl Shl for i8 { fn shl(self, other: u8) -> i8 { self << other } } +impl Shl for i16 { fn shl(self, other: u8) -> i16 { self << other } } +impl Shl for i32 { fn shl(self, other: u8) -> i32 { self << other } } +impl Shl for i64 { fn shl(self, other: u8) -> i64 { self << other } } + +// docs:start:shr-trait +trait Shr { + fn shr(self, other: u8) -> Self; +} +// docs:end:shr-trait + +impl Shr for u64 { fn shr(self, other: u8) -> u64 { self >> other } } +impl Shr for u32 { fn shr(self, other: u8) -> u32 { self >> other } } +impl Shr for u16 { fn shr(self, other: u8) -> u16 { self >> other } } +impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } +impl Shr for u1 { fn shr(self, other: u8) -> u1 { self >> other } } + +impl Shr for i8 { fn shr(self, other: u8) -> i8 { self >> other } } +impl Shr for i16 { fn shr(self, other: u8) -> i16 { self >> other } } +impl Shr for i32 { fn shr(self, other: u8) -> i32 { self >> other } } +impl Shr for i64 { fn shr(self, other: u8) -> i64 { self >> other } } + diff --git a/noir/noir-repo/noir_stdlib/src/uint128.nr 
b/noir/noir-repo/noir_stdlib/src/uint128.nr index d0f38079e6f0..9c61fc801f3c 100644 --- a/noir/noir-repo/noir_stdlib/src/uint128.nr +++ b/noir/noir-repo/noir_stdlib/src/uint128.nr @@ -1,4 +1,4 @@ -use crate::ops::{Add, Sub, Mul, Div, Rem, BitOr, BitAnd, BitXor, Shl, Shr}; +use crate::ops::{Add, Sub, Mul, Div, Rem, Not, BitOr, BitAnd, BitXor, Shl, Shr}; use crate::cmp::{Eq, Ord, Ordering}; global pow64 : Field = 18446744073709551616; //2^64; @@ -228,11 +228,20 @@ impl Ord for U128 { } } +impl Not for U128 { + fn not(self) -> U128 { + U128 { + lo: (!(self.lo as u64)) as Field, + hi: (!(self.hi as u64)) as Field + } + } +} + impl BitOr for U128 { fn bitor(self, other: U128) -> U128 { U128 { lo: ((self.lo as u64) | (other.lo as u64)) as Field, - hi: ((self.hi as u64) | (other.hi as u64))as Field + hi: ((self.hi as u64) | (other.hi as u64)) as Field } } } @@ -284,3 +293,20 @@ impl Shr for U128 { self / U128::from_integer(y) } } + +mod test { + use crate::uint128::{U128, pow64}; + + #[test] + fn test_not() { + let num = U128::from_u64s_le(0, 0); + let not_num = num.not(); + + let max_u64: Field = pow64 - 1; + assert_eq(not_num.hi, max_u64); + assert_eq(not_num.lo, max_u64); + + let not_not_num = not_num.not(); + assert_eq(num, not_not_num); + } +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/intrinsic_die/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/intrinsic_die/src/main.nr index 8cac707dfeae..9ce17f72c0d0 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/intrinsic_die/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/intrinsic_die/src/main.nr @@ -2,5 +2,7 @@ use dep::std; // This test checks that we perform dead-instruction-elimination on intrinsic functions. 
fn main(x: Field) { let hash = std::hash::pedersen_commitment([x]); - let _p1 = std::scalar_mul::fixed_base_embedded_curve(x, 0); + let g1_x = 0x0000000000000000000000000000000000000000000000000000000000000001; + let g1_y = 0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c; + let _p1 = std::embedded_curve_ops::multi_scalar_mul([g1_x, g1_y], [x, 0]); } diff --git a/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Nargo.toml b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Nargo.toml new file mode 100644 index 000000000000..29425131cff2 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "aes128_encrypt" +type = "bin" +authors = [""] +compiler_version = ">=0.27.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Prover.toml b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Prover.toml new file mode 100644 index 000000000000..b6b684790e1a --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Prover.toml @@ -0,0 +1,4 @@ +inputs = "kevlovesrust" +iv = "0000000000000000" +key = "0000000000000000" +output = "F40E7EACAB28D0BAADB8E269EE7ACDBF" \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/aes128_encrypt/src/main.nr b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/src/main.nr new file mode 100644 index 000000000000..f6ed0f309c32 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/src/main.nr @@ -0,0 +1,44 @@ +use dep::std; + +unconstrained fn decode_ascii(ascii: u8) -> u8 { + if ascii < 58 { + ascii - 48 + } else if ascii < 71 { + ascii - 55 + } else { + ascii - 87 + } +} + +unconstrained fn decode_hex(s: str) -> [u8; M] { + let mut result: [u8; M] = [0; M]; + let as_bytes = s.as_bytes(); + for i in 0..N { + if i % 2 != 0 { + continue; + } + result[i/2] = 
decode_ascii(as_bytes[i]) * 16 + decode_ascii(as_bytes[i + 1]); + } + result +} + +unconstrained fn cipher(plaintext: [u8; 12], iv: [u8; 16], key: [u8; 16]) -> [u8; 16] { + let slice_res = std::aes128::aes128_encrypt(plaintext, iv, key); + let mut result = [0; 16]; + for i in 0..16 { + result[i] = slice_res[i]; + } + result +} + +fn main(inputs: str<12>, iv: str<16>, key: str<16>, output: str<32>) { + let result = std::aes128::aes128_encrypt(inputs.as_bytes(), iv.as_bytes(), key.as_bytes()); + let output_bytes: [u8; 16] = decode_hex(output); + for i in 0..16 { + assert(result[i] == output_bytes[i]); + } + let unconstrained_result = cipher(inputs.as_bytes(), iv.as_bytes(), key.as_bytes()); + for i in 0..16 { + assert(unconstrained_result[i] == output_bytes[i]); + } +} diff --git a/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/Nargo.toml b/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/Nargo.toml new file mode 100644 index 000000000000..b885d22c0190 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "array_if_cond_simple" +type = "bin" +authors = [""] +compiler_version = ">=0.28.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/Prover.toml b/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/Prover.toml new file mode 100644 index 000000000000..2825143e8ad2 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/Prover.toml @@ -0,0 +1,2 @@ +x = true +y = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/src/main.nr b/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/src/main.nr new file mode 100644 index 000000000000..ee2f762d43c8 --- /dev/null +++ 
b/noir/noir-repo/test_programs/execution_success/array_if_cond_simple/src/main.nr @@ -0,0 +1,8 @@ +fn main(x: bool, mut y: [u32; 30]) { + if x { + y[0] = 1; + } + + let z = y[0] + y[1]; + assert(z == 1); +} diff --git a/noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/Nargo.toml b/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/Nargo.toml similarity index 62% rename from noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/Nargo.toml rename to noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/Nargo.toml index a8e45c9b5ade..b92e11d63839 100644 --- a/noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/Nargo.toml +++ b/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "fixed_base_scalar_mul" +name = "brillig_embedded_curve" type = "bin" authors = [""] diff --git a/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/Prover.toml b/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/Prover.toml new file mode 100644 index 000000000000..7113b9cd0384 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/Prover.toml @@ -0,0 +1,3 @@ +priv_key = "1" +pub_x = "0x0000000000000000000000000000000000000000000000000000000000000001" +pub_y = "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c" \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/src/main.nr b/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/src/main.nr new file mode 100644 index 000000000000..8a1a7f08975f --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/src/main.nr @@ -0,0 +1,24 @@ +use dep::std; + +unconstrained fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { + let g1_y = 17631683881184975370165255887551781615748388533673675138860; + let g1 
= std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y }; + + // Test that multi_scalar_mul correctly derives the public key + let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y], [priv_key, 0]); + assert(res[0] == pub_x); + assert(res[1] == pub_y); + + // Test that double function calling embedded_curve_add works as expected + let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let res = pub_point.double(); + let double = g1.add(g1); + + assert(double.x == res.x); + + // Test calling multi_scalar_mul with multiple points and scalars + let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y, g1.x, g1.y], [priv_key, 0, priv_key, 0]); + + // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice + assert(double.x == res[0]); +} diff --git a/noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/Prover.toml b/noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/Prover.toml deleted file mode 100644 index 69b91cb5f312..000000000000 --- a/noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/Prover.toml +++ /dev/null @@ -1,7 +0,0 @@ -a = "1" -a_pub_x = "0x0000000000000000000000000000000000000000000000000000000000000001" -a_pub_y = "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c" - -b = "2" -b_pub_x = "0x06ce1b0827aafa85ddeb49cdaa36306d19a74caa311e13d46d8bc688cdbffffe" -b_pub_y = "0x1c122f81a3a14964909ede0ba2a6855fc93faf6fa1a788bf467be7e7a43f80ac" \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/src/main.nr b/noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/src/main.nr deleted file mode 100644 index c7c3a85a4fff..000000000000 --- a/noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/src/main.nr +++ /dev/null @@ -1,32 +0,0 @@ -use dep::std; - -unconstrained fn main( - a: Field, - a_pub_x: pub Field, - a_pub_y: pub Field, - b: Field, 
- b_pub_x: pub Field, - b_pub_y: pub Field -) { - let mut priv_key = a; - let mut pub_x: Field = a_pub_x; - let mut pub_y: Field = a_pub_y; - if a != 1 { - // Change `a` in Prover.toml to test input `b` - priv_key = b; - pub_x = b_pub_x; - pub_y = b_pub_y; - } - let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); - assert(res[0] == pub_x); - assert(res[1] == pub_y); - - let pub_point= std::scalar_mul::EmbeddedCurvePoint { x: pub_x, y: pub_y }; - let g1_y = 17631683881184975370165255887551781615748388533673675138860; - let g1= std::scalar_mul::EmbeddedCurvePoint { x: 1, y: g1_y }; - - let res = pub_point.double(); - let double = g1.add(g1); - - assert(double.x == res.x); -} diff --git a/noir/noir-repo/test_programs/execution_success/debug_logs/src/main.nr b/noir/noir-repo/test_programs/execution_success/debug_logs/src/main.nr index ec24b0cc8e88..c7fd01ebbc52 100644 --- a/noir/noir-repo/test_programs/execution_success/debug_logs/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/debug_logs/src/main.nr @@ -71,6 +71,8 @@ fn main(x: Field, y: pub Field) { let closured_lambda = |x| x + one; println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); println(closured_lambda); + + regression_4967(); } fn string_identity(string: fmtstr<14, (Field, Field)>) -> fmtstr<14, (Field, Field)> { @@ -122,3 +124,14 @@ fn regression_2906() { println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); } +fn regression_4967() { + let sentinel: u32 = 8888; + + let slice_of_tuples: [(i32, u8)] = &[(11, 22), (33, 44)]; + println(f"slice_of_tuples: {slice_of_tuples}, sentinel: {sentinel}"); + println(slice_of_tuples); + + let slice_of_tuples_coerced: [(i32, u8)] = [(11, 22), (33, 44)]; + println(f"slice_of_tuples: {slice_of_tuples_coerced}, sentinel: {sentinel}"); + println(slice_of_tuples_coerced); +} diff --git a/noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/Nargo.toml 
b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/Nargo.toml similarity index 65% rename from noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/Nargo.toml rename to noir/noir-repo/test_programs/execution_success/embedded_curve_ops/Nargo.toml index eefd041b899c..1c389149aaf0 100644 --- a/noir/noir-repo/test_programs/execution_success/brillig_scalar_mul/Nargo.toml +++ b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "brillig_scalar_mul" +name = "embedded_curve_ops" type = "bin" authors = [""] diff --git a/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/Prover.toml b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/Prover.toml new file mode 100644 index 000000000000..7113b9cd0384 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/Prover.toml @@ -0,0 +1,3 @@ +priv_key = "1" +pub_x = "0x0000000000000000000000000000000000000000000000000000000000000001" +pub_y = "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c" \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr new file mode 100644 index 000000000000..3cb27d8c1810 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr @@ -0,0 +1,24 @@ +use dep::std; + +fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { + let g1_y = 17631683881184975370165255887551781615748388533673675138860; + let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y }; + + // Test that multi_scalar_mul correctly derives the public key + let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y], [priv_key, 0]); + assert(res[0] == pub_x); + assert(res[1] == pub_y); + + // Test that double function calling embedded_curve_add works as expected + let 
pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let res = pub_point.double(); + let double = g1.add(g1); + + assert(double.x == res.x); + + // Test calling multi_scalar_mul with multiple points and scalars + let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y, g1.x, g1.y], [priv_key, 0, priv_key, 0]); + + // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice + assert(double.x == res[0]); +} diff --git a/noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/Prover.toml b/noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/Prover.toml deleted file mode 100644 index 69b91cb5f312..000000000000 --- a/noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/Prover.toml +++ /dev/null @@ -1,7 +0,0 @@ -a = "1" -a_pub_x = "0x0000000000000000000000000000000000000000000000000000000000000001" -a_pub_y = "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c" - -b = "2" -b_pub_x = "0x06ce1b0827aafa85ddeb49cdaa36306d19a74caa311e13d46d8bc688cdbffffe" -b_pub_y = "0x1c122f81a3a14964909ede0ba2a6855fc93faf6fa1a788bf467be7e7a43f80ac" \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/src/main.nr b/noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/src/main.nr deleted file mode 100644 index e20f47907db7..000000000000 --- a/noir/noir-repo/test_programs/execution_success/fixed_base_scalar_mul/src/main.nr +++ /dev/null @@ -1,31 +0,0 @@ -use dep::std; - -fn main( - a: Field, - a_pub_x: pub Field, - a_pub_y: pub Field, - b: Field, - b_pub_x: pub Field, - b_pub_y: pub Field -) { - let mut priv_key = a; - let mut pub_x: Field = a_pub_x; - let mut pub_y: Field = a_pub_y; - if a != 1 { - // Change `a` in Prover.toml to test input `b` - priv_key = b; - pub_x = b_pub_x; - pub_y = b_pub_y; - } - let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); - assert(res[0] == 
pub_x); - assert(res[1] == pub_y); - let pub_point= std::scalar_mul::EmbeddedCurvePoint { x: pub_x, y: pub_y }; - let g1_y = 17631683881184975370165255887551781615748388533673675138860; - let g1= std::scalar_mul::EmbeddedCurvePoint { x: 1, y: g1_y }; - - let res = pub_point.double(); - let double = g1.add(g1); - - assert(double.x == res.x); -} diff --git a/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/Nargo.toml new file mode 100644 index 000000000000..a0587210464f --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_struct_array_conditional" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/Prover.toml b/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/Prover.toml new file mode 100644 index 000000000000..ef97f9d482a4 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/Prover.toml @@ -0,0 +1,18 @@ +y = 1 +z = 1 + +[[x]] +value = "0x23de33be058ce5504e1ade738db8bdacfe268fa9dbde777092bf1d38519bdf59" +counter = "10" +dummy = "0" + +[[x]] +value = "3" +counter = "2" +dummy = "0" + +[[x]] +value = "2" +counter = "0" +dummy = "0" + diff --git a/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/src/main.nr new file mode 100644 index 000000000000..17502a9fe50d --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_struct_array_conditional/src/main.nr @@ -0,0 +1,38 @@ +struct foo { + value: Field, + counter: u8, + dummy: u8, +} +struct bar { + dummy: [u8;3], + value: Field, + counter: u8, +} 
+struct bar_field { + dummy: [Field;3], + value: Field, + counter: u8, +} +fn main(x: [foo; 3], y: u32, z: u32) -> pub u8 { + let a = [y, z, x[y].counter as u32]; + let mut b = [bar { value: 0, counter: 0, dummy: [0; 3] }; 3]; + let mut c = [bar_field { value: 0, counter: 0, dummy: [0; 3] }; 3]; + for i in 0..3 { + b[i].value = x[i].value; + b[i].counter = x[i].counter; + b[i].dummy[0] = x[i].dummy; + c[i].value = x[i].value; + c[i].counter = x[i].counter; + c[i].dummy[0] = x[i].dummy as Field; + } + if z == 0 { + // offset + assert(y as u8 < x[y].counter); + assert(y <= a[y]); + // first element is compatible + assert(y as u8 < b[y].counter); + // fallback + assert(y as u8 < c[y].counter); + } + x[0].counter +} diff --git a/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr b/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr index c46d3b4594c8..548ba17d4620 100644 --- a/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr @@ -13,7 +13,7 @@ fn main( to_pubkey_y: Field ) -> pub [Field; 2] { // Compute public key from private key to show ownership - let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); + let pubkey = std::embedded_curve_ops::fixed_base_scalar_mul(priv_key, 0); let pubkey_x = pubkey[0]; let pubkey_y = pubkey[1]; // Compute input note commitment diff --git a/noir/noir-repo/test_programs/execution_success/slice_coercion/src/main.nr b/noir/noir-repo/test_programs/execution_success/slice_coercion/src/main.nr index a3785e79afa0..a7ba0443bd18 100644 --- a/noir/noir-repo/test_programs/execution_success/slice_coercion/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/slice_coercion/src/main.nr @@ -16,4 +16,12 @@ fn main(expected: pub Field, first: Field) { let mut hasher = Hasher::new(); hasher.add(first); assert(hasher.fields[0] == expected); + + regression_4967(); +} + +fn 
regression_4967() { + let var1: [(i32, u8)] = [(1, 2)]; + assert(var1.len() == 1); + dep::std::println(var1); } diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml b/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml new file mode 100644 index 000000000000..1c6b58e01e8d --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "u16_support" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml b/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml new file mode 100644 index 000000000000..a56a84e61a45 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml @@ -0,0 +1 @@ +x = "2" diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr b/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr new file mode 100644 index 000000000000..e8b418f16da4 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr @@ -0,0 +1,24 @@ +fn main(x: u16) { + test_u16(x); + test_u16_unconstrained(x); +} + +unconstrained fn test_u16_unconstrained(x: u16) { + test_u16(x) +} + +fn test_u16(x: u16) { + let t1: u16 = 1234; + let t2: u16 = 4321; + let t = t1 + t2; + + let t4 = t - t2; + assert(t4 == t1); + + let mut small_int = x as u16; + let shift = small_int << (x as u8); + assert(shift == 8); + assert(shift >> (x as u8) == small_int); + assert(shift >> 15 == 0); + assert(shift << 15 == 0); +} diff --git a/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/Nargo.toml b/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/Nargo.toml deleted file mode 100644 index 66712ab503cb..000000000000 --- a/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/Nargo.toml 
+++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "variable_base_scalar_mul" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/Prover.toml b/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/Prover.toml deleted file mode 100644 index 51d6fc9b96c5..000000000000 --- a/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/Prover.toml +++ /dev/null @@ -1,4 +0,0 @@ -point_x = "0x0000000000000000000000000000000000000000000000000000000000000001" -point_y = "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c" -scalar_low = "0x0000000000000000000000000000000000000000000000000000000000000003" -scalar_high = "0x0000000000000000000000000000000000000000000000000000000000000000" diff --git a/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/src/main.nr b/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/src/main.nr deleted file mode 100644 index 4914ad017771..000000000000 --- a/noir/noir-repo/test_programs/execution_success/variable_base_scalar_mul/src/main.nr +++ /dev/null @@ -1,33 +0,0 @@ -use dep::std; - -fn main(point_x: pub Field, point_y: pub Field, scalar_low: pub Field, scalar_high: pub Field) { - // We multiply the point by 3 and check it matches result out of embedded_curve_add func - let res = std::scalar_mul::variable_base_embedded_curve(point_x, point_y, scalar_low, scalar_high); - - let point = std::scalar_mul::EmbeddedCurvePoint { x: point_x, y: point_y }; - - let double = point.double(); - let triple = point + double; - - assert(triple.x == res[0]); - assert(triple.y == res[1]); - - // We test that brillig gives us the same result - let brillig_res = get_brillig_result(point_x, point_y, scalar_low, scalar_high); - assert(res[0] == brillig_res[0]); - assert(res[1] == brillig_res[1]); - - // Multiplying the point by 1 should return the same point - let res = 
std::scalar_mul::variable_base_embedded_curve(point_x, point_y, 1, 0); - assert(point_x == res[0]); - assert(point_y == res[1]); -} - -unconstrained fn get_brillig_result( - point_x: Field, - point_y: Field, - scalar_low: Field, - scalar_high: Field -) -> [Field; 2] { - std::scalar_mul::variable_base_embedded_curve(point_x, point_y, scalar_low, scalar_high) -} diff --git a/noir/noir-repo/tooling/backend_interface/Cargo.toml b/noir/noir-repo/tooling/backend_interface/Cargo.toml index b731c138c7db..f6b5d5d0132b 100644 --- a/noir/noir-repo/tooling/backend_interface/Cargo.toml +++ b/noir/noir-repo/tooling/backend_interface/Cargo.toml @@ -13,7 +13,6 @@ license.workspace = true acvm.workspace = true dirs.workspace = true thiserror.workspace = true -serde.workspace = true serde_json.workspace = true bb_abstraction_leaks.workspace = true tracing.workspace = true diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/info.rs b/noir/noir-repo/tooling/backend_interface/src/cli/info.rs deleted file mode 100644 index 6e6603ce53e5..000000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/info.rs +++ /dev/null @@ -1,62 +0,0 @@ -use acvm::acir::circuit::ExpressionWidth; - -use serde::Deserialize; -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -pub(crate) struct InfoCommand { - pub(crate) crs_path: PathBuf, -} - -#[derive(Deserialize)] -struct InfoResponse { - language: LanguageResponse, -} - -#[derive(Deserialize)] -struct LanguageResponse { - name: String, - width: Option, -} - -impl InfoCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command.arg("info").arg("-c").arg(self.crs_path).arg("-o").arg("-"); - - let output = command.output()?; - - if !output.status.success() { - return Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))); - } - - let backend_info: InfoResponse = - 
serde_json::from_slice(&output.stdout).expect("Backend should return valid json"); - let expression_width: ExpressionWidth = match backend_info.language.name.as_str() { - "PLONK-CSAT" => { - let width = backend_info.language.width.unwrap(); - ExpressionWidth::Bounded { width } - } - "R1CS" => ExpressionWidth::Unbounded, - _ => panic!("Unknown Expression width configuration"), - }; - - Ok(expression_width) - } -} - -#[test] -fn info_command() -> Result<(), BackendError> { - let backend = crate::get_mock_backend()?; - let crs_path = backend.backend_directory(); - - let expression_width = InfoCommand { crs_path }.run(backend.binary_path())?; - - assert!(matches!(expression_width, ExpressionWidth::Bounded { width: 4 })); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs index b4dec859839e..df43bd5cc2f6 100644 --- a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs +++ b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs @@ -2,7 +2,6 @@ mod contract; mod gates; -mod info; mod proof_as_fields; mod prove; mod verify; @@ -12,7 +11,6 @@ mod write_vk; pub(crate) use contract::ContractCommand; pub(crate) use gates::GatesCommand; -pub(crate) use info::InfoCommand; pub(crate) use proof_as_fields::ProofAsFieldsCommand; pub(crate) use prove::ProveCommand; pub(crate) use verify::VerifyCommand; diff --git a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs index fa1f82a5722d..20a6dcf70f11 100644 --- a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs +++ b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs @@ -3,7 +3,7 @@ use std::io::Write; use std::path::Path; use acvm::acir::{ - circuit::{ExpressionWidth, Program}, + circuit::Program, native_types::{WitnessMap, WitnessStack}, }; use acvm::FieldElement; @@ -11,8 +11,8 @@ use tempfile::tempdir; use tracing::warn; use crate::cli::{ - 
GatesCommand, InfoCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, - VkAsFieldsCommand, WriteVkCommand, + GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, VkAsFieldsCommand, + WriteVkCommand, }; use crate::{Backend, BackendError}; @@ -33,25 +33,6 @@ impl Backend { .run(binary_path) } - pub fn get_backend_info(&self) -> Result { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - InfoCommand { crs_path: self.crs_directory() }.run(binary_path) - } - - /// If we cannot get a valid backend, returns `ExpressionWidth::Bound { width: 4 }`` - /// The function also prints a message saying we could not find a backend - pub fn get_backend_info_or_default(&self) -> ExpressionWidth { - if let Ok(expression_width) = self.get_backend_info() { - expression_width - } else { - warn!( - "No valid backend found, ExpressionWidth defaulting to Bounded with a width of 4" - ); - ExpressionWidth::Bounded { width: 4 } - } - } - #[tracing::instrument(level = "trace", skip_all)] pub fn prove( &self, diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs deleted file mode 100644 index 75a6d323e7bd..000000000000 --- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs +++ /dev/null @@ -1,40 +0,0 @@ -use clap::Args; -use std::io::Write; -use std::path::PathBuf; - -const INFO_RESPONSE: &str = r#"{ - "language": { - "name": "PLONK-CSAT", - "width": 4 - }, - "opcodes_supported": ["arithmetic", "directive", "brillig", "memory_init", "memory_op"], - "black_box_functions_supported": [ - "and", - "xor", - "range", - "sha256", - "blake2s", - "blake3", - "keccak256", - "schnorr_verify", - "pedersen", - "pedersen_hash", - "ecdsa_secp256k1", - "ecdsa_secp256r1", - "fixed_base_scalar_mul", - "recursive_aggregation" - ] -}"#; - -#[derive(Debug, Clone, Args)] -pub(crate) struct InfoCommand { 
- #[clap(short = 'c')] - pub(crate) crs_path: Option, - - #[clap(short = 'o')] - pub(crate) info_path: Option, -} - -pub(crate) fn run(_args: InfoCommand) { - std::io::stdout().write_all(INFO_RESPONSE.as_bytes()).unwrap(); -} diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs index ef8819af94b4..74ea82d28f8a 100644 --- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs +++ b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs @@ -7,7 +7,6 @@ use clap::{Parser, Subcommand}; mod contract_cmd; mod gates_cmd; -mod info_cmd; mod prove_cmd; mod verify_cmd; mod write_vk_cmd; @@ -21,7 +20,6 @@ struct BackendCli { #[derive(Subcommand, Clone, Debug)] enum BackendCommand { - Info(info_cmd::InfoCommand), Contract(contract_cmd::ContractCommand), Gates(gates_cmd::GatesCommand), Prove(prove_cmd::ProveCommand), @@ -34,7 +32,6 @@ fn main() { let BackendCli { command } = BackendCli::parse(); match command { - BackendCommand::Info(args) => info_cmd::run(args), BackendCommand::Contract(args) => contract_cmd::run(args), BackendCommand::Gates(args) => gates_cmd::run(args), BackendCommand::Prove(args) => prove_cmd::run(args), diff --git a/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs b/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs index b3dfff9e94c7..45da7f9d00c3 100644 --- a/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs +++ b/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.35.1"; +const VERSION: &str = "0.38.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/noir/noir-repo/tooling/debugger/ignored-tests.txt b/noir/noir-repo/tooling/debugger/ignored-tests.txt index 
d08f5609645e..cda261694216 100644 --- a/noir/noir-repo/tooling/debugger/ignored-tests.txt +++ b/noir/noir-repo/tooling/debugger/ignored-tests.txt @@ -24,3 +24,4 @@ fold_distinct_return fold_fibonacci fold_complex_outputs slice_init_with_complex_type +hashmap diff --git a/noir/noir-repo/tooling/debugger/src/context.rs b/noir/noir-repo/tooling/debugger/src/context.rs index ea32c864a0b8..646beaf00963 100644 --- a/noir/noir-repo/tooling/debugger/src/context.rs +++ b/noir/noir-repo/tooling/debugger/src/context.rs @@ -862,7 +862,11 @@ mod tests { let opcodes = vec![ Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, - Opcode::MemoryInit { block_id: BlockId(0), init: vec![] }, + Opcode::MemoryInit { + block_id: BlockId(0), + init: vec![], + block_type: acvm::acir::circuit::opcodes::BlockType::Memory, + }, Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, Opcode::AssertZero(Expression::default()), ]; diff --git a/noir/noir-repo/tooling/lsp/src/solver.rs b/noir/noir-repo/tooling/lsp/src/solver.rs index b47c30af5f68..249406effaf2 100644 --- a/noir/noir-repo/tooling/lsp/src/solver.rs +++ b/noir/noir-repo/tooling/lsp/src/solver.rs @@ -24,22 +24,12 @@ impl BlackBoxFunctionSolver for WrapperSolver { self.0.pedersen_commitment(inputs, domain_separator) } - fn fixed_base_scalar_mul( + fn multi_scalar_mul( &self, - low: &acvm::FieldElement, - high: &acvm::FieldElement, + points: &[acvm::FieldElement], + scalars: &[acvm::FieldElement], ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - self.0.fixed_base_scalar_mul(low, high) - } - - fn variable_base_scalar_mul( - &self, - point_x: &acvm::FieldElement, - point_y: &acvm::FieldElement, - scalar_low: &acvm::FieldElement, - scalar_high: &acvm::FieldElement, - ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - self.0.variable_base_scalar_mul(point_x, point_y, scalar_low, scalar_high) + 
self.0.multi_scalar_mul(points, scalars) } fn pedersen_hash( diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs b/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs index 83bb4b94f820..a864da7c33cd 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs @@ -9,7 +9,7 @@ use std::collections::{BTreeMap, HashMap}; use fm::FileId; -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractOutputsArtifact { pub structs: HashMap>, pub globals: HashMap>, @@ -21,7 +21,7 @@ impl From for ContractOutputsArtifact { } } -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractArtifact { /// Version of noir used to compile this contract pub noir_version: String, @@ -51,7 +51,7 @@ impl From for ContractArtifact { /// /// A contract function unlike a regular Noir program however can have additional properties. /// One of these being a function type. 
-#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct ContractFunctionArtifact { pub name: String, diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs b/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs index 496896468cc2..2570c3f5c9f2 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs @@ -9,6 +9,7 @@ use std::{ }; pub use super::debug_vars::{DebugVars, StackFrame}; +use super::{contract::ContractArtifact, program::ProgramArtifact}; use fm::{FileId, FileManager, PathString}; /// A Debug Artifact stores, for a given program, the debug info for every function @@ -128,6 +129,16 @@ impl From for DebugArtifact { } } +impl From for DebugArtifact { + fn from(program_artifact: ProgramArtifact) -> Self { + DebugArtifact { + debug_symbols: program_artifact.debug_symbols.debug_infos, + file_map: program_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl From for DebugArtifact { fn from(compiled_artifact: CompiledContract) -> Self { let all_functions_debug: Vec = compiled_artifact @@ -144,6 +155,22 @@ impl From for DebugArtifact { } } +impl From for DebugArtifact { + fn from(compiled_artifact: ContractArtifact) -> Self { + let all_functions_debug: Vec = compiled_artifact + .functions + .into_iter() + .flat_map(|contract_function| contract_function.debug_symbols.debug_infos) + .collect(); + + DebugArtifact { + debug_symbols: all_functions_debug, + file_map: compiled_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl<'a> Files<'a> for DebugArtifact { type FileId = FileId; type Name = PathString; diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/debug_vars.rs b/noir/noir-repo/tooling/nargo/src/artifacts/debug_vars.rs index 6a42a4c33112..3695fa478421 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/debug_vars.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/debug_vars.rs @@ -96,13 +96,11 @@ impl DebugVars { 
PrintableType::Array { length, typ }, ) => { assert!(!*is_slice, "slice has array type"); - if let Some(len) = length { - if *index as u64 >= *len { - panic!("unexpected field index past array length") - } - if *len != array_elements.len() as u64 { - panic!("type/array length mismatch") - } + if *index as u64 >= *length { + panic!("unexpected field index past array length") + } + if *length != array_elements.len() as u64 { + panic!("type/array length mismatch") } (array_elements.get_mut(*index as usize).unwrap(), &*Box::leak(typ.clone())) } @@ -110,7 +108,7 @@ impl DebugVars { PrintableValue::Vec { array_elements, is_slice }, PrintableType::Slice { typ }, ) => { - assert!(*is_slice, "array has slice type"); + assert!(*is_slice, "slice doesn't have slice type"); (array_elements.get_mut(*index as usize).unwrap(), &*Box::leak(typ.clone())) } ( diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/program.rs b/noir/noir-repo/tooling/nargo/src/artifacts/program.rs index 67ac9f53ec82..3c25b9e33454 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/program.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/program.rs @@ -8,7 +8,7 @@ use noirc_driver::DebugFile; use noirc_errors::debug_info::ProgramDebugInfo; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug)] pub struct ProgramArtifact { pub noir_version: String, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index 2b729e44b8a5..208379b098d2 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -1,4 +1,3 @@ -use crate::backends::Backend; use crate::errors::CliError; use clap::Args; @@ -42,11 +41,7 @@ pub(crate) struct CheckCommand { compile_options: CompileOptions, } -pub(crate) fn run( - _backend: &Backend, - args: CheckCommand, - config: NargoConfig, -) -> Result<(), CliError> { 
+pub(crate) fn run(args: CheckCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 259e209b65a3..04ed5c2b6b87 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -1,13 +1,13 @@ +use super::compile_cmd::compile_workspace_full; use super::fs::{create_named_dir, write_to_file}; use super::NargoConfig; use crate::backends::Backend; +use crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; use clap::Args; -use nargo::ops::{compile_program, report_errors}; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Generates a Solidity verifier smart contract for the program @@ -40,29 +40,13 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; - let expression_width = backend.get_backend_info()?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let program = nargo::ops::transform_program(program, expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); // TODO(https://github.com/noir-lang/noir/issues/4428): // We do not expect to have a smart contract verifier for a foldable program with multiple circuits. diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index 54e8535f0948..8f28e5d93888 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -20,7 +20,6 @@ use noirc_frontend::hir::ParsedFiles; use notify::{EventKind, RecursiveMode, Watcher}; use notify_debouncer_full::new_debouncer; -use crate::backends::Backend; use crate::errors::CliError; use super::fs::program::only_acir; @@ -47,11 +46,7 @@ pub(crate) struct CompileCommand { watch: bool, } -pub(crate) fn run( - backend: &Backend, - mut args: CompileCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -63,10 +58,6 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), 
)?; - if args.compile_options.expression_width.is_none() { - args.compile_options.expression_width = Some(backend.get_backend_info_or_default()); - }; - if args.watch { watch_workspace(&workspace, &args.compile_options) .map_err(|err| CliError::Generic(err.to_string()))?; @@ -120,7 +111,7 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } -fn compile_workspace_full( +pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, ) -> Result<(), CliError> { @@ -128,8 +119,6 @@ fn compile_workspace_full( insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager); let parsed_files = parse_all(&workspace_file_manager); - let expression_width = - compile_options.expression_width.expect("expression width should have been set"); let compiled_workspace = compile_workspace(&workspace_file_manager, &parsed_files, workspace, compile_options); @@ -149,12 +138,12 @@ fn compile_workspace_full( // Save build artifacts to disk. 
let only_acir = compile_options.only_acir; for (package, program) in binary_packages.into_iter().zip(compiled_programs) { - let program = nargo::ops::transform_program(program, expression_width); + let program = nargo::ops::transform_program(program, compile_options.expression_width); save_program(program.clone(), &package, &workspace.target_directory_path(), only_acir); } let circuit_dir = workspace.target_directory_path(); for (package, contract) in contract_packages.into_iter().zip(compiled_contracts) { - let contract = nargo::ops::transform_contract(contract, expression_width); + let contract = nargo::ops::transform_contract(contract, compile_options.expression_width); save_contract(contract, &package, &circuit_dir); } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs index ba4f91609ef3..124e30069ae3 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -1,6 +1,5 @@ use acvm::acir::circuit::ExpressionWidth; use acvm::acir::native_types::WitnessMap; -use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; use nargo::workspace::Workspace; @@ -29,8 +28,8 @@ use noir_debugger::errors::{DapError, LoadError}; #[derive(Debug, Clone, Args)] pub(crate) struct DapCommand { /// Override the expression width requested by the backend. 
- #[arg(long, value_parser = parse_expression_width)] - expression_width: Option, + #[arg(long, value_parser = parse_expression_width, default_value = "4")] + expression_width: ExpressionWidth, #[clap(long)] preflight_check: bool, @@ -249,14 +248,7 @@ fn run_preflight_check( Ok(()) } -pub(crate) fn run( - backend: &Backend, - args: DapCommand, - _config: NargoConfig, -) -> Result<(), CliError> { - let expression_width = - args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); - +pub(crate) fn run(args: DapCommand, _config: NargoConfig) -> Result<(), CliError> { // When the --preflight-check flag is present, we run Noir's DAP server in "pre-flight mode", which test runs // the DAP initialization code without actually starting the DAP server. // @@ -270,12 +262,12 @@ pub(crate) fn run( // the DAP loop is established, which otherwise are considered "out of band" by the maintainers of the DAP spec. // More details here: https://github.com/microsoft/vscode/issues/108138 if args.preflight_check { - return run_preflight_check(expression_width, args).map_err(CliError::DapError); + return run_preflight_check(args.expression_width, args).map_err(CliError::DapError); } let output = BufWriter::new(std::io::stdout()); let input = BufReader::new(std::io::stdin()); let server = Server::new(input, output); - loop_uninitialized_dap(server, expression_width).map_err(CliError::DapError) + loop_uninitialized_dap(server, args.expression_width).map_err(CliError::DapError) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs index 7cb5cd7846b5..f950cd0405cc 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -24,7 +24,6 @@ use noirc_frontend::hir::ParsedFiles; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; -use crate::backends::Backend; use 
crate::errors::CliError; /// Executes a circuit in debug mode @@ -53,11 +52,7 @@ pub(crate) struct DebugCommand { skip_instrumentation: Option, } -pub(crate) fn run( - backend: &Backend, - args: DebugCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: DebugCommand, config: NargoConfig) -> Result<(), CliError> { let acir_mode = args.acir_mode; let skip_instrumentation = args.skip_instrumentation.unwrap_or(acir_mode); @@ -69,10 +64,6 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; let target_dir = &workspace.target_directory_path(); - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( @@ -89,7 +80,8 @@ pub(crate) fn run( args.compile_options.clone(), )?; - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); + let compiled_program = + nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index 854ad5590121..862a46884efa 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -5,20 +5,18 @@ use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::ops::{compile_program, report_errors, DefaultForeignCallExecutor}; +use nargo::ops::DefaultForeignCallExecutor; use nargo::package::Package; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, 
InputValue}; use noirc_abi::InputMap; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; +use super::compile_cmd::compile_workspace_full; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; -use crate::backends::Backend; +use crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; /// Executes a circuit to calculate its return value @@ -48,11 +46,7 @@ pub(crate) struct ExecuteCommand { oracle_resolver: Option, } -pub(crate) fn run( - backend: &Backend, - args: ExecuteCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -64,35 +58,16 @@ pub(crate) fn run( )?; let target_dir = &workspace.target_directory_path(); - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); let (return_value, witness_stack) = execute_program_and_decode( - compiled_program, + program, package, &args.prover_name, args.oracle_resolver.as_deref(), diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs index 044c2cb4ebb5..a61f3ccfc026 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs @@ -20,7 +20,6 @@ use noirc_frontend::graph::CrateName; use clap::Args; -use crate::backends::Backend; use crate::errors::CliError; use super::check_cmd::check_crate_and_report_errors; @@ -43,11 +42,7 @@ pub(crate) struct ExportCommand { compile_options: CompileOptions, } -pub(crate) fn run( - _backend: &Backend, - args: ExportCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: ExportCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; diff --git 
a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs index 77005e8d5af5..72d686b0b368 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs @@ -60,3 +60,16 @@ pub(crate) fn read_program_from_file>( Ok(program) } + +pub(crate) fn read_contract_from_file>( + circuit_path: P, +) -> Result { + let file_path = circuit_path.as_ref().with_extension("json"); + + let input_string = + std::fs::read(&file_path).map_err(|_| FilesystemError::PathNotValid(file_path))?; + let contract = serde_json::from_slice(&input_string) + .map_err(|err| FilesystemError::ProgramSerializationError(err.to_string()))?; + + Ok(contract) +} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index 3695fb57d311..1ae2d5db1044 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -5,14 +5,11 @@ use backend_interface::BackendError; use clap::Args; use iter_extended::vecmap; use nargo::{ - artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, - ops::report_errors, package::Package, parse_all, + artifacts::{contract::ContractArtifact, debug::DebugArtifact, program::ProgramArtifact}, + package::Package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledContract, CompiledProgram, - NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; use prettytable::{row, table, Row}; @@ -22,7 +19,11 @@ use serde::Serialize; use crate::backends::Backend; use crate::errors::CliError; -use super::{compile_cmd::compile_workspace, NargoConfig}; +use super::{ + 
compile_cmd::compile_workspace_full, + fs::program::{read_contract_from_file, read_program_from_file}, + NargoConfig, +}; /// Provides detailed information on each of a program's function (represented by a single circuit) /// @@ -66,39 +67,32 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); - let compiled_workspace = compile_workspace( - &workspace_file_manager, - &parsed_files, - &workspace, - &args.compile_options, - ); - - let (compiled_programs, compiled_contracts) = report_errors( - compiled_workspace, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; - let compiled_programs = vecmap(compiled_programs, |program| { - nargo::ops::transform_program(program, expression_width) - }); - let compiled_contracts = vecmap(compiled_contracts, |contract| { - nargo::ops::transform_contract(contract, expression_width) - }); + let binary_packages: Vec<(Package, ProgramArtifact)> = workspace + .into_iter() + .filter(|package| package.is_binary()) + .map(|package| -> Result<(Package, ProgramArtifact), CliError> { + let program_artifact_path = workspace.package_build_path(package); + let program = read_program_from_file(program_artifact_path)?; + Ok((package.clone(), program)) + }) + .collect::>()?; + + let compiled_contracts: Vec = workspace + .into_iter() + .filter(|package| package.is_contract()) + .map(|package| { + let contract_artifact_path = workspace.package_build_path(package); + read_contract_from_file(contract_artifact_path) + }) + .collect::>()?; if args.profile_info { - for compiled_program in &compiled_programs { + for (_, compiled_program) in &binary_packages { let debug_artifact = DebugArtifact::from(compiled_program.clone()); - for function_debug in compiled_program.debug.iter() { + for function_debug in compiled_program.debug_symbols.debug_infos.iter() { let span_opcodes = function_debug.count_span_opcodes(); print_span_opcodes(span_opcodes, &debug_artifact); } @@ -108,7 +102,7 @@ pub(crate) fn run( let debug_artifact = DebugArtifact::from(compiled_contract.clone()); let functions = &compiled_contract.functions; for contract_function in functions { - for function_debug in contract_function.debug.iter() { + for function_debug in contract_function.debug_symbols.debug_infos.iter() { let span_opcodes = function_debug.count_span_opcodes(); print_span_opcodes(span_opcodes, &debug_artifact); } @@ -116,19 +110,28 @@ pub(crate) fn run( } } - let binary_packages = - workspace.into_iter().filter(|package| package.is_binary()).zip(compiled_programs); - let 
program_info = binary_packages + .into_iter() .par_bridge() .map(|(package, program)| { - count_opcodes_and_gates_in_program(backend, program, package, expression_width) + count_opcodes_and_gates_in_program( + backend, + program, + &package, + args.compile_options.expression_width, + ) }) .collect::>()?; let contract_info = compiled_contracts .into_par_iter() - .map(|contract| count_opcodes_and_gates_in_contract(backend, contract, expression_width)) + .map(|contract| { + count_opcodes_and_gates_in_contract( + backend, + contract, + args.compile_options.expression_width, + ) + }) .collect::>()?; let info_report = InfoReport { programs: program_info, contracts: contract_info }; @@ -280,12 +283,12 @@ impl From for Vec { fn count_opcodes_and_gates_in_program( backend: &Backend, - compiled_program: CompiledProgram, + compiled_program: ProgramArtifact, package: &Package, expression_width: ExpressionWidth, ) -> Result { let functions = compiled_program - .program + .bytecode .functions .into_par_iter() .enumerate() @@ -307,7 +310,7 @@ fn count_opcodes_and_gates_in_program( fn count_opcodes_and_gates_in_contract( backend: &Backend, - contract: CompiledContract, + contract: ContractArtifact, expression_width: ExpressionWidth, ) -> Result { let functions = contract diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs index 1428b8070c83..45ac02ea5522 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs @@ -8,7 +8,6 @@ use noir_lsp::NargoLspService; use tower::ServiceBuilder; use super::NargoConfig; -use crate::backends::Backend; use crate::errors::CliError; /// Starts the Noir LSP server @@ -19,12 +18,7 @@ use crate::errors::CliError; #[derive(Debug, Clone, Args)] pub(crate) struct LspCommand; -pub(crate) fn run( - // Backend is currently unused, but we might want to use it to inform the lsp in the future - _backend: &Backend, - _args: 
LspCommand, - _config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(_args: LspCommand, _config: NargoConfig) -> Result<(), CliError> { use tokio::runtime::Builder; let runtime = Builder::new_current_thread().enable_all().build().unwrap(); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs index e8e178938157..ad778549ac09 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs @@ -107,21 +107,21 @@ pub(crate) fn start_cli() -> eyre::Result<()> { let backend = crate::backends::Backend::new(active_backend); match command { - NargoCommand::New(args) => new_cmd::run(&backend, args, config), + NargoCommand::New(args) => new_cmd::run(args, config), NargoCommand::Init(args) => init_cmd::run(args, config), - NargoCommand::Check(args) => check_cmd::run(&backend, args, config), - NargoCommand::Compile(args) => compile_cmd::run(&backend, args, config), - NargoCommand::Debug(args) => debug_cmd::run(&backend, args, config), - NargoCommand::Execute(args) => execute_cmd::run(&backend, args, config), - NargoCommand::Export(args) => export_cmd::run(&backend, args, config), + NargoCommand::Check(args) => check_cmd::run(args, config), + NargoCommand::Compile(args) => compile_cmd::run(args, config), + NargoCommand::Debug(args) => debug_cmd::run(args, config), + NargoCommand::Execute(args) => execute_cmd::run(args, config), + NargoCommand::Export(args) => export_cmd::run(args, config), NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), - NargoCommand::Test(args) => test_cmd::run(&backend, args, config), + NargoCommand::Test(args) => test_cmd::run(args, config), NargoCommand::Info(args) => info_cmd::run(&backend, args, config), NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), NargoCommand::Backend(args) => 
backend_cmd::run(args), - NargoCommand::Lsp(args) => lsp_cmd::run(&backend, args, config), - NargoCommand::Dap(args) => dap_cmd::run(&backend, args, config), + NargoCommand::Lsp(args) => lsp_cmd::run(args, config), + NargoCommand::Dap(args) => dap_cmd::run(args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), }?; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs index b4c823d0c1e9..21951f272601 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs @@ -1,4 +1,3 @@ -use crate::backends::Backend; use crate::errors::CliError; use super::{init_cmd::initialize_project, NargoConfig}; @@ -30,12 +29,7 @@ pub(crate) struct NewCommand { pub(crate) contract: bool, } -pub(crate) fn run( - // Backend is currently unused, but we might want to use it to inform the "new" template in the future - _backend: &Backend, - args: NewCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: NewCommand, config: NargoConfig) -> Result<(), CliError> { let package_dir = config.program_dir.join(&args.path); if package_dir.exists() { diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs index b9e4bca9e697..6fb6e7269f7f 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -1,16 +1,13 @@ use clap::Args; use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; use nargo::package::Package; -use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, 
NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; +use super::compile_cmd::compile_workspace_full; +use super::fs::program::read_program_from_file; use super::fs::{ inputs::{read_inputs_from_file, write_inputs_to_file}, proof::save_proof_to_dir, @@ -65,59 +62,39 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. + compile_workspace_full(&workspace, &args.compile_options)?; - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); - prove_package( + let proof = prove_package( backend, - &workspace, package, - compiled_program, + program, &args.prover_name, &args.verifier_name, args.verify, args.oracle_resolver.as_deref(), )?; + + save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?; } Ok(()) } -#[allow(clippy::too_many_arguments)] -pub(crate) fn prove_package( +fn 
prove_package( backend: &Backend, - workspace: &Workspace, package: &Package, compiled_program: CompiledProgram, prover_name: &str, verifier_name: &str, check_proof: bool, foreign_call_resolver_url: Option<&str>, -) -> Result<(), CliError> { +) -> Result, CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; @@ -151,7 +128,5 @@ pub(crate) fn prove_package( } } - save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?; - - Ok(()) + Ok(proof) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index 88a804d5cf42..967d4c87e6d0 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -19,7 +19,7 @@ use noirc_frontend::{ use rayon::prelude::{IntoParallelIterator, ParallelBridge, ParallelIterator}; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; -use crate::{backends::Backend, cli::check_cmd::check_crate_and_report_errors, errors::CliError}; +use crate::{cli::check_cmd::check_crate_and_report_errors, errors::CliError}; use super::NargoConfig; @@ -54,11 +54,7 @@ pub(crate) struct TestCommand { oracle_resolver: Option, } -pub(crate) fn run( - _backend: &Backend, - args: TestCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: TestCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs index 7202a179aaea..a7f2772330a0 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs +++ 
b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -1,18 +1,16 @@ +use super::compile_cmd::compile_workspace_full; +use super::fs::program::read_program_from_file; use super::fs::{inputs::read_inputs_from_file, load_hex_data}; use super::NargoConfig; use crate::{backends::Backend, errors::CliError}; use clap::Args; use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; use nargo::package::Package; use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Given a proof and a program, verify whether the proof is valid @@ -50,34 +48,15 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); - - verify_package(backend, &workspace, package, compiled_program, &args.verifier_name)?; + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); + + verify_package(backend, &workspace, package, program, &args.verifier_name)?; } Ok(()) diff --git a/noir/noir-repo/tooling/noir_codegen/package.json b/noir/noir-repo/tooling/noir_codegen/package.json index 2a90e9374dfd..fa3df8ce1017 100644 --- a/noir/noir-repo/tooling/noir_codegen/package.json +++ b/noir/noir-repo/tooling/noir_codegen/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.28.0", + "version": "0.29.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/noir/noir-repo/tooling/noir_js/package.json b/noir/noir-repo/tooling/noir_js/package.json index c5bb5af9dfab..325ba0fb9a7d 100644 --- a/noir/noir-repo/tooling/noir_js/package.json +++ b/noir/noir-repo/tooling/noir_js/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.28.0", + "version": "0.29.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts 
b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts index 54a42d40b60b..dcf9f4890039 100644 --- a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts +++ b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts @@ -81,15 +81,3 @@ it('circuit with a raw assert payload should fail with the decoded payload', asy }); } }); - -it('successfully executes a program with multiple acir circuits', async () => { - const inputs = { - x: '10', - }; - try { - await new Noir(fold_fibonacci_program).execute(inputs); - } catch (error) { - const knownError = error as Error; - expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); - } -}); diff --git a/noir/noir-repo/tooling/noir_js_backend_barretenberg/package.json b/noir/noir-repo/tooling/noir_js_backend_barretenberg/package.json index b57822696742..c6985f4b037f 100644 --- a/noir/noir-repo/tooling/noir_js_backend_barretenberg/package.json +++ b/noir/noir-repo/tooling/noir_js_backend_barretenberg/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.28.0", + "version": "0.29.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/noir/noir-repo/tooling/noir_js_types/package.json b/noir/noir-repo/tooling/noir_js_types/package.json index a356a771b2ad..5332ce20cc72 100644 --- a/noir/noir-repo/tooling/noir_js_types/package.json +++ b/noir/noir-repo/tooling/noir_js_types/package.json @@ -4,7 +4,7 @@ "The Noir Team " ], "packageManager": "yarn@3.5.1", - "version": "0.28.0", + "version": "0.29.0", "license": "(MIT OR Apache-2.0)", "homepage": "https://noir-lang.org/", "repository": { diff --git a/noir/noir-repo/tooling/noirc_abi/src/lib.rs b/noir/noir-repo/tooling/noirc_abi/src/lib.rs index 35f85b5f59cb..7e89a102a984 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/lib.rs @@ -221,7 +221,7 @@ impl From<&AbiType> for PrintableType { } AbiType::Array { length, typ 
} => { let borrowed: &AbiType = typ.borrow(); - PrintableType::Array { length: Some(*length), typ: Box::new(borrowed.into()) } + PrintableType::Array { length: *length, typ: Box::new(borrowed.into()) } } AbiType::Boolean => PrintableType::Boolean, AbiType::Struct { path, fields } => { diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/package.json b/noir/noir-repo/tooling/noirc_abi_wasm/package.json index 701c843456a0..ac7d16062984 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/package.json +++ b/noir/noir-repo/tooling/noirc_abi_wasm/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.28.0", + "version": "0.29.0", "license": "(MIT OR Apache-2.0)", "homepage": "https://noir-lang.org/", "repository": { diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 84ffc925c7bf..e424c0a42017 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -3,6 +3,21 @@ # The silver lining is if Earthly does crash, the cache can pick up the build. set -eu -o pipefail +MAX_WAIT_TIME=300 # Maximum wait time in seconds +WAIT_INTERVAL=10 # Interval between checks in seconds +elapsed_time=0 + +while ! [ -f /run/.earthly-bootstrap ] ; do + echo "Did not detect .earthly-bootstrap. Waiting for runner to fully initialize..." + if [ $elapsed_time -ge $MAX_WAIT_TIME ]; then + echo "Earthly bootstrap did not become available within $MAX_WAIT_TIME seconds... did the runner start correctly?" 
+ exit 1 + fi + + sleep $WAIT_INTERVAL + elapsed_time=$((elapsed_time + WAIT_INTERVAL)) +done + OUTPUT_FILE=$(mktemp) INCONSISTENT_GRAPH_STATE_COUNT=0 # Counter for 'inconsistent graph state' errors @@ -22,12 +37,12 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do # Check the output for specific errors if grep 'failed to get edge: inconsistent graph state' $OUTPUT_FILE >/dev/null || grep 'failed to get state for index' $OUTPUT_FILE >/dev/null ; then INCONSISTENT_GRAPH_STATE_COUNT=$((INCONSISTENT_GRAPH_STATE_COUNT + 1)) - if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -eq 2 ]; then + if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -eq 3 ]; then echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Connect to spot runner and run 'earthly prune'." exit 1 fi - echo "Got 'inconsistent graph state' or 'failed to get state for index'. Sleeping for 20 seconds and retrying." - sleep 20 + echo "Got 'inconsistent graph state' or 'failed to get state for index'. Sleeping for 30 seconds and retrying." 
+ sleep 30 elif grep 'Error: pull ping error: pull ping response' $OUTPUT_FILE >/dev/null; then echo "Got 'Error: pull ping error: pull ping response', intermittent failure when writing out images to docker" elif grep '================================= System Info ==================================' $OUTPUT_FILE >/dev/null; then diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 0c6c1f9a1e7b..fc3b21deccfa 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -5,7 +5,8 @@ deps: LET packages = $(git ls-files "**/package*.json" package*.json) LET tsconfigs = $(git ls-files "**/tsconfig*.json" tsconfig*.json) FROM ../build-images+build - # copy bb-js and noir-packages + # copy bb, bb-js and noir-packages + COPY ../barretenberg/cpp/+preset-release/bin /usr/src/barretenberg/cpp/build/ COPY ../barretenberg/ts/+build/build /usr/src/barretenberg/ts COPY ../noir/+packages/packages /usr/src/noir/packages WORKDIR /usr/src/yarn-project @@ -100,7 +101,7 @@ end-to-end: RUN apt-get update && apt-get install -y wget gnupg \ && wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \ && echo "deb [arch=$(dpkg --print-architecture)] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list \ - && apt update && apt install nodejs jq google-chrome-stable netcat-openbsd -y \ + && apt update && apt install curl nodejs jq google-chrome-stable netcat-openbsd -y \ && rm -rf /var/lib/apt/lists/* ENV CHROME_BIN="/usr/bin/google-chrome-stable" ENV PATH=/opt/foundry/bin:$PATH diff --git a/yarn-project/aztec-node/src/aztec-node/config.ts b/yarn-project/aztec-node/src/aztec-node/config.ts index dba7d824025a..8c00246d6088 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.ts @@ -1,6 +1,6 @@ import { type ArchiverConfig, getConfigEnvVars as getArchiverVars } from '@aztec/archiver'; import { type P2PConfig, getP2PConfigEnvVars } from 
'@aztec/p2p'; -import { type ProverConfig, getProverEnvVars } from '@aztec/prover-client'; +import { type ProverClientConfig, getProverEnvVars } from '@aztec/prover-client'; import { type SequencerClientConfig, getConfigEnvVars as getSequencerVars } from '@aztec/sequencer-client'; import { getConfigEnvVars as getWorldStateVars } from '@aztec/world-state'; @@ -9,7 +9,7 @@ import { getConfigEnvVars as getWorldStateVars } from '@aztec/world-state'; */ export type AztecNodeConfig = ArchiverConfig & SequencerClientConfig & - ProverConfig & + ProverClientConfig & P2PConfig & { /** Whether the sequencer is disabled for this node. */ disableSequencer: boolean; diff --git a/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts b/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts index dd2836448269..8270b171ffec 100644 --- a/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts +++ b/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts @@ -5,6 +5,7 @@ import { L2Block, LogId, NullifierMembershipWitness, + PublicDataWitness, SiblingPath, Tx, TxEffect, @@ -37,6 +38,7 @@ export function createAztecNodeRpcServer(node: AztecNode) { TxEffect, LogId, TxHash, + PublicDataWitness, SiblingPath, }, { Tx, TxReceipt, EncryptedL2BlockL2Logs, UnencryptedL2BlockL2Logs, NullifierMembershipWitness }, diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 343130a1b38c..980c9cf6df0c 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -15,6 +15,7 @@ import { NullifierMembershipWitness, type ProcessOutput, type ProverClient, + type ProverConfig, PublicDataWitness, type SequencerConfig, type SiblingPath, @@ -32,8 +33,6 @@ import { type Header, INITIAL_L2_BLOCK_NUM, type L1_TO_L2_MSG_TREE_HEIGHT, - L2_TO_L1_MESSAGE_LENGTH, - MAX_NEW_L2_TO_L1_MSGS_PER_TX, type NOTE_HASH_TREE_HEIGHT, type NULLIFIER_TREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, @@ 
-44,7 +43,6 @@ import { import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; import { type L1ContractAddresses, createEthereumChain } from '@aztec/ethereum'; import { AztecAddress } from '@aztec/foundation/aztec-address'; -import { padArrayEnd } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from '@aztec/kv-store'; import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; @@ -453,11 +451,7 @@ export class AztecNodeService implements AztecNode { throw new Error('Block is not defined'); } - // We multiply the number of messages per block by the length of each message because each message occupies - // 2 leaves in the tree! - const l2ToL1Messages = block.body.txEffects.flatMap(txEffect => - padArrayEnd(txEffect.l2ToL1Msgs, Fr.ZERO, MAX_NEW_L2_TO_L1_MSGS_PER_TX * L2_TO_L1_MESSAGE_LENGTH), - ); + const l2ToL1Messages = block.body.txEffects.flatMap(txEffect => txEffect.l2ToL1Msgs); const indexOfL2ToL1Message = BigInt( l2ToL1Messages.findIndex(l2ToL1MessageInBlock => l2ToL1MessageInBlock.equals(l2ToL1Message)), @@ -467,7 +461,8 @@ export class AztecNodeService implements AztecNode { throw new Error('The L2ToL1Message you are trying to prove inclusion of does not exist'); } - const treeHeight = Math.ceil(Math.log2(l2ToL1Messages.length)); + // Match how l2ToL1TreeHeight is calculated in Rollup.sol. 
+ const treeHeight = block.header.contentCommitment.txTreeHeight.toNumber() + 1; // The root of this tree is the out_hash calculated in Noir => we truncate to match Noir's SHA const tree = new StandardTree( openTmpStore(true), @@ -667,6 +662,7 @@ export class AztecNodeService implements AztecNode { const processor = await publicProcessorFactory.create(prevHeader, newGlobalVariables); // REFACTOR: Consider merging ProcessReturnValues into ProcessedTx const [processedTxs, failedTxs, returns] = await processor.process([tx]); + // REFACTOR: Consider returning the error/revert rather than throwing if (failedTxs.length) { this.log.warn(`Simulated tx ${tx.getTxHash()} fails: ${failedTxs[0].error}`); throw failedTxs[0].error; @@ -685,12 +681,13 @@ export class AztecNodeService implements AztecNode { end: processedTx.data.end, revertReason: processedTx.revertReason, publicReturnValues: returns[0], + gasUsed: processedTx.gasUsed, }; } - public setConfig(config: Partial): Promise { + public async setConfig(config: Partial): Promise { this.sequencer?.updateSequencerConfig(config); - return Promise.resolve(); + await this.prover.updateProverConfig(config); } /** diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index cb9179a84a74..070ecb51b948 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -123,6 +123,8 @@ export { mockTx, Comparator, SiblingPath, + EncryptedLogHeader, + EncryptedLogBody, } from '@aztec/circuit-types'; export { NodeInfo } from '@aztec/types/interfaces'; diff --git a/yarn-project/aztec/CHANGELOG.md b/yarn-project/aztec/CHANGELOG.md index 3f0d5f163102..1f3185ae61ca 100644 --- a/yarn-project/aztec/CHANGELOG.md +++ b/yarn-project/aztec/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.38.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.37.0...aztec-package-v0.38.0) (2024-05-07) + + +### Features + +* Proving benchmark 
([#6051](https://github.com/AztecProtocol/aztec-packages/issues/6051)) ([644bd85](https://github.com/AztecProtocol/aztec-packages/commit/644bd8525f6de8b71d6cc299baf3fda94b68abbb)) + ## [0.37.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.36.0...aztec-package-v0.37.0) (2024-05-02) diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index f8c277e0494b..f01354896f84 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -1,6 +1,6 @@ { "name": "@aztec/aztec", - "version": "0.37.0", + "version": "0.38.0", "type": "module", "exports": { ".": "./dest/index.js" diff --git a/yarn-project/aztec/src/cli/cmds/start_prover.ts b/yarn-project/aztec/src/cli/cmds/start_prover.ts index 103ca97c8df0..7c39fe6e16a7 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover.ts @@ -1,6 +1,8 @@ import { type ProvingJobSource } from '@aztec/circuit-types'; import { ProverPool, createProvingJobSourceClient } from '@aztec/prover-client/prover-pool'; +import { tmpdir } from 'node:os'; + import { type ServiceStarter, parseModuleOptions } from '../util.js'; type ProverOptions = Partial<{ @@ -35,6 +37,8 @@ export const startProver: ServiceStarter = async (options, signalHandlers, logge { acvmBinaryPath: proverOptions.acvmBinaryPath, bbBinaryPath: proverOptions.bbBinaryPath, + acvmWorkingDirectory: tmpdir(), + bbWorkingDirectory: tmpdir(), }, agentCount, ); diff --git a/yarn-project/builder/package.json b/yarn-project/builder/package.json index cb01a3034bc1..29b648700ff1 100644 --- a/yarn-project/builder/package.json +++ b/yarn-project/builder/package.json @@ -10,7 +10,7 @@ "entryPoints": [ "./src/index.ts" ], - "name": "Aztec.nr compiler", + "name": "Aztec builder", "tsconfig": "./tsconfig.json" }, "bin": { diff --git a/yarn-project/builder/src/cli.ts b/yarn-project/builder/src/cli.ts index a8d2faf4d6a5..9dea06bba50c 100644 --- 
a/yarn-project/builder/src/cli.ts +++ b/yarn-project/builder/src/cli.ts @@ -1,30 +1,13 @@ #!/usr/bin/env node import { createConsoleLogger } from '@aztec/foundation/log'; -import { Command, Option } from 'commander'; -import { lookup } from 'dns/promises'; +import { Command } from 'commander'; import { dirname } from 'path'; const program = new Command(); -const log = createConsoleLogger('aztec:compiler-cli'); - -/** - * If we can successfully resolve 'host.docker.internal', then we are running in a container, and we should treat - * localhost as being host.docker.internal. - */ -const getLocalhost = () => - lookup('host.docker.internal') - .then(() => 'host.docker.internal') - .catch(() => 'localhost'); - -const LOCALHOST = await getLocalhost(); +const log = createConsoleLogger('aztec:builder'); const main = async () => { - const pxeOption = new Option('-u, --rpc-url ', 'URL of the PXE') - .env('PXE_URL') - .default(`http://${LOCALHOST}:8080`) - .makeOptionMandatory(true); - program.name('aztec-builder'); program .command('codegen') @@ -43,14 +26,16 @@ const main = async () => { .argument('[projectPath]', 'Path to the project directory', process.cwd()) .option('--contract [paths...]', 'Paths to contracts to update dependencies', []) .option('--aztec-version ', 'The version to update Aztec packages to. Defaults to latest', 'latest') - .addOption(pxeOption) .action(async (projectPath: string, options) => { const { update } = await import('./cli/update/update.js'); - const { contract, aztecVersion, rpcUrl } = options; - await update(projectPath, contract, rpcUrl, aztecVersion, log); + const { contract, aztecVersion } = options; + await update(projectPath, contract, aztecVersion, log); }); await program.parseAsync(process.argv); + // I force exit here because spawnSync in npm.ts just blocks the process from exiting. Spent a bit of time debugging + // it without success and I think it doesn't make sense to invest more time in this. 
+ process.exit(0); }; main().catch(err => { diff --git a/yarn-project/builder/src/cli/update/update.ts b/yarn-project/builder/src/cli/update/update.ts index 0bbe96639ade..5d518839ebb3 100644 --- a/yarn-project/builder/src/cli/update/update.ts +++ b/yarn-project/builder/src/cli/update/update.ts @@ -15,7 +15,6 @@ const UPDATE_DOCS_URL = 'https://docs.aztec.network/developers/updating'; export async function update( projectPath: string, contracts: string[], - pxeUrl: string, aztecVersion: string, log: LogFn, ): Promise { diff --git a/yarn-project/circuit-types/src/index.ts b/yarn-project/circuit-types/src/index.ts index fb71e18a5dc8..67763d4d6d33 100644 --- a/yarn-project/circuit-types/src/index.ts +++ b/yarn-project/circuit-types/src/index.ts @@ -6,6 +6,7 @@ export * from './l2_block.js'; export * from './body.js'; export * from './l2_block_downloader/index.js'; export * from './l2_block_source.js'; +export * from './public_data_witness.js'; export * from './tx_effect.js'; export * from './logs/index.js'; export * from './merkle_tree_id.js'; diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 95cc81d5bc24..d59543943e8c 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -20,13 +20,14 @@ import { type LogType, } from '../logs/index.js'; import { type MerkleTreeId } from '../merkle_tree_id.js'; +import { type PublicDataWitness } from '../public_data_witness.js'; import { type SiblingPath } from '../sibling_path/index.js'; import { type ProcessOutput, type Tx, type TxHash, type TxReceipt } from '../tx/index.js'; import { type TxEffect } from '../tx_effect.js'; import { type SequencerConfig } from './configs.js'; import { type L2BlockNumber } from './l2_block_number.js'; import { type NullifierMembershipWitness } from './nullifier_tree.js'; -import { type PublicDataWitness } from './public_data_tree.js'; 
+import { type ProverConfig } from './prover-client.js'; /** * The aztec node. @@ -288,7 +289,7 @@ export interface AztecNode { * Updates the configuration of this node. * @param config - Updated configuration to be merged with the current one. */ - setConfig(config: Partial): Promise; + setConfig(config: Partial): Promise; /** * Returns a registered contract class given its id. diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index 71bfeeda4a2e..5b13506853c3 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -4,7 +4,6 @@ export * from './pxe.js'; export * from './sync-status.js'; export * from './configs.js'; export * from './nullifier_tree.js'; -export * from './public_data_tree.js'; export * from './prover-client.js'; export * from './proving-job.js'; export * from './block-prover.js'; diff --git a/yarn-project/circuit-types/src/interfaces/prover-client.ts b/yarn-project/circuit-types/src/interfaces/prover-client.ts index 8e55d3a2dbbb..6ce183fc3853 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-client.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-client.ts @@ -1,6 +1,14 @@ import { type BlockProver } from './block-prover.js'; import { type ProvingJobSource } from './proving-job.js'; +/** + * The prover configuration. + */ +export type ProverConfig = { + /** How many agents to run */ + proverAgents: number; +}; + /** * The interface to the prover client. * Provides the ability to generate proofs and build rollups. 
@@ -11,4 +19,6 @@ export interface ProverClient extends BlockProver { stop(): Promise; getProvingJobSource(): ProvingJobSource; + + updateProverConfig(config: Partial): Promise; } diff --git a/yarn-project/circuit-types/src/interfaces/public_data_tree.ts b/yarn-project/circuit-types/src/interfaces/public_data_tree.ts deleted file mode 100644 index 1ae620154ee2..000000000000 --- a/yarn-project/circuit-types/src/interfaces/public_data_tree.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { Fr, type PUBLIC_DATA_TREE_HEIGHT, type PublicDataTreeLeafPreimage } from '@aztec/circuits.js'; - -import { type SiblingPath } from '../sibling_path/index.js'; - -/** - * Public data witness. - * @remarks This allows to prove either: - * - That a slot in the public data tree is empty (0 value) if it falls within the range of the leaf. - * - The current value of a slot in the public data tree if it matches exactly the slot of the leaf. - */ -export class PublicDataWitness { - constructor( - /** - * The index of the leaf in the public data tree. - */ - public readonly index: bigint, - /** - * Preimage of a low leaf. All the slots in the range of the leaf are empty, and the current value of the - * leaf slot is stored in the leaf. - */ - public readonly leafPreimage: PublicDataTreeLeafPreimage, - /** - * Sibling path to prove membership of the leaf. - */ - public readonly siblingPath: SiblingPath, - ) {} - - /** - * Returns a field array representation of a public data witness. - * @returns A field array representation of a public data witness. 
- */ - public toFields(): Fr[] { - return [ - new Fr(this.index), - new Fr(this.leafPreimage.slot), - new Fr(this.leafPreimage.value), - new Fr(this.leafPreimage.nextIndex), - new Fr(this.leafPreimage.nextSlot), - ...this.siblingPath.toFields(), - ]; - } -} diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts b/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts new file mode 100644 index 000000000000..170c26078b58 --- /dev/null +++ b/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts @@ -0,0 +1,66 @@ +import { Fr, GrumpkinScalar } from '@aztec/circuits.js'; +import { Grumpkin } from '@aztec/circuits.js/barretenberg'; +import { updateInlineTestData } from '@aztec/foundation/testing'; + +import { EncryptedLogBody } from './encrypted_log_body.js'; +import { Note } from './l1_note_payload/note.js'; + +describe('encrypt log body', () => { + let grumpkin: Grumpkin; + + beforeAll(() => { + grumpkin = new Grumpkin(); + }); + + it('encrypt and decrypt a log body', () => { + const ephSecretKey = GrumpkinScalar.random(); + const viewingSecretKey = GrumpkinScalar.random(); + + const ephPubKey = grumpkin.mul(Grumpkin.generator, ephSecretKey); + const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); + + const note = Note.random(); + const noteTypeId = Fr.random(); + const storageSlot = Fr.random(); + + const body = new EncryptedLogBody(noteTypeId, storageSlot, note); + + const encrypted = body.computeCiphertext(ephSecretKey, viewingPubKey); + + const recreated = EncryptedLogBody.fromCiphertext(encrypted, viewingSecretKey, ephPubKey); + + expect(recreated.toBuffer()).toEqual(body.toBuffer()); + }); + + it('encrypt a log body, generate input for noir test', () => { + // The following 2 are arbitrary fixed values - fixed in order to test a match with Noir + const viewingSecretKey: GrumpkinScalar = new GrumpkinScalar( + 0x23b3127c127b1f29a7adff5cccf8fb06649e7ca01d9de27b21624098b897babdn, + ); + const ephSecretKey: 
GrumpkinScalar = new GrumpkinScalar( + 0x1fdd0dd8c99b21af8e00d2d130bdc263b36dadcbea84ac5ec9293a0660deca01n, + ); + + const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); + + const note = new Note([new Fr(1), new Fr(2), new Fr(3)]); + const noteTypeId = new Fr(1); + const storageSlot = new Fr(2); + + const body = new EncryptedLogBody(noteTypeId, storageSlot, note); + + const encrypted = body.computeCiphertext(ephSecretKey, viewingPubKey); + + const byteArrayString = `[${encrypted + .toString('hex') + .match(/.{1,2}/g)! + .map(byte => parseInt(byte, 16))}]`; + + // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data + updateInlineTestData( + 'noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr', + 'expected_body_ciphertext', + byteArrayString, + ); + }); +}); diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_body.ts b/yarn-project/circuit-types/src/logs/encrypted_log_body.ts new file mode 100644 index 000000000000..40a4d2e4c4ee --- /dev/null +++ b/yarn-project/circuit-types/src/logs/encrypted_log_body.ts @@ -0,0 +1,81 @@ +import { Fr, type GrumpkinPrivateKey, type PublicKey } from '@aztec/circuits.js'; +import { Aes128 } from '@aztec/circuits.js/barretenberg'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +import { Note, deriveAESSecret } from './l1_note_payload/index.js'; + +export class EncryptedLogBody { + constructor(public storageSlot: Fr, public noteTypeId: Fr, public note: Note) {} + + /** + * Serializes the log body to a buffer WITHOUT the length of the note buffer + * + * @returns The serialized log body + */ + public toBuffer(): Buffer { + const noteBufferWithoutLength = this.note.toBuffer().subarray(4); + return serializeToBuffer(this.storageSlot, this.noteTypeId, noteBufferWithoutLength); + } + + /** + * Deserialized the log body from a buffer WITHOUT the length of the note buffer + * + * @param buf - The buffer to deserialize + * @returns The deserialized log body + */ + 
public static fromBuffer(buf: Buffer): EncryptedLogBody { + const reader = BufferReader.asReader(buf); + const storageSlot = Fr.fromBuffer(reader); + const noteTypeId = Fr.fromBuffer(reader); + + // 2 Fields (storage slot and note type id) are not included in the note buffer + const fieldsInNote = reader.getLength() / 32 - 2; + const note = new Note(reader.readArray(fieldsInNote, Fr)); + + return new EncryptedLogBody(storageSlot, noteTypeId, note); + } + + /** + * Encrypts a log body + * + * @param secret - The ephemeral secret key + * @param publicKey - The incoming viewing key for the recipient of this log + * + * @returns The ciphertext of the encrypted log body + */ + public computeCiphertext(secret: GrumpkinPrivateKey, publicKey: PublicKey) { + const aesSecret = deriveAESSecret(secret, publicKey); + const key = aesSecret.subarray(0, 16); + const iv = aesSecret.subarray(16, 32); + + const aes128 = new Aes128(); + const buffer = this.toBuffer(); + + return aes128.encryptBufferCBC(buffer, iv, key); + } + + /** + * Decrypts a log body + * + * @param ciphertext - The ciphertext buffer + * @param secret - The private key matching the public key used in encryption (the viewing key secret) + * @param publicKey - The public key generated with the ephemeral secret key used in encryption + * + * @returns The decrypted log body + */ + public static fromCiphertext( + ciphertext: Buffer | bigint[], + secret: GrumpkinPrivateKey, + publicKey: PublicKey, + ): EncryptedLogBody { + const input = Buffer.isBuffer(ciphertext) ? 
ciphertext : Buffer.from(ciphertext.map((x: bigint) => Number(x))); + + const aesSecret = deriveAESSecret(secret, publicKey); + const key = aesSecret.subarray(0, 16); + const iv = aesSecret.subarray(16, 32); + + const aes128 = new Aes128(); + const buffer = aes128.decryptBufferCBC(input, iv, key); + return EncryptedLogBody.fromBuffer(buffer); + } +} diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_header.test.ts b/yarn-project/circuit-types/src/logs/encrypted_log_header.test.ts new file mode 100644 index 000000000000..af5a63c9f810 --- /dev/null +++ b/yarn-project/circuit-types/src/logs/encrypted_log_header.test.ts @@ -0,0 +1,57 @@ +import { AztecAddress, GrumpkinScalar } from '@aztec/circuits.js'; +import { Grumpkin } from '@aztec/circuits.js/barretenberg'; +import { updateInlineTestData } from '@aztec/foundation/testing'; + +import { EncryptedLogHeader } from './encrypted_log_header.js'; + +describe('encrypt log header', () => { + let grumpkin: Grumpkin; + + beforeAll(() => { + grumpkin = new Grumpkin(); + }); + + it('encrypt and decrypt a log header', () => { + const ephSecretKey = GrumpkinScalar.random(); + const viewingSecretKey = GrumpkinScalar.random(); + + const ephPubKey = grumpkin.mul(Grumpkin.generator, ephSecretKey); + const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); + + const header = new EncryptedLogHeader(AztecAddress.random()); + + const encrypted = header.computeCiphertext(ephSecretKey, viewingPubKey); + + const recreated = EncryptedLogHeader.fromCiphertext(encrypted, viewingSecretKey, ephPubKey); + + expect(recreated.toBuffer()).toEqual(header.toBuffer()); + }); + + it('encrypt a log header, generate input for noir test', () => { + // The following 2 are arbitrary fixed values - fixed in order to test a match with Noir + const viewingSecretKey: GrumpkinScalar = new GrumpkinScalar( + 0x23b3127c127b1f29a7adff5cccf8fb06649e7ca01d9de27b21624098b897babdn, + ); + const ephSecretKey: GrumpkinScalar = new 
GrumpkinScalar( + 0x1fdd0dd8c99b21af8e00d2d130bdc263b36dadcbea84ac5ec9293a0660deca01n, + ); + + const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); + + const header = new EncryptedLogHeader(AztecAddress.fromBigInt(BigInt('0xdeadbeef'))); + + const encrypted = header.computeCiphertext(ephSecretKey, viewingPubKey); + + const byteArrayString = `[${encrypted + .toString('hex') + .match(/.{1,2}/g)! + .map(byte => parseInt(byte, 16))}]`; + + // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data + updateInlineTestData( + 'noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr', + 'expected_header_ciphertext', + byteArrayString, + ); + }); +}); diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_header.ts b/yarn-project/circuit-types/src/logs/encrypted_log_header.ts new file mode 100644 index 000000000000..baef6cc264be --- /dev/null +++ b/yarn-project/circuit-types/src/logs/encrypted_log_header.ts @@ -0,0 +1,70 @@ +import { AztecAddress, type GrumpkinPrivateKey, type PublicKey } from '@aztec/circuits.js'; +import { Aes128 } from '@aztec/circuits.js/barretenberg'; + +import { deriveAESSecret } from './l1_note_payload/encrypt_buffer.js'; + +/** + * An encrypted log header, containing the address of the log along with utility + * functions to compute and decrypt its ciphertext. + * + * Using AES-128-CBC for encryption. + * Can be used for both incoming and outgoing logs. 
+ * + */ +export class EncryptedLogHeader { + constructor(public readonly address: AztecAddress) {} + + /** + * Serializes the log header to a buffer + * + * @returns The serialized log header + */ + public toBuffer(): Buffer { + return this.address.toBuffer(); + } + + public static fromBuffer(buf: Buffer): EncryptedLogHeader { + return new EncryptedLogHeader(AztecAddress.fromBuffer(buf)); + } + + /** + * Computes the ciphertext of the encrypted log header + * + * @param secret - An ephemeral secret key + * @param publicKey - The incoming or outgoing viewing key of the "recipient" of this log + * @returns The ciphertext of the encrypted log header + */ + public computeCiphertext(secret: GrumpkinPrivateKey, publicKey: PublicKey) { + const aesSecret = deriveAESSecret(secret, publicKey); + const key = aesSecret.subarray(0, 16); + const iv = aesSecret.subarray(16, 32); + + const aes128 = new Aes128(); + const buffer = this.toBuffer(); + return aes128.encryptBufferCBC(buffer, iv, key); + } + + /** + * + * @param ciphertext - The ciphertext buffer + * @param secret - The private key matching the public key used in encryption + * @param publicKey - The public key generated with the ephemeral secret key used in encryption + * e.g., eph_sk * G + * @returns + */ + public static fromCiphertext( + ciphertext: Buffer | bigint[], + secret: GrumpkinPrivateKey, + publicKey: PublicKey, + ): EncryptedLogHeader { + const input = Buffer.isBuffer(ciphertext) ? 
ciphertext : Buffer.from(ciphertext.map((x: bigint) => Number(x))); + + const aesSecret = deriveAESSecret(secret, publicKey); + const key = aesSecret.subarray(0, 16); + const iv = aesSecret.subarray(16, 32); + + const aes128 = new Aes128(); + const buffer = aes128.decryptBufferCBC(input, iv, key); + return EncryptedLogHeader.fromBuffer(buffer); + } +} diff --git a/yarn-project/circuit-types/src/logs/index.ts b/yarn-project/circuit-types/src/logs/index.ts index c333eb9fdae9..ca6b731ff80c 100644 --- a/yarn-project/circuit-types/src/logs/index.ts +++ b/yarn-project/circuit-types/src/logs/index.ts @@ -10,3 +10,5 @@ export * from './l1_note_payload/index.js'; export * from './tx_l2_logs.js'; export * from './unencrypted_l2_log.js'; export * from './extended_unencrypted_l2_log.js'; +export * from './encrypted_log_header.js'; +export * from './encrypted_log_body.js'; diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index 367cd4f09e19..9cda922037cc 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -136,6 +136,7 @@ export const mockSimulatedTx = (seed = 1, hasLogs = true) => { end: makeCombinedAccumulatedData(), revertReason: undefined, publicReturnValues: dec, + gasUsed: {}, }; return new SimulatedTx(tx, dec, output); }; diff --git a/yarn-project/circuit-types/src/public_data_witness.test.ts b/yarn-project/circuit-types/src/public_data_witness.test.ts new file mode 100644 index 000000000000..c2b031217d99 --- /dev/null +++ b/yarn-project/circuit-types/src/public_data_witness.test.ts @@ -0,0 +1,31 @@ +import { PUBLIC_DATA_TREE_HEIGHT, PublicDataTreeLeafPreimage } from '@aztec/circuits.js'; +import { fr } from '@aztec/circuits.js/testing'; +import { toBufferBE } from '@aztec/foundation/bigint-buffer'; +import { randomInt } from '@aztec/foundation/crypto'; + +import { PublicDataWitness } from './public_data_witness.js'; +import { SiblingPath } from 
'./sibling_path/sibling_path.js'; + +describe('contract_artifact', () => { + it('serializes and deserializes an instance', () => { + const witness = makePublicDataWitness(randomInt(1000000)); + + const deserialized = PublicDataWitness.fromBuffer(witness.toBuffer()); + expect(deserialized).toEqual(witness); + }); +}); + +/** + * Factory function to create a PublicDataWitness based on given seed. + * @param seed - A seed used to derive all parameters. + * @returns A new instance of PublicDataWitness. + */ +function makePublicDataWitness(seed: number): PublicDataWitness { + const leafPreimage = new PublicDataTreeLeafPreimage(fr(seed + 1), fr(seed + 2), fr(seed + 3), BigInt(seed + 4)); + const siblingPath = new SiblingPath( + PUBLIC_DATA_TREE_HEIGHT, + Array.from({ length: PUBLIC_DATA_TREE_HEIGHT }, (_, i) => toBufferBE(BigInt(seed + i + 6), 32)), + ); + + return new PublicDataWitness(BigInt(seed + 5), leafPreimage, siblingPath); +} diff --git a/yarn-project/circuit-types/src/public_data_witness.ts b/yarn-project/circuit-types/src/public_data_witness.ts new file mode 100644 index 000000000000..a8a80a76e0c5 --- /dev/null +++ b/yarn-project/circuit-types/src/public_data_witness.ts @@ -0,0 +1,79 @@ +import { Fr, PUBLIC_DATA_TREE_HEIGHT, PublicDataTreeLeafPreimage } from '@aztec/circuits.js'; +import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +import { SiblingPath } from './sibling_path/sibling_path.js'; + +/** + * Public data witness. + * @remarks This allows to prove either: + * - That a slot in the public data tree is empty (0 value) if it falls within the range of the leaf. + * - The current value of a slot in the public data tree if it matches exactly the slot of the leaf. + */ +export class PublicDataWitness { + constructor( + /** + * The index of the leaf in the public data tree. + */ + public readonly index: bigint, + /** + * Preimage of a low leaf. 
All the slots in the range of the leaf are empty, and the current value of the + * leaf slot is stored in the leaf. + */ + public readonly leafPreimage: PublicDataTreeLeafPreimage, + /** + * Sibling path to prove membership of the leaf. + */ + public readonly siblingPath: SiblingPath, + ) {} + + /** + * Returns a field array representation of a public data witness. + * @returns A field array representation of a public data witness. + */ + public toFields(): Fr[] { + return [ + new Fr(this.index), + new Fr(this.leafPreimage.slot), + new Fr(this.leafPreimage.value), + new Fr(this.leafPreimage.nextIndex), + new Fr(this.leafPreimage.nextSlot), + ...this.siblingPath.toFields(), + ]; + } + + toBuffer(): Buffer { + return serializeToBuffer([this.index, this.leafPreimage, this.siblingPath]); + } + + /** + * Returns a string representation of the TxEffect object. + */ + toString(): string { + return this.toBuffer().toString('hex'); + } + + /** + * Deserializes an PublicDataWitness object from a buffer. + * @param buf - Buffer or BufferReader to deserialize. + * @returns An instance of PublicDataWitness. + */ + static fromBuffer(buffer: Buffer | BufferReader): PublicDataWitness { + const reader = BufferReader.asReader(buffer); + + return new PublicDataWitness( + toBigIntBE(reader.readBytes(32)), + reader.readObject(PublicDataTreeLeafPreimage), + SiblingPath.fromBuffer(reader.readBytes(4 + 32 * PUBLIC_DATA_TREE_HEIGHT)), + ); + } + + /** + * Deserializes an PublicDataWitness object from a string. + * @param str - String to deserialize. + * @returns An instance of PublicDataWitness. 
+ */ + static fromString(str: string) { + return PublicDataWitness.fromBuffer(Buffer.from(str, 'hex')); + } +} diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index 59032c18a768..24a5265b3056 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -9,6 +9,7 @@ import { } from '@aztec/circuit-types'; import { Fr, + type Gas, type Header, KernelCircuitPublicInputs, type Proof, @@ -68,6 +69,11 @@ export type ProcessedTx = Pick>; }; export type RevertedTx = ProcessedTx & { @@ -122,6 +128,7 @@ export function makeProcessedTx( proof: Proof, publicKernelRequests: PublicKernelRequest[], revertReason?: SimulationError, + gasUsed: ProcessedTx['gasUsed'] = {}, ): ProcessedTx { return { hash: tx.getTxHash(), @@ -132,6 +139,7 @@ export function makeProcessedTx( isEmpty: false, revertReason, publicKernelRequests, + gasUsed, }; } @@ -156,6 +164,7 @@ export function makeEmptyProcessedTx(header: Header, chainId: Fr, version: Fr): isEmpty: true, revertReason: undefined, publicKernelRequests: [], + gasUsed: {}, }; } diff --git a/yarn-project/circuit-types/src/tx/simulated_tx.test.ts b/yarn-project/circuit-types/src/tx/simulated_tx.test.ts index 8dd9ccc5c25b..be7f94291a39 100644 --- a/yarn-project/circuit-types/src/tx/simulated_tx.test.ts +++ b/yarn-project/circuit-types/src/tx/simulated_tx.test.ts @@ -1,16 +1,59 @@ +import { Gas } from '@aztec/circuits.js'; + import { mockSimulatedTx } from '../mocks.js'; +import { PublicKernelType } from './processed_tx.js'; import { SimulatedTx } from './simulated_tx.js'; describe('simulated_tx', () => { - it('convert to and from json', () => { - const simulatedTx = mockSimulatedTx(); - expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx); + let simulatedTx: SimulatedTx; + + beforeEach(() => { + simulatedTx = mockSimulatedTx(); + }); + + describe('json', () => { + it('convert to and from json', () 
=> { + expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx); + }); + + it('convert undefined effects to and from json', () => { + simulatedTx.privateReturnValues = undefined; + simulatedTx.publicOutput = undefined; + expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx); + }); }); - it('convert undefined effects to and from json', () => { - const simulatedTx = mockSimulatedTx(); - simulatedTx.privateReturnValues = undefined; - simulatedTx.publicOutput = undefined; - expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx); + describe('getGasLimits', () => { + beforeEach(() => { + simulatedTx.tx.data.publicInputs.end.gasUsed = Gas.from({ daGas: 100, l2Gas: 200 }); + simulatedTx.publicOutput!.gasUsed = { + [PublicKernelType.SETUP]: Gas.from({ daGas: 10, l2Gas: 20 }), + [PublicKernelType.APP_LOGIC]: Gas.from({ daGas: 20, l2Gas: 40 }), + [PublicKernelType.TEARDOWN]: Gas.from({ daGas: 10, l2Gas: 20 }), + }; + }); + + it('returns gas limits from private gas usage only', () => { + simulatedTx.publicOutput = undefined; + // Should be 110 and 220 but oh floating point + expect(simulatedTx.getGasLimits()).toEqual({ + totalGas: Gas.from({ daGas: 111, l2Gas: 221 }), + teardownGas: Gas.empty(), + }); + }); + + it('returns gas limits for private and public', () => { + expect(simulatedTx.getGasLimits()).toEqual({ + totalGas: Gas.from({ daGas: 154, l2Gas: 308 }), + teardownGas: Gas.from({ daGas: 11, l2Gas: 22 }), + }); + }); + + it('pads gas limits', () => { + expect(simulatedTx.getGasLimits(1)).toEqual({ + totalGas: Gas.from({ daGas: 280, l2Gas: 560 }), + teardownGas: Gas.from({ daGas: 20, l2Gas: 40 }), + }); + }); }); }); diff --git a/yarn-project/circuit-types/src/tx/simulated_tx.ts b/yarn-project/circuit-types/src/tx/simulated_tx.ts index bf012e0f9012..61883a5d1f5d 100644 --- a/yarn-project/circuit-types/src/tx/simulated_tx.ts +++ b/yarn-project/circuit-types/src/tx/simulated_tx.ts @@ -1,7 +1,8 @@ -import { 
CombinedAccumulatedData, CombinedConstantData, Fr } from '@aztec/circuits.js'; +import { CombinedAccumulatedData, CombinedConstantData, Fr, Gas } from '@aztec/circuits.js'; +import { mapValues } from '@aztec/foundation/collection'; import { EncryptedTxL2Logs, UnencryptedTxL2Logs } from '../logs/index.js'; -import { type ProcessedTx } from './processed_tx.js'; +import { type ProcessedTx, PublicKernelType } from './processed_tx.js'; import { Tx } from './tx.js'; /** Return values of simulating a circuit. */ @@ -11,7 +12,7 @@ export type ProcessReturnValues = Fr[] | undefined; * Outputs of processing the public component of a transaction. * REFACTOR: Rename. */ -export type ProcessOutput = Pick & +export type ProcessOutput = Pick & Pick & { publicReturnValues: ProcessReturnValues }; function processOutputToJSON(output: ProcessOutput) { @@ -22,6 +23,7 @@ function processOutputToJSON(output: ProcessOutput) { constants: output.constants.toBuffer().toString('hex'), end: output.end.toBuffer().toString('hex'), publicReturnValues: output.publicReturnValues?.map(fr => fr.toString()), + gasUsed: mapValues(output.gasUsed, gas => gas?.toJSON()), }; } @@ -33,6 +35,7 @@ function processOutputFromJSON(json: any): ProcessOutput { constants: CombinedConstantData.fromBuffer(Buffer.from(json.constants, 'hex')), end: CombinedAccumulatedData.fromBuffer(Buffer.from(json.end, 'hex')), publicReturnValues: json.publicReturnValues?.map(Fr.fromString), + gasUsed: mapValues(json.gasUsed, gas => (gas ? Gas.fromJSON(gas) : undefined)), }; } @@ -45,6 +48,29 @@ function processOutputFromJSON(json: any): ProcessOutput { export class SimulatedTx { constructor(public tx: Tx, public privateReturnValues?: ProcessReturnValues, public publicOutput?: ProcessOutput) {} + /** + * Returns suggested total and teardown gas limits for the simulated tx. + * Note that public gas usage is only accounted for if the publicOutput is present. 
+ * @param pad - Percentage to pad the suggested gas limits by, defaults to 10%. + */ + public getGasLimits(pad = 0.1) { + const privateGasUsed = this.tx.data.publicInputs.end.gasUsed; + if (this.publicOutput) { + const publicGasUsed = Object.values(this.publicOutput.gasUsed).reduce( + (total, current) => total.add(current), + Gas.empty(), + ); + const teardownGas = this.publicOutput.gasUsed[PublicKernelType.TEARDOWN] ?? Gas.empty(); + + return { + totalGas: privateGasUsed.add(publicGasUsed).mul(1 + pad), + teardownGas: teardownGas.mul(1 + pad), + }; + } + + return { totalGas: privateGasUsed.mul(1 + pad), teardownGas: Gas.empty() }; + } + /** * Convert a SimulatedTx class object to a plain JSON object. * @returns A plain object with SimulatedTx properties. diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 70d3975bfea9..8e143fd70ca5 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -74,7 +74,7 @@ export const REGISTERER_UNCONSTRAINED_FUNCTION_BROADCASTED_MAGIC_VALUE = 0xe7af816635466f128568edb04c9fa024f6c87fb9010fdbffa68b3d99n; export const DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631n; -export const DEPLOYER_CONTRACT_ADDRESS = 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165n; +export const DEPLOYER_CONTRACT_ADDRESS = 0x2e9c386f07e22a1d24e677ab70407b2dd0adbc7cafb9c822bf249685d6a2e4ccn; export const DEFAULT_GAS_LIMIT = 1_000_000_000; export const DEFAULT_TEARDOWN_GAS_LIMIT = 100_000_000; export const DEFAULT_MAX_FEE_PER_GAS = 10; @@ -98,15 +98,17 @@ export const FUNCTION_LEAF_PREIMAGE_LENGTH = 5; export const GLOBAL_VARIABLES_LENGTH = 6 + GAS_FEES_LENGTH; export const APPEND_ONLY_TREE_SNAPSHOT_LENGTH = 2; export const L1_TO_L2_MESSAGE_LENGTH = 6; -export const L2_TO_L1_MESSAGE_LENGTH = 2; +export const L2_TO_L1_MESSAGE_LENGTH = 3; +export const 
SCOPED_L2_TO_L1_MESSAGE_LENGTH = L2_TO_L1_MESSAGE_LENGTH + 1; export const MAX_BLOCK_NUMBER_LENGTH = 2; export const NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; -export const NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH = 4; +export const SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; export const PARTIAL_STATE_REFERENCE_LENGTH = 6; export const READ_REQUEST_LENGTH = 2; export const NOTE_HASH_LENGTH = 2; -export const NOTE_HASH_CONTEXT_LENGTH = 3; +export const SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; export const NULLIFIER_LENGTH = 3; +export const SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; export const SIDE_EFFECT_LENGTH = 2; export const STATE_REFERENCE_LENGTH = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; export const TX_CONTEXT_LENGTH = 2 + GAS_SETTINGS_LENGTH; diff --git a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts index 7c2f508e4fbb..fa15e1b15edd 100644 --- a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts @@ -6,12 +6,14 @@ import { type Tuple } from '@aztec/foundation/serialize'; import { MAX_NEW_NOTE_HASHES_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX } from '../constants.gen.js'; import { siloNoteHash } from '../hash/index.js'; import { - NoteHashContext, + NoteHash, type NoteHashReadRequestHints, NoteHashReadRequestHintsBuilder, PendingReadHint, - ReadRequestContext, + ReadRequest, ReadRequestStatus, + type ScopedNoteHash, + ScopedReadRequest, SettledReadHint, } from '../structs/index.js'; import { buildNoteHashReadRequestHints } from './build_note_hash_read_request_hints.js'; @@ -25,8 +27,8 @@ describe('buildNoteHashReadRequestHints', () => { getNoteHashMembershipWitness: (leafIndex: bigint) => settledLeafIndexes.includes(leafIndex) ? 
({} as any) : undefined, }; - let noteHashReadRequests: Tuple; - let noteHashes: Tuple; + let noteHashReadRequests: Tuple; + let noteHashes: Tuple; let noteHashLeafIndexMap: Map = new Map(); let expectedHints: NoteHashReadRequestHints; let numReadRequests = 0; @@ -36,12 +38,9 @@ describe('buildNoteHashReadRequestHints', () => { const innerNoteHash = (index: number) => index + 9999; const makeReadRequest = (value: number, counter = 2) => - new ReadRequestContext(new Fr(value), counter, contractAddress); + new ReadRequest(new Fr(value), counter).scope(contractAddress); - function makeNoteHash(value: number, counter = 1) { - const siloedValue = siloNoteHash(contractAddress, new Fr(value)); - return new NoteHashContext(siloedValue, counter, 0); - } + const makeNoteHash = (value: number, counter = 1) => new NoteHash(new Fr(value), counter).scope(0, contractAddress); const readPendingNoteHash = ({ noteHashIndex, @@ -68,7 +67,7 @@ describe('buildNoteHashReadRequestHints', () => { } = {}) => { const value = settledNoteHashes[hintIndex]; noteHashLeafIndexMap.set(value.toBigInt(), settledLeafIndexes[hintIndex]); - noteHashReadRequests[readRequestIndex] = new ReadRequestContext(value, 1, contractAddress); + noteHashReadRequests[readRequestIndex] = new ReadRequest(value, 1).scope(contractAddress); expectedHints.readRequestStatuses[readRequestIndex] = ReadRequestStatus.settled(hintIndex); expectedHints.settledReadHints[hintIndex] = new SettledReadHint(readRequestIndex, {} as any, value); numReadRequests++; @@ -79,7 +78,7 @@ describe('buildNoteHashReadRequestHints', () => { buildNoteHashReadRequestHints(oracle, noteHashReadRequests, noteHashes, noteHashLeafIndexMap); beforeEach(() => { - noteHashReadRequests = makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ReadRequestContext.empty); + noteHashReadRequests = makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ScopedReadRequest.empty); noteHashes = makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, i => makeNoteHash(innerNoteHash(i))); 
noteHashLeafIndexMap = new Map(); expectedHints = NoteHashReadRequestHintsBuilder.empty(); @@ -121,7 +120,7 @@ describe('buildNoteHashReadRequestHints', () => { it('throws if cannot find a match in pending set and in the tree', async () => { readPendingNoteHash({ noteHashIndex: 2 }); // Tweak the value of the read request. - noteHashReadRequests[0].value = new Fr(123); + noteHashReadRequests[0].readRequest.value = new Fr(123); await expect(() => buildHints()).rejects.toThrow('Read request is reading an unknown note hash.'); }); }); diff --git a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts index 0364333d56e5..4fc62f762162 100644 --- a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts @@ -5,20 +5,18 @@ import { type MAX_NOTE_HASH_READ_REQUESTS_PER_TX, type NOTE_HASH_TREE_HEIGHT, } from '../constants.gen.js'; -import { siloNoteHash } from '../hash/index.js'; import { type MembershipWitness, - type NoteHashContext, NoteHashReadRequestHintsBuilder, - type ReadRequestContext, + type ScopedNoteHash, + type ScopedReadRequest, } from '../structs/index.js'; import { countAccumulatedItems, getNonEmptyItems } from '../utils/index.js'; -function isValidNoteHashReadRequest(readRequest: ReadRequestContext, noteHash: NoteHashContext) { - // TODO(#6122) +function isValidNoteHashReadRequest(readRequest: ScopedReadRequest, noteHash: ScopedNoteHash) { return ( - // noteHash.value.equals(readRequest.value) && - noteHash.counter < readRequest.counter && + noteHash.contractAddress.equals(readRequest.contractAddress) && + readRequest.counter > noteHash.counter && (noteHash.nullifierCounter === 0 || noteHash.nullifierCounter > readRequest.counter) ); } @@ -27,15 +25,15 @@ export async function buildNoteHashReadRequestHints( oracle: { getNoteHashMembershipWitness(leafIndex: bigint): 
Promise>; }, - noteHashReadRequests: Tuple, - noteHashes: Tuple, + noteHashReadRequests: Tuple, + noteHashes: Tuple, noteHashLeafIndexMap: Map, ) { const builder = new NoteHashReadRequestHintsBuilder(); const numReadRequests = countAccumulatedItems(noteHashReadRequests); - const noteHashMap: Map = new Map(); + const noteHashMap: Map = new Map(); getNonEmptyItems(noteHashes).forEach((noteHash, index) => { const value = noteHash.value.toBigInt(); const arr = noteHashMap.get(value) ?? []; @@ -45,17 +43,15 @@ export async function buildNoteHashReadRequestHints( for (let i = 0; i < numReadRequests; ++i) { const readRequest = noteHashReadRequests[i]; - // TODO(#2847): Read request value shouldn't have been siloed by apps. const value = readRequest.value; - // But reads for transient note hash are not siloed. - const siloedValue = siloNoteHash(readRequest.contractAddress, readRequest.value); const pendingNoteHash = noteHashMap - .get(siloedValue.toBigInt()) + .get(value.toBigInt()) ?.find(n => isValidNoteHashReadRequest(readRequest, n.noteHash)); if (pendingNoteHash !== undefined) { builder.addPendingReadRequest(i, pendingNoteHash.index); } else { + // TODO(#2847): Read request value for settled note hash shouldn't have been siloed by apps. 
const leafIndex = noteHashLeafIndexMap.get(value.toBigInt()); if (leafIndex === undefined) { throw new Error('Read request is reading an unknown note hash.'); diff --git a/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.test.ts b/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.test.ts index 7d4040e35c87..89a2fb7ed6da 100644 --- a/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.test.ts @@ -5,7 +5,12 @@ import { Fr } from '@aztec/foundation/fields'; import { MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX } from '../constants.gen.js'; import { siloNullifier } from '../hash/index.js'; -import { Nullifier, NullifierNonExistentReadRequestHintsBuilder, ReadRequestContext } from '../structs/index.js'; +import { + Nullifier, + NullifierNonExistentReadRequestHintsBuilder, + ReadRequest, + ScopedReadRequest, +} from '../structs/index.js'; import { buildNullifierNonExistentReadRequestHints } from './build_nullifier_non_existent_read_request_hints.js'; describe('buildNullifierNonExistentReadRequestHints', () => { @@ -13,13 +18,13 @@ describe('buildNullifierNonExistentReadRequestHints', () => { const oracle = { getLowNullifierMembershipWitness: () => ({ membershipWitness: {}, leafPreimage: {} } as any), }; - const nonExistentReadRequests = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ReadRequestContext.empty); + const nonExistentReadRequests = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest.empty); let nullifiers = makeTuple(MAX_NEW_NULLIFIERS_PER_TX, Nullifier.empty); const innerNullifier = (index: number) => index + 1; const makeReadRequest = (value: number, counter = 2) => - new ReadRequestContext(new Fr(value), counter, contractAddress); + new ReadRequest(new Fr(value), counter).scope(contractAddress); const makeNullifier = (value: number, 
counter = 1) => { const siloedValue = siloNullifier(contractAddress, new Fr(value)); diff --git a/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.ts b/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.ts index c2f657014025..5bb6fa3eb764 100644 --- a/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.ts @@ -12,7 +12,7 @@ import { siloNullifier } from '../hash/index.js'; import { Nullifier } from '../structs/index.js'; import { type MembershipWitness } from '../structs/membership_witness.js'; import { NullifierNonExistentReadRequestHintsBuilder } from '../structs/non_existent_read_request_hints.js'; -import { type ReadRequestContext } from '../structs/read_request.js'; +import { type ScopedReadRequest } from '../structs/read_request.js'; import { countAccumulatedItems } from '../utils/index.js'; interface NullifierMembershipWitnessWithPreimage { @@ -53,7 +53,7 @@ export async function buildNullifierNonExistentReadRequestHints( oracle: { getLowNullifierMembershipWitness(nullifier: Fr): Promise; }, - nullifierNonExistentReadRequests: Tuple, + nullifierNonExistentReadRequests: Tuple, pendingNullifiers: Tuple, ) { const { sortedValues, sortedIndexHints } = sortNullifiersByValues(pendingNullifiers); diff --git a/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.test.ts b/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.test.ts index 5297a84f5ad9..bec3d9d5a8b3 100644 --- a/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.test.ts @@ -4,15 +4,16 @@ import { Fr } from '@aztec/foundation/fields'; import { type Tuple } from '@aztec/foundation/serialize'; import { MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX } from 
'../constants.gen.js'; -import { siloNullifier } from '../hash/index.js'; import { Nullifier, type NullifierReadRequestHints, NullifierReadRequestHintsBuilder, PendingReadHint, - ReadRequestContext, + ReadRequest, ReadRequestState, ReadRequestStatus, + type ScopedNullifier, + ScopedReadRequest, SettledReadHint, } from '../structs/index.js'; import { buildNullifierReadRequestHints } from './build_nullifier_read_request_hints.js'; @@ -23,8 +24,8 @@ describe('buildNullifierReadRequestHints', () => { const oracle = { getNullifierMembershipWitness: () => ({ membershipWitness: {}, leafPreimage: {} } as any), }; - let nullifierReadRequests: Tuple; - let nullifiers: Tuple; + let nullifierReadRequests: Tuple; + let nullifiers: Tuple; let expectedHints: NullifierReadRequestHints; let numReadRequests = 0; let numPendingReads = 0; @@ -33,12 +34,10 @@ describe('buildNullifierReadRequestHints', () => { const innerNullifier = (index: number) => index + 1; const makeReadRequest = (value: number, counter = 2) => - new ReadRequestContext(new Fr(value), counter, contractAddress); + new ReadRequest(new Fr(value), counter).scope(contractAddress); - function makeNullifier(value: number, counter = 1) { - const siloedValue = siloNullifier(contractAddress, new Fr(value)); - return new Nullifier(siloedValue, counter, Fr.ZERO); - } + const makeNullifier = (value: number, counter = 1) => + new Nullifier(new Fr(value), counter, Fr.ZERO).scope(contractAddress); const readPendingNullifier = ({ nullifierIndex, @@ -73,7 +72,7 @@ describe('buildNullifierReadRequestHints', () => { const buildHints = () => buildNullifierReadRequestHints(oracle, nullifierReadRequests, nullifiers); beforeEach(() => { - nullifierReadRequests = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ReadRequestContext.empty); + nullifierReadRequests = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest.empty); nullifiers = makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => makeNullifier(innerNullifier(i))); expectedHints = 
NullifierReadRequestHintsBuilder.empty(); numReadRequests = 0; diff --git a/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.ts b/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.ts index 3b17bd9d3b55..9a1cbb38ae89 100644 --- a/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.ts @@ -1,20 +1,24 @@ +import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { padArrayEnd } from '@aztec/foundation/collection'; import { type Fr } from '@aztec/foundation/fields'; import { type Tuple } from '@aztec/foundation/serialize'; import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { type MAX_NEW_NULLIFIERS_PER_TX, - type MAX_NULLIFIER_READ_REQUESTS_PER_TX, + MAX_NULLIFIER_READ_REQUESTS_PER_TX, type NULLIFIER_TREE_HEIGHT, } from '../constants.gen.js'; -import { siloNullifier } from '../hash/index.js'; +import { siloNullifier } from '../hash/hash.js'; import { type MembershipWitness, - type Nullifier, + Nullifier, NullifierReadRequestHintsBuilder, - type ReadRequestContext, + ReadRequest, + type ScopedNullifier, + ScopedReadRequest, } from '../structs/index.js'; -import { countAccumulatedItems } from '../utils/index.js'; +import { countAccumulatedItems, getNonEmptyItems } from '../utils/index.js'; interface NullifierMembershipWitnessWithPreimage { membershipWitness: MembershipWitness; @@ -25,26 +29,36 @@ export async function buildNullifierReadRequestHints( oracle: { getNullifierMembershipWitness(nullifier: Fr): Promise; }, - nullifierReadRequests: Tuple, - nullifiers: Tuple, + nullifierReadRequests: Tuple, + nullifiers: Tuple, + siloed = false, ) { const builder = new NullifierReadRequestHintsBuilder(); const numReadRequests = countAccumulatedItems(nullifierReadRequests); - const nullifierIndexMap: Map = new Map(); - nullifiers.forEach((n, i) => nullifierIndexMap.set(n.value.toBigInt(), i)); + const 
nullifierMap: Map = new Map(); + getNonEmptyItems(nullifiers).forEach((nullifier, index) => { + const value = nullifier.value.toBigInt(); + const arr = nullifierMap.get(value) ?? []; + arr.push({ nullifier, index }); + nullifierMap.set(value, arr); + }); for (let i = 0; i < numReadRequests; ++i) { const readRequest = nullifierReadRequests[i]; - // TODO - Should be comparing un-siloed values and contract addresses. - const value = siloNullifier(readRequest.contractAddress, readRequest.value); + const pendingNullifier = nullifierMap + .get(readRequest.value.toBigInt()) + ?.find( + ({ nullifier }) => + nullifier.contractAddress.equals(readRequest.contractAddress) && readRequest.counter > nullifier.counter, + ); - const pendingValueIndex = nullifierIndexMap.get(value.toBigInt()); - if (pendingValueIndex !== undefined) { - builder.addPendingReadRequest(i, pendingValueIndex); + if (pendingNullifier !== undefined) { + builder.addPendingReadRequest(i, pendingNullifier.index); } else { - const membershipWitnessWithPreimage = await oracle.getNullifierMembershipWitness(value); + const siloedValue = siloed ? readRequest.value : siloNullifier(readRequest.contractAddress, readRequest.value); + const membershipWitnessWithPreimage = await oracle.getNullifierMembershipWitness(siloedValue); builder.addSettledReadRequest( i, membershipWitnessWithPreimage.membershipWitness, @@ -54,3 +68,27 @@ export async function buildNullifierReadRequestHints( } return builder.toHints(); } + +export function buildSiloedNullifierReadRequestHints( + oracle: { + getNullifierMembershipWitness(nullifier: Fr): Promise; + }, + nullifierReadRequests: Tuple, + nullifiers: Tuple, +) { + // Nullifiers outputted from public kernels are already siloed while read requests are not. + // Siloing the read request values and set the contract addresses to zero to find the matching nullifier contexts. 
+ const siloedReadRequests = padArrayEnd( + getNonEmptyItems(nullifierReadRequests).map(r => + new ReadRequest(siloNullifier(r.contractAddress, r.value), r.counter).scope(AztecAddress.ZERO), + ), + ScopedReadRequest.empty(), + MAX_NULLIFIER_READ_REQUESTS_PER_TX, + ); + + const scopedNullifiers = nullifiers.map(n => + new Nullifier(n.value, n.counter, n.noteHash).scope(AztecAddress.ZERO), + ) as Tuple; + + return buildNullifierReadRequestHints(oracle, siloedReadRequests, scopedNullifiers, true); +} diff --git a/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts b/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts index a0d4e3fee60a..783ea56c8850 100644 --- a/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts @@ -1,22 +1,24 @@ -import { Fr, NoteHashContext, Nullifier } from '@aztec/circuits.js'; +import { AztecAddress, Fr, NoteHash, Nullifier, type ScopedNoteHash, type ScopedNullifier } from '@aztec/circuits.js'; import { buildTransientDataHints } from './build_transient_data_hints.js'; describe('buildTransientDataHints', () => { - let noteHashes: NoteHashContext[]; - let nullifiers: Nullifier[]; + const contractAddress = AztecAddress.fromBigInt(987654n); + + let noteHashes: ScopedNoteHash[]; + let nullifiers: ScopedNullifier[]; beforeEach(() => { noteHashes = [ - new NoteHashContext(new Fr(11), 100, 700), - new NoteHashContext(new Fr(22), 200, 0), - new NoteHashContext(new Fr(33), 300, 500), + new NoteHash(new Fr(11), 100).scope(700, contractAddress), + new NoteHash(new Fr(22), 200).scope(0, contractAddress), + new NoteHash(new Fr(33), 300).scope(500, contractAddress), ]; nullifiers = [ - new Nullifier(new Fr(44), 400, new Fr(0)), - new Nullifier(new Fr(55), 500, new Fr(33)), - new Nullifier(new Fr(66), 600, new Fr(0)), - new Nullifier(new Fr(77), 700, new Fr(11)), + new Nullifier(new Fr(44), 400, new Fr(0)).scope(contractAddress), + 
new Nullifier(new Fr(55), 500, new Fr(33)).scope(contractAddress), + new Nullifier(new Fr(66), 600, new Fr(0)).scope(contractAddress), + new Nullifier(new Fr(77), 700, new Fr(11)).scope(contractAddress), ]; }); @@ -32,7 +34,14 @@ describe('buildTransientDataHints', () => { }); it('throws if note hash does not match', () => { - nullifiers[1].noteHash = new Fr(11); + nullifiers[1].nullifier.noteHash = new Fr(11); expect(() => buildTransientDataHints(noteHashes, nullifiers)).toThrow('Hinted note hash does not match.'); }); + + it('throws if contract address does not match', () => { + nullifiers[1].contractAddress = AztecAddress.fromBigInt(123456n); + expect(() => buildTransientDataHints(noteHashes, nullifiers)).toThrow( + 'Contract address of hinted note hash does not match.', + ); + }); }); diff --git a/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts b/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts index bee36948fd75..a9664d6e5ce6 100644 --- a/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts @@ -1,10 +1,10 @@ -import { type NoteHashContext, type Nullifier, countAccumulatedItems } from '@aztec/circuits.js'; +import { type ScopedNoteHash, type ScopedNullifier, countAccumulatedItems } from '@aztec/circuits.js'; import { makeTuple } from '@aztec/foundation/array'; import { type Tuple } from '@aztec/foundation/serialize'; export function buildTransientDataHints( - noteHashes: Tuple, - nullifiers: Tuple, + noteHashes: Tuple, + nullifiers: Tuple, noteHashesLength: NOTE_HASHES_LEN = noteHashes.length as NOTE_HASHES_LEN, nullifiersLength: NULLIFIERS_LEN = nullifiers.length as NULLIFIERS_LEN, ): [Tuple, Tuple] { @@ -31,9 +31,12 @@ export function buildTransientDataHints`; +exports[`PrivateCallStackItem computes empty item hash 1`] = `Fr<0x2a1bab3d40feb5234df51a7a6665998920119fd60f5c1e4d9ff3f1128a5f8f81>`; -exports[`PrivateCallStackItem computes hash 1`] = 
`Fr<0x0efad8edafef07ee5165f01a51dec26edc7fd28f55eff90478d86f8a95a5352b>`; +exports[`PrivateCallStackItem computes hash 1`] = `Fr<0x1368f96c8d186bfc35d8dc71a0ac006d12e25cfa9fdf12bd3bd5af001049933f>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap index a01d735ecb78..54ef2021ca43 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PrivateCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x249d46b5a3e35f6489e793cd604e375634d4bfdac762ec06b5f8f03016bb4257>`; +exports[`PrivateCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x09cc3ed80b2171f093828087431d66777514912b4e7baddb418ab5f1ddbbfd5a>`; -exports[`PrivateCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x2f33953d4e47a0ebbe6ae3f4785ada5d107383e82038e7caf27cc37fdb69a088>`; +exports[`PrivateCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x03dee3f2b52e26410a7a69b1c67e7aee5012d9acd53c85f72ab83917e1f4a8f6>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap index 4a2e6a331794..0ccb386246e3 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap @@ -1,9 +1,9 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PublicCallStackItem Computes a callstack item hash 1`] = `"0x1122a7d7e6174b7e5d111c8eb0233564d3a1ffd755afc7ce4b594d738e2770d7"`; +exports[`PublicCallStackItem Computes a callstack item hash 1`] = 
`"0x0f7624c0d5ea65fcec318c4d34cb3fcbf9c67435aebbf1548b3c90ef641424f8"`; -exports[`PublicCallStackItem Computes a callstack item request hash 1`] = `"0x1595b195f0faa3a492109039dc807b291d0edd81a5e3a380866d5098ffd505dd"`; +exports[`PublicCallStackItem Computes a callstack item request hash 1`] = `"0x1177a69fbc37f0ebdf290025414ff72504497840f174896bd427d0f30ec21c55"`; -exports[`PublicCallStackItem computes empty item hash 1`] = `Fr<0x302550c2014c51737798139c9a80af984fa23be608c9758de295181944dddf66>`; +exports[`PublicCallStackItem computes empty item hash 1`] = `Fr<0x020b98dcc882881a349edfd43044d58c8703fdcfc9d4b250b799d951608dcd6b>`; -exports[`PublicCallStackItem computes hash 1`] = `Fr<0x1682642d96f9873ed85f245b4ca2ec93d2a0e11ba8e3d614f94ba409030af2c9>`; +exports[`PublicCallStackItem computes hash 1`] = `Fr<0x18d2b726728360b534121bb15accd1059f7df38225e76768e64d3e3040122440>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap index 6cf756de088b..834668caf755 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PublicCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x1a2da219bb2e3ac24519fd844365c4f656fc3ba8c58f2960706d25bceb4d1769>`; +exports[`PublicCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x132559f41b7adc7388e0cd52b91fd6837c296b2f9ec1b6d2ed046f7a56db18f8>`; -exports[`PublicCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x05db8cb4a08d8d1f5b0f38b2ef50f0bf70b4ed33099f649062326084197f1b79>`; +exports[`PublicCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x0ac3cb8eb6605fc7aa83e9420eb988c1f6c9a5dcc2457c133216624bc6932619>`; diff --git 
a/yarn-project/circuits.js/src/structs/gas.ts b/yarn-project/circuits.js/src/structs/gas.ts index 94af1bdb0216..e913190032be 100644 --- a/yarn-project/circuits.js/src/structs/gas.ts +++ b/yarn-project/circuits.js/src/structs/gas.ts @@ -83,4 +83,12 @@ export class Gas { const reader = FieldReader.asReader(fields); return new Gas(reader.readU32(), reader.readU32()); } + + toJSON() { + return { daGas: this.daGas, l2Gas: this.l2Gas }; + } + + static fromJSON(json: any) { + return new Gas(json.daGas, json.l2Gas); + } } diff --git a/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts b/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts index 6d074a9e6a80..3d0029b199b3 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts @@ -13,8 +13,9 @@ import { MAX_UNENCRYPTED_LOGS_PER_TX, } from '../../constants.gen.js'; import { CallRequest } from '../call_request.js'; -import { NoteHashContext } from '../note_hash.js'; -import { Nullifier } from '../nullifier.js'; +import { ScopedL2ToL1Message } from '../l2_to_l1_message.js'; +import { ScopedNoteHash } from '../note_hash.js'; +import { ScopedNullifier } from '../nullifier.js'; import { SideEffect } from '../side_effects.js'; /** @@ -26,15 +27,15 @@ export class PrivateAccumulatedData { /** * The new note hashes made in this transaction. */ - public newNoteHashes: Tuple, + public newNoteHashes: Tuple, /** * The new nullifiers made in this transaction. */ - public newNullifiers: Tuple, + public newNullifiers: Tuple, /** * All the new L2 to L1 messages created in this transaction. */ - public newL2ToL1Msgs: Tuple, + public newL2ToL1Msgs: Tuple, /** * Accumulated encrypted logs hash from all the previous kernel iterations. * Note: Represented as a tuple of 2 fields in order to fit in all of the 256 bits of sha256 hash. 
@@ -90,9 +91,9 @@ export class PrivateAccumulatedData { static fromBuffer(buffer: Buffer | BufferReader): PrivateAccumulatedData { const reader = BufferReader.asReader(buffer); return new PrivateAccumulatedData( - reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext), - reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, Nullifier), - reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr), + reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash), + reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), + reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_TX, ScopedL2ToL1Message), reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect), reader.readArray(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect), Fr.fromBuffer(reader), @@ -113,9 +114,9 @@ export class PrivateAccumulatedData { static empty() { return new PrivateAccumulatedData( - makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext.empty), - makeTuple(MAX_NEW_NULLIFIERS_PER_TX, Nullifier.empty), - makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr.zero), + makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash.empty), + makeTuple(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier.empty), + makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, ScopedL2ToL1Message.empty), makeTuple(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect.empty), makeTuple(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect.empty), Fr.zero(), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts index 8fa1de2c2ac2..36ecd40f5c79 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts @@ -10,8 +10,8 @@ import { } from '../../constants.gen.js'; import { type GrumpkinPrivateKey } from '../../types/grumpkin_private_key.js'; import { countAccumulatedItems } from '../../utils/index.js'; -import { NoteHashContext } from 
'../note_hash.js'; -import { Nullifier } from '../nullifier.js'; +import { ScopedNoteHash } from '../note_hash.js'; +import { ScopedNullifier } from '../nullifier.js'; import { type NoteHashReadRequestHints, type NullifierReadRequestHints, @@ -23,8 +23,8 @@ import { PrivateKernelData } from './private_kernel_data.js'; export class PrivateKernelTailOutputs { constructor( - public noteHashes: Tuple, - public nullifiers: Tuple, + public noteHashes: Tuple, + public nullifiers: Tuple, ) {} toBuffer() { @@ -34,8 +34,8 @@ export class PrivateKernelTailOutputs { static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); return new PrivateKernelTailOutputs( - reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext), - reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, Nullifier), + reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash), + reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), ); } } @@ -66,7 +66,7 @@ export class PrivateKernelTailHints { /* * The sorted new note hashes. */ - public sortedNewNoteHashes: Tuple, + public sortedNewNoteHashes: Tuple, /** * The sorted new note hashes indexes. Maps original to sorted. */ @@ -74,7 +74,7 @@ export class PrivateKernelTailHints { /** * The sorted new nullifiers. Maps original to sorted. */ - public sortedNewNullifiers: Tuple, + public sortedNewNullifiers: Tuple, /** * The sorted new nullifiers indexes. 
*/ @@ -128,9 +128,9 @@ export class PrivateKernelTailHints { reader.readObject({ fromBuffer: noteHashReadRequestHintsFromBuffer }), reader.readObject({ fromBuffer: nullifierReadRequestHintsFromBuffer }), reader.readArray(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, GrumpkinScalar), - reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext), + reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash), reader.readNumbers(MAX_NEW_NOTE_HASHES_PER_TX), - reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, Nullifier), + reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), reader.readNumbers(MAX_NEW_NULLIFIERS_PER_TX), reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect), reader.readNumbers(MAX_ENCRYPTED_LOGS_PER_TX), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts index 41e7f4dd3f23..f0d80109bd32 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts @@ -124,6 +124,10 @@ export class PrivateKernelTailCircuitPublicInputs { } } + get publicInputs(): PartialPrivateTailPublicInputsForPublic | PartialPrivateTailPublicInputsForRollup { + return (this.forPublic ?? 
this.forRollup)!; + } + toPublicKernelCircuitPublicInputs() { if (!this.forPublic) { throw new Error('Private tail public inputs is not for public circuit.'); diff --git a/yarn-project/circuits.js/src/structs/l2_to_l1_message.ts b/yarn-project/circuits.js/src/structs/l2_to_l1_message.ts index ef4bb082386d..8d5a1e214d41 100644 --- a/yarn-project/circuits.js/src/structs/l2_to_l1_message.ts +++ b/yarn-project/circuits.js/src/structs/l2_to_l1_message.ts @@ -1,3 +1,4 @@ +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -5,14 +6,14 @@ import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/ import { L2_TO_L1_MESSAGE_LENGTH } from '../constants.gen.js'; export class L2ToL1Message { - constructor(public recipient: EthAddress, public content: Fr) {} + constructor(public recipient: EthAddress, public content: Fr, public counter: number) {} /** * Creates an empty L2ToL1Message with default values. * @returns An instance of L2ToL1Message with empty fields. */ static empty(): L2ToL1Message { - return new L2ToL1Message(EthAddress.ZERO, Fr.zero()); + return new L2ToL1Message(EthAddress.ZERO, Fr.zero(), 0); } /** @@ -21,7 +22,9 @@ export class L2ToL1Message { * @returns True if both recipient and content are equal. */ equals(other: L2ToL1Message): boolean { - return this.recipient.equals(other.recipient) && this.content.equals(other.content); + return ( + this.recipient.equals(other.recipient) && this.content.equals(other.content) && this.counter === other.counter + ); } /** @@ -29,7 +32,7 @@ export class L2ToL1Message { * @returns The buffer. 
*/ toBuffer(): Buffer { - return serializeToBuffer(this.recipient, this.content); + return serializeToBuffer(this.recipient, this.content, this.counter); } /** @@ -37,7 +40,7 @@ export class L2ToL1Message { * @returns An array of fields representing the serialized message. */ toFields(): Fr[] { - const fields = [this.recipient.toField(), this.content]; + const fields = [this.recipient.toField(), this.content, new Fr(this.counter)]; if (fields.length !== L2_TO_L1_MESSAGE_LENGTH) { throw new Error( `Invalid number of fields for L2ToL1Message. Expected ${L2_TO_L1_MESSAGE_LENGTH}, got ${fields.length}`, @@ -53,7 +56,7 @@ export class L2ToL1Message { */ static fromFields(fields: Fr[] | FieldReader): L2ToL1Message { const reader = FieldReader.asReader(fields); - return new L2ToL1Message(reader.readObject(EthAddress), reader.readField()); + return new L2ToL1Message(reader.readObject(EthAddress), reader.readField(), reader.readU32()); } /** @@ -63,7 +66,7 @@ export class L2ToL1Message { */ static fromBuffer(buffer: Buffer | BufferReader): L2ToL1Message { const reader = BufferReader.asReader(buffer); - return new L2ToL1Message(reader.readObject(EthAddress), reader.readObject(Fr)); + return new L2ToL1Message(reader.readObject(EthAddress), reader.readObject(Fr), reader.readNumber()); } /** @@ -71,6 +74,31 @@ export class L2ToL1Message { * @returns True if both recipient and content are zero. 
*/ isEmpty(): boolean { - return this.recipient.isZero() && this.content.isZero(); + return this.recipient.isZero() && this.content.isZero() && !this.counter; + } +} + +export class ScopedL2ToL1Message { + constructor(public message: L2ToL1Message, public contractAddress: AztecAddress) {} + + static empty() { + return new ScopedL2ToL1Message(L2ToL1Message.empty(), AztecAddress.ZERO); + } + + equals(other: ScopedL2ToL1Message): boolean { + return this.message.equals(other.message) && this.contractAddress.equals(other.contractAddress); + } + + toBuffer(): Buffer { + return serializeToBuffer(this.message, this.contractAddress); + } + + static fromBuffer(buffer: Buffer | BufferReader) { + const reader = BufferReader.asReader(buffer); + return new ScopedL2ToL1Message(reader.readObject(L2ToL1Message), reader.readObject(AztecAddress)); + } + + isEmpty(): boolean { + return this.message.isEmpty() && this.contractAddress.isZero(); } } diff --git a/yarn-project/circuits.js/src/structs/note_hash.ts b/yarn-project/circuits.js/src/structs/note_hash.ts index dfe63e8c720b..824b788e2757 100644 --- a/yarn-project/circuits.js/src/structs/note_hash.ts +++ b/yarn-project/circuits.js/src/structs/note_hash.ts @@ -1,3 +1,4 @@ +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -35,38 +36,54 @@ export class NoteHash { toString(): string { return `value=${this.value} counter=${this.counter}`; } + + scope(nullifierCounter: number, contractAddress: AztecAddress) { + return new ScopedNoteHash(this, nullifierCounter, contractAddress); + } } -export class NoteHashContext implements Ordered { - constructor(public value: Fr, public counter: number, public nullifierCounter: number) {} +export class ScopedNoteHash implements Ordered { + constructor(public noteHash: NoteHash, public nullifierCounter: number, public contractAddress: AztecAddress) 
{} + + get counter() { + return this.noteHash.counter; + } + + get value() { + return this.noteHash.value; + } toFields(): Fr[] { - return [this.value, new Fr(this.counter), new Fr(this.nullifierCounter)]; + return [...this.noteHash.toFields(), new Fr(this.nullifierCounter), this.contractAddress.toField()]; } static fromFields(fields: Fr[] | FieldReader) { const reader = FieldReader.asReader(fields); - return new NoteHashContext(reader.readField(), reader.readU32(), reader.readU32()); + return new ScopedNoteHash( + reader.readObject(NoteHash), + reader.readU32(), + AztecAddress.fromField(reader.readField()), + ); } isEmpty() { - return this.value.isZero() && !this.counter && !this.nullifierCounter; + return this.noteHash.isEmpty() && !this.nullifierCounter && this.contractAddress.isZero(); } static empty() { - return new NoteHashContext(Fr.zero(), 0, 0); + return new ScopedNoteHash(NoteHash.empty(), 0, AztecAddress.ZERO); } toBuffer(): Buffer { - return serializeToBuffer(this.value, this.counter, this.nullifierCounter); + return serializeToBuffer(this.noteHash, this.nullifierCounter, this.contractAddress); } static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); - return new NoteHashContext(Fr.fromBuffer(reader), reader.readNumber(), reader.readNumber()); + return new ScopedNoteHash(NoteHash.fromBuffer(reader), reader.readNumber(), AztecAddress.fromBuffer(reader)); } toString(): string { - return `value=${this.value} counter=${this.counter} nullifierCounter=${this.nullifierCounter}`; + return `noteHash=${this.noteHash} nullifierCounter=${this.nullifierCounter} contractAddress=${this.contractAddress}`; } } diff --git a/yarn-project/circuits.js/src/structs/nullifier.ts b/yarn-project/circuits.js/src/structs/nullifier.ts index 176628d5e1d6..7f1e73477e1d 100644 --- a/yarn-project/circuits.js/src/structs/nullifier.ts +++ b/yarn-project/circuits.js/src/structs/nullifier.ts @@ -1,3 +1,4 @@ +import { AztecAddress } from 
'@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -35,4 +36,54 @@ export class Nullifier implements Ordered { toString(): string { return `value=${this.value} counter=${this.counter} noteHash=${this.noteHash}`; } + + scope(contractAddress: AztecAddress) { + return new ScopedNullifier(this, contractAddress); + } +} + +export class ScopedNullifier implements Ordered { + constructor(public nullifier: Nullifier, public contractAddress: AztecAddress) {} + + get counter() { + return this.nullifier.counter; + } + + get value() { + return this.nullifier.value; + } + + get nullifiedNoteHash() { + return this.nullifier.noteHash; + } + + toFields(): Fr[] { + return [...this.nullifier.toFields(), this.contractAddress.toField()]; + } + + static fromFields(fields: Fr[] | FieldReader) { + const reader = FieldReader.asReader(fields); + return new ScopedNullifier(reader.readObject(Nullifier), AztecAddress.fromField(reader.readField())); + } + + isEmpty() { + return this.nullifier.isEmpty() && this.contractAddress.isZero(); + } + + static empty() { + return new ScopedNullifier(Nullifier.empty(), AztecAddress.ZERO); + } + + toBuffer(): Buffer { + return serializeToBuffer(this.nullifier, this.contractAddress); + } + + static fromBuffer(buffer: Buffer | BufferReader) { + const reader = BufferReader.asReader(buffer); + return new ScopedNullifier(Nullifier.fromBuffer(reader), AztecAddress.fromBuffer(reader)); + } + + toString(): string { + return `nullifier=${this.nullifier} contractAddress=${this.contractAddress}`; + } } diff --git a/yarn-project/circuits.js/src/structs/nullifier_key_validation_request.ts b/yarn-project/circuits.js/src/structs/nullifier_key_validation_request.ts index c145e0d2e821..2d1fa9813b5e 100644 --- a/yarn-project/circuits.js/src/structs/nullifier_key_validation_request.ts +++ 
b/yarn-project/circuits.js/src/structs/nullifier_key_validation_request.ts @@ -3,8 +3,8 @@ import { Fr, Point } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { - NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH, NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH, + SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH, } from '../constants.gen.js'; /** @@ -58,61 +58,44 @@ export class NullifierKeyValidationRequest { /** * Request for validating a nullifier key pair used in the app. */ -export class NullifierKeyValidationRequestContext { - constructor( - /** - * Public key of the nullifier key (Npk_m). - */ - public readonly masterNullifierPublicKey: Point, - /** - * App-siloed nullifier secret key (nsk_app*). - */ - public readonly appNullifierSecretKey: Fr, - /** - * The storage contract address the nullifier key is for. - */ - public readonly contractAddress: AztecAddress, - ) {} +export class ScopedNullifierKeyValidationRequest { + constructor(public readonly request: NullifierKeyValidationRequest, public readonly contractAddress: AztecAddress) {} toBuffer() { - return serializeToBuffer(this.masterNullifierPublicKey, this.appNullifierSecretKey, this.contractAddress); + return serializeToBuffer(this.request, this.contractAddress); } static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); - return new NullifierKeyValidationRequestContext( - Point.fromBuffer(reader), - Fr.fromBuffer(reader), + return new ScopedNullifierKeyValidationRequest( + NullifierKeyValidationRequest.fromBuffer(reader), AztecAddress.fromBuffer(reader), ); } toFields(): Fr[] { - const fields = [this.masterNullifierPublicKey.toFields(), this.appNullifierSecretKey, this.contractAddress].flat(); - if (fields.length !== NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH) { + const fields = [...this.request.toFields(), this.contractAddress]; + if (fields.length !== 
SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH) { throw new Error( - `Invalid number of fields for NullifierKeyValidationRequestContext. Expected ${NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH}, got ${fields.length}`, + `Invalid number of fields for ScopedNullifierKeyValidationRequest. Expected ${SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH}, got ${fields.length}`, ); } return fields; } - static fromFields(fields: Fr[] | FieldReader): NullifierKeyValidationRequestContext { + static fromFields(fields: Fr[] | FieldReader): ScopedNullifierKeyValidationRequest { const reader = FieldReader.asReader(fields); - return new NullifierKeyValidationRequestContext( - Point.fromFields(reader), - reader.readField(), + return new ScopedNullifierKeyValidationRequest( + NullifierKeyValidationRequest.fromFields(reader), AztecAddress.fromFields(reader), ); } isEmpty() { - return ( - this.masterNullifierPublicKey.isZero() && this.appNullifierSecretKey.isZero() && this.contractAddress.isZero() - ); + return this.request.isEmpty() && this.contractAddress.isZero(); } static empty() { - return new NullifierKeyValidationRequestContext(Point.ZERO, Fr.ZERO, AztecAddress.ZERO); + return new ScopedNullifierKeyValidationRequest(NullifierKeyValidationRequest.empty(), AztecAddress.ZERO); } } diff --git a/yarn-project/circuits.js/src/structs/read_request.ts b/yarn-project/circuits.js/src/structs/read_request.ts index 1f47f967c6d8..60127af7c0d8 100644 --- a/yarn-project/circuits.js/src/structs/read_request.ts +++ b/yarn-project/circuits.js/src/structs/read_request.ts @@ -60,43 +60,42 @@ export class ReadRequest { static empty(): ReadRequest { return new ReadRequest(Fr.zero(), 0); } + + scope(contractAddress: AztecAddress) { + return new ScopedReadRequest(this, contractAddress); + } } /** * ReadRequest with context of the contract emitting the request. */ -export class ReadRequestContext { - constructor( - /** - * The value being read. - */ - public value: Fr, - /** - * The counter. 
- */ - public counter: number, - /** - * The address of the contract emitting the request. - */ - public contractAddress: AztecAddress, - ) {} +export class ScopedReadRequest { + constructor(public readRequest: ReadRequest, public contractAddress: AztecAddress) {} + + get value() { + return this.readRequest.value; + } + + get counter() { + return this.readRequest.counter; + } /** * Serialize this as a buffer. * @returns The buffer. */ toBuffer(): Buffer { - return serializeToBuffer(this.value, this.counter, this.contractAddress); + return serializeToBuffer(this.readRequest, this.contractAddress); } /** * Deserializes from a buffer or reader, corresponding to a write in cpp. * @param buffer - Buffer or reader to read from. - * @returns A new instance of ReadRequestContext. + * @returns A new instance of ScopedReadRequest. */ static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); - return new ReadRequestContext(Fr.fromBuffer(reader), reader.readNumber(), AztecAddress.fromBuffer(reader)); + return new ScopedReadRequest(ReadRequest.fromBuffer(reader), AztecAddress.fromBuffer(reader)); } /** @@ -104,12 +103,12 @@ export class ReadRequestContext { * @returns The array of fields. */ toFields(): Fr[] { - return [this.value, new Fr(this.counter), this.contractAddress.toField()]; + return [...this.readRequest.toFields(), this.contractAddress.toField()]; } static fromFields(fields: Fr[] | FieldReader) { const reader = FieldReader.asReader(fields); - return new ReadRequestContext(reader.readField(), reader.readU32(), AztecAddress.fromField(reader.readField())); + return new ScopedReadRequest(reader.readObject(ReadRequest), AztecAddress.fromField(reader.readField())); } /** @@ -117,14 +116,14 @@ export class ReadRequestContext { * @returns True if the value, note hash and counter are all zero. 
*/ isEmpty() { - return this.value.isZero() && !this.counter && this.contractAddress.isZero(); + return this.readRequest.isEmpty() && this.contractAddress.isZero(); } /** * Returns an empty instance of side-effect. * @returns Side-effect with value, note hash and counter being zero. */ - static empty(): ReadRequestContext { - return new ReadRequestContext(Fr.zero(), 0, AztecAddress.ZERO); + static empty(): ScopedReadRequest { + return new ScopedReadRequest(ReadRequest.empty(), AztecAddress.ZERO); } } diff --git a/yarn-project/circuits.js/src/structs/validation_requests.ts b/yarn-project/circuits.js/src/structs/validation_requests.ts index 839c08e78156..6d33a5b5865a 100644 --- a/yarn-project/circuits.js/src/structs/validation_requests.ts +++ b/yarn-project/circuits.js/src/structs/validation_requests.ts @@ -10,9 +10,9 @@ import { MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, } from '../constants.gen.js'; -import { NullifierKeyValidationRequestContext } from './nullifier_key_validation_request.js'; +import { ScopedNullifierKeyValidationRequest } from './nullifier_key_validation_request.js'; import { PublicDataRead } from './public_data_read_request.js'; -import { ReadRequestContext } from './read_request.js'; +import { ScopedReadRequest } from './read_request.js'; import { RollupValidationRequests } from './rollup_validation_requests.js'; /** @@ -28,23 +28,23 @@ export class ValidationRequests { /** * All the read requests made in this transaction. */ - public noteHashReadRequests: Tuple, + public noteHashReadRequests: Tuple, /** * All the nullifier read requests made in this transaction. */ - public nullifierReadRequests: Tuple, + public nullifierReadRequests: Tuple, /** * The nullifier read requests made in this transaction. */ public nullifierNonExistentReadRequests: Tuple< - ReadRequestContext, + ScopedReadRequest, typeof MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX >, /** * All the nullifier key validation requests made in this transaction. 
*/ public nullifierKeyValidationRequests: Tuple< - NullifierKeyValidationRequestContext, + ScopedNullifierKeyValidationRequest, typeof MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX >, /** @@ -77,10 +77,10 @@ export class ValidationRequests { const reader = BufferReader.asReader(buffer); return new ValidationRequests( reader.readObject(RollupValidationRequests), - reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ReadRequestContext), - reader.readArray(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ReadRequestContext), - reader.readArray(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ReadRequestContext), - reader.readArray(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, NullifierKeyValidationRequestContext), + reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ScopedReadRequest), + reader.readArray(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest), + reader.readArray(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ScopedReadRequest), + reader.readArray(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, ScopedNullifierKeyValidationRequest), reader.readArray(MAX_PUBLIC_DATA_READS_PER_TX, PublicDataRead), ); } @@ -97,10 +97,10 @@ export class ValidationRequests { static empty() { return new ValidationRequests( RollupValidationRequests.empty(), - makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ReadRequestContext.empty), - makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ReadRequestContext.empty), - makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ReadRequestContext.empty), - makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, NullifierKeyValidationRequestContext.empty), + makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ScopedReadRequest.empty), + makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest.empty), + makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ScopedReadRequest.empty), + makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, ScopedNullifierKeyValidationRequest.empty), makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, PublicDataRead.empty), ); } 
diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 2f48780c24fb..6dc5712a2faf 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -54,7 +54,6 @@ import { MAX_NULLIFIER_READ_REQUESTS_PER_CALL, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, - MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_DATA_HINTS, @@ -75,10 +74,8 @@ import { NUM_BASE_PARITY_PER_ROOT_PARITY, NUM_MSGS_PER_BASE_PARITY, NoteHash, - NoteHashContext, Nullifier, NullifierKeyValidationRequest, - NullifierKeyValidationRequestContext, NullifierLeafPreimage, NullifierNonExistentReadRequestHintsBuilder, NullifierReadRequestHintsBuilder, @@ -90,12 +87,9 @@ import { PartialStateReference, Point, PreviousRollupData, - PrivateAccumulatedData, PrivateCallData, PrivateCallStackItem, PrivateCircuitPublicInputs, - PrivateKernelCircuitPublicInputs, - PrivateKernelData, PrivateKernelTailCircuitPublicInputs, Proof, PublicAccumulatedData, @@ -116,13 +110,14 @@ import { RECURSIVE_PROOF_LENGTH, ROLLUP_VK_TREE_HEIGHT, ReadRequest, - ReadRequestContext, RevertCode, RollupTypes, RootParityInput, RootParityInputs, RootRollupInputs, RootRollupPublicInputs, + ScopedNullifierKeyValidationRequest, + ScopedReadRequest, SideEffect, StateDiffHints, StateReference, @@ -160,10 +155,6 @@ function makeNoteHash(seed: number) { return new NoteHash(fr(seed), seed + 1); } -function makeNoteHashContext(seed: number) { - return new NoteHashContext(fr(seed), seed + 1, seed + 2); -} - function makeNullifier(seed: number) { return new Nullifier(fr(seed), seed + 1, fr(seed + 2)); } @@ -207,8 +198,8 @@ function makeReadRequest(n: number): ReadRequest { return new ReadRequest(new Fr(BigInt(n)), n + 1); } -function makeReadRequestContext(n: number): ReadRequestContext { - return new ReadRequestContext(new 
Fr(BigInt(n)), n + 1, AztecAddress.fromBigInt(BigInt(n + 2))); +function makeScopedReadRequest(n: number): ScopedReadRequest { + return new ScopedReadRequest(makeReadRequest(n), AztecAddress.fromBigInt(BigInt(n + 2))); } /** @@ -220,13 +211,8 @@ function makeNullifierKeyValidationRequest(seed: number): NullifierKeyValidation return new NullifierKeyValidationRequest(makePoint(seed), fr(seed + 2)); } -/** - * Creates arbitrary NullifierKeyValidationRequestContext from the given seed. - * @param seed - The seed to use for generating the NullifierKeyValidationRequestContext. - * @returns A NullifierKeyValidationRequestContext. - */ -function makeNullifierKeyValidationRequestContext(seed: number): NullifierKeyValidationRequestContext { - return new NullifierKeyValidationRequestContext(makePoint(seed), fr(seed + 2), makeAztecAddress(seed + 4)); +function makeScopedNullifierKeyValidationRequest(seed: number): ScopedNullifierKeyValidationRequest { + return new ScopedNullifierKeyValidationRequest(makeNullifierKeyValidationRequest(seed), makeAztecAddress(seed + 4)); } /** @@ -284,10 +270,10 @@ export function makeContractStorageRead(seed = 1): ContractStorageRead { export function makeValidationRequests(seed = 1) { return new ValidationRequests( makeRollupValidationRequests(seed), - makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, makeReadRequestContext, seed + 0x80), - makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, makeReadRequestContext, seed + 0x90), - makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, makeReadRequestContext, seed + 0x95), - makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, makeNullifierKeyValidationRequestContext, seed + 0x100), + makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x80), + makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x90), + makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x95), + 
makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, makeScopedNullifierKeyValidationRequest, seed + 0x100), makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, makePublicDataRead, seed + 0xe00), ); } @@ -357,27 +343,6 @@ export function makePublicAccumulatedData(seed = 1, full = false): PublicAccumul ); } -/** - * Creates arbitrary accumulated data. - * @param seed - The seed to use for generating the accumulated data. - * @returns An accumulated data. - */ -export function makePrivateAccumulatedData(seed = 1, full = false) { - const tupleGenerator = full ? makeTuple : makeHalfFullTuple; - - return new PrivateAccumulatedData( - tupleGenerator(MAX_NEW_NOTE_HASHES_PER_TX, makeNoteHashContext, seed + 0x120, NoteHashContext.empty), - tupleGenerator(MAX_NEW_NULLIFIERS_PER_TX, makeNullifier, seed + 0x200, Nullifier.empty), - tupleGenerator(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x600, Fr.zero), - tupleGenerator(MAX_ENCRYPTED_LOGS_PER_TX, makeNewSideEffect, seed + 0x700, SideEffect.empty), // encrypted logs hashes - tupleGenerator(MAX_UNENCRYPTED_LOGS_PER_TX, makeNewSideEffect, seed + 0x800, SideEffect.empty), // unencrypted logs hashes - fr(seed + 0x900), // encrypted_log_preimages_length - fr(seed + 0xa00), // unencrypted_log_preimages_length - tupleGenerator(MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, makeCallRequest, seed + 0x400, CallRequest.empty), - tupleGenerator(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, makeCallRequest, seed + 0x500, CallRequest.empty), - ); -} - /** * Creates arbitrary aggregation object. * @param seed - The seed to use for generating the aggregation object. @@ -474,22 +439,6 @@ export function makePublicKernelCircuitPublicInputs( ); } -/** - * Creates arbitrary private kernel inner circuit public inputs. - * @param seed - The seed to use for generating the kernel circuit public inputs. - * @returns Private kernel circuit public inputs. 
- */ -export function makePrivateKernelCircuitPublicInputs(seed = 1, full = true): PrivateKernelCircuitPublicInputs { - return new PrivateKernelCircuitPublicInputs( - makeAggregationObject(seed), - fr(seed + 0x100), - makeValidationRequests(seed), - makePrivateAccumulatedData(seed, full), - makeConstantData(seed + 0x100), - makeCallRequest(seed + 0x200), - ); -} - /** * Creates arbitrary private kernel tail circuit public inputs. * @param seed - The seed to use for generating the kernel circuit public inputs. @@ -642,22 +591,6 @@ export function makeRollupKernelData(seed = 1, kernelPublicInputs?: KernelCircui ); } -/** - * Makes arbitrary previous kernel data. - * @param seed - The seed to use for generating the previous kernel data. - * @param inputs - The kernel public inputs to use for generating the private kernel inner data. - * @returns A previous kernel data. - */ -export function makePrivateKernelInnerData(seed = 1, inputs?: PrivateKernelCircuitPublicInputs): PrivateKernelData { - return new PrivateKernelData( - inputs ?? makePrivateKernelCircuitPublicInputs(seed, true), - new Proof(Buffer.alloc(16, seed + 0x80)), - makeVerificationKey(), - 0x42, - makeTuple(VK_TREE_HEIGHT, fr, 0x1000), - ); -} - /** * Makes arbitrary proof. * @param seed - The seed to use for generating/mocking the proof. 
@@ -1107,7 +1040,7 @@ export function makeL2ToL1Message(seed = 0): L2ToL1Message { const recipient = EthAddress.fromField(new Fr(seed)); const content = new Fr(seed + 1); - return new L2ToL1Message(recipient, content); + return new L2ToL1Message(recipient, content, seed + 2); } /** diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index 7085b410cc55..ea6fdbe5ae77 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -15,16 +15,15 @@ E2E_COMPOSE_TEST: ELSE LET CMD="docker-compose" END - # In CI, we do an optimization to push these images to docker once - # We still want the default code path to work with no faff locally - # To not rebuild unnecessarily, we pass --skip_build=true in CI - IF [ $skip_build != "true" ] + # Let docker compose know about the pushed tags above + ENV AZTEC_DOCKER_TAG=$(git rev-parse HEAD) + # Optimize to not cause serial behavior if image already exists + IF ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep "aztecprotocol/aztec:$AZTEC_DOCKER_TAG" || \ + ! 
docker image ls --format '{{.Repository}}:{{.Tag}}' | grep "aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG" WAIT BUILD ../+export-e2e-test-images END END - # Let docker compose know about the pushed tags above - ENV AZTEC_DOCKER_TAG=$(git rev-parse HEAD) # Run our docker compose, ending whenever sandbox ends, filtering out noisy eth_getLogs RUN $CMD -p $project_name -f $compose_file up --exit-code-from=end-to-end --force-recreate @@ -42,10 +41,133 @@ UPLOAD_LOGS: ENV COMMIT_HASH=$COMMIT_HASH RUN --secret AWS_ACCESS_KEY_ID --secret AWS_SECRET_ACCESS_KEY /usr/src/scripts/logs/upload_logs_to_s3.sh /usr/var/log -# Define e2e tests -e2e-tests: +e2e-2-pxes: + FROM ../+end-to-end + RUN yarn test ./src/e2e_2_pxes.test.ts + +e2e-account-contracts: + FROM ../+end-to-end + RUN yarn test ./src/e2e_account_contracts.test.ts + +e2e-auth-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_auth_contract.test.ts + +e2e-authwit: + FROM ../+end-to-end + RUN yarn test ./src/e2e_authwit.test.ts + +e2e-avm-simulator: + FROM ../+end-to-end + RUN yarn test ./src/e2e_avm_simulator.test.ts + +e2e-blacklist-token-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_blacklist_token_contract + +e2e-block-building: + FROM ../+end-to-end + RUN yarn test ./src/e2e_block_building.test.ts + +e2e-card-game: + FROM ../+end-to-end + RUN yarn test ./src/e2e_card_game.test.ts + +e2e-cheat-codes: + FROM ../+end-to-end + RUN yarn test ./src/e2e_cheat_codes.test.ts + +e2e-counter-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_counter_contract.test.ts + +e2e-cross-chain-messaging: + FROM ../+end-to-end + RUN yarn test ./src/e2e_cross_chain_messaging.test.ts + +e2e-crowdfunding-and-claim: + FROM ../+end-to-end + RUN yarn test ./src/e2e_crowdfunding_and_claim.test.ts + +e2e-delegate-calls: + FROM ../+end-to-end + RUN yarn test ./src/e2e_delegate_calls + +e2e-deploy-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_deploy_contract + +e2e-encryption: + FROM ../+end-to-end + RUN yarn 
test ./src/e2e_encryption.test.ts + +e2e-escrow-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_escrow_contract.test.ts + +e2e-key-registry: FROM ../+end-to-end - RUN yarn test ./src/e2e + RUN yarn test ./src/e2e_key_registry.test.ts + +e2e-lending-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_lending_contract.test.ts + +e2e-max-block-number: + FROM ../+end-to-end + RUN yarn test ./src/e2e_max_block_number.test.ts + +e2e-multiple-accounts-1-enc-key: + FROM ../+end-to-end + RUN yarn test ./src/e2e_multiple_accounts_1_enc_key.test.ts + +e2e-nested-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_nested_contract + +e2e-non-contract-account: + FROM ../+end-to-end + RUN yarn test ./src/e2e_non_contract_account.test.ts + +e2e-note-getter: + FROM ../+end-to-end + RUN yarn test ./src/e2e_note_getter.test.ts + +e2e-ordering: + FROM ../+end-to-end + RUN yarn test ./src/e2e_ordering.test.ts + +e2e-outbox: + FROM ../+end-to-end + RUN yarn test ./src/e2e_outbox.test.ts + +e2e-pending-note-hashes-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_pending_note_hashes_contract.test.ts + +e2e-private-voting-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_private_voting_contract.test.ts + +e2e-public-cross-chain-messaging: + FROM ../+end-to-end + RUN yarn test ./src/e2e_public_cross_chain_messaging + +e2e-public-to-private-messaging: + FROM ../+end-to-end + RUN yarn test ./src/e2e_public_to_private_messaging.test.ts + +e2e-state-vars: + FROM ../+end-to-end + RUN yarn test ./src/e2e_state_vars.test.ts + +e2e-static-calls: + FROM ../+end-to-end + RUN yarn test ./src/e2e_static_calls.test.ts + +e2e-token-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_token_contract flakey-e2e-tests: FROM ../+end-to-end @@ -103,3 +225,11 @@ bench-tx-size: ARG COMMIT_HASH DO +E2E_COMPOSE_TEST --test=benchmarks/bench_tx_size_fees.test.ts --debug="aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" 
--enable_gas=1 --compose_file=./scripts/docker-compose-no-sandbox.yml DO +UPLOAD_LOGS --e2e_mode=$e2e_mode --PULL_REQUEST=$PULL_REQUEST --BRANCH=$BRANCH --COMMIT_HASH=$COMMIT_HASH + +bench-proving: + ARG e2e_mode=local + ARG PULL_REQUEST + ARG BRANCH + ARG COMMIT_HASH + DO +E2E_COMPOSE_TEST --test=bench_proving --debug="aztec:benchmarks:*,aztec:prover*,aztec:bb*" --e2e_mode=$e2e_mode --enable_gas=1 --compose_file=./scripts/docker-compose-no-sandbox.yml + DO +UPLOAD_LOGS --e2e_mode=$e2e_mode --PULL_REQUEST=$PULL_REQUEST --BRANCH=$BRANCH --COMMIT_HASH=$COMMIT_HASH diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index fcc621d59adf..ea3a6893cfd6 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -15,7 +15,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "LOG_LEVEL=${LOG_LEVEL:-silent} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:integration": "concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", "test:integration:run": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --no-cache --runInBand --config jest.integration.config.json" }, diff --git a/yarn-project/end-to-end/package.local.json b/yarn-project/end-to-end/package.local.json index 6e3666e9fa6f..a998d042e73c 100644 --- a/yarn-project/end-to-end/package.local.json +++ b/yarn-project/end-to-end/package.local.json @@ -2,6 +2,6 @@ "scripts": { "build": "yarn clean && tsc -b && webpack", "formatting": "run -T prettier --check ./src 
\"!src/web/main.js\" && run -T eslint ./src", - "test": "LOG_LEVEL=${LOG_LEVEL:-silent} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit" + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit" } } diff --git a/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts new file mode 100644 index 000000000000..954c88c26536 --- /dev/null +++ b/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts @@ -0,0 +1,124 @@ +import { type AztecNodeService } from '@aztec/aztec-node'; +import { type AccountWallet, EthAddress, PublicFeePaymentMethod, TxStatus } from '@aztec/aztec.js'; +import { GasSettings } from '@aztec/circuits.js'; +import { FPCContract, GasTokenContract, TestContract, TokenContract } from '@aztec/noir-contracts.js'; +import { getCanonicalGasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { ProverPool } from '@aztec/prover-client/prover-pool'; + +import { jest } from '@jest/globals'; + +import { getACVMConfig } from '../fixtures/get_acvm_config.js'; +import { getBBConfig } from '../fixtures/get_bb_config.js'; +import { type EndToEndContext, publicDeployAccounts, setup } from '../fixtures/utils.js'; + +jest.setTimeout(600_000); + +const txTimeoutSec = 600; + +describe('benchmarks/proving', () => { + let ctx: EndToEndContext; + let wallet: AccountWallet; + let testContract: TestContract; + let tokenContract: TokenContract; + let fpContract: FPCContract; + let acvmCleanup: () => Promise; + let bbCleanup: () => Promise; + let proverPool: ProverPool; + + // setup the environment quickly using fake proofs + beforeAll(async () => { + ctx = await setup( + 1, + { + // do setup with fake proofs + realProofs: false, + proverAgents: 4, + proverAgentPollInterval: 10, + minTxsPerBlock: 1, + }, + {}, + 
true, // enable gas + ); + + wallet = ctx.wallet; + + await publicDeployAccounts(wallet, ctx.wallets); + + testContract = await TestContract.deploy(wallet).send().deployed(); + tokenContract = await TokenContract.deploy(wallet, wallet.getAddress(), 'test', 't', 18).send().deployed(); + const gas = await GasTokenContract.at( + getCanonicalGasTokenAddress(ctx.deployL1ContractsValues.l1ContractAddresses.gasPortalAddress), + wallet, + ); + fpContract = await FPCContract.deploy(wallet, tokenContract.address, gas.address).send().deployed(); + + await Promise.all([ + gas.methods.mint_public(fpContract.address, 1e12).send().wait(), + tokenContract.methods.mint_public(wallet.getAddress(), 1e12).send().wait(), + ]); + }); + + // remove the fake prover and setup the real one + beforeAll(async () => { + const [acvmConfig, bbConfig] = await Promise.all([getACVMConfig(ctx.logger), getBBConfig(ctx.logger)]); + if (!acvmConfig || !bbConfig) { + throw new Error('Missing ACVM or BB config'); + } + + acvmCleanup = acvmConfig.cleanup; + bbCleanup = bbConfig.cleanup; + + proverPool = ProverPool.nativePool( + { + ...acvmConfig, + ...bbConfig, + }, + 4, + 10, + ); + + ctx.logger.info('Stopping fake provers'); + await ctx.aztecNode.setConfig({ + // stop the fake provers + proverAgents: 0, + // 4-tx blocks so that we have at least one merge level + minTxsPerBlock: 4, + }); + + ctx.logger.info('Starting real provers'); + await proverPool.start((ctx.aztecNode as AztecNodeService).getProver().getProvingJobSource()); + }); + + afterAll(async () => { + await proverPool.stop(); + await ctx.teardown(); + await acvmCleanup(); + await bbCleanup(); + }); + + it('builds a full block', async () => { + const txs = [ + // fully private tx + testContract.methods.emit_nullifier(42).send(), + // tx with setup, app, teardown + testContract.methods.emit_unencrypted(43).send({ + fee: { + gasSettings: GasSettings.default(), + paymentMethod: new PublicFeePaymentMethod(tokenContract.address, fpContract.address, 
wallet), + }, + }), + // tx with messages + testContract.methods.create_l2_to_l1_message_public(45, 46, EthAddress.random()).send(), + // tx with private and public exec + testContract.methods.set_tx_max_block_number(100, true).send({ + fee: { + gasSettings: GasSettings.default(), + paymentMethod: new PublicFeePaymentMethod(tokenContract.address, fpContract.address, wallet), + }, + }), + ]; + + const receipts = await Promise.all(txs.map(tx => tx.wait({ timeout: txTimeoutSec }))); + expect(receipts.every(r => r.status === TxStatus.MINED)).toBe(true); + }); +}); diff --git a/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts b/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts index ec6c32d11a6c..ed244b2da643 100644 --- a/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts +++ b/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts @@ -4,13 +4,8 @@ import { type BBNativeProofCreator } from '@aztec/pxe'; import { ClientProverTest } from './client_prover_test.js'; -const TIMEOUT = 300_000; - -async function verifyProof(_1: ClientProtocolArtifact, _2: Tx, _3: BBNativeProofCreator) { - // TODO(@PhilWindle): Will verify proof once the circuits are fixed - await Promise.resolve(); - //const result = await proofCreator.verifyProof(circuitType, tx.proof); - expect(true).toBeTruthy(); +async function verifyProof(circuitType: ClientProtocolArtifact, tx: Tx, proofCreator: BBNativeProofCreator) { + await expect(proofCreator.verifyProof(circuitType, tx.proof)).resolves.not.toThrow(); } describe('client_prover_integration', () => { @@ -32,47 +27,39 @@ describe('client_prover_integration', () => { await t.tokenSim.check(); }); - it( - 'private transfer less than balance', - async () => { - logger.info( - `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_private.selector}`, - ); - const balance0 = await 
provenAsset.methods.balance_of_private(accounts[0].address).simulate(); - const amount = balance0 / 2n; - expect(amount).toBeGreaterThan(0n); - const interaction = provenAsset.methods.transfer(accounts[0].address, accounts[1].address, amount, 0); - const provenTx = await interaction.prove(); + it('private transfer less than balance', async () => { + logger.info( + `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_private.selector}`, + ); + const balance0 = await provenAsset.methods.balance_of_private(accounts[0].address).simulate(); + const amount = balance0 / 2n; + expect(amount).toBeGreaterThan(0n); + const interaction = provenAsset.methods.transfer(accounts[0].address, accounts[1].address, amount, 0); + const provenTx = await interaction.prove(); - // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! - logger.info(`Verifying kernel tail proof`); - await verifyProof('PrivateKernelTailArtifact', provenTx, proofCreator!); + // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! 
+ logger.info(`Verifying kernel tail proof`); + await verifyProof('PrivateKernelTailArtifact', provenTx, proofCreator!); - await interaction.send().wait(); - tokenSim.transferPrivate(accounts[0].address, accounts[1].address, amount); - }, - TIMEOUT, - ); + await interaction.send().wait(); + tokenSim.transferPrivate(accounts[0].address, accounts[1].address, amount); + }); - it( - 'public transfer less than balance', - async () => { - logger.info( - `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_public.selector}`, - ); - const balance0 = await provenAsset.methods.balance_of_public(accounts[0].address).simulate(); - const amount = balance0 / 2n; - expect(amount).toBeGreaterThan(0n); - const interaction = provenAsset.methods.transfer(accounts[0].address, accounts[1].address, amount, 0); - const provenTx = await interaction.prove(); + it('public transfer less than balance', async () => { + logger.info( + `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_public.selector}`, + ); + const balance0 = await provenAsset.methods.balance_of_public(accounts[0].address).simulate(); + const amount = balance0 / 2n; + expect(amount).toBeGreaterThan(0n); + const interaction = provenAsset.methods.transfer_public(accounts[0].address, accounts[1].address, amount, 0); + const provenTx = await interaction.prove(); - // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! - logger.info(`Verifying kernel tail to public proof`); - await verifyProof('PrivateKernelTailToPublicArtifact', provenTx, proofCreator!); + // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! 
+ logger.info(`Verifying kernel tail to public proof`); + await verifyProof('PrivateKernelTailToPublicArtifact', provenTx, proofCreator!); - await interaction.send().wait(); - tokenSim.transferPublic(accounts[0].address, accounts[1].address, amount); - }, - TIMEOUT, - ); + await interaction.send().wait(); + tokenSim.transferPublic(accounts[0].address, accounts[1].address, amount); + }); }); diff --git a/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts b/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts index 346147d92d0e..0303dcecdab2 100644 --- a/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts +++ b/yarn-project/end-to-end/src/client_prover_integration/client_prover_test.ts @@ -15,9 +15,8 @@ import { import { TokenContract } from '@aztec/noir-contracts.js'; import { BBNativeProofCreator, type PXEService } from '@aztec/pxe'; -import * as fs from 'fs/promises'; - import { waitRegisteredAccountSynced } from '../benchmarks/utils.js'; +import { getBBConfig } from '../fixtures/get_bb_config.js'; import { type ISnapshotManager, type SubsystemsContext, @@ -25,7 +24,7 @@ import { createSnapshotManager, publicDeployAccounts, } from '../fixtures/snapshot_manager.js'; -import { getBBConfig, setupPXEService } from '../fixtures/utils.js'; +import { setupPXEService } from '../fixtures/utils.js'; import { TokenSimulator } from '../simulators/token_simulator.js'; const { E2E_DATA_PATH: dataPath } = process.env; @@ -55,7 +54,7 @@ export class ClientProverTest { fullProverPXE!: PXEService; provenAsset!: TokenContract; provenPXETeardown?: () => Promise; - private directoryToCleanup?: string; + private bbConfigCleanup?: () => Promise; proofCreator?: BBNativeProofCreator; constructor(testName: string) { @@ -121,13 +120,13 @@ export class ClientProverTest { // Configure a full prover PXE const bbConfig = await getBBConfig(this.logger); - this.directoryToCleanup = bbConfig?.directoryToCleanup; + 
this.bbConfigCleanup = bbConfig?.cleanup; - if (!bbConfig?.bbWorkingDirectory || !bbConfig?.expectedBBPath) { + if (!bbConfig?.bbWorkingDirectory || !bbConfig?.bbBinaryPath) { throw new Error(`Test must be run with BB native configuration`); } - this.proofCreator = new BBNativeProofCreator(bbConfig?.expectedBBPath, bbConfig?.bbWorkingDirectory); + this.proofCreator = new BBNativeProofCreator(bbConfig.bbBinaryPath, bbConfig.bbWorkingDirectory); this.logger.debug(`Main setup completed, initializing full prover PXE...`); ({ pxe: this.fullProverPXE, teardown: this.provenPXETeardown } = await setupPXEService( @@ -135,7 +134,7 @@ export class ClientProverTest { this.aztecNode, { proverEnabled: false, - bbBinaryPath: bbConfig?.expectedBBPath, + bbBinaryPath: bbConfig?.bbBinaryPath, bbWorkingDirectory: bbConfig?.bbWorkingDirectory, }, undefined, @@ -180,9 +179,7 @@ export class ClientProverTest { // Cleanup related to the second 'full prover' PXE await this.provenPXETeardown?.(); - if (this.directoryToCleanup) { - await fs.rm(this.directoryToCleanup, { recursive: true, force: true }); - } + await this.bbConfigCleanup?.(); } async addPendingShieldNoteToPXE(accountIndex: number, amount: bigint, secretHash: Fr, txHash: TxHash) { diff --git a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts index 4f7f20cbe87a..7e185f169e04 100644 --- a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts +++ b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts @@ -17,7 +17,7 @@ import { jest } from '@jest/globals'; import { expectsNumOfEncryptedLogsInTheLastBlockToBe, setup, setupPXEService } from './fixtures/utils.js'; -const TIMEOUT = 90_000; +const TIMEOUT = 120_000; describe('e2e_2_pxes', () => { jest.setTimeout(TIMEOUT); diff --git a/yarn-project/end-to-end/src/e2e_auth_contract.test.ts b/yarn-project/end-to-end/src/e2e_auth_contract.test.ts index 2ccf9ff3493d..4702797a4d2a 100644 --- a/yarn-project/end-to-end/src/e2e_auth_contract.test.ts +++ 
b/yarn-project/end-to-end/src/e2e_auth_contract.test.ts @@ -51,6 +51,12 @@ describe('e2e_auth_contract', () => { expect(await contract.methods.get_authorized().simulate()).toEqual(AztecAddress.ZERO); }); + it('non-admin cannot set authorized', async () => { + await expect( + contract.withWallet(other).methods.set_authorized(authorized.getAddress()).send().wait(), + ).rejects.toThrow('caller is not admin'); + }); + it('admin sets authorized', async () => { await contract.withWallet(admin).methods.set_authorized(authorized.getAddress()).send().wait(); @@ -68,7 +74,9 @@ describe('e2e_auth_contract', () => { it('after a while the scheduled change is effective and can be used with max block restriction', async () => { await mineBlocks(DELAY); // This gets us past the block of change + // docs:start:simulate_public_getter expect(await contract.methods.get_authorized().simulate()).toEqual(authorized.getAddress()); + // docs:end:simulate_public_getter const interaction = contract.withWallet(authorized).methods.do_private_authorized_thing(); diff --git a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts index 435bc3e8be0e..3acebe956a05 100644 --- a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts +++ b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts @@ -1,4 +1,4 @@ -import { type AccountWallet, AztecAddress, Fr, FunctionSelector, TxStatus } from '@aztec/aztec.js'; +import { type AccountWallet, AztecAddress, BatchCall, Fr, FunctionSelector, TxStatus } from '@aztec/aztec.js'; import { GasSettings } from '@aztec/circuits.js'; import { AvmAcvmInteropTestContract, @@ -33,6 +33,14 @@ describe('e2e_avm_simulator', () => { avmContract = await AvmTestContract.deploy(wallet).send().deployed(); }); + describe('Assertions', () => { + it('Processes assertions in the PXE', async () => { + await expect(avmContract.methods.assert_nullifier_exists(123).simulate()).rejects.toThrow( + "Assertion failed: Nullifier doesn't 
exist!", + ); + }); + }); + describe('Gas metering', () => { it('Tracks L2 gas usage on simulation', async () => { const request = await avmContract.methods.add_args_return(20n, 30n).create(); @@ -60,6 +68,19 @@ describe('e2e_avm_simulator', () => { await avmContract.methods.add_storage_map(address, 100).send().wait(); expect(await avmContract.methods.view_storage_map(address).simulate()).toEqual(200n); }); + + it('Preserves storage across enqueued public calls', async () => { + const address = AztecAddress.fromBigInt(9090n); + // This will create 1 tx with 2 public calls in it. + await new BatchCall(wallet, [ + avmContract.methods.set_storage_map(address, 100).request(), + avmContract.methods.add_storage_map(address, 100).request(), + ]) + .send() + .wait(); + // On a separate tx, we check the result. + expect(await avmContract.methods.view_storage_map(address).simulate()).toEqual(200n); + }); }); describe('Contract instance', () => { @@ -85,6 +106,18 @@ describe('e2e_avm_simulator', () => { tx = await avmContract.methods.assert_nullifier_exists(nullifier).send().wait(); expect(tx.status).toEqual(TxStatus.MINED); }); + + it('Emit and check in separate enqueued calls but same tx', async () => { + const nullifier = new Fr(123456); + + // This will create 1 tx with 2 public calls in it. 
+ await new BatchCall(wallet, [ + avmContract.methods.new_nullifier(nullifier).request(), + avmContract.methods.assert_nullifier_exists(nullifier).request(), + ]) + .send() + .wait(); + }); }); }); diff --git a/yarn-project/end-to-end/src/e2e_card_game.test.ts b/yarn-project/end-to-end/src/e2e_card_game.test.ts index 56f7a547f412..f0949b166635 100644 --- a/yarn-project/end-to-end/src/e2e_card_game.test.ts +++ b/yarn-project/end-to-end/src/e2e_card_game.test.ts @@ -144,7 +144,9 @@ describe('e2e_card_game', () => { it('should be able to buy packs', async () => { const seed = 27n; + // docs:start:send_tx await contract.methods.buy_pack(seed).send().wait(); + // docs:end:send_tx const collection = await contract.methods.view_collection_cards(firstPlayer, 0).simulate({ from: firstPlayer }); const expected = getPackedCards(0, seed); expect(unwrapOptions(collection)).toMatchObject(expected); diff --git a/yarn-project/end-to-end/src/e2e_encryption.test.ts b/yarn-project/end-to-end/src/e2e_encryption.test.ts index a8d4e7b34be3..2cb4b3ea5289 100644 --- a/yarn-project/end-to-end/src/e2e_encryption.test.ts +++ b/yarn-project/end-to-end/src/e2e_encryption.test.ts @@ -1,5 +1,5 @@ -import { type Wallet } from '@aztec/aztec.js'; -import { Aes128 } from '@aztec/circuits.js/barretenberg'; +import { EncryptedLogBody, EncryptedLogHeader, Fr, GrumpkinScalar, Note, type Wallet } from '@aztec/aztec.js'; +import { Aes128, Grumpkin } from '@aztec/circuits.js/barretenberg'; import { TestContract } from '@aztec/noir-contracts.js'; import { randomBytes } from 'crypto'; @@ -8,6 +8,7 @@ import { setup } from './fixtures/utils.js'; describe('e2e_encryption', () => { const aes128 = new Aes128(); + let grumpkin: Grumpkin; let wallet: Wallet; let teardown: () => Promise; @@ -17,7 +18,8 @@ describe('e2e_encryption', () => { beforeAll(async () => { ({ teardown, wallet } = await setup()); contract = await TestContract.deploy(wallet).send().deployed(); - }); + grumpkin = new Grumpkin(); + }, 120_000); 
afterAll(() => teardown()); @@ -52,4 +54,49 @@ describe('e2e_encryption', () => { expect(ciphertext).toEqual(expectedCiphertext); }); + + it('encrypts header', async () => { + const ephSecretKey = GrumpkinScalar.random(); + const viewingSecretKey = GrumpkinScalar.random(); + + const ephPubKey = grumpkin.mul(Grumpkin.generator, ephSecretKey); + const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); + const header = new EncryptedLogHeader(contract.address); + + const encrypted = await contract.methods.compute_note_header_ciphertext(ephSecretKey, viewingPubKey).simulate(); + expect(Buffer.from(encrypted.map((x: bigint) => Number(x)))).toEqual( + header.computeCiphertext(ephSecretKey, viewingPubKey), + ); + + const recreated = EncryptedLogHeader.fromCiphertext(encrypted, viewingSecretKey, ephPubKey); + + expect(recreated.address).toEqual(contract.address); + }); + + it('encrypted body', async () => { + const ephSecretKey = GrumpkinScalar.random(); + const viewingSecretKey = GrumpkinScalar.random(); + + const ephPubKey = grumpkin.mul(Grumpkin.generator, ephSecretKey); + const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); + + const storageSlot = new Fr(1); + const noteTypeId = TestContract.artifact.notes['TestNote'].id; + const value = Fr.random(); + const note = new Note([value]); + + const body = new EncryptedLogBody(storageSlot, noteTypeId, note); + + const encrypted = await contract.methods + .compute_note_body_ciphertext(ephSecretKey, viewingPubKey, storageSlot, value) + .simulate(); + + expect(Buffer.from(encrypted.map((x: bigint) => Number(x)))).toEqual( + body.computeCiphertext(ephSecretKey, viewingPubKey), + ); + + const recreated = EncryptedLogBody.fromCiphertext(encrypted, viewingSecretKey, ephPubKey); + + expect(recreated.toBuffer()).toEqual(body.toBuffer()); + }); }); diff --git a/yarn-project/end-to-end/src/e2e_key_registry.test.ts b/yarn-project/end-to-end/src/e2e_key_registry.test.ts index 
48fb1fa90abe..c770ceaf9dd7 100644 --- a/yarn-project/end-to-end/src/e2e_key_registry.test.ts +++ b/yarn-project/end-to-end/src/e2e_key_registry.test.ts @@ -8,7 +8,7 @@ import { jest } from '@jest/globals'; import { publicDeployAccounts, setup } from './fixtures/utils.js'; -const TIMEOUT = 100_000; +const TIMEOUT = 120_000; const SHARED_MUTABLE_DELAY = 5; diff --git a/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts b/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts index 556726411137..a8c8349a6cec 100644 --- a/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts +++ b/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts @@ -13,14 +13,21 @@ const { } = process.env; // Determines if we have access to the acvm binary and a tmp folder for temp files -export async function getACVMConfig(logger: DebugLogger) { +export async function getACVMConfig(logger: DebugLogger): Promise< + | { + acvmWorkingDirectory: string; + acvmBinaryPath: string; + cleanup: () => Promise; + } + | undefined +> { try { - const expectedAcvmPath = ACVM_BINARY_PATH ? ACVM_BINARY_PATH : `../../noir/${NOIR_RELEASE_DIR}/acvm`; - await fs.access(expectedAcvmPath, fs.constants.R_OK); + const acvmBinaryPath = ACVM_BINARY_PATH ? ACVM_BINARY_PATH : `../../noir/${NOIR_RELEASE_DIR}/acvm`; + await fs.access(acvmBinaryPath, fs.constants.R_OK); const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`; await fs.mkdir(acvmWorkingDirectory, { recursive: true }); - logger.verbose(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); + logger.verbose(`Using native ACVM binary at ${acvmBinaryPath} with working directory ${acvmWorkingDirectory}`); const directoryToCleanup = ACVM_WORKING_DIRECTORY ? 
undefined : tempWorkingDirectory; @@ -33,7 +40,7 @@ export async function getACVMConfig(logger: DebugLogger) { return { acvmWorkingDirectory, - expectedAcvmPath, + acvmBinaryPath, cleanup, }; } catch (err) { diff --git a/yarn-project/end-to-end/src/fixtures/get_bb_config.ts b/yarn-project/end-to-end/src/fixtures/get_bb_config.ts new file mode 100644 index 000000000000..412c93164579 --- /dev/null +++ b/yarn-project/end-to-end/src/fixtures/get_bb_config.ts @@ -0,0 +1,46 @@ +import { type DebugLogger, fileURLToPath } from '@aztec/aztec.js'; + +import fs from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import path from 'path'; + +const { + BB_RELEASE_DIR = 'barretenberg/cpp/build/bin', + BB_BINARY_PATH, + TEMP_DIR = tmpdir(), + BB_WORKING_DIRECTORY = '', +} = process.env; + +export const getBBConfig = async ( + logger: DebugLogger, +): Promise<{ bbBinaryPath: string; bbWorkingDirectory: string; cleanup: () => Promise } | undefined> => { + try { + const bbBinaryPath = + BB_BINARY_PATH ?? 
+ path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../', BB_RELEASE_DIR, 'bb'); + await fs.access(bbBinaryPath, fs.constants.R_OK); + + let bbWorkingDirectory: string; + let directoryToCleanup: string | undefined; + + if (BB_WORKING_DIRECTORY) { + bbWorkingDirectory = BB_WORKING_DIRECTORY; + } else { + bbWorkingDirectory = await fs.mkdtemp(path.join(TEMP_DIR, 'bb-')); + directoryToCleanup = bbWorkingDirectory; + } + + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + + const cleanup = async () => { + if (directoryToCleanup) { + await fs.rm(directoryToCleanup, { recursive: true, force: true }); + } + }; + + return { bbBinaryPath, bbWorkingDirectory, cleanup }; + } catch (err) { + logger.error(`Native BB not available, error: ${err}`); + return undefined; + } +}; diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 2f1e67405272..72651f79e278 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -253,7 +253,7 @@ async function setupFromFresh(statePath: string | undefined, logger: Logger): Pr const acvmConfig = await getACVMConfig(logger); if (acvmConfig) { aztecNodeConfig.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; - aztecNodeConfig.acvmBinaryPath = acvmConfig.expectedAcvmPath; + aztecNodeConfig.acvmBinaryPath = acvmConfig.acvmBinaryPath; } logger.verbose('Creating and synching an aztec node...'); @@ -305,7 +305,7 @@ async function setupFromState(statePath: string, logger: Logger): Promise { return PXE_URL; }; -// Determines if we have access to the acvm binary and a tmp folder for temp files -const getACVMConfig = async (logger: DebugLogger) => { - try { - const expectedAcvmPath = ACVM_BINARY_PATH - ? 
ACVM_BINARY_PATH - : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../noir/', NOIR_RELEASE_DIR)}/acvm`; - await fs.access(expectedAcvmPath, fs.constants.R_OK); - const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; - const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`; - await fs.mkdir(acvmWorkingDirectory, { recursive: true }); - logger.info(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); - return { - acvmWorkingDirectory, - expectedAcvmPath, - directoryToCleanup: ACVM_WORKING_DIRECTORY ? undefined : tempWorkingDirectory, - }; - } catch (err) { - logger.error(`Native ACVM not available, error: ${err}`); - return undefined; - } -}; - -// Determines if we have access to the bb binary and a tmp folder for temp files -export const getBBConfig = async (logger: DebugLogger) => { - try { - const expectedBBPath = BB_BINARY_PATH - ? BB_BINARY_PATH - : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../barretenberg/', BB_RELEASE_DIR)}/bb`; - await fs.access(expectedBBPath, fs.constants.R_OK); - const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; - const bbWorkingDirectory = BB_WORKING_DIRECTORY ? BB_WORKING_DIRECTORY : `${tempWorkingDirectory}/bb`; - await fs.mkdir(bbWorkingDirectory, { recursive: true }); - logger.info(`Using native BB binary at ${expectedBBPath} with working directory ${bbWorkingDirectory}`); - return { - bbWorkingDirectory, - expectedBBPath, - directoryToCleanup: BB_WORKING_DIRECTORY ? 
undefined : tempWorkingDirectory, - }; - } catch (err) { - logger.error(`Native BB not available, error: ${err}`); - return undefined; - } -}; - export const setupL1Contracts = async ( l1RpcUrl: string, account: HDAccount | PrivateKeyAccount, @@ -312,7 +258,7 @@ async function setupWithRemoteEnvironment( const { chainId, protocolVersion } = await pxeClient.getNodeInfo(); // this contract might already have been deployed - // the following deployin functions are idempotent + // the following deploying functions are idempotent await deployCanonicalKeyRegistry( new SignerlessWallet(pxeClient, new DefaultMultiCallEntrypoint(chainId, protocolVersion)), ); @@ -326,6 +272,7 @@ async function setupWithRemoteEnvironment( return { aztecNode, sequencer: undefined, + prover: undefined, pxe: pxeClient, deployL1ContractsValues, accounts: await pxeClient!.getRegisteredAccounts(), @@ -366,6 +313,8 @@ export type EndToEndContext = { logger: DebugLogger; /** The cheat codes. */ cheatCodes: CheatCodes; + /** Proving jobs */ + prover: ProverClient | undefined; /** Function to stop the started services. */ teardown: () => Promise; }; @@ -383,6 +332,7 @@ export async function setup( enableGas = false, ): Promise { const config = { ...getConfigEnvVars(), ...opts }; + const logger = getLogger(); let anvil: Anvil | undefined; @@ -413,6 +363,7 @@ export async function setup( // Enable logging metrics to a local file named after the test suite if (isMetricsLoggingRequested()) { const filename = path.join('log', getJobName() + '.jsonl'); + logger.info(`Logging metrics to ${filename}`); setupMetricsLogger(filename); } @@ -421,7 +372,6 @@ export async function setup( await ethCheatCodes.loadChainState(opts.stateLoad); } - const logger = getLogger(); const hdAccount = mnemonicToAccount(MNEMONIC); const privKeyRaw = hdAccount.getHdKey().privateKey; const publisherPrivKey = privKeyRaw === null ? 
null : Buffer.from(privKeyRaw); @@ -442,11 +392,12 @@ export async function setup( const acvmConfig = await getACVMConfig(logger); if (acvmConfig) { config.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; - config.acvmBinaryPath = acvmConfig.expectedAcvmPath; + config.acvmBinaryPath = acvmConfig.acvmBinaryPath; } config.l1BlockPublishRetryIntervalMS = 100; const aztecNode = await AztecNodeService.createAndSync(config); const sequencer = aztecNode.getSequencer(); + const prover = aztecNode.getProver(); logger.verbose('Creating a pxe...'); const { pxe, wallets } = await setupPXEService(numberOfAccounts, aztecNode!, pxeOpts, logger); @@ -473,10 +424,10 @@ export async function setup( await pxe?.stop(); } - if (acvmConfig?.directoryToCleanup) { + if (acvmConfig?.cleanup) { // remove the temp directory created for the acvm - logger.verbose(`Cleaning up ACVM temp directory ${acvmConfig.directoryToCleanup}`); - await fs.rm(acvmConfig.directoryToCleanup, { recursive: true, force: true }); + logger.verbose(`Cleaning up ACVM state`); + await acvmConfig.cleanup(); } await anvil?.stop(); @@ -492,6 +443,7 @@ export async function setup( logger, cheatCodes, sequencer, + prover, teardown, }; } diff --git a/yarn-project/foundation/src/abi/abi.ts b/yarn-project/foundation/src/abi/abi.ts index 3dbe2dc70360..ff1be9bcdd75 100644 --- a/yarn-project/foundation/src/abi/abi.ts +++ b/yarn-project/foundation/src/abi/abi.ts @@ -183,18 +183,14 @@ export interface FunctionAbi { * The artifact entry of a function. */ export interface FunctionArtifact extends FunctionAbi { - /** - * The ACIR bytecode of the function. - */ + /** The ACIR bytecode of the function. */ bytecode: Buffer; - /** - * The verification key of the function. - */ + /** The verification key of the function. */ verificationKey?: string; - /** - * Maps opcodes to source code pointers - */ + /** Maps opcodes to source code pointers */ debugSymbols: string; + /** Debug metadata for the function. 
*/ + debug?: FunctionDebugMetadata; } /** @@ -350,14 +346,8 @@ export interface FunctionDebugMetadata { files: DebugFileMap; } -/** A function artifact with optional debug metadata */ -export interface FunctionArtifactWithDebugMetadata extends FunctionArtifact { - /** Debug metadata for the function. */ - debug?: FunctionDebugMetadata; -} - /** - * Gets a function artifact given its name or selector. + * Gets a function artifact including debug metadata given its name or selector. */ export function getFunctionArtifact( artifact: ContractArtifact, @@ -371,22 +361,6 @@ export function getFunctionArtifact( if (!functionArtifact) { throw new Error(`Unknown function ${functionNameOrSelector}`); } - return functionArtifact; -} - -/** @deprecated Use getFunctionArtifact instead */ -export function getFunctionArtifactWithSelector(artifact: ContractArtifact, selector: FunctionSelector) { - return getFunctionArtifact(artifact, selector); -} - -/** - * Gets a function artifact including debug metadata given its name or selector. 
- */ -export function getFunctionArtifactWithDebugMetadata( - artifact: ContractArtifact, - functionNameOrSelector: string | FunctionSelector, -): FunctionArtifactWithDebugMetadata { - const functionArtifact = getFunctionArtifact(artifact, functionNameOrSelector); const debugMetadata = getFunctionDebugMetadata(artifact, functionArtifact); return { ...functionArtifact, debug: debugMetadata }; } diff --git a/yarn-project/foundation/src/collection/index.ts b/yarn-project/foundation/src/collection/index.ts index 1759a6df56af..00f8115dd60b 100644 --- a/yarn-project/foundation/src/collection/index.ts +++ b/yarn-project/foundation/src/collection/index.ts @@ -1 +1,2 @@ export * from './array.js'; +export * from './object.js'; diff --git a/yarn-project/foundation/src/collection/object.test.ts b/yarn-project/foundation/src/collection/object.test.ts new file mode 100644 index 000000000000..362fc87bbd0b --- /dev/null +++ b/yarn-project/foundation/src/collection/object.test.ts @@ -0,0 +1,30 @@ +import { mapValues } from './object.js'; + +describe('mapValues', () => { + it('should return a new object with mapped values', () => { + const obj = { a: 1, b: 2, c: 3 }; + const fn = (value: number) => value * 2; + + const result = mapValues(obj, fn); + + expect(result).toEqual({ a: 2, b: 4, c: 6 }); + }); + + it('should handle an empty object', () => { + const obj = {}; + const fn = (value: number) => value * 2; + + const result = mapValues(obj, fn); + + expect(result).toEqual({}); + }); + + it('should handle different value types', () => { + const obj = { a: 'hello', b: true, c: [1, 2, 3] }; + const fn = (value: any) => typeof value; + + const result = mapValues(obj, fn); + + expect(result).toEqual({ a: 'string', b: 'boolean', c: 'object' }); + }); +}); diff --git a/yarn-project/foundation/src/collection/object.ts b/yarn-project/foundation/src/collection/object.ts new file mode 100644 index 000000000000..912599bc5515 --- /dev/null +++ 
b/yarn-project/foundation/src/collection/object.ts @@ -0,0 +1,19 @@ +/** Returns a new object with the same keys and where each value has been passed through the mapping function. */ +export function mapValues( + obj: Record, + fn: (value: T) => U, +): Record; +export function mapValues( + obj: Partial>, + fn: (value: T) => U, +): Partial>; +export function mapValues( + obj: Record, + fn: (value: T) => U, +): Record { + const result: Record = {} as Record; + for (const key in obj) { + result[key] = fn(obj[key]); + } + return result; +} diff --git a/yarn-project/noir-protocol-circuits-types/package.json b/yarn-project/noir-protocol-circuits-types/package.json index c8bcbff91717..83f918856b40 100644 --- a/yarn-project/noir-protocol-circuits-types/package.json +++ b/yarn-project/noir-protocol-circuits-types/package.json @@ -52,6 +52,7 @@ "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js", "@noir-lang/noirc_abi": "portal:../../noir/packages/noirc_abi", "@noir-lang/types": "portal:../../noir/packages/types", + "change-case": "^5.4.4", "tslib": "^2.4.0" }, "devDependencies": { diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index 6e48dddc7a37..20cff6648bea 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -2,7 +2,6 @@ import { type BaseOrMergeRollupPublicInputs, type BaseParityInputs, type BaseRollupInputs, - Fr, type KernelCircuitPublicInputs, type MergeRollupInputs, type ParityPublicInputs, @@ -25,6 +24,7 @@ import { type NoirCompiledCircuit } from '@aztec/types/noir'; import { type ForeignCallInput, + type ForeignCallOutput, type WasmBlackBoxFunctionSolver, createBlackBoxSolver, executeCircuitWithBlackBoxSolver, @@ -71,26 +71,22 @@ import { mapRootRollupInputsToNoir, mapRootRollupPublicInputsFromNoir, } from './type_conversion.js'; -import { type ReturnType as BaseParityReturnType } from 
'./types/parity_base_types.js'; -import { type ReturnType as RootParityReturnType } from './types/parity_root_types.js'; import { - type InputType as InitInputType, - type ReturnType as InitReturnType, -} from './types/private_kernel_init_types.js'; -import { - type InputType as InnerInputType, - type ReturnType as InnerReturnType, -} from './types/private_kernel_inner_types.js'; -import { type InputType as TailToPublicInputType } from './types/private_kernel_tail_to_public_types.js'; -import { - type InputType as TailInputType, - type ReturnType as TailReturnType, -} from './types/private_kernel_tail_types.js'; -import { type ReturnType as PublicPublicPreviousReturnType } from './types/public_kernel_app_logic_types.js'; -import { type ReturnType as PublicSetupReturnType } from './types/public_kernel_setup_types.js'; -import { type ReturnType as BaseRollupReturnType } from './types/rollup_base_types.js'; -import { type ReturnType as MergeRollupReturnType } from './types/rollup_merge_types.js'; -import { type ReturnType as RootRollupReturnType } from './types/rollup_root_types.js'; + type ParityBaseReturnType as BaseParityReturnType, + type RollupBaseReturnType as BaseRollupReturnType, + type PrivateKernelInitInputType as InitInputType, + type PrivateKernelInitReturnType as InitReturnType, + type PrivateKernelInnerInputType as InnerInputType, + type PrivateKernelInnerReturnType as InnerReturnType, + type RollupMergeReturnType as MergeRollupReturnType, + type PublicKernelAppLogicReturnType as PublicPublicPreviousReturnType, + type PublicKernelSetupReturnType as PublicSetupReturnType, + type ParityRootReturnType as RootParityReturnType, + type RollupRootReturnType as RootRollupReturnType, + type PrivateKernelTailInputType as TailInputType, + type PrivateKernelTailReturnType as TailReturnType, + type PrivateKernelTailToPublicInputType as TailToPublicInputType, +} from './types/index.js'; // TODO(Tom): This should be exported from noirc_abi /** @@ -755,7 +751,7 @@ async 
function executePrivateKernelTailToPublicWithACVM( return decodedInputs.return_value as PublicPublicPreviousReturnType; } -export const foreignCallHandler = (name: string, args: ForeignCallInput[]) => { +export function foreignCallHandler(name: string, args: ForeignCallInput[]): Promise { const log = createDebugLogger('aztec:noir-protocol-circuits:oracle'); if (name === 'debugLog') { @@ -766,5 +762,5 @@ export const foreignCallHandler = (name: string, args: ForeignCallInput[]) => { throw Error(`unexpected oracle during execution: ${name}`); } - return Promise.resolve([`0x${Buffer.alloc(Fr.SIZE_IN_BYTES).toString('hex')}`]); -}; + return Promise.resolve([]); +} diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/abi_type_with_generics.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/abi_type_with_generics.ts new file mode 100644 index 000000000000..c03dbb62a65a --- /dev/null +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/abi_type_with_generics.ts @@ -0,0 +1,127 @@ +import { type AbiType } from '@aztec/foundation/abi'; + +/** + * Represents a binding to a generic. + */ +export class BindingId { + constructor(public id: number, public isNumeric: boolean) {} +} + +export type StructType = { + path: string; + fields: { name: string; type: AbiTypeWithGenerics }[]; + /** The generics of the struct, bound to the fields */ + generics: BindingId[]; +}; + +export type StringType = { + kind: 'string'; + length: number | BindingId; +}; + +export type Constant = { + kind: 'constant'; + value: number; +}; + +export type ArrayType = { + kind: 'array'; + length: number | BindingId; + type: AbiTypeWithGenerics; +}; + +export type Tuple = { + kind: 'tuple'; + fields: AbiTypeWithGenerics[]; +}; + +export type Struct = { + kind: 'struct'; + structType: StructType; + /** The arguments are the concrete instantiation of the generics in the struct type. 
*/ + args: AbiTypeWithGenerics[]; +}; + +export type AbiTypeWithGenerics = + | { kind: 'field' } + | { kind: 'boolean' } + | { kind: 'integer'; sign: string; width: number } + | { kind: 'binding'; id: BindingId } + | { kind: 'constant'; value: number } + | StringType + | ArrayType + | Tuple + | Struct; + +/** + * Maps an ABI type to an ABI type with generics. + * This performs pure type conversion, and does not generate any bindings. + */ +export function mapAbiTypeToAbiTypeWithGenerics(abiType: AbiType): AbiTypeWithGenerics { + switch (abiType.kind) { + case 'field': + case 'boolean': + case 'string': + case 'integer': + return abiType; + case 'array': + return { + kind: 'array', + length: abiType.length, + type: mapAbiTypeToAbiTypeWithGenerics(abiType.type), + }; + case 'struct': { + const structType = { + path: abiType.path, + fields: abiType.fields.map(field => ({ + name: field.name, + type: mapAbiTypeToAbiTypeWithGenerics(field.type), + })), + generics: [], + }; + return { + kind: 'struct', + structType, + args: [], + }; + } + } +} + +/** + * Finds the structs in an ABI type. + * This won't explore nested structs. + */ +export function findStructsInType(abiType: AbiTypeWithGenerics): Struct[] { + switch (abiType.kind) { + case 'field': + case 'boolean': + case 'string': + case 'integer': + return []; + case 'array': + return findStructsInType(abiType.type); + case 'tuple': + return abiType.fields.flatMap(findStructsInType); + case 'struct': + return [abiType]; + default: { + return []; + } + } +} + +/** + * Finds all the structs in an ABI type, including nested structs. 
+ */ +export function findAllStructsInType(abiType: AbiTypeWithGenerics): Struct[] { + let allStructs: Struct[] = []; + let lastStructs = findStructsInType(abiType); + while (lastStructs.length > 0) { + allStructs = allStructs.concat(lastStructs); + lastStructs = lastStructs.flatMap(struct => + struct.structType.fields.flatMap(field => findStructsInType(field.type)), + ); + } + return allStructs; +} diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/demonomorphizer.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/demonomorphizer.ts new file mode 100644 index 000000000000..bd654ca5a187 --- /dev/null +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/demonomorphizer.ts @@ -0,0 +1,274 @@ +import { + type AbiTypeWithGenerics, + type ArrayType, + BindingId, + type Constant, + type StringType, + type Struct, + type StructType, + type Tuple, + findAllStructsInType, + findStructsInType, +} from './abi_type_with_generics.js'; + +/** + * Demonomorphizes a list of ABI types adding generics to structs. + * Since monomorphization of the generics destroys information, this process is not guaranteed to return the original structure. + * However, it should succesfully unify all struct types that share the same name and field names. + */ +export class Demonomorphizer { + private variantsMap: Map; + private visitedStructs: Map; + private lastBindingId = 0; + + /** + * Demonomorphizes the passed in ABI types, mutating them. + */ + public static demonomorphize(abiTypes: AbiTypeWithGenerics[]) { + new Demonomorphizer(abiTypes); + } + + private constructor(private types: AbiTypeWithGenerics[]) { + this.variantsMap = new Map(); + this.fillVariantsMap(); + + this.visitedStructs = new Map(); + this.demonomorphizeStructs(); + } + + /** + * Finds all the variants of the structs in the types. + * A variant is every use of a struct with the same name and fields. 
+ */ + private fillVariantsMap() { + const allStructs = this.types.flatMap(findAllStructsInType); + for (const struct of allStructs) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + const variants = this.variantsMap.get(id) ?? []; + variants.push(struct); + this.variantsMap.set(id, variants); + } + } + + private demonomorphizeStructs() { + for (const type of this.types) { + const topLevelStructs = findStructsInType(type); + for (const struct of topLevelStructs) { + this.demonomorphizeStruct(struct); + } + } + } + + /** + * Demonomorphizes a struct, by demonomorphizing its dependencies first. + * Then it'll unify the types of the variants generating a unique generic type. + * It'll also generate args that instantiate the generic type with the concrete arguments for each variant. + */ + private demonomorphizeStruct(struct: Struct) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + if (this.visitedStructs.has(id)) { + return; + } + const dependencies = struct.structType.fields.flatMap(field => findStructsInType(field.type)); + for (const dependency of dependencies) { + this.demonomorphizeStruct(dependency); + } + if (this.visitedStructs.has(id)) { + throw new Error('Circular dependency detected'); + } + + const variants = this.variantsMap.get(id)!; + const mappedStructType = struct.structType; + + for (let i = 0; i < struct.structType.fields.length; i++) { + const variantTypes = variants.map(variant => variant.structType.fields[i].type); + const mappedType = this.unifyTypes(variantTypes, mappedStructType.generics, variants); + mappedStructType.fields[i].type = mappedType; + } + + // Mutate variants setting the new struct type + variants.forEach(variant => (variant.structType = mappedStructType)); + + this.visitedStructs.set(id, mappedStructType); + } + + /** + * Tries to unify the types of a set of variants recursively. 
+ * Unification will imply replacing some properties with bindings and pushing bindings to the generics of the struct. + */ + private unifyTypes( + types: AbiTypeWithGenerics[], + generics: BindingId[], // Mutates generics adding new bindings + variants: Struct[], // mutates variants adding different args to the variants + ): AbiTypeWithGenerics { + const kinds = new Set(types.map(type => type.kind)); + if (kinds.size > 1) { + return this.buildBindingAndPushToVariants(types, generics, variants); + } + switch (types[0].kind) { + case 'field': + case 'boolean': + case 'binding': + return types[0]; + case 'integer': { + if (allDeepEqual(types)) { + return types[0]; + } else { + return this.buildBindingAndPushToVariants(types, generics, variants); + } + } + case 'string': { + const strings = types as StringType[]; + const unifiedString = strings[0]; + if (strings.every(string => string.length === unifiedString.length)) { + return unifiedString; + } else { + const unifiedStringType: StringType = unifiedString; + unifiedStringType.length = this.buildNumericBindingAndPushToVariants( + strings.map(string => { + if (typeof string.length !== 'number') { + throw new Error('Trying to unify strings with bindings'); + } + return string.length; + }), + generics, + variants, + ); + return unifiedStringType; + } + } + case 'array': { + const arrays = types as ArrayType[]; + const unifiedArrayType: ArrayType = arrays[0]; + if ( + !arrays.every(array => { + return array.length === unifiedArrayType.length; + }) + ) { + unifiedArrayType.length = this.buildNumericBindingAndPushToVariants( + arrays.map(array => { + if (typeof array.length !== 'number') { + throw new Error('Trying to unify arrays with bindings'); + } + return array.length; + }), + generics, + variants, + ); + } + + unifiedArrayType.type = this.unifyTypes( + arrays.map(array => array.type), + generics, + variants, + ); + return unifiedArrayType; + } + case 'tuple': { + const tuples = types as Tuple[]; + const 
unifiedTupleType: Tuple = tuples[0]; + for (let i = 0; i < unifiedTupleType.fields.length; i++) { + unifiedTupleType.fields[i] = this.unifyTypes( + tuples.map(tuple => tuple.fields[i]), + generics, + variants, + ); + } + return unifiedTupleType; + } + case 'struct': { + const structs = types as Struct[]; + const ids = new Set(structs.map(struct => Demonomorphizer.buildIdForStruct(struct.structType))); + if (ids.size > 1) { + // If the types are different structs, we can only unify them by creating a new binding. + // For example, if we have a struct A { x: u32 } and a struct A { x: Field }, the only possible unification is A { x: T } + return this.buildBindingAndPushToVariants(types, generics, variants); + } else { + // If the types are the same struct, we must unify the arguments to the struct. + // For example, if we have A and A, we need to unify to A and push T to the generics of the struct type. + const unifiedStruct = structs[0]; + + if (!structs.every(struct => struct.args.length === unifiedStruct.args.length)) { + throw new Error('Same struct with different number of args encountered'); + } + for (let i = 0; i < unifiedStruct.args.length; i++) { + const argTypes = structs.map(struct => struct.args[i]); + unifiedStruct.args[i] = this.unifyTypes(argTypes, generics, variants); + } + return unifiedStruct; + } + } + + case 'constant': { + const constants = types as Constant[]; + if (constants.every(constant => constant.value === constants[0].value)) { + return constants[0]; + } else { + return this.buildBindingAndPushToVariants(types, generics, variants, true); + } + } + + default: { + const exhaustiveCheck: never = types[0]; + throw new Error(`Unhandled abi type: ${exhaustiveCheck}`); + } + } + } + + /** + * We consider a struct to be the same if it has the same name and field names. + * Structs with the same id will be unified into a single type by the demonomorphizer. 
+ */ + public static buildIdForStruct(struct: StructType): string { + const name = struct.path.split('::').pop()!; + const fields = struct.fields.map(field => field.name).join(','); + return `${name}(${fields})`; + } + + private buildBindingAndPushToVariants( + concreteTypes: AbiTypeWithGenerics[], + generics: BindingId[], + variants: Struct[], + isNumeric = false, + ): AbiTypeWithGenerics { + const bindingId = new BindingId(this.lastBindingId++, isNumeric); + + for (let i = 0; i < variants.length; i++) { + const variant = variants[i]; + const concreteType = concreteTypes[i]; + variant.args.push(concreteType); + } + + generics.push(bindingId); + return { kind: 'binding', id: bindingId }; + } + + private buildNumericBindingAndPushToVariants( + concreteNumbers: number[], + generics: BindingId[], + variants: Struct[], + ): BindingId { + const bindingId = new BindingId(this.lastBindingId++, true); + + for (let i = 0; i < variants.length; i++) { + const variant = variants[i]; + variant.args.push({ kind: 'constant', value: concreteNumbers[i] }); + } + + generics.push(bindingId); + return bindingId; + } +} + +function allDeepEqual(arr: T[]): boolean { + if (arr.length === 0) { + return true; + } + const first = JSON.stringify(arr[0]); + for (let i = 0; i < arr.length; i++) { + if (JSON.stringify(arr[i]) !== first) { + return false; + } + } + return true; +} diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts index 8b8f29d890f4..7222143d6b02 100644 --- a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts @@ -1,9 +1,18 @@ -import { type AbiType } from '@aztec/foundation/abi'; import { createConsoleLogger } from '@aztec/foundation/log'; import { type NoirCompiledCircuit, type NoirFunctionAbi } from '@aztec/types/noir'; +import { pascalCase } from 
'change-case'; import fs from 'fs/promises'; +import { + type AbiTypeWithGenerics, + type BindingId, + type StructType, + findAllStructsInType, + mapAbiTypeToAbiTypeWithGenerics, +} from './abi_type_with_generics.js'; +import { Demonomorphizer } from './demonomorphizer.js'; + const log = createConsoleLogger('aztec:noir-contracts'); /** @@ -30,52 +39,6 @@ type PrimitiveTypesUsed = { tsType: string; }; -const noirPrimitiveTypesToTsTypes = new Map(); - -/** - * Typescript does not allow us to check for equality of non-primitive types - * easily, so we create a addIfUnique function that will only add an item - * to the map if it is not already there by using JSON.stringify. - * @param item - The item to add to the map. - */ -function addIfUnique(item: PrimitiveTypesUsed) { - const key = JSON.stringify(item); - if (!noirPrimitiveTypesToTsTypes.has(key)) { - noirPrimitiveTypesToTsTypes.set(key, item); - } -} - -/** - * Converts an ABI type to a TypeScript type. - * @param type - The ABI type to convert. - * @returns The typescript code to define the type. - */ -function abiTypeToTs(type: AbiType): string { - switch (type.kind) { - case 'integer': { - let tsIntType = ''; - if (type.sign === 'signed') { - tsIntType = `i${type.width}`; - } else { - tsIntType = `u${type.width}`; - } - addIfUnique({ aliasName: tsIntType, tsType: 'string' }); - return tsIntType; - } - case 'boolean': - return `boolean`; - case 'array': - return `FixedLengthArray<${abiTypeToTs(type.type)}, ${type.length}>`; - case 'struct': - return getLastComponentOfPath(type.path); - case 'field': - addIfUnique({ aliasName: 'Field', tsType: 'string' }); - return 'Field'; - default: - throw new Error(`Unknown ABI type ${type}`); - } -} - /** * Returns the last component of a path, e.g. "foo::bar::baz" -\> "baz" * Note: that if we have a path such as "Baz", we will return "Baz". 
@@ -102,99 +65,217 @@ function getLastComponentOfPath(str: string): string { } /** - * Generates TypeScript interfaces for the structs used in the ABI. - * @param type - The ABI type to generate the interface for. - * @param output - The set of structs that we have already generated bindings for. - * @returns The TypeScript code to define the struct. + * Replaces a numeric binding with the corresponding generics name or the actual value. */ -function generateStructInterfaces(type: AbiType, output: Set): string { - let result = ''; - - // Edge case to handle the array of structs case. - if ( - type.kind === 'array' && - ((type.type.kind === 'struct' && !output.has(getLastComponentOfPath(type.type.path))) || type.type.kind === 'array') - ) { - result += generateStructInterfaces(type.type, output); +function replaceNumericBinding(id: number | BindingId, genericsNameMap: Map): string { + if (typeof id === 'number') { + return id.toString(); + } else { + return genericsNameMap.get(id.id) ?? 'unknown'; } +} - if (type.kind !== 'struct') { - return result; +class TypingsGenerator { + /** All the types in the ABIs */ + private allTypes: AbiTypeWithGenerics[] = []; + /** The demonomorphized ABIs of the circuits */ + private demonomorphizedAbis: { + circuitName: string; + params: { name: string; type: AbiTypeWithGenerics }[]; + returnType?: AbiTypeWithGenerics; + }[] = []; + /** Maps struct id to name for structs with the same name and different field sets */ + private structIdToTsName = new Map(); + /** Collect all the primitives used in the types to add them to the codegen */ + private primitiveTypesUsed = new Map(); + + constructor(circuits: { abi: NoirFunctionAbi; circuitName: string }[]) { + // Map all the types used in the ABIs to the demonomorphized types + for (const { abi, circuitName } of circuits) { + const params = abi.parameters.map(param => { + const type = mapAbiTypeToAbiTypeWithGenerics(param.type); + this.allTypes.push(type); + return { name: param.name, 
type }; + }); + if (abi.return_type) { + const returnType = mapAbiTypeToAbiTypeWithGenerics(abi.return_type.abi_type); + this.allTypes.push(returnType); + this.demonomorphizedAbis.push({ circuitName, params, returnType }); + } else { + this.demonomorphizedAbis.push({ circuitName, params }); + } + } + // Demonomorphize the types + Demonomorphizer.demonomorphize(this.allTypes); } - // List of structs encountered while viewing this type that we need to generate - // bindings for. - const typesEncountered = new Set(); - - // Codegen the struct and then its fields, so that the structs fields - // are defined before the struct itself. - let codeGeneratedStruct = ''; - let codeGeneratedStructFields = ''; - - const structName = getLastComponentOfPath(type.path); - if (!output.has(structName)) { - codeGeneratedStruct += `export interface ${structName} {\n`; - for (const field of type.fields) { - codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type)};\n`; - typesEncountered.add(field.type); + public codegen(): string { + this.primitiveTypesUsed = new Map(); + const structsCode = this.codegenAllStructs(); + const interfacesCode = this.codegenAllInterfaces(); + const primitivesCode = this.codegenAllPrimitives(); + + return ` + /* Autogenerated file, do not edit! 
*/ + /* eslint-disable */ + ${primitivesCode} + ${structsCode} + ${interfacesCode}`; + } + + private codegenAllStructs(): string { + const allStructs = this.allTypes.flatMap(findAllStructsInType); + // First, deduplicate the structs used + const structTypesToExport = new Map(); + for (const struct of allStructs) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + if (structTypesToExport.has(id)) { + continue; + } + structTypesToExport.set(id, struct.structType); } - codeGeneratedStruct += `}\n\n`; - output.add(structName); - // Generate code for the encountered structs in the field above - for (const type of typesEncountered) { - codeGeneratedStructFields += generateStructInterfaces(type, output); + // Then, we have to consider the case where we have struct with the same name but different fields. + // For those, we'll naively append a number to the name. + const idsPerName = new Map(); + for (const [id, structType] of structTypesToExport.entries()) { + const name = getLastComponentOfPath(structType.path); + const ids = idsPerName.get(name) ?? []; + ids.push(id); + idsPerName.set(name, ids); + } + + this.structIdToTsName = new Map(); + for (const [name, ids] of idsPerName.entries()) { + if (ids.length !== 1) { + ids.forEach((id, index) => { + this.structIdToTsName.set(id, `${name}${index + 1}`); + }); + } } + // Now we can just generate the code for the structs + let resultCode = ''; + + for (const structType of structTypesToExport.values()) { + resultCode += this.codegenStructType(structType); + } + + return resultCode; } - return codeGeneratedStructFields + '\n' + codeGeneratedStruct; -} + private getStructName(structType: StructType): string { + return ( + this.structIdToTsName.get(Demonomorphizer.buildIdForStruct(structType)) || getLastComponentOfPath(structType.path) + ); + } -/** - * Generates a TypeScript interface for the ABI. - * @param abiObj - The ABI to generate the interface for. 
- * @returns The TypeScript code to define the interface. - */ -function generateTsInterface(abiObj: NoirFunctionAbi): string { - let result = ``; - const outputStructs = new Set(); + private codegenStructType(structType: StructType): string { + // Generate names for the generic bindings. + const genericsNameMap = new Map(); + structType.generics.forEach((generic, index) => { + genericsNameMap.set(generic.id, String.fromCharCode('A'.charCodeAt(0) + index)); + }); - // Define structs for composite types - for (const param of abiObj.parameters) { - result += generateStructInterfaces(param.type, outputStructs); + const name = this.getStructName(structType); + const generics = structType.generics.length + ? `<${structType.generics + .map(generic => `${genericsNameMap.get(generic.id)}${generic.isNumeric ? ' extends number' : ''}`) + .join(', ')}>` + : ''; + + let resultCode = `export interface ${name}${generics} {\n`; + + for (const field of structType.fields) { + resultCode += ` ${field.name}: ${this.codegenType(field.type, genericsNameMap)};\n`; + } + + resultCode += '}\n\n'; + + return resultCode; } - // Generating Return type, if it exists - // - if (abiObj.return_type != null) { - result += generateStructInterfaces(abiObj.return_type.abi_type, outputStructs); - result += `export type ReturnType = ${abiTypeToTs(abiObj.return_type.abi_type)};\n`; + private codegenType(type: AbiTypeWithGenerics, genericsNameMap: Map): string { + switch (type.kind) { + case 'field': + this.addIfUnique({ aliasName: 'Field', tsType: 'string' }); + return 'Field'; + case 'boolean': + return 'boolean'; + case 'integer': { + let tsIntType = ''; + if (type.sign === 'signed') { + tsIntType = `i${type.width}`; + } else { + tsIntType = `u${type.width}`; + } + this.addIfUnique({ aliasName: tsIntType, tsType: 'string' }); + return tsIntType; + } + case 'binding': + return genericsNameMap.get(type.id.id) ?? 
'unknown'; + case 'constant': + return type.value.toString(); + case 'string': + return `string`; + case 'array': + return `FixedLengthArray<${this.codegenType(type.type, genericsNameMap)}, ${replaceNumericBinding( + type.length, + genericsNameMap, + )}>`; + case 'tuple': + throw new Error('Unimplemented'); + case 'struct': { + const name = this.getStructName(type.structType); + if (type.args.length) { + const args = type.args.map(arg => this.codegenType(arg, genericsNameMap)).join(', '); + return `${name}<${args}>`; + } else { + return name; + } + } + } } - // Generating Input type - result += '\nexport interface InputType {\n'; - for (const param of abiObj.parameters) { - result += ` ${param.name}: ${abiTypeToTs(param.type)};\n`; + /** + * Typescript does not allow us to check for equality of non-primitive types + * easily, so we create a addIfUnique function that will only add an item + * to the map if it is not already there by using JSON.stringify. + * @param item - The item to add to the map. + */ + private addIfUnique(item: PrimitiveTypesUsed) { + const key = JSON.stringify(item); + if (!this.primitiveTypesUsed.has(key)) { + this.primitiveTypesUsed.set(key, item); + } } - result += '}'; - // Add the primitive Noir types that do not have a 1-1 mapping to TypeScript. - let primitiveTypeAliases = ''; - for (const [, value] of noirPrimitiveTypesToTsTypes) { - primitiveTypeAliases += `\nexport type ${value.aliasName} = ${value.tsType};`; + /** + * Codegen all the interfaces for the circuits. + * For a circuit named Foo, we'll codegen FooInputType and FooReturnType. 
+ */ + private codegenAllInterfaces(): string { + let resultCode = ''; + for (const { circuitName, params, returnType } of this.demonomorphizedAbis) { + resultCode += this.codegenStructType({ + path: `${circuitName}InputType`, + fields: params, + generics: [], + }); + if (returnType) { + resultCode += `export type ${circuitName}ReturnType = ${this.codegenType(returnType, new Map())};\n`; + } + } + return resultCode; } - const fixedLengthArray = - '\nexport type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }'; - - return ( - `/* Autogenerated file, do not edit! */\n\n/* eslint-disable */\n` + - fixedLengthArray + - '\n' + - primitiveTypeAliases + - '\n' + - result - ); + private codegenAllPrimitives(): string { + let primitiveTypeAliases = + 'export type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }\n'; + for (const [, value] of this.primitiveTypesUsed) { + primitiveTypeAliases += `export type ${value.aliasName} = ${value.tsType};\n`; + } + return primitiveTypeAliases; + } } const circuits = [ @@ -220,14 +301,19 @@ const main = async () => { await fs.mkdir('./src/types', { recursive: true }); } + const allAbis = []; + + // Collect all abis for (const circuit of circuits) { const rawData = await fs.readFile(`./src/target/${circuit}.json`, 'utf-8'); const abiObj: NoirCompiledCircuit = JSON.parse(rawData); - const generatedInterface = generateTsInterface(abiObj.abi); - - const outputFile = `./src/types/${circuit}_types.ts`; - await fs.writeFile(outputFile, generatedInterface); + allAbis.push({ + abi: abiObj.abi, + circuitName: pascalCase(circuit), + }); } + const interfaces = new TypingsGenerator(allAbis).codegen(); + await fs.writeFile('./src/types/index.ts', interfaces); }; try { diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index a891b6c775ce..ec844c1a3d68 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ 
b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -1,5 +1,4 @@ import { - type ARCHIVE_HEIGHT, AggregationObject, AppendOnlyTreeSnapshot, AztecAddress, @@ -16,7 +15,6 @@ import { type ContractStorageRead, type ContractStorageUpdateRequest, EthAddress, - type FUNCTION_TREE_HEIGHT, Fr, FunctionData, FunctionSelector, @@ -29,7 +27,7 @@ import { Header, KernelCircuitPublicInputs, type KernelData, - type L2ToL1Message, + L2ToL1Message, type LeafDataReadHint, MAX_ENCRYPTED_LOGS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, @@ -53,11 +51,9 @@ import { NUM_BYTES_PER_SHA256, type NonMembershipHint, NoteHash, - NoteHashContext, type NoteHashReadRequestHints, Nullifier, NullifierKeyValidationRequest, - NullifierKeyValidationRequestContext, type NullifierLeafPreimage, type NullifierNonExistentReadRequestHints, type NullifierReadRequestHints, @@ -98,7 +94,6 @@ import { type PublicKernelTailCircuitPrivateInputs, type RECURSIVE_PROOF_LENGTH, ReadRequest, - ReadRequestContext, type ReadRequestStatus, type RecursiveProof, RevertCode, @@ -107,6 +102,11 @@ import { type RootParityInputs, type RootRollupInputs, RootRollupPublicInputs, + ScopedL2ToL1Message, + ScopedNoteHash, + ScopedNullifier, + ScopedNullifierKeyValidationRequest, + ScopedReadRequest, type SettledReadHint, SideEffect, type StateDiffHints, @@ -119,120 +119,103 @@ import { import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { type Tuple, mapTuple, toTruncField } from '@aztec/foundation/serialize'; -import { type BaseParityInputs as BaseParityInputsNoir } from './types/parity_base_types.js'; -import { - type RootParityInput as ParityRootParityInputNoir, - type RootParityInputs as RootParityInputsNoir, -} from './types/parity_root_types.js'; -import { - type CallContext as CallContextNoir, - type CallRequest as CallRequestNoir, - type CallerContext as CallerContextNoir, - type CombinedConstantData as CombinedConstantDataNoir, - type FunctionData as FunctionDataNoir, - type 
FunctionLeafMembershipWitness as FunctionLeafMembershipWitnessNoir, - type FunctionSelector as FunctionSelectorNoir, - type GasFees as GasFeesNoir, - type GasSettings as GasSettingsNoir, - type L2ToL1Message as L2ToL1MessageNoir, - type MaxBlockNumber as MaxBlockNumberNoir, - type AztecAddress as NoirAztecAddress, - type EthAddress as NoirEthAddress, - type Field as NoirField, - type GrumpkinPoint as NoirPoint, - type NoteHashContext as NoteHashContextNoir, - type NoteHash as NoteHashNoir, - type NullifierKeyValidationRequestContext as NullifierKeyValidationRequestContextNoir, - type NullifierKeyValidationRequest as NullifierKeyValidationRequestNoir, - type Nullifier as NullifierNoir, - type PrivateAccumulatedData as PrivateAccumulatedDataNoir, - type PrivateCallData as PrivateCallDataNoir, - type PrivateCallStackItem as PrivateCallStackItemNoir, - type PrivateCircuitPublicInputs as PrivateCircuitPublicInputsNoir, - type PrivateKernelCircuitPublicInputs as PrivateKernelCircuitPublicInputsNoir, - type PrivateKernelInitCircuitPrivateInputs as PrivateKernelInitCircuitPrivateInputsNoir, - type PublicDataRead as PublicDataReadNoir, - type ReadRequestContext as ReadRequestContextNoir, - type ReadRequest as ReadRequestNoir, - type RollupValidationRequests as RollupValidationRequestsNoir, - type SideEffect as SideEffectNoir, - type TxContext as TxContextNoir, - type TxRequest as TxRequestNoir, - type ValidationRequests as ValidationRequestsNoir, -} from './types/private_kernel_init_types.js'; -import { - type PrivateKernelInnerCircuitPrivateInputs as PrivateKernelInnerCircuitPrivateInputsNoir, - type PrivateKernelInnerHints as PrivateKernelInnerHintsNoir, -} from './types/private_kernel_inner_types.js'; -import { type PrivateKernelTailToPublicCircuitPrivateInputs as PrivateKernelTailToPublicCircuitPrivateInputsNoir } from './types/private_kernel_tail_to_public_types.js'; -import { - type CombinedAccumulatedData as CombinedAccumulatedDataNoir, - type Gas as GasNoir, - type 
GrumpkinPrivateKey as GrumpkinPrivateKeyNoir, - type NoteHashLeafPreimage as NoteHashLeafPreimageNoir, - type NoteHashMembershipWitness as NoteHashMembershipWitnessNoir, - type NoteHashReadRequestHints as NoteHashReadRequestHintsNoir, - type NoteHashSettledReadHint as NoteHashSettledReadHintNoir, - type NullifierReadRequestHints as NullifierReadRequestHintsNoir, - type NullifierSettledReadHint as NullifierSettledReadHintNoir, - type PendingReadHint as PendingReadHintNoir, - type PrivateKernelData as PrivateKernelDataNoir, - type PrivateKernelTailCircuitPrivateInputs as PrivateKernelTailCircuitPrivateInputsNoir, - type PrivateKernelTailHints as PrivateKernelTailHintsNoir, - type PrivateKernelTailOutputs as PrivateKernelTailOutputsNoir, - type ReadRequestStatus as ReadRequestStatusNoir, -} from './types/private_kernel_tail_types.js'; -import { - type PublicAccumulatedData as PublicAccumulatedDataNoir, - type PublicKernelData as PublicKernelDataNoir, -} from './types/public_kernel_app_logic_types.js'; -import { - type PublicCallData as PublicCallDataNoir, - type PublicCallStackItem as PublicCallStackItemNoir, - type PublicCircuitPublicInputs as PublicCircuitPublicInputsNoir, - type PublicKernelCircuitPublicInputs as PublicKernelCircuitPublicInputsNoir, - type PublicKernelSetupCircuitPrivateInputs as PublicKernelSetupCircuitPrivateInputsNoir, - type StorageRead as StorageReadNoir, - type StorageUpdateRequest as StorageUpdateRequestNoir, -} from './types/public_kernel_setup_types.js'; -import { - type LeafDataReadHint as LeafDataReadHintNoir, - type NullifierNonExistentReadRequestHints as NullifierNonExistentReadRequestHintsNoir, - type NullifierNonMembershipHint as NullifierNonMembershipHintNoir, - type PublicDataHint as PublicDataHintNoir, - type PublicDataReadRequestHints as PublicDataReadRequestHintsNoir, - type PublicDataUpdateRequest as PublicDataUpdateRequestNoir, - type PublicKernelTailCircuitPrivateInputs as PublicKernelTailCircuitPrivateInputsNoir, -} from 
'./types/public_kernel_tail_types.js'; -import { - type ArchiveRootMembershipWitness as ArchiveRootMembershipWitnessNoir, - type BaseRollupInputs as BaseRollupInputsNoir, - type KernelCircuitPublicInputs as KernelCircuitPublicInputsNoir, - type KernelData as KernelDataNoir, - type NullifierLeafPreimage as NullifierLeafPreimageNoir, - type NullifierMembershipWitness as NullifierMembershipWitnessNoir, - type PublicDataMembershipWitness as PublicDataMembershipWitnessNoir, - type PublicDataTreeLeaf as PublicDataTreeLeafNoir, - type PublicDataTreeLeafPreimage as PublicDataTreeLeafPreimageNoir, - type StateDiffHints as StateDiffHintsNoir, -} from './types/rollup_base_types.js'; -import { type MergeRollupInputs as MergeRollupInputsNoir } from './types/rollup_merge_types.js'; -import { - type AppendOnlyTreeSnapshot as AppendOnlyTreeSnapshotNoir, - type BaseOrMergeRollupPublicInputs as BaseOrMergeRollupPublicInputsNoir, - type ConstantRollupData as ConstantRollupDataNoir, - type ContentCommitment as ContentCommitmentNoir, - type Field, - type GlobalVariables as GlobalVariablesNoir, - type Header as HeaderNoir, - type ParityPublicInputs as ParityPublicInputsNoir, - type PartialStateReference as PartialStateReferenceNoir, - type PreviousRollupData as PreviousRollupDataNoir, - type RootRollupInputs as RootRollupInputsNoir, - type RootRollupParityInput as RootRollupParityInputNoir, - type RootRollupPublicInputs as RootRollupPublicInputsNoir, - type StateReference as StateReferenceNoir, -} from './types/rollup_root_types.js'; +import type { + AppendOnlyTreeSnapshot as AppendOnlyTreeSnapshotNoir, + BaseOrMergeRollupPublicInputs as BaseOrMergeRollupPublicInputsNoir, + BaseParityInputs as BaseParityInputsNoir, + BaseRollupInputs as BaseRollupInputsNoir, + CallContext as CallContextNoir, + CallRequest as CallRequestNoir, + CallerContext as CallerContextNoir, + CombinedAccumulatedData as CombinedAccumulatedDataNoir, + CombinedConstantData as CombinedConstantDataNoir, + 
ConstantRollupData as ConstantRollupDataNoir, + ContentCommitment as ContentCommitmentNoir, + Field, + FixedLengthArray, + FunctionData as FunctionDataNoir, + FunctionSelector as FunctionSelectorNoir, + GasFees as GasFeesNoir, + Gas as GasNoir, + GasSettings as GasSettingsNoir, + GlobalVariables as GlobalVariablesNoir, + GrumpkinPrivateKey as GrumpkinPrivateKeyNoir, + Header as HeaderNoir, + KernelCircuitPublicInputs as KernelCircuitPublicInputsNoir, + KernelData as KernelDataNoir, + L2ToL1Message as L2ToL1MessageNoir, + LeafDataReadHint as LeafDataReadHintNoir, + MaxBlockNumber as MaxBlockNumberNoir, + MembershipWitness as MembershipWitnessNoir, + MergeRollupInputs as MergeRollupInputsNoir, + AztecAddress as NoirAztecAddress, + EthAddress as NoirEthAddress, + Field as NoirField, + GrumpkinPoint as NoirPoint, + NoteHashLeafPreimage as NoteHashLeafPreimageNoir, + NoteHash as NoteHashNoir, + NoteHashReadRequestHints as NoteHashReadRequestHintsNoir, + NoteHashSettledReadHint as NoteHashSettledReadHintNoir, + NullifierKeyValidationRequest as NullifierKeyValidationRequestNoir, + NullifierLeafPreimage as NullifierLeafPreimageNoir, + Nullifier as NullifierNoir, + NullifierNonExistentReadRequestHints as NullifierNonExistentReadRequestHintsNoir, + NullifierNonMembershipHint as NullifierNonMembershipHintNoir, + NullifierReadRequestHints as NullifierReadRequestHintsNoir, + NullifierSettledReadHint as NullifierSettledReadHintNoir, + ParityPublicInputs as ParityPublicInputsNoir, + RootParityInput as ParityRootParityInputNoir, + PartialStateReference as PartialStateReferenceNoir, + PendingReadHint as PendingReadHintNoir, + PreviousRollupData as PreviousRollupDataNoir, + PrivateAccumulatedData as PrivateAccumulatedDataNoir, + PrivateCallData as PrivateCallDataNoir, + PrivateCallStackItem as PrivateCallStackItemNoir, + PrivateCircuitPublicInputs as PrivateCircuitPublicInputsNoir, + PrivateKernelCircuitPublicInputs as PrivateKernelCircuitPublicInputsNoir, + PrivateKernelData as 
PrivateKernelDataNoir, + PrivateKernelInitCircuitPrivateInputs as PrivateKernelInitCircuitPrivateInputsNoir, + PrivateKernelInnerCircuitPrivateInputs as PrivateKernelInnerCircuitPrivateInputsNoir, + PrivateKernelInnerHints as PrivateKernelInnerHintsNoir, + PrivateKernelTailCircuitPrivateInputs as PrivateKernelTailCircuitPrivateInputsNoir, + PrivateKernelTailHints as PrivateKernelTailHintsNoir, + PrivateKernelTailOutputs as PrivateKernelTailOutputsNoir, + PrivateKernelTailToPublicCircuitPrivateInputs as PrivateKernelTailToPublicCircuitPrivateInputsNoir, + PublicAccumulatedData as PublicAccumulatedDataNoir, + PublicCallData as PublicCallDataNoir, + PublicCallStackItem as PublicCallStackItemNoir, + PublicCircuitPublicInputs as PublicCircuitPublicInputsNoir, + PublicDataHint as PublicDataHintNoir, + PublicDataRead as PublicDataReadNoir, + PublicDataReadRequestHints as PublicDataReadRequestHintsNoir, + PublicDataTreeLeaf as PublicDataTreeLeafNoir, + PublicDataTreeLeafPreimage as PublicDataTreeLeafPreimageNoir, + PublicDataUpdateRequest as PublicDataUpdateRequestNoir, + PublicKernelCircuitPublicInputs as PublicKernelCircuitPublicInputsNoir, + PublicKernelData as PublicKernelDataNoir, + PublicKernelSetupCircuitPrivateInputs as PublicKernelSetupCircuitPrivateInputsNoir, + PublicKernelTailCircuitPrivateInputs as PublicKernelTailCircuitPrivateInputsNoir, + ReadRequest as ReadRequestNoir, + ReadRequestStatus as ReadRequestStatusNoir, + RollupValidationRequests as RollupValidationRequestsNoir, + RootParityInputs as RootParityInputsNoir, + RootRollupInputs as RootRollupInputsNoir, + RootRollupParityInput as RootRollupParityInputNoir, + RootRollupPublicInputs as RootRollupPublicInputsNoir, + ScopedL2ToL1Message as ScopedL2ToL1MessageNoir, + ScopedNoteHash as ScopedNoteHashNoir, + ScopedNullifierKeyValidationRequest as ScopedNullifierKeyValidationRequestNoir, + ScopedNullifier as ScopedNullifierNoir, + ScopedReadRequest as ScopedReadRequestNoir, + SideEffect as SideEffectNoir, + 
StateDiffHints as StateDiffHintsNoir, + StateReference as StateReferenceNoir, + StorageRead as StorageReadNoir, + StorageUpdateRequest as StorageUpdateRequestNoir, + TxContext as TxContextNoir, + TxRequest as TxRequestNoir, + ValidationRequests as ValidationRequestsNoir, +} from './types/index.js'; /* eslint-disable camelcase */ @@ -555,19 +538,19 @@ function mapNoteHashFromNoir(noteHash: NoteHashNoir) { return new NoteHash(mapFieldFromNoir(noteHash.value), mapNumberFromNoir(noteHash.counter)); } -function mapNoteHashContextToNoir(noteHash: NoteHashContext): NoteHashContextNoir { +function mapScopedNoteHashToNoir(noteHash: ScopedNoteHash): ScopedNoteHashNoir { return { - value: mapFieldToNoir(noteHash.value), - counter: mapNumberToNoir(noteHash.counter), + note_hash: mapNoteHashToNoir(noteHash.noteHash), nullifier_counter: mapNumberToNoir(noteHash.nullifierCounter), + contract_address: mapAztecAddressToNoir(noteHash.contractAddress), }; } -function mapNoteHashContextFromNoir(noteHash: NoteHashContextNoir) { - return new NoteHashContext( - mapFieldFromNoir(noteHash.value), - mapNumberFromNoir(noteHash.counter), +function mapScopedNoteHashFromNoir(noteHash: ScopedNoteHashNoir) { + return new ScopedNoteHash( + mapNoteHashFromNoir(noteHash.note_hash), mapNumberFromNoir(noteHash.nullifier_counter), + mapAztecAddressFromNoir(noteHash.contract_address), ); } @@ -587,6 +570,20 @@ function mapNullifierFromNoir(nullifier: NullifierNoir) { ); } +function mapScopedNullifierToNoir(nullifier: ScopedNullifier): ScopedNullifierNoir { + return { + nullifier: mapNullifierToNoir(nullifier.nullifier), + contract_address: mapAztecAddressToNoir(nullifier.contractAddress), + }; +} + +function mapScopedNullifierFromNoir(nullifier: ScopedNullifierNoir) { + return new ScopedNullifier( + mapNullifierFromNoir(nullifier.nullifier), + mapAztecAddressFromNoir(nullifier.contract_address), + ); +} + /** * Maps a SideEffect to a noir side effect. * @param sideEffect - The SideEffect. 
@@ -629,16 +626,10 @@ export function mapReadRequestFromNoir(readRequest: ReadRequestNoir): ReadReques return new ReadRequest(mapFieldFromNoir(readRequest.value), mapNumberFromNoir(readRequest.counter)); } -/** - * Maps a ReadRequestContext to a noir ReadRequestContext. - * @param readRequestContext - The read request context. - * @returns The noir ReadRequestContext. - */ -export function mapReadRequestContextToNoir(readRequestContext: ReadRequestContext): ReadRequestContextNoir { +function mapScopedReadRequestToNoir(scopedReadRequest: ScopedReadRequest): ScopedReadRequestNoir { return { - value: mapFieldToNoir(readRequestContext.value), - counter: mapNumberToNoir(readRequestContext.counter), - contract_address: mapAztecAddressToNoir(readRequestContext.contractAddress), + read_request: mapReadRequestToNoir(scopedReadRequest.readRequest), + contract_address: mapAztecAddressToNoir(scopedReadRequest.contractAddress), }; } @@ -647,11 +638,10 @@ export function mapReadRequestContextToNoir(readRequestContext: ReadRequestConte * @param readRequest - The noir ReadRequest. * @returns The TS ReadRequest. */ -export function mapReadRequestContextFromNoir(readRequestContext: ReadRequestContextNoir): ReadRequestContext { - return new ReadRequestContext( - mapFieldFromNoir(readRequestContext.value), - mapNumberFromNoir(readRequestContext.counter), - mapAztecAddressFromNoir(readRequestContext.contract_address), +export function mapScopedReadRequestFromNoir(scoped: ScopedReadRequestNoir): ScopedReadRequest { + return new ScopedReadRequest( + mapReadRequestFromNoir(scoped.read_request), + mapAztecAddressFromNoir(scoped.contract_address), ); } @@ -683,32 +673,20 @@ export function mapNullifierKeyValidationRequestFromNoir( ); } -/** - * Maps a NullifierKeyValidationRequest to a noir NullifierKeyValidationRequest. - * @param request - The NullifierKeyValidationRequest. - * @returns The noir NullifierKeyValidationRequest. 
- */ -export function mapNullifierKeyValidationRequestContextToNoir( - request: NullifierKeyValidationRequestContext, -): NullifierKeyValidationRequestContextNoir { +function mapScopedNullifierKeyValidationRequestToNoir( + request: ScopedNullifierKeyValidationRequest, +): ScopedNullifierKeyValidationRequestNoir { return { - master_nullifier_public_key: mapPointToNoir(request.masterNullifierPublicKey), - app_nullifier_secret_key: mapFieldToNoir(request.appNullifierSecretKey), + request: mapNullifierKeyValidationRequestToNoir(request.request), contract_address: mapAztecAddressToNoir(request.contractAddress), }; } -/** - * Maps a noir NullifierKeyValidationRequestContext to NullifierKeyValidationRequestContext. - * @param request - The noir NullifierKeyValidationRequestContext. - * @returns The TS NullifierKeyValidationRequestContext. - */ -export function mapNullifierKeyValidationRequestContextFromNoir( - request: NullifierKeyValidationRequestContextNoir, -): NullifierKeyValidationRequestContext { - return new NullifierKeyValidationRequestContext( - mapPointFromNoir(request.master_nullifier_public_key), - mapFieldFromNoir(request.app_nullifier_secret_key), +function mapScopedNullifierKeyValidationRequestFromNoir( + request: ScopedNullifierKeyValidationRequestNoir, +): ScopedNullifierKeyValidationRequest { + return new ScopedNullifierKeyValidationRequest( + mapNullifierKeyValidationRequestFromNoir(request.request), mapAztecAddressFromNoir(request.contract_address), ); } @@ -722,6 +700,29 @@ export function mapL2ToL1MessageToNoir(message: L2ToL1Message): L2ToL1MessageNoi return { recipient: mapEthAddressToNoir(message.recipient), content: mapFieldToNoir(message.content), + counter: mapNumberToNoir(message.counter), + }; +} + +function mapL2ToL1MessageFromNoir(message: L2ToL1MessageNoir) { + return new L2ToL1Message( + mapEthAddressFromNoir(message.recipient), + mapFieldFromNoir(message.content), + mapNumberFromNoir(message.counter), + ); +} + +function 
mapScopedL2ToL1MessageFromNoir(message: ScopedL2ToL1MessageNoir) { + return new ScopedL2ToL1Message( + mapL2ToL1MessageFromNoir(message.message), + mapAztecAddressFromNoir(message.contract_address), + ); +} + +function mapScopedL2ToL1MessageToNoir(message: ScopedL2ToL1Message): ScopedL2ToL1MessageNoir { + return { + message: mapL2ToL1MessageToNoir(message.message), + contract_address: mapAztecAddressToNoir(message.contractAddress), }; } @@ -775,20 +776,6 @@ export function mapPrivateCallStackItemToNoir(privateCallStackItem: PrivateCallS }; } -/** - * Maps a function leaf membership witness to a noir function leaf membership witness. - * @param membershipWitness - The membership witness. - * @returns The noir function leaf membership witness. - */ -function mapFunctionLeafMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): FunctionLeafMembershipWitnessNoir { - return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), - }; -} - /** * Maps a private call data to a noir private call data. * @param privateCallData - The private call data. 
@@ -801,9 +788,7 @@ export function mapPrivateCallDataToNoir(privateCallData: PrivateCallData): Priv public_call_stack: mapTuple(privateCallData.publicCallStack, mapCallRequestToNoir), proof: {}, vk: {}, - function_leaf_membership_witness: mapFunctionLeafMembershipWitnessToNoir( - privateCallData.functionLeafMembershipWitness, - ), + function_leaf_membership_witness: mapMembershipWitnessToNoir(privateCallData.functionLeafMembershipWitness), contract_class_artifact_hash: mapFieldToNoir(privateCallData.contractClassArtifactHash), contract_class_public_bytecode_commitment: mapFieldToNoir(privateCallData.contractClassPublicBytecodeCommitment), public_keys_hash: mapWrappedFieldToNoir(privateCallData.publicKeysHash), @@ -931,7 +916,7 @@ function mapNoteHashSettledReadHintToNoir( ): NoteHashSettledReadHintNoir { return { read_request_index: mapNumberToNoir(hint.readRequestIndex), - membership_witness: mapNoteHashMembershipWitnessToNoir(hint.membershipWitness), + membership_witness: mapMembershipWitnessToNoir(hint.membershipWitness), leaf_preimage: mapNoteHashLeafPreimageToNoir(hint.leafPreimage), }; } @@ -941,7 +926,7 @@ function mapNullifierSettledReadHintToNoir( ): NullifierSettledReadHintNoir { return { read_request_index: mapNumberToNoir(hint.readRequestIndex), - membership_witness: mapNullifierMembershipWitnessToNoir(hint.membershipWitness), + membership_witness: mapMembershipWitnessToNoir(hint.membershipWitness), leaf_preimage: mapNullifierLeafPreimageToNoir(hint.leafPreimage), }; } @@ -967,7 +952,7 @@ function mapNullifierNonMembershipHintToNoir( ): NullifierNonMembershipHintNoir { return { low_leaf_preimage: mapNullifierLeafPreimageToNoir(hint.leafPreimage), - membership_witness: mapNullifierMembershipWitnessToNoir(hint.membershipWitness), + membership_witness: mapMembershipWitnessToNoir(hint.membershipWitness), }; } @@ -987,7 +972,7 @@ function mapPublicDataHintToNoir(hint: PublicDataHint): PublicDataHintNoir { leaf_slot: mapFieldToNoir(hint.leafSlot), value: 
mapFieldToNoir(hint.value), override_counter: mapNumberToNoir(hint.overrideCounter), - membership_witness: mapPublicDataMembershipWitnessToNoir(hint.membershipWitness), + membership_witness: mapMembershipWitnessToNoir(hint.membershipWitness), leaf_preimage: mapPublicDataTreePreimageToNoir(hint.leafPreimage), }; } @@ -1003,15 +988,15 @@ function mapPublicDataReadRequestHintsToNoir(hints: PublicDataReadRequestHints): function mapValidationRequestsToNoir(requests: ValidationRequests): ValidationRequestsNoir { return { for_rollup: mapRollupValidationRequestsToNoir(requests.forRollup), - note_hash_read_requests: mapTuple(requests.noteHashReadRequests, mapReadRequestContextToNoir), - nullifier_read_requests: mapTuple(requests.nullifierReadRequests, mapReadRequestContextToNoir), + note_hash_read_requests: mapTuple(requests.noteHashReadRequests, mapScopedReadRequestToNoir), + nullifier_read_requests: mapTuple(requests.nullifierReadRequests, mapScopedReadRequestToNoir), nullifier_non_existent_read_requests: mapTuple( requests.nullifierNonExistentReadRequests, - mapReadRequestContextToNoir, + mapScopedReadRequestToNoir, ), nullifier_key_validation_requests: mapTuple( requests.nullifierKeyValidationRequests, - mapNullifierKeyValidationRequestContextToNoir, + mapScopedNullifierKeyValidationRequestToNoir, ), public_data_reads: mapTuple(requests.publicDataReads, mapPublicDataReadToNoir), }; @@ -1023,22 +1008,22 @@ function mapValidationRequestsFromNoir(requests: ValidationRequestsNoir): Valida mapTupleFromNoir( requests.note_hash_read_requests, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, - mapReadRequestContextFromNoir, + mapScopedReadRequestFromNoir, ), mapTupleFromNoir( requests.nullifier_read_requests, MAX_NULLIFIER_READ_REQUESTS_PER_TX, - mapReadRequestContextFromNoir, + mapScopedReadRequestFromNoir, ), mapTupleFromNoir( requests.nullifier_non_existent_read_requests, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, - mapReadRequestContextFromNoir, + mapScopedReadRequestFromNoir, ), 
mapTupleFromNoir( requests.nullifier_key_validation_requests, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, - mapNullifierKeyValidationRequestContextFromNoir, + mapScopedNullifierKeyValidationRequestFromNoir, ), mapTupleFromNoir(requests.public_data_reads, MAX_PUBLIC_DATA_READS_PER_TX, mapPublicDataReadFromNoir), ); @@ -1048,9 +1033,13 @@ export function mapPrivateAccumulatedDataFromNoir( privateAccumulatedData: PrivateAccumulatedDataNoir, ): PrivateAccumulatedData { return new PrivateAccumulatedData( - mapTupleFromNoir(privateAccumulatedData.new_note_hashes, MAX_NEW_NOTE_HASHES_PER_TX, mapNoteHashContextFromNoir), - mapTupleFromNoir(privateAccumulatedData.new_nullifiers, MAX_NEW_NULLIFIERS_PER_TX, mapNullifierFromNoir), - mapTupleFromNoir(privateAccumulatedData.new_l2_to_l1_msgs, MAX_NEW_L2_TO_L1_MSGS_PER_TX, mapFieldFromNoir), + mapTupleFromNoir(privateAccumulatedData.new_note_hashes, MAX_NEW_NOTE_HASHES_PER_TX, mapScopedNoteHashFromNoir), + mapTupleFromNoir(privateAccumulatedData.new_nullifiers, MAX_NEW_NULLIFIERS_PER_TX, mapScopedNullifierFromNoir), + mapTupleFromNoir( + privateAccumulatedData.new_l2_to_l1_msgs, + MAX_NEW_L2_TO_L1_MSGS_PER_TX, + mapScopedL2ToL1MessageFromNoir, + ), mapTupleFromNoir(privateAccumulatedData.encrypted_logs_hashes, MAX_ENCRYPTED_LOGS_PER_TX, mapSideEffectFromNoir), mapTupleFromNoir( privateAccumulatedData.unencrypted_logs_hashes, @@ -1074,9 +1063,9 @@ export function mapPrivateAccumulatedDataFromNoir( export function mapPrivateAccumulatedDataToNoir(data: PrivateAccumulatedData): PrivateAccumulatedDataNoir { return { - new_note_hashes: mapTuple(data.newNoteHashes, mapNoteHashContextToNoir), - new_nullifiers: mapTuple(data.newNullifiers, mapNullifierToNoir), - new_l2_to_l1_msgs: mapTuple(data.newL2ToL1Msgs, mapFieldToNoir), + new_note_hashes: mapTuple(data.newNoteHashes, mapScopedNoteHashToNoir), + new_nullifiers: mapTuple(data.newNullifiers, mapScopedNullifierToNoir), + new_l2_to_l1_msgs: mapTuple(data.newL2ToL1Msgs, 
mapScopedL2ToL1MessageToNoir), encrypted_logs_hashes: mapTuple(data.encryptedLogsHashes, mapSideEffectToNoir), unencrypted_logs_hashes: mapTuple(data.unencryptedLogsHashes, mapSideEffectToNoir), encrypted_log_preimages_length: mapFieldToNoir(data.encryptedLogPreimagesLength), @@ -1412,8 +1401,8 @@ export function mapPrivateKernelInnerCircuitPrivateInputsToNoir( function mapPrivateKernelTailOutputsToNoir(inputs: PrivateKernelTailOutputs): PrivateKernelTailOutputsNoir { return { - note_hashes: mapTuple(inputs.noteHashes, mapNoteHashContextToNoir), - nullifiers: mapTuple(inputs.nullifiers, mapNullifierToNoir), + note_hashes: mapTuple(inputs.noteHashes, mapScopedNoteHashToNoir), + nullifiers: mapTuple(inputs.nullifiers, mapScopedNullifierToNoir), }; } @@ -1427,9 +1416,9 @@ function mapPrivateKernelTailHintsToNoir(inputs: PrivateKernelTailHints): Privat note_hash_read_request_hints: mapNoteHashReadRequestHintsToNoir(inputs.noteHashReadRequestHints), nullifier_read_request_hints: mapNullifierReadRequestHintsToNoir(inputs.nullifierReadRequestHints), master_nullifier_secret_keys: mapTuple(inputs.masterNullifierSecretKeys, mapGrumpkinPrivateKeyToNoir), - sorted_new_note_hashes: mapTuple(inputs.sortedNewNoteHashes, mapNoteHashContextToNoir), + sorted_new_note_hashes: mapTuple(inputs.sortedNewNoteHashes, mapScopedNoteHashToNoir), sorted_new_note_hashes_indexes: mapTuple(inputs.sortedNewNoteHashesIndexes, mapNumberToNoir), - sorted_new_nullifiers: mapTuple(inputs.sortedNewNullifiers, mapNullifierToNoir), + sorted_new_nullifiers: mapTuple(inputs.sortedNewNullifiers, mapScopedNullifierToNoir), sorted_new_nullifiers_indexes: mapTuple(inputs.sortedNewNullifiersIndexes, mapNumberToNoir), sorted_encrypted_log_hashes: mapTuple(inputs.sortedEncryptedLogHashes, mapSideEffectToNoir), sorted_encrypted_log_hashes_indexes: mapTuple(inputs.sortedEncryptedLogHashesIndexes, mapNumberToNoir), @@ -1957,52 +1946,11 @@ export function mapNullifierLeafPreimageToNoir( }; } -function 
mapNoteHashMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): NoteHashMembershipWitnessNoir { - return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), - }; -} - -/** - * Maps a nullifier membership witness to noir. - * @param membershipWitness - The nullifier membership witness. - * @returns The noir nullifier membership witness. - */ -export function mapNullifierMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): NullifierMembershipWitnessNoir { - return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), - }; -} - -/** - * Maps a membership witness of the public data tree to noir. - */ -export function mapPublicDataMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): PublicDataMembershipWitnessNoir { - return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), - }; -} - -/** - * Maps a membership witness of the blocks tree to noir. - * @param membershipWitness - The membership witness. - * @returns The noir membership witness. 
- */ -export function mapArchiveRootMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): ArchiveRootMembershipWitnessNoir { +function mapMembershipWitnessToNoir(witness: MembershipWitness): MembershipWitnessNoir { + const siblingPath = mapTuple(witness.siblingPath, mapFieldToNoir) as FixedLengthArray; return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), + leaf_index: witness.leafIndex.toString(), + sibling_path: siblingPath, }; } @@ -2053,7 +2001,7 @@ export function mapStateDiffHintsToNoir(hints: StateDiffHints): StateDiffHintsNo nullifier_predecessor_preimages: mapTuple(hints.nullifierPredecessorPreimages, mapNullifierLeafPreimageToNoir), nullifier_predecessor_membership_witnesses: mapTuple( hints.nullifierPredecessorMembershipWitnesses, - mapNullifierMembershipWitnessToNoir, + (witness: MembershipWitness) => mapMembershipWitnessToNoir(witness), ), sorted_nullifiers: mapTuple(hints.sortedNullifiers, mapFieldToNoir), sorted_nullifier_indexes: mapTuple(hints.sortedNullifierIndexes, (index: number) => mapNumberToNoir(index)), @@ -2104,10 +2052,10 @@ export function mapBaseRollupInputsToNoir(inputs: BaseRollupInputs): BaseRollupI low_public_data_writes_witnesses: mapTuple( inputs.lowPublicDataWritesMembershipWitnesses, - mapPublicDataMembershipWitnessToNoir, + (witness: MembershipWitness) => mapMembershipWitnessToNoir(witness), ), - archive_root_membership_witness: mapArchiveRootMembershipWitnessToNoir(inputs.archiveRootMembershipWitness), + archive_root_membership_witness: mapMembershipWitnessToNoir(inputs.archiveRootMembershipWitness), constants: mapConstantRollupDataToNoir(inputs.constants), }; } diff --git a/yarn-project/prover-client/src/bb/execute.ts b/yarn-project/prover-client/src/bb/execute.ts index f53950dd0f5c..58a7cb968a61 100644 --- a/yarn-project/prover-client/src/bb/execute.ts +++ b/yarn-project/prover-client/src/bb/execute.ts @@ -50,16 +50,16 @@ export 
function executeBB( ) { return new Promise((resolve, reject) => { // spawn the bb process - const acvm = proc.spawn(pathToBB, [command, ...args]); - acvm.stdout.on('data', data => { + const bb = proc.spawn(pathToBB, [command, ...args]); + bb.stdout.on('data', data => { const message = data.toString('utf-8').replace(/\n$/, ''); logger(message); }); - acvm.stderr.on('data', data => { + bb.stderr.on('data', data => { const message = data.toString('utf-8').replace(/\n$/, ''); logger(message); }); - acvm.on('close', (code: number) => { + bb.on('close', (code: number) => { if (resultParser(code)) { resolve(BB_RESULT.SUCCESS); } else { diff --git a/yarn-project/prover-client/src/config.ts b/yarn-project/prover-client/src/config.ts index 0b8f7cce6ea1..8549e60b2aaf 100644 --- a/yarn-project/prover-client/src/config.ts +++ b/yarn-project/prover-client/src/config.ts @@ -1,9 +1,11 @@ +import { type ProverConfig } from '@aztec/circuit-types'; + import { tmpdir } from 'os'; /** * The prover configuration. */ -export interface ProverConfig { +export type ProverClientConfig = ProverConfig & { /** The working directory to use for simulation/proving */ acvmWorkingDirectory: string; /** The path to the ACVM binary */ @@ -12,29 +14,34 @@ export interface ProverConfig { bbWorkingDirectory: string; /** The path to the bb binary */ bbBinaryPath: string; - /** How many agents to start */ - proverAgents: number; /** Enable proving. If true, must set bb env vars */ realProofs: boolean; -} + /** The interval agents poll for jobs at */ + proverAgentPollInterval: number; +}; /** * Returns the prover configuration from the environment variables. * Note: If an environment variable is not set, the default value is used. * @returns The prover configuration. 
*/ -export function getProverEnvVars(): ProverConfig { +export function getProverEnvVars(): ProverClientConfig { const { ACVM_WORKING_DIRECTORY = tmpdir(), ACVM_BINARY_PATH = '', BB_WORKING_DIRECTORY = tmpdir(), BB_BINARY_PATH = '', PROVER_AGENTS = '1', + PROVER_AGENT_POLL_INTERVAL_MS = '50', PROVER_REAL_PROOFS = '', } = process.env; const parsedProverAgents = parseInt(PROVER_AGENTS, 10); const proverAgents = Number.isSafeInteger(parsedProverAgents) ? parsedProverAgents : 0; + const parsedProverAgentPollInterval = parseInt(PROVER_AGENT_POLL_INTERVAL_MS, 10); + const proverAgentPollInterval = Number.isSafeInteger(parsedProverAgentPollInterval) + ? parsedProverAgentPollInterval + : 50; return { acvmWorkingDirectory: ACVM_WORKING_DIRECTORY, @@ -43,5 +50,6 @@ export function getProverEnvVars(): ProverConfig { bbWorkingDirectory: BB_WORKING_DIRECTORY, proverAgents, realProofs: ['1', 'true'].includes(PROVER_REAL_PROOFS), + proverAgentPollInterval, }; } diff --git a/yarn-project/prover-client/src/dummy-prover.ts b/yarn-project/prover-client/src/dummy-prover.ts index e8c76d009e56..4a912e09119a 100644 --- a/yarn-project/prover-client/src/dummy-prover.ts +++ b/yarn-project/prover-client/src/dummy-prover.ts @@ -4,6 +4,7 @@ import { PROVING_STATUS, type ProcessedTx, type ProverClient, + type ProverConfig, type ProvingJob, type ProvingJobSource, type ProvingRequest, @@ -63,6 +64,10 @@ export class DummyProver implements ProverClient { getProvingJobSource(): ProvingJobSource { return this.jobs; } + + updateProverConfig(_config: Partial): Promise { + return Promise.resolve(); + } } class DummyProvingJobSource implements ProvingJobSource { diff --git a/yarn-project/prover-client/src/index.ts b/yarn-project/prover-client/src/index.ts index c47f1852f991..4331fcaff0ea 100644 --- a/yarn-project/prover-client/src/index.ts +++ b/yarn-project/prover-client/src/index.ts @@ -1,3 +1,5 @@ +export { ProverClient } from '@aztec/circuit-types'; + export * from './tx-prover/tx-prover.js'; export 
* from './config.js'; export * from './dummy-prover.js'; diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 75abb24ada64..7dcdcf6ea939 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -1,5 +1,5 @@ import { type BlockProver, type ProcessedTx, type Tx, type TxValidator } from '@aztec/circuit-types'; -import { type Gas, GlobalVariables, Header, type TxContext } from '@aztec/circuits.js'; +import { type Gas, GlobalVariables, Header, type Nullifier, type TxContext } from '@aztec/circuits.js'; import { type Fr } from '@aztec/foundation/fields'; import { type DebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; @@ -129,6 +129,7 @@ export class TestContext { _globalVariables: GlobalVariables, availableGas: Gas, _txContext: TxContext, + _pendingNullifiers: Nullifier[], transactionFee?: Fr, _sideEffectCounter?: number, ) => { @@ -166,6 +167,7 @@ export class TestContext { globalVariables: GlobalVariables, availableGas: Gas, txContext: TxContext, + pendingNullifiers: Nullifier[], transactionFee?: Fr, sideEffectCounter?: number, ) => Promise, diff --git a/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts b/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts index 1f93a17f3e83..9980a093e7ef 100644 --- a/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts +++ b/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts @@ -22,6 +22,7 @@ import type { RootRollupInputs, RootRollupPublicInputs, } from '@aztec/circuits.js'; +import { randomBytes } from '@aztec/foundation/crypto'; import { AbortedError, TimeoutError } from '@aztec/foundation/error'; import { MemoryFifo } from '@aztec/foundation/fifo'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -38,12 +39,15 @@ type ProvingJobWithResolvers = { const MAX_RETRIES = 3; 
+const defaultIdGenerator = () => randomBytes(4).toString('hex'); + export class MemoryProvingQueue implements CircuitProver, ProvingJobSource { - private jobId = 0; private log = createDebugLogger('aztec:prover-client:prover-pool:queue'); private queue = new MemoryFifo(); private jobsInProgress = new Map(); + constructor(private generateId = defaultIdGenerator) {} + async getProvingJob({ timeoutSec = 1 } = {}): Promise | undefined> { try { const job = await this.queue.get(timeoutSec); @@ -119,7 +123,7 @@ export class MemoryProvingQueue implements CircuitProver, ProvingJobSource { ): Promise> { const { promise, resolve, reject } = promiseWithResolvers>(); const item: ProvingJobWithResolvers = { - id: String(this.jobId++), + id: this.generateId(), request, signal, promise, diff --git a/yarn-project/prover-client/src/prover-pool/prover-agent.ts b/yarn-project/prover-client/src/prover-pool/prover-agent.ts index 401795969375..40ff2f22fcfe 100644 --- a/yarn-project/prover-client/src/prover-pool/prover-agent.ts +++ b/yarn-project/prover-client/src/prover-pool/prover-agent.ts @@ -52,15 +52,18 @@ export class ProverAgent { }, this.intervalMs); this.runningPromise.start(); + this.log.info('Agent started'); } async stop(): Promise { - if (!this.runningPromise) { - throw new Error('Agent is not running'); + if (!this.runningPromise?.isRunning()) { + return; } await this.runningPromise.stop(); this.runningPromise = undefined; + + this.log.info('Agent stopped'); } private work(request: ProvingRequest): Promise> { diff --git a/yarn-project/prover-client/src/prover-pool/prover-pool.ts b/yarn-project/prover-client/src/prover-pool/prover-pool.ts index 4916e3e98a49..8dc7a320ff48 100644 --- a/yarn-project/prover-client/src/prover-pool/prover-pool.ts +++ b/yarn-project/prover-client/src/prover-pool/prover-pool.ts @@ -3,7 +3,6 @@ import { sleep } from '@aztec/foundation/sleep'; import { type SimulationProvider } from '@aztec/simulator'; import { mkdtemp } from 'fs/promises'; -import { 
tmpdir } from 'os'; import { join } from 'path'; import { BBNativeRollupProver, type BBProverConfig } from '../prover/bb_prover.js'; @@ -41,7 +40,7 @@ export class ProverPool { async stop(): Promise { if (!this.running) { - throw new Error('Prover pool is not running'); + return; } for (const agent of this.agents) { @@ -51,6 +50,20 @@ export class ProverPool { this.running = false; } + async rescale(newSize: number): Promise { + if (newSize > this.size) { + this.size = newSize; + for (let i = this.agents.length; i < newSize; i++) { + this.agents.push(await this.agentFactory(i)); + } + } else if (newSize < this.size) { + this.size = newSize; + while (this.agents.length > newSize) { + await this.agents.pop()?.stop(); + } + } + } + static testPool(simulationProvider?: SimulationProvider, size = 1, agentPollIntervalMS = 10): ProverPool { return new ProverPool( size, @@ -58,22 +71,18 @@ export class ProverPool { ); } - static nativePool( - { acvmBinaryPath, bbBinaryPath }: Pick, - size: number, - agentPollIntervalMS = 10, - ): ProverPool { + static nativePool(config: Omit, size: number, agentPollIntervalMS = 10): ProverPool { // TODO generate keys ahead of time so that each agent doesn't have to do it return new ProverPool(size, async i => { const [acvmWorkingDirectory, bbWorkingDirectory] = await Promise.all([ - mkdtemp(join(tmpdir(), 'acvm-')), - mkdtemp(join(tmpdir(), 'bb-')), + mkdtemp(join(config.acvmWorkingDirectory, 'agent-')), + mkdtemp(join(config.bbWorkingDirectory, 'agent-')), ]); return new ProverAgent( await BBNativeRollupProver.new({ - acvmBinaryPath, + acvmBinaryPath: config.acvmBinaryPath, acvmWorkingDirectory, - bbBinaryPath, + bbBinaryPath: config.bbBinaryPath, bbWorkingDirectory, }), agentPollIntervalMS, diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index 88f987e81240..a34716a682da 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ 
b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -2,6 +2,7 @@ import { type ProcessedTx } from '@aztec/circuit-types'; import { type BlockResult, type ProverClient, + type ProverConfig, type ProvingJobSource, type ProvingTicket, } from '@aztec/circuit-types/interfaces'; @@ -9,7 +10,7 @@ import { type Fr, type GlobalVariables } from '@aztec/circuits.js'; import { type SimulationProvider } from '@aztec/simulator'; import { type WorldStateSynchronizer } from '@aztec/world-state'; -import { type ProverConfig } from '../config.js'; +import { type ProverClientConfig } from '../config.js'; import { type VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; import { MemoryProvingQueue } from '../prover-pool/memory-proving-queue.js'; @@ -30,6 +31,12 @@ export class TxProver implements ProverClient { this.orchestrator = new ProvingOrchestrator(worldStateSynchronizer.getLatest(), this.queue); } + async updateProverConfig(config: Partial): Promise { + if (typeof config.proverAgents === 'number') { + await this.proverPool?.rescale(config.proverAgents); + } + } + /** * Starts the prover instance */ @@ -51,7 +58,7 @@ export class TxProver implements ProverClient { * @returns An instance of the prover, constructed and started. 
*/ public static async new( - config: ProverConfig, + config: ProverClientConfig, simulationProvider: SimulationProvider, worldStateSynchronizer: WorldStateSynchronizer, ) { @@ -68,9 +75,9 @@ export class TxProver implements ProverClient { throw new Error(); } - pool = ProverPool.nativePool(config, config.proverAgents, 50); + pool = ProverPool.nativePool(config, config.proverAgents, config.proverAgentPollInterval); } else { - pool = ProverPool.testPool(simulationProvider, config.proverAgents, 50); + pool = ProverPool.testPool(simulationProvider, config.proverAgents, config.proverAgentPollInterval); } const prover = new TxProver(worldStateSynchronizer, getVerificationKeys(), pool); diff --git a/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts b/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts index 7f4fba37064c..15eed0996a9c 100644 --- a/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts +++ b/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts @@ -8,7 +8,6 @@ import { type PrivateKernelTailCircuitPublicInputs, Proof, type VERIFICATION_KEY_LENGTH_IN_FIELDS, - makeEmptyProof, } from '@aztec/circuits.js'; import { siloNoteHash } from '@aztec/circuits.js/hash'; import { randomBytes, sha256 } from '@aztec/foundation/crypto'; @@ -23,9 +22,9 @@ import { convertPrivateKernelInnerInputsToWitnessMap, convertPrivateKernelInnerOutputsFromWitnessMap, convertPrivateKernelTailForPublicOutputsFromWitnessMap, + convertPrivateKernelTailInputsToWitnessMap, convertPrivateKernelTailOutputsFromWitnessMap, - executeTail, - executeTailForPublic, + convertPrivateKernelTailToPublicInputsToWitnessMap, } from '@aztec/noir-protocol-circuits-types'; import { type ACVMField, WASMSimulator } from '@aztec/simulator'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; @@ -294,7 +293,7 @@ export async function generateKeyForNoirCircuit( await fs.writeFile(bytecodePath, bytecode); // args are the output 
path and the input bytecode path - const args = ['-o', outputPath, '-b', bytecodePath]; + const args = ['-o', `${outputPath}/${VK_FILENAME}`, '-b', bytecodePath]; const timer = new Timer(); let result = await executeBB(pathToBB, `write_${key}`, args, log); // If we succeeded and the type of key if verification, have bb write the 'fields' version too @@ -468,25 +467,12 @@ export class BBNativeProofCreator implements ProofCreator { public async createProofTail( inputs: PrivateKernelTailCircuitPrivateInputs, ): Promise> { - // if (!inputs.isForPublic()) { - // const witnessMap = convertPrivateKernelTailInputsToWitnessMap(inputs); - // return await this.createSafeProof(witnessMap, 'PrivateKernelTailArtifact'); - // } - if (!inputs.isForPublic()) { - const result = await executeTail(inputs); - return { - publicInputs: result, - proof: makeEmptyProof(), - }; + const witnessMap = convertPrivateKernelTailInputsToWitnessMap(inputs); + return await this.createSafeProof(witnessMap, 'PrivateKernelTailArtifact'); } - // const witnessMap = convertPrivateKernelTailToPublicInputsToWitnessMap(inputs); - // return await this.createSafeProof(witnessMap, 'PrivateKernelTailToPublicArtifact'); - const result = await executeTailForPublic(inputs); - return { - publicInputs: result, - proof: makeEmptyProof(), - }; + const witnessMap = convertPrivateKernelTailToPublicInputsToWitnessMap(inputs); + return await this.createSafeProof(witnessMap, 'PrivateKernelTailToPublicArtifact'); } public async createAppCircuitProof(partialWitness: Map, bytecode: Buffer): Promise { diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts index 6c9bb7f20657..58f29d4ceda2 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts @@ -6,11 +6,11 @@ import { MAX_NEW_NOTE_HASHES_PER_TX, MembershipWitness, NoteHash, - NoteHashContext, PrivateCallStackItem, 
PrivateCircuitPublicInputs, PrivateKernelCircuitPublicInputs, PrivateKernelTailCircuitPublicInputs, + ScopedNoteHash, type TxRequest, VK_TREE_HEIGHT, VerificationKey, @@ -35,6 +35,8 @@ describe('Kernel Prover', () => { let prover: KernelProver; let dependencies: { [name: string]: string[] } = {}; + const contractAddress = AztecAddress.fromBigInt(987654n); + const notesAndSlots: NoteAndSlot[] = Array(10) .fill(null) .map(() => ({ @@ -78,9 +80,12 @@ describe('Kernel Prover', () => { const createProofOutput = (newNoteIndices: number[]) => { const publicInputs = PrivateKernelCircuitPublicInputs.empty(); - const noteHashes = makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext.empty); + const noteHashes = makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash.empty); for (let i = 0; i < newNoteIndices.length; i++) { - noteHashes[i] = new NoteHashContext(generateFakeSiloedCommitment(notesAndSlots[newNoteIndices[i]]), 0, 0); + noteHashes[i] = new NoteHash(generateFakeSiloedCommitment(notesAndSlots[newNoteIndices[i]]), 0).scope( + 0, + contractAddress, + ); } publicInputs.end.newNoteHashes = noteHashes; diff --git a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts index ffa4eef1b9d8..efef31f153a5 100644 --- a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts +++ b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts @@ -9,11 +9,11 @@ import { type MAX_UNENCRYPTED_LOGS_PER_TX, MembershipWitness, NULLIFIER_TREE_HEIGHT, - type Nullifier, - type NullifierKeyValidationRequestContext, type PrivateKernelCircuitPublicInputs, PrivateKernelTailHints, - type ReadRequestContext, + type ScopedNullifier, + type ScopedNullifierKeyValidationRequest, + type ScopedReadRequest, type SideEffect, type SideEffectType, buildNoteHashReadRequestHints, @@ -49,8 +49,8 @@ 
function sortSideEffects( } function getNullifierReadRequestHints( - nullifierReadRequests: Tuple, - nullifiers: Tuple, + nullifierReadRequests: Tuple, + nullifiers: Tuple, oracle: ProvingDataOracle, ) { const getNullifierMembershipWitness = async (nullifier: Fr) => { @@ -75,14 +75,14 @@ function getNullifierReadRequestHints( async function getMasterNullifierSecretKeys( nullifierKeyValidationRequests: Tuple< - NullifierKeyValidationRequestContext, + ScopedNullifierKeyValidationRequest, typeof MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX >, oracle: ProvingDataOracle, ) { const keys = makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, GrumpkinScalar.zero); for (let i = 0; i < nullifierKeyValidationRequests.length; ++i) { - const request = nullifierKeyValidationRequests[i]; + const request = nullifierKeyValidationRequests[i].request; if (request.isEmpty()) { break; } diff --git a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_outputs.ts b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_outputs.ts index ab2594b7b625..91d8cd8be0a3 100644 --- a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_outputs.ts +++ b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_outputs.ts @@ -1,28 +1,28 @@ import { MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, - NoteHashContext, - Nullifier, PrivateKernelTailOutputs, + ScopedNoteHash, + ScopedNullifier, } from '@aztec/circuits.js'; import { padArrayEnd } from '@aztec/foundation/collection'; import { type Tuple } from '@aztec/foundation/serialize'; export function buildPrivateKernelTailOutputs( - prevNoteHashes: Tuple, - prevNullifiers: Tuple, + prevNoteHashes: Tuple, + prevNullifiers: Tuple, ) { // Propagate note hashes that are not linked to a nullifier. // Note that note hashes can't link to the first nullifier (counter == 0). 
const noteHashes = padArrayEnd( prevNoteHashes.filter(n => !n.nullifierCounter), - NoteHashContext.empty(), + ScopedNoteHash.empty(), MAX_NEW_NOTE_HASHES_PER_TX, ); const nullifiers = padArrayEnd( - prevNullifiers.filter(n => n.noteHash.isZero()), - Nullifier.empty(), + prevNullifiers.filter(n => n.nullifiedNoteHash.isZero()), + ScopedNullifier.empty(), MAX_NEW_NULLIFIERS_PER_TX, ); diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 0a81cc5b1d6e..12e540148b76 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -18,7 +18,7 @@ import { type Point, } from '@aztec/circuits.js'; import { computeL1ToL2MessageNullifier } from '@aztec/circuits.js/hash'; -import { type FunctionArtifactWithDebugMetadata, getFunctionArtifactWithDebugMetadata } from '@aztec/foundation/abi'; +import { type FunctionArtifact, getFunctionArtifact } from '@aztec/foundation/abi'; import { createDebugLogger } from '@aztec/foundation/log'; import { type DBOracle, MessageLoadOracleInputs, type NullifierKeys } from '@aztec/simulator'; import { type ContractInstance } from '@aztec/types/contracts'; @@ -107,10 +107,7 @@ export class SimulatorOracle implements DBOracle { })); } - async getFunctionArtifact( - contractAddress: AztecAddress, - selector: FunctionSelector, - ): Promise { + async getFunctionArtifact(contractAddress: AztecAddress, selector: FunctionSelector): Promise { const artifact = await this.contractDataOracle.getFunctionArtifact(contractAddress, selector); const debug = await this.contractDataOracle.getFunctionDebugMetadata(contractAddress, selector); return { @@ -122,10 +119,10 @@ export class SimulatorOracle implements DBOracle { async getFunctionArtifactByName( contractAddress: AztecAddress, functionName: string, - ): Promise { + ): Promise { const instance = await this.contractDataOracle.getContractInstance(contractAddress); const artifact = await 
this.contractDataOracle.getContractArtifact(instance.contractClassId); - return artifact && getFunctionArtifactWithDebugMetadata(artifact, functionName); + return artifact && getFunctionArtifact(artifact, functionName); } /** diff --git a/yarn-project/simulator/src/acvm/acvm.ts b/yarn-project/simulator/src/acvm/acvm.ts index d166b5d16c33..9f9edb98bd78 100644 --- a/yarn-project/simulator/src/acvm/acvm.ts +++ b/yarn-project/simulator/src/acvm/acvm.ts @@ -19,7 +19,7 @@ import { type ORACLE_NAMES } from './oracle/index.js'; */ type ACIRCallback = Record< ORACLE_NAMES, - (...args: ForeignCallInput[]) => ForeignCallOutput | Promise + (...args: ForeignCallInput[]) => void | ForeignCallOutput | Promise >; /** @@ -105,7 +105,7 @@ export async function acvm( } const result = await oracleFunction.call(callback, ...args); - return [result]; + return typeof result === 'undefined' ? [] : [result]; } catch (err) { let typedError: Error; if (err instanceof Error) { diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index b599e32aeadd..41c027ba4112 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -373,14 +373,12 @@ export class Oracle { return toACVMField(logHash); } - debugLog(...args: ACVMField[][]): ACVMField { + debugLog(...args: ACVMField[][]): void { this.log.verbose(oracleDebugCallToFormattedStr(args)); - return toACVMField(0); } - debugLogWithPrefix(arg0: ACVMField[], ...args: ACVMField[][]): ACVMField { + debugLogWithPrefix(arg0: ACVMField[], ...args: ACVMField[][]): void { this.log.verbose(`${acvmFieldMessageToString(arg0)}: ${oracleDebugCallToFormattedStr(args)}`); - return toACVMField(0); } async callPrivateFunction( diff --git a/yarn-project/simulator/src/avm/avm_machine_state.ts b/yarn-project/simulator/src/avm/avm_machine_state.ts index eeb490b291b8..ca4b5e72056b 100644 --- a/yarn-project/simulator/src/avm/avm_machine_state.ts +++ 
b/yarn-project/simulator/src/avm/avm_machine_state.ts @@ -138,12 +138,12 @@ export class AvmMachineState { let revertReason = undefined; if (this.reverted && this.output.length > 0) { try { + // We remove the first element which is the 'error selector'. + const revertOutput = this.output.slice(1); // Try to interpret the output as a text string. - revertReason = new Error( - 'Reverted with output: ' + String.fromCharCode(...this.output.slice(1).map(fr => fr.toNumber())), - ); + revertReason = new Error('Assertion failed: ' + String.fromCharCode(...revertOutput.map(fr => fr.toNumber()))); } catch (e) { - revertReason = new Error('Reverted with non-string output'); + revertReason = new Error(''); } } return new AvmContractCallResults(this.reverted, this.output, revertReason); diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index a625e63697fc..9e21712d0170 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -111,7 +111,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const results = await new AvmSimulator(context).executeBytecode(bytecode); expect(results.reverted).toBe(true); - expect(results.revertReason?.message).toEqual("Reverted with output: Nullifier doesn't exist!"); + expect(results.revertReason?.message).toEqual("Assertion failed: Nullifier doesn't exist!"); expect(results.output).toEqual([ new Fr(0), ...[..."Nullifier doesn't exist!"].flatMap(c => new Fr(c.charCodeAt(0))), @@ -842,7 +842,23 @@ describe('AVM simulator: transpiled Noir contracts', () => { const results = await new AvmSimulator(context).executeBytecode(callBytecode); expect(results.reverted).toBe(true); // The outer call should revert. 
- expect(results.revertReason?.message).toMatch(/Nested static call failed/); + expect(results.revertReason?.message).toEqual('Static calls cannot alter storage'); + }); + + it(`Nested calls rethrow exceptions`, async () => { + const calldata: Fr[] = [new Fr(1), new Fr(2)]; + const callBytecode = getAvmNestedCallsTestContractBytecode('nested_call_to_add'); + // We actually don't pass the function ADD, but it's ok because the signature is the same. + const nestedBytecode = getAvmNestedCallsTestContractBytecode('assert_same'); + const context = initContext({ env: initExecutionEnvironment({ calldata }) }); + jest + .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') + .mockReturnValue(Promise.resolve(nestedBytecode)); + + const results = await new AvmSimulator(context).executeBytecode(callBytecode); + + expect(results.reverted).toBe(true); // The outer call should revert. + expect(results.revertReason?.message).toEqual('Assertion failed: Values are not equal'); }); }); }); diff --git a/yarn-project/simulator/src/avm/journal/journal.test.ts b/yarn-project/simulator/src/avm/journal/journal.test.ts index 8a42f1e6796e..ea08c9a63871 100644 --- a/yarn-project/simulator/src/avm/journal/journal.test.ts +++ b/yarn-project/simulator/src/avm/journal/journal.test.ts @@ -153,7 +153,7 @@ describe('journal', () => { journal.writeL1Message(recipient, msgHash); const journalUpdates = journal.flush(); - expect(journalUpdates.newL1Messages).toEqual([{ recipient, content: msgHash }]); + expect(journalUpdates.newL1Messages).toEqual([expect.objectContaining({ recipient, content: msgHash })]); }); }); @@ -260,8 +260,8 @@ describe('journal', () => { ), ]); expect(journalUpdates.newL1Messages).toEqual([ - { recipient, content: commitment }, - { recipient, content: commitmentT1 }, + expect.objectContaining({ recipient, content: commitment }), + expect.objectContaining({ recipient, content: commitmentT1 }), ]); expect(journalUpdates.nullifierChecks).toEqual([ 
expect.objectContaining({ nullifier: commitment, exists: true }), @@ -403,7 +403,7 @@ describe('journal', () => { Buffer.concat(log.data.map(f => f.toBuffer())), ), ]); - expect(journalUpdates.newL1Messages).toEqual([{ recipient, content: commitment }]); + expect(journalUpdates.newL1Messages).toEqual([expect.objectContaining({ recipient, content: commitment })]); }); it('Can fork and merge journals', () => { diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index 175ed4db2d19..7bea5f1c42a3 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ -50,6 +50,8 @@ export type JournalData = { newLogsHashes: TracedUnencryptedL2Log[]; /** contract address -\> key -\> value */ currentStorageValue: Map>; + + sideEffectCounter: number; }; // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit @@ -143,6 +145,15 @@ export class AvmPersistableStateManager { this.publicStorage.write(storageAddress, slot, value); // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit + // The current info to the kernel clears any previous read or write request. 
+ this.transitionalExecutionResult.contractStorageReads = + this.transitionalExecutionResult.contractStorageReads.filter( + read => !read.storageSlot.equals(slot) || !read.contractAddress!.equals(storageAddress), + ); + this.transitionalExecutionResult.contractStorageUpdateRequests = + this.transitionalExecutionResult.contractStorageUpdateRequests.filter( + update => !update.storageSlot.equals(slot) || !update.contractAddress!.equals(storageAddress), + ); this.transitionalExecutionResult.contractStorageUpdateRequests.push( new ContractStorageUpdateRequest(slot, value, this.trace.accessCounter, storageAddress), ); @@ -159,16 +170,24 @@ export class AvmPersistableStateManager { * @returns the latest value written to slot, or 0 if never written to before */ public async readStorage(storageAddress: Fr, slot: Fr): Promise { - const [exists, value] = await this.publicStorage.read(storageAddress, slot); - this.log.debug(`storage(${storageAddress})@${slot} ?? value: ${value}, exists: ${exists}.`); + const { value, exists, cached } = await this.publicStorage.read(storageAddress, slot); + this.log.debug(`storage(${storageAddress})@${slot} ?? value: ${value}, exists: ${exists}, cached: ${cached}.`); // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit - this.transitionalExecutionResult.contractStorageReads.push( - new ContractStorageRead(slot, value, this.trace.accessCounter, storageAddress), - ); + // The current info to the kernel kernel does not consider cached reads. + if (!cached) { + // The current info to the kernel removes any previous reads to the same slot. 
+ this.transitionalExecutionResult.contractStorageReads = + this.transitionalExecutionResult.contractStorageReads.filter( + read => !read.storageSlot.equals(slot) || !read.contractAddress!.equals(storageAddress), + ); + this.transitionalExecutionResult.contractStorageReads.push( + new ContractStorageRead(slot, value, this.trace.accessCounter, storageAddress), + ); + } // We want to keep track of all performed reads (even reverted ones) - this.trace.tracePublicStorageRead(storageAddress, slot, value, exists); + this.trace.tracePublicStorageRead(storageAddress, slot, value, exists, cached); return Promise.resolve(value); } @@ -266,7 +285,7 @@ export class AvmPersistableStateManager { public writeL1Message(recipient: EthAddress | Fr, content: Fr) { this.log.debug(`L1Messages(${recipient}) += ${content}.`); const recipientAddress = recipient instanceof EthAddress ? recipient : EthAddress.fromField(recipient); - const message = new L2ToL1Message(recipientAddress, content); + const message = new L2ToL1Message(recipientAddress, content, 0); this.newL1Messages.push(message); // TRANSITIONAL: This should be removed once the kernel handles and entire enqueued call per circuit @@ -348,6 +367,7 @@ export class AvmPersistableStateManager { currentStorageValue: this.publicStorage.getCache().cachePerContract, storageReads: this.trace.publicStorageReads, storageWrites: this.trace.publicStorageWrites, + sideEffectCounter: this.trace.accessCounter, }; } } diff --git a/yarn-project/simulator/src/avm/journal/nullifiers.ts b/yarn-project/simulator/src/avm/journal/nullifiers.ts index f8374f6f8a76..99d12757e60e 100644 --- a/yarn-project/simulator/src/avm/journal/nullifiers.ts +++ b/yarn-project/simulator/src/avm/journal/nullifiers.ts @@ -1,3 +1,4 @@ +import { AztecAddress } from '@aztec/circuits.js'; import { siloNullifier } from '@aztec/circuits.js/hash'; import { Fr } from '@aztec/foundation/fields'; @@ -10,7 +11,7 @@ import type { CommitmentsDB } from '../../index.js'; */ export class 
Nullifiers { /** Cached nullifiers. */ - private cache: NullifierCache; + public cache: NullifierCache; /** Parent's nullifier cache. Checked on cache-miss. */ private readonly parentCache: NullifierCache | undefined; /** Reference to node storage. Checked on parent cache-miss. */ @@ -95,6 +96,7 @@ export class NullifierCache { * each entry being a nullifier. */ private cachePerContract: Map> = new Map(); + private siloedNullifiers: Set = new Set(); /** * Check whether a nullifier exists in the cache. @@ -104,8 +106,10 @@ export class NullifierCache { * @returns whether the nullifier is found in the cache */ public exists(storageAddress: Fr, nullifier: Fr): boolean { - const exists = this.cachePerContract.get(storageAddress.toBigInt())?.has(nullifier.toBigInt()); - return exists ? true : false; + const exists = + this.cachePerContract.get(storageAddress.toBigInt())?.has(nullifier.toBigInt()) || + this.siloedNullifiers.has(siloNullifier(AztecAddress.fromField(storageAddress), nullifier).toBigInt()); + return !!exists; } /** @@ -115,20 +119,25 @@ export class NullifierCache { * @param nullifier - the nullifier to stage */ public append(storageAddress: Fr, nullifier: Fr) { + if (this.exists(storageAddress, nullifier)) { + throw new NullifierCollisionError( + `Nullifier ${nullifier} at contract ${storageAddress} already exists in cache.`, + ); + } + let nullifiersForContract = this.cachePerContract.get(storageAddress.toBigInt()); // If this contract's nullifier set has no cached nullifiers, create a new Set to store them if (!nullifiersForContract) { nullifiersForContract = new Set(); this.cachePerContract.set(storageAddress.toBigInt(), nullifiersForContract); } - if (nullifiersForContract.has(nullifier.toBigInt())) { - throw new NullifierCollisionError( - `Nullifier ${nullifier} at contract ${storageAddress} already exists in cache.`, - ); - } nullifiersForContract.add(nullifier.toBigInt()); } + public appendSiloed(siloedNullifier: Fr) { + 
this.siloedNullifiers.add(siloedNullifier.toBigInt()); + } + /** * Merge another cache's nullifiers into this instance's. * @@ -139,6 +148,8 @@ export class NullifierCache { * @param incomingNullifiers - the incoming cached nullifiers to merge into this instance's */ public acceptAndMerge(incomingNullifiers: NullifierCache) { + // Merge siloed nullifiers. + this.siloedNullifiers = new Set([...this.siloedNullifiers, ...incomingNullifiers.siloedNullifiers]); // Iterate over all contracts with staged writes in the child. for (const [incomingAddress, incomingCacheAtContract] of incomingNullifiers.cachePerContract) { const thisCacheAtContract = this.cachePerContract.get(incomingAddress); diff --git a/yarn-project/simulator/src/avm/journal/public_storage.test.ts b/yarn-project/simulator/src/avm/journal/public_storage.test.ts index 33c747977d72..54633e40f96b 100644 --- a/yarn-project/simulator/src/avm/journal/public_storage.test.ts +++ b/yarn-project/simulator/src/avm/journal/public_storage.test.ts @@ -19,22 +19,26 @@ describe('avm public storage', () => { const contractAddress = new Fr(1); const slot = new Fr(2); // never written! 
- const [exists, gotValue] = await publicStorage.read(contractAddress, slot); + const { exists, value: gotValue, cached } = await publicStorage.read(contractAddress, slot); // doesn't exist, value is zero expect(exists).toEqual(false); expect(gotValue).toEqual(Fr.ZERO); + expect(cached).toEqual(false); }); + it('Should cache storage write, reading works after write', async () => { const contractAddress = new Fr(1); const slot = new Fr(2); const value = new Fr(3); // Write to cache publicStorage.write(contractAddress, slot, value); - const [exists, gotValue] = await publicStorage.read(contractAddress, slot); + const { exists, value: gotValue, cached } = await publicStorage.read(contractAddress, slot); // exists because it was previously written expect(exists).toEqual(true); expect(gotValue).toEqual(value); + expect(cached).toEqual(true); }); + it('Reading works on fallback to host (gets value & exists)', async () => { const contractAddress = new Fr(1); const slot = new Fr(2); @@ -42,11 +46,13 @@ describe('avm public storage', () => { // ensure that fallback to host gets a value publicDb.storageRead.mockResolvedValue(Promise.resolve(storedValue)); - const [exists, gotValue] = await publicStorage.read(contractAddress, slot); + const { exists, value: gotValue, cached } = await publicStorage.read(contractAddress, slot); // it exists in the host, so it must've been written before expect(exists).toEqual(true); expect(gotValue).toEqual(storedValue); + expect(cached).toEqual(false); }); + it('Reading works on fallback to parent (gets value & exists)', async () => { const contractAddress = new Fr(1); const slot = new Fr(2); @@ -54,11 +60,13 @@ describe('avm public storage', () => { const childStorage = new PublicStorage(publicDb, publicStorage); publicStorage.write(contractAddress, slot, value); - const [exists, gotValue] = await childStorage.read(contractAddress, slot); + const { exists, value: gotValue, cached } = await childStorage.read(contractAddress, slot); // exists 
because it was previously written! expect(exists).toEqual(true); expect(gotValue).toEqual(value); + expect(cached).toEqual(true); }); + it('When reading from storage, should check cache, then parent, then host', async () => { // Store a different value in storage vs the cache, and make sure the cache is returned const contractAddress = new Fr(1); @@ -71,21 +79,24 @@ describe('avm public storage', () => { const childStorage = new PublicStorage(publicDb, publicStorage); // Cache miss falls back to host - const [, cacheMissResult] = await childStorage.read(contractAddress, slot); - expect(cacheMissResult).toEqual(storedValue); + const { cached: cachedCacheMiss, value: valueCacheMiss } = await childStorage.read(contractAddress, slot); + expect(valueCacheMiss).toEqual(storedValue); + expect(cachedCacheMiss).toEqual(false); // Write to storage publicStorage.write(contractAddress, slot, parentValue); // Reading from child should give value written in parent - const [, valueFromParent] = await childStorage.read(contractAddress, slot); + const { cached: cachedValueFromParent, value: valueFromParent } = await childStorage.read(contractAddress, slot); expect(valueFromParent).toEqual(parentValue); + expect(cachedValueFromParent).toEqual(true); // Now write a value directly in child childStorage.write(contractAddress, slot, cachedValue); // Reading should now give the value written in child - const [, cachedResult] = await childStorage.read(contractAddress, slot); + const { cached: cachedChild, value: cachedResult } = await childStorage.read(contractAddress, slot); expect(cachedResult).toEqual(cachedValue); + expect(cachedChild).toEqual(true); }); }); @@ -109,7 +120,7 @@ describe('avm public storage', () => { publicStorage.acceptAndMerge(childStorage); // Read from parent gives latest value written in child before merge (valueT1) - const [exists, result] = await publicStorage.read(contractAddress, slot); + const { exists, value: result } = await 
publicStorage.read(contractAddress, slot); expect(exists).toEqual(true); expect(result).toEqual(valueT1); }); diff --git a/yarn-project/simulator/src/avm/journal/public_storage.ts b/yarn-project/simulator/src/avm/journal/public_storage.ts index f6b3b7a35d86..30c59e421b31 100644 --- a/yarn-project/simulator/src/avm/journal/public_storage.ts +++ b/yarn-project/simulator/src/avm/journal/public_storage.ts @@ -1,7 +1,14 @@ +import { AztecAddress } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import type { PublicStateDB } from '../../index.js'; +type PublicStorageReadResult = { + value: Fr; + exists: boolean; + cached: boolean; +}; + /** * A class to manage public storage reads and writes during a contract call's AVM simulation. * Maintains a storage write cache, and ensures that reads fall back to the correct source. @@ -39,7 +46,8 @@ export class PublicStorage { * @param slot - the slot in the contract's storage being read from * @returns exists: whether the slot has EVER been written to before, value: the latest value written to slot, or 0 if never written to before */ - public async read(storageAddress: Fr, slot: Fr): Promise<[/*exists=*/ boolean, /*value=*/ Fr]> { + public async read(storageAddress: Fr, slot: Fr): Promise { + let cached = false; // First try check this storage cache let value = this.cache.read(storageAddress, slot); // Then try parent's storage cache (if it exists / written to earlier in this TX) @@ -49,11 +57,13 @@ export class PublicStorage { // Finally try the host's Aztec state (a trip to the database) if (!value) { value = await this.hostPublicStorage.storageRead(storageAddress, slot); + } else { + cached = true; } // if value is undefined, that means this slot has never been written to! const exists = value !== undefined; const valueOrZero = exists ? 
value : Fr.ZERO; - return Promise.resolve([exists, valueOrZero]); + return Promise.resolve({ value: valueOrZero, exists, cached }); } /** @@ -75,6 +85,17 @@ export class PublicStorage { public acceptAndMerge(incomingPublicStorage: PublicStorage) { this.cache.acceptAndMerge(incomingPublicStorage.cache); } + + /** + * Commits ALL staged writes to the host's state. + */ + public async commitToDB() { + for (const [storageAddress, cacheAtContract] of this.cache.cachePerContract) { + for (const [slot, value] of cacheAtContract) { + await this.hostPublicStorage.storageWrite(AztecAddress.fromBigInt(storageAddress), new Fr(slot), value); + } + } + } } /** diff --git a/yarn-project/simulator/src/avm/journal/trace.test.ts b/yarn-project/simulator/src/avm/journal/trace.test.ts index e19c14dc7069..f8692e01c841 100644 --- a/yarn-project/simulator/src/avm/journal/trace.test.ts +++ b/yarn-project/simulator/src/avm/journal/trace.test.ts @@ -110,7 +110,7 @@ describe('world state access trace', () => { let counter = 0; trace.tracePublicStorageWrite(contractAddress, slot, value); counter++; - trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true); + trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true, /*cached=*/ true); counter++; trace.traceNoteHashCheck(contractAddress, noteHash, noteHashExists, noteHashLeafIndex); counter++; @@ -124,7 +124,7 @@ describe('world state access trace', () => { counter++; trace.tracePublicStorageWrite(contractAddress, slot, value); counter++; - trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true); + trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true, /*cached=*/ true); counter++; trace.traceNewNoteHash(contractAddress, noteHash); counter++; @@ -178,7 +178,7 @@ describe('world state access trace', () => { }; trace.tracePublicStorageWrite(contractAddress, slot, value); - trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true); + 
trace.tracePublicStorageRead(contractAddress, slot, value, /*exists=*/ true, /*cached=*/ true); trace.traceNoteHashCheck(contractAddress, noteHash, noteHashExists, noteHashLeafIndex); trace.traceNewNoteHash(contractAddress, noteHash); trace.traceNullifierCheck(contractAddress, nullifier, nullifierExists, nullifierIsPending, nullifierLeafIndex); @@ -187,7 +187,7 @@ describe('world state access trace', () => { const childTrace = new WorldStateAccessTrace(trace); childTrace.tracePublicStorageWrite(contractAddress, slot, valueT1); - childTrace.tracePublicStorageRead(contractAddress, slot, valueT1, /*exists=*/ true); + childTrace.tracePublicStorageRead(contractAddress, slot, valueT1, /*exists=*/ true, /*cached=*/ true); childTrace.traceNoteHashCheck(contractAddress, noteHashT1, noteHashExistsT1, noteHashLeafIndexT1); childTrace.traceNewNoteHash(contractAddress, nullifierT1); childTrace.traceNullifierCheck( @@ -205,8 +205,20 @@ describe('world state access trace', () => { expect(trace.getAccessCounter()).toEqual(childCounterBeforeMerge); expect(trace.publicStorageReads).toEqual([ - expect.objectContaining({ storageAddress: contractAddress, slot: slot, value: value, exists: true }), - expect.objectContaining({ storageAddress: contractAddress, slot: slot, value: valueT1, exists: true }), + expect.objectContaining({ + storageAddress: contractAddress, + slot: slot, + value: value, + exists: true, + cached: true, + }), + expect.objectContaining({ + storageAddress: contractAddress, + slot: slot, + value: valueT1, + exists: true, + cached: true, + }), ]); expect(trace.publicStorageWrites).toEqual([ expect.objectContaining({ storageAddress: contractAddress, slot: slot, value: value }), diff --git a/yarn-project/simulator/src/avm/journal/trace.ts b/yarn-project/simulator/src/avm/journal/trace.ts index 5ca5be9dbc1c..da857f984fc3 100644 --- a/yarn-project/simulator/src/avm/journal/trace.ts +++ b/yarn-project/simulator/src/avm/journal/trace.ts @@ -36,7 +36,7 @@ export class 
WorldStateAccessTrace { return this.accessCounter; } - public tracePublicStorageRead(storageAddress: Fr, slot: Fr, value: Fr, exists: boolean) { + public tracePublicStorageRead(storageAddress: Fr, slot: Fr, value: Fr, exists: boolean, cached: boolean) { // TODO(4805): check if some threshold is reached for max storage reads // (need access to parent length, or trace needs to be initialized with parent's contents) const traced: TracedPublicStorageRead = { @@ -45,6 +45,7 @@ export class WorldStateAccessTrace { slot, value, exists, + cached, counter: new Fr(this.accessCounter), // endLifetime: Fr.ZERO, }; diff --git a/yarn-project/simulator/src/avm/journal/trace_types.ts b/yarn-project/simulator/src/avm/journal/trace_types.ts index 3c93649cefdb..4c292146a7bd 100644 --- a/yarn-project/simulator/src/avm/journal/trace_types.ts +++ b/yarn-project/simulator/src/avm/journal/trace_types.ts @@ -11,6 +11,7 @@ export type TracedPublicStorageRead = { // callPointer: Fr; storageAddress: Fr; exists: boolean; + cached: boolean; slot: Fr; value: Fr; counter: Fr; diff --git a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts index 88c7cadd1384..376f64a8cd95 100644 --- a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts @@ -465,7 +465,9 @@ describe('Accrued Substate', () => { ).execute(context); const journalState = context.persistableState.flush(); - expect(journalState.newL1Messages).toEqual([{ recipient: EthAddress.fromField(recipient), content }]); + expect(journalState.newL1Messages).toEqual([ + expect.objectContaining({ recipient: EthAddress.fromField(recipient), content }), + ]); }); }); diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts index ae81b5afc480..d19f5b833e07 100644 --- 
a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts @@ -205,31 +205,25 @@ describe('External Calls', () => { it('Should fail if a static call attempts to touch storage', async () => { const gasOffset = 0; - const gas = new Field(0); - const addrOffset = 1; + const gas = [new Field(0n), new Field(0n), new Field(0n)]; + const addrOffset = 10; const addr = new Field(123456n); - const argsOffset = 2; + const argsOffset = 20; const args = [new Field(1n), new Field(2n), new Field(3n)]; const argsSize = args.length; - const argsSizeOffset = 20; - const retOffset = 8; + const argsSizeOffset = 40; + const retOffset = 80; const retSize = 2; - const successOffset = 7; + const successOffset = 70; - context.machineState.memory.set(0, gas); - context.machineState.memory.set(1, addr); + context.machineState.memory.setSlice(gasOffset, gas); + context.machineState.memory.set(addrOffset, addr); context.machineState.memory.set(argsSizeOffset, new Uint32(argsSize)); - context.machineState.memory.setSlice(2, args); + context.machineState.memory.setSlice(argsOffset, args); const otherContextInstructions: Instruction[] = [ - new CalldataCopy( - /*indirect=*/ 0, - /*csOffset=*/ adjustCalldataIndex(0), - /*copySize=*/ argsSize, - /*dstOffset=*/ 0, - ), - new SStore(/*indirect=*/ 0, /*srcOffset=*/ 1, /*size=*/ 1, /*slotOffset=*/ 0), + new SStore(/*indirect=*/ 0, /*srcOffset=*/ 0, /*size=*/ 0, /*slotOffset=*/ 0), ]; const otherContextInstructionsBytecode = markBytecodeAsAvm(encodeToBytecode(otherContextInstructions)); @@ -249,11 +243,7 @@ describe('External Calls', () => { successOffset, /*temporaryFunctionSelectorOffset=*/ 0, ); - await instruction.execute(context); - - // No revert has occurred, but the nested execution has failed - const successValue = context.machineState.memory.get(successOffset); - expect(successValue).toEqual(new Uint8(0n)); + await expect(() => 
instruction.execute(context)).rejects.toThrow(/Static calls cannot alter storage/); }); }); @@ -315,7 +305,7 @@ describe('External Calls', () => { expect(context.machineState.halted).toBe(true); expect(context.machineState.getResults()).toEqual({ reverted: true, - revertReason: new Error('Reverted with output: assert message'), + revertReason: new Error('Assertion failed: assert message'), output: returnData.map(f => f.toFr()), }); }); diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.ts index 2fa2f02ddfc5..1cf06ce5fcb2 100644 --- a/yarn-project/simulator/src/avm/opcodes/external_calls.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.ts @@ -7,6 +7,7 @@ import { gasLeftToGas, sumGas } from '../avm_gas.js'; import { Field, Uint8 } from '../avm_memory_types.js'; import { type AvmContractCallResults } from '../avm_message_call_result.js'; import { AvmSimulator } from '../avm_simulator.js'; +import { AvmExecutionError } from '../errors.js'; import { Opcode, OperandType } from '../serialization/instruction_serialization.js'; import { Addressing } from './addressing_mode.js'; import { Instruction } from './instruction.js'; @@ -99,6 +100,18 @@ abstract class ExternalCall extends Instruction { const success = !nestedCallResults.reverted; + // TRANSITIONAL: We rethrow here so that the MESSAGE gets propagated. 
+ if (!success) { + class RethrownError extends AvmExecutionError { + constructor(message: string) { + super(message); + this.name = 'RethrownError'; + } + } + + throw new RethrownError(nestedCallResults.revertReason?.message || 'Unknown nested call error'); + } + // We only take as much data as was specified in the return size and pad with zeroes if the return data is smaller // than the specified size in order to prevent that memory to be left with garbage const returnData = nestedCallResults.output.slice(0, this.retSize); diff --git a/yarn-project/simulator/src/client/db_oracle.ts b/yarn-project/simulator/src/client/db_oracle.ts index eb545df6e118..a7e78619eb16 100644 --- a/yarn-project/simulator/src/client/db_oracle.ts +++ b/yarn-project/simulator/src/client/db_oracle.ts @@ -6,7 +6,7 @@ import { type PublicDataWitness, } from '@aztec/circuit-types'; import { type CompleteAddress, type Header } from '@aztec/circuits.js'; -import { type FunctionArtifactWithDebugMetadata, type FunctionSelector } from '@aztec/foundation/abi'; +import { type FunctionArtifact, type FunctionSelector } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { type Fr, type Point } from '@aztec/foundation/fields'; import { type ContractInstance } from '@aztec/types/contracts'; @@ -102,10 +102,7 @@ export interface DBOracle extends CommitmentsDB { * @param selector - The corresponding function selector. * @returns A Promise that resolves to a FunctionArtifact object. */ - getFunctionArtifact( - contractAddress: AztecAddress, - selector: FunctionSelector, - ): Promise; + getFunctionArtifact(contractAddress: AztecAddress, selector: FunctionSelector): Promise; /** * Retrieves the artifact of a specified function within a given contract. @@ -115,10 +112,7 @@ export interface DBOracle extends CommitmentsDB { * @param functionName - The name of the function. * @returns The corresponding function's artifact as an object. 
*/ - getFunctionArtifactByName( - contractAddress: AztecAddress, - functionName: string, - ): Promise; + getFunctionArtifactByName(contractAddress: AztecAddress, functionName: string): Promise; /** * Gets the index of a nullifier in the nullifier tree. diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index f82a4c265c8a..3b93537e10c2 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -29,13 +29,7 @@ import { } from '@aztec/circuits.js'; import { computeCommitmentNonce, computeSecretHash, computeVarArgsHash } from '@aztec/circuits.js/hash'; import { makeHeader } from '@aztec/circuits.js/testing'; -import { - type FunctionArtifact, - FunctionSelector, - encodeArguments, - getFunctionArtifact, - getFunctionArtifactWithSelector, -} from '@aztec/foundation/abi'; +import { type FunctionArtifact, FunctionSelector, encodeArguments, getFunctionArtifact } from '@aztec/foundation/abi'; import { asyncMap } from '@aztec/foundation/async-map'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { times } from '@aztec/foundation/collection'; @@ -871,7 +865,7 @@ describe('Private Execution test suite', () => { beforeEach(() => { oracle.getFunctionArtifact.mockImplementation((_, selector) => - Promise.resolve(getFunctionArtifactWithSelector(PendingNoteHashesContractArtifact, selector)), + Promise.resolve(getFunctionArtifact(PendingNoteHashesContractArtifact, selector)), ); oracle.getFunctionArtifactByName.mockImplementation((_, functionName: string) => Promise.resolve(getFunctionArtifact(PendingNoteHashesContractArtifact, functionName)), diff --git a/yarn-project/simulator/src/client/private_execution.ts b/yarn-project/simulator/src/client/private_execution.ts index 737d58205483..b787aa24544a 100644 --- a/yarn-project/simulator/src/client/private_execution.ts +++ 
b/yarn-project/simulator/src/client/private_execution.ts @@ -1,5 +1,5 @@ import { type FunctionData, PrivateCallStackItem, PrivateCircuitPublicInputs } from '@aztec/circuits.js'; -import { type FunctionArtifactWithDebugMetadata } from '@aztec/foundation/abi'; +import { type FunctionArtifact } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -15,7 +15,7 @@ import { AcirSimulator } from './simulator.js'; */ export async function executePrivateFunction( context: ClientExecutionContext, - artifact: FunctionArtifactWithDebugMetadata, + artifact: FunctionArtifact, contractAddress: AztecAddress, functionData: FunctionData, log = createDebugLogger('aztec:simulator:secret_execution'), diff --git a/yarn-project/simulator/src/client/simulator.test.ts b/yarn-project/simulator/src/client/simulator.test.ts index ef9fd366291b..24211b5f35aa 100644 --- a/yarn-project/simulator/src/client/simulator.test.ts +++ b/yarn-project/simulator/src/client/simulator.test.ts @@ -6,11 +6,7 @@ import { computeUniqueNoteHash, siloNoteHash, } from '@aztec/circuits.js/hash'; -import { - ABIParameterVisibility, - type FunctionArtifactWithDebugMetadata, - getFunctionArtifact, -} from '@aztec/foundation/abi'; +import { ABIParameterVisibility, type FunctionArtifact, getFunctionArtifact } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { poseidon2Hash } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; @@ -100,7 +96,7 @@ describe('Simulator', () => { it('throw if "compute_note_hash_and_nullifier" has the wrong number of parameters', async () => { const note = createNote(); - const modifiedArtifact: FunctionArtifactWithDebugMetadata = { + const modifiedArtifact: FunctionArtifact = { ...artifact, parameters: artifact.parameters.slice(1), }; @@ -119,7 +115,7 @@ describe('Simulator', () => { const note = 
createNote(); const wrongPreimageLength = note.length - 1; - const modifiedArtifact: FunctionArtifactWithDebugMetadata = { + const modifiedArtifact: FunctionArtifact = { ...artifact, parameters: [ ...artifact.parameters.slice(0, -1), diff --git a/yarn-project/simulator/src/client/simulator.ts b/yarn-project/simulator/src/client/simulator.ts index 3afb657a725d..1fbb92ad03db 100644 --- a/yarn-project/simulator/src/client/simulator.ts +++ b/yarn-project/simulator/src/client/simulator.ts @@ -2,7 +2,7 @@ import { type AztecNode, type FunctionCall, type Note, type TxExecutionRequest } import { CallContext, FunctionData } from '@aztec/circuits.js'; import { type ArrayType, - type FunctionArtifactWithDebugMetadata, + type FunctionArtifact, FunctionSelector, FunctionType, encodeArguments, @@ -65,7 +65,7 @@ export class AcirSimulator { */ public async run( request: TxExecutionRequest, - entryPointArtifact: FunctionArtifactWithDebugMetadata, + entryPointArtifact: FunctionArtifact, contractAddress: AztecAddress, msgSender = AztecAddress.ZERO, ): Promise { @@ -129,7 +129,7 @@ export class AcirSimulator { */ public async runUnconstrained( request: FunctionCall, - entryPointArtifact: FunctionArtifactWithDebugMetadata, + entryPointArtifact: FunctionArtifact, contractAddress: AztecAddress, ) { if (entryPointArtifact.functionType !== FunctionType.UNCONSTRAINED) { @@ -167,7 +167,7 @@ export class AcirSimulator { noteTypeId: Fr, note: Note, ) { - const artifact: FunctionArtifactWithDebugMetadata | undefined = await this.db.getFunctionArtifactByName( + const artifact: FunctionArtifact | undefined = await this.db.getFunctionArtifactByName( contractAddress, 'compute_note_hash_and_nullifier', ); diff --git a/yarn-project/simulator/src/client/unconstrained_execution.ts b/yarn-project/simulator/src/client/unconstrained_execution.ts index d821ca9fea9c..9b42a556009f 100644 --- a/yarn-project/simulator/src/client/unconstrained_execution.ts +++ 
b/yarn-project/simulator/src/client/unconstrained_execution.ts @@ -1,5 +1,5 @@ import { type FunctionData } from '@aztec/circuits.js'; -import { type DecodedReturn, type FunctionArtifactWithDebugMetadata, decodeReturnValues } from '@aztec/foundation/abi'; +import { type DecodedReturn, type FunctionArtifact, decodeReturnValues } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { type Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -16,7 +16,7 @@ import { type ViewDataOracle } from './view_data_oracle.js'; */ export async function executeUnconstrainedFunction( oracle: ViewDataOracle, - artifact: FunctionArtifactWithDebugMetadata, + artifact: FunctionArtifact, contractAddress: AztecAddress, functionData: FunctionData, args: Fr[], diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index e3084193263d..06e89e93ef93 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -2,6 +2,7 @@ import { MerkleTreeId, type ProcessReturnValues, type PublicKernelRequest, + PublicKernelType, type SimulationError, type Tx, type UnencryptedFunctionL2Logs, @@ -81,6 +82,20 @@ export const PhaseIsRevertible: Record = { [PublicKernelPhase.TAIL]: false, }; +// REFACTOR: Unify both enums and move to types or circuit-types. 
+export function publicKernelPhaseToKernelType(phase: PublicKernelPhase): PublicKernelType { + switch (phase) { + case PublicKernelPhase.SETUP: + return PublicKernelType.SETUP; + case PublicKernelPhase.APP_LOGIC: + return PublicKernelType.APP_LOGIC; + case PublicKernelPhase.TEARDOWN: + return PublicKernelType.TEARDOWN; + case PublicKernelPhase.TAIL: + return PublicKernelType.TAIL; + } +} + export abstract class AbstractPhaseManager { protected hintsBuilder: HintsBuilder; protected log: DebugLogger; @@ -127,6 +142,8 @@ export abstract class AbstractPhaseManager { */ revertReason: SimulationError | undefined; returnValues: ProcessReturnValues; + /** Gas used during the execution this particular phase. */ + gasUsed: Gas | undefined; }>; public static extractEnqueuedPublicCallsByPhase( @@ -202,6 +219,7 @@ export abstract class AbstractPhaseManager { return calls; } + // REFACTOR: Do not return an array and instead return a struct with similar shape to that returned by `handle` protected async processEnqueuedPublicCalls( tx: Tx, previousPublicKernelOutput: PublicKernelCircuitPublicInputs, @@ -214,6 +232,7 @@ export abstract class AbstractPhaseManager { UnencryptedFunctionL2Logs[], SimulationError | undefined, ProcessReturnValues, + Gas, ] > { let kernelOutput = previousPublicKernelOutput; @@ -223,7 +242,7 @@ export abstract class AbstractPhaseManager { const enqueuedCalls = this.extractEnqueuedPublicCalls(tx); if (!enqueuedCalls || !enqueuedCalls.length) { - return [[], kernelOutput, kernelProof, [], undefined, undefined]; + return [[], kernelOutput, kernelProof, [], undefined, undefined, Gas.empty()]; } const newUnencryptedFunctionLogs: UnencryptedFunctionL2Logs[] = []; @@ -236,6 +255,7 @@ export abstract class AbstractPhaseManager { // and submitted separately to the base rollup? 
let returns: ProcessReturnValues = undefined; + let gasUsed = Gas.empty(); for (const enqueuedCall of enqueuedCalls) { const executionStack: (PublicExecution | PublicExecutionResult)[] = [enqueuedCall]; @@ -249,6 +269,7 @@ export abstract class AbstractPhaseManager { // TODO(6052): Extract correct new counter from nested calls const sideEffectCounter = lastSideEffectCounter(tx) + 1; const availableGas = this.getAvailableGas(tx, kernelOutput); + const pendingNullifiers = this.getSiloedPendingNullifiers(kernelOutput); const result = isExecutionRequest ? await this.publicExecutor.simulate( @@ -256,11 +277,15 @@ export abstract class AbstractPhaseManager { this.globalVariables, availableGas, tx.data.constants.txContext, + pendingNullifiers, transactionFee, sideEffectCounter, ) : current; + // Accumulate gas used in this execution + gasUsed = gasUsed.add(Gas.from(result.startGasLeft).sub(Gas.from(result.endGasLeft))); + const functionSelector = result.execution.functionData.selector.toString(); if (result.reverted && !PhaseIsRevertible[this.phase]) { this.log.debug( @@ -304,7 +329,8 @@ export abstract class AbstractPhaseManager { result.revertReason }`, ); - return [[], kernelOutput, kernelProof, [], result.revertReason, undefined]; + // TODO(@spalladino): Check gasUsed is correct. The AVM should take care of setting gasLeft to zero upon a revert. + return [[], kernelOutput, kernelProof, [], result.revertReason, undefined, gasUsed]; } if (!enqueuedExecutionResult) { @@ -320,7 +346,12 @@ export abstract class AbstractPhaseManager { // TODO(#3675): This should be done in a public kernel circuit removeRedundantPublicDataWrites(kernelOutput, this.phase); - return [publicKernelInputs, kernelOutput, kernelProof, newUnencryptedFunctionLogs, undefined, returns]; + return [publicKernelInputs, kernelOutput, kernelProof, newUnencryptedFunctionLogs, undefined, returns, gasUsed]; + } + + /** Returns all pending private and public nullifiers. 
*/ + private getSiloedPendingNullifiers(ko: PublicKernelCircuitPublicInputs) { + return [...ko.end.newNullifiers, ...ko.endNonRevertibleData.newNullifiers].filter(n => !n.isEmpty()); } protected getAvailableGas(tx: Tx, previousPublicKernelOutput: PublicKernelCircuitPublicInputs) { diff --git a/yarn-project/simulator/src/public/app_logic_phase_manager.ts b/yarn-project/simulator/src/public/app_logic_phase_manager.ts index d55c439a83c7..e5ce2e86a56b 100644 --- a/yarn-project/simulator/src/public/app_logic_phase_manager.ts +++ b/yarn-project/simulator/src/public/app_logic_phase_manager.ts @@ -47,6 +47,7 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { newUnencryptedFunctionLogs, revertReason, returnValues, + gasUsed, ] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( // if we throw for any reason other than simulation, we need to rollback and drop the TX async err => { @@ -71,6 +72,6 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { }; return request; }); - return { kernelRequests, publicKernelOutput, publicKernelProof, revertReason, returnValues }; + return { kernelRequests, publicKernelOutput, publicKernelProof, revertReason, returnValues, gasUsed }; } } diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index d00ea70af12b..899516084047 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -4,6 +4,7 @@ import { Gas, type GlobalVariables, type Header, + type Nullifier, PublicCircuitPublicInputs, type TxContext, } from '@aztec/circuits.js'; @@ -46,7 +47,7 @@ export async function executePublicFunction( } if (isAvmBytecode(bytecode)) { - return await executeTopLevelPublicFunctionAvm(context); + return await executeTopLevelPublicFunctionAvm(context, bytecode); } else { return await executePublicFunctionAcvm(context, bytecode, nested); } @@ -58,6 +59,7 @@ export 
async function executePublicFunction( */ async function executeTopLevelPublicFunctionAvm( executionContext: PublicExecutionContext, + bytecode: Buffer, ): Promise { const address = executionContext.execution.contractAddress; const selector = executionContext.execution.functionData.selector; @@ -77,6 +79,9 @@ async function executeTopLevelPublicFunctionAvm( // or modify the PersistableStateManager to manage rollbacks across enqueued-calls and transactions. const worldStateJournal = new AvmPersistableStateManager(hostStorage); const startSideEffectCounter = executionContext.execution.callContext.sideEffectCounter; + for (const nullifier of executionContext.pendingNullifiers) { + worldStateJournal.nullifiers.cache.appendSiloed(nullifier.value); + } worldStateJournal.trace.accessCounter = startSideEffectCounter; const executionEnv = createAvmExecutionEnvironment( @@ -91,7 +96,12 @@ async function executeTopLevelPublicFunctionAvm( const avmContext = new AvmContext(worldStateJournal, executionEnv, machineState); const simulator = new AvmSimulator(avmContext); - const avmResult = await simulator.execute(); + const avmResult = await simulator.executeBytecode(bytecode); + + // Commit the journals state to the DBs since this is a top-level execution. + // Observe that this will write all the state changes to the DBs, not only the latest for each slot. + // However, the underlying DB keep a cache and will only write the latest state to disk. 
+ await avmContext.persistableState.publicStorage.commitToDB(); log.verbose( `[AVM] ${address.toString()}:${selector} returned, reverted: ${avmResult.reverted}, reason: ${ @@ -99,8 +109,12 @@ async function executeTopLevelPublicFunctionAvm( }.`, ); - return Promise.resolve( - convertAvmResultsToPxResult(avmResult, startSideEffectCounter, executionContext.execution, startGas, avmContext), + return convertAvmResultsToPxResult( + avmResult, + startSideEffectCounter, + executionContext.execution, + startGas, + avmContext, ); } @@ -279,6 +293,7 @@ export class PublicExecutor { globalVariables: GlobalVariables, availableGas: Gas, txContext: TxContext, + pendingNullifiers: Nullifier[], transactionFee: Fr = Fr.ZERO, sideEffectCounter: number = 0, ): Promise { @@ -298,6 +313,7 @@ export class PublicExecutor { availableGas, transactionFee, txContext.gasSettings, + pendingNullifiers, ); const executionResult = await executePublicFunction(context, /*nested=*/ false); diff --git a/yarn-project/simulator/src/public/hints_builder.ts b/yarn-project/simulator/src/public/hints_builder.ts index b0cb14e33fbc..0d83cd44db49 100644 --- a/yarn-project/simulator/src/public/hints_builder.ts +++ b/yarn-project/simulator/src/public/hints_builder.ts @@ -15,11 +15,11 @@ import { type PublicDataRead, type PublicDataTreeLeafPreimage, type PublicDataUpdateRequest, - type ReadRequestContext, + type ScopedReadRequest, buildNullifierNonExistentReadRequestHints, - buildNullifierReadRequestHints, buildPublicDataHints, buildPublicDataReadRequestHints, + buildSiloedNullifierReadRequestHints, } from '@aztec/circuits.js'; import { type Tuple } from '@aztec/foundation/serialize'; import { type IndexedTreeId, type MerkleTreeOperations } from '@aztec/world-state'; @@ -28,14 +28,14 @@ export class HintsBuilder { constructor(private db: MerkleTreeOperations) {} getNullifierReadRequestHints( - nullifierReadRequests: Tuple, + nullifierReadRequests: Tuple, pendingNullifiers: Tuple, ) { - return 
buildNullifierReadRequestHints(this, nullifierReadRequests, pendingNullifiers); + return buildSiloedNullifierReadRequestHints(this, nullifierReadRequests, pendingNullifiers); } getNullifierNonExistentReadRequestHints( - nullifierNonExistentReadRequests: Tuple, + nullifierNonExistentReadRequests: Tuple, pendingNullifiers: Tuple, ) { return buildNullifierNonExistentReadRequestHints(this, nullifierNonExistentReadRequests, pendingNullifiers); diff --git a/yarn-project/simulator/src/public/index.test.ts b/yarn-project/simulator/src/public/index.test.ts index 14995aa727a4..54973bdc18e4 100644 --- a/yarn-project/simulator/src/public/index.test.ts +++ b/yarn-project/simulator/src/public/index.test.ts @@ -8,7 +8,6 @@ import { GlobalVariables, type Header, L1_TO_L2_MSG_TREE_HEIGHT, - L2ToL1Message, NULLIFIER_TREE_HEIGHT, NullifierLeaf, NullifierLeafPreimage, @@ -99,7 +98,7 @@ describe('ACIR public execution simulator', () => { }); const simulate = (execution: PublicExecution, globalVariables: GlobalVariables) => - executor.simulate(execution, globalVariables, Gas.test(), makeTxContext(), Fr.ZERO); + executor.simulate(execution, globalVariables, Gas.test(), makeTxContext(), /*pendingNullifiers=*/ [], Fr.ZERO); describe('Token contract', () => { let recipient: AztecAddress; @@ -400,10 +399,8 @@ describe('ACIR public execution simulator', () => { // Assert the l2 to l1 message was created expect(result.newL2ToL1Messages.length).toEqual(1); - - const expectedNewMessage = new L2ToL1Message(portalContractAddress, pedersenHash(params)); - - expect(result.newL2ToL1Messages[0]).toEqual(expectedNewMessage); + expect(result.newL2ToL1Messages[0].recipient).toEqual(portalContractAddress); + expect(result.newL2ToL1Messages[0].content).toEqual(pedersenHash(params)); }); it('Should be able to create a nullifier from the public context', async () => { @@ -731,7 +728,8 @@ describe('ACIR public execution simulator', () => { }); }); - describe('Historical header in public context', () => { + // 
TODO(4840): add AVM opcodes for getting header (members) + describe.skip('Historical header in public context', () => { let contractAddress: AztecAddress; let callContext: CallContext; let assertHeaderPublicArtifact: FunctionArtifact; diff --git a/yarn-project/simulator/src/public/public_execution_context.ts b/yarn-project/simulator/src/public/public_execution_context.ts index bcd468aa5681..c6279209ecfb 100644 --- a/yarn-project/simulator/src/public/public_execution_context.ts +++ b/yarn-project/simulator/src/public/public_execution_context.ts @@ -7,6 +7,7 @@ import { type GasSettings, type GlobalVariables, type Header, + type Nullifier, PublicContextInputs, } from '@aztec/circuits.js'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; @@ -45,6 +46,7 @@ export class PublicExecutionContext extends TypedOracle { public readonly availableGas: Gas, public readonly transactionFee: Fr, public readonly gasSettings: GasSettings, + public readonly pendingNullifiers: Nullifier[], // Unencrypted logs emitted during this call AND any nested calls // Useful for maintaining correct ordering in ts private allUnencryptedLogs: UnencryptedL2Log[] = [], @@ -239,6 +241,7 @@ export class PublicExecutionContext extends TypedOracle { this.availableGas, this.transactionFee, this.gasSettings, + /*pendingNullifiers=*/ [], this.allUnencryptedLogs, this.log, ); diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index 94e666fd524d..d86a4d1ff735 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -2,6 +2,7 @@ import { type BlockProver, type ProcessedTx, PublicDataWrite, + PublicKernelType, SimulationError, type Tx, type TxValidator, @@ -105,6 +106,7 @@ describe('public_processor', () => { isEmpty: false, revertReason: undefined, publicKernelRequests: [], + gasUsed: {}, }; // Jest is complaining that the two 
objects are not equal, but they are. @@ -678,9 +680,12 @@ describe('public_processor', () => { let simulatorCallCount = 0; const initialGas = gasLimits.sub(teardownGas); - const afterSetupGas = initialGas.sub(Gas.from({ l2Gas: 1e6 })); - const afterAppGas = afterSetupGas.sub(Gas.from({ l2Gas: 2e6, daGas: 2e6 })); - const afterTeardownGas = teardownGas.sub(Gas.from({ l2Gas: 3e6, daGas: 3e6 })); + const setupGasUsed = Gas.from({ l2Gas: 1e6 }); + const appGasUsed = Gas.from({ l2Gas: 2e6, daGas: 2e6 }); + const teardownGasUsed = Gas.from({ l2Gas: 3e6, daGas: 3e6 }); + const afterSetupGas = initialGas.sub(setupGasUsed); + const afterAppGas = afterSetupGas.sub(appGasUsed); + const afterTeardownGas = teardownGas.sub(teardownGasUsed); // Total gas used is the sum of teardown gas allocation plus all expenditures along the way, // without including the gas used in the teardown phase (since that's consumed entirely up front). @@ -764,6 +769,7 @@ describe('public_processor', () => { expect.anything(), // GlobalVariables Gas.from(availableGas), expect.anything(), // TxContext + expect.anything(), // pendingNullifiers new Fr(txFee), expect.anything(), // SideEffectCounter ]; @@ -779,6 +785,11 @@ describe('public_processor', () => { expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); expect(processed[0].data.end.gasUsed).toEqual(Gas.from(expectedTotalGasUsed)); + expect(processed[0].gasUsed[PublicKernelType.SETUP]).toEqual(setupGasUsed); + expect(processed[0].gasUsed[PublicKernelType.APP_LOGIC]).toEqual(appGasUsed); + expect(processed[0].gasUsed[PublicKernelType.TEARDOWN]).toEqual(teardownGasUsed); + expect(processed[0].gasUsed[PublicKernelType.TAIL]).toBeUndefined(); + expect(processed[0].gasUsed[PublicKernelType.NON_PUBLIC]).toBeUndefined(); const txEffect = toTxEffect(processed[0]); expect(arrayNonEmptyLength(txEffect.publicDataWrites, PublicDataWrite.isEmpty)).toEqual(3); diff --git a/yarn-project/simulator/src/public/public_processor.ts 
b/yarn-project/simulator/src/public/public_processor.ts index b749c2276faf..360ed52b8891 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -20,7 +20,11 @@ import { PublicExecutor, type PublicStateDB, type SimulationProvider } from '@az import { type ContractDataSource } from '@aztec/types/contracts'; import { type MerkleTreeOperations } from '@aztec/world-state'; -import { type AbstractPhaseManager, PublicKernelPhase } from './abstract_phase_manager.js'; +import { + type AbstractPhaseManager, + PublicKernelPhase, + publicKernelPhaseToKernelType, +} from './abstract_phase_manager.js'; import { PhaseManagerFactory } from './phase_manager_factory.js'; import { ContractsDataSourcePublicDB, WorldStateDB, WorldStatePublicDB } from './public_executor.js'; import { RealPublicKernelCircuitSimulator } from './public_kernel.js'; @@ -169,8 +173,10 @@ export class PublicProcessor { let finalKernelOutput: KernelCircuitPublicInputs | undefined; let revertReason: SimulationError | undefined; const timer = new Timer(); + const gasUsed: ProcessedTx['gasUsed'] = {}; while (phase) { const output = await phase.handle(tx, publicKernelPublicInput, proof); + gasUsed[publicKernelPhaseToKernelType(phase.phase)] = output.gasUsed; if (phase.phase === PublicKernelPhase.APP_LOGIC) { returnValues = output.returnValues; } @@ -196,7 +202,7 @@ export class PublicProcessor { throw new Error('Final public kernel was not executed.'); } - const processedTx = makeProcessedTx(tx, finalKernelOutput, proof, publicRequests, revertReason); + const processedTx = makeProcessedTx(tx, finalKernelOutput, proof, publicRequests, revertReason, gasUsed); this.log.debug(`Processed public part of ${tx.getTxHash()}`, { eventName: 'tx-sequencer-processing', diff --git a/yarn-project/simulator/src/public/setup_phase_manager.ts b/yarn-project/simulator/src/public/setup_phase_manager.ts index 9c21f610a16a..33581b36f5b0 100644 --- 
a/yarn-project/simulator/src/public/setup_phase_manager.ts +++ b/yarn-project/simulator/src/public/setup_phase_manager.ts @@ -35,14 +35,21 @@ export class SetupPhaseManager extends AbstractPhaseManager { previousPublicKernelProof: Proof, ) { this.log.verbose(`Processing tx ${tx.getTxHash()}`); - const [kernelInputs, publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = - await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( - // the abstract phase manager throws if simulation gives error in a non-revertible phase - async err => { - await this.publicStateDB.rollbackToCommit(); - throw err; - }, - ); + const [ + kernelInputs, + publicKernelOutput, + publicKernelProof, + newUnencryptedFunctionLogs, + revertReason, + _returnValues, + gasUsed, + ] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( + // the abstract phase manager throws if simulation gives error in a non-revertible phase + async err => { + await this.publicStateDB.rollbackToCommit(); + throw err; + }, + ); tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs); await this.publicStateDB.checkpoint(); @@ -61,6 +68,7 @@ export class SetupPhaseManager extends AbstractPhaseManager { publicKernelProof, revertReason, returnValues: undefined, + gasUsed, }; } } diff --git a/yarn-project/simulator/src/public/tail_phase_manager.ts b/yarn-project/simulator/src/public/tail_phase_manager.ts index ab1d67421e2d..06b7f732b478 100644 --- a/yarn-project/simulator/src/public/tail_phase_manager.ts +++ b/yarn-project/simulator/src/public/tail_phase_manager.ts @@ -39,7 +39,11 @@ export class TailPhaseManager extends AbstractPhaseManager { super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); } - async handle(tx: Tx, previousPublicKernelOutput: PublicKernelCircuitPublicInputs, previousPublicKernelProof: Proof) { + override async handle( + tx: Tx, + 
previousPublicKernelOutput: PublicKernelCircuitPublicInputs, + previousPublicKernelProof: Proof, + ) { this.log.verbose(`Processing tx ${tx.getTxHash()}`); const [inputs, finalKernelOutput] = await this.runTailKernelCircuit( previousPublicKernelOutput, @@ -67,6 +71,7 @@ export class TailPhaseManager extends AbstractPhaseManager { publicKernelProof: makeEmptyProof(), revertReason: undefined, returnValues: undefined, + gasUsed: undefined, }; } diff --git a/yarn-project/simulator/src/public/teardown_phase_manager.ts b/yarn-project/simulator/src/public/teardown_phase_manager.ts index 6cec359c9c5b..55b1b7656303 100644 --- a/yarn-project/simulator/src/public/teardown_phase_manager.ts +++ b/yarn-project/simulator/src/public/teardown_phase_manager.ts @@ -39,14 +39,21 @@ export class TeardownPhaseManager extends AbstractPhaseManager { previousPublicKernelProof: Proof, ) { this.log.verbose(`Processing tx ${tx.getTxHash()}`); - const [kernelInputs, publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = - await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( - // the abstract phase manager throws if simulation gives error in a non-revertible phase - async err => { - await this.publicStateDB.rollbackToCommit(); - throw err; - }, - ); + const [ + kernelInputs, + publicKernelOutput, + publicKernelProof, + newUnencryptedFunctionLogs, + revertReason, + _returnValues, + gasUsed, + ] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( + // the abstract phase manager throws if simulation gives error in a non-revertible phase + async err => { + await this.publicStateDB.rollbackToCommit(); + throw err; + }, + ); tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs); await this.publicStateDB.checkpoint(); @@ -65,6 +72,7 @@ export class TeardownPhaseManager extends AbstractPhaseManager { publicKernelProof, revertReason, returnValues: undefined, + 
gasUsed, }; } diff --git a/yarn-project/simulator/src/public/transitional_adaptors.ts b/yarn-project/simulator/src/public/transitional_adaptors.ts index 09b2e2d99fc6..ae317f9006e8 100644 --- a/yarn-project/simulator/src/public/transitional_adaptors.ts +++ b/yarn-project/simulator/src/public/transitional_adaptors.ts @@ -1,31 +1,24 @@ // All code in this file needs to die once the public executor is phased out in favor of the AVM. -import { UnencryptedFunctionL2Logs, UnencryptedL2Log } from '@aztec/circuit-types'; +import { type SimulationError, UnencryptedFunctionL2Logs } from '@aztec/circuit-types'; import { + type AztecAddress, CallContext, - ContractStorageRead, - ContractStorageUpdateRequest, FunctionData, + type FunctionSelector, type Gas, type GasSettings, type GlobalVariables, type Header, - L2ToL1Message, - NoteHash, - Nullifier, - ReadRequest, - SideEffect, } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; +import { extractCallStack } from '../acvm/index.js'; import { type AvmContext } from '../avm/avm_context.js'; import { AvmExecutionEnvironment } from '../avm/avm_execution_environment.js'; -import { type AvmMachineState } from '../avm/avm_machine_state.js'; -import { AvmContractCallResults } from '../avm/avm_message_call_result.js'; -import { type JournalData } from '../avm/journal/journal.js'; +import { type AvmContractCallResults } from '../avm/avm_message_call_result.js'; import { Mov } from '../avm/opcodes/memory.js'; -import { createSimulationError } from '../common/errors.js'; +import { ExecutionError, createSimulationError } from '../common/errors.js'; import { type PublicExecution, type PublicExecutionResult } from './execution.js'; -import { type PublicExecutionContext } from './public_execution_context.js'; /** * Convert a PublicExecution(Environment) object to an AvmExecutionEnvironment @@ -82,6 +75,29 @@ export function createPublicExecution( return execution; } +export function processRevertReason( + revertReason: 
Error | undefined, + contractAddress: AztecAddress, + functionSelector: FunctionSelector, +): SimulationError | undefined { + if (!revertReason) { + return undefined; + } + if (revertReason instanceof Error) { + const ee = new ExecutionError( + revertReason.message, + { + contractAddress, + functionSelector, + }, + extractCallStack(revertReason), + { cause: revertReason }, + ); + + return createSimulationError(ee); + } +} + export function convertAvmResultsToPxResult( avmResult: AvmContractCallResults, startSideEffectCounter: number, @@ -91,6 +107,7 @@ export function convertAvmResultsToPxResult( ): PublicExecutionResult { const endPersistableState = endAvmContext.persistableState; const endMachineState = endAvmContext.machineState; + return { ...endPersistableState.transitionalExecutionResult, // includes nestedExecutions execution: fromPx, @@ -102,104 +119,17 @@ export function convertAvmResultsToPxResult( endPersistableState.transitionalExecutionResult.allUnencryptedLogs, ), reverted: avmResult.reverted, - revertReason: avmResult.revertReason ? 
createSimulationError(avmResult.revertReason) : undefined, + revertReason: processRevertReason( + avmResult.revertReason, + endAvmContext.environment.address, + fromPx.functionData.selector, + ), startGasLeft: startGas, endGasLeft: endMachineState.gasLeft, transactionFee: endAvmContext.environment.transactionFee, }; } -/** - * Convert the result of an AVM contract call to a PublicExecutionResult for the public kernel - * - * @param execution - * @param newWorldState - * @param result - * @returns - */ -export async function convertAvmResults( - executionContext: PublicExecutionContext, - newWorldState: JournalData, - result: AvmContractCallResults, - endMachineState: AvmMachineState, -): Promise { - const execution = executionContext.execution; - - const contractStorageReads: ContractStorageRead[] = newWorldState.storageReads.map( - read => new ContractStorageRead(read.slot, read.value, read.counter.toNumber(), read.storageAddress), - ); - const contractStorageUpdateRequests: ContractStorageUpdateRequest[] = newWorldState.storageWrites.map( - write => new ContractStorageUpdateRequest(write.slot, write.value, write.counter.toNumber(), write.storageAddress), - ); - // We need to write the storage updates to the DB, because that's what the ACVM expects. - // Assumes the updates are in the right order. 
- for (const write of newWorldState.storageWrites) { - await executionContext.stateDb.storageWrite(write.storageAddress, write.slot, write.value); - } - - const newNoteHashes = newWorldState.newNoteHashes.map( - noteHash => new NoteHash(noteHash.noteHash, noteHash.counter.toNumber()), - ); - const nullifierReadRequests: ReadRequest[] = newWorldState.nullifierChecks - .filter(nullifierCheck => nullifierCheck.exists) - .map(nullifierCheck => new ReadRequest(nullifierCheck.nullifier, nullifierCheck.counter.toNumber())); - const nullifierNonExistentReadRequests: ReadRequest[] = newWorldState.nullifierChecks - .filter(nullifierCheck => !nullifierCheck.exists) - .map(nullifierCheck => new ReadRequest(nullifierCheck.nullifier, nullifierCheck.counter.toNumber())); - const newNullifiers: Nullifier[] = newWorldState.newNullifiers.map( - tracedNullifier => - new Nullifier( - /*value=*/ tracedNullifier.nullifier, - tracedNullifier.counter.toNumber(), - /*noteHash=*/ Fr.ZERO, // NEEDED? - ), - ); - const unencryptedLogs: UnencryptedFunctionL2Logs = new UnencryptedFunctionL2Logs( - newWorldState.newLogs.map(log => new UnencryptedL2Log(log.contractAddress, log.selector, log.data)), - ); - const unencryptedLogsHashes = newWorldState.newLogsHashes.map( - logHash => new SideEffect(logHash.logHash, logHash.counter), - ); - const newL2ToL1Messages = newWorldState.newL1Messages.map(m => new L2ToL1Message(m.recipient, m.content)); - - const returnValues = result.output; - - // TODO: Support nested executions. 
- const nestedExecutions: PublicExecutionResult[] = []; - const allUnencryptedLogs = unencryptedLogs; - // TODO keep track of side effect counters - const startSideEffectCounter = Fr.ZERO; - const endSideEffectCounter = Fr.ZERO; - - return { - execution, - nullifierReadRequests, - nullifierNonExistentReadRequests, - newNoteHashes, - newL2ToL1Messages, - startSideEffectCounter, - endSideEffectCounter, - newNullifiers, - contractStorageReads, - contractStorageUpdateRequests, - returnValues, - nestedExecutions, - unencryptedLogsHashes, - unencryptedLogs, - unencryptedLogPreimagesLength: new Fr(unencryptedLogs.getSerializedLength()), - allUnencryptedLogs, - reverted: result.reverted, - revertReason: result.revertReason ? createSimulationError(result.revertReason) : undefined, - startGasLeft: executionContext.availableGas, - endGasLeft: endMachineState.gasLeft, - transactionFee: executionContext.transactionFee, - }; -} - -export function convertPublicExecutionResult(res: PublicExecutionResult): AvmContractCallResults { - return new AvmContractCallResults(res.reverted, res.returnValues, res.revertReason); -} - const AVM_MAGIC_SUFFIX = Buffer.from([ Mov.opcode, // opcode 0x00, // indirect diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts index 1ccdb134c641..c7bc0fc5cbf9 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts @@ -27,6 +27,8 @@ const consumeNextBlocks = () => { const log = createDebugLogger('aztec:server_world_state_synchronizer_test'); describe('server_world_state_synchronizer', () => { + jest.setTimeout(30_000); + let db: AztecKVStore; let l1ToL2Messages: Fr[]; let inHash: Buffer; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index f2539b2eb462..1be75065b372 100644 --- 
a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -592,6 +592,7 @@ __metadata: "@noir-lang/types": "portal:../../noir/packages/types" "@types/jest": ^29.5.0 "@types/node": ^18.7.23 + change-case: ^5.4.4 jest: ^29.5.0 levelup: ^5.1.1 memdown: ^6.1.1 @@ -2860,7 +2861,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/noirc_abi@portal:../noir/packages/noirc_abi::locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: - "@noir-lang/types": 0.28.0 + "@noir-lang/types": 0.29.0 languageName: node linkType: soft @@ -5255,6 +5256,13 @@ __metadata: languageName: node linkType: hard +"change-case@npm:^5.4.4": + version: 5.4.4 + resolution: "change-case@npm:5.4.4" + checksum: a22a25a763719658424ffbcd41e931d2d19cc22399cc765dca447fbe1eaf13e179d5e8ab1677af75f2e814dbddf74e42ffdecb526cd5bc906cc859f62aa154b2 + languageName: node + linkType: hard + "char-regex@npm:^1.0.2": version: 1.0.2 resolution: "char-regex@npm:1.0.2"