diff --git a/.github/workflows/auto-pr-rebuild-script.yml b/.github/workflows/acir-artifacts.yml similarity index 74% rename from .github/workflows/auto-pr-rebuild-script.yml rename to .github/workflows/acir-artifacts.yml index 336f2288878..43d9acfdedb 100644 --- a/.github/workflows/auto-pr-rebuild-script.yml +++ b/.github/workflows/acir-artifacts.yml @@ -1,4 +1,4 @@ -name: Rebuild ACIR artifacts +name: Build ACIR artifacts on: pull_request: @@ -93,11 +93,6 @@ jobs: chmod +x ${{ github.workspace }}/nargo/nargo echo "${{ github.workspace }}/nargo" >> $GITHUB_PATH - - name: Set up Git user (Github Action) - run: | - git config --local user.name kevaundray - git config --local user.email kevtheappdev@gmail.com - - name: Run rebuild script working-directory: test_programs run: | @@ -110,21 +105,3 @@ jobs: name: acir-artifacts path: ./test_programs/acir_artifacts retention-days: 10 - - - name: Check for changes in acir_artifacts directory - id: check_changes - if: ${{ github.ref_name }} == "master" - run: | - git diff --quiet test_programs/acir_artifacts/ || echo "::set-output name=changes::true" - - - name: Create or Update PR - if: steps.check_changes.outputs.changes == 'true' - uses: peter-evans/create-pull-request@v3 - with: - token: ${{ secrets.NOIR_REPO_TOKEN }} - commit-message: "chore: update acir artifacts" - title: "chore: Update ACIR artifacts" - body: "Automatic PR to update acir artifacts" - add-paths: test_programs/acir_artifacts/*.gz - labels: "auto-pr" - branch: "auto-pr-rebuild-script-branch" diff --git a/.github/workflows/docs-new-version.yml b/.github/workflows/docs-new-version.yml deleted file mode 100644 index 9b109e170bb..00000000000 --- a/.github/workflows/docs-new-version.yml +++ /dev/null @@ -1,112 +0,0 @@ -name: Cut a new version of the docs - -on: - workflow_dispatch: - inputs: - tag: - description: The tag to build Docs for - required: false - -jobs: - publish-docs: - runs-on: ubuntu-latest - if: ${{ inputs.tag != '' }} - permissions: - pull-requests: write - contents: write - steps: - - name: Checkout sources - uses: actions/checkout@v4 - with: - ref: ${{ inputs.tag }} - - - name: Create new branch - run: | - git checkout -b new-docs-version-${{ github.event.inputs.tag }} - - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: '18' - - - name: Install wasm-bindgen-cli - uses: taiki-e/install-action@v2 - with: - tool: wasm-bindgen-cli@0.2.86 - - - name: Install wasm-opt - run: | - npm i wasm-opt -g - - - name: Install Yarn - run: npm install -g yarn - - - name: Install Yarn dependencies - run: yarn - - - name: Build acvm_js - run: yarn workspace @noir-lang/acvm_js build - - - name: Build noirc_abi - run: yarn workspace @noir-lang/noirc_abi build - - - name: Build noir_js_types - run: yarn workspace @noir-lang/types build - - - name: Build barretenberg wrapper - run: yarn workspace @noir-lang/backend_barretenberg build - - - name: Run noir_js - run: | - yarn workspace @noir-lang/noir_js build - - - name: Build docs - run: - yarn workspace docs build - - - name: Cut a new version - working-directory: ./docs - run: yarn docusaurus docs:version ${{ inputs.tag }} - - - name: Remove pre-releases - id: get_version - run: | - cd docs && yarn setStable - - - name: Commit new documentation version - run: | - git config --local user.name 'signorecello' - git config --local user.email 'github@zepedro.me' - git add . 
- git commit -m "chore(docs): cut new docs version for tag ${{ github.event.inputs.tag }}" - - - name: Push changes to new branch - run: git push origin new-docs-version-${{ github.event.inputs.tag }} - - - name: Create Pull Request - run: | - gh pr create \ - --title "chore(docs): docs for ${{ github.event.inputs.tag }}" \ - --body "Updates documentation to new version for tag ${{ github.event.inputs.tag }}." \ - --base master \ - --head new-docs-version-${{ github.event.inputs.tag }} \ - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Build docs - run: yarn workspace docs build - - - name: Deploy to Netlify - uses: nwtgck/actions-netlify@v2.1 - with: - publish-dir: './docs/build' - production-branch: master - production-deploy: true - github-token: ${{ secrets.GITHUB_TOKEN }} - enable-github-deployment: false - deploy-message: "Deploy from GitHub Actions for tag ${{ inputs.tag }}" - env: - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} - timeout-minutes: 1 - diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index 2b304b72b6f..12526df80f0 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -74,12 +74,6 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup - - name: Remove pre-releases - working-directory: docs - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: yarn setStable - - name: Build docs working-directory: docs run: diff --git a/.github/workflows/docs-release.yml b/.github/workflows/docs-release.yml deleted file mode 100644 index 4cd9d9998cb..00000000000 --- a/.github/workflows/docs-release.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: Rebuild docs with the latest release - -on: - release: - types: [released] - workflow_dispatch: - -jobs: - build_and_deploy: - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: '18' - - - name: Install wasm-bindgen-cli - uses: taiki-e/install-action@v2 - with: - tool: wasm-bindgen-cli@0.2.86 - - - name: Install wasm-opt - run: | - npm i wasm-opt -g - - - name: Install dependencies - run: yarn - - - name: Build acvm_js - run: yarn workspace @noir-lang/acvm_js build - - - name: Build noirc_abi - run: yarn workspace @noir-lang/noirc_abi build - - - name: Build noir_js_types - run: yarn workspace @noir-lang/types build - - - name: Build barretenberg wrapper - run: yarn workspace @noir-lang/backend_barretenberg build - - - name: Run noir_js - run: | - yarn workspace @noir-lang/noir_js build - - - name: Remove pre-releases - working-directory: docs - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: yarn setStable - - - name: Build docs - run: - yarn workspace docs build - - - name: Deploy to Netlify - uses: nwtgck/actions-netlify@v2.1 - with: - publish-dir: './docs/build' - production-branch: master - production-deploy: true - github-token: ${{ secrets.GITHUB_TOKEN }} - enable-github-deployment: false - deploy-message: "Deploy from GitHub Actions for release" - env: - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} - timeout-minutes: 1 diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml new file mode 100644 index 00000000000..4ef7dd89777 --- /dev/null +++ b/.github/workflows/publish-docs.yml @@ -0,0 +1,57 @@ +name: Publish documentation + +on: + workflow_dispatch: + 
inputs: + noir-ref: + description: The noir reference to checkout + required: false + default: 'master' + +jobs: + publish-docs: + name: Publish docs + runs-on: ubuntu-latest + + steps: + - name: Checkout release branch + uses: actions/checkout@v4 + with: + ref: ${{ inputs.noir-ref }} + token: ${{ secrets.NOIR_RELEASES_TOKEN }} + + - name: Setup Node.js + uses: actions/setup-node@v2 + with: + node-version: '18' + + - name: Install wasm-bindgen-cli + uses: taiki-e/install-action@v2 + with: + tool: wasm-bindgen-cli@0.2.86 + + - name: Install wasm-opt + run: | + npm i wasm-opt -g + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Build docs for deploying + working-directory: docs + run: + yarn workspaces foreach -Rt run build + + - name: Deploy to Netlify + uses: nwtgck/actions-netlify@v2.1 + with: + publish-dir: './docs/build' + production-branch: master + production-deploy: true + github-token: ${{ secrets.GITHUB_TOKEN }} + enable-github-deployment: false + deploy-message: "Deploy from GitHub Actions for tag ${{ inputs.noir-ref }}" + env: + NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} + NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} + timeout-minutes: 1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 95da6792f04..744b4e3effc 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -50,6 +50,42 @@ jobs: git commit -m 'chore: Update lockfile' git push + + update-docs: + name: Update docs + needs: [release-please, update-lockfile] + if: ${{ needs.release-please.outputs.release-pr }} + runs-on: ubuntu-latest + steps: + - name: Checkout release branch + uses: actions/checkout@v4 + with: + ref: ${{ fromJSON(needs.release-please.outputs.release-pr).headBranchName }} + token: ${{ secrets.NOIR_RELEASES_TOKEN }} + + - name: Setup Node.js + uses: actions/setup-node@v2 + with: + node-version: '18' + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Cut a new version + working-directory: ./docs + run: yarn docusaurus docs:version ${{ needs.release-please.outputs.tag-name }} + + - name: Configure git + run: | + git config --local user.name 'signorecello' + git config --local user.email 'github@zepedro.me' + + - name: Commit new documentation version + run: | + git add . 
+ git commit -m "chore(docs): cut new docs version for tag ${{ needs.release-please.outputs.tag-name }}" + git push + build-binaries: name: Build binaries needs: [release-please] @@ -78,19 +114,18 @@ jobs: ref: master token: ${{ secrets.NOIR_REPO_TOKEN }} inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}", "npm-tag": "latest" }' - publish-docs: name: Publish docs needs: [release-please] if: ${{ needs.release-please.outputs.tag-name }} runs-on: ubuntu-latest + steps: - - name: Dispatch to publish workflow + - name: Dispatch to publish-docs uses: benc-uk/workflow-dispatch@v1 with: - workflow: docs-new-version.yml - repo: noir-lang/noir + workflow: publish-docs.yml ref: master - token: ${{ secrets.GITHUB_TOKEN }} - inputs: '{ "tag": "${{ needs.release-please.outputs.tag-name }}"}' + token: ${{ secrets.NOIR_REPO_TOKEN }} + inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' diff --git a/Cargo.toml b/Cargo.toml index 1a37a4f53e1..5738fe94984 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -50,14 +50,14 @@ repository = "https://github.com/noir-lang/noir/" [workspace.dependencies] # ACVM workspace dependencies -acir = { path = "acvm-repo/acir", default-features = false } -acvm = { path = "acvm-repo/acvm" } -acir_field = { path = "acvm-repo/acir_field", default-features = false } -stdlib = { package = "acvm_stdlib", path = "acvm-repo/stdlib", default-features = false } -brillig = { path = "acvm-repo/brillig", default-features = false } -brillig_vm = { path = "acvm-repo/brillig_vm", default-features = false } -acvm_blackbox_solver = { path = "acvm-repo/blackbox_solver", default-features = false } -barretenberg_blackbox_solver = { path = "acvm-repo/barretenberg_blackbox_solver", default-features = false } +acir_field = { version = "0.35.0", path = "acvm-repo/acir_field", default-features = false } +acir = { version = "0.35.0", path = "acvm-repo/acir", default-features = false } +acvm = { version = "0.35.0", path = "acvm-repo/acvm" } +stdlib = { version = "0.35.0", package = "acvm_stdlib", path = "acvm-repo/stdlib", default-features = false } +brillig = { version = "0.35.0", path = "acvm-repo/brillig", default-features = false } +brillig_vm = { version = "0.35.0", path = "acvm-repo/brillig_vm", default-features = false } +acvm_blackbox_solver = { version = "0.35.0", path = "acvm-repo/blackbox_solver", default-features = false } +barretenberg_blackbox_solver = { version = "0.35.0", path = "acvm-repo/barretenberg_blackbox_solver", default-features = false } # Noir compiler workspace dependencies arena = { path = "compiler/utils/arena" } diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 22278bdc635..70821913836 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -120,75 +120,6 @@ pub enum BlackBoxFuncCall { } impl BlackBoxFuncCall { - #[deprecated = "BlackBoxFuncCall::dummy() is unnecessary and will be removed in ACVM 0.24.0"] - pub fn dummy(bb_func: BlackBoxFunc) -> Self { - match bb_func { - BlackBoxFunc::AND => BlackBoxFuncCall::AND { - lhs: FunctionInput::dummy(), - rhs: FunctionInput::dummy(), - output: Witness(0), - }, - BlackBoxFunc::XOR => BlackBoxFuncCall::XOR { - lhs: FunctionInput::dummy(), - rhs: FunctionInput::dummy(), - output: Witness(0), - }, - BlackBoxFunc::RANGE => BlackBoxFuncCall::RANGE { input: FunctionInput::dummy() }, - BlackBoxFunc::SHA256 => 
BlackBoxFuncCall::SHA256 { inputs: vec![], outputs: vec![] }, - BlackBoxFunc::Blake2s => BlackBoxFuncCall::Blake2s { inputs: vec![], outputs: vec![] }, - BlackBoxFunc::SchnorrVerify => BlackBoxFuncCall::SchnorrVerify { - public_key_x: FunctionInput::dummy(), - public_key_y: FunctionInput::dummy(), - signature: vec![], - message: vec![], - output: Witness(0), - }, - BlackBoxFunc::PedersenCommitment => BlackBoxFuncCall::PedersenCommitment { - inputs: vec![], - domain_separator: 0, - outputs: (Witness(0), Witness(0)), - }, - BlackBoxFunc::PedersenHash => BlackBoxFuncCall::PedersenHash { - inputs: vec![], - domain_separator: 0, - output: Witness(0), - }, - BlackBoxFunc::HashToField128Security => { - BlackBoxFuncCall::HashToField128Security { inputs: vec![], output: Witness(0) } - } - BlackBoxFunc::EcdsaSecp256k1 => BlackBoxFuncCall::EcdsaSecp256k1 { - public_key_x: vec![], - public_key_y: vec![], - signature: vec![], - hashed_message: vec![], - output: Witness(0), - }, - BlackBoxFunc::EcdsaSecp256r1 => BlackBoxFuncCall::EcdsaSecp256r1 { - public_key_x: vec![], - public_key_y: vec![], - signature: vec![], - hashed_message: vec![], - output: Witness(0), - }, - BlackBoxFunc::FixedBaseScalarMul => BlackBoxFuncCall::FixedBaseScalarMul { - low: FunctionInput::dummy(), - high: FunctionInput::dummy(), - outputs: (Witness(0), Witness(0)), - }, - BlackBoxFunc::Keccak256 => { - BlackBoxFuncCall::Keccak256 { inputs: vec![], outputs: vec![] } - } - BlackBoxFunc::RecursiveAggregation => BlackBoxFuncCall::RecursiveAggregation { - verification_key: vec![], - proof: vec![], - public_inputs: vec![], - key_hash: FunctionInput::dummy(), - input_aggregation_object: None, - output_aggregation_object: vec![], - }, - } - } - pub fn get_black_box_func(&self) -> BlackBoxFunc { match self { BlackBoxFuncCall::AND { .. 
} => BlackBoxFunc::AND, diff --git a/acvm-repo/acir_field/src/generic_ark.rs b/acvm-repo/acir_field/src/generic_ark.rs index 0f4be21ad54..e3e66fe7a92 100644 --- a/acvm-repo/acir_field/src/generic_ark.rs +++ b/acvm-repo/acir_field/src/generic_ark.rs @@ -143,6 +143,12 @@ impl From for FieldElement { } } +impl From for FieldElement { + fn from(a: usize) -> FieldElement { + FieldElement::from(a as u128) + } +} + impl From for FieldElement { fn from(boolean: bool) -> FieldElement { if boolean { diff --git a/compiler/integration-tests/circuits/recursion/src/main.nr b/compiler/integration-tests/circuits/recursion/src/main.nr index 7cf956d1950..e60e4e0b61a 100644 --- a/compiler/integration-tests/circuits/recursion/src/main.nr +++ b/compiler/integration-tests/circuits/recursion/src/main.nr @@ -1,14 +1,17 @@ use dep::std; fn main( - verification_key: [Field; 114], - proof: [Field; 94], - public_inputs: [Field; 1], - key_hash: Field, - input_aggregation_object: [Field; 16] -) -> pub [Field; 16] { - let vk : [Field] = verification_key; - let p : [Field] = proof; - let pi : [Field] = public_inputs; - std::verify_proof(vk, p, pi, key_hash, input_aggregation_object) + verification_key : [Field; 114], + proof : [Field; 94], + public_inputs : [Field; 1], + key_hash : Field, +) -> pub [Field;16]{ + let input_aggregation_object = [0; 16]; + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash, + input_aggregation_object + ) } diff --git a/compiler/integration-tests/hardhat.config.ts b/compiler/integration-tests/hardhat.config.ts index 2c68b3fae0f..af4728c68ec 100644 --- a/compiler/integration-tests/hardhat.config.ts +++ b/compiler/integration-tests/hardhat.config.ts @@ -12,6 +12,9 @@ const config: HardhatUserConfig = { }, }, }, + mocha: { + timeout: 5 * 60 * 1000, + }, }; export default config; diff --git a/compiler/integration-tests/scripts/codegen-verifiers.sh b/compiler/integration-tests/scripts/codegen-verifiers.sh index 13667038728..b3a52217271 100644 --- a/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -12,9 +12,15 @@ nargo --program-dir $mul_dir codegen-verifier assert_statement_dir=$repo_root/test_programs/execution_success/assert_statement nargo --program-dir $assert_statement_dir codegen-verifier +# Run codegen-verifier for recursion +recursion_dir=$repo_root/compiler/integration-tests/circuits/recursion +nargo --program-dir $recursion_dir codegen-verifier + # Copy compiled contracts from the root of compiler/integration-tests contracts_dir=$self_path/../contracts +rm -rf $contracts_dir mkdir $contracts_dir cp $mul_dir/contract/1_mul/plonk_vk.sol $contracts_dir/1_mul.sol cp $assert_statement_dir/contract/assert_statement/plonk_vk.sol $contracts_dir/assert_statement.sol +cp $recursion_dir/contract/recursion/plonk_vk.sol $contracts_dir/recursion.sol diff --git a/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts new file mode 100644 index 00000000000..353678b470b --- /dev/null +++ b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts @@ -0,0 +1,67 @@ +import { expect } from 'chai'; +import { ethers } from 'hardhat'; + +import { readFileSync } from 'node:fs'; +import { resolve } from 'path'; +import toml from 'toml'; + +import { compile, CompiledProgram, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm'; +import { Noir } from 
'@noir-lang/noir_js'; +import { BarretenbergBackend, flattenPublicInputs } from '@noir-lang/backend_barretenberg'; +import { Field, InputMap } from '@noir-lang/noirc_abi'; + +compilerLogLevel('INFO'); + +it(`smart contract can verify a recursive proof`, async () => { + const inner_source_path = resolve(`../../test_programs/execution_success/assert_statement/src/main.nr`); + const inner_program = (compile(inner_source_path) as { program: CompiledProgram }).program; + + const recursion_source_path = resolve(`./circuits/recursion/src/main.nr`); + const recursion_program = (compile(recursion_source_path) as { program: CompiledProgram }).program; + + // Intermediate proof + + const inner_backend = new BarretenbergBackend(inner_program); + const inner = new Noir(inner_program); + + const inner_prover_toml = readFileSync( + resolve(`../../test_programs/execution_success/assert_statement/Prover.toml`), + ).toString(); + const inner_inputs = toml.parse(inner_prover_toml); + + const { witness: main_witness } = await inner.execute(inner_inputs); + const intermediate_proof = await inner_backend.generateIntermediateProof(main_witness); + + expect(await inner_backend.verifyIntermediateProof(intermediate_proof)).to.be.true; + + const { proofAsFields, vkAsFields, vkHash } = await inner_backend.generateIntermediateProofArtifacts( + intermediate_proof, + 1, // 1 public input + ); + + // Final proof + + const recursion_backend = new BarretenbergBackend(recursion_program); + const recursion = new Noir(recursion_program, recursion_backend); + + const recursion_inputs: InputMap = { + verification_key: vkAsFields, + proof: proofAsFields, + public_inputs: [inner_inputs.y as Field], + key_hash: vkHash, + }; + + const recursion_proof = await recursion.generateFinalProof(recursion_inputs); + expect(await recursion.verifyFinalProof(recursion_proof)).to.be.true; + + // Smart contract verification + + const contract = await ethers.deployContract('contracts/recursion.sol:UltraVerifier', []); + + const result = await contract.verify.staticCall( + recursion_proof.proof, + flattenPublicInputs(recursion_proof.publicInputs), + ); + + expect(result).to.be.true; +}); diff --git a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts index 57199fc8667..7dafada0ffb 100644 --- a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts +++ b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts @@ -57,7 +57,7 @@ test_cases.forEach((testInfo) => { // Smart contract verification - const contract = await ethers.deployContract(testInfo.compiled, [], {}); + const contract = await ethers.deployContract(testInfo.compiled, []); const result = await contract.verify(proofData.proof, flattenPublicInputs(proofData.publicInputs)); diff --git a/compiler/noirc_errors/src/position.rs b/compiler/noirc_errors/src/position.rs index e308eb9a2c7..24b5c4d5ff0 100644 --- a/compiler/noirc_errors/src/position.rs +++ b/compiler/noirc_errors/src/position.rs @@ -65,6 +65,10 @@ impl Span { Span::inclusive(start, start) } + pub fn empty(position: u32) -> Span { + Span::from(position..position) + } + #[must_use] pub fn merge(self, other: Span) -> Span { Span(self.0.merge(other.0)) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 0d97dd12601..9979bf0cd29 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ 
b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -1,9 +1,9 @@ -use acvm::acir::{ - brillig::{BlackBoxOp, HeapVector, RegisterOrMemory}, - BlackBoxFunc, -}; +use acvm::acir::{brillig::BlackBoxOp, BlackBoxFunc}; -use crate::brillig::brillig_ir::BrilligContext; +use crate::brillig::brillig_ir::{ + brillig_variable::{BrilligVariable, BrilligVector}, + BrilligContext, +}; /// Transforms SSA's black box function calls into the corresponding brillig instructions /// Extracting arguments and results from the SSA function call @@ -11,31 +11,31 @@ use crate::brillig::brillig_ir::BrilligContext; pub(crate) fn convert_black_box_call( brillig_context: &mut BrilligContext, bb_func: &BlackBoxFunc, - function_arguments: &[RegisterOrMemory], - function_results: &[RegisterOrMemory], + function_arguments: &[BrilligVariable], + function_results: &[BrilligVariable], ) { match bb_func { BlackBoxFunc::SHA256 => { - if let ([message], [RegisterOrMemory::HeapArray(result_array)]) = + if let ([message], [BrilligVariable::BrilligArray(result_array)]) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::Sha256 { - message: message_vector, - output: *result_array, + message: message_vector.to_heap_vector(), + output: result_array.to_heap_array(), }); } else { unreachable!("ICE: SHA256 expects one array argument and one array result") } } BlackBoxFunc::Blake2s => { - if let ([message], [RegisterOrMemory::HeapArray(result_array)]) = + if let ([message], [BrilligVariable::BrilligArray(result_array)]) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::Blake2s { - message: message_vector, - output: *result_array, + message: message_vector.to_heap_vector(), + output: result_array.to_heap_array(), }); } else { unreachable!("ICE: Blake2s expects one array argument and one array result") @@ -43,28 +43,28 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Keccak256 => { if let ( - [message, RegisterOrMemory::RegisterIndex(array_size)], - [RegisterOrMemory::HeapArray(result_array)], + [message, BrilligVariable::Simple(array_size)], + [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let mut message_vector = convert_array_or_vector(brillig_context, message, bb_func); message_vector.size = *array_size; brillig_context.black_box_op_instruction(BlackBoxOp::Keccak256 { - message: message_vector, - output: *result_array, + message: message_vector.to_heap_vector(), + output: result_array.to_heap_array(), }); } else { unreachable!("ICE: Keccak256 expects message, message size and result array") } } BlackBoxFunc::HashToField128Security => { - if let ([message], [RegisterOrMemory::RegisterIndex(result_register)]) = + if let ([message], [BrilligVariable::Simple(result_register)]) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::HashToField128Security { - message: message_vector, + message: message_vector.to_heap_vector(), output: *result_register, }); } else { @@ -73,17 +73,17 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::EcdsaSecp256k1 => { if let ( - [RegisterOrMemory::HeapArray(public_key_x), RegisterOrMemory::HeapArray(public_key_y), RegisterOrMemory::HeapArray(signature), 
message], - [RegisterOrMemory::RegisterIndex(result_register)], + [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::Simple(result_register)], ) = (function_arguments, function_results) { let message_hash_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::EcdsaSecp256k1 { - hashed_msg: message_hash_vector, - public_key_x: *public_key_x, - public_key_y: *public_key_y, - signature: *signature, + hashed_msg: message_hash_vector.to_heap_vector(), + public_key_x: public_key_x.to_heap_array(), + public_key_y: public_key_y.to_heap_array(), + signature: signature.to_heap_array(), result: *result_register, }); } else { @@ -94,15 +94,15 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::PedersenCommitment => { if let ( - [message, RegisterOrMemory::RegisterIndex(domain_separator)], - [RegisterOrMemory::HeapArray(result_array)], + [message, BrilligVariable::Simple(domain_separator)], + [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenCommitment { - inputs: message_vector, + inputs: message_vector.to_heap_vector(), domain_separator: *domain_separator, - output: *result_array, + output: result_array.to_heap_array(), }); } else { unreachable!("ICE: Pedersen expects one array argument, a register for the domain separator, and one array result") @@ -110,13 +110,13 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::PedersenHash => { if let ( - [message, RegisterOrMemory::RegisterIndex(domain_separator)], - [RegisterOrMemory::RegisterIndex(result)], + [message, BrilligVariable::Simple(domain_separator)], + [BrilligVariable::Simple(result)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenHash { - inputs: message_vector, + inputs: message_vector.to_heap_vector(), domain_separator: *domain_separator, output: *result, }); @@ -126,8 +126,8 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::SchnorrVerify => { if let ( - [RegisterOrMemory::RegisterIndex(public_key_x), RegisterOrMemory::RegisterIndex(public_key_y), RegisterOrMemory::HeapArray(signature), message], - [RegisterOrMemory::RegisterIndex(result_register)], + [BrilligVariable::Simple(public_key_x), BrilligVariable::Simple(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::Simple(result_register)], ) = (function_arguments, function_results) { let message_hash = convert_array_or_vector(brillig_context, message, bb_func); @@ -135,8 +135,8 @@ pub(crate) fn convert_black_box_call( brillig_context.black_box_op_instruction(BlackBoxOp::SchnorrVerify { public_key_x: *public_key_x, public_key_y: *public_key_y, - message: message_hash, - signature, + message: message_hash.to_heap_vector(), + signature: signature.to_heap_vector(), result: *result_register, }); } else { @@ -145,14 +145,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::FixedBaseScalarMul => { if let ( - [RegisterOrMemory::RegisterIndex(low), RegisterOrMemory::RegisterIndex(high)], - [RegisterOrMemory::HeapArray(result_array)], + [BrilligVariable::Simple(low), BrilligVariable::Simple(high)], + [BrilligVariable::BrilligArray(result_array)], ) 
= (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::FixedBaseScalarMul { low: *low, high: *high, - result: *result_array, + result: result_array.to_heap_array(), }); } else { unreachable!( @@ -166,12 +166,12 @@ pub(crate) fn convert_black_box_call( fn convert_array_or_vector( brillig_context: &mut BrilligContext, - array_or_vector: &RegisterOrMemory, + array_or_vector: &BrilligVariable, bb_func: &BlackBoxFunc, -) -> HeapVector { +) -> BrilligVector { match array_or_vector { - RegisterOrMemory::HeapArray(array) => brillig_context.array_to_vector(array), - RegisterOrMemory::HeapVector(vector) => *vector, + BrilligVariable::BrilligArray(array) => brillig_context.array_to_vector(array), + BrilligVariable::BrilligVector(vector) => *vector, _ => unreachable!( "ICE: {} expected an array or a vector, but got {:?}", bb_func.name(), diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 18fd822b07d..0e06a36fd94 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1,6 +1,6 @@ +use crate::brillig::brillig_ir::brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}; use crate::brillig::brillig_ir::{ - extract_heap_array, extract_register, extract_registers, BrilligBinaryOp, BrilligContext, - BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, + BrilligBinaryOp, BrilligContext, BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, }; use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::{ @@ -13,7 +13,7 @@ use crate::ssa::ir::{ types::{NumericType, Type}, value::{Value, ValueId}, }; -use acvm::acir::brillig::{BinaryFieldOp, BinaryIntOp, HeapArray, RegisterIndex, RegisterOrMemory}; +use acvm::acir::brillig::{BinaryFieldOp, BinaryIntOp, RegisterIndex, RegisterOrMemory}; use acvm::brillig_vm::brillig::HeapVector; use acvm::FieldElement; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; @@ -53,7 +53,7 @@ impl<'block> BrilligBlock<'block> { variables .get_available_variables(function_context) .into_iter() - .flat_map(extract_registers) + .flat_map(|variable| variable.extract_registers()) .collect(), ); let last_uses = function_context.liveness.get_last_uses(&block_id).clone(); @@ -159,7 +159,7 @@ impl<'block> BrilligBlock<'block> { .iter() .flat_map(|value_id| { let return_variable = self.convert_ssa_value(*value_id, dfg); - extract_registers(return_variable) + return_variable.extract_registers() }) .collect(); self.brillig_context.return_instruction(&return_registers); @@ -168,32 +168,44 @@ impl<'block> BrilligBlock<'block> { } /// Passes an arbitrary variable from the registers of the source to the registers of the destination - fn pass_variable(&mut self, source: RegisterOrMemory, destination: RegisterOrMemory) { + fn pass_variable(&mut self, source: BrilligVariable, destination: BrilligVariable) { match (source, destination) { ( - RegisterOrMemory::RegisterIndex(source_register), - RegisterOrMemory::RegisterIndex(destination_register), + BrilligVariable::Simple(source_register), + BrilligVariable::Simple(destination_register), ) => { self.brillig_context.mov_instruction(destination_register, source_register); } ( - RegisterOrMemory::HeapArray(HeapArray { pointer: source_pointer, .. }), - RegisterOrMemory::HeapArray(HeapArray { pointer: destination_pointer, .. 
}), + BrilligVariable::BrilligArray(BrilligArray { + pointer: source_pointer, + size: _, + rc: source_rc, + }), + BrilligVariable::BrilligArray(BrilligArray { + pointer: destination_pointer, + size: _, + rc: destination_rc, + }), ) => { self.brillig_context.mov_instruction(destination_pointer, source_pointer); + self.brillig_context.mov_instruction(destination_rc, source_rc); } ( - RegisterOrMemory::HeapVector(HeapVector { + BrilligVariable::BrilligVector(BrilligVector { pointer: source_pointer, size: source_size, + rc: source_rc, }), - RegisterOrMemory::HeapVector(HeapVector { + BrilligVariable::BrilligVector(BrilligVector { pointer: destination_pointer, size: destination_size, + rc: destination_rc, }), ) => { self.brillig_context.mov_instruction(destination_pointer, source_pointer); self.brillig_context.mov_instruction(destination_size, source_size); + self.brillig_context.mov_instruction(destination_rc, source_rc); } (_, _) => { unreachable!("ICE: Cannot pass value from {:?} to {:?}", source, destination); @@ -214,7 +226,7 @@ impl<'block> BrilligBlock<'block> { // In the case of arrays, the values should already be in memory and the register should // Be a valid pointer to the array. // For slices, two registers are passed, the pointer to the data and a register holding the size of the slice. - Type::Numeric(_) | Type::Array(..) | Type::Slice(..) | Type::Reference => { + Type::Numeric(_) | Type::Array(..) | Type::Slice(..) | Type::Reference(_) => { self.variables.get_block_param( self.function_context, self.block_id, @@ -264,7 +276,25 @@ impl<'block> BrilligBlock<'block> { result_value, dfg, ); - self.brillig_context.allocate_variable_instruction(address_register); + match dfg.type_of_value(result_value) { + Type::Reference(element) => match *element { + Type::Array(..) => { + self.brillig_context + .allocate_array_reference_instruction(address_register); + } + Type::Slice(..) => { + self.brillig_context + .allocate_vector_reference_instruction(address_register); + } + _ => { + self.brillig_context + .allocate_simple_reference_instruction(address_register); + } + }, + _ => { + unreachable!("ICE: Allocate on non-reference type") + } + } } Instruction::Store { address, value } => { let address_register = self.convert_ssa_register_value(*address, dfg); @@ -299,10 +329,11 @@ impl<'block> BrilligBlock<'block> { Value::ForeignFunction(func_name) => { let result_ids = dfg.instruction_results(instruction_id); - let input_registers = - vecmap(arguments, |value_id| self.convert_ssa_value(*value_id, dfg)); + let input_registers = vecmap(arguments, |value_id| { + self.convert_ssa_value(*value_id, dfg).to_register_or_memory() + }); let output_registers = vecmap(result_ids, |value_id| { - self.allocate_external_call_result(*value_id, dfg) + self.allocate_external_call_result(*value_id, dfg).to_register_or_memory() }); self.brillig_context.foreign_call_instruction( func_name.to_owned(), @@ -388,7 +419,7 @@ impl<'block> BrilligBlock<'block> { // or an array in the case of an array. 
if let Type::Numeric(_) = dfg.type_of_value(param_id) { let len_variable = self.convert_ssa_value(arguments[0], dfg); - let len_register_index = extract_register(len_variable); + let len_register_index = len_variable.extract_register(); self.brillig_context.mov_instruction(result_register, len_register_index); } else { self.convert_ssa_array_len(arguments[0], result_register, dfg); @@ -416,29 +447,29 @@ impl<'block> BrilligBlock<'block> { let results = dfg.instruction_results(instruction_id); - let target_len_variable = self.variables.define_variable( + let target_len = self.variables.define_register_variable( self.function_context, self.brillig_context, results[0], dfg, ); - let target_len = extract_register(target_len_variable); - let target_slice = self.variables.define_variable( - self.function_context, - self.brillig_context, - results[1], - dfg, - ); - - let heap_vec = self.brillig_context.extract_heap_vector(target_slice); + let target_vector = self + .variables + .define_variable( + self.function_context, + self.brillig_context, + results[1], + dfg, + ) + .extract_vector(); // Update the user-facing slice length self.brillig_context.mov_instruction(target_len, limb_count); self.brillig_context.radix_instruction( source, - heap_vec, + target_vector, radix, limb_count, matches!(endianness, Endian::Big), @@ -456,24 +487,29 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ); - let target_len = extract_register(target_len_variable); + let target_len = target_len_variable.extract_register(); - let target_slice = self.variables.define_variable( + let target_vector = match self.variables.define_variable( self.function_context, self.brillig_context, results[1], dfg, - ); + ) { + BrilligVariable::BrilligArray(array) => { + self.brillig_context.array_to_vector(&array) + } + BrilligVariable::BrilligVector(vector) => vector, + BrilligVariable::Simple(..) => unreachable!("ICE: ToBits on non-array"), + }; let radix = self.brillig_context.make_constant(2_usize.into()); - let heap_vec = self.brillig_context.extract_heap_vector(target_slice); // Update the user-facing slice length self.brillig_context.mov_instruction(target_len, limb_count); self.brillig_context.radix_instruction( source, - heap_vec, + target_vector, radix, limb_count, matches!(endianness, Endian::Big), @@ -523,8 +559,8 @@ impl<'block> BrilligBlock<'block> { let array_variable = self.convert_ssa_value(*array, dfg); let array_pointer = match array_variable { - RegisterOrMemory::HeapArray(HeapArray { pointer, .. }) => pointer, - RegisterOrMemory::HeapVector(HeapVector { pointer, .. }) => pointer, + BrilligVariable::BrilligArray(BrilligArray { pointer, .. }) => pointer, + BrilligVariable::BrilligVector(BrilligVector { pointer, .. }) => pointer, _ => unreachable!("ICE: array get on non-array"), }; @@ -574,6 +610,14 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_register(condition); self.brillig_context.deallocate_register(right); } + Instruction::IncrementRc { value } => { + let rc_register = match self.convert_ssa_value(*value, dfg) { + BrilligVariable::BrilligArray(BrilligArray { rc, .. }) + | BrilligVariable::BrilligVector(BrilligVector { rc, .. 
}) => rc, + _ => unreachable!("ICE: increment rc on non-array"), + }; + self.brillig_context.usize_op_in_place(rc_register, BinaryIntOp::Add, 1); + } _ => todo!("ICE: Instruction not supported {instruction:?}"), }; @@ -598,10 +642,7 @@ impl<'block> BrilligBlock<'block> { // Convert the arguments to registers casting those to the types of the receiving function let argument_registers: Vec = arguments .iter() - .flat_map(|argument_id| { - let variable_to_pass = self.convert_ssa_value(*argument_id, dfg); - extract_registers(variable_to_pass) - }) + .flat_map(|argument_id| self.convert_ssa_value(*argument_id, dfg).extract_registers()) .collect(); let result_ids = dfg.instruction_results(instruction_id); @@ -637,7 +678,7 @@ impl<'block> BrilligBlock<'block> { // Collect the registers that should have been returned let returned_registers: Vec = variables_assigned_to .iter() - .flat_map(|returned_variable| extract_registers(*returned_variable)) + .flat_map(|returned_variable| returned_variable.extract_registers()) .collect(); assert!( @@ -654,17 +695,13 @@ impl<'block> BrilligBlock<'block> { &mut self, array_pointer: RegisterIndex, index_register: RegisterIndex, - destination_variable: RegisterOrMemory, + destination_variable: BrilligVariable, ) { match destination_variable { - RegisterOrMemory::RegisterIndex(destination_register) => { + BrilligVariable::Simple(destination_register) => { self.brillig_context.array_get(array_pointer, index_register, destination_register); } - RegisterOrMemory::HeapArray(HeapArray { pointer, .. }) => { - self.brillig_context.array_get(array_pointer, index_register, pointer); - } - RegisterOrMemory::HeapVector(..) => { - // Vectors are stored as references inside arrays to be able to match SSA indexes + BrilligVariable::BrilligArray(..) | BrilligVariable::BrilligVector(..) => { let reference = self.brillig_context.allocate_register(); self.brillig_context.array_get(array_pointer, index_register, reference); self.brillig_context.load_variable_instruction(destination_variable, reference); @@ -677,25 +714,30 @@ impl<'block> BrilligBlock<'block> { /// With a specific value changed. fn convert_ssa_array_set( &mut self, - source_variable: RegisterOrMemory, - destination_variable: RegisterOrMemory, + source_variable: BrilligVariable, + destination_variable: BrilligVariable, index_register: RegisterIndex, - value_variable: RegisterOrMemory, + value_variable: BrilligVariable, ) { let destination_pointer = match destination_variable { - RegisterOrMemory::HeapArray(HeapArray { pointer, .. }) => pointer, - RegisterOrMemory::HeapVector(HeapVector { pointer, .. }) => pointer, + BrilligVariable::BrilligArray(BrilligArray { pointer, .. }) => pointer, + BrilligVariable::BrilligVector(BrilligVector { pointer, .. }) => pointer, _ => unreachable!("ICE: array set returns non-array"), }; - // First issue a array copy to the destination + let reference_count = match source_variable { + BrilligVariable::BrilligArray(BrilligArray { rc, .. }) + | BrilligVariable::BrilligVector(BrilligVector { rc, .. 
}) => rc, + _ => unreachable!("ICE: array set on non-array"), + }; + let (source_pointer, source_size_as_register) = match source_variable { - RegisterOrMemory::HeapArray(HeapArray { size, pointer }) => { + BrilligVariable::BrilligArray(BrilligArray { size, pointer, rc: _ }) => { let source_size_register = self.brillig_context.allocate_register(); self.brillig_context.const_instruction(source_size_register, size.into()); (pointer, source_size_register) } - RegisterOrMemory::HeapVector(HeapVector { size, pointer }) => { + BrilligVariable::BrilligVector(BrilligVector { size, pointer, rc: _ }) => { let source_size_register = self.brillig_context.allocate_register(); self.brillig_context.mov_instruction(source_size_register, size); (pointer, source_size_register) @@ -703,51 +745,96 @@ impl<'block> BrilligBlock<'block> { _ => unreachable!("ICE: array set on non-array"), }; - self.brillig_context - .allocate_array_instruction(destination_pointer, source_size_as_register); + let one = self.brillig_context.make_constant(1_usize.into()); + let condition = self.brillig_context.allocate_register(); - self.brillig_context.copy_array_instruction( - source_pointer, - destination_pointer, - source_size_as_register, + self.brillig_context.binary_instruction( + reference_count, + one, + condition, + BrilligBinaryOp::Field { op: BinaryFieldOp::Equals }, ); - if let RegisterOrMemory::HeapVector(HeapVector { size: target_size, .. }) = - destination_variable - { - self.brillig_context.mov_instruction(target_size, source_size_as_register); + self.brillig_context.branch_instruction(condition, |ctx, cond| { + if cond { + // Reference count is 1, we can mutate the array directly + ctx.mov_instruction(destination_pointer, source_pointer); + } else { + // First issue a array copy to the destination + ctx.allocate_array_instruction(destination_pointer, source_size_as_register); + + ctx.copy_array_instruction( + source_pointer, + destination_pointer, + source_size_as_register, + ); + } + }); + + match destination_variable { + BrilligVariable::BrilligArray(BrilligArray { rc: target_rc, .. }) => { + self.brillig_context.const_instruction(target_rc, 1_usize.into()); + } + BrilligVariable::BrilligVector(BrilligVector { + size: target_size, + rc: target_rc, + .. + }) => { + self.brillig_context.mov_instruction(target_size, source_size_as_register); + self.brillig_context.const_instruction(target_rc, 1_usize.into()); + } + _ => unreachable!("ICE: array set on non-array"), } // Then set the value in the newly created array self.store_variable_in_array(destination_pointer, index_register, value_variable); self.brillig_context.deallocate_register(source_size_as_register); + self.brillig_context.deallocate_register(one); + self.brillig_context.deallocate_register(condition); } - pub(crate) fn store_variable_in_array( - &mut self, + pub(crate) fn store_variable_in_array_with_ctx( + ctx: &mut BrilligContext, destination_pointer: RegisterIndex, index_register: RegisterIndex, - value_variable: RegisterOrMemory, + value_variable: BrilligVariable, ) { match value_variable { - RegisterOrMemory::RegisterIndex(value_register) => { - self.brillig_context.array_set(destination_pointer, index_register, value_register); + BrilligVariable::Simple(value_register) => { + ctx.array_set(destination_pointer, index_register, value_register); } - RegisterOrMemory::HeapArray(HeapArray { pointer, .. 
}) => { - self.brillig_context.array_set(destination_pointer, index_register, pointer); + BrilligVariable::BrilligArray(_) => { + let reference: RegisterIndex = ctx.allocate_register(); + ctx.allocate_array_reference_instruction(reference); + ctx.store_variable_instruction(reference, value_variable); + ctx.array_set(destination_pointer, index_register, reference); + ctx.deallocate_register(reference); } - RegisterOrMemory::HeapVector(_) => { - // Vectors are stored as references inside arrays to be able to match SSA indexes - let reference = self.brillig_context.allocate_register(); - self.brillig_context.allocate_variable_instruction(reference); - self.brillig_context.store_variable_instruction(reference, value_variable); - self.brillig_context.array_set(destination_pointer, index_register, reference); - self.brillig_context.deallocate_register(reference); + BrilligVariable::BrilligVector(_) => { + let reference = ctx.allocate_register(); + ctx.allocate_vector_reference_instruction(reference); + ctx.store_variable_instruction(reference, value_variable); + ctx.array_set(destination_pointer, index_register, reference); + ctx.deallocate_register(reference); } } } + pub(crate) fn store_variable_in_array( + &mut self, + destination_pointer: RegisterIndex, + index_register: RegisterIndex, + value_variable: BrilligVariable, + ) { + Self::store_variable_in_array_with_ctx( + self.brillig_context, + destination_pointer, + index_register, + value_variable, + ); + } + /// Convert the SSA slice operations to brillig slice operations fn convert_ssa_slice_intrinsic_call( &mut self, @@ -770,7 +857,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -781,7 +868,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); let item_values = vecmap(&arguments[2..element_size + 2], |arg| { self.convert_ssa_value(*arg, dfg) }); @@ -797,7 +884,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -807,7 +894,7 @@ impl<'block> BrilligBlock<'block> { results[1], dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); let item_values = vecmap(&arguments[2..element_size + 2], |arg| { self.convert_ssa_value(*arg, dfg) }); @@ -823,7 +910,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -834,7 +921,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); let pop_variables = vecmap(&results[2..element_size + 2], |result| { self.variables.define_variable( @@ -856,7 +943,7 @@ impl<'block> BrilligBlock<'block> { results[element_size], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + 
BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -875,7 +962,7 @@ impl<'block> BrilligBlock<'block> { results[element_size + 1], dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); @@ -888,7 +975,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -900,7 +987,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); // Remove if indexing in insert is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 @@ -931,7 +1018,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -943,7 +1030,7 @@ impl<'block> BrilligBlock<'block> { target_id, dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); // Remove if indexing in remove is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 @@ -998,7 +1085,7 @@ impl<'block> BrilligBlock<'block> { binary_op: BinaryIntOp, ) { let source_len_variable = self.convert_ssa_value(source_value, dfg); - let source_len = extract_register(source_len_variable); + let source_len = source_len_variable.extract_register(); self.brillig_context.usize_op(source_len, target_len, binary_op, 1); } @@ -1064,7 +1151,7 @@ impl<'block> BrilligBlock<'block> { } /// Converts an SSA `ValueId` into a `RegisterOrMemory`. Initializes if necessary. 
- fn convert_ssa_value(&mut self, value_id: ValueId, dfg: &DataFlowGraph) -> RegisterOrMemory { + fn convert_ssa_value(&mut self, value_id: ValueId, dfg: &DataFlowGraph) -> BrilligVariable { let value_id = dfg.resolve(value_id); let value = &dfg[value_id]; @@ -1082,7 +1169,7 @@ impl<'block> BrilligBlock<'block> { } else { let new_variable = self.variables.allocate_constant(self.brillig_context, value_id, dfg); - let register_index = extract_register(new_variable); + let register_index = new_variable.extract_register(); self.brillig_context.const_instruction(register_index, (*constant).into()); new_variable @@ -1097,19 +1184,21 @@ impl<'block> BrilligBlock<'block> { // Initialize the variable let pointer = match new_variable { - RegisterOrMemory::HeapArray(heap_array) => { + BrilligVariable::BrilligArray(brillig_array) => { self.brillig_context - .allocate_fixed_length_array(heap_array.pointer, array.len()); + .allocate_fixed_length_array(brillig_array.pointer, array.len()); + self.brillig_context + .const_instruction(brillig_array.rc, 1_usize.into()); - heap_array.pointer + brillig_array.pointer } - RegisterOrMemory::HeapVector(heap_vector) => { - self.brillig_context - .const_instruction(heap_vector.size, array.len().into()); + BrilligVariable::BrilligVector(vector) => { + self.brillig_context.const_instruction(vector.size, array.len().into()); self.brillig_context - .allocate_array_instruction(heap_vector.pointer, heap_vector.size); + .allocate_array_instruction(vector.pointer, vector.size); + self.brillig_context.const_instruction(vector.rc, 1_usize.into()); - heap_vector.pointer + vector.pointer } _ => unreachable!( "ICE: Cannot initialize array value created as {new_variable:?}" @@ -1138,7 +1227,7 @@ impl<'block> BrilligBlock<'block> { new_variable } } - _ => { + Value::Function(_) | Value::Intrinsic(_) | Value::ForeignFunction(_) => { todo!("ICE: Cannot convert value {value:?}") } } @@ -1151,14 +1240,14 @@ impl<'block> BrilligBlock<'block> { dfg: &DataFlowGraph, ) -> RegisterIndex { let variable = self.convert_ssa_value(value_id, dfg); - extract_register(variable) + variable.extract_register() } fn allocate_external_call_result( &mut self, result: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let typ = dfg[result].get_type(); match typ { Type::Numeric(_) => self.variables.define_variable( @@ -1175,8 +1264,10 @@ impl<'block> BrilligBlock<'block> { result, dfg, ); - let array = extract_heap_array(variable); + let array = variable.extract_array(); self.brillig_context.allocate_fixed_length_array(array.pointer, array.size); + self.brillig_context.const_instruction(array.rc, 1_usize.into()); + variable } Type::Slice(_) => { @@ -1186,12 +1277,14 @@ impl<'block> BrilligBlock<'block> { result, dfg, ); - let vector = self.brillig_context.extract_heap_vector(variable); + let vector = variable.extract_vector(); // Set the pointer to the current stack frame // The stack pointer will then be updated by the caller of this method // once the external call is resolved and the array size is known self.brillig_context.set_array_pointer(vector.pointer); + self.brillig_context.const_instruction(vector.rc, 1_usize.into()); + variable } _ => { @@ -1201,7 +1294,7 @@ impl<'block> BrilligBlock<'block> { } /// Gets the "user-facing" length of an array. - /// An array of structs with two fields would be stored as an 2 * array.len() heap array/heap vector. + /// An array of structs with two fields would be stored as an 2 * array.len() array/vector. 
/// So we divide the length by the number of subitems in an item to get the user-facing length. fn convert_ssa_array_len( &mut self, @@ -1213,11 +1306,11 @@ impl<'block> BrilligBlock<'block> { let element_size = dfg.type_of_value(array_id).element_size(); match array_variable { - RegisterOrMemory::HeapArray(HeapArray { size, .. }) => { + BrilligVariable::BrilligArray(BrilligArray { size, .. }) => { self.brillig_context .const_instruction(result_register, (size / element_size).into()); } - RegisterOrMemory::HeapVector(HeapVector { size, .. }) => { + BrilligVariable::BrilligVector(BrilligVector { size, .. }) => { self.brillig_context.usize_op( size, result_register, @@ -1240,7 +1333,7 @@ pub(crate) fn type_of_binary_operation(lhs_type: &Type, rhs_type: &Type) -> Type (_, Type::Function) | (Type::Function, _) => { unreachable!("Functions are invalid in binary operations") } - (_, Type::Reference) | (Type::Reference, _) => { + (_, Type::Reference(_)) | (Type::Reference(_), _) => { unreachable!("References are invalid in binary operations") } (_, Type::Array(..)) | (Type::Array(..), _) => { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index eb7bab8c971..f2e698c0aa9 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -1,8 +1,11 @@ -use acvm::brillig_vm::brillig::{HeapArray, HeapVector, RegisterIndex, RegisterOrMemory}; +use acvm::brillig_vm::brillig::RegisterIndex; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::{ - brillig::brillig_ir::{extract_register, BrilligContext}, + brillig::brillig_ir::{ + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, + BrilligContext, + }, ssa::ir::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -16,7 +19,7 @@ use super::brillig_fn::FunctionContext; #[derive(Debug, Default)] pub(crate) struct BlockVariables { available_variables: HashSet, - available_constants: HashMap, + available_constants: HashMap, } impl BlockVariables { @@ -32,7 +35,7 @@ impl BlockVariables { pub(crate) fn get_available_variables( &self, function_context: &FunctionContext, - ) -> Vec { + ) -> Vec { self.available_variables .iter() .map(|value_id| { @@ -52,7 +55,7 @@ impl BlockVariables { brillig_context: &mut BrilligContext, value_id: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); let variable = allocate_value(value_id, brillig_context, dfg); @@ -74,7 +77,7 @@ impl BlockVariables { dfg: &DataFlowGraph, ) -> RegisterIndex { let variable = self.define_variable(function_context, brillig_context, value, dfg); - extract_register(variable) + variable.extract_register() } /// Removes a variable so it's not used anymore within this block. 
@@ -88,7 +91,7 @@ impl BlockVariables { function_context: &FunctionContext, value_id: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); if let Some(constant) = self.available_constants.get(&value_id) { *constant @@ -112,7 +115,7 @@ impl BlockVariables { brillig_context: &mut BrilligContext, value_id: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); let constant = allocate_value(value_id, brillig_context, dfg); self.available_constants.insert(value_id, constant); @@ -124,7 +127,7 @@ impl BlockVariables { &mut self, value_id: ValueId, dfg: &DataFlowGraph, - ) -> Option { + ) -> Option { let value_id = dfg.resolve(value_id); self.available_constants.get(&value_id).cloned() } @@ -141,7 +144,7 @@ impl BlockVariables { block_id: BasicBlockId, value_id: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); assert!( function_context @@ -166,25 +169,34 @@ pub(crate) fn allocate_value( value_id: ValueId, brillig_context: &mut BrilligContext, dfg: &DataFlowGraph, -) -> RegisterOrMemory { +) -> BrilligVariable { let typ = dfg.type_of_value(value_id); match typ { - Type::Numeric(_) | Type::Reference => { + Type::Numeric(_) | Type::Reference(_) => { let register = brillig_context.allocate_register(); - RegisterOrMemory::RegisterIndex(register) + BrilligVariable::Simple(register) } Type::Array(item_typ, elem_count) => { let pointer_register = brillig_context.allocate_register(); + let rc_register = brillig_context.allocate_register(); let size = compute_array_length(&item_typ, elem_count); - RegisterOrMemory::HeapArray(HeapArray { pointer: pointer_register, size }) + + BrilligVariable::BrilligArray(BrilligArray { + pointer: pointer_register, + size, + rc: rc_register, + }) } Type::Slice(_) => { let pointer_register = brillig_context.allocate_register(); let size_register = brillig_context.allocate_register(); - RegisterOrMemory::HeapVector(HeapVector { + let rc_register = brillig_context.allocate_register(); + + BrilligVariable::BrilligVector(BrilligVector { pointer: pointer_register, size: size_register, + rc: rc_register, }) } Type::Function => { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index ec72ceb2909..026def4ef11 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -1,9 +1,9 @@ -use acvm::brillig_vm::brillig::RegisterOrMemory; use iter_extended::vecmap; use crate::{ brillig::brillig_ir::{ artifact::{BrilligParameter, Label}, + brillig_variable::BrilligVariable, BrilligContext, }, ssa::ir::{ @@ -21,7 +21,7 @@ use super::{brillig_block_variables::allocate_value, variable_liveness::Variable pub(crate) struct FunctionContext { pub(crate) function_id: FunctionId, /// Map from SSA values its allocation. Since values can be only defined once in SSA form, we insert them here on when we allocate them at their definition. - pub(crate) ssa_value_allocations: HashMap, + pub(crate) ssa_value_allocations: HashMap, /// Block parameters are pre allocated at the function level. pub(crate) block_parameters: HashMap>, /// The block ids of the function in reverse post order. 
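A brief sketch of how many registers allocate_value reserves per SSA type after this change; the helper name is illustrative and assumes the types above are in scope:

    // Mirrors allocate_value: numerics and references stay in one register, arrays gain
    // a reference-count register next to their pointer, and slices carry pointer, size and rc.
    fn registers_for(typ: &Type) -> usize {
        match typ {
            Type::Numeric(_) | Type::Reference(_) => 1,
            Type::Array(..) => BrilligArray::registers_count(), // pointer + rc
            Type::Slice(_) => BrilligVector::registers_count(), // pointer + size + rc
            _ => unreachable!("other types are out of scope for this sketch"),
        }
    }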
@@ -72,7 +72,7 @@ impl FunctionContext { fn ssa_type_to_parameter(typ: &Type) -> BrilligParameter { match typ { - Type::Numeric(_) | Type::Reference => BrilligParameter::Simple, + Type::Numeric(_) | Type::Reference(_) => BrilligParameter::Simple, Type::Array(item_type, size) => BrilligParameter::Array( vecmap(item_type.iter(), |item_typ| { FunctionContext::ssa_type_to_parameter(item_typ) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs index 211d670e7d8..6402e6f9d97 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs @@ -1,13 +1,15 @@ -use acvm::brillig_vm::brillig::{BinaryIntOp, HeapVector, RegisterIndex, RegisterOrMemory}; +use acvm::brillig_vm::brillig::{BinaryIntOp, RegisterIndex}; + +use crate::brillig::brillig_ir::brillig_variable::{BrilligVariable, BrilligVector}; use super::brillig_block::BrilligBlock; impl<'block> BrilligBlock<'block> { pub(crate) fn slice_push_back_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, - variables_to_insert: &[RegisterOrMemory], + target_vector: BrilligVector, + source_vector: BrilligVector, + variables_to_insert: &[BrilligVariable], ) { // First we need to allocate the target vector incrementing the size by variables_to_insert.len() self.brillig_context.usize_op( @@ -17,6 +19,8 @@ impl<'block> BrilligBlock<'block> { variables_to_insert.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Now we copy the source vector into the target vector self.brillig_context.copy_array_instruction( @@ -40,9 +44,9 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_push_front_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, - variables_to_insert: &[RegisterOrMemory], + target_vector: BrilligVector, + source_vector: BrilligVector, + variables_to_insert: &[BrilligVariable], ) { // First we need to allocate the target vector incrementing the size by variables_to_insert.len() self.brillig_context.usize_op( @@ -52,6 +56,8 @@ impl<'block> BrilligBlock<'block> { variables_to_insert.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Now we offset the target pointer by variables_to_insert.len() let destination_copy_pointer = self.brillig_context.allocate_register(); @@ -81,9 +87,9 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_pop_front_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, - removed_items: &[RegisterOrMemory], + target_vector: BrilligVector, + source_vector: BrilligVector, + removed_items: &[BrilligVariable], ) { // First we need to allocate the target vector decrementing the size by removed_items.len() self.brillig_context.usize_op( @@ -93,6 +99,8 @@ impl<'block> BrilligBlock<'block> { removed_items.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Now we offset the source pointer by removed_items.len() let 
source_copy_pointer = self.brillig_context.allocate_register(); @@ -121,9 +129,9 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_pop_back_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, - removed_items: &[RegisterOrMemory], + target_vector: BrilligVector, + source_vector: BrilligVector, + removed_items: &[BrilligVariable], ) { // First we need to allocate the target vector decrementing the size by removed_items.len() self.brillig_context.usize_op( @@ -133,6 +141,8 @@ impl<'block> BrilligBlock<'block> { removed_items.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Now we copy all elements except the last items into the target vector self.brillig_context.copy_array_instruction( @@ -156,10 +166,10 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_insert_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, + target_vector: BrilligVector, + source_vector: BrilligVector, index: RegisterIndex, - items: &[RegisterOrMemory], + items: &[BrilligVariable], ) { // First we need to allocate the target vector incrementing the size by items.len() self.brillig_context.usize_op( @@ -169,6 +179,8 @@ impl<'block> BrilligBlock<'block> { items.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Copy the elements to the left of the index self.brillig_context.copy_array_instruction( @@ -226,10 +238,10 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_remove_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, + target_vector: BrilligVector, + source_vector: BrilligVector, index: RegisterIndex, - removed_items: &[RegisterOrMemory], + removed_items: &[BrilligVariable], ) { // First we need to allocate the target vector decrementing the size by removed_items.len() self.brillig_context.usize_op( @@ -239,6 +251,8 @@ impl<'block> BrilligBlock<'block> { removed_items.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Copy the elements to the left of the index self.brillig_context.copy_array_instruction( @@ -297,11 +311,11 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn convert_array_or_vector_to_vector( &mut self, - source_variable: RegisterOrMemory, - ) -> HeapVector { + source_variable: BrilligVariable, + ) -> BrilligVector { match source_variable { - RegisterOrMemory::HeapVector(source_vector) => source_vector, - RegisterOrMemory::HeapArray(source_array) => { + BrilligVariable::BrilligVector(source_vector) => source_vector, + BrilligVariable::BrilligArray(source_array) => { self.brillig_context.array_to_vector(&source_array) } _ => unreachable!("ICE: unsupported slice push back source {:?}", source_variable), @@ -313,13 +327,16 @@ impl<'block> BrilligBlock<'block> { mod tests { use std::vec; - use acvm::acir::brillig::{HeapVector, Value}; - use acvm::brillig_vm::brillig::{RegisterIndex, RegisterOrMemory}; + use acvm::acir::brillig::Value; + use acvm::brillig_vm::brillig::RegisterIndex; use crate::brillig::brillig_gen::brillig_block::BrilligBlock; use 
crate::brillig::brillig_gen::brillig_block_variables::BlockVariables; use crate::brillig::brillig_gen::brillig_fn::FunctionContext; use crate::brillig::brillig_ir::artifact::BrilligParameter; + use crate::brillig::brillig_ir::brillig_variable::{ + BrilligArray, BrilligVariable, BrilligVector, + }; use crate::brillig::brillig_ir::tests::{ create_and_run_vm, create_context, create_entry_point_bytecode, }; @@ -373,33 +390,44 @@ mod tests { let (_, mut function_context, mut context) = create_test_environment(); // Allocate the parameters - let array_pointer = context.allocate_register(); + let array_variable = BrilligArray { + pointer: context.allocate_register(), + size: array.len(), + rc: context.allocate_register(), + }; let item_to_insert = context.allocate_register(); // Cast the source array to a vector - let array_size = context.make_constant(array.len().into()); + let source_vector = context.array_to_vector(&array_variable); // Allocate the results - let copied_array_pointer = context.allocate_register(); - let copied_array_size = context.allocate_register(); + let target_vector = BrilligVector { + pointer: context.allocate_register(), + size: context.allocate_register(), + rc: context.allocate_register(), + }; let mut block = create_brillig_block(&mut function_context, &mut context); if push_back { block.slice_push_back_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, - &[RegisterOrMemory::RegisterIndex(item_to_insert)], + target_vector, + source_vector, + &[BrilligVariable::Simple(item_to_insert)], ); } else { block.slice_push_front_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, - &[RegisterOrMemory::RegisterIndex(item_to_insert)], + target_vector, + source_vector, + &[BrilligVariable::Simple(item_to_insert)], ); } - context.return_instruction(&[copied_array_pointer, copied_array_size]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + target_vector.size, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm( @@ -465,34 +493,45 @@ mod tests { let (_, mut function_context, mut context) = create_test_environment(); // Allocate the parameters - let array_pointer = context.allocate_register(); + let array_variable = BrilligArray { + pointer: context.allocate_register(), + size: array.len(), + rc: context.allocate_register(), + }; // Cast the source array to a vector - let array_size = context.make_constant(array.len().into()); + let source_vector = context.array_to_vector(&array_variable); // Allocate the results - let copied_array_pointer = context.allocate_register(); + let target_vector = BrilligVector { + pointer: context.allocate_register(), + size: context.allocate_register(), + rc: context.allocate_register(), + }; let removed_item = context.allocate_register(); - let copied_array_size = context.allocate_register(); - let mut block = create_brillig_block(&mut function_context, &mut context); if pop_back { block.slice_pop_back_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, - &[RegisterOrMemory::RegisterIndex(removed_item)], + target_vector, + source_vector, + &[BrilligVariable::Simple(removed_item)], ); } else { block.slice_pop_front_operation( - HeapVector { pointer: copied_array_pointer, size: 
copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, - &[RegisterOrMemory::RegisterIndex(removed_item)], + target_vector, + source_vector, + &[BrilligVariable::Simple(removed_item)], ); } - context.return_instruction(&[copied_array_pointer, copied_array_size, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + target_vector.size, + removed_item, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm(array.clone(), vec![Value::from(0_usize)], &bytecode); @@ -557,28 +596,38 @@ mod tests { let (_, mut function_context, mut context) = create_test_environment(); // Allocate the parameters - let array_pointer = context.allocate_register(); + let array_variable = BrilligArray { + pointer: context.allocate_register(), + size: array.len(), + rc: context.allocate_register(), + }; let item_to_insert = context.allocate_register(); let index_to_insert = context.allocate_register(); // Cast the source array to a vector - let array_size = context.make_constant(array.len().into()); + let source_vector = context.array_to_vector(&array_variable); // Allocate the results - let copied_array_pointer = context.allocate_register(); - - let copied_array_size = context.allocate_register(); + let target_vector = BrilligVector { + pointer: context.allocate_register(), + size: context.allocate_register(), + rc: context.allocate_register(), + }; let mut block = create_brillig_block(&mut function_context, &mut context); block.slice_insert_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, + target_vector, + source_vector, index_to_insert, - &[RegisterOrMemory::RegisterIndex(item_to_insert)], + &[BrilligVariable::Simple(item_to_insert)], ); - context.return_instruction(&[copied_array_pointer, copied_array_size]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + target_vector.size, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm( @@ -679,28 +728,39 @@ mod tests { let (_, mut function_context, mut context) = create_test_environment(); // Allocate the parameters - let array_pointer = context.allocate_register(); + let array_variable = BrilligArray { + pointer: context.allocate_register(), + size: array.len(), + rc: context.allocate_register(), + }; let index_to_insert = context.allocate_register(); // Cast the source array to a vector - let array_size = context.make_constant(array.len().into()); + let source_vector = context.array_to_vector(&array_variable); // Allocate the results - let copied_array_pointer = context.allocate_register(); + let target_vector = BrilligVector { + pointer: context.allocate_register(), + size: context.allocate_register(), + rc: context.allocate_register(), + }; let removed_item = context.allocate_register(); - let copied_array_size = context.allocate_register(); - let mut block = create_brillig_block(&mut function_context, &mut context); block.slice_remove_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, + target_vector, + source_vector, index_to_insert, - &[RegisterOrMemory::RegisterIndex(removed_item)], + &[BrilligVariable::Simple(removed_item)], ); - context.return_instruction(&[copied_array_pointer, copied_array_size, removed_item]); + context.return_instruction(&[ + 
target_vector.pointer, + target_vector.rc, + target_vector.size, + removed_item, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm(array.clone(), vec![Value::from(0_usize), index], &bytecode); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index d57196288bf..05978c2c6ab 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -332,7 +332,7 @@ mod test { let v0 = builder.add_parameter(Type::field()); let v1 = builder.add_parameter(Type::field()); - let v3 = builder.insert_allocate(); + let v3 = builder.insert_allocate(Type::field()); let zero = builder.numeric_constant(0u128, Type::field()); builder.insert_store(v3, zero); @@ -439,7 +439,7 @@ mod test { let v0 = builder.add_parameter(Type::field()); let v1 = builder.add_parameter(Type::field()); - let v3 = builder.insert_allocate(); + let v3 = builder.insert_allocate(Type::field()); let zero = builder.numeric_constant(0u128, Type::field()); builder.insert_store(v3, zero); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 880ae95dcd7..ff182aaa7d2 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -5,6 +5,7 @@ //! ssa types and types in this module. //! A similar paradigm can be seen with the `acir_ir` module. pub(crate) mod artifact; +pub(crate) mod brillig_variable; pub(crate) mod debug_show; pub(crate) mod registers; @@ -14,12 +15,13 @@ use crate::ssa::ir::dfg::CallStack; use self::{ artifact::{BrilligArtifact, UnresolvedJumpLocation}, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, registers::BrilligRegistersContext, }; use acvm::{ acir::brillig::{ - BinaryFieldOp, BinaryIntOp, BlackBoxOp, HeapArray, HeapVector, Opcode as BrilligOpcode, - RegisterIndex, RegisterOrMemory, Value, + BinaryFieldOp, BinaryIntOp, BlackBoxOp, Opcode as BrilligOpcode, RegisterIndex, + RegisterOrMemory, Value, }, FieldElement, }; @@ -88,6 +90,8 @@ pub(crate) struct BrilligContext { context_label: String, /// Section label, used to separate sections of code section_label: usize, + /// Stores the next available section + next_section: usize, /// IR printer debug_show: DebugShow, } @@ -100,6 +104,7 @@ impl BrilligContext { registers: BrilligRegistersContext::new(), context_label: String::default(), section_label: 0, + next_section: 1, debug_show: DebugShow::new(enable_debug_trace), } } @@ -161,10 +166,14 @@ impl BrilligContext { /// Allocates a variable in memory and stores the /// pointer to the array in `pointer_register` - pub(crate) fn allocate_variable_instruction(&mut self, pointer_register: RegisterIndex) { + fn allocate_variable_reference_instruction( + &mut self, + pointer_register: RegisterIndex, + size: usize, + ) { self.debug_show.allocate_instruction(pointer_register); - // A variable can be stored in up to two values, so we reserve two values for that. - let size_register = self.make_constant(2_u128.into()); + // A variable can be stored in up to three values, so we reserve three values for that. 
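A short note on the cell layout these reference allocations assume, matching the +1 and +2 offsets used by load_variable_instruction and store_variable_instruction later in this file (offsets are relative to the reference pointer; the summary is illustrative):

    // Simple variable:  [0] value                       (1 cell)
    // BrilligArray:     [0] pointer, [1] rc              (2 cells; the size is a compile-time constant)
    // BrilligVector:    [0] pointer, [1] size, [2] rc    (3 cells)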
+ let size_register = self.make_constant(size.into()); self.push_opcode(BrilligOpcode::Mov { destination: pointer_register, source: ReservedRegisters::stack_pointer(), @@ -177,6 +186,30 @@ impl BrilligContext { ); } + pub(crate) fn allocate_simple_reference_instruction( + &mut self, + pointer_register: RegisterIndex, + ) { + self.allocate_variable_reference_instruction(pointer_register, 1); + } + + pub(crate) fn allocate_array_reference_instruction(&mut self, pointer_register: RegisterIndex) { + self.allocate_variable_reference_instruction( + pointer_register, + BrilligArray::registers_count(), + ); + } + + pub(crate) fn allocate_vector_reference_instruction( + &mut self, + pointer_register: RegisterIndex, + ) { + self.allocate_variable_reference_instruction( + pointer_register, + BrilligVector::registers_count(), + ); + } + /// Gets the value in the array at index `index` and stores it in `result` pub(crate) fn array_get( &mut self, @@ -253,8 +286,8 @@ impl BrilligContext { { let iterator_register = self.make_constant(0_u128.into()); - let loop_label = self.next_section_label(); - self.enter_next_section(); + let (loop_section, loop_label) = self.reserve_next_section_label(); + self.enter_section(loop_section); // Loop body @@ -267,7 +300,7 @@ impl BrilligContext { BinaryIntOp::LessThan, ); - let exit_loop_label = self.next_section_label(); + let (exit_loop_section, exit_loop_label) = self.reserve_next_section_label(); self.not_instruction(iterator_less_than_iterations, 1, iterator_less_than_iterations); self.jump_if_instruction(iterator_less_than_iterations, exit_loop_label); @@ -281,12 +314,41 @@ impl BrilligContext { self.jump_instruction(loop_label); // Exit the loop - self.enter_next_section(); + self.enter_section(exit_loop_section); + // Deallocate our temporary registers self.deallocate_register(iterator_less_than_iterations); self.deallocate_register(iterator_register); } + /// This instruction will issue an if-then branch that will check if the condition is true + /// and if so, perform the instructions given in `f(self, true)` and otherwise perform the + /// instructions given in `f(self, false)`. A boolean is passed instead of two separate + /// functions to allow the given function to mutably alias its environment. 
+ pub(crate) fn branch_instruction( + &mut self, + condition: RegisterIndex, + mut f: impl FnMut(&mut BrilligContext, bool), + ) { + // Reserve 3 sections + let (then_section, then_label) = self.reserve_next_section_label(); + let (otherwise_section, otherwise_label) = self.reserve_next_section_label(); + let (end_section, end_label) = self.reserve_next_section_label(); + + self.jump_if_instruction(condition, then_label.clone()); + self.jump_instruction(otherwise_label.clone()); + + self.enter_section(then_section); + f(self, true); + self.jump_instruction(end_label.clone()); + + self.enter_section(otherwise_section); + f(self, false); + self.jump_instruction(end_label.clone()); + + self.enter_section(end_section); + } + /// Adds a label to the next opcode pub(crate) fn enter_context(&mut self, label: T) { self.debug_show.enter_context(label.to_string()); @@ -299,23 +361,25 @@ impl BrilligContext { .add_label_at_position(self.current_section_label(), self.obj.index_of_next_opcode()); } - /// Increments the section label and adds a section label to the next opcode - fn enter_next_section(&mut self) { - self.section_label += 1; + /// Enter the given section + fn enter_section(&mut self, section: usize) { + self.section_label = section; self.obj .add_label_at_position(self.current_section_label(), self.obj.index_of_next_opcode()); } + /// Create, reserve, and return a new section label. + fn reserve_next_section_label(&mut self) -> (usize, String) { + let section = self.next_section; + self.next_section += 1; + (section, self.compute_section_label(section)) + } + /// Internal function used to compute the section labels fn compute_section_label(&self, section: usize) -> String { format!("{}-{}", self.context_label, section) } - /// Returns the next section label - fn next_section_label(&self) -> String { - self.compute_section_label(self.section_label + 1) - } - /// Returns the current section label fn current_section_label(&self) -> String { self.compute_section_label(self.section_label) @@ -371,15 +435,13 @@ impl BrilligContext { assert_message: Option, ) { self.debug_show.constrain_instruction(condition); - self.add_unresolved_jump( - BrilligOpcode::JumpIf { condition, location: 0 }, - self.next_section_label(), - ); + let (next_section, next_label) = self.reserve_next_section_label(); + self.add_unresolved_jump(BrilligOpcode::JumpIf { condition, location: 0 }, next_label); self.push_opcode(BrilligOpcode::Trap); if let Some(assert_message) = assert_message { self.obj.add_assert_message_to_last_opcode(assert_message); } - self.enter_next_section(); + self.enter_section(next_section); } /// Processes a return instruction. @@ -528,17 +590,24 @@ impl BrilligContext { /// Loads a variable stored previously pub(crate) fn load_variable_instruction( &mut self, - destination: RegisterOrMemory, + destination: BrilligVariable, variable_pointer: RegisterIndex, ) { match destination { - RegisterOrMemory::RegisterIndex(register_index) => { + BrilligVariable::Simple(register_index) => { self.load_instruction(register_index, variable_pointer); } - RegisterOrMemory::HeapArray(HeapArray { pointer, .. 
}) => { + BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.load_instruction(pointer, variable_pointer); + + let rc_pointer = self.allocate_register(); + self.mov_instruction(rc_pointer, variable_pointer); + self.usize_op_in_place(rc_pointer, BinaryIntOp::Add, 1_usize); + + self.load_instruction(rc, rc_pointer); + self.deallocate_register(rc_pointer); } - RegisterOrMemory::HeapVector(HeapVector { pointer, size }) => { + BrilligVariable::BrilligVector(BrilligVector { pointer, size, rc }) => { self.load_instruction(pointer, variable_pointer); let size_pointer = self.allocate_register(); @@ -547,6 +616,13 @@ impl BrilligContext { self.load_instruction(size, size_pointer); self.deallocate_register(size_pointer); + + let rc_pointer = self.allocate_register(); + self.mov_instruction(rc_pointer, variable_pointer); + self.usize_op_in_place(rc_pointer, BinaryIntOp::Add, 2_usize); + + self.load_instruction(rc, rc_pointer); + self.deallocate_register(rc_pointer); } } } @@ -565,32 +641,38 @@ impl BrilligContext { pub(crate) fn store_variable_instruction( &mut self, variable_pointer: RegisterIndex, - source: RegisterOrMemory, + source: BrilligVariable, ) { - let size_pointer = self.allocate_register(); - self.mov_instruction(size_pointer, variable_pointer); - self.usize_op_in_place(size_pointer, BinaryIntOp::Add, 1_usize); - match source { - RegisterOrMemory::RegisterIndex(register_index) => { + BrilligVariable::Simple(register_index) => { self.store_instruction(variable_pointer, register_index); - let size_constant = self.make_constant(Value::from(1_usize)); - self.store_instruction(size_pointer, size_constant); - self.deallocate_register(size_constant); } - RegisterOrMemory::HeapArray(HeapArray { pointer, size }) => { + BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.store_instruction(variable_pointer, pointer); - let size_constant = self.make_constant(Value::from(size)); - self.store_instruction(size_pointer, size_constant); - self.deallocate_register(size_constant); + + let rc_pointer: RegisterIndex = self.allocate_register(); + self.mov_instruction(rc_pointer, variable_pointer); + self.usize_op_in_place(rc_pointer, BinaryIntOp::Add, 1_usize); + self.store_instruction(rc_pointer, rc); + self.deallocate_register(rc_pointer); } - RegisterOrMemory::HeapVector(HeapVector { pointer, size }) => { + BrilligVariable::BrilligVector(BrilligVector { pointer, size, rc }) => { self.store_instruction(variable_pointer, pointer); + + let size_pointer = self.allocate_register(); + self.mov_instruction(size_pointer, variable_pointer); + self.usize_op_in_place(size_pointer, BinaryIntOp::Add, 1_usize); self.store_instruction(size_pointer, size); + + let rc_pointer: RegisterIndex = self.allocate_register(); + self.mov_instruction(rc_pointer, variable_pointer); + self.usize_op_in_place(rc_pointer, BinaryIntOp::Add, 2_usize); + self.store_instruction(rc_pointer, rc); + + self.deallocate_register(size_pointer); + self.deallocate_register(rc_pointer); } } - - self.deallocate_register(size_pointer); } /// Emits a truncate instruction. @@ -725,14 +807,14 @@ impl BrilligContext { } /// Saves all of the registers that have been used up until this point. - fn save_registers_of_vars(&mut self, vars: &[RegisterOrMemory]) -> Vec { + fn save_registers_of_vars(&mut self, vars: &[BrilligVariable]) -> Vec { // Save all of the used registers at this point in memory // because the function call will/may overwrite them. 
// // Note that here it is important that the stack pointer register is at register 0, // as after the first register save we add to the pointer. let mut used_registers: Vec<_> = - vars.iter().flat_map(|var| extract_registers(*var)).collect(); + vars.iter().flat_map(|var| var.extract_registers()).collect(); // Also dump the previous stack pointer used_registers.push(ReservedRegisters::previous_stack_pointer()); @@ -811,7 +893,7 @@ impl BrilligContext { pub(crate) fn pre_call_save_registers_prep_args( &mut self, arguments: &[RegisterIndex], - variables_to_save: &[RegisterOrMemory], + variables_to_save: &[BrilligVariable], ) -> Vec { // Save all the registers we have used to the stack. let saved_registers = self.save_registers_of_vars(variables_to_save); @@ -852,9 +934,9 @@ impl BrilligContext { } /// Utility method to transform a HeapArray to a HeapVector by making a runtime constant with the size. - pub(crate) fn array_to_vector(&mut self, array: &HeapArray) -> HeapVector { + pub(crate) fn array_to_vector(&mut self, array: &BrilligArray) -> BrilligVector { let size_register = self.make_constant(array.size.into()); - HeapVector { size: size_register, pointer: array.pointer } + BrilligVector { size: size_register, pointer: array.pointer, rc: array.rc } } /// Issues a blackbox operation. @@ -868,12 +950,13 @@ impl BrilligContext { pub(crate) fn radix_instruction( &mut self, source: RegisterIndex, - target_vector: HeapVector, + target_vector: BrilligVector, radix: RegisterIndex, limb_count: RegisterIndex, big_endian: bool, ) { self.mov_instruction(target_vector.size, limb_count); + self.const_instruction(target_vector.rc, 1_usize.into()); self.allocate_array_instruction(target_vector.pointer, target_vector.size); let shifted_register = self.allocate_register(); @@ -914,7 +997,7 @@ impl BrilligContext { } /// This instruction will reverse the order of the elements in a vector. - pub(crate) fn reverse_vector_in_place_instruction(&mut self, vector: HeapVector) { + pub(crate) fn reverse_vector_in_place_instruction(&mut self, vector: BrilligVector) { let iteration_count = self.allocate_register(); self.usize_op(vector.size, iteration_count, BinaryIntOp::UnsignedDiv, 2); @@ -949,51 +1032,12 @@ impl BrilligContext { self.deallocate_register(index_at_end_of_array); } - pub(crate) fn extract_heap_vector(&mut self, variable: RegisterOrMemory) -> HeapVector { - match variable { - RegisterOrMemory::HeapVector(vector) => vector, - RegisterOrMemory::HeapArray(array) => { - let size = self.allocate_register(); - self.const_instruction(size, array.size.into()); - HeapVector { pointer: array.pointer, size } - } - _ => unreachable!("ICE: Expected vector, got {variable:?}"), - } - } - /// Sets a current call stack that the next pushed opcodes will be associated with. pub(crate) fn set_call_stack(&mut self, call_stack: CallStack) { self.obj.set_call_stack(call_stack); } } -pub(crate) fn extract_register(variable: RegisterOrMemory) -> RegisterIndex { - match variable { - RegisterOrMemory::RegisterIndex(register_index) => register_index, - _ => unreachable!("ICE: Expected register, got {variable:?}"), - } -} - -pub(crate) fn extract_heap_array(variable: RegisterOrMemory) -> HeapArray { - match variable { - RegisterOrMemory::HeapArray(array) => array, - _ => unreachable!("ICE: Expected array, got {variable:?}"), - } -} - -/// Collects the registers that a given variable is stored in. 
-pub(crate) fn extract_registers(variable: RegisterOrMemory) -> Vec { - match variable { - RegisterOrMemory::RegisterIndex(register_index) => vec![register_index], - RegisterOrMemory::HeapArray(array) => { - vec![array.pointer] - } - RegisterOrMemory::HeapVector(vector) => { - vec![vector.pointer, vector.size] - } - } -} - /// Type to encapsulate the binary operation types in Brillig #[derive(Clone)] pub(crate) enum BrilligBinaryOp { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs new file mode 100644 index 00000000000..46c54d55ecb --- /dev/null +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs @@ -0,0 +1,99 @@ +use acvm::brillig_vm::brillig::{HeapArray, HeapVector, RegisterIndex, RegisterOrMemory}; +use serde::{Deserialize, Serialize}; + +/// The representation of a noir array in the Brillig IR +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) struct BrilligArray { + pub(crate) pointer: RegisterIndex, + pub(crate) size: usize, + pub(crate) rc: RegisterIndex, +} + +impl BrilligArray { + pub(crate) fn to_heap_array(self) -> HeapArray { + HeapArray { pointer: self.pointer, size: self.size } + } + + pub(crate) fn registers_count() -> usize { + 2 + } + + pub(crate) fn extract_registers(self) -> Vec { + vec![self.pointer, self.rc] + } +} + +/// The representation of a noir slice in the Brillig IR +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) struct BrilligVector { + pub(crate) pointer: RegisterIndex, + pub(crate) size: RegisterIndex, + pub(crate) rc: RegisterIndex, +} + +impl BrilligVector { + pub(crate) fn to_heap_vector(self) -> HeapVector { + HeapVector { pointer: self.pointer, size: self.size } + } + + pub(crate) fn registers_count() -> usize { + 3 + } + + pub(crate) fn extract_registers(self) -> Vec { + vec![self.pointer, self.size, self.rc] + } +} + +/// The representation of a noir value in the Brillig IR +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) enum BrilligVariable { + Simple(RegisterIndex), + BrilligArray(BrilligArray), + BrilligVector(BrilligVector), +} + +impl BrilligVariable { + pub(crate) fn extract_register(self) -> RegisterIndex { + match self { + BrilligVariable::Simple(register_index) => register_index, + _ => unreachable!("ICE: Expected register, got {self:?}"), + } + } + + pub(crate) fn extract_array(self) -> BrilligArray { + match self { + BrilligVariable::BrilligArray(array) => array, + _ => unreachable!("ICE: Expected array, got {self:?}"), + } + } + + pub(crate) fn extract_vector(self) -> BrilligVector { + match self { + BrilligVariable::BrilligVector(vector) => vector, + _ => unreachable!("ICE: Expected vector, got {self:?}"), + } + } + + pub(crate) fn extract_registers(self) -> Vec { + match self { + BrilligVariable::Simple(register_index) => vec![register_index], + BrilligVariable::BrilligArray(array) => array.extract_registers(), + BrilligVariable::BrilligVector(vector) => vector.extract_registers(), + } + } + + pub(crate) fn to_register_or_memory(self) -> RegisterOrMemory { + match self { + BrilligVariable::Simple(register_index) => { + RegisterOrMemory::RegisterIndex(register_index) + } + BrilligVariable::BrilligArray(array) => { + RegisterOrMemory::HeapArray(array.to_heap_array()) + } + BrilligVariable::BrilligVector(vector) => { + RegisterOrMemory::HeapVector(vector.to_heap_vector()) + } + } + } +} diff --git 
a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index fb426ad6876..48615988238 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -2,6 +2,7 @@ use crate::brillig::brillig_ir::ReservedRegisters; use super::{ artifact::{BrilligArtifact, BrilligParameter}, + brillig_variable::{BrilligArray, BrilligVariable}, debug_show::DebugShow, registers::BrilligRegistersContext, BrilligContext, @@ -20,6 +21,7 @@ impl BrilligContext { registers: BrilligRegistersContext::new(), context_label: String::default(), section_label: 0, + next_section: 1, debug_show: DebugShow::new(false), }; @@ -32,18 +34,39 @@ impl BrilligContext { } /// Adds the instructions needed to handle entry point parameters - /// - /// And sets the starting value of the reserved registers + /// The runtime will leave the parameters in the first `n` registers. + /// Arrays will be passed as pointers to the first element, with all the nested arrays flattened. + /// First, reserve the registers that contain the parameters. + /// This function also sets the starting value of the reserved registers fn entry_point_instruction(&mut self, arguments: Vec) { - // Translate the inputs by the reserved registers offset - for i in (0..arguments.len()).rev() { - self.push_opcode(BrilligOpcode::Mov { - destination: ReservedRegisters::user_register_index(i), - source: RegisterIndex::from(i), - }); - // Make sure we don't overwrite the arguments - self.allocate_register(); - } + let preallocated_registers: Vec<_> = + arguments.iter().enumerate().map(|(i, _)| RegisterIndex::from(i)).collect(); + self.set_allocated_registers(preallocated_registers.clone()); + + // Then allocate and initialize the variables that will hold the parameters + let argument_variables: Vec<_> = arguments + .iter() + .zip(preallocated_registers) + .map(|(argument, param_register)| match argument { + BrilligParameter::Simple => { + let variable_register = self.allocate_register(); + self.mov_instruction(variable_register, param_register); + BrilligVariable::Simple(variable_register) + } + BrilligParameter::Array(item_types, item_count) => { + let pointer_register = self.allocate_register(); + let rc_register = self.allocate_register(); + self.mov_instruction(pointer_register, param_register); + self.const_instruction(rc_register, 1_usize.into()); + BrilligVariable::BrilligArray(BrilligArray { + pointer: pointer_register, + size: item_types.len() * item_count, + rc: rc_register, + }) + } + BrilligParameter::Slice(_) => unimplemented!("Unsupported slices as parameter"), + }) + .collect(); // Calculate the initial value for the stack pointer register let size_arguments_memory: usize = arguments @@ -65,16 +88,24 @@ impl BrilligContext { value: 0_usize.into(), }); - for (index, parameter) in arguments.iter().enumerate() { + // Deflatten the arrays + for (parameter, assigned_variable) in arguments.iter().zip(&argument_variables) { if let BrilligParameter::Array(item_type, item_count) = parameter { if item_type.iter().any(|param| !matches!(param, BrilligParameter::Simple)) { - let pointer_register = ReservedRegisters::user_register_index(index); + let pointer_register = assigned_variable.extract_array().pointer; let deflattened_register = self.deflatten_array(item_type, *item_count, pointer_register); self.mov_instruction(pointer_register, deflattened_register); } } } + + // Move the parameters to the first user 
defined registers, to follow function call convention. + for (i, register) in + argument_variables.into_iter().flat_map(|arg| arg.extract_registers()).enumerate() + { + self.mov_instruction(ReservedRegisters::user_register_index(i), register); + } } /// Computes the size of a parameter if it was flattened @@ -92,6 +123,7 @@ impl BrilligContext { } /// Deflatten an array by recursively allocating nested arrays and copying the plain values. + /// Returns the pointer to the deflattened items. fn deflatten_array( &mut self, item_type: &[BrilligParameter], @@ -139,13 +171,25 @@ impl BrilligContext { *nested_array_item_count, nested_array_pointer, ); - self.array_set( - deflattened_array_pointer, - target_index, - deflattened_nested_array_pointer, + let reference = self.allocate_register(); + let rc = self.allocate_register(); + self.const_instruction(rc, 1_usize.into()); + + self.allocate_array_reference_instruction(reference); + self.store_variable_instruction( + reference, + BrilligVariable::BrilligArray(BrilligArray { + pointer: deflattened_nested_array_pointer, + size: nested_array_item_type.len() * nested_array_item_count, + rc, + }), ); + self.array_set(deflattened_array_pointer, target_index, reference); + self.deallocate_register(nested_array_pointer); + self.deallocate_register(reference); + self.deallocate_register(rc); source_offset += BrilligContext::flattened_size(subitem); } @@ -163,21 +207,36 @@ impl BrilligContext { } /// Adds the instructions needed to handle return parameters + /// The runtime expects the results in the first `n` registers. + /// Arrays are expected to be returned as pointers to the first element with all the nested arrays flattened. + /// However, the function called returns variables (that have extra data) and the returned arrays are unflattened. fn exit_point_instruction(&mut self, return_parameters: Vec) { - // Make sure we don't overwrite the return parameters - return_parameters.iter().for_each(|_| { - self.allocate_register(); - }); - - for (index, ret) in return_parameters.iter().enumerate() { - if let BrilligParameter::Array(item_type, item_count) = ret { + // First, we allocate the registers that hold the returned variables from the function call. + self.set_allocated_registers(vec![]); + let returned_variables: Vec<_> = return_parameters + .iter() + .map(|return_parameter| match return_parameter { + BrilligParameter::Simple => BrilligVariable::Simple(self.allocate_register()), + BrilligParameter::Array(item_types, item_count) => { + BrilligVariable::BrilligArray(BrilligArray { + pointer: self.allocate_register(), + size: item_types.len() * item_count, + rc: self.allocate_register(), + }) + } + BrilligParameter::Slice(..) 
=> unreachable!("ICE: Cannot return slices"), + }) + .collect(); + // Now, we unflatten the returned arrays + for (return_param, returned_variable) in return_parameters.iter().zip(&returned_variables) { + if let BrilligParameter::Array(item_type, item_count) = return_param { if item_type.iter().any(|item| !matches!(item, BrilligParameter::Simple)) { - let returned_pointer = ReservedRegisters::user_register_index(index); + let returned_pointer = returned_variable.extract_array().pointer; let flattened_array_pointer = self.allocate_register(); self.allocate_fixed_length_array( flattened_array_pointer, - BrilligContext::flattened_size(ret), + BrilligContext::flattened_size(return_param), ); self.flatten_array( @@ -191,16 +250,18 @@ impl BrilligContext { } } } - // We want all functions to follow the calling convention of returning + // The VM expects us to follow the calling convention of returning // their results in the first `n` registers. So we to move the return values // to the first `n` registers once completed. // Move the results to registers 0..n - for i in 0..return_parameters.len() { - self.push_opcode(BrilligOpcode::Mov { - destination: i.into(), - source: ReservedRegisters::user_register_index(i), - }); + for (i, returned_variable) in returned_variables.into_iter().enumerate() { + let register = match returned_variable { + BrilligVariable::Simple(register) => register, + BrilligVariable::BrilligArray(array) => array.pointer, + BrilligVariable::BrilligVector(vector) => vector.pointer, + }; + self.push_opcode(BrilligOpcode::Mov { destination: i.into(), source: register }); } self.push_opcode(BrilligOpcode::Stop); } @@ -237,11 +298,22 @@ impl BrilligContext { target_offset += 1; } BrilligParameter::Array(nested_array_item_type, nested_array_item_count) => { - let nested_array_pointer = self.allocate_register(); + let nested_array_reference = self.allocate_register(); self.array_get( deflattened_array_pointer, source_index, - nested_array_pointer, + nested_array_reference, + ); + + let nested_array_variable = BrilligVariable::BrilligArray(BrilligArray { + pointer: self.allocate_register(), + size: nested_array_item_type.len() * nested_array_item_count, + rc: self.allocate_register(), + }); + + self.load_variable_instruction( + nested_array_variable, + nested_array_reference, ); let flattened_nested_array_pointer = self.allocate_register(); @@ -262,11 +334,15 @@ impl BrilligContext { nested_array_item_type, *nested_array_item_count, flattened_nested_array_pointer, - nested_array_pointer, + nested_array_variable.extract_array().pointer, ); - self.deallocate_register(nested_array_pointer); + self.deallocate_register(nested_array_reference); self.deallocate_register(flattened_nested_array_pointer); + nested_array_variable + .extract_registers() + .into_iter() + .for_each(|register| self.deallocate_register(register)); target_offset += BrilligContext::flattened_size(subitem); } @@ -288,6 +364,7 @@ mod tests { use crate::brillig::brillig_ir::{ artifact::BrilligParameter, + brillig_variable::BrilligArray, tests::{create_and_run_vm, create_context, create_entry_point_bytecode}, }; @@ -332,18 +409,24 @@ mod tests { Value::from(4_usize), Value::from(5_usize), Value::from(6_usize), - // The pointer to the nested array of the first item - Value::from(10_usize), - Value::from(3_usize), - // The pointer to the nested array of the second item + // The pointer to the nested reference of the first item Value::from(12_usize), + Value::from(3_usize), + // The pointer to the nested reference of the 
second item + Value::from(16_usize), Value::from(6_usize), // The nested array of the first item Value::from(1_usize), Value::from(2_usize), + // The nested reference of the first item + Value::from(10_usize), + Value::from(1_usize), // The nested array of the second item Value::from(4_usize), Value::from(5_usize), + // The nested reference of the second item + Value::from(14_usize), + Value::from(1_usize), ] ); } @@ -358,35 +441,31 @@ mod tests { Value::from(5_usize), Value::from(6_usize), ]; - let array_param = BrilligParameter::Array( vec![ - BrilligParameter::Simple, BrilligParameter::Array(vec![BrilligParameter::Simple], 2), + BrilligParameter::Simple, ], 2, ); - let arguments = vec![array_param.clone()]; let returns = vec![array_param]; let mut context = create_context(); // Allocate the parameter - let array_pointer = context.allocate_register(); + let brillig_array = BrilligArray { + pointer: context.allocate_register(), + size: 2, + rc: context.allocate_register(), + }; - context.return_instruction(&[array_pointer]); + context.return_instruction(&brillig_array.extract_registers()); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm(flattened_array.clone(), vec![Value::from(0_usize)], &bytecode); let memory = vm.get_memory(); - assert_eq!( - vm.get_registers().get(RegisterIndex(0)), - // The returned value will be past the original array and the deflattened array - Value::from(flattened_array.len() + (flattened_array.len() + 2)), - ); - assert_eq!( memory, &vec![ @@ -397,19 +476,25 @@ mod tests { Value::from(4_usize), Value::from(5_usize), Value::from(6_usize), - // The pointer to the nested array of the first item - Value::from(1_usize), - Value::from(10_usize), - // The pointer to the nested array of the second item - Value::from(4_usize), + // The pointer to the nested reference of the first item Value::from(12_usize), + Value::from(3_usize), + // The pointer to the nested reference of the second item + Value::from(16_usize), + Value::from(6_usize), // The nested array of the first item + Value::from(1_usize), Value::from(2_usize), - Value::from(3_usize), + // The nested reference of the first item + Value::from(10_usize), + Value::from(1_usize), // The nested array of the second item + Value::from(4_usize), Value::from(5_usize), - Value::from(6_usize), - // The values flattened again + // The nested reference of the second item + Value::from(14_usize), + Value::from(1_usize), + // The original flattened again Value::from(1_usize), Value::from(2_usize), Value::from(3_usize), @@ -418,5 +503,6 @@ mod tests { Value::from(6_usize), ] ); + assert_eq!(vm.get_registers().get(RegisterIndex(0)), 18_usize.into()); } } diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 42672d4d0ad..42818e8b19d 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -26,7 +26,7 @@ pub enum RuntimeError { }, #[error(transparent)] InternalError(#[from] InternalError), - #[error("Index out of bounds, array has size {index:?}, but index was {array_size:?}")] + #[error("Index out of bounds, array has size {array_size}, but index was {index}")] IndexOutOfBounds { index: usize, array_size: usize, call_stack: CallStack }, #[error("Range constraint of {num_bits} bits is too large for the Field size")] InvalidRangeConstraint { num_bits: u32, call_stack: CallStack }, diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs 
b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index c4b19379ecc..712913841f3 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -130,8 +130,8 @@ impl AcirContext { } /// Adds a constant to the context and assigns a Variable to represent it - pub(crate) fn add_constant(&mut self, constant: FieldElement) -> AcirVar { - let constant_data = AcirVarData::Const(constant); + pub(crate) fn add_constant(&mut self, constant: impl Into) -> AcirVar { + let constant_data = AcirVarData::Const(constant.into()); self.add_data(constant_data) } @@ -353,7 +353,7 @@ impl AcirContext { // Check to see if equality can be determined at compile-time. if diff_expr.is_const() { - return Ok(self.add_constant(diff_expr.is_zero().into())); + return Ok(self.add_constant(diff_expr.is_zero())); } let is_equal_witness = self.acir_ir.is_equal(&lhs_expr, &rhs_expr); @@ -380,9 +380,15 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { - let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?; - Ok(outputs[0]) + let bit_size = typ.bit_size(); + if bit_size == 1 { + // Operands are booleans. + self.mul_var(lhs, rhs) + } else { + let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; + let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?; + Ok(outputs[0]) + } } /// Returns an `AcirVar` that is the OR result of `lhs` & `rhs`. @@ -404,7 +410,7 @@ impl AcirContext { // max - ((max - a) AND (max -b)) // Subtracting from max flips the bits, so this is effectively: // (NOT a) NAND (NOT b) - let max = self.add_constant(FieldElement::from((1_u128 << bit_size) - 1)); + let max = self.add_constant((1_u128 << bit_size) - 1); let a = self.sub_var(max, lhs)?; let b = self.sub_var(max, rhs)?; let inputs = vec![AcirValue::Var(a, typ.clone()), AcirValue::Var(b, typ)]; @@ -547,7 +553,7 @@ impl AcirContext { pub(crate) fn not_var(&mut self, x: AcirVar, typ: AcirType) -> Result { let bit_size = typ.bit_size(); // Subtracting from max flips the bits - let max = self.add_constant(FieldElement::from((1_u128 << bit_size) - 1)); + let max = self.add_constant((1_u128 << bit_size) - 1); self.sub_var(max, x) } @@ -574,8 +580,8 @@ impl AcirContext { let quotient = lhs_const.to_u128() / rhs_const.to_u128(); let remainder = lhs_const.to_u128() - quotient * rhs_const.to_u128(); - let quotient_var = self.add_constant(FieldElement::from(quotient)); - let remainder_var = self.add_constant(FieldElement::from(remainder)); + let quotient_var = self.add_constant(quotient); + let remainder_var = self.add_constant(remainder); return Ok((quotient_var, remainder_var)); } @@ -772,7 +778,7 @@ impl AcirContext { // witness = lhs_offset + r assert!(bits + r_bit_size < FieldElement::max_num_bits()); //we need to ensure lhs_offset + r does not overflow - let r_var = self.add_constant(r.into()); + let r_var = self.add_constant(r); let aor = self.add_var(lhs_offset, r_var)?; // lhs_offset<=rhs_offset <=> lhs_offset + r < rhs_offset + r = 2^bit_size <=> witness < 2^bit_size self.range_constrain_var(aor, &NumericType::Unsigned { bit_size }, None)?; @@ -871,7 +877,8 @@ impl AcirContext { /// Converts the `AcirVar` to a `Witness` if it hasn't been already, and appends it to the /// `GeneratedAcir`'s return witnesses. 
pub(crate) fn return_var(&mut self, acir_var: AcirVar) -> Result<(), InternalError> { - let witness = self.var_to_witness(acir_var)?; + let return_var = self.get_or_create_witness_var(acir_var)?; + let witness = self.var_to_witness(return_var)?; self.acir_ir.push_return_witness(witness); Ok(()) } @@ -920,7 +927,7 @@ impl AcirContext { ) -> Result { // 2^{rhs} let divisor = - self.add_constant(FieldElement::from(2_i128).pow(&FieldElement::from(rhs as i128))); + self.add_constant(FieldElement::from(2_u128).pow(&FieldElement::from(rhs as u128))); let one = self.add_constant(FieldElement::one()); // Computes lhs = 2^{rhs} * q + r @@ -931,7 +938,7 @@ impl AcirContext { /// Returns an `AcirVar` which will be `1` if lhs >= rhs /// and `0` otherwise. - fn more_than_eq_var( + pub(crate) fn more_than_eq_var( &mut self, lhs: AcirVar, rhs: AcirVar, @@ -1144,10 +1151,7 @@ impl AcirContext { // `Intrinsic::ToRadix` returns slices which are represented // by tuples with the structure (length, slice contents) Ok(vec![ - AcirValue::Var( - self.add_constant(FieldElement::from(limb_vars.len() as u128)), - AcirType::field(), - ), + AcirValue::Var(self.add_constant(limb_vars.len()), AcirType::field()), AcirValue::Array(limb_vars.into()), ]) } @@ -1160,7 +1164,7 @@ impl AcirContext { limb_count_var: AcirVar, result_element_type: AcirType, ) -> Result, RuntimeError> { - let two_var = self.add_constant(FieldElement::from(2_u128)); + let two_var = self.add_constant(2_u128); self.radix_decompose(endian, input_var, two_var, limb_count_var, result_element_type) } @@ -1268,7 +1272,7 @@ impl AcirContext { AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. }) => { for i in 0..len { // We generate witnesses corresponding to the array values - let index_var = self.add_constant(FieldElement::from(i as u128)); + let index_var = self.add_constant(i); let value_read_var = self.read_from_memory(block_id, &index_var)?; let value_read = AcirValue::Var(value_read_var, AcirType::field()); @@ -1422,7 +1426,8 @@ impl AcirContext { index: &AcirVar, ) -> Result { // Fetch the witness corresponding to the index - let index_witness = self.var_to_witness(*index)?; + let index_var = self.get_or_create_witness_var(*index)?; + let index_witness = self.var_to_witness(index_var)?; // Create a Variable to hold the result of the read and extract the corresponding Witness let value_read_var = self.add_variable(); @@ -1443,11 +1448,12 @@ impl AcirContext { value: &AcirVar, ) -> Result<(), InternalError> { // Fetch the witness corresponding to the index - // - let index_witness = self.var_to_witness(*index)?; + let index_var = self.get_or_create_witness_var(*index)?; + let index_witness = self.var_to_witness(index_var)?; // Fetch the witness corresponding to the value to be written - let value_write_witness = self.var_to_witness(*value)?; + let value_write_var = self.get_or_create_witness_var(*value)?; + let value_write_witness = self.var_to_witness(value_write_var)?; // Add the memory write operation to the list of opcodes let op = MemOp::write_to_mem_index(index_witness.into(), value_write_witness.into()); @@ -1489,6 +1495,7 @@ impl AcirContext { ) -> Result<(), InternalError> { match input { AcirValue::Var(var, _) => { + let var = self.get_or_create_witness_var(var)?; witnesses.push(self.var_to_witness(var)?); } AcirValue::Array(values) => { diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 3a79a13c5a3..59ae4da51cc 100644 --- 
a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -136,6 +136,16 @@ impl AcirValue { } } + fn borrow_var(&self) -> Result { + match self { + AcirValue::Var(var, _) => Ok(*var), + AcirValue::DynamicArray(_) | AcirValue::Array(_) => Err(InternalError::General { + message: "Called AcirValue::borrow_var on an array".to_string(), + call_stack: CallStack::new(), + }), + } + } + fn flatten(self) -> Vec<(AcirVar, AcirType)> { match self { AcirValue::Var(var, typ) => vec![(var, typ)], @@ -447,8 +457,7 @@ impl Context { let mut read_dynamic_array_index = |block_id: BlockId, array_index: usize| -> Result { - let index_var = - self.acir_context.add_constant(FieldElement::from(array_index as u128)); + let index_var = self.acir_context.add_constant(array_index); self.acir_context.read_from_memory(block_id, &index_var) }; @@ -571,6 +580,9 @@ impl Context { Instruction::Load { .. } => { unreachable!("Expected all load instructions to be removed before acir_gen") } + Instruction::IncrementRc { .. } => { + // Do nothing. Only Brillig needs to worry about reference counted arrays + } Instruction::RangeCheck { value, max_bit_size, assert_message } => { let acir_var = self.convert_numeric_value(*value, dfg)?; self.acir_context.range_constrain_var( @@ -855,7 +867,7 @@ impl Context { ); let values = try_vecmap(0..*len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let read = self.acir_context.read_from_memory(*block_id, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) @@ -916,7 +928,7 @@ impl Context { // The first max size is going to be the length of the parent slice // As we are fetching from the parent slice we just want its internal - // slize sizes. + // slice sizes. let slice_sizes = slice_sizes[1..].to_vec(); let value = self.array_get_value(&res_typ, block_id, &mut var_index, &slice_sizes)?; @@ -1041,7 +1053,7 @@ impl Context { self.copy_dynamic_array(block_id, result_block_id, array_len)?; } - self.array_set_value(store_value, result_block_id, &mut var_index)?; + self.array_set_value(&store_value, result_block_id, &mut var_index)?; // Set new resulting array to have the same slice sizes as the instruction input if let Type::Slice(element_types) = &array_typ { @@ -1076,7 +1088,7 @@ impl Context { fn array_set_value( &mut self, - value: AcirValue, + value: &AcirValue, block_id: BlockId, var_index: &mut AcirVar, ) -> Result<(), RuntimeError> { @@ -1084,8 +1096,8 @@ impl Context { match value { AcirValue::Var(store_var, _) => { // Write the new value into the new array at the specified index - self.acir_context.write_to_memory(block_id, var_index, &store_var)?; - // Incremement the var_index in case of a nested array + self.acir_context.write_to_memory(block_id, var_index, store_var)?; + // Increment the var_index in case of a nested array *var_index = self.acir_context.add_var(*var_index, one)?; } AcirValue::Array(values) => { @@ -1094,13 +1106,13 @@ impl Context { } } AcirValue::DynamicArray(AcirDynamicArray { block_id: inner_block_id, len, .. 
}) => { - let values = try_vecmap(0..len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let values = try_vecmap(0..*len, |i| { + let index_var = self.acir_context.add_constant(i); - let read = self.acir_context.read_from_memory(inner_block_id, &index_var)?; + let read = self.acir_context.read_from_memory(*inner_block_id, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) })?; - self.array_set_value(AcirValue::Array(values.into()), block_id, var_index)?; + self.array_set_value(&AcirValue::Array(values.into()), block_id, var_index)?; } } Ok(()) @@ -1254,7 +1266,7 @@ impl Context { // The final array should will the flattened index at each outer array index let init_values = vecmap(flat_elem_type_sizes, |type_size| { - let var = self.acir_context.add_constant(FieldElement::from(type_size as u128)); + let var = self.acir_context.add_constant(type_size); AcirValue::Var(var, AcirType::field()) }); let element_type_sizes_len = init_values.len(); @@ -1311,7 +1323,7 @@ impl Context { array_len: usize, ) -> Result<(), RuntimeError> { let init_values = try_vecmap(0..array_len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let read = self.acir_context.read_from_memory(source, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) @@ -1598,7 +1610,7 @@ impl Context { (_, Type::Function) | (Type::Function, _) => { unreachable!("all functions should be inlined") } - (_, Type::Reference) | (Type::Reference, _) => { + (_, Type::Reference(_)) | (Type::Reference(_), _) => { unreachable!("References are invalid in binary operations") } (_, Type::Array(..)) | (Type::Array(..), _) => { @@ -1672,8 +1684,7 @@ impl Context { ) { // Subtractions must first have the integer modulus added before truncation can be // applied. This is done in order to prevent underflow. - let integer_modulus = - self.acir_context.add_constant(FieldElement::from(2_u128.pow(bit_size))); + let integer_modulus = self.acir_context.add_constant(2_u128.pow(bit_size)); var = self.acir_context.add_var(var, integer_modulus)?; } } @@ -1779,45 +1790,72 @@ impl Context { Intrinsic::ArrayLen => { let len = match self.convert_value(arguments[0], dfg) { AcirValue::Var(_, _) => unreachable!("Non-array passed to array.len() method"), - AcirValue::Array(values) => (values.len() as u128).into(), - AcirValue::DynamicArray(array) => (array.len as u128).into(), + AcirValue::Array(values) => values.len(), + AcirValue::DynamicArray(array) => array.len, }; Ok(vec![AcirValue::Var(self.acir_context.add_constant(len), AcirType::field())]) } Intrinsic::SlicePushBack => { + // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let (array_id, array_typ, _) = + let (slice_contents, slice_typ, _) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(arguments[1], dfg); + let slice = self.convert_value(slice_contents, dfg); - // TODO(#3364): make sure that we have handled nested struct inputs - let element = self.convert_value(arguments[2], dfg); - let one = self.acir_context.add_constant(FieldElement::one()); - let new_slice_length = self.acir_context.add_var(slice_length, one)?; + let mut new_elem_size = Self::flattened_value_size(&slice); - // We attach the element no matter what in case len == capacity, as otherwise we - // may get an out of bounds error. 
let mut new_slice = Vector::new(); - self.slice_intrinsic_input(&mut new_slice, slice.clone())?; - new_slice.push_back(element.clone()); + self.slice_intrinsic_input(&mut new_slice, slice)?; + + let elements_to_push = &arguments[2..]; + // We only fill internal slices for nested slices (a slice inside of a slice). + // So we must directly push back elements for slices which are not a nested slice. + if !slice_typ.is_nested_slice() { + for elem in elements_to_push { + let element = self.convert_value(*elem, dfg); + + new_elem_size += Self::flattened_value_size(&element); + new_slice.push_back(element); + } + } + + // Increase the slice length by one to enable accessing more elements in the slice. + let one = self.acir_context.add_constant(FieldElement::one()); + let new_slice_length = self.acir_context.add_var(slice_length, one)?; - // TODO(#3364): This works for non-nested outputs - let len = Self::flattened_value_size(&slice); - let new_elem_size = Self::flattened_value_size(&element); let new_slice_val = AcirValue::Array(new_slice); let result_block_id = self.block_id(&result_ids[1]); - self.initialize_array( - result_block_id, - len + new_elem_size, - Some(new_slice_val.clone()), - )?; + self.initialize_array(result_block_id, new_elem_size, Some(new_slice_val.clone()))?; + // The previous slice length represents the index we want to write into. let mut var_index = slice_length; - self.array_set_value(element, result_block_id, &mut var_index)?; + // Dynamic arrays are represented as flat memory. We must flatten the user facing index + // to a flattened index that matches the complex slice structure. + if slice_typ.is_nested_slice() { + let element_size = slice_typ.element_size(); + + // Multiply the element size against the var index before fetching the flattened index + // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, + // which is how `get_flattened_index` expects its index input. + let element_size_var = self.acir_context.add_constant(element_size); + var_index = self.acir_context.mul_var(slice_length, element_size_var)?; + var_index = + self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; + } + + // Write the elements we wish to push back directly. + // The slice's underlying array value should already be filled with dummy data + // to enable this write to be within bounds. + // The dummy data is either attached during SSA gen or in this match case for non-nested slices. + // These values can then be accessed due to the increased dynamic slice length. + for elem in elements_to_push { + let element = self.convert_value(*elem, dfg); + self.array_set_value(&element, result_block_id, &mut var_index)?; + } - let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) { + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { Some(self.init_element_type_sizes_array( - &array_typ, - array_id, + &slice_typ, + slice_contents, Some(new_slice_val), dfg, )?) 
@@ -1826,155 +1864,501 @@ impl Context { }; let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, - len: len + new_elem_size, + len: new_elem_size, element_type_sizes, }); Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) } Intrinsic::SlicePushFront => { + // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice: AcirValue = self.convert_value(arguments[1], dfg); - // TODO(#3364): make sure that we have handled nested struct inputs - let element = self.convert_value(arguments[2], dfg); + let (slice_contents, slice_typ, _) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice: AcirValue = self.convert_value(slice_contents, dfg); + + let mut new_slice_size = Self::flattened_value_size(&slice); + + // Increase the slice length by one to enable accessing more elements in the slice. let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.add_var(slice_length, one)?; let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - new_slice.push_front(element); - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + let elements_to_push = &arguments[2..]; + let mut elem_size = 0; + // We only fill internal slices for nested slices (a slice inside of a slice). + // So we must directly push front elements for slices which are not a nested slice. + if !slice_typ.is_nested_slice() { + for elem in elements_to_push.iter().rev() { + let element = self.convert_value(*elem, dfg); + + elem_size += Self::flattened_value_size(&element); + new_slice.push_front(element); + } + new_slice_size += elem_size; + } else { + // We have already filled the appropriate dummy values for nested slice during SSA gen. + // We need to account for that we do not go out of bounds by removing dummy data as we + // push elements to the front of our slice. + // Using this strategy we are able to avoid dynamic writes like we do for a SlicePushBack. + for elem in elements_to_push.iter().rev() { + let element = self.convert_value(*elem, dfg); + + let elem_size = Self::flattened_value_size(&element); + // Have to pop based off of the flattened value size as we read the + // slice intrinsic as a flat list of AcirValue::Var + for _ in 0..elem_size { + new_slice.pop_back(); + } + new_slice.push_front(element); + } + } + + let new_slice_val = AcirValue::Array(new_slice.clone()); + + let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array( + result_block_id, + new_slice_size, + Some(new_slice_val.clone()), + )?; + + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(new_slice_val), + dfg, + )?) 
+ } else { + None + }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: new_slice_size, + element_type_sizes, + }); + + Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) } Intrinsic::SlicePopBack => { + // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; - - let (_, _, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; + // For a pop back operation we want to fetch from the `length - 1` as this is the + // last valid index that can be accessed in a slice. After the pop back operation + // the elements stored at that index will no longer be able to be accessed. let mut var_index = new_slice_length; - let elem = self.array_get_value( - &dfg.type_of_value(result_ids[2]), - block_id, - &mut var_index, - &[], - )?; - // TODO(#3364): make sure that we have handled nested struct inputs + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice = self.convert_value(slice_contents, dfg); + + let element_size = slice_typ.element_size(); + + let mut popped_elements = Vec::new(); + // Fetch the values we are popping off of the slice. + // In the case of non-nested slice the logic is simple as we do not + // need to account for the internal slice sizes or flattening the index. + // + // The pop back operation results are of the format [slice length, slice contents, popped elements]. + // Thus, we look at the result ids at index 2 and onwards to determine the type of each popped element. + if !slice_typ.is_nested_slice() { + for res in &result_ids[2..] { + let elem = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &[], + )?; + popped_elements.push(elem); + } + } else { + // Fetch the slice sizes of the nested slice. + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + // Multiply the element size against the var index before fetching the flattened index + // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, + // which is how `get_flattened_index` expects its index input. + let element_size_var = self.acir_context.add_constant(element_size); + // We want to use an index one less than the slice length + var_index = self.acir_context.mul_var(var_index, element_size_var)?; + var_index = + self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; + + for res in &result_ids[2..] 
{ + let elem = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &slice_sizes, + )?; + popped_elements.push(elem); + } + } + let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - Ok(vec![ + let mut results = vec![ AcirValue::Var(new_slice_length, AcirType::field()), AcirValue::Array(new_slice), - elem, - ]) + ]; + results.append(&mut popped_elements); + + Ok(results) } Intrinsic::SlicePopFront => { + // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); + + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice = self.convert_value(slice_contents, dfg); let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - // TODO(#3364): make sure that we have handled nested struct inputs - let elem = new_slice - .pop_front() - .expect("There are no elements in this slice to be removed"); - Ok(vec![ - elem, - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + let element_size = slice_typ.element_size(); + + let mut popped_elements: Vec = Vec::new(); + let mut popped_elements_size = 0; + let mut var_index = self.acir_context.add_constant(FieldElement::zero()); + // Fetch the values we are popping off of the slice. + // In the case of non-nested slice the logic is simple as we do not + // need to account for the internal slice sizes or flattening the index. + // + // The pop front operation results are of the format [popped elements, slice length, slice contents]. + // Thus, we look at the result ids up to the element size to determine the type of each popped element. + if !slice_typ.is_nested_slice() { + for res in &result_ids[..element_size] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &[], + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } else { + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + for res in &result_ids[..element_size] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &slice_sizes, + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } + // It is expected that the `popped_elements_size` is the flattened size of the elements, + // as the input slice should be a dynamic array which is represented by flat memory. 
+                new_slice = new_slice.slice(popped_elements_size..);
+
+                popped_elements.push(AcirValue::Var(new_slice_length, AcirType::field()));
+                popped_elements.push(AcirValue::Array(new_slice));
+
+                Ok(popped_elements)
             }
             Intrinsic::SliceInsert => {
-                // Slice insert with a constant index
+                // arguments = [slice_length, slice_contents, insert_index, ...elements_to_insert]
                 let slice_length = self.convert_value(arguments[0], dfg).into_var()?;
-                let slice = self.convert_value(arguments[1], dfg);
-                let index = self.convert_value(arguments[2], dfg).into_var()?;
-                let element = self.convert_value(arguments[3], dfg);
+
+                let (slice_contents, slice_typ, block_id) =
+                    self.check_array_is_initialized(arguments[1], dfg)?;
+
+                let slice = self.convert_value(slice_contents, dfg);
+                let insert_index = self.convert_value(arguments[2], dfg).into_var()?;
                 let one = self.acir_context.add_constant(FieldElement::one());
                 let new_slice_length = self.acir_context.add_var(slice_length, one)?;
-                // TODO(#2462): Slice insert is a little less obvious on how to implement due to the case
-                // of having a dynamic index
-                // The slice insert logic will need a more involved codegen
-                let index = self.acir_context.var_to_expression(index)?.to_const();
-                let index = index
-                    .expect("ICE: slice length should be fully tracked and constant by ACIR gen");
-                let index = index.to_u128() as usize;
+                let slice_size = Self::flattened_value_size(&slice);
+
+                // Fetch the flattened index from the user provided index argument.
+                let element_size = slice_typ.element_size();
+                let element_size_var = self.acir_context.add_constant(element_size);
+                let flat_insert_index =
+                    self.acir_context.mul_var(insert_index, element_size_var)?;
+                let flat_user_index =
+                    self.get_flattened_index(&slice_typ, slice_contents, flat_insert_index, dfg)?;
+
+                let elements_to_insert = &arguments[3..];
+                // Determine the elements we need to write into our resulting dynamic array.
+                // We need a fully flattened list of AcirVars, as a dynamic array is represented with flat memory.
+                let mut inner_elem_size_usize = 0;
+                let mut flattened_elements = Vec::new();
+                for elem in elements_to_insert {
+                    let element = self.convert_value(*elem, dfg);
+                    let elem_size = Self::flattened_value_size(&element);
+                    inner_elem_size_usize += elem_size;
+                    let mut flat_elem = element.flatten().into_iter().map(|(var, _)| var).collect();
+                    flattened_elements.append(&mut flat_elem);
+                }
+                let inner_elem_size = self.acir_context.add_constant(inner_elem_size_usize);
+                // Set the maximum flattened index at which a new element should be inserted.
+                let max_flat_user_index =
+                    self.acir_context.add_var(flat_user_index, inner_elem_size)?;
+
+                // Go through the entire slice argument and determine what value should be written to the new slice.
+                // 1. If we are below the starting insertion index we should insert the value that was already
+                // in the original slice.
+                // 2. If we are above the starting insertion index but below the max insertion index we should insert
+                // the flattened element arguments.
+                // 3. If we are above the max insertion index we should insert the previous value from the original slice,
+                // as during an insertion we want to shift all elements after the insertion up an index.
+ let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array(result_block_id, slice_size, None)?; + let mut current_insert_index = 0; + for i in 0..slice_size { + let current_index = self.acir_context.add_constant(i); + + // Check that we are above the lower bound of the insertion index + let greater_eq_than_idx = self.acir_context.more_than_eq_var( + current_index, + flat_user_index, + 64, + self.current_side_effects_enabled_var, + )?; + // Check that we are below the upper bound of the insertion index + let less_than_idx = self.acir_context.less_than_var( + current_index, + max_flat_user_index, + 64, + self.current_side_effects_enabled_var, + )?; + + // Read from the original slice the value we want to insert into our new slice. + // We need to make sure of two things: + // 1. That we read the previous element for when we have an index greater than insertion index. + // 2. That the index we are reading from is within the array bounds + let shifted_index = if i < inner_elem_size_usize { + current_index + } else { + let index_minus_elem_size = + self.acir_context.add_constant(i - inner_elem_size_usize); - let mut new_slice = Vector::new(); - self.slice_intrinsic_input(&mut new_slice, slice)?; + let use_shifted_index_pred = self + .acir_context + .mul_var(index_minus_elem_size, greater_eq_than_idx)?; - // We do not return an index out of bounds error directly here - // as the length of the slice is dynamic, and length of `new_slice` - // represents the capacity of the slice, not the actual length. - // - // Constraints should be generated during SSA gen to tell the user - // they are attempting to insert at too large of an index. - // This check prevents a panic inside of the im::Vector insert method. - if index <= new_slice.len() { - // TODO(#3364): make sure that we have handled nested struct inputs - new_slice.insert(index, element); + let not_pred = self.acir_context.sub_var(one, greater_eq_than_idx)?; + let use_current_index_pred = + self.acir_context.mul_var(not_pred, current_index)?; + + self.acir_context.add_var(use_shifted_index_pred, use_current_index_pred)? + }; + + let value_shifted_index = + self.acir_context.read_from_memory(block_id, &shifted_index)?; + + // Final predicate to determine whether we are within the insertion bounds + let should_insert_value_pred = + self.acir_context.mul_var(greater_eq_than_idx, less_than_idx)?; + let insert_value_pred = self.acir_context.mul_var( + flattened_elements[current_insert_index], + should_insert_value_pred, + )?; + + let not_pred = self.acir_context.sub_var(one, should_insert_value_pred)?; + let shifted_value_pred = + self.acir_context.mul_var(not_pred, value_shifted_index)?; + + let new_value = + self.acir_context.add_var(insert_value_pred, shifted_value_pred)?; + + self.acir_context.write_to_memory( + result_block_id, + ¤t_index, + &new_value, + )?; + + current_insert_index += 1; + if inner_elem_size_usize == current_insert_index { + current_insert_index = 0; + } } - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + // let new_slice_val = AcirValue::Array(new_slice); + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(slice), + dfg, + )?) 
+                } else {
+                    None
+                };
+                let result = AcirValue::DynamicArray(AcirDynamicArray {
+                    block_id: result_block_id,
+                    len: slice_size,
+                    element_type_sizes,
+                });
+
+                Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result])
             }
             Intrinsic::SliceRemove => {
-                // Slice insert with a constant index
+                // arguments = [slice_length, slice_contents, remove_index]
                 let slice_length = self.convert_value(arguments[0], dfg).into_var()?;
-                let slice = self.convert_value(arguments[1], dfg);
-                let index = self.convert_value(arguments[2], dfg).into_var()?;
+
+                let (slice_contents, slice_typ, block_id) =
+                    self.check_array_is_initialized(arguments[1], dfg)?;
+
+                let slice = self.convert_value(slice_contents, dfg);
+                let remove_index = self.convert_value(arguments[2], dfg).into_var()?;
                 let one = self.acir_context.add_constant(FieldElement::one());
                 let new_slice_length = self.acir_context.sub_var(slice_length, one)?;
-                // TODO(#2462): allow slice remove with a constant index
-                // Slice remove is a little less obvious on how to implement due to the case
-                // of having a dynamic index
-                // The slice remove logic will need a more involved codegen
-                let index = self.acir_context.var_to_expression(index)?.to_const();
-                let index = index
-                    .expect("ICE: slice length should be fully tracked and constant by ACIR gen");
-                let index = index.to_u128() as usize;
+                let slice_size = Self::flattened_value_size(&slice);
                 let mut new_slice = Vector::new();
                 self.slice_intrinsic_input(&mut new_slice, slice)?;
-                // We do not return an index out of bounds error directly here
-                // as the length of the slice is dynamic, and length of `new_slice`
-                // represents the capacity of the slice, not the actual length.
-                //
-                // Constraints should be generated during SSA gen to tell the user
-                // they are attempting to remove at too large of an index.
-                // This check prevents a panic inside of the im::Vector remove method.
-                let removed_elem = if index < new_slice.len() {
-                    // TODO(#3364): make sure that we have handled nested struct inputs
-                    new_slice.remove(index)
+                // Compiler sanity check
+                assert_eq!(
+                    new_slice.len(),
+                    slice_size,
+                    "ICE: The read flattened slice should match the computed size"
+                );
+
+                // Fetch the flattened index from the user provided index argument.
+                let element_size = slice_typ.element_size();
+                let element_size_var = self.acir_context.add_constant(element_size);
+                let flat_remove_index =
+                    self.acir_context.mul_var(remove_index, element_size_var)?;
+                let flat_user_index =
+                    self.get_flattened_index(&slice_typ, slice_contents, flat_remove_index, dfg)?;
+
+                // Fetch the values we are removing from the slice.
+                // In the case of non-nested slice the logic is simple as we do not
+                // need to account for the internal slice sizes or flattening the index.
+                // As we fetch the values we can determine the size of the removed values
+                // which we will later use for writing the correct resulting slice.
+                let mut popped_elements = Vec::new();
+                let mut popped_elements_size = 0;
+                // Set a temp index just for fetching from the original slice as `array_get_value` mutates
+                // the index internally.
+ let mut temp_index = flat_user_index; + if !slice_typ.is_nested_slice() { + for res in &result_ids[2..(2 + element_size)] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut temp_index, + &[], + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } } else { - // This is a dummy value which should never be used if the appropriate - // slice access checks are generated before this slice remove call. - AcirValue::Var(slice_length, AcirType::field()) + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + for res in &result_ids[2..(2 + element_size)] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut temp_index, + &slice_sizes, + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } + + // Go through the entire slice argument and determine what value should be written to the new slice. + // 1. If the current index is greater than the removal index we must write the next value + // from the original slice to the current index + // 2. At the end of the slice reading from the next value of the original slice + // can lead to a potential out of bounds error. In this case we just fetch from the original slice + // at the current index. As we are decreasing the slice in length, this is a safe operation. + let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array(result_block_id, slice_size, None)?; + for i in 0..slice_size { + let current_index = self.acir_context.add_constant(i); + + let shifted_index = if (i + popped_elements_size) >= slice_size { + current_index + } else { + self.acir_context.add_constant(i + popped_elements_size) + }; + + let value_shifted_index = + self.acir_context.read_from_memory(block_id, &shifted_index)?; + let value_current_index = new_slice[i].borrow_var()?; + + let use_shifted_value = self.acir_context.more_than_eq_var( + current_index, + flat_user_index, + 64, + self.current_side_effects_enabled_var, + )?; + + let shifted_value_pred = + self.acir_context.mul_var(value_shifted_index, use_shifted_value)?; + let not_pred = self.acir_context.sub_var(one, use_shifted_value)?; + let current_value_pred = + self.acir_context.mul_var(not_pred, value_current_index)?; + + let new_value = + self.acir_context.add_var(shifted_value_pred, current_value_pred)?; + + self.acir_context.write_to_memory( + result_block_id, + ¤t_index, + &new_value, + )?; + } + + let new_slice_val = AcirValue::Array(new_slice); + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(new_slice_val), + dfg, + )?) + } else { + None }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: slice_size, + element_type_sizes, + }); - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - removed_elem, - ]) + let mut result = vec![AcirValue::Var(new_slice_length, AcirType::field()), result]; + result.append(&mut popped_elements); + + Ok(result) } _ => todo!("expected a black box function"), } @@ -1997,7 +2381,7 @@ impl Context { AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. 
}) => { for i in 0..len { // We generate witnesses corresponding to the array values - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let value_read_var = self.acir_context.read_from_memory(block_id, &index_var)?; diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 0c9d83d1d8d..f143ca7ee86 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -172,8 +172,9 @@ impl FunctionBuilder { /// Insert an allocate instruction at the end of the current block, allocating the /// given amount of field elements. Returns the result of the allocate instruction, /// which is always a Reference to the allocated data. - pub(crate) fn insert_allocate(&mut self) -> ValueId { - self.insert_instruction(Instruction::Allocate, None).first() + pub(crate) fn insert_allocate(&mut self, element_type: Type) -> ValueId { + let reference_type = Type::Reference(Rc::new(element_type)); + self.insert_instruction(Instruction::Allocate, Some(vec![reference_type])).first() } pub(crate) fn set_location(&mut self, location: Location) -> &mut FunctionBuilder { @@ -460,6 +461,27 @@ impl FunctionBuilder { _ => None, } } + + /// Insert instructions to increment the reference count of any array(s) stored + /// within the given value. If the given value is not an array and does not contain + /// any arrays, this does nothing. + pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) { + match self.type_of_value(value) { + Type::Numeric(_) => (), + Type::Function => (), + Type::Reference(element) => { + if element.contains_an_array() { + let value = self.insert_load(value, element.as_ref().clone()); + self.increment_array_reference_count(value); + } + } + Type::Array(..) | Type::Slice(..) => { + self.insert_instruction(Instruction::IncrementRc { value }, None); + // If there are nested arrays or slices, we wait until ArrayGet + // is issued to increment the count of that array. + } + } + } } impl std::ops::Index for FunctionBuilder { diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 1014ed1c5b4..abddbfb74c7 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -320,7 +320,7 @@ impl DataFlowGraph { /// True if the type of this value is Type::Reference. /// Using this method over type_of_value avoids cloning the value's type. pub(crate) fn value_is_reference(&self, value: ValueId) -> bool { - matches!(self.values[value].get_type(), Type::Reference) + matches!(self.values[value].get_type(), Type::Reference(_)) } /// Appends a result type to the instruction. 
@@ -523,13 +523,13 @@ impl<'dfg> InsertInstructionResult<'dfg> { #[cfg(test)] mod tests { use super::DataFlowGraph; - use crate::ssa::ir::instruction::Instruction; + use crate::ssa::ir::{instruction::Instruction, types::Type}; #[test] fn make_instruction() { let mut dfg = DataFlowGraph::default(); let ins = Instruction::Allocate; - let ins_id = dfg.make_instruction(ins, None); + let ins_id = dfg.make_instruction(ins, Some(vec![Type::field()])); let results = dfg.instruction_results(ins_id); assert_eq!(results.len(), 1); diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 71401201715..63b32766f62 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -182,6 +182,13 @@ pub(crate) enum Instruction { /// Creates a new array with the new value at the given index. All other elements are identical /// to those in the given array. This will not modify the original array. ArraySet { array: ValueId, index: ValueId, value: ValueId }, + + /// An instruction to increment the reference count of a value. + /// + /// This currently only has an effect in Brillig code where array sharing and copy on write is + /// implemented via reference counting. In ACIR code this is done with im::Vector and these + /// IncrementRc instructions are ignored. + IncrementRc { value: ValueId }, } impl Instruction { @@ -195,18 +202,19 @@ impl Instruction { match self { Instruction::Binary(binary) => binary.result_type(), Instruction::Cast(_, typ) => InstructionResultType::Known(typ.clone()), - Instruction::Allocate { .. } => InstructionResultType::Known(Type::Reference), Instruction::Not(value) | Instruction::Truncate { value, .. } => { InstructionResultType::Operand(*value) } Instruction::ArraySet { array, .. } => InstructionResultType::Operand(*array), Instruction::Constrain(..) | Instruction::Store { .. } - | Instruction::EnableSideEffects { .. } - | Instruction::RangeCheck { .. } => InstructionResultType::None, - Instruction::Load { .. } | Instruction::ArrayGet { .. } | Instruction::Call { .. } => { - InstructionResultType::Unknown - } + | Instruction::IncrementRc { .. } + | Instruction::RangeCheck { .. } + | Instruction::EnableSideEffects { .. } => InstructionResultType::None, + Instruction::Allocate { .. } + | Instruction::Load { .. } + | Instruction::ArrayGet { .. } + | Instruction::Call { .. } => InstructionResultType::Unknown, } } @@ -235,6 +243,7 @@ impl Instruction { | Allocate | Load { .. } | Store { .. } + | IncrementRc { .. } | RangeCheck { .. } => false, Call { func, .. } => match dfg[*func] { @@ -266,7 +275,11 @@ impl Instruction { | ArrayGet { .. } | ArraySet { .. } => false, - Constrain(..) | Store { .. } | EnableSideEffects { .. } | RangeCheck { .. } => true, + Constrain(..) + | Store { .. } + | EnableSideEffects { .. } + | IncrementRc { .. } + | RangeCheck { .. } => true, // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. Call { func, .. 
} => match dfg[*func] { @@ -323,6 +336,7 @@ impl Instruction { Instruction::ArraySet { array, index, value } => { Instruction::ArraySet { array: f(*array), index: f(*index), value: f(*value) } } + Instruction::IncrementRc { value } => Instruction::IncrementRc { value: f(*value) }, Instruction::RangeCheck { value, max_bit_size, assert_message } => { Instruction::RangeCheck { value: f(*value), @@ -374,7 +388,7 @@ impl Instruction { Instruction::EnableSideEffects { condition } => { f(*condition); } - Instruction::RangeCheck { value, .. } => { + Instruction::IncrementRc { value } | Instruction::RangeCheck { value, .. } => { f(*value); } } @@ -474,6 +488,7 @@ impl Instruction { Instruction::Allocate { .. } => None, Instruction::Load { .. } => None, Instruction::Store { .. } => None, + Instruction::IncrementRc { .. } => None, Instruction::RangeCheck { value, max_bit_size, .. } => { if let Some(numeric_constant) = dfg.get_numeric_constant(*value) { if numeric_constant.num_bits() < *max_bit_size { diff --git a/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/compiler/noirc_evaluator/src/ssa/ir/printer.rs index c6b1f3c7528..2899b987c1d 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -172,6 +172,9 @@ pub(crate) fn display_instruction( show(*value) ) } + Instruction::IncrementRc { value } => { + writeln!(f, "inc_rc {}", show(*value)) + } Instruction::RangeCheck { value, max_bit_size, .. } => { writeln!(f, "range_check {} to {} bits", show(*value), *max_bit_size,) } diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index 7eda93acf82..fbc95a16387 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -25,7 +25,7 @@ pub(crate) enum Type { Numeric(NumericType), /// A reference to some value, such as an array - Reference, + Reference(Rc), /// An immutable array value with the given element type and length Array(Rc, usize), @@ -86,7 +86,7 @@ impl Type { } Type::Slice(_) => true, Type::Numeric(_) => false, - Type::Reference => false, + Type::Reference(_) => false, Type::Function => false, } } @@ -103,6 +103,23 @@ impl Type { _ => 1, } } + + pub(crate) fn is_nested_slice(&self) -> bool { + if let Type::Slice(element_types) = self { + element_types.as_ref().iter().any(|typ| typ.contains_slice_element()) + } else { + false + } + } + + /// True if this type is an array (or slice) or internally contains an array (or slice) + pub(crate) fn contains_an_array(&self) -> bool { + match self { + Type::Numeric(_) | Type::Function => false, + Type::Array(_, _) | Type::Slice(_) => true, + Type::Reference(element) => element.contains_an_array(), + } + } } impl NumericType { @@ -134,7 +151,7 @@ impl std::fmt::Display for Type { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Type::Numeric(numeric) => numeric.fmt(f), - Type::Reference => write!(f, "reference"), + Type::Reference(element) => write!(f, "&mut {element}"), Type::Array(element, length) => { let elements = vecmap(element.iter(), |element| element.to_string()); write!(f, "[{}; {length}]", elements.join(", ")) diff --git a/compiler/noirc_evaluator/src/ssa/opt/die.rs b/compiler/noirc_evaluator/src/ssa/opt/die.rs index 0fc5eb438fb..492e96dc08c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -7,7 +7,7 @@ use crate::ssa::{ basic_block::{BasicBlock, BasicBlockId}, dfg::DataFlowGraph, 
        function::Function,
-        instruction::InstructionId,
+        instruction::{Instruction, InstructionId},
         post_order::PostOrder,
         value::{Value, ValueId},
     },
@@ -42,6 +42,8 @@ fn dead_instruction_elimination(function: &mut Function) {
     for block in blocks.as_slice() {
         context.remove_unused_instructions_in_block(function, *block);
     }
+
+    context.remove_increment_rc_instructions(&mut function.dfg);
 }
 /// Per function context for tracking unused values and which instructions to remove.
@@ -49,6 +51,11 @@ fn dead_instruction_elimination(function: &mut Function) {
 struct Context {
     used_values: HashSet<ValueId>,
     instructions_to_remove: HashSet<InstructionId>,
+
+    /// IncrementRc instructions must be revisited after the main DIE pass since
+    /// they are technically side-effectful but we still want to remove them if their
+    /// `value` parameter is not used elsewhere.
+    increment_rc_instructions: Vec<(InstructionId, BasicBlockId)>,
 }
 impl Context {
@@ -71,14 +78,19 @@ impl Context {
         let block = &function.dfg[block_id];
         self.mark_terminator_values_as_used(function, block);
-        for instruction in block.instructions().iter().rev() {
-            if self.is_unused(*instruction, function) {
-                self.instructions_to_remove.insert(*instruction);
+        for instruction_id in block.instructions().iter().rev() {
+            if self.is_unused(*instruction_id, function) {
+                self.instructions_to_remove.insert(*instruction_id);
             } else {
-                let instruction = &function.dfg[*instruction];
-                instruction.for_each_value(|value| {
-                    self.mark_used_instruction_results(&function.dfg, value);
-                });
+                let instruction = &function.dfg[*instruction_id];
+
+                if let Instruction::IncrementRc { .. } = instruction {
+                    self.increment_rc_instructions.push((*instruction_id, block_id));
+                } else {
+                    instruction.for_each_value(|value| {
+                        self.mark_used_instruction_results(&function.dfg, value);
+                    });
+                }
             }
         }
@@ -123,11 +135,28 @@ impl Context {
                     self.mark_used_instruction_results(dfg, *elem);
                 }
             }
+            Value::Param { ..
} => { + self.used_values.insert(value_id); + } _ => { // Does not comprise of any instruction results } } } + + fn remove_increment_rc_instructions(self, dfg: &mut DataFlowGraph) { + for (increment_rc, block) in self.increment_rc_instructions { + let value = match &dfg[increment_rc] { + Instruction::IncrementRc { value } => *value, + other => unreachable!("Expected IncrementRc instruction, found {other:?}"), + }; + + // This could be more efficient if we have to remove multiple IncrementRcs in a single block + if !self.used_values.contains(&value) { + dfg[block].instructions_mut().retain(|instruction| *instruction != increment_rc); + } + } + } } #[cfg(test)] @@ -180,10 +209,10 @@ mod test { builder.switch_to_block(b1); let _v3 = builder.add_block_parameter(b1, Type::field()); - let v4 = builder.insert_allocate(); + let v4 = builder.insert_allocate(Type::field()); let _v5 = builder.insert_load(v4, Type::field()); - let v6 = builder.insert_allocate(); + let v6 = builder.insert_allocate(Type::field()); builder.insert_store(v6, one); let v7 = builder.insert_load(v6, Type::field()); let v8 = builder.insert_binary(v7, BinaryOp::Add, one); diff --git a/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs b/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs index 24b21934386..f5e9598114c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs @@ -46,7 +46,7 @@ use crate::ssa::{ ir::{ basic_block::BasicBlockId, dfg::CallStack, - function::Function, + function::{Function, RuntimeType}, function_inserter::FunctionInserter, instruction::{Instruction, InstructionId, Intrinsic}, post_order::PostOrder, @@ -62,11 +62,17 @@ use fxhash::FxHashMap as HashMap; impl Ssa { pub(crate) fn fill_internal_slices(mut self) -> Ssa { for function in self.functions.values_mut() { - let databus = function.dfg.data_bus.clone(); - let mut context = Context::new(function); - context.process_blocks(); - // update the databus with the new array instructions - function.dfg.data_bus = databus.map_values(|t| context.inserter.resolve(t)); + // This pass is only necessary for generating ACIR and thus we should not + // process Brillig functions. + // The pass is also currently only setup to handle a function with a single flattened block. + // For complex Brillig functions we can expect this pass to panic. 
+ if function.runtime() == RuntimeType::Acir { + let databus = function.dfg.data_bus.clone(); + let mut context = Context::new(function); + context.process_blocks(); + // update the databus with the new array instructions + function.dfg.data_bus = databus.map_values(|t| context.inserter.resolve(t)); + } } self } @@ -171,6 +177,9 @@ impl<'f> Context<'f> { panic!("ICE: should have inner slice set for {slice_value}") }); slice_sizes.insert(results[0], inner_slice.clone()); + if slice_value != results[0] { + self.mapped_slice_values.insert(slice_value, results[0]); + } } } } @@ -195,17 +204,11 @@ impl<'f> Context<'f> { let inner_sizes = slice_sizes.get_mut(array).expect("ICE expected slice sizes"); inner_sizes.1.push(*value); - - let value_parent = self.resolve_slice_parent(*value); - if slice_values.contains(&value_parent) { - // Map the value parent to the current array in case nested slices - // from the current array are set to larger values later in the program - self.mapped_slice_values.insert(value_parent, *array); - } } if let Some(inner_sizes) = slice_sizes.get_mut(array) { let inner_sizes = inner_sizes.clone(); + slice_sizes.insert(results[0], inner_sizes); self.mapped_slice_values.insert(*array, results[0]); @@ -221,14 +224,27 @@ impl<'f> Context<'f> { | Intrinsic::SlicePopBack | Intrinsic::SliceInsert | Intrinsic::SliceRemove => (1, 1), - Intrinsic::SlicePopFront => (1, 2), + // `pop_front` returns the popped element, and then the respective slice. + // This means in the case of a slice with structs, the result index of the popped slice + // will change depending on the number of elements in the struct. + // For example, a slice with four elements will look as such in SSA: + // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) + // where v7 is the slice length and v8 is the popped slice itself. + Intrinsic::SlicePopFront => (1, results.len() - 1), _ => return, }; + let slice_contents = arguments[argument_index]; match intrinsic { Intrinsic::SlicePushBack | Intrinsic::SlicePushFront | Intrinsic::SliceInsert => { - let slice_contents = arguments[argument_index]; + for arg in &arguments[(argument_index + 1)..] { + let element_typ = self.inserter.function.dfg.type_of_value(*arg); + if element_typ.contains_slice_element() { + slice_values.push(*arg); + self.compute_slice_sizes(*arg, slice_sizes); + } + } if let Some(inner_sizes) = slice_sizes.get_mut(&slice_contents) { inner_sizes.0 += 1; @@ -237,12 +253,12 @@ impl<'f> Context<'f> { self.mapped_slice_values .insert(slice_contents, results[result_index]); + self.slice_parents.insert(results[result_index], slice_contents); } } Intrinsic::SlicePopBack - | Intrinsic::SlicePopFront - | Intrinsic::SliceRemove => { - let slice_contents = arguments[argument_index]; + | Intrinsic::SliceRemove + | Intrinsic::SlicePopFront => { // We do not decrement the size on intrinsics that could remove values from a slice. // This is because we could potentially go back to the smaller slice and not fill in dummies. 
// This pass should be tracking the potential max that a slice ***could be*** @@ -252,6 +268,7 @@ impl<'f> Context<'f> { self.mapped_slice_values .insert(slice_contents, results[result_index]); + self.slice_parents.insert(results[result_index], slice_contents); } } _ => {} @@ -274,7 +291,6 @@ impl<'f> Context<'f> { if slice_values.contains(array) { let (new_array_op_instr, call_stack) = self.get_updated_array_op_instr(*array, slice_sizes, instruction); - self.inserter.push_instruction_value( new_array_op_instr, instruction, @@ -285,6 +301,55 @@ impl<'f> Context<'f> { self.inserter.push_instruction(instruction, block); } } + Instruction::Call { func: _, arguments } => { + let mut args_to_replace = Vec::new(); + for (i, arg) in arguments.iter().enumerate() { + let element_typ = self.inserter.function.dfg.type_of_value(*arg); + if slice_values.contains(arg) && element_typ.contains_slice_element() { + args_to_replace.push((i, *arg)); + } + } + if args_to_replace.is_empty() { + self.inserter.push_instruction(instruction, block); + } else { + // Using the original slice is ok to do as during collection of slice information + // we guarantee that only the arguments to slice intrinsic calls can be replaced. + let slice_contents = arguments[1]; + + let element_typ = self.inserter.function.dfg.type_of_value(arguments[1]); + let elem_depth = Self::compute_nested_slice_depth(&element_typ); + + let mut max_sizes = Vec::new(); + max_sizes.resize(elem_depth, 0); + // We want the max for the parent of the argument + let parent = self.resolve_slice_parent(slice_contents); + self.compute_slice_max_sizes(parent, slice_sizes, &mut max_sizes, 0); + + for (index, arg) in args_to_replace { + let element_typ = self.inserter.function.dfg.type_of_value(arg); + max_sizes.remove(0); + let new_array = + self.attach_slice_dummies(&element_typ, Some(arg), false, &max_sizes); + + let instruction_id = instruction; + let (instruction, call_stack) = + self.inserter.map_instruction(instruction_id); + let new_call_instr = match instruction { + Instruction::Call { func, mut arguments } => { + arguments[index] = new_array; + Instruction::Call { func, arguments } + } + _ => panic!("Expected call instruction"), + }; + self.inserter.push_instruction_value( + new_call_instr, + instruction_id, + block, + call_stack, + ); + } + } + } _ => { self.inserter.push_instruction(instruction, block); } @@ -311,6 +376,7 @@ impl<'f> Context<'f> { let typ = self.inserter.function.dfg.type_of_value(array_id); let depth = Self::compute_nested_slice_depth(&typ); max_sizes.resize(depth, 0); + max_sizes[0] = *current_size; self.compute_slice_max_sizes(array_id, slice_sizes, &mut max_sizes, 1); @@ -367,9 +433,12 @@ impl<'f> Context<'f> { if let Some(value) = value { let mut slice = im::Vector::new(); - let array = match self.inserter.function.dfg[value].clone() { + let value = self.inserter.function.dfg[value].clone(); + let array = match value { Value::Array { array, .. 
} => array, - _ => panic!("Expected an array value"), + _ => { + panic!("Expected an array value"); + } }; if is_parent_slice { @@ -401,7 +470,7 @@ impl<'f> Context<'f> { self.inserter.function.dfg.make_array(slice, typ.clone()) } } - Type::Reference => { + Type::Reference(_) => { unreachable!("ICE: Generating dummy data for references is unsupported") } Type::Function => { @@ -484,7 +553,6 @@ impl<'f> Context<'f> { self.compute_slice_max_sizes(*inner_slice, slice_sizes, max_sizes, depth + 1); } - max_sizes[depth] = max; if max > max_sizes[depth] { max_sizes[depth] = max; } diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index d2ed21c60d7..29df9d3c76d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -817,7 +817,7 @@ mod test { #[test] fn merge_stores() { // fn main f0 { - // b0(v0: u1, v1: ref): + // b0(v0: u1, v1: &mut Field): // jmpif v0, then: b1, else: b2 // b1(): // store v1, Field 5 @@ -832,7 +832,7 @@ mod test { let b2 = builder.insert_block(); let v0 = builder.add_parameter(Type::bool()); - let v1 = builder.add_parameter(Type::Reference); + let v1 = builder.add_parameter(Type::Reference(Rc::new(Type::field()))); builder.terminate_with_jmpif(v0, b1, b2); @@ -894,7 +894,7 @@ mod test { let b3 = builder.insert_block(); let v0 = builder.add_parameter(Type::bool()); - let v1 = builder.add_parameter(Type::Reference); + let v1 = builder.add_parameter(Type::Reference(Rc::new(Type::field()))); builder.terminate_with_jmpif(v0, b1, b2); @@ -993,7 +993,7 @@ mod test { let c1 = builder.add_parameter(Type::bool()); let c4 = builder.add_parameter(Type::bool()); - let r1 = builder.insert_allocate(); + let r1 = builder.insert_allocate(Type::field()); let store_value = |builder: &mut FunctionBuilder, value: u128| { let value = builder.field_constant(value); @@ -1144,7 +1144,7 @@ mod test { builder.terminate_with_jmpif(v0, b1, b2); builder.switch_to_block(b1); - let v2 = builder.insert_allocate(); + let v2 = builder.insert_allocate(Type::field()); let zero = builder.field_constant(0u128); builder.insert_store(v2, zero); let _v4 = builder.insert_load(v2, Type::field()); @@ -1313,7 +1313,7 @@ mod test { let v8 = builder.insert_binary(v6, BinaryOp::Mod, i_two); let v9 = builder.insert_cast(v8, Type::bool()); - let v10 = builder.insert_allocate(); + let v10 = builder.insert_allocate(Type::field()); builder.insert_store(v10, zero); builder.terminate_with_jmpif(v9, b1, b2); @@ -1412,9 +1412,9 @@ mod test { let ten = builder.field_constant(10u128); let one_hundred = builder.field_constant(100u128); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); builder.insert_store(v0, zero); - let v2 = builder.insert_allocate(); + let v2 = builder.insert_allocate(Type::field()); builder.insert_store(v2, two); let v4 = builder.insert_load(v2, Type::field()); let v5 = builder.insert_binary(v4, BinaryOp::Lt, two); diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 32979f78632..446560f45f1 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -60,7 +60,7 @@ impl<'a> ValueMerger<'a> { typ @ Type::Slice(_) => { self.merge_slice_values(typ, then_condition, else_condition, then_value, else_value) } - Type::Reference => panic!("Cannot return 
references from an if expression"), + Type::Reference(_) => panic!("Cannot return references from an if expression"), Type::Function => panic!("Cannot return functions from an if expression"), } } @@ -333,7 +333,7 @@ impl<'a> ValueMerger<'a> { // to accurately construct dummy data unreachable!("ICE: Cannot return a slice of slices from an if expression") } - Type::Reference => { + Type::Reference(_) => { unreachable!("ICE: Merging references is unsupported") } Type::Function => { diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 6be724a8756..ce205c8d883 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -327,7 +327,7 @@ impl<'f> PerFunctionContext<'f> { match typ { Type::Numeric(_) => false, Type::Function => false, - Type::Reference => true, + Type::Reference(_) => true, Type::Array(elements, _) | Type::Slice(elements) => { elements.iter().any(Self::contains_references) } @@ -433,7 +433,7 @@ mod tests { let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::Array(Rc::new(vec![Type::field()]), 2)); let one = builder.field_constant(FieldElement::one()); let two = builder.field_constant(FieldElement::one()); @@ -474,7 +474,7 @@ mod tests { let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); let one = builder.field_constant(FieldElement::one()); builder.insert_store(v0, one); let v1 = builder.insert_load(v0, Type::field()); @@ -508,7 +508,7 @@ mod tests { let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); let const_one = builder.field_constant(FieldElement::one()); builder.insert_store(v0, const_one); builder.terminate_with_return(vec![v0]); @@ -568,7 +568,7 @@ mod tests { let main_id = Id::test_new(0); let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); let five = builder.field_constant(5u128); builder.insert_store(v0, five); @@ -648,12 +648,12 @@ mod tests { let main_id = Id::test_new(0); let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); let zero = builder.field_constant(0u128); builder.insert_store(v0, zero); - let v2 = builder.insert_allocate(); + let v2 = builder.insert_allocate(Type::Reference(Rc::new(Type::field()))); builder.insert_store(v2, v0); let v3 = builder.insert_load(v2, Type::field()); diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 72b94e575a9..9d27ffc60d8 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -152,7 +152,8 @@ impl<'a> FunctionContext<'a> { /// Allocate a single slot of memory and store into it the given initial value of the variable. /// Always returns a Value::Mutable wrapping the allocate instruction. 
pub(super) fn new_mutable_variable(&mut self, value_to_store: ValueId) -> Value { - let alloc = self.builder.insert_allocate(); + let element_type = self.builder.current_function.dfg.type_of_value(value_to_store); + let alloc = self.builder.insert_allocate(element_type); self.builder.insert_store(alloc, value_to_store); let typ = self.builder.type_of_value(value_to_store); Value::Mutable(alloc, typ) @@ -177,7 +178,7 @@ impl<'a> FunctionContext<'a> { // A mutable reference wraps each element into a reference. // This can be multiple values if the element type is a tuple. ast::Type::MutableReference(element) => { - Self::map_type_helper(element, &mut |_| f(Type::Reference)) + Self::map_type_helper(element, &mut |typ| f(Type::Reference(Rc::new(typ)))) } ast::Type::FmtString(len, fields) => { // A format string is represented by multiple values @@ -231,8 +232,8 @@ impl<'a> FunctionContext<'a> { ast::Type::Slice(_) => panic!("convert_non_tuple_type called on a slice: {typ}"), ast::Type::MutableReference(element) => { // Recursive call to panic if element is a tuple - Self::convert_non_tuple_type(element); - Type::Reference + let element = Self::convert_non_tuple_type(element); + Type::Reference(Rc::new(element)) } } } @@ -600,7 +601,7 @@ impl<'a> FunctionContext<'a> { let loop_end = self.builder.insert_block(); // pre-loop - let result_alloc = self.builder.set_location(location).insert_allocate(); + let result_alloc = self.builder.set_location(location).insert_allocate(Type::bool()); let true_value = self.builder.numeric_constant(1u128, Type::bool()); self.builder.insert_store(result_alloc, true_value); let zero = self.builder.field_constant(0u128); diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index e73124180d0..acf60cd6799 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -245,6 +245,13 @@ impl<'a> FunctionContext<'a> { for element in elements { element.for_each(|element| { let element = element.eval(self); + + // If we're referencing a sub-array in a larger nested array we need to + // increase the reference count of the sub array. This maintains a + // pessimistic reference count (since some are likely moved rather than shared) + // which is important for Brillig's copy on write optimization. This has no + // effect in ACIR code. + self.builder.increment_array_reference_count(element); array.push_back(element); }); } @@ -283,11 +290,12 @@ impl<'a> FunctionContext<'a> { Ok(self.codegen_reference(&unary.rhs)?.map(|rhs| { match rhs { value::Value::Normal(value) => { - let alloc = self.builder.insert_allocate(); + let rhs_type = self.builder.current_function.dfg.type_of_value(value); + let alloc = self.builder.insert_allocate(rhs_type); self.builder.insert_store(alloc, value); Tree::Leaf(value::Value::Normal(alloc)) } - // NOTE: The `.into()` here converts the Value::Mutable into + // The `.into()` here converts the Value::Mutable into // a Value::Normal so it is no longer automatically dereferenced. 
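// Illustrative sketch (not part of the patch): the ssa_gen hunks nearby add
// `increment_array_reference_count` whenever a nested array is constructed, indexed,
// bound by `let`, or passed as an argument. The comments in the patch call the count
// pessimistic: over-counting only forces an extra copy, while under-counting would let
// Brillig mutate shared data in place. A standalone model of the copy-on-write rule that
// count enables, using `Rc` as a stand-in for the runtime's reference count.
use std::rc::Rc;

fn store_at(array: &mut Rc<Vec<u64>>, index: usize, value: u64) {
    // If the array may still be referenced elsewhere (count > 1), clone before
    // writing; otherwise mutate in place.
    Rc::make_mut(array)[index] = value;
}

fn main() {
    let mut a = Rc::new(vec![1, 2, 3]);
    let b = Rc::clone(&a); // the "increment" when the array is shared

    store_at(&mut a, 0, 99); // count == 2, so this copies before writing
    assert_eq!(*a, vec![99, 2, 3]);
    assert_eq!(*b, vec![1, 2, 3]); // the shared view is untouched
}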
value::Value::Mutable(reference, _) => reference.into(), } @@ -335,6 +343,7 @@ impl<'a> FunctionContext<'a> { } else { (array_or_slice[0], None) }; + self.codegen_array_index( array, index_value, @@ -379,7 +388,13 @@ impl<'a> FunctionContext<'a> { } _ => unreachable!("must have array or slice but got {array_type}"), } - self.builder.insert_array_get(array, offset, typ).into() + + // Reference counting in brillig relies on us incrementing reference + // counts when nested arrays/slices are constructed or indexed. This + // has no effect in ACIR code. + let result = self.builder.insert_array_get(array, offset, typ); + self.builder.increment_array_reference_count(result); + result.into() })) } @@ -569,6 +584,11 @@ impl<'a> FunctionContext<'a> { arguments.append(&mut values); } + // If an array is passed as an argument we increase its reference count + for argument in &arguments { + self.builder.increment_array_reference_count(*argument); + } + self.codegen_intrinsic_call_checks(function, &arguments, call.location); Ok(self.insert_call(function, arguments, &call.return_type, call.location)) @@ -610,12 +630,18 @@ impl<'a> FunctionContext<'a> { fn codegen_let(&mut self, let_expr: &ast::Let) -> Result { let mut values = self.codegen_expression(&let_expr.expression)?; - if let_expr.mutable { - values = values.map(|value| { - let value = value.eval(self); - Tree::Leaf(self.new_mutable_variable(value)) - }); - } + values = values.map(|value| { + let value = value.eval(self); + + // Make sure to increment array reference counts on each let binding + self.builder.increment_array_reference_count(value); + + Tree::Leaf(if let_expr.mutable { + self.new_mutable_variable(value) + } else { + value::Value::Normal(value) + }) + }); self.define(let_expr.id, values); Ok(Self::unit_value()) diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index d29e1670944..41807d7eca7 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -76,6 +76,10 @@ impl ExpressionKind { ExpressionKind::Literal(Literal::Str(contents)) } + pub fn raw_string(contents: String, hashes: u8) -> ExpressionKind { + ExpressionKind::Literal(Literal::RawStr(contents, hashes)) + } + pub fn format_string(contents: String) -> ExpressionKind { ExpressionKind::Literal(Literal::FmtStr(contents)) } @@ -312,6 +316,7 @@ pub enum Literal { Bool(bool), Integer(FieldElement), Str(String), + RawStr(String, u8), FmtStr(String), Unit, } @@ -507,6 +512,11 @@ impl Display for Literal { Literal::Bool(boolean) => write!(f, "{}", if *boolean { "true" } else { "false" }), Literal::Integer(integer) => write!(f, "{}", integer.to_u128()), Literal::Str(string) => write!(f, "\"{string}\""), + Literal::RawStr(string, num_hashes) => { + let hashes: String = + std::iter::once('#').cycle().take(*num_hashes as usize).collect(); + write!(f, "r{hashes}\"{string}\"{hashes}") + } Literal::FmtStr(string) => write!(f, "f\"{string}\""), Literal::Unit => write!(f, "()"), } diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index b42ceb96e97..7054fe0bd8e 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -41,6 +41,8 @@ pub enum UnresolvedTypeData { FormatString(UnresolvedTypeExpression, Box), Unit, + Parenthesized(Box), + /// A Named UnresolvedType can be a struct type or a type variable Named(Path, Vec), @@ -152,6 +154,7 @@ impl std::fmt::Display for UnresolvedTypeData { Unit => 
write!(f, "()"), Error => write!(f, "error"), Unspecified => write!(f, "unspecified"), + Parenthesized(typ) => write!(f, "({typ})"), } } } diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index 1ca4d3101a9..73b1f68778d 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -198,7 +198,11 @@ impl From for Expression { fn from(i: Ident) -> Expression { Expression { span: i.0.span(), - kind: ExpressionKind::Variable(Path { segments: vec![i], kind: PathKind::Plain }), + kind: ExpressionKind::Variable(Path { + span: i.span(), + segments: vec![i], + kind: PathKind::Plain, + }), } } } @@ -311,6 +315,7 @@ impl UseTree { pub struct Path { pub segments: Vec, pub kind: PathKind, + pub span: Span, } impl Path { @@ -330,18 +335,11 @@ impl Path { } pub fn from_ident(name: Ident) -> Path { - Path { segments: vec![name], kind: PathKind::Plain } + Path { span: name.span(), segments: vec![name], kind: PathKind::Plain } } pub fn span(&self) -> Span { - let mut segments = self.segments.iter(); - let first_segment = segments.next().expect("ice : cannot have an empty path"); - let mut span = first_segment.0.span(); - - for segment in segments { - span = span.merge(segment.0.span()); - } - span + self.span } pub fn last_segment(&self) -> Ident { @@ -545,8 +543,11 @@ impl ForRange { // array.len() let segments = vec![array_ident]; - let array_ident = - ExpressionKind::Variable(Path { segments, kind: PathKind::Plain }); + let array_ident = ExpressionKind::Variable(Path { + segments, + kind: PathKind::Plain, + span: array_span, + }); let end_range = ExpressionKind::MethodCall(Box::new(MethodCallExpression { object: Expression::new(array_ident.clone(), array_span), @@ -561,8 +562,11 @@ impl ForRange { // array[i] let segments = vec![Ident::new(index_name, array_span)]; - let index_ident = - ExpressionKind::Variable(Path { segments, kind: PathKind::Plain }); + let index_ident = ExpressionKind::Variable(Path { + segments, + kind: PathKind::Plain, + span: array_span, + }); let loop_element = ExpressionKind::Index(Box::new(IndexExpression { collection: Expression::new(array_ident, array_span), diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 047c53e3206..86122530cde 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -1,34 +1,32 @@ use super::dc_mod::collect_defs; use super::errors::{DefCollectorErrorKind, DuplicateType}; use crate::graph::CrateId; -use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleData, ModuleDefId, ModuleId}; +use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use crate::hir::resolution::import::PathResolutionError; -use crate::hir::resolution::path_resolver::PathResolver; + +use crate::hir::resolution::import::{resolve_imports, ImportDirective}; use crate::hir::resolution::resolver::Resolver; use crate::hir::resolution::{ - import::{resolve_imports, ImportDirective}, - path_resolver::StandardPathResolver, + collect_impls, collect_trait_impls, resolve_free_functions, resolve_globals, resolve_impls, + resolve_structs, resolve_trait_by_path, resolve_trait_impls, resolve_traits, + resolve_type_aliases, }; use crate::hir::type_check::{type_check_func, TypeCheckError, TypeChecker}; use crate::hir::Context; -use crate::hir_def::traits::{Trait, 
TraitConstant, TraitFunction, TraitImpl, TraitType}; + use crate::macros_api::MacroProcessor; -use crate::node_interner::{ - FuncId, NodeInterner, StmtId, StructId, TraitId, TraitImplId, TypeAliasId, -}; +use crate::node_interner::{FuncId, NodeInterner, StmtId, StructId, TraitId, TypeAliasId}; use crate::parser::{ParserError, SortedModule}; use crate::{ - ExpressionKind, Generics, Ident, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait, - NoirTypeAlias, Path, Shared, StructType, TraitItem, Type, TypeBinding, TypeVariableKind, - UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, + ExpressionKind, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait, NoirTypeAlias, + Path, Type, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, }; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{CustomDiagnostic, Span}; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::rc::Rc; +use std::collections::{BTreeMap, HashMap}; + use std::vec; /// Stores all of the unresolved functions in a particular file/mod @@ -125,6 +123,16 @@ pub struct DefCollector { pub(crate) collected_traits_impls: Vec, } +/// Maps the type and the module id in which the impl is defined to the functions contained in that +/// impl along with the generics declared on the impl itself. This also contains the Span +/// of the object_type of the impl, used to issue an error if the object type fails to resolve. +/// +/// Note that because these are keyed by unresolved types, the impl map is one of the few instances +/// of HashMap rather than BTreeMap. For this reason, we should be careful not to iterate over it +/// since it would be non-deterministic. +pub(crate) type ImplMap = + HashMap<(UnresolvedType, LocalModuleId), Vec<(UnresolvedGenerics, Span, UnresolvedFunctions)>>; + #[derive(Debug, Clone)] pub enum CompilationError { ParseError(ParserError), @@ -167,16 +175,6 @@ impl From for CompilationError { } } -/// Maps the type and the module id in which the impl is defined to the functions contained in that -/// impl along with the generics declared on the impl itself. This also contains the Span -/// of the object_type of the impl, used to issue an error if the object type fails to resolve. -/// -/// Note that because these are keyed by unresolved types, the impl map is one of the few instances -/// of HashMap rather than BTreeMap. For this reason, we should be careful not to iterate over it -/// since it would be non-deterministic. -type ImplMap = - HashMap<(UnresolvedType, LocalModuleId), Vec<(UnresolvedGenerics, Span, UnresolvedFunctions)>>; - impl DefCollector { fn new(def_map: CrateDefMap) -> DefCollector { DefCollector { @@ -360,264 +358,6 @@ impl DefCollector { } } -/// Go through the list of impls and add each function within to the scope -/// of the module defined by its type. 
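// Illustrative sketch (not part of the patch): the `ImplMap` comment above warns against
// iterating the map because `HashMap` iteration order is not deterministic across runs,
// unlike the `BTreeMap`s used elsewhere in the collector. A small standalone
// illustration of the difference:
use std::collections::{BTreeMap, HashMap};

fn main() {
    let pairs = [("b", 2), ("a", 1), ("c", 3)];

    // BTreeMap iteration follows key order, so anything derived from it comes out in
    // the same order on every compile.
    let ordered: BTreeMap<_, _> = pairs.into_iter().collect();
    assert_eq!(ordered.keys().copied().collect::<Vec<_>>(), vec!["a", "b", "c"]);

    // HashMap iteration order depends on hashing state and may differ between runs,
    // which is why the impl map is only indexed into, never iterated for output.
    let unordered: HashMap<_, _> = pairs.into_iter().collect();
    assert_eq!(unordered.len(), 3);
}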
-fn collect_impls( - context: &mut Context, - crate_id: CrateId, - collected_impls: &ImplMap, -) -> Vec<(CompilationError, FileId)> { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - - for ((unresolved_type, module_id), methods) in collected_impls { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: *module_id, krate: crate_id }); - - let file = def_maps[&crate_id].file_id(*module_id); - - for (generics, span, unresolved) in methods { - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(generics); - let typ = resolver.resolve_type(unresolved_type.clone()); - - errors.extend(take_errors(unresolved.file_id, resolver)); - - if let Some(struct_type) = get_struct_type(&typ) { - let struct_type = struct_type.borrow(); - - // `impl`s are only allowed on types defined within the current crate - if struct_type.id.krate() != crate_id { - let span = *span; - let type_name = struct_type.name.to_string(); - let error = DefCollectorErrorKind::ForeignImpl { span, type_name }; - errors.push((error.into(), unresolved.file_id)); - continue; - } - - // Grab the module defined by the struct type. Note that impls are a case - // where the module the methods are added to is not the same as the module - // they are resolved in. - let module = get_module_mut(def_maps, struct_type.id.module_id()); - - for (_, method_id, method) in &unresolved.functions { - // If this method was already declared, remove it from the module so it cannot - // be accessed with the `TypeName::method` syntax. We'll check later whether the - // object types in each method overlap or not. If they do, we issue an error. - // If not, that is specialization which is allowed. - if module.declare_function(method.name_ident().clone(), *method_id).is_err() { - module.remove_function(method.name_ident()); - } - } - // Prohibit defining impls for primitive types if we're not in the stdlib - } else if typ != Type::Error && !crate_id.is_stdlib() { - let span = *span; - let error = DefCollectorErrorKind::NonStructTypeInImpl { span }; - errors.push((error.into(), unresolved.file_id)); - } - } - } - errors -} - -fn get_module_mut( - def_maps: &mut BTreeMap, - module: ModuleId, -) -> &mut ModuleData { - &mut def_maps.get_mut(&module.krate).unwrap().modules[module.local_id.0] -} - -fn collect_trait_impl_methods( - interner: &mut NodeInterner, - def_maps: &BTreeMap, - crate_id: CrateId, - trait_id: TraitId, - trait_impl: &mut UnresolvedTraitImpl, -) -> Vec<(CompilationError, FileId)> { - // In this Vec methods[i] corresponds to trait.methods[i]. If the impl has no implementation - // for a particular method, the default implementation will be added at that slot. 
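// Illustrative sketch (not part of the patch): the comment above describes how an impl's
// methods are re-ordered so that `methods[i]` lines up with the trait's `methods[i]`,
// with a default body filling any slot the impl omits. A standalone model of that
// ordering logic; the string-based types are simplifications, not the real interner data.
fn order_impl_methods(
    trait_methods: &[(&str, Option<&str>)], // (name, default body if any)
    impl_methods: &[(&str, &str)],          // (name, provided body)
) -> Result<Vec<String>, String> {
    let mut ordered = Vec::new();
    for (name, default_body) in trait_methods {
        match impl_methods.iter().find(|(n, _)| n == name) {
            Some((_, body)) => ordered.push((*body).to_string()),
            None => match default_body {
                Some(body) => ordered.push((*body).to_string()),
                None => return Err(format!("trait method `{name}` is missing")),
            },
        }
    }
    Ok(ordered)
}

fn main() {
    let trait_methods = [("eq", None), ("ne", Some("default ne"))];
    let impl_methods = [("eq", "custom eq")];
    let ordered = order_impl_methods(&trait_methods, &impl_methods).unwrap();
    assert_eq!(ordered, vec!["custom eq".to_string(), "default ne".to_string()]);
}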
- let mut ordered_methods = Vec::new(); - - let the_trait = interner.get_trait(trait_id); - - // check whether the trait implementation is in the same crate as either the trait or the type - let mut errors = - check_trait_impl_crate_coherence(interner, &the_trait, trait_impl, crate_id, def_maps); - // set of function ids that have a corresponding method in the trait - let mut func_ids_in_trait = HashSet::new(); - - for method in &the_trait.methods { - let overrides: Vec<_> = trait_impl - .methods - .functions - .iter() - .filter(|(_, _, f)| f.name() == method.name.0.contents) - .collect(); - - if overrides.is_empty() { - if let Some(default_impl) = &method.default_impl { - let func_id = interner.push_empty_fn(); - let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; - interner.push_function(func_id, &default_impl.def, module); - func_ids_in_trait.insert(func_id); - ordered_methods.push(( - method.default_impl_module_id, - func_id, - *default_impl.clone(), - )); - } else { - let error = DefCollectorErrorKind::TraitMissingMethod { - trait_name: the_trait.name.clone(), - method_name: method.name.clone(), - trait_impl_span: trait_impl.object_type.span.expect("type must have a span"), - }; - errors.push((error.into(), trait_impl.file_id)); - } - } else { - for (_, func_id, _) in &overrides { - func_ids_in_trait.insert(*func_id); - } - - if overrides.len() > 1 { - let error = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::TraitAssociatedFunction, - first_def: overrides[0].2.name_ident().clone(), - second_def: overrides[1].2.name_ident().clone(), - }; - errors.push((error.into(), trait_impl.file_id)); - } - - ordered_methods.push(overrides[0].clone()); - } - } - - // Emit MethodNotInTrait error for methods in the impl block that - // don't have a corresponding method signature defined in the trait - for (_, func_id, func) in &trait_impl.methods.functions { - if !func_ids_in_trait.contains(func_id) { - let error = DefCollectorErrorKind::MethodNotInTrait { - trait_name: the_trait.name.clone(), - impl_method: func.name_ident().clone(), - }; - errors.push((error.into(), trait_impl.file_id)); - } - } - - trait_impl.methods.functions = ordered_methods; - trait_impl.methods.trait_id = Some(trait_id); - errors -} - -fn collect_trait_impl( - context: &mut Context, - crate_id: CrateId, - trait_impl: &mut UnresolvedTraitImpl, -) -> Vec<(CompilationError, FileId)> { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - let unresolved_type = trait_impl.object_type.clone(); - let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; - trait_impl.trait_id = - match resolve_trait_by_path(def_maps, module, trait_impl.trait_path.clone()) { - Ok(trait_id) => Some(trait_id), - Err(error) => { - errors.push((error.into(), trait_impl.file_id)); - None - } - }; - - if let Some(trait_id) = trait_impl.trait_id { - errors - .extend(collect_trait_impl_methods(interner, def_maps, crate_id, trait_id, trait_impl)); - - let path_resolver = StandardPathResolver::new(module); - let file = def_maps[&crate_id].file_id(trait_impl.module_id); - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(&trait_impl.generics); - let typ = resolver.resolve_type(unresolved_type); - errors.extend(take_errors(trait_impl.file_id, resolver)); - - if let Some(struct_type) = get_struct_type(&typ) { - let struct_type = struct_type.borrow(); - let module = 
get_module_mut(def_maps, struct_type.id.module_id()); - - for (_, method_id, method) in &trait_impl.methods.functions { - // If this method was already declared, remove it from the module so it cannot - // be accessed with the `TypeName::method` syntax. We'll check later whether the - // object types in each method overlap or not. If they do, we issue an error. - // If not, that is specialization which is allowed. - if module.declare_function(method.name_ident().clone(), *method_id).is_err() { - module.remove_function(method.name_ident()); - } - } - } - } - errors -} - -fn collect_trait_impls( - context: &mut Context, - crate_id: CrateId, - collected_impls: &mut [UnresolvedTraitImpl], -) -> Vec<(CompilationError, FileId)> { - collected_impls - .iter_mut() - .flat_map(|trait_impl| collect_trait_impl(context, crate_id, trait_impl)) - .collect() -} - -fn check_trait_impl_crate_coherence( - interner: &mut NodeInterner, - the_trait: &Trait, - trait_impl: &UnresolvedTraitImpl, - current_crate: CrateId, - def_maps: &BTreeMap, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - - let module = ModuleId { krate: current_crate, local_id: trait_impl.module_id }; - let file = def_maps[¤t_crate].file_id(trait_impl.module_id); - let path_resolver = StandardPathResolver::new(module); - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - - let object_crate = match resolver.resolve_type(trait_impl.object_type.clone()) { - Type::Struct(struct_type, _) => struct_type.borrow().id.krate(), - _ => CrateId::Dummy, - }; - - if current_crate != the_trait.crate_id && current_crate != object_crate { - let error = DefCollectorErrorKind::TraitImplOrphaned { - span: trait_impl.object_type.span.expect("object type must have a span"), - }; - errors.push((error.into(), trait_impl.file_id)); - } - - errors -} - -fn resolve_trait_by_path( - def_maps: &BTreeMap, - module: ModuleId, - path: Path, -) -> Result { - let path_resolver = StandardPathResolver::new(module); - - match path_resolver.resolve(def_maps, path.clone()) { - Ok(ModuleDefId::TraitId(trait_id)) => Ok(trait_id), - Ok(_) => Err(DefCollectorErrorKind::NotATrait { not_a_trait_name: path }), - Err(_) => Err(DefCollectorErrorKind::TraitNotFound { trait_path: path }), - } -} - -fn get_struct_type(typ: &Type) -> Option<&Shared> { - match typ { - Type::Struct(definition, _) => Some(definition), - _ => None, - } -} - /// Separate the globals Vec into two. The first element in the tuple will be the /// literal globals, except for arrays, and the second will be all other globals. 
/// We exclude array literals as they can contain complex types @@ -630,49 +370,6 @@ fn filter_literal_globals( }) } -pub struct ResolvedGlobals { - pub globals: Vec<(FileId, StmtId)>, - pub errors: Vec<(CompilationError, FileId)>, -} - -impl ResolvedGlobals { - pub fn extend(&mut self, oth: Self) { - self.globals.extend(oth.globals); - self.errors.extend(oth.errors); - } -} - -fn resolve_globals( - context: &mut Context, - globals: Vec, - crate_id: CrateId, -) -> ResolvedGlobals { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - let globals = vecmap(globals, |global| { - let module_id = ModuleId { local_id: global.module_id, krate: crate_id }; - let path_resolver = StandardPathResolver::new(module_id); - - let mut resolver = Resolver::new( - &mut context.def_interner, - &path_resolver, - &context.def_maps, - global.file_id, - ); - - let name = global.stmt_def.pattern.name_ident().clone(); - - let hir_stmt = resolver.resolve_global_let(global.stmt_def); - errors.extend(take_errors(global.file_id, resolver)); - - context.def_interner.update_global(global.stmt_id, hir_stmt); - - context.def_interner.push_global(global.stmt_id, name, global.module_id); - - (global.file_id, global.stmt_id) - }); - ResolvedGlobals { globals, errors } -} - fn type_check_globals( interner: &mut NodeInterner, global_ids: Vec<(FileId, StmtId)>, @@ -705,360 +402,8 @@ fn type_check_functions( .collect() } -/// Create the mappings from TypeId -> StructType -/// so that expressions can access the fields of structs -fn resolve_structs( - context: &mut Context, - structs: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - // Resolve each field in each struct. - // Each struct should already be present in the NodeInterner after def collection. 
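// Illustrative sketch (not part of the patch): the deletions in this region are not lost
// code; each resolver helper is moved into its own submodule under `hir/resolution`
// (functions.rs, globals.rs, impls.rs, structs.rs, traits.rs, type_aliases.rs) and
// re-exported from mod.rs, as the new files later in the diff show. A toy model of the
// resulting layout with stubbed-out bodies:
mod resolution {
    pub(crate) mod functions {
        pub(crate) fn resolve_free_functions() {}
    }
    pub(crate) mod globals {
        pub(crate) fn resolve_globals() {}
    }
    // impls, structs, traits and type_aliases follow the same pattern.

    // mod.rs re-exports the entry points so callers keep short paths.
    pub(crate) use functions::resolve_free_functions;
    pub(crate) use globals::resolve_globals;
}

fn main() {
    resolution::resolve_free_functions();
    resolution::resolve_globals();
}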
- for (type_id, typ) in structs { - let file_id = typ.file_id; - let (generics, fields, resolver_errors) = resolve_struct_fields(context, crate_id, typ); - errors.extend(vecmap(resolver_errors, |err| (err.into(), file_id))); - context.def_interner.update_struct(type_id, |struct_def| { - struct_def.set_fields(fields); - struct_def.generics = generics; - }); - } - errors -} - -fn resolve_trait_types( - _context: &mut Context, - _crate_id: CrateId, - _unresolved_trait: &UnresolvedTrait, -) -> (Vec, Vec<(CompilationError, FileId)>) { - // TODO - (vec![], vec![]) -} -fn resolve_trait_constants( - _context: &mut Context, - _crate_id: CrateId, - _unresolved_trait: &UnresolvedTrait, -) -> (Vec, Vec<(CompilationError, FileId)>) { - // TODO - (vec![], vec![]) -} - -fn resolve_trait_methods( - context: &mut Context, - trait_id: TraitId, - crate_id: CrateId, - unresolved_trait: &UnresolvedTrait, -) -> (Vec, Vec<(CompilationError, FileId)>) { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - - let path_resolver = StandardPathResolver::new(ModuleId { - local_id: unresolved_trait.module_id, - krate: crate_id, - }); - let file = def_maps[&crate_id].file_id(unresolved_trait.module_id); - - let mut res = vec![]; - let mut resolver_errors = vec![]; - for item in &unresolved_trait.trait_def.items { - if let TraitItem::Function { - name, - generics, - parameters, - return_type, - where_clause: _, - body: _, - } = item - { - let the_trait = interner.get_trait(trait_id); - let self_type = - Type::TypeVariable(the_trait.self_type_typevar.clone(), TypeVariableKind::Normal); - - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(generics); - resolver.set_self_type(Some(self_type)); - - let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); - let resolved_return_type = resolver.resolve_type(return_type.get_type().into_owned()); - let generics = resolver.get_generics().to_vec(); - - let name = name.clone(); - let span: Span = name.span(); - let default_impl_list: Vec<_> = unresolved_trait - .fns_with_default_impl - .functions - .iter() - .filter(|(_, _, q)| q.name() == name.0.contents) - .collect(); - let default_impl = if default_impl_list.len() == 1 { - Some(Box::new(default_impl_list[0].2.clone())) - } else { - None - }; - - let f = TraitFunction { - name, - generics, - arguments, - return_type: resolved_return_type, - span, - default_impl, - default_impl_file_id: unresolved_trait.file_id, - default_impl_module_id: unresolved_trait.module_id, - }; - res.push(f); - resolver_errors.extend(take_errors_filter_self_not_resolved(file, resolver)); - } - } - (res, resolver_errors) -} - -fn take_errors_filter_self_not_resolved( - file_id: FileId, - resolver: Resolver<'_>, -) -> Vec<(CompilationError, FileId)> { - resolver - .take_errors() - .iter() - .filter(|resolution_error| match resolution_error { - ResolverError::PathResolutionError(PathResolutionError::Unresolved(ident)) => { - &ident.0.contents != "Self" - } - _ => true, - }) - .cloned() - .map(|resolution_error| (resolution_error.into(), file_id)) - .collect() -} - -fn take_errors(file_id: FileId, resolver: Resolver<'_>) -> Vec<(CompilationError, FileId)> { - vecmap(resolver.take_errors(), |e| (e.into(), file_id)) -} - -/// Create the mappings from TypeId -> TraitType -/// so that expressions can access the elements of traits -fn resolve_traits( - context: &mut Context, - traits: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - 
for (trait_id, unresolved_trait) in &traits { - context.def_interner.push_empty_trait(*trait_id, unresolved_trait); - } - let mut res: Vec<(CompilationError, FileId)> = vec![]; - for (trait_id, unresolved_trait) in traits { - // Resolve order - // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) - let _ = resolve_trait_types(context, crate_id, &unresolved_trait); - // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) - let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); - // 3. Trait Methods - let (methods, errors) = - resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait); - res.extend(errors); - context.def_interner.update_trait(trait_id, |trait_def| { - trait_def.set_methods(methods); - }); - } - res -} - -fn resolve_struct_fields( - context: &mut Context, - krate: CrateId, - unresolved: UnresolvedStruct, -) -> (Generics, Vec<(Ident, Type)>, Vec) { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: unresolved.module_id, krate }); - let file_id = unresolved.file_id; - let (generics, fields, errors) = - Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file_id) - .resolve_struct_fields(unresolved.struct_def); - (generics, fields, errors) -} - -fn resolve_type_aliases( - context: &mut Context, - type_aliases: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - for (type_id, unresolved_typ) in type_aliases { - let path_resolver = StandardPathResolver::new(ModuleId { - local_id: unresolved_typ.module_id, - krate: crate_id, - }); - let file = unresolved_typ.file_id; - let (typ, generics, resolver_errors) = - Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) - .resolve_type_aliases(unresolved_typ.type_alias_def); - errors.extend(resolver_errors.iter().cloned().map(|e| (e.into(), file))); - context.def_interner.set_type_alias(type_id, typ, generics); - } - errors -} - -fn resolve_impls( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - collected_impls: ImplMap, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - let mut file_method_ids = Vec::new(); - - for ((unresolved_type, module_id), methods) in collected_impls { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: module_id, krate: crate_id }); - - let file = def_maps[&crate_id].file_id(module_id); - - for (generics, _, functions) in methods { - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(&generics); - let generics = resolver.get_generics().to_vec(); - let self_type = resolver.resolve_type(unresolved_type.clone()); - - let mut file_func_ids = resolve_function_set( - interner, - crate_id, - def_maps, - functions, - Some(self_type.clone()), - None, - generics, - errors, - ); - if self_type != Type::Error { - for (file_id, method_id) in &file_func_ids { - let method_name = interner.function_name(method_id).to_owned(); - - if let Some(first_fn) = - interner.add_method(&self_type, method_name.clone(), *method_id, false) - { - let error = ResolverError::DuplicateDefinition { - name: method_name, - first_span: interner.function_ident(&first_fn).span(), - second_span: interner.function_ident(method_id).span(), - }; - errors.push((error.into(), *file_id)); - } - } - } - file_method_ids.append(&mut file_func_ids); - } - } - - file_method_ids 
-} - -fn resolve_trait_impls( - context: &mut Context, - traits: Vec, - crate_id: CrateId, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - let interner = &mut context.def_interner; - let mut methods = Vec::<(FileId, FuncId)>::new(); - - for trait_impl in traits { - let unresolved_type = trait_impl.object_type; - let local_mod_id = trait_impl.module_id; - let module_id = ModuleId { krate: crate_id, local_id: local_mod_id }; - let path_resolver = StandardPathResolver::new(module_id); - - let self_type_span = unresolved_type.span; - - let mut resolver = - Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); - resolver.add_generics(&trait_impl.generics); - let self_type = resolver.resolve_type(unresolved_type.clone()); - let generics = resolver.get_generics().to_vec(); - - let impl_id = interner.next_trait_impl_id(); - - let mut impl_methods = resolve_function_set( - interner, - crate_id, - &context.def_maps, - trait_impl.methods.clone(), - Some(self_type.clone()), - Some(impl_id), - generics.clone(), - errors, - ); - - let maybe_trait_id = trait_impl.trait_id; - if let Some(trait_id) = maybe_trait_id { - for (_, func) in &impl_methods { - interner.set_function_trait(*func, self_type.clone(), trait_id); - } - } - - if matches!(self_type, Type::MutableReference(_)) { - let span = self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()); - let error = DefCollectorErrorKind::MutableReferenceInTraitImpl { span }; - errors.push((error.into(), trait_impl.file_id)); - } - - let mut new_resolver = - Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); - - new_resolver.set_generics(generics); - new_resolver.set_self_type(Some(self_type.clone())); - - if let Some(trait_id) = maybe_trait_id { - check_methods_signatures( - &mut new_resolver, - &impl_methods, - trait_id, - trait_impl.generics.len(), - errors, - ); - - let where_clause = trait_impl - .where_clause - .into_iter() - .flat_map(|item| new_resolver.resolve_trait_constraint(item)) - .collect(); - - let resolved_trait_impl = Shared::new(TraitImpl { - ident: trait_impl.trait_path.last_segment().clone(), - typ: self_type.clone(), - trait_id, - file: trait_impl.file_id, - where_clause, - methods: vecmap(&impl_methods, |(_, func_id)| *func_id), - }); - - if let Err((prev_span, prev_file)) = interner.add_trait_implementation( - self_type.clone(), - trait_id, - impl_id, - resolved_trait_impl, - ) { - let error = DefCollectorErrorKind::OverlappingImpl { - typ: self_type.clone(), - span: self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()), - }; - errors.push((error.into(), trait_impl.file_id)); - - // The 'previous impl defined here' note must be a separate error currently - // since it may be in a different file and all errors have the same file id. 
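// Illustrative sketch (not part of the patch): the comment above explains why the
// "previous impl defined here" note is emitted as a second error entry: each entry
// carries exactly one FileId, and the earlier impl may live in a different file. A
// standalone model of the (error, file) pairing used throughout this pass; the variant
// names mirror the diff, the rest is simplified.
#[derive(Debug, Clone, Copy, PartialEq)]
struct FileId(u32);

#[derive(Debug, PartialEq)]
enum CompilationError {
    OverlappingImpl { type_name: &'static str },
    OverlappingImplNote,
}

fn report_overlap(current_file: FileId, previous_file: FileId) -> Vec<(CompilationError, FileId)> {
    vec![
        (CompilationError::OverlappingImpl { type_name: "Foo" }, current_file),
        // The note is a separate entry so it can point at the other file.
        (CompilationError::OverlappingImplNote, previous_file),
    ]
}

fn main() {
    let errors = report_overlap(FileId(1), FileId(0));
    assert_eq!(errors.len(), 2);
    assert_eq!(errors[1].1, FileId(0));
}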
- let error = DefCollectorErrorKind::OverlappingImplNote { span: prev_span }; - errors.push((error.into(), prev_file)); - } - - methods.append(&mut impl_methods); - } - } - - methods -} - // TODO(vitkov): Move this out of here and into type_check -fn check_methods_signatures( +pub(crate) fn check_methods_signatures( resolver: &mut Resolver, impl_methods: &Vec<(FileId, FuncId)>, trait_id: TraitId, @@ -1151,72 +496,3 @@ fn check_methods_signatures( the_trait.self_type_typevar.borrow_mut().unbind(the_trait.self_type_typevar_id); } - -fn resolve_free_functions( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - collected_functions: Vec, - self_type: Option, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - // Lower each function in the crate. This is now possible since imports have been resolved - collected_functions - .into_iter() - .flat_map(|unresolved_functions| { - resolve_function_set( - interner, - crate_id, - def_maps, - unresolved_functions, - self_type.clone(), - None, - vec![], // no impl generics - errors, - ) - }) - .collect() -} - -#[allow(clippy::too_many_arguments)] -fn resolve_function_set( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - mut unresolved_functions: UnresolvedFunctions, - self_type: Option, - trait_impl_id: Option, - impl_generics: Vec<(Rc, Shared, Span)>, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - let file_id = unresolved_functions.file_id; - - let where_clause_errors = - unresolved_functions.resolve_trait_bounds_trait_ids(def_maps, crate_id); - errors.extend(where_clause_errors.iter().cloned().map(|e| (e.into(), file_id))); - - vecmap(unresolved_functions.functions, |(mod_id, func_id, func)| { - let module_id = ModuleId { krate: crate_id, local_id: mod_id }; - let path_resolver = StandardPathResolver::new(module_id); - - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file_id); - // Must use set_generics here to ensure we re-use the same generics from when - // the impl was originally collected. Otherwise the function will be using different - // TypeVariables for the same generic, causing it to instantiate incorrectly. - resolver.set_generics(impl_generics.clone()); - resolver.set_self_type(self_type.clone()); - resolver.set_trait_id(unresolved_functions.trait_id); - resolver.set_trait_impl_id(trait_impl_id); - - // Without this, impl methods can accidentally be placed in contracts. 
See #3254 - if self_type.is_some() { - resolver.set_in_contract(false); - } - - let (hir_func, func_meta, errs) = resolver.resolve_function(func, func_id); - interner.push_fn_meta(func_meta, func_id); - interner.update_fn(func_id, hir_func); - errors.extend(errs.iter().cloned().map(|e| (e.into(), file_id))); - (file_id, func_id) - }) -} diff --git a/compiler/noirc_frontend/src/hir/resolution/functions.rs b/compiler/noirc_frontend/src/hir/resolution/functions.rs new file mode 100644 index 00000000000..387f94e129c --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/functions.rs @@ -0,0 +1,85 @@ +use std::{collections::BTreeMap, rc::Rc}; + +use fm::FileId; +use iter_extended::vecmap; +use noirc_errors::Span; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{CompilationError, UnresolvedFunctions}, + def_map::{CrateDefMap, ModuleId}, + }, + node_interner::{FuncId, NodeInterner, TraitImplId}, + Shared, Type, TypeBinding, +}; + +use super::{path_resolver::StandardPathResolver, resolver::Resolver}; + +#[allow(clippy::too_many_arguments)] +pub(crate) fn resolve_function_set( + interner: &mut NodeInterner, + crate_id: CrateId, + def_maps: &BTreeMap, + mut unresolved_functions: UnresolvedFunctions, + self_type: Option, + trait_impl_id: Option, + impl_generics: Vec<(Rc, Shared, Span)>, + errors: &mut Vec<(CompilationError, FileId)>, +) -> Vec<(FileId, FuncId)> { + let file_id = unresolved_functions.file_id; + + let where_clause_errors = + unresolved_functions.resolve_trait_bounds_trait_ids(def_maps, crate_id); + errors.extend(where_clause_errors.iter().cloned().map(|e| (e.into(), file_id))); + + vecmap(unresolved_functions.functions, |(mod_id, func_id, func)| { + let module_id = ModuleId { krate: crate_id, local_id: mod_id }; + let path_resolver = StandardPathResolver::new(module_id); + + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file_id); + // Must use set_generics here to ensure we re-use the same generics from when + // the impl was originally collected. Otherwise the function will be using different + // TypeVariables for the same generic, causing it to instantiate incorrectly. + resolver.set_generics(impl_generics.clone()); + resolver.set_self_type(self_type.clone()); + resolver.set_trait_id(unresolved_functions.trait_id); + resolver.set_trait_impl_id(trait_impl_id); + + // Without this, impl methods can accidentally be placed in contracts. 
See #3254 + if self_type.is_some() { + resolver.set_in_contract(false); + } + + let (hir_func, func_meta, errs) = resolver.resolve_function(func, func_id); + interner.push_fn_meta(func_meta, func_id); + interner.update_fn(func_id, hir_func); + errors.extend(errs.iter().cloned().map(|e| (e.into(), file_id))); + (file_id, func_id) + }) +} + +pub(crate) fn resolve_free_functions( + interner: &mut NodeInterner, + crate_id: CrateId, + def_maps: &BTreeMap, + collected_functions: Vec, + self_type: Option, + errors: &mut Vec<(CompilationError, FileId)>, +) -> Vec<(FileId, FuncId)> { + collected_functions + .into_iter() + .flat_map(|unresolved_functions| { + resolve_function_set( + interner, + crate_id, + def_maps, + unresolved_functions, + self_type.clone(), + None, + vec![], // no impl generics + errors, + ) + }) + .collect() +} diff --git a/compiler/noirc_frontend/src/hir/resolution/globals.rs b/compiler/noirc_frontend/src/hir/resolution/globals.rs new file mode 100644 index 00000000000..b5aec212dbf --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/globals.rs @@ -0,0 +1,55 @@ +use super::{path_resolver::StandardPathResolver, resolver::Resolver, take_errors}; +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{CompilationError, UnresolvedGlobal}, + def_map::ModuleId, + Context, + }, + node_interner::StmtId, +}; +use fm::FileId; +use iter_extended::vecmap; + +pub(crate) struct ResolvedGlobals { + pub(crate) globals: Vec<(FileId, StmtId)>, + pub(crate) errors: Vec<(CompilationError, FileId)>, +} + +impl ResolvedGlobals { + pub(crate) fn extend(&mut self, oth: Self) { + self.globals.extend(oth.globals); + self.errors.extend(oth.errors); + } +} + +pub(crate) fn resolve_globals( + context: &mut Context, + globals: Vec, + crate_id: CrateId, +) -> ResolvedGlobals { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + let globals = vecmap(globals, |global| { + let module_id = ModuleId { local_id: global.module_id, krate: crate_id }; + let path_resolver = StandardPathResolver::new(module_id); + + let mut resolver = Resolver::new( + &mut context.def_interner, + &path_resolver, + &context.def_maps, + global.file_id, + ); + + let name = global.stmt_def.pattern.name_ident().clone(); + + let hir_stmt = resolver.resolve_global_let(global.stmt_def); + errors.extend(take_errors(global.file_id, resolver)); + + context.def_interner.update_global(global.stmt_id, hir_stmt); + + context.def_interner.push_global(global.stmt_id, name, global.module_id); + + (global.file_id, global.stmt_id) + }); + ResolvedGlobals { globals, errors } +} diff --git a/compiler/noirc_frontend/src/hir/resolution/impls.rs b/compiler/noirc_frontend/src/hir/resolution/impls.rs new file mode 100644 index 00000000000..4aa70f00cfc --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/impls.rs @@ -0,0 +1,137 @@ +use std::collections::BTreeMap; + +use fm::FileId; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::{ + dc_crate::{CompilationError, ImplMap}, + errors::DefCollectorErrorKind, + }, + def_map::{CrateDefMap, ModuleId}, + Context, + }, + node_interner::{FuncId, NodeInterner}, + Type, +}; + +use super::{ + errors::ResolverError, functions, get_module_mut, get_struct_type, + path_resolver::StandardPathResolver, resolver::Resolver, take_errors, +}; + +/// Go through the list of impls and add each function within to the scope +/// of the module defined by its type. 
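// Illustrative sketch (not part of the patch): `resolve_function_set` above calls
// `set_generics` so every method of an impl re-uses the type variables collected for the
// impl itself; fresh variables per method would unify independently and instantiate
// incorrectly. A standalone model of why sharing the binding cell matters, with
// `Shared<T>` approximated as `Rc<RefCell<T>>`.
use std::{cell::RefCell, rc::Rc};

type Shared<T> = Rc<RefCell<T>>;

#[derive(Debug, Clone, PartialEq)]
enum TypeBinding {
    Unbound,
    Bound(&'static str),
}

fn main() {
    // One generic `T` collected for the impl, shared by all of its methods.
    let impl_generic: Shared<TypeBinding> = Rc::new(RefCell::new(TypeBinding::Unbound));
    let method_a_view = Rc::clone(&impl_generic);
    let method_b_view = Rc::clone(&impl_generic);

    // Binding it while resolving one method is observed by the others, which is exactly
    // what breaks if each method minted its own type variable.
    *method_a_view.borrow_mut() = TypeBinding::Bound("Field");
    assert_eq!(*method_b_view.borrow(), TypeBinding::Bound("Field"));
}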
+pub(crate) fn collect_impls( + context: &mut Context, + crate_id: CrateId, + collected_impls: &ImplMap, +) -> Vec<(CompilationError, FileId)> { + let interner = &mut context.def_interner; + let def_maps = &mut context.def_maps; + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + + for ((unresolved_type, module_id), methods) in collected_impls { + let path_resolver = + StandardPathResolver::new(ModuleId { local_id: *module_id, krate: crate_id }); + + let file = def_maps[&crate_id].file_id(*module_id); + + for (generics, span, unresolved) in methods { + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + resolver.add_generics(generics); + let typ = resolver.resolve_type(unresolved_type.clone()); + + errors.extend(take_errors(unresolved.file_id, resolver)); + + if let Some(struct_type) = get_struct_type(&typ) { + let struct_type = struct_type.borrow(); + + // `impl`s are only allowed on types defined within the current crate + if struct_type.id.krate() != crate_id { + let span = *span; + let type_name = struct_type.name.to_string(); + let error = DefCollectorErrorKind::ForeignImpl { span, type_name }; + errors.push((error.into(), unresolved.file_id)); + continue; + } + + // Grab the module defined by the struct type. Note that impls are a case + // where the module the methods are added to is not the same as the module + // they are resolved in. + let module = get_module_mut(def_maps, struct_type.id.module_id()); + + for (_, method_id, method) in &unresolved.functions { + // If this method was already declared, remove it from the module so it cannot + // be accessed with the `TypeName::method` syntax. We'll check later whether the + // object types in each method overlap or not. If they do, we issue an error. + // If not, that is specialization which is allowed. 
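// Illustrative sketch (not part of the patch): the comment above says that when a method
// name is declared twice for the same type, it is removed from the module scope so
// `TypeName::method` can no longer resolve to it; whether the two declarations actually
// overlap is checked later. A standalone model of that declare/remove behaviour with
// simplified types.
use std::collections::HashMap;

#[derive(Default)]
struct ModuleScope {
    functions: HashMap<String, u32>, // name -> function id (illustrative)
}

impl ModuleScope {
    fn declare_function(&mut self, name: &str, id: u32) -> Result<(), ()> {
        if self.functions.contains_key(name) {
            return Err(()); // already declared
        }
        self.functions.insert(name.to_string(), id);
        Ok(())
    }

    fn remove_function(&mut self, name: &str) {
        self.functions.remove(name);
    }
}

fn main() {
    let mut module = ModuleScope::default();
    assert!(module.declare_function("len", 0).is_ok());
    // A second `len` from another impl block fails to declare, so the name is removed
    // entirely and resolution of `TypeName::len` is deferred to the later overlap check.
    if module.declare_function("len", 1).is_err() {
        module.remove_function("len");
    }
    assert!(!module.functions.contains_key("len"));
}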
+ if module.declare_function(method.name_ident().clone(), *method_id).is_err() { + module.remove_function(method.name_ident()); + } + } + // Prohibit defining impls for primitive types if we're not in the stdlib + } else if typ != Type::Error && !crate_id.is_stdlib() { + let span = *span; + let error = DefCollectorErrorKind::NonStructTypeInImpl { span }; + errors.push((error.into(), unresolved.file_id)); + } + } + } + errors +} + +pub(crate) fn resolve_impls( + interner: &mut NodeInterner, + crate_id: CrateId, + def_maps: &BTreeMap, + collected_impls: ImplMap, + errors: &mut Vec<(CompilationError, FileId)>, +) -> Vec<(FileId, FuncId)> { + let mut file_method_ids = Vec::new(); + + for ((unresolved_type, module_id), methods) in collected_impls { + let path_resolver = + StandardPathResolver::new(ModuleId { local_id: module_id, krate: crate_id }); + + let file = def_maps[&crate_id].file_id(module_id); + + for (generics, _, functions) in methods { + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + resolver.add_generics(&generics); + let generics = resolver.get_generics().to_vec(); + let self_type = resolver.resolve_type(unresolved_type.clone()); + + let mut file_func_ids = functions::resolve_function_set( + interner, + crate_id, + def_maps, + functions, + Some(self_type.clone()), + None, + generics, + errors, + ); + if self_type != Type::Error { + for (file_id, method_id) in &file_func_ids { + let method_name = interner.function_name(method_id).to_owned(); + + if let Some(first_fn) = + interner.add_method(&self_type, method_name.clone(), *method_id, false) + { + let error = ResolverError::DuplicateDefinition { + name: method_name, + first_span: interner.function_ident(&first_fn).span(), + second_span: interner.function_ident(method_id).span(), + }; + errors.push((error.into(), *file_id)); + } + } + } + file_method_ids.append(&mut file_func_ids); + } + } + + file_method_ids +} diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index 6f3140a65d4..5b59dcd2241 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -1,5 +1,5 @@ use iter_extended::partition_results; -use noirc_errors::CustomDiagnostic; +use noirc_errors::{CustomDiagnostic, Span}; use crate::graph::CrateId; use std::collections::BTreeMap; @@ -202,7 +202,11 @@ fn resolve_external_dep( // Create an import directive for the dependency crate let path_without_crate_name = &path[1..]; // XXX: This will panic if the path is of the form `use dep::std` Ideal algorithm will not distinguish between crate and module - let path = Path { segments: path_without_crate_name.to_vec(), kind: PathKind::Plain }; + let path = Path { + segments: path_without_crate_name.to_vec(), + kind: PathKind::Plain, + span: Span::default(), + }; let dep_directive = ImportDirective { module_id: dep_module.local_id, path, alias: directive.alias.clone() }; diff --git a/compiler/noirc_frontend/src/hir/resolution/mod.rs b/compiler/noirc_frontend/src/hir/resolution/mod.rs index 601e78015ca..8c16a9cca80 100644 --- a/compiler/noirc_frontend/src/hir/resolution/mod.rs +++ b/compiler/noirc_frontend/src/hir/resolution/mod.rs @@ -9,3 +9,50 @@ pub mod errors; pub mod import; pub mod path_resolver; pub mod resolver; + +mod functions; +mod globals; +mod impls; +mod structs; +mod traits; +mod type_aliases; + +pub(crate) use functions::resolve_free_functions; +pub(crate) use globals::resolve_globals; +pub(crate) use 
impls::{collect_impls, resolve_impls}; +pub(crate) use structs::resolve_structs; +pub(crate) use traits::{ + collect_trait_impls, resolve_trait_by_path, resolve_trait_impls, resolve_traits, +}; +pub(crate) use type_aliases::resolve_type_aliases; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::CompilationError, + def_map::{CrateDefMap, ModuleData, ModuleId}, + }, + Shared, StructType, Type, +}; +use fm::FileId; +use iter_extended::vecmap; +use resolver::Resolver; +use std::collections::BTreeMap; + +fn take_errors(file_id: FileId, resolver: Resolver<'_>) -> Vec<(CompilationError, FileId)> { + vecmap(resolver.take_errors(), |e| (e.into(), file_id)) +} + +fn get_module_mut( + def_maps: &mut BTreeMap, + module: ModuleId, +) -> &mut ModuleData { + &mut def_maps.get_mut(&module.krate).unwrap().modules[module.local_id.0] +} + +fn get_struct_type(typ: &Type) -> Option<&Shared> { + match typ { + Type::Struct(definition, _) => Some(definition), + _ => None, + } +} diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index d1dbc77d04c..4e7ed7e2ea9 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -441,6 +441,7 @@ impl<'a> Resolver<'a> { MutableReference(element) => { Type::MutableReference(Box::new(self.resolve_type_inner(*element, new_variables))) } + Parenthesized(typ) => self.resolve_type_inner(*typ, new_variables), } } @@ -1203,6 +1204,7 @@ impl<'a> Resolver<'a> { } Literal::Integer(integer) => HirLiteral::Integer(integer), Literal::Str(str) => HirLiteral::Str(str), + Literal::RawStr(str, _) => HirLiteral::Str(str), Literal::FmtStr(str) => self.resolve_fmt_str_literal(str, expr.span), Literal::Unit => HirLiteral::Unit, }), @@ -1786,6 +1788,7 @@ impl<'a> Resolver<'a> { self.verify_type_valid_for_program_input(element); } } + UnresolvedTypeData::Parenthesized(typ) => self.verify_type_valid_for_program_input(typ), } } diff --git a/compiler/noirc_frontend/src/hir/resolution/structs.rs b/compiler/noirc_frontend/src/hir/resolution/structs.rs new file mode 100644 index 00000000000..72a7b736436 --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/structs.rs @@ -0,0 +1,53 @@ +use std::collections::BTreeMap; + +use fm::FileId; +use iter_extended::vecmap; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{CompilationError, UnresolvedStruct}, + def_map::ModuleId, + Context, + }, + node_interner::StructId, + Generics, Ident, Type, +}; + +use super::{errors::ResolverError, path_resolver::StandardPathResolver, resolver::Resolver}; + +/// Create the mappings from TypeId -> StructType +/// so that expressions can access the fields of structs +pub(crate) fn resolve_structs( + context: &mut Context, + structs: BTreeMap, + crate_id: CrateId, +) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + // Resolve each field in each struct. + // Each struct should already be present in the NodeInterner after def collection. 
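// Illustrative sketch (not part of the patch): the resolver hunk above lowers a raw
// string literal to an ordinary `HirLiteral::Str`, while the AST printer earlier in the
// diff reproduces the `r#"…"#` syntax from the stored hash count. A standalone version
// of that formatting, mirroring the Display logic for `Literal::RawStr`:
fn display_raw_str(contents: &str, num_hashes: u8) -> String {
    let hashes: String = std::iter::once('#').cycle().take(num_hashes as usize).collect();
    format!("r{hashes}\"{contents}\"{hashes}")
}

fn main() {
    assert_eq!(display_raw_str("hello", 0), "r\"hello\"");
    assert_eq!(display_raw_str("say \"hi\"", 1), "r#\"say \"hi\"\"#");
}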
+ for (type_id, typ) in structs { + let file_id = typ.file_id; + let (generics, fields, resolver_errors) = resolve_struct_fields(context, crate_id, typ); + errors.extend(vecmap(resolver_errors, |err| (err.into(), file_id))); + context.def_interner.update_struct(type_id, |struct_def| { + struct_def.set_fields(fields); + struct_def.generics = generics; + }); + } + errors +} + +fn resolve_struct_fields( + context: &mut Context, + krate: CrateId, + unresolved: UnresolvedStruct, +) -> (Generics, Vec<(Ident, Type)>, Vec) { + let path_resolver = + StandardPathResolver::new(ModuleId { local_id: unresolved.module_id, krate }); + let file_id = unresolved.file_id; + let (generics, fields, errors) = + Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file_id) + .resolve_struct_fields(unresolved.struct_def); + (generics, fields, errors) +} diff --git a/compiler/noirc_frontend/src/hir/resolution/traits.rs b/compiler/noirc_frontend/src/hir/resolution/traits.rs new file mode 100644 index 00000000000..702e96362a6 --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -0,0 +1,450 @@ +use std::collections::{BTreeMap, HashSet}; + +use fm::FileId; +use iter_extended::vecmap; +use noirc_errors::Span; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::{ + dc_crate::{ + check_methods_signatures, CompilationError, UnresolvedTrait, UnresolvedTraitImpl, + }, + errors::{DefCollectorErrorKind, DuplicateType}, + }, + def_map::{CrateDefMap, ModuleDefId, ModuleId}, + Context, + }, + hir_def::traits::{Trait, TraitConstant, TraitFunction, TraitImpl, TraitType}, + node_interner::{FuncId, NodeInterner, TraitId}, + Path, Shared, TraitItem, Type, TypeVariableKind, +}; + +use super::{ + errors::ResolverError, + functions, get_module_mut, get_struct_type, + import::PathResolutionError, + path_resolver::{PathResolver, StandardPathResolver}, + resolver::Resolver, + take_errors, +}; + +/// Create the mappings from TypeId -> TraitType +/// so that expressions can access the elements of traits +pub(crate) fn resolve_traits( + context: &mut Context, + traits: BTreeMap, + crate_id: CrateId, +) -> Vec<(CompilationError, FileId)> { + for (trait_id, unresolved_trait) in &traits { + context.def_interner.push_empty_trait(*trait_id, unresolved_trait); + } + let mut res: Vec<(CompilationError, FileId)> = vec![]; + for (trait_id, unresolved_trait) in traits { + // Resolve order + // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) + let _ = resolve_trait_types(context, crate_id, &unresolved_trait); + // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) + let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); + // 3. 
Trait Methods + let (methods, errors) = + resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait); + res.extend(errors); + context.def_interner.update_trait(trait_id, |trait_def| { + trait_def.set_methods(methods); + }); + } + res +} + +fn resolve_trait_types( + _context: &mut Context, + _crate_id: CrateId, + _unresolved_trait: &UnresolvedTrait, +) -> (Vec, Vec<(CompilationError, FileId)>) { + // TODO + (vec![], vec![]) +} +fn resolve_trait_constants( + _context: &mut Context, + _crate_id: CrateId, + _unresolved_trait: &UnresolvedTrait, +) -> (Vec, Vec<(CompilationError, FileId)>) { + // TODO + (vec![], vec![]) +} + +fn resolve_trait_methods( + context: &mut Context, + trait_id: TraitId, + crate_id: CrateId, + unresolved_trait: &UnresolvedTrait, +) -> (Vec, Vec<(CompilationError, FileId)>) { + let interner = &mut context.def_interner; + let def_maps = &mut context.def_maps; + + let path_resolver = StandardPathResolver::new(ModuleId { + local_id: unresolved_trait.module_id, + krate: crate_id, + }); + let file = def_maps[&crate_id].file_id(unresolved_trait.module_id); + + let mut res = vec![]; + let mut resolver_errors = vec![]; + for item in &unresolved_trait.trait_def.items { + if let TraitItem::Function { + name, + generics, + parameters, + return_type, + where_clause: _, + body: _, + } = item + { + let the_trait = interner.get_trait(trait_id); + let self_type = + Type::TypeVariable(the_trait.self_type_typevar.clone(), TypeVariableKind::Normal); + + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + resolver.add_generics(generics); + resolver.set_self_type(Some(self_type)); + + let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); + let resolved_return_type = resolver.resolve_type(return_type.get_type().into_owned()); + let generics = resolver.get_generics().to_vec(); + + let name = name.clone(); + let span: Span = name.span(); + let default_impl_list: Vec<_> = unresolved_trait + .fns_with_default_impl + .functions + .iter() + .filter(|(_, _, q)| q.name() == name.0.contents) + .collect(); + let default_impl = if default_impl_list.len() == 1 { + Some(Box::new(default_impl_list[0].2.clone())) + } else { + None + }; + + let f = TraitFunction { + name, + generics, + arguments, + return_type: resolved_return_type, + span, + default_impl, + default_impl_file_id: unresolved_trait.file_id, + default_impl_module_id: unresolved_trait.module_id, + }; + res.push(f); + resolver_errors.extend(take_errors_filter_self_not_resolved(file, resolver)); + } + } + (res, resolver_errors) +} + +fn collect_trait_impl_methods( + interner: &mut NodeInterner, + def_maps: &BTreeMap, + crate_id: CrateId, + trait_id: TraitId, + trait_impl: &mut UnresolvedTraitImpl, +) -> Vec<(CompilationError, FileId)> { + // In this Vec methods[i] corresponds to trait.methods[i]. If the impl has no implementation + // for a particular method, the default implementation will be added at that slot. 
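// Illustrative sketch (not part of the patch): `check_trait_impl_crate_coherence` in the
// hunk below enforces an orphan-style rule: a trait impl is rejected unless the current
// crate defines either the trait or the object type. A standalone model of that check;
// the CrateId values are made up for the example.
#[derive(Debug, Clone, Copy, PartialEq)]
struct CrateId(u32);

fn impl_is_orphaned(current: CrateId, trait_crate: CrateId, object_crate: CrateId) -> bool {
    current != trait_crate && current != object_crate
}

fn main() {
    let (std_crate, user_crate, other_crate) = (CrateId(0), CrateId(1), CrateId(2));
    // Implementing a local trait for a foreign type is allowed...
    assert!(!impl_is_orphaned(user_crate, user_crate, std_crate));
    // ...but a foreign trait for a foreign type is flagged as orphaned.
    assert!(impl_is_orphaned(user_crate, std_crate, other_crate));
}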
+ let mut ordered_methods = Vec::new(); + + let the_trait = interner.get_trait(trait_id); + + // check whether the trait implementation is in the same crate as either the trait or the type + let mut errors = + check_trait_impl_crate_coherence(interner, &the_trait, trait_impl, crate_id, def_maps); + // set of function ids that have a corresponding method in the trait + let mut func_ids_in_trait = HashSet::new(); + + for method in &the_trait.methods { + let overrides: Vec<_> = trait_impl + .methods + .functions + .iter() + .filter(|(_, _, f)| f.name() == method.name.0.contents) + .collect(); + + if overrides.is_empty() { + if let Some(default_impl) = &method.default_impl { + let func_id = interner.push_empty_fn(); + let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; + interner.push_function(func_id, &default_impl.def, module); + func_ids_in_trait.insert(func_id); + ordered_methods.push(( + method.default_impl_module_id, + func_id, + *default_impl.clone(), + )); + } else { + let error = DefCollectorErrorKind::TraitMissingMethod { + trait_name: the_trait.name.clone(), + method_name: method.name.clone(), + trait_impl_span: trait_impl.object_type.span.expect("type must have a span"), + }; + errors.push((error.into(), trait_impl.file_id)); + } + } else { + for (_, func_id, _) in &overrides { + func_ids_in_trait.insert(*func_id); + } + + if overrides.len() > 1 { + let error = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::TraitAssociatedFunction, + first_def: overrides[0].2.name_ident().clone(), + second_def: overrides[1].2.name_ident().clone(), + }; + errors.push((error.into(), trait_impl.file_id)); + } + + ordered_methods.push(overrides[0].clone()); + } + } + + // Emit MethodNotInTrait error for methods in the impl block that + // don't have a corresponding method signature defined in the trait + for (_, func_id, func) in &trait_impl.methods.functions { + if !func_ids_in_trait.contains(func_id) { + let error = DefCollectorErrorKind::MethodNotInTrait { + trait_name: the_trait.name.clone(), + impl_method: func.name_ident().clone(), + }; + errors.push((error.into(), trait_impl.file_id)); + } + } + + trait_impl.methods.functions = ordered_methods; + trait_impl.methods.trait_id = Some(trait_id); + errors +} + +fn collect_trait_impl( + context: &mut Context, + crate_id: CrateId, + trait_impl: &mut UnresolvedTraitImpl, +) -> Vec<(CompilationError, FileId)> { + let interner = &mut context.def_interner; + let def_maps = &mut context.def_maps; + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + let unresolved_type = trait_impl.object_type.clone(); + let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; + trait_impl.trait_id = + match resolve_trait_by_path(def_maps, module, trait_impl.trait_path.clone()) { + Ok(trait_id) => Some(trait_id), + Err(error) => { + errors.push((error.into(), trait_impl.file_id)); + None + } + }; + + if let Some(trait_id) = trait_impl.trait_id { + errors + .extend(collect_trait_impl_methods(interner, def_maps, crate_id, trait_id, trait_impl)); + + let path_resolver = StandardPathResolver::new(module); + let file = def_maps[&crate_id].file_id(trait_impl.module_id); + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + resolver.add_generics(&trait_impl.generics); + let typ = resolver.resolve_type(unresolved_type); + errors.extend(take_errors(trait_impl.file_id, resolver)); + + if let Some(struct_type) = get_struct_type(&typ) { + let struct_type = struct_type.borrow(); + let module = 
                get_module_mut(def_maps, struct_type.id.module_id());
+
+            for (_, method_id, method) in &trait_impl.methods.functions {
+                // If this method was already declared, remove it from the module so it cannot
+                // be accessed with the `TypeName::method` syntax. We'll check later whether the
+                // object types in each method overlap or not. If they do, we issue an error.
+                // If not, that is specialization which is allowed.
+                if module.declare_function(method.name_ident().clone(), *method_id).is_err() {
+                    module.remove_function(method.name_ident());
+                }
+            }
+        }
+    }
+    errors
+}
+
+pub(crate) fn collect_trait_impls(
+    context: &mut Context,
+    crate_id: CrateId,
+    collected_impls: &mut [UnresolvedTraitImpl],
+) -> Vec<(CompilationError, FileId)> {
+    collected_impls
+        .iter_mut()
+        .flat_map(|trait_impl| collect_trait_impl(context, crate_id, trait_impl))
+        .collect()
+}
+
+fn check_trait_impl_crate_coherence(
+    interner: &mut NodeInterner,
+    the_trait: &Trait,
+    trait_impl: &UnresolvedTraitImpl,
+    current_crate: CrateId,
+    def_maps: &BTreeMap<CrateId, CrateDefMap>,
+) -> Vec<(CompilationError, FileId)> {
+    let mut errors: Vec<(CompilationError, FileId)> = vec![];
+
+    let module = ModuleId { krate: current_crate, local_id: trait_impl.module_id };
+    let file = def_maps[&current_crate].file_id(trait_impl.module_id);
+    let path_resolver = StandardPathResolver::new(module);
+    let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file);
+
+    let object_crate = match resolver.resolve_type(trait_impl.object_type.clone()) {
+        Type::Struct(struct_type, _) => struct_type.borrow().id.krate(),
+        _ => CrateId::Dummy,
+    };
+
+    if current_crate != the_trait.crate_id && current_crate != object_crate {
+        let error = DefCollectorErrorKind::TraitImplOrphaned {
+            span: trait_impl.object_type.span.expect("object type must have a span"),
+        };
+        errors.push((error.into(), trait_impl.file_id));
+    }
+
+    errors
+}
+
+pub(crate) fn resolve_trait_by_path(
+    def_maps: &BTreeMap<CrateId, CrateDefMap>,
+    module: ModuleId,
+    path: Path,
+) -> Result<TraitId, DefCollectorErrorKind> {
+    let path_resolver = StandardPathResolver::new(module);
+
+    match path_resolver.resolve(def_maps, path.clone()) {
+        Ok(ModuleDefId::TraitId(trait_id)) => Ok(trait_id),
+        Ok(_) => Err(DefCollectorErrorKind::NotATrait { not_a_trait_name: path }),
+        Err(_) => Err(DefCollectorErrorKind::TraitNotFound { trait_path: path }),
+    }
+}
+pub(crate) fn resolve_trait_impls(
+    context: &mut Context,
+    traits: Vec<UnresolvedTraitImpl>,
+    crate_id: CrateId,
+    errors: &mut Vec<(CompilationError, FileId)>,
+) -> Vec<(FileId, FuncId)> {
+    let interner = &mut context.def_interner;
+    let mut methods = Vec::<(FileId, FuncId)>::new();
+
+    for trait_impl in traits {
+        let unresolved_type = trait_impl.object_type;
+        let local_mod_id = trait_impl.module_id;
+        let module_id = ModuleId { krate: crate_id, local_id: local_mod_id };
+        let path_resolver = StandardPathResolver::new(module_id);
+
+        let self_type_span = unresolved_type.span;
+
+        let mut resolver =
+            Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id);
+        resolver.add_generics(&trait_impl.generics);
+        let self_type = resolver.resolve_type(unresolved_type.clone());
+        let generics = resolver.get_generics().to_vec();
+
+        let impl_id = interner.next_trait_impl_id();
+
+        let mut impl_methods = functions::resolve_function_set(
+            interner,
+            crate_id,
+            &context.def_maps,
+            trait_impl.methods.clone(),
+            Some(self_type.clone()),
+            Some(impl_id),
+            generics.clone(),
+            errors,
+        );
+
+        let maybe_trait_id = trait_impl.trait_id;
+        if let Some(trait_id) = maybe_trait_id {
+            for (_, func) in
&impl_methods { + interner.set_function_trait(*func, self_type.clone(), trait_id); + } + } + + if matches!(self_type, Type::MutableReference(_)) { + let span = self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()); + let error = DefCollectorErrorKind::MutableReferenceInTraitImpl { span }; + errors.push((error.into(), trait_impl.file_id)); + } + + let mut new_resolver = + Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); + + new_resolver.set_generics(generics); + new_resolver.set_self_type(Some(self_type.clone())); + + if let Some(trait_id) = maybe_trait_id { + check_methods_signatures( + &mut new_resolver, + &impl_methods, + trait_id, + trait_impl.generics.len(), + errors, + ); + + let where_clause = trait_impl + .where_clause + .into_iter() + .flat_map(|item| new_resolver.resolve_trait_constraint(item)) + .collect(); + + let resolved_trait_impl = Shared::new(TraitImpl { + ident: trait_impl.trait_path.last_segment().clone(), + typ: self_type.clone(), + trait_id, + file: trait_impl.file_id, + where_clause, + methods: vecmap(&impl_methods, |(_, func_id)| *func_id), + }); + + if let Err((prev_span, prev_file)) = interner.add_trait_implementation( + self_type.clone(), + trait_id, + impl_id, + resolved_trait_impl, + ) { + let error = DefCollectorErrorKind::OverlappingImpl { + typ: self_type.clone(), + span: self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()), + }; + errors.push((error.into(), trait_impl.file_id)); + + // The 'previous impl defined here' note must be a separate error currently + // since it may be in a different file and all errors have the same file id. + let error = DefCollectorErrorKind::OverlappingImplNote { span: prev_span }; + errors.push((error.into(), prev_file)); + } + + methods.append(&mut impl_methods); + } + } + + methods +} + +pub(crate) fn take_errors_filter_self_not_resolved( + file_id: FileId, + resolver: Resolver<'_>, +) -> Vec<(CompilationError, FileId)> { + resolver + .take_errors() + .iter() + .filter(|resolution_error| match resolution_error { + ResolverError::PathResolutionError(PathResolutionError::Unresolved(ident)) => { + &ident.0.contents != "Self" + } + _ => true, + }) + .cloned() + .map(|resolution_error| (resolution_error.into(), file_id)) + .collect() +} diff --git a/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs b/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs new file mode 100644 index 00000000000..f66f6c8dfa7 --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs @@ -0,0 +1,33 @@ +use super::{path_resolver::StandardPathResolver, resolver::Resolver}; +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{CompilationError, UnresolvedTypeAlias}, + def_map::ModuleId, + Context, + }, + node_interner::TypeAliasId, +}; +use fm::FileId; +use std::collections::BTreeMap; + +pub(crate) fn resolve_type_aliases( + context: &mut Context, + type_aliases: BTreeMap, + crate_id: CrateId, +) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + for (type_id, unresolved_typ) in type_aliases { + let path_resolver = StandardPathResolver::new(ModuleId { + local_id: unresolved_typ.module_id, + krate: crate_id, + }); + let file = unresolved_typ.file_id; + let (typ, generics, resolver_errors) = + Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) + .resolve_type_aliases(unresolved_typ.type_alias_def); + errors.extend(resolver_errors.iter().cloned().map(|e| (e.into(), file))); + 
context.def_interner.set_type_alias(type_id, typ, generics); + } + errors +} diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs index be24c1249c6..7a2197ebb93 100644 --- a/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/compiler/noirc_frontend/src/lexer/lexer.rs @@ -126,6 +126,7 @@ impl<'a> Lexer<'a> { Some(']') => self.single_char_token(Token::RightBracket), Some('"') => self.eat_string_literal(), Some('f') => self.eat_format_string_or_alpha_numeric(), + Some('r') => self.eat_raw_string_or_alpha_numeric(), Some('#') => self.eat_attribute(), Some(ch) if ch.is_ascii_alphanumeric() || ch == '_' => self.eat_alpha_numeric(ch), Some(ch) => { @@ -400,6 +401,78 @@ impl<'a> Lexer<'a> { } } + fn eat_raw_string(&mut self) -> SpannedTokenResult { + let start = self.position; + + let beginning_hashes = self.eat_while(None, |ch| ch == '#'); + let beginning_hashes_count = beginning_hashes.chars().count(); + if beginning_hashes_count > 255 { + // too many hashes (unlikely in practice) + // also, Rust disallows 256+ hashes as well + return Err(LexerErrorKind::UnexpectedCharacter { + span: Span::single_char(start + 255), + found: Some('#'), + expected: "\"".to_owned(), + }); + } + + if !self.peek_char_is('"') { + return Err(LexerErrorKind::UnexpectedCharacter { + span: Span::single_char(self.position), + found: self.next_char(), + expected: "\"".to_owned(), + }); + } + self.next_char(); + + let mut str_literal = String::new(); + loop { + let chars = self.eat_while(None, |ch| ch != '"'); + str_literal.push_str(&chars[..]); + if !self.peek_char_is('"') { + return Err(LexerErrorKind::UnexpectedCharacter { + span: Span::single_char(self.position), + found: self.next_char(), + expected: "\"".to_owned(), + }); + } + self.next_char(); + let mut ending_hashes_count = 0; + while let Some('#') = self.peek_char() { + if ending_hashes_count == beginning_hashes_count { + break; + } + self.next_char(); + ending_hashes_count += 1; + } + if ending_hashes_count == beginning_hashes_count { + break; + } else { + str_literal.push('"'); + for _ in 0..ending_hashes_count { + str_literal.push('#'); + } + } + } + + let str_literal_token = Token::RawStr(str_literal, beginning_hashes_count as u8); + + let end = self.position; + Ok(str_literal_token.into_span(start, end)) + } + + fn eat_raw_string_or_alpha_numeric(&mut self) -> SpannedTokenResult { + // Problem: we commit to eating raw strings once we see one or two characters. + // This is unclean, but likely ok in all practical cases, and works with existing + // `Lexer` methods. 
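+        // e.g. after the leading `r`, seeing `"`, `#"` or `##` (as in r"foo", r#"foo"# or
+        // r##"foo"##) commits to a raw string; anything else, such as `return`, falls back
+        // to eat_alpha_numeric below.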
+ let peek1 = self.peek_char().unwrap_or('X'); + let peek2 = self.peek2_char().unwrap_or('X'); + match (peek1, peek2) { + ('#', '#') | ('#', '"') | ('"', _) => self.eat_raw_string(), + _ => self.eat_alpha_numeric('r'), + } + } + fn parse_comment(&mut self, start: u32) -> SpannedTokenResult { let doc_style = match self.peek_char() { Some('!') => { diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index 7aafbc3fded..ba93e52e1a8 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -15,6 +15,7 @@ pub enum Token { Int(FieldElement), Bool(bool), Str(String), + RawStr(String, u8), FmtStr(String), Keyword(Keyword), IntType(IntType), @@ -157,6 +158,10 @@ impl fmt::Display for Token { Token::Bool(b) => write!(f, "{b}"), Token::Str(ref b) => write!(f, "{b}"), Token::FmtStr(ref b) => write!(f, "f{b}"), + Token::RawStr(ref b, hashes) => { + let h: String = std::iter::once('#').cycle().take(hashes as usize).collect(); + write!(f, "r{h}\"{b}\"{h}") + } Token::Keyword(k) => write!(f, "{k}"), Token::Attribute(ref a) => write!(f, "{a}"), Token::LineComment(ref s, _style) => write!(f, "//{s}"), @@ -227,7 +232,11 @@ impl Token { pub fn kind(&self) -> TokenKind { match *self { Token::Ident(_) => TokenKind::Ident, - Token::Int(_) | Token::Bool(_) | Token::Str(_) | Token::FmtStr(_) => TokenKind::Literal, + Token::Int(_) + | Token::Bool(_) + | Token::Str(_) + | Token::RawStr(..) + | Token::FmtStr(_) => TokenKind::Literal, Token::Keyword(_) => TokenKind::Keyword, Token::Attribute(_) => TokenKind::Attribute, ref tok => TokenKind::Token(tok.clone()), diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index 1fa6046f6f3..cc85fe88205 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -726,21 +726,21 @@ fn token_kind(token_kind: TokenKind) -> impl NoirParser { fn path() -> impl NoirParser { let idents = || ident().separated_by(just(Token::DoubleColon)).at_least(1); - let make_path = |kind| move |segments| Path { segments, kind }; + let make_path = |kind| move |segments, span| Path { segments, kind, span }; let prefix = |key| keyword(key).ignore_then(just(Token::DoubleColon)); - let path_kind = |key, kind| prefix(key).ignore_then(idents()).map(make_path(kind)); + let path_kind = |key, kind| prefix(key).ignore_then(idents()).map_with_span(make_path(kind)); choice(( path_kind(Keyword::Crate, PathKind::Crate), path_kind(Keyword::Dep, PathKind::Dep), - idents().map(make_path(PathKind::Plain)), + idents().map_with_span(make_path(PathKind::Plain)), )) } fn empty_path() -> impl NoirParser { - let make_path = |kind| move |_| Path { segments: Vec::new(), kind }; - let path_kind = |key, kind| keyword(key).map(make_path(kind)); + let make_path = |kind| move |_, span| Path { segments: Vec::new(), kind, span }; + let path_kind = |key, kind| keyword(key).map_with_span(make_path(kind)); choice((path_kind(Keyword::Crate, PathKind::Crate), path_kind(Keyword::Dep, PathKind::Dep))) } @@ -1015,13 +1015,24 @@ fn parse_type_inner( named_type(recursive_type_parser.clone()), named_trait(recursive_type_parser.clone()), array_type(recursive_type_parser.clone()), - recursive_type_parser.clone().delimited_by(just(Token::LeftParen), just(Token::RightParen)), + parenthesized_type(recursive_type_parser.clone()), tuple_type(recursive_type_parser.clone()), function_type(recursive_type_parser.clone()), 
mutable_reference_type(recursive_type_parser), )) } +fn parenthesized_type( + recursive_type_parser: impl NoirParser, +) -> impl NoirParser { + recursive_type_parser + .delimited_by(just(Token::LeftParen), just(Token::RightParen)) + .map_with_span(|typ, span| UnresolvedType { + typ: UnresolvedTypeData::Parenthesized(Box::new(typ)), + span: span.into(), + }) +} + fn optional_visibility() -> impl NoirParser { keyword(Keyword::Pub) .or(keyword(Keyword::CallData)) @@ -1185,7 +1196,9 @@ where .ignore_then(type_parser.clone()) .then_ignore(just(Token::RightBracket)) .or_not() - .map_with_span(|t, span| t.unwrap_or_else(|| UnresolvedTypeData::Unit.with_span(span))); + .map_with_span(|t, span| { + t.unwrap_or_else(|| UnresolvedTypeData::Unit.with_span(Span::empty(span.end()))) + }); keyword(Keyword::Fn) .ignore_then(env) @@ -1665,6 +1678,7 @@ fn literal() -> impl NoirParser { Token::Int(x) => ExpressionKind::integer(x), Token::Bool(b) => ExpressionKind::boolean(b), Token::Str(s) => ExpressionKind::string(s), + Token::RawStr(s, hashes) => ExpressionKind::raw_string(s, hashes), Token::FmtStr(s) => ExpressionKind::format_string(s), unexpected => unreachable!("Non-literal {} parsed as a literal", unexpected), }) @@ -2557,4 +2571,79 @@ mod test { check_cases_with_errors(&cases[..], block(fresh_statement())); } + + #[test] + fn parse_raw_string_expr() { + let cases = vec![ + Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, + // backslash + Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, + Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, + Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, + // escape sequence + Case { + source: r##" r#"\t\n\\t\\n\\\t\\\n\\\\"# "##, + expect: r##"r#"\t\n\\t\\n\\\t\\\n\\\\"#"##, + errors: 0, + }, + Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, + // mismatch - errors: + Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, + Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + // mismatch: short: + Case { source: r###" r"foo"# "###, expect: r###"r"foo""###, errors: 1 }, + Case { source: r###" r#"foo" "###, expect: "(none)", errors: 2 }, + // empty string + Case { source: r####"r"""####, expect: r####"r"""####, errors: 0 }, + Case { source: r####"r###""###"####, expect: r####"r###""###"####, errors: 0 }, + // miscellaneous + Case { source: r###" r#\"foo\"# "###, expect: "plain::r", errors: 2 }, + Case { source: r###" r\"foo\" "###, expect: "plain::r", errors: 1 }, + Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + // missing 'r' letter + Case { source: r###" ##"foo"# "###, expect: r#""foo""#, errors: 2 }, + Case { source: r###" #"foo" "###, expect: "plain::foo", errors: 2 }, + // whitespace + Case { source: r###" r #"foo"# "###, expect: "plain::r", errors: 2 }, + Case { source: r###" r# "foo"# "###, expect: "plain::r", errors: 3 }, + Case { source: r###" r#"foo" # "###, expect: "(none)", errors: 2 }, + // after identifier + Case { source: r###" bar#"foo"# "###, expect: "plain::bar", errors: 2 }, + // nested + Case { + source: r###"r##"foo r#"bar"# r"baz" ### bye"##"###, + expect: r###"r##"foo r#"bar"# r"baz" ### bye"##"###, + errors: 0, + }, + ]; + + check_cases_with_errors(&cases[..], expression()); + } + + #[test] + fn parse_raw_string_lit() { + let 
lit_cases = vec![ + Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, + // backslash + Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, + Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, + Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, + // escape sequence + Case { + source: r##" r#"\t\n\\t\\n\\\t\\\n\\\\"# "##, + expect: r##"r#"\t\n\\t\\n\\\t\\\n\\\\"#"##, + errors: 0, + }, + Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, + // mismatch - errors: + Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, + Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + ]; + + check_cases_with_errors(&lit_cases[..], literal()); + } } diff --git a/compiler/noirc_printable_type/src/lib.rs b/compiler/noirc_printable_type/src/lib.rs index 1c4f597add2..e10e400b0db 100644 --- a/compiler/noirc_printable_type/src/lib.rs +++ b/compiler/noirc_printable_type/src/lib.rs @@ -168,15 +168,24 @@ fn fetch_printable_type( fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { let mut output = String::new(); match (value, typ) { - ( - PrintableValue::Field(f), - PrintableType::Field - // TODO(#2401): We should print the sign for these and probably print normal integers instead of field strings - | PrintableType::SignedInteger { .. } - | PrintableType::UnsignedInteger { .. }, - ) => { + (PrintableValue::Field(f), PrintableType::Field) => { output.push_str(&format_field_string(*f)); } + (PrintableValue::Field(f), PrintableType::UnsignedInteger { width }) => { + let uint_cast = f.to_u128() & ((1 << width) - 1); // Retain the lower 'width' bits + output.push_str(&uint_cast.to_string()); + } + (PrintableValue::Field(f), PrintableType::SignedInteger { width }) => { + let mut uint = f.to_u128(); // Interpret as uint + + // Extract sign relative to width of input + if (uint >> (width - 1)) == 1 { + output.push('-'); + uint = (uint ^ ((1 << width) - 1)) + 1; // Two's complement relative to width of input + } + + output.push_str(&uint.to_string()); + } (PrintableValue::Field(f), PrintableType::Boolean) => { if f.is_one() { output.push_str("true"); @@ -187,8 +196,11 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { (PrintableValue::Vec(vector), PrintableType::Array { typ, .. 
}) => { output.push('['); let mut values = vector.iter().peekable(); - while let Some(value) = values.next() { - output.push_str(&format!("{}", PrintableValueDisplay::Plain(value.clone(), *typ.clone()))); + while let Some(value) = values.next() { + output.push_str(&format!( + "{}", + PrintableValueDisplay::Plain(value.clone(), *typ.clone()) + )); if values.peek().is_some() { output.push_str(", "); } @@ -204,9 +216,12 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { output.push_str(&format!("{name} {{ ")); let mut fields = fields.iter().peekable(); - while let Some((key, field_type)) = fields.next() { + while let Some((key, field_type)) = fields.next() { let value = &map[key]; - output.push_str(&format!("{key}: {}", PrintableValueDisplay::Plain(value.clone(), field_type.clone()))); + output.push_str(&format!( + "{key}: {}", + PrintableValueDisplay::Plain(value.clone(), field_type.clone()) + )); if fields.peek().is_some() { output.push_str(", "); } @@ -215,7 +230,7 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { output.push_str(" }"); } - _ => return None + _ => return None, }; Some(output) diff --git a/cspell.json b/cspell.json index 90d3963566d..fc8d8d7e82c 100644 --- a/cspell.json +++ b/cspell.json @@ -82,6 +82,7 @@ "nixpkgs", "noirc", "noirup", + "nomicfoundation", "pedersen", "peekable", "plonkc", diff --git a/docs/docs/language_concepts/01_functions.md b/docs/docs/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/docs/language_concepts/01_functions.md +++ b/docs/docs/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/docs/language_concepts/07_mutability.md b/docs/docs/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/docs/language_concepts/07_mutability.md +++ b/docs/docs/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/docs/language_concepts/data_types/03_strings.md b/docs/docs/language_concepts/data_types/03_strings.md index c42f34ec3ad..e647a58472f 100644 --- a/docs/docs/language_concepts/data_types/03_strings.md +++ b/docs/docs/language_concepts/data_types/03_strings.md @@ -61,3 +61,19 @@ Example: let s = "Hello \"world" // prints "Hello "world" let s = "hey \tyou"; // prints "hey you" ``` + +## Raw strings + +A raw string begins with the letter `r` and is optionally delimited by a number of hashes `#`. + +Escape characters are *not* processed within raw strings. All contents are interpreted literally. 
+ +Example: + +```rust +let s = r"Hello world"; +let s = r#"Simon says "hello world""#; + +// Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes +let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; +``` diff --git a/docs/package.json b/docs/package.json index db0efbe7543..ee211065683 100644 --- a/docs/package.json +++ b/docs/package.json @@ -4,8 +4,8 @@ "private": true, "scripts": { "start": "docusaurus start", - "build": "docusaurus build", - "setStable": "node ./scripts/setStable.js" + "build": "yarn version::stables && docusaurus build", + "version::stables": "node ./scripts/setStable.js" }, "dependencies": { "@docusaurus/core": "^2.4.0", diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md index 7cb43c4c5f2..069d86c46d0 100644 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md @@ -18,7 +18,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -26,7 +26,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.4/examples/merkle-proof.mdx b/docs/versioned_docs/version-v0.19.4/examples/merkle-proof.mdx new file mode 100644 index 00000000000..832fb4bb55e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/examples/merkle-proof.mdx @@ -0,0 +1,48 @@ +--- +title: Merkle Proof Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. +keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. + +```rust +use dep::std; + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message); +``` + +The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree implementation without standard library. +> However, for most usecases, it is enough. In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +An example, the merkle membership proof, only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. 
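+A minimal sketch of that cheaper variant, reusing only the `std::hash::pedersen_hash` and
+`std::merkle::compute_merkle_root` calls already referenced on this page (illustrative, assuming
+the same `message`, `index`, `hashpath` and `root` inputs as the example above):
+
+```rust
+// Pedersen is collision resistant, which is all a Merkle membership proof needs.
+let leaf = std::hash::pedersen_hash(message);
+let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath);
+assert(merkle_root == root);
+```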
+ +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md new file mode 100644 index 00000000000..725c5f4d373 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md @@ -0,0 +1,249 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, + verifying and more). Learn how to install and use Nargo for your projects with this comprehensive + guide. +keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] +--- + +`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, +verifying and more). + +Alternatively, the interactions can also be performed in [NoirJS](../noir_js/noir_js.md). + +### UltraPlonk + +Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk +version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. + +## Installation + +There are four approaches for installing Nargo: + +- [Option 1: Noirup](#option-1-noirup) +- [Option 2: Binaries](#option-2-binaries) +- [Option 3: Compile from Source](#option-3-compile-from-source) +- [Option 4: WSL for Windows](#option-4-wsl-for-windows) + +Optionally you can also install [Noir VS Code extension] for syntax highlighting. + +### Option 1: Noirup + +If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. Just open a +terminal and run: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done, you should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. + +#### GitHub Actions + +You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as +installing `noirup` and running tests in your GitHub Action `yml` file. + +See the +[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in +this repo containing hash functions in Noir for an example. + +#### Nightly versions + +To install the nightly version of Noir (updated daily) run: + +```bash +noirup -n +``` + +### Option 2: Binaries + +See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous +platform specific binaries. + +#### Step 1 + +Paste and run the following in the terminal to extract and install the binary: + +> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend +> `sudo` and re-run it. 
+ +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ +echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ +source ~/.bashrc +``` + +#### Step 2 + +Check if the installation was successful by running `nargo --help`. + +> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from +> Finder. Close the new terminal popped up and `nargo` should now be accessible. + +For a successful installation, you should see something similar to the following after running the +command: + +```sh +$ nargo --help + +Noir's package manager + +Usage: nargo + +Commands: + check Checks the constraint system for errors + codegen-verifier Generates a Solidity verifier smart contract for the program + compile Compile the program and its secret execution trace into ACIR format + new Create a new binary project + execute Executes a circuit to calculate its return value + prove Create proof for this program. The proof is returned as a hex encoded string + verify Given a proof and a program, verify whether the proof is valid + test Run the tests for this program + gates Counts the occurrences of different gates in circuit + help Print this message or the help of the given subcommand(s) +``` + +### Option 3: Compile from Source + +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). + +Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. + +#### Setting up your environment + +For the best experience, please follow these instructions to setup your environment: + +1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. +2. Create the file `~/.config/nix/nix.conf` with the contents: + +```ini +experimental-features = nix-command +extra-experimental-features = flakes +``` + +3. Install direnv into your Nix profile by running: + +```sh +nix profile install nixpkgs#direnv +``` + +4. 
Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). + 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. +5. Restart your shell. + +#### Shell & editor experience + +Now that your environment is set up, you can get to work on the project. + +1. Clone the repository, such as: + +```sh +git clone git@github.com:noir-lang/noir +``` + +> Replacing `noir` with whichever repository you want to work on. + +2. Navigate to the directory: + +```sh +cd noir +``` + +> Replacing `noir` with whichever repository you cloned. + +3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: + +```sh +direnv allow +``` + +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. + +5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): + +```sh +code . +``` + +6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. + +#### Building and testing + +Assuming you are using `direnv` to populate your environment, building and testing the project can be done +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. + +If you want to build the entire project in an isolated sandbox, you can use Nix commands: + +1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. +2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. + +#### Without `direnv` + +If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. + +Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! + +### Option 4: WSL (for Windows) + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](#option-1-noirup). + +## Uninstalling Nargo + +### Noirup + +If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. + +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` + +### Nix + +If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. 
+ +```bash +rm ~/.nix-profile/bin/nargo +``` + +[noir vs code extension]: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md new file mode 100644 index 00000000000..8b4416beba1 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md @@ -0,0 +1,147 @@ +--- +title: Create A Project +description: + Learn how to create and verify your first Noir program using Nargo, a programming language for + zero-knowledge proofs. +keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +--- + +Now that we have installed Nargo, it is time to make our first hello world program! + +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. + +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +For Windows CMD, run: + +```sh +> mkdir "%USERPROFILE%\projects" +> cd /d "%USERPROFILE%\projects" +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ that contains the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../language_concepts/data_types) section. + +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. + +For more Noir syntax, check the [Language Concepts](../language_concepts/comments) chapter. + +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +Two additional files would be generated in your project directory: + +_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. + +## Prove Our Noir Program + +Now that the project is set up, we can create a proof of correct execution on our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. 
For example: + +```toml +x = "1" +y = "2" +``` + +Prove the valid execution of your Noir program: + +```sh +nargo prove +``` + +A new folder _proofs_ would then be generated in your project directory, containing the proof file +`.proof`, where the project name is defined in Nargo.toml. + +The _Verifier.toml_ file would also be updated with the public values computed from program +execution (in this case the value of `y`): + +```toml +y = "0x0000000000000000000000000000000000000000000000000000000000000002" +``` + +> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the +proof file. + +Verify your proof by running: + +```sh +nargo verify +``` + +The verification will complete in silence if it is successful. If it fails, it will log the +corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](breakdown), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md new file mode 100644 index 00000000000..9a17f5d6360 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md @@ -0,0 +1,198 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +--- + +This section breaks down our hello world program in section _1.2_. We elaborate on the project +structure and what the `prove` and `verify` commands did in the previous section. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Verifier.toml + +_Verifier.toml_ contains public in/output values computed when executing the Noir program. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. + +Example Nargo.toml: + +```toml +[package] +name = "noirstarter" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../modules_packages_crates/workspaces) will look a bit different. 
For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section requires a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follow's [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../modules_packages_crates/dependencies) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and +public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` + is not equal. This not equal constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. 
+ +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: + +```bash +nargo prove +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: + +```bash +nargo prove -p OtherProver +``` + +## Verifying a Proof + +When the command `nargo verify` is executed, two processes happen: + +1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) + +2. If that file is found, the proof's validity is checked + +> **Note:** The validity of the proof is linked to the current Noir program; if the program is +> changed and the verifier verifies the proof, it will fail because the proof is not valid for the +> _modified_ Noir program. + +In production, the prover and the verifier are usually two separate entities. A prover would +retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the +verifier. The verifier would then retrieve the public inputs from usually external sources and +verifies the validity of the proof against it. + +Take a private asset transfer as an example: + +A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and +public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof +and submit it to the verifier smart contract. + +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could trigger (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/versioned_docs/version-v0.19.4/index.md b/docs/versioned_docs/version-v0.19.4/index.md new file mode 100644 index 00000000000..75e1abf2932 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/index.md @@ -0,0 +1,100 @@ +--- +title: Introducing Noir +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by + Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a + rank-1 constraint system. +keywords: + [ + Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language, + ] +slug: / +--- + +## What is Noir? + +Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. + +It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. + +## Who is Noir for? + +Noir can be used for a variety of purposes. + +### Solidity Developers + +Noir currently includes a command to create a Solidity contract which verifies your Noir program. 
This will +be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +a verifier contract. + +### Protocol Developers + +As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for +your stack, or maybe you simply want to use a different proving system. Since Noir does not compile +to a specific proof system, it is possible for protocol developers to replace the PLONK-based +proving system with a different proving system altogether. + +### Blockchain developers + +As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the +proving system and smart contract language has been pre-defined). In order for you to use Noir in +your blockchain, a proving system backend and a smart contract interface +must be implemented for it. + +## What's new about Noir? + +Noir is simple and flexible in its design, as it does not compile immediately to a fixed +NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). + +This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. + +## Current Features + +Compiler: + +- Module System +- For expressions +- Arrays +- Bit Operations +- Binary operations (<, <=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] +- Unsigned integers +- If statements +- Structures and Tuples +- Generics + +ACIR Supported OPCODES: + +- Sha256 +- Blake2s +- Schnorr signature verification +- MerkleMembership +- Pedersen Commitment +- Pedersen Hash +- HashToField + +## Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
+ +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers + +See the section on [dependencies](./modules_packages_crates/dependencies) for more information. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md new file mode 100644 index 00000000000..5eb22170e54 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md @@ -0,0 +1,225 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. 
You can pass all types of expressions to it, as long as they have a fixed size at compile time:
+
+```rust
+fn main(x : Field) // this is fine: passing a Field
+fn main(x : [Field; 2]) // this is also fine: passing an array of Fields with a known size at compile-time
+fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2
+fn main(x : str<5>) // this is fine, as long as you pass a string of size 5
+
+fn main(x : Vec<Field>) // can't compile, has variable size
+fn main(x : [Field]) // can't compile, has variable size
+fn main(....// i think you got it by now
+```
+
+Keep in mind [tests](../nargo/02_testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove:
+
+```rust
+fn main(x : [Field]) {
+    assert(x[0] == 1);
+}
+
+#[test]
+fn test_one() {
+    main([1, 2]);
+}
+```
+
+```bash
+$ nargo test
+[testing] Running 1 test functions
+[testing] Testing test_one... ok
+[testing] All tests passed
+
+$ nargo check
+The application panicked (crashed).
+Message: Cannot have variable sized arrays as a parameter to main
+```
+
+## Call Expressions
+
+Calling a function in Noir is done by using the function name and passing in the necessary
+arguments.
+
+Below we show how to call the `foo` function from the `main` function using a call expression:
+
+```rust
+fn main(x : Field, y : Field) {
+    let z = foo(x);
+}
+
+fn foo(x : Field) -> Field {
+    x + x
+}
+```
+
+## Methods
+
+You can define methods in Noir on any struct type in scope.
+
+```rust
+struct MyStruct {
+    foo: Field,
+    bar: Field,
+}
+
+impl MyStruct {
+    fn new(foo: Field) -> MyStruct {
+        MyStruct {
+            foo,
+            bar: 2,
+        }
+    }
+
+    fn sum(self) -> Field {
+        self.foo + self.bar
+    }
+}
+
+fn main() {
+    let s = MyStruct::new(40);
+    assert(s.sum() == 42);
+}
+```
+
+Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as
+follows:
+
+```rust
+assert(MyStruct::sum(s) == 42);
+```
+
+It is also possible to specialize which method is chosen depending on the [generic](./06_generics.md) type that is used. In this example, the `foo` function returns different values depending on its type:
+
+```rust
+struct Foo<T> {}
+
+impl Foo<u32> {
+    fn foo(self) -> Field { 1 }
+}
+
+impl Foo<u64> {
+    fn foo(self) -> Field { 2 }
+}
+
+fn main() {
+    let f1: Foo<u32> = Foo{};
+    let f2: Foo<u64> = Foo{};
+    assert(f1.foo() + f2.foo() == 3);
+}
+```
+
+Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo<u32>` and `Foo<u64>` like we do above, we cannot also define `foo` in an `impl Foo<T>` since it would be ambiguous which version of `foo` to choose.
+
+```rust
+// Including this impl in the same project as the above snippet would
+// cause an overlapping impls error
+impl Foo<T> {
+    fn foo(self) -> Field { 3 }
+}
+```
+
+## Lambdas
+
+Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`.
+
+```rust
+let add_50 = |val| val + 50;
+assert(add_50(100) == 150);
+```
+
+See [Lambdas](./08_lambdas.md) for more details.
+
+## Attributes
+
+Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`.
+
+Supported attributes include:
+
+- **builtin**: the function is implemented by the compiler, for efficiency purposes.
+- **deprecated**: mark the function as _deprecated_.
Calling the function will generate a warning: `warning: use of deprecated function` +- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details +- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./05_unconstrained.md) and [NoirJS](../noir_js/noir_js.md) for more details. +- **test**: mark the function as unit tests. See [Tests](../nargo/02_testing.md) for more details + +### Field Attribute + +The field attribute defines which field the function is compatible for. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field. +The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form. +As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve. + +Example: we define the function `foo()` three times below. Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve. + +```rust +#[field(bn254)] +fn foo() -> u32 { + 1 +} + +#[field(23)] +fn foo() -> u32 { + 2 +} + +// This commented code would not compile as foo would be defined twice because it is the same field as bn254 +// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)] +// fn foo() -> u32 { +// 2 +// } + +#[field(bls12_381)] +fn foo() -> u32 { + 3 +} +``` + +If the field name is not known to Noir, it will discard the function. Field names are case insensitive. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/02_control_flow.md b/docs/versioned_docs/version-v0.19.4/language_concepts/02_control_flow.md new file mode 100644 index 00000000000..a7f85360197 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/02_control_flow.md @@ -0,0 +1,44 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +--- + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +}; +``` + +The index for loops is of type `u64`. + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. 
+ +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md new file mode 100644 index 00000000000..da02b126059 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md @@ -0,0 +1,97 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| << | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! | Bitwise not of a value | Type must be integer or boolean | +| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +short-circuiting. 
+ +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/04_assert.md b/docs/versioned_docs/version-v0.19.4/language_concepts/04_assert.md new file mode 100644 index 00000000000..7427ec6cc63 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/04_assert.md @@ -0,0 +1,26 @@ +--- +title: Assert Function +description: + Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or + comparison expression that follows to be true, and what happens if the expression is false at + runtime. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. If this expression is false at runtime, the program will fail to +be proven. Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md new file mode 100644 index 00000000000..6b621eda3eb --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md @@ -0,0 +1,96 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." + +keywords: [Noir programming language, unconstrained, open] +--- + + + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. + +## Why? + +Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. 
Thus, it is necessary that when we switch from performing some operation directly inside a circuit to performing it inside an unconstrained environment, the appropriate constraints are still laid down elsewhere in the circuit.
+
+## Example
+
+An in-depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord.
+
+Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s.
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8;
+    }
+
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91
+Backend circuit size: 3619
+```
+
+A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However, we can save a bunch of gates by casting to `u8` a bit earlier. This automatically truncates the bit-shifted value to fit in a `u8`, which allows us to remove the AND against 0xff. This saves us ~480 gates in total.
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8))) as u8;
+    }
+
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75
+Backend circuit size: 3143
+```
+
+Those are some nice savings already, but we can do better. This code is all constrained, so we're proving every step of calculating `out` using `num`, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in.
+
+It turns out that truncating a `u72` into a `u8` is hard to do inside a snark: each time we do `as u8` we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate `num` from `out` than the other way around. All we need to do is multiply each element of `out` by a constant and add them all together, both relatively easy operations inside a snark.
+
+We can then run `u72_to_u8` as unconstrained brillig code in order to calculate `out`, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back `num`. This looks a little like the below:
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let out = u72_to_u8(num);
+
+    let mut reconstructed_num: u72 = 0;
+    for i in 0..8 {
+        reconstructed_num += (out[i] as u72 << (56 - (8 * i)));
+    }
+    assert(num == reconstructed_num);
+    out
+}
+
+unconstrained fn u72_to_u8(num: u72) -> [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8))) as u8;
+    }
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78
+Backend circuit size: 2902
+```
+
+This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before, but they're easier for the backend to prove (resulting in fewer gates).
+
+Generally, we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate the square root of a number, it would be a much better idea to calculate it in brillig and then assert that squaring the result gives back your number.
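+
+To make that concrete, below is a minimal sketch of that square-root pattern. The `sqrt_unconstrained` helper, its `u64` input and the small search bound are illustrative assumptions for this example rather than standard-library features: the unconstrained function does the expensive search, and the constrained caller only checks the cheap inverse relation.
+
+```rust
+fn main(num: u64) -> pub u64 {
+    let root = sqrt_unconstrained(num);
+    // Cheap to verify inside the circuit: squaring the claimed root
+    // must give back `num` (so `num` is assumed to be a perfect square).
+    assert(root * root == num);
+    root
+}
+
+unconstrained fn sqrt_unconstrained(num: u64) -> u64 {
+    // The expensive-to-constrain search happens outside the circuit.
+    // The bound of 256 keeps this sketch short and only covers roots below 256.
+    let mut root: u64 = 0;
+    for candidate in 0..256 {
+        if candidate * candidate <= num {
+            root = candidate;
+        }
+    }
+    root
+}
+```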
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md
new file mode 100644
index 00000000000..9fb4177c2a8
--- /dev/null
+++ b/docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md
@@ -0,0 +1,113 @@
+---
+title: Generics
+description: Learn how to use Generics in Noir
+keywords: [Noir, Rust, generics, functions, structs]
+---
+
+Generics allow you to use the same functions with multiple different concrete data types. You can
+read more about the concept of generics in the Rust documentation
+[here](https://doc.rust-lang.org/book/ch10-01-syntax.html).
+
+Here is a trivial example showing the identity function that supports any type. In Rust, it is
+common to refer to the most general type as `T`. We follow the same convention in Noir.
+
+```rust
+fn id<T>(x: T) -> T {
+    x
+}
+```
+
+## In Structs
+
+Generics are useful for specifying types in structs. For example, we can specify that a field in a
+struct will be of a certain generic type. In this case `value` is of type `T`.
+
+```rust
+struct RepeatedValue<T> {
+    value: T,
+    count: Field,
+}
+
+impl<T> RepeatedValue<T> {
+    fn new(value: T) -> Self {
+        Self { value, count: 1 }
+    }
+
+    fn increment(mut repeated: Self) -> Self {
+        repeated.count += 1;
+        repeated
+    }
+
+    fn print(self) {
+        for _i in 0 .. self.count {
+            dep::std::println(self.value);
+        }
+    }
+}
+
+fn main() {
+    let mut repeated = RepeatedValue::new("Hello!");
+    repeated = repeated.increment();
+    repeated.print();
+}
+```
+
+The `print` function will print `Hello!` an arbitrary number of times, twice in this case.
+
+If we want to be generic over array lengths (which are type-level integers), we can use numeric
+generics. Using these looks just like using regular generics, but these generics can resolve to
+integers at compile-time, rather than resolving to types. Here's an example of a struct that is
+generic over the size of the array it contains internally:
+
+```rust
+struct BigInt<N> {
+    limbs: [u32; N],
+}
+
+impl<N> BigInt<N> {
+    // `N` is in scope of all methods in the impl
+    fn first(first: BigInt<N>, second: BigInt<N>) -> Self {
+        assert(first.limbs != second.limbs);
+        first
+    }
+
+    fn second(first: BigInt<N>, second: Self) -> Self {
+        assert(first.limbs != second.limbs);
+        second
+    }
+}
+```
+
+## Calling functions on generic parameters
+
+Unlike Rust, Noir does not have traits, so how can one translate the equivalent of a trait bound in
+Rust into Noir? That is, how can we write a function that is generic over some type `T`, while also
+requiring there is a function like `eq: fn(T, T) -> bool` that works on the type?
+
+The answer is that we can translate this by passing in the function manually. Here's an example of
+implementing array equality in Noir:
+
+```rust
+fn array_eq<T, N>(array1: [T; N], array2: [T; N], elem_eq: fn(T, T) -> bool) -> bool {
+    if array1.len() != array2.len() {
+        false
+    } else {
+        let mut result = true;
+        for i in 0 ..
array1.len() { + result &= elem_eq(array1[i], array2[i]); + } + result + } +} + +fn main() { + assert(array_eq([1, 2, 3], [1, 2, 3], |a, b| a == b)); + + // We can use array_eq even for arrays of structs, as long as we have + // an equality function for these structs we can pass in + let array = [MyStruct::new(), MyStruct::new()]; + assert(array_eq(array, array, MyStruct::eq)); +} +``` + +You can see an example of generics in the tests +[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md new file mode 100644 index 00000000000..ad902c42c9b --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md @@ -0,0 +1,92 @@ +--- +title: Mutability +description: + Learn about mutable variables, constants, and globals in Noir programming language. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. + +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: i32) { + x = 4; +} +``` + +## Comptime Values + +:::warning + +The 'comptime' keyword was removed in version 0.10. The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. + +::: + +## Globals + +Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array +annotations for function parameters and can be imported from submodules. + +```rust +global N: Field = 5; // Same as `global N: Field = 5` + +fn main(x : Field, y : [Field; N]) { + let res = x * N; + + assert(res == y[0]); + + let res2 = x * mysubmodule::N; + assert(res != res2); +} + +mod mysubmodule { + use dep::std; + + global N: Field = 10; + + fn my_helper() -> Field { + let x = N; + x + } +} +``` + +## Why only local mutability? + +Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting +without applying additional overhead to the user. Modeling a mutable reference is not as +straightforward as on conventional architectures and would incur some possibly unexpected overhead. 
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/08_lambdas.md b/docs/versioned_docs/version-v0.19.4/language_concepts/08_lambdas.md new file mode 100644 index 00000000000..ae1e6aecab1 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/08_lambdas.md @@ -0,0 +1,80 @@ +--- +title: Lambdas +description: Learn how to use anonymous functions in Noir programming language. +keywords: [Noir programming language, lambda, closure, function, anonymous function] +--- + +## Introduction + +Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +A block can be used as the body of a lambda, allowing you to declare local variables inside it: + +```rust +let cool = || { + let x = 100; + let y = 100; + x + y +} + +assert(cool() == 200); +``` + +## Closures + +Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**. In this example `x` is defined inside `main` and is accessed from within the lambda: + +```rust +fn main() { + let x = 100; + let closure = || x + 150; + assert(closure() == 250); +} +``` + +## Passing closures to higher-order functions + +It may catch you by surprise that the following code fails to compile: + +```rust +fn foo(f: fn () -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // error :( +} +``` + +The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo` +expects a regular function as an argument - those are incompatible. +:::note + +Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters. + +E.g. in |x| x + y, y would be a captured variable, but x would not be, since it is a parameter of the closure. + +::: +The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure - +in this example that's `(Field, Field)`. + +The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called +with closures with any environment, as well as with regular functions: + +```rust +fn foo(f: fn[Env]() -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // compiles fine + assert(foo(|| 60) == 60); // compiles fine +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/09_comments.md b/docs/versioned_docs/version-v0.19.4/language_concepts/09_comments.md new file mode 100644 index 00000000000..3bb4d2f25a4 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/09_comments.md @@ -0,0 +1,32 @@ +--- +title: Comments +description: + Learn how to write comments in Noir programming language. A comment is a line of code that is + ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments + are supported in Noir. +keywords: [Noir programming language, comments, single-line comments, multi-line comments] +--- + +A comment is a line in your codebase which the compiler ignores, however it can be read by +programmers. + +Here is a single line comment: + +```rust +// This is a comment and is ignored +``` + +`//` is used to tell the compiler to ignore the rest of the line. 
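+
+As a small illustrative sketch, a comment can also share a line with code; the compiler ignores everything after the `//`:
+
+```rust
+fn main(x : Field) {
+    let doubled = x * 2; // everything after `//` on this line is ignored
+    assert(doubled == x + x);
+}
+```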
+ +Noir also supports multi-line block comments. Start a block comment with `/*` and end the block with `*/`. + +Noir does not natively support doc comments. You may be able to use [Rust doc comments](https://doc.rust-lang.org/reference/comments.html) in your code to leverage some Rust documentation build tools with Noir code. + +```rust +/* + This is a block comment describing a complex function. +*/ +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/10_distinct.md b/docs/versioned_docs/version-v0.19.4/language_concepts/10_distinct.md new file mode 100644 index 00000000000..e7ff7f5017a --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/10_distinct.md @@ -0,0 +1,63 @@ +--- +title: Distinct Witnesses +--- + +The `distinct` keyword prevents repetitions of witness indices in the program's ABI. This ensures +that the witnesses being returned as public inputs are all unique. + +The `distinct` keyword is only used for return values on program entry points (usually the `main()` +function). + +When using `distinct` and `pub` simultaneously, `distinct` comes first. See the example below. + +You can read more about the problem this solves +[here](https://github.com/noir-lang/noir/issues/1183). + +## Example + +Without the `distinct` keyword, the following program + +```rust +fn main(x : pub Field, y : pub Field) -> pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + "return_witnesses": [3, 2, 4, 4] + } +} +``` + +Whereas (with the `distinct` keyword) + +```rust +fn main(x : pub Field, y : pub Field) -> distinct pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + //... + "return_witnesses": [3, 4, 5, 6] + } +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/11_shadowing.md b/docs/versioned_docs/version-v0.19.4/language_concepts/11_shadowing.md new file mode 100644 index 00000000000..efd743e764f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/11_shadowing.md @@ -0,0 +1,43 @@ +--- +title: Shadowing +--- + +Noir allows for inheriting variables' values and re-declaring them with the same name similar to Rust, known as shadowing. + +For example, the following function is valid in Noir: + +```rust +fn main() { + let x = 5; + + { + let x = x * 2; + assert (x == 10); + } + + assert (x == 5); +} +``` + +In this example, a variable x is first defined with the value 5. + +The local scope that follows shadows the original x, i.e. creates a local mutable x based on the value of the original x. It is given a value of 2 times the original x. + +When we return to the main scope, x once again refers to just the original x, which stays at the value of 5. + +## Temporal mutability + +One way that shadowing is useful, in addition to ergonomics across scopes, is for temporarily mutating variables. + +```rust +fn main() { + let age = 30; + // age = age + 5; // Would error as `age` is immutable by default. + + let mut age = age + 5; // Temporarily mutates `age` with a new value. + + let age = age; // Locks `age`'s mutability again. 
+ + assert (age == 35); +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md new file mode 100644 index 00000000000..d546cc463a8 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md @@ -0,0 +1,96 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. +keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). +> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. + +## Type Aliases + +A type alias is a new name for an existing type. 
Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](./06_generics.md): + +```rust +type Id = Size; + +fn main() { + let id: Id = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +### BigInt + +You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/00_fields.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/00_fields.md new file mode 100644 index 00000000000..658a0441ffb --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/00_fields.md @@ -0,0 +1,165 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. + +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. `u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. 
+ +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. + +```rust +fn sgn0(self) -> u1 +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md new file mode 100644 index 00000000000..b1e7ad11bfd --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md @@ -0,0 +1,112 @@ +--- +title: Integers +description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. +keywords: [noir, integer types, methods, examples, arithmetic] +--- + +An integer type is a range constrained field type. The Noir frontend supports arbitrarily-sized, both unsigned and signed integer types. + +:::info + +When an integer is defined in Noir without a specific type, it will default to `Field`. + +The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible. + +::: + +## Unsigned Integers + +An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: u8 = 1; + let y: u8 = 1; + let z = x + y; + assert (z == 2); +} +``` + +The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $\\2^{8}-1\\$). + +## Signed Integers + +A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: i8 = -1; + let y: i8 = -1; + let z = x + y; + assert (z == -2); +} +``` + +The bit size determines the maximum and minimum range of value the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $\\-2^{7}\\$ to $\\2^{7}-1\\$). + +:::tip + +If you are using the default proving backend with Noir, both even (e.g. _u2_, _i2_) and odd (e.g. 
_u3_, _i3_) arbitrarily-sized integer types up to 127 bits (i.e. _u127_ and _i127_) are supported. + +::: + +## Overflows + +Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove: + +```rust +fn main(x: u8, y: u8) { + let z = x + y; +} +``` + +With: + +```toml +x = "255" +y = "1" +``` + +Would result in: + +``` +$ nargo prove +error: Assertion failed: 'attempt to add with overflow' +┌─ ~/src/main.nr:9:13 +│ +│ let z = x + y; +│ ----- +│ += Call stack: + ... +``` + +A similar error would happen with signed integers: + +```rust +fn main() { + let x: i8 = -118; + let y: i8 = -11; + let z = x + y; +} +``` + +### Wrapping methods + +Although integer overflow is expected to error, some use-cases rely on wrapping. For these use-cases, the standard library provides `wrapping` variants of certain common operations: + +```rust +fn wrapping_add(x: T, y: T) -> T; +fn wrapping_sub(x: T, y: T) -> T; +fn wrapping_mul(x: T, y: T) -> T; +``` + +Example of how it is used: + +```rust +use dep::std; + +fn main(x: u8, y: u8) -> pub u8 { + std::wrapping_add(x + y) +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md new file mode 100644 index 00000000000..885db167d83 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md @@ -0,0 +1,30 @@ +--- +title: Booleans +description: + Delve into the Boolean data type in Noir. Understand its methods, practical examples, and best practices for using Booleans in your Noir programs. +keywords: + [ + noir, + boolean type, + methods, + examples, + logical operations, + ] +--- + + +The `bool` type in Noir has two possible values: `true` and `false`: + +```rust +fn main() { + let t = true; + let f: bool = false; +} +``` + +> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for +> `false` in _Verifier.toml_. + +The boolean type is most commonly used in conditionals like `if` expressions and `assert` +statements. More about conditionals is covered in the [Control Flow](../control_flow) and +[Assert Function](../assert) sections. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/03_strings.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/03_strings.md new file mode 100644 index 00000000000..c42f34ec3ad --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/03_strings.md @@ -0,0 +1,63 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with +`std::println()`. See more about [Logging](../../standard_library/logging). + +```rust +use dep::std; + +fn main(message : pub str<11>, hex_as_string : str<4>) { + std::println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. 
+ +```rust +fn main() { + let message = "hello world"; + let message_bytes = message.as_bytes(); + let mut message_vec = message.as_bytes_vec(); + assert(message_bytes.len() == 11); + assert(message_bytes[0] == 104); + assert(message_bytes[0] == message_vec.get(0)); +} +``` + +## Escape characters + +You can use escape characters for your strings: + +| Escape Sequence | Description | +|-----------------|-----------------| +| `\r` | Carriage Return | +| `\n` | Newline | +| `\t` | Tab | +| `\0` | Null Character | +| `\"` | Double Quote | +| `\\` | Backslash | + +Example: + +```rust +let s = "Hello \"world" // prints "Hello "world" +let s = "hey \tyou"; // prints "hey you" +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md new file mode 100644 index 00000000000..bdbd1798bef --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md @@ -0,0 +1,244 @@ +--- +title: Arrays +description: + Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code. +keywords: + [ + noir, + array type, + methods, + examples, + indexing, + ] +--- + +An array is one way of grouping together values into one compound type. Array types can be inferred +or explicitly specified via the syntax `[; ]`: + +```rust +fn main(x : Field, y : Field) { + let my_arr = [x, y]; + let your_arr: [Field; 2] = [x, y]; +} +``` + +Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. + +Array elements can be accessed using indexing: + +```rust +fn main() { + let a = [1, 2, 3, 4, 5]; + + let first = a[0]; + let second = a[1]; +} +``` + +All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group +a `Field` value and a `u8` value together for example. + +You can write mutable arrays, like: + +```rust +fn main() { + let mut arr = [1, 2, 3, 4, 5]; + assert(arr[0] == 1); + + arr[0] = 42; + assert(arr[0] == 42); +} +``` + +You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0. + +```rust +let array: [Field; 32] = [0; 32]; +``` + +Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array: + +```rust +let array: [Field; 32] = [0; 32]; +let sl = array.as_slice() +``` + +You can define multidimensional arrays: + +```rust +let array : [[Field; 2]; 2]; +let element = array[0][0]; +``` + +## Types + +You can create arrays of primitive types or structs. There is not yet support for nested arrays +(arrays of arrays) or arrays of structs that contain arrays. + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for arrays: + +### len + +Returns the length of an array + +```rust +fn len(_array: [T; N]) -> comptime Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. 
If you need a sort function to +sort any type, you should use the function `sort_via` described below. + +```rust +fn sort(_array: [T; N]) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. + +```rust +fn map(f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as starting element. + +```rust +fn reduce(f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/05_slices.mdx b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/05_slices.mdx new file mode 100644 index 00000000000..1be0ec4a137 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/05_slices.mdx @@ -0,0 +1,146 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. +keywords: [noir, slice type, methods, examples, subarrays] +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. 
+ +```rust +use dep::std::slice; + +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. + +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. + +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = []; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. + +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. + +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. + +### append + +Loops over a slice and adds it to the end of another. + +```rust +fn append(mut self, other: Self) -> Self +``` + +Example: + +```rust +let append = [1, 2].append([3, 4, 5]); +``` + +### insert + +Inserts an element at a specified index and shifts all following elements by 1. + +```rust +fn insert(_self: Self, _index: Field, _elem: T) -> Self +``` + +Example: + +```rust +new_slice = rest_of_slice.insert(2, 100); +assert(new_slice[2] == 100); +``` + +View the corresponding test file [here][test-file]. + +### remove + +Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. + +```rust +fn remove(_self: Self, _index: Field) -> (Self, T) +``` + +Example: + +```rust +let (remove_slice, removed_elem) = slice.remove(3); +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/06_vectors.mdx b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/06_vectors.mdx new file mode 100644 index 00000000000..4617e90d038 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/06_vectors.mdx @@ -0,0 +1,172 @@ +--- +title: Vectors +description: Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +keywords: [noir, vector type, methods, examples, dynamic arrays] +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. 
+ +Example: + +```rust +use dep::std::collections::vec::Vec; + +let mut vector: Vec = Vec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +``` + +## Methods + +### new + +Creates a new, empty vector. + +```rust +pub fn new() -> Self { + Self { slice: [] } +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### from_slice + +Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. + +```rust +pub fn from_slice(slice: [T]) -> Self { + Self { slice } +} +``` + +Example: + +```rust +let arr: [Field] = [1, 2, 3]; +let vector_from_slice = Vec::from_slice(arr); +assert(vector_from_slice.len() == 3); +``` + +### get + +Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. + +```rust +pub fn get(self, index: Field) -> T { + self.slice[index] +} +``` + +Example: + +```rust +let vector: Vec = Vec::from_slice([10, 20, 30]); +assert(vector.get(1) == 20); +``` + +### push + +Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. + +```rust +pub fn push(&mut self, elem: T) { + self.slice = self.slice.push_back(elem); +} +``` + +Example: + +```rust +let mut vector: Vec = Vec::new(); +vector.push(10); +assert(vector.len() == 1); +``` + +### pop + +Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. + +```rust +pub fn pop(&mut self) -> T { + let (popped_slice, last_elem) = self.slice.pop_back(); + self.slice = popped_slice; + last_elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) { + self.slice = self.slice.insert(index, elem); +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. + +```rust +pub fn remove(&mut self, index: Field) -> T { + let (new_slice, elem) = self.slice.remove(index); + self.slice = new_slice; + elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` + +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field { + self.slice.len() +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/07_tuples.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/07_tuples.md new file mode 100644 index 00000000000..5f6cab974a8 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/07_tuples.md @@ -0,0 +1,47 @@ +--- +title: Tuples +description: + Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code. 
+keywords: + [ + noir, + tuple type, + methods, + examples, + multi-value containers, + ] +--- + +A tuple collects multiple values like an array, but with the added ability to collect values of +different types: + +```rust +fn main() { + let tup: (u8, u64, Field) = (255, 500, 1000); +} +``` + +One way to access tuple elements is via destructuring using pattern matching: + +```rust +fn main() { + let tup = (1, 2); + + let (one, two) = tup; + + let three = one + two; +} +``` + +Another way to access tuple elements is via direct member access, using a period (`.`) followed by +the index of the element we want to access. Index `0` corresponds to the first tuple element, `1` to +the second and so on: + +```rust +fn main() { + let tup = (5, 6, 7, 8); + + let five = tup.0; + let eight = tup.3; +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/08_structs.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/08_structs.md new file mode 100644 index 00000000000..35421734639 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/08_structs.md @@ -0,0 +1,69 @@ +--- +title: Structs +description: + Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs. +keywords: + [ + noir, + struct type, + methods, + examples, + data structures, + ] +--- + +A struct also allows for grouping multiple values of different types. Unlike tuples, we can also +name each field. + +> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the +> field type of Noir. + +Defining a struct requires giving it a name and listing each field within as `: ` pairs: + +```rust +struct Animal { + hands: Field, + legs: Field, + eyes: u8, +} +``` + +An instance of a struct can then be created with actual values in `: ` pairs in any +order. Struct fields are accessible using their given names: + +```rust +fn main() { + let legs = 4; + + let dog = Animal { + eyes: 2, + hands: 0, + legs, + }; + + let zero = dog.hands; +} +``` + +Structs can also be destructured in a pattern, binding each field to a new variable: + +```rust +fn main() { + let Animal { hands, legs: feet, eyes } = get_octopus(); + + let ten = hands + feet + eyes as u8; +} + +fn get_octopus() -> Animal { + let octopus = Animal { + hands: 0, + legs: 8, + eyes: 2, + }; + + octopus +} +``` + +The new variables can be bound with names different from the original struct field names, as +showcased in the `legs --> feet` binding in the example above. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/09_references.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/09_references.md new file mode 100644 index 00000000000..b0c35ce2cb9 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/09_references.md @@ -0,0 +1,22 @@ +--- +title: References +--- + +Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it. 
+ +Example: + +```rust +fn main() { + let mut x = 2; + + // you can reference x as &mut and pass it to multiplyBy2 + multiplyBy2(&mut x); +} + +// you can access &mut here +fn multiplyBy2(x: &mut Field) { + // and dereference it with * + *x = *x * 2; +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/10_function_types.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/10_function_types.md new file mode 100644 index 00000000000..1ec92efd594 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/10_function_types.md @@ -0,0 +1,25 @@ +--- +title: Function types +--- + +Noir supports higher-order functions. The syntax for a function type is as follows: + +```rust +fn(arg1_type, arg2_type, ...) -> return_type +``` + +Example: + +```rust +fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field + assert(f() == 100); +} + +fn main() { + assert_returns_100(|| 100); // ok + assert_returns_100(|| 150); // fails +} +``` + +A function type also has an optional capture environment - this is necessary to support closures. +See [Lambdas](../08_lambdas.md) for more details. diff --git a/docs/versioned_docs/version-v0.19.4/migration_notes.md b/docs/versioned_docs/version-v0.19.4/migration_notes.md new file mode 100644 index 00000000000..e87eb1feaba --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/migration_notes.md @@ -0,0 +1,91 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. + +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. + +To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`. + +## ≥0.14 + +The index of the [for loops](./language_concepts/02_control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: + +```rust +for i in 0..10 { + let i = i as Field; +} +``` + +## ≥v0.11.0 and Nargo backend + +From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading to the versions per usual steps might lead to: + +### `backend encountered an error` + +This is likely due to the existing locally installed version of proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo prove +``` + +with you Noir program. + +This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. + +### `backend encountered an error: illegal instruction` + +On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions. 
+ +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz +``` + +This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. + +The gzipped filed is running this bash script: , where we need to gzip it as the Nargo currently expect the backend to be zipped up. + +Then run: + +``` +DESIRED_BINARY_VERSION=0.8.1 nargo info +``` + +This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than native binary. + +0.8.1 indicates bb.js version 0.8.1, so if you change that it will update to a different version or the default version in the script if none was supplied. diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md new file mode 100644 index 00000000000..fb83a33d94e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,42 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). + +### Crate Root + +Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. 
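As a minimal sketch (the file contents below are illustrative placeholders, not taken from a real project), a binary crate's root at `src/main.nr` defines the `main` entry point that compilation starts from:

```rust
// src/main.nr -- the crate root of a binary crate compiled with Nargo
fn main(x: Field, y: pub Field) {
    // the circuit proves knowledge of a private x that differs from the public y
    assert(x != y);
}
```

A library crate follows the same idea, with its root at `src/lib.nr` defining shared functions and no `main`.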
+ +## Packages + +A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..75f95aaa305 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md @@ -0,0 +1,123 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. +keywords: [Nargo, dependencies, GitHub, package management, versioning] +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── liba + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +libA = { path = "../liba" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. 
For example, to import the +ecrecover-noir library and local liba referenced above: + +```rust +use dep::ecrecover; +use dep::libA; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md new file mode 100644 index 00000000000..147c9b284e8 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md @@ -0,0 +1,104 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. 
+keywords: [Noir, Rust, modules, organizing files, sub-modules] +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. + +## Purpose of Modules + +Modules are used to organise files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. + +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/workspaces.md new file mode 100644 index 00000000000..a979ef9f0a5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/workspaces.md @@ -0,0 +1,39 @@ +--- +title: Workspaces +--- + +Workspaces are a feature of nargo that allow you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. + +Each Noir project (with it's own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. + +For a project with the following structure: + +```tree +├── crates +│   ├── a +│   │   ├── Nargo.toml +│   │   └── src +│   │   └── main.nr +│   └── b +│   ├── Nargo.toml +│   └── src +│   └── main.nr +├── Nargo.toml +└── Prover.toml +``` + +You can define a workspace in Nargo.toml like so: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. + +`default-member` indicates which package various commands process by default. + +Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. 
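As an illustration, assume a workspace member library named `lib_a` (a hypothetical name) and a binary member that lists `lib_a = { path = "../lib_a" }` under `[dependencies]` in its own Nargo.toml. The binary member then imports it like any other dependency:

```rust
// src/main.nr of the consuming binary member
// assumes lib_a's src/lib.nr defines an `is_even` helper -- a placeholder name
use dep::lib_a;

fn main(x: Field) {
    assert(lib_a::is_even(x));
}
```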
+ +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md b/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md new file mode 100644 index 00000000000..65e2bdb44e3 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md @@ -0,0 +1,250 @@ +--- +title: Commands +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +--- + +## General options + +| Option | Description | +| -------------------- | -------------------------------------------------- | +| `--show-ssa` | Emit debug information for the intermediate SSA IR | +| `--deny-warnings` | Quit execution when warnings are emitted | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo help [subcommand]` + +Prints the list of available commands or specific information of a subcommand. + +_Arguments_ + +| Argument | Description | +| -------------- | -------------------------------------------- | +| `` | The subcommand whose help message to display | + +## `nargo backend` + +Installs and selects custom backends used to generate and verify proofs. + +### Commands + +| Command | Description | +| ----------- | --------------------------------------------------------- | +| `current` | Prints the name of the currently active backend | +| `ls` | Prints the list of currently installed backends | +| `use` | Select the backend to use | +| `install` | Install a new backend from a URL | +| `uninstall` | Uninstalls a backend | +| `help` | Print this message or the help of the given subcommand(s) | + +### Options + +| Option | Description | +| ------------ | ----------- | +| `-h, --help` | Print help | + +## `nargo check` + +Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output +values of the Noir program respectively. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to check | +| `--workspace` | Check all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +### `nargo codegen-verifier` + +Generate a Solidity verifier smart contract for the program. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to codegen | +| `--workspace` | Codegen all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo compile` + +Compile the program into a JSON build artifact file containing the ACIR representation and the ABI +of the circuit. This build artifact can then be used to generate and verify proofs. + +You can also use "build" as an alias for compile (e.g. `nargo build`). 
+ +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `--include-keys` | Include Proving and Verification keys in the build artifacts | +| `--package ` | The name of the package to compile | +| `--workspace` | Compile all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo new ` + +Creates a new Noir project in a new folder. + +**Arguments** + +| Argument | Description | +| -------- | -------------------------------- | +| `` | The path to save the new project | + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: package directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo init` + +Creates a new Noir project in the current directory. + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: current directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo execute [WITNESS_NAME]` + +Runs the Noir program and prints its return value. + +**Arguments** + +| Argument | Description | +| ---------------- | ----------------------------------------- | +| `[WITNESS_NAME]` | Write the execution witness to named file | + +### Options + +| Option | Description | +| --------------------------------- | ------------------------------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `--package ` | The name of the package to execute | +| `--workspace` | Execute all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +_Usage_ + +The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which +must be filled in. + +To save the witness to file, run the command with a value for the `WITNESS_NAME` argument. A +`.tr` file will then be saved in the `./target` folder. + +## `nargo prove` + +Creates a proof for the program. + +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--verify` | Verify proof after proving | +| `--package ` | The name of the package to prove | +| `--workspace` | Prove all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid. 
+ +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--package ` | The name of the package to verify | +| `--workspace` | Verify all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo test [TEST_NAME]` + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. To print `println` statements in tests, use the `--show-output` flag. + +Takes an optional `--exact` flag which allows you to select tests based on an exact name. + +See an example on the [testing page](./testing). + +### Options + +| Option | Description | +| --------------------- | -------------------------------------- | +| `--show-output` | Display output of `println` statements | +| `--exact` | Only run tests that match exactly | +| `--package ` | The name of the package to test | +| `--workspace` | Test all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo info` + +Prints a table containing the information of the package. + +Currently the table provide + +1. The number of ACIR opcodes +2. The final number gates in the circuit used by a backend + +If the file contains a contract the table will provide the +above information about each function of the contract. + +## `nargo lsp` + +Start a long-running Language Server process that communicates over stdin/stdout. +Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). + +## `nargo fmt` + +Automatically formats your Noir source code based on the default formatting settings. diff --git a/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md new file mode 100644 index 00000000000..da767274efd --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md @@ -0,0 +1,61 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +the contraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. 
For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/docs/versioned_docs/version-v0.19.4/nargo/03_solidity_verifier.md b/docs/versioned_docs/version-v0.19.4/nargo/03_solidity_verifier.md new file mode 100644 index 00000000000..9ac60cb0ba7 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/nargo/03_solidity_verifier.md @@ -0,0 +1,129 @@ +--- +title: Solidity Verifier +description: + Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier + contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart + contract. Read more to find out! +keywords: + [ + solidity verifier, + smart contract, + blockchain, + compiler, + plonk_vk.sol, + EVM blockchain, + verifying Noir programs, + proving backend, + Barretenberg, + ] +--- + +For certain applications, it may be desirable to run the verifier as a smart contract instead of on +a local machine. + +Compile a Solidity verifier contract for your Noir program by running: + +```sh +nargo codegen-verifier +``` + +A new `contract` folder would then be generated in your project directory, containing the Solidity +file `plonk_vk.sol`. It can be deployed on any EVM blockchain acting as a verifier smart contract. + +> **Note:** It is possible to compile verifier contracts of Noir programs for other smart contract +> platforms as long as the proving backend supplies an implementation. +> +> Barretenberg, the default proving backend for Nargo, supports compilation of verifier contracts in +> Solidity only for the time being. + +## Verify + +To verify a proof using the Solidity verifier contract, call the `verify` function with the +following signature: + +```solidity +function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) +``` + +You can see an example of how the `verify` function is called in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): + +```solidity +function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) { + // ... + bytes32[] memory publicInputs = new bytes32[](4); + publicInputs[0] = merkleRoot; + publicInputs[1] = bytes32(proposalId); + publicInputs[2] = bytes32(vote); + publicInputs[3] = nullifierHash; + require(verifier.verify(proof, publicInputs), "Invalid proof"); +``` + +### Public Inputs + +:::tip + +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in +Noir. + +Under the hood, the return value is passed as an input to the circuit and is checked at the end of +the circuit program. + +::: + +The verifier contract uses the output (return) value of a Noir program as a public input. 
So if you +have the following function + +```rust +fn main( + // Public inputs + pubkey_x: pub Field, + pubkey_y: pub Field, + // Private inputs + priv_key: Field, +) -> pub Field +``` + +then `verify` in `plonk_vk.sol` will expect 3 public inputs. Passing two inputs will result in an +error like `Reason: PUBLIC_INPUT_COUNT_INVALID(3, 2)`. + +In this case the 3 inputs to `verify` would be ordered as `[pubkey_x, pubkey_y, return]`. + +#### Struct inputs + +Consider the following program: + +```rust +struct Type1 { + val1: Field, + val2: Field, +} + +struct Nested { + t1: Type1, + is_true: bool, +} + +fn main(x: pub Field, nested: pub Nested, y: pub Field) { + //... +} +``` + +Structs will be flattened so that the array of inputs is 1-dimensional array. The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` + +## Noir for EVM chains + +You can currently deploy the Solidity verifier contracts to most EVM compatible chains. EVM chains that have been tested and are known to work include: + +- Optimism +- Arbitrum +- Polygon PoS +- Scroll +- Celo + +Other EVM chains should work, but have not been tested directly by our team. If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. + +### Unsupported chains + +Unfortunately not all "EVM" chains are supported. + +**zkSync** and the **Polygon zkEVM** do _not_ currently support proof verification via Solidity verifier contracts. They are missing the bn256 precompile contract that the verifier contract requires. Once these chains support this precompile, they may work. diff --git a/docs/versioned_docs/version-v0.19.4/nargo/04_language_server.md b/docs/versioned_docs/version-v0.19.4/nargo/04_language_server.md new file mode 100644 index 00000000000..48c01465f6e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/nargo/04_language_server.md @@ -0,0 +1,42 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. + +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. 
You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md new file mode 100644 index 00000000000..c51ed61de52 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md @@ -0,0 +1,260 @@ +--- +title: End-to-end +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs] +--- + +NoirJS works both on the browser and on the server, and works for both ESM and CJS module systems. In this page, we will learn how can we write a simple test and a simple web app to verify the standard Noir example. + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Before we start + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.18.x matches `noir_js@0.18.x`, etc. + +In this guide, we will be pinned to 0.17.0. + +::: + +Make sure you have Node installed on your machine by opening a terminal and executing `node --version`. If you don't see a version, you should install [node](https://github.com/nvm-sh/nvm). You can also use `yarn` if you prefer that package manager over npm (which comes with node). + +First of all, follow the the [Nargo guide](../../getting_started/00_nargo_installation.md) to install nargo version 0.17.0 and create a new project with `nargo new circuit`. Once there, `cd` into the `circuit` folder. You should then be able to compile your circuit into `json` format and see it inside the `target` folder: + +```bash +nargo compile +``` + +Your folder structure should look like: + +```tree +. 
+└── circuit + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +## Starting a new project + +Go back to the previous folder and start a new project by running run `npm init`. You can configure your project or just leave the defaults, and see a `package.json` appear in your root folder. + +## Installing dependencies + +We'll need two `npm` packages. These packages will provide us the methods we need to run and verify proofs: + +```bash +npm i @noir-lang/backend_barretenberg@^0.17.0 @noir-lang/noir_js@^0.17.0 +``` + +To serve our page, we can use a build tool such as `vite`. Because we're gonna use some `wasm` files, we need to install a plugin as well. Run: + +```bash +npm i --save-dev vite rollup-plugin-copy +``` + +Since we're on the dependency world, we may as well define a nice starting script. Vite makes it easy. Just open `package.json`, find the block "scripts" and add this just below the line with `"test" : "echo......."`: + +```json + "start": "vite --open" +``` + +If you want do build a static website, you can also add some build and preview scripts: + +```json + "build": "vite build", + "preview": "vite preview" +``` + +## Vite plugins + +Vite is great, but support from `wasm` doesn't work out-of-the-box. We're gonna write a quick plugin and use another one. Just copy and paste this into a file named `vite.config.js`. You don't need to understand it, just trust me bro. + +```js +import { defineConfig } from 'vite'; +import copy from 'rollup-plugin-copy'; +import fs from 'fs'; +import path from 'path'; + +const wasmContentTypePlugin = { + name: 'wasm-content-type-plugin', + configureServer(server) { + server.middlewares.use(async (req, res, next) => { + if (req.url.endsWith('.wasm')) { + res.setHeader('Content-Type', 'application/wasm'); + const newPath = req.url.replace('deps', 'dist'); + const targetPath = path.join(__dirname, newPath); + const wasmContent = fs.readFileSync(targetPath); + return res.end(wasmContent); + } + next(); + }); + }, +}; + +export default defineConfig(({ command }) => { + if (command === 'serve') { + return { + plugins: [ + copy({ + targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], + copySync: true, + hook: 'buildStart', + }), + command === 'serve' ? wasmContentTypePlugin : [], + ], + }; + } + + return {}; +}); +``` + +## HTML + +Here's the simplest HTML with some terrible UI. Create a file called `index.html` and paste this: + +```html + + + + + + +

  <body>
    <script type="module" src="/app.js"></script>
    <h1>Very basic Noir app</h1>
    <div id="logs"><h2>Logs</h2></div>
    <div id="results"><h2>Proof</h2></div>
  </body>
+ + +``` + +## Some good old vanilla Javascript + +Create a new file `app.js`, which is where our javascript code will live. Let's start with this code inside: + +```js +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} +``` + +We can manipulate our website with this little function, so we can see our website working. + +## Adding Noir + +If you come from the previous page, your folder structure should look like this: + +```tree +├── app.js +├── circuit +│ ├── Nargo.toml +│ ├── src +│ │ └── main.nr +│ └── target +│ └── circuit.json +├── index.html +├── package.json +└── vite.config.js +``` + +You'll see other files and folders showing up (like `package-lock.json`, `yarn.lock`, `node_modules`) but you shouldn't have to care about those. + +## Importing our dependencies + +We're starting with the good stuff now. At the top of the new javascript file, import the packages: + +```ts +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +We also need to import the `circuit` JSON file we created. If you have the suggested folder structure, you can add this line: + +```ts +import circuit from './circuit/target/circuit.json'; +``` + +## Write code + +:::note + +We're gonna be adding code inside the `document.addEventListener...etc` block: + +```js +// forget stuff here +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); +// forget stuff here +``` + +::: + +Our dependencies exported two classes: `BarretenbergBackend` and `Noir`. Let's `init` them and add some logs, just to flex: + +```ts +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +``` + +## Proving + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +const input = { x: 1, y: 2 }; +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateFinalProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm start` (or `yarn start`). If it doesn't open a browser for you, just visit `localhost:5173`. On a modern laptop, proof will generate in less than 100ms, and you'll see this: + +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +If you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human. + +In any case, this means your proof was generated! But you shouldn't trust me just yet. Add these lines to see it being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verification = await noir.verifyFinalProof(proof); +if (verification) display('logs', 'Verifying proof... ✅'); +``` + +By saving, your app will refresh and here's our complete Tiny Noir App! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Further Reading + +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). 
The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/noir_js.md b/docs/versioned_docs/version-v0.19.4/noir_js/noir_js.md new file mode 100644 index 00000000000..f895b22eaf8 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/noir_js.md @@ -0,0 +1,36 @@ +--- +title: NoirJS +description: Interact with Noir in Typescript or Javascript +keywords: [Noir project, javascript, typescript, node.js, browser, react] +--- + +NoirJS is a TypeScript library that make it easy to use Noir on your dapp, webapp, Node.js server, website, etc. + +A typical workflow would be composed of two major elements: + +- NoirJS +- Proving backend of choice's JavaScript package + + + +To install NoirJS, install Node.js if you have not already and run this in your JavaScript project: + +```bash +npm i @noir-lang/noir_js +``` + +## Proving backend + +Since Noir is backend agnostic, you can instantiate NoirJS without any backend (i.e. to execute a function). But for proving, you would have to instantiate NoirJS with any of the supported backends through their own `js` interface. + +### Barretenberg + +Aztec Labs maintains the `barretenberg` proving backend, which you can instantiate and make use of alongside NoirJS. It is also the default proving backend installed and used with Nargo, the Noir CLI tool. + +To install its JavaScript library, run this in your project: + +```bash +npm i @noir-lang/backend_barretenberg +``` + +For more details on how to instantiate and use the libraries, refer to the [Full Noir App Guide](./getting_started/01_tiny_noir_app.md) and [Reference](./reference/noir_js/classes/Noir.md) sections. diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/.nojekyll b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 00000000000..5cbe9421b92 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,185 @@ +# BarretenbergBackend + +## Implements + +- [`Backend`](../interfaces/Backend.md) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`destroy`](../interfaces/Backend.md#destroy) + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateFinalProof`](../interfaces/Backend.md#generatefinalproof) + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(witness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `witness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProof`](../interfaces/Backend.md#generateintermediateproof) + +#### Example + +```typescript +const intermediateProof = await backend.generateIntermediateProof(witness); +``` + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProofArtifacts`](../interfaces/Backend.md#generateintermediateproofartifacts) + +#### Example + +```typescript +const artifacts = await backend.generateIntermediateProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyFinalProof`](../interfaces/Backend.md#verifyfinalproof) + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| 
:------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyIntermediateProof`](../interfaces/Backend.md#verifyintermediateproof) + +#### Example + +```typescript +const isValidIntermediate = await backend.verifyIntermediateProof(proof); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/index.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/index.md new file mode 100644 index 00000000000..93b248b0f65 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/index.md @@ -0,0 +1,45 @@ +# Backend Barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | + +### Interfaces + +| Interface | Description | +| :------ | :------ | +| [Backend](interfaces/Backend.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | + +## Functions + +### flattenPublicInputs() + +```ts +flattenPublicInputs(publicInputs): string[] +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `publicInputs` | `WitnessMap` | + +#### Returns + +`string`[] + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/interfaces/Backend.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/interfaces/Backend.md new file mode 100644 index 00000000000..3eb9645c8d2 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/interfaces/Backend.md @@ -0,0 +1,132 @@ +# Backend + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates an intermediate proof (meant to be verified in another circuit) + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | +| `numOfPublicInputs` | `number` | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Retrieves the artifacts from a proof in the Field format + +*** + +### 
verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies an intermediate proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 00000000000..266ade75d17 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,19 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | +| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 00000000000..04e662c845f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"}]},{"type":"category","label":"Interfaces","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/interfaces/Backend","label":"Backend"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"},{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/ProofData","label":"ProofData"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/.nojekyll b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md new file mode 100644 index 00000000000..e54116fb1d8 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md @@ -0,0 +1,131 @@ +# Noir + +## Constructors + +### new Noir(circuit, backend) + +```ts +new Noir(circuit, backend?): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `backend`? | `Backend` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the underlying backend instance. + +#### Example + +```typescript +await noir.destroy(); +``` + +*** + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. 
+ +#### Example + +```typescript +async execute(inputs) +``` + +*** + +### generateFinalProof() + +```ts +generateFinalProof(inputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a witness and a proof given an object as input. + +#### Example + +```typescript +async generateFinalproof(input) +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Instantiates the verification key and verifies a proof. + +#### Example + +```typescript +async verifyFinalProof(proof) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/and.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/and.md new file mode 100644 index 00000000000..c783283e396 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/blake2s256.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/blake2s256.md new file mode 100644 index 00000000000..7882d0da8d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md new file mode 100644 index 00000000000..0ba5783f0d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md @@ -0,0 +1,29 @@ +# ecdsa\_secp256k1\_verify() + +```ts +ecdsa_secp256k1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. +Verifies a ECDSA signature over the secp256k1 curve. 
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md new file mode 100644 index 00000000000..0b20ff68957 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256r1\_verify() + +```ts +ecdsa_secp256r1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256r1 curve. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/keccak256.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/keccak256.md new file mode 100644 index 00000000000..d10f155ce86 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/sha256.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/sha256.md new file mode 100644 index 00000000000..6ba4ecac022 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/xor.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/xor.md new file mode 100644 index 00000000000..8d762b895d3 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + 
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/index.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/index.md new file mode 100644 index 00000000000..348453c0059 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/index.md @@ -0,0 +1,37 @@ +# Noir JS + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs an foreign call and returns the response. | +| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [InputMap](type-aliases/InputMap.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies a ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 00000000000..812b8b16481 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs an foreign call and returns the response. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/InputMap.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/InputMap.md new file mode 100644 index 00000000000..c714e999d93 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/InputMap.md @@ -0,0 +1,13 @@ +# InputMap + +```ts +type InputMap: object; +``` + +## Index signature + + \[`key`: `string`\]: `InputValue` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type 
ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | +| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/WitnessMap.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..077ebeb133e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"noir_js/reference/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/InputMap","label":"InputMap"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ProofData","label":"ProofData"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"noir_js/reference/noir_js/functions/and","label":"and"},{"type":"doc","id":"noir_js/reference/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"noir_js/reference/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md new file mode 100644 index 00000000000..1dfabfe8f22 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md @@ -0,0 +1,46 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. 
This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. + +:::warning + +It is likely that not all backends will support a particular black box function. + +::: + +Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. + +Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: + +```rust +#[foreign(sha256)] +fn sha256(_input : [u8; N]) -> [u8; 32] {} +``` + +## Function list + +Here is a list of the current black box functions that are supported by UltraPlonk: + +- AES +- [SHA256](./cryptographic_primitives/hashes#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr) +- [Blake2s](./cryptographic_primitives/hashes#blake2s) +- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) +- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) +- [Compute merkle root](./merkle_trees#compute_merkle_root) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives.md b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives.md new file mode 100644 index 00000000000..2df4f929474 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives.md @@ -0,0 +1,14 @@ +--- +title: Cryptographic primitives in Noir +description: + Learn about the cryptographic primitives ready to use for any Noir project +keywords: + [ + cryptographic primitives, + Noir project, + ] +--- + +The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news or if you would be interested in adding more of these calculations in Noir. + +Some methods are available thanks to the Aztec backend, not being performed using Noir. When using other backends, these methods may or may not be supplied. 
diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/00_hashes.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/00_hashes.mdx new file mode 100644 index 00000000000..76745196681 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/00_hashes.mdx @@ -0,0 +1,167 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. + +```rust +fn sha256(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::sha256(x); +} +``` + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust +fn blake2s(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust +fn pedersen_hash(_input : [Field]) -> Field +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::pedersen_hash(x); +} +``` + + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. + +```rust +fn pedersen_commitment(_input : [Field]) -> [Field; 2] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let commitment = std::hash::pedersen_commitment(x); +} +``` + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes +(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes +of the input. + +```rust +fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let message_size = 4; + let hash = std::hash::keccak256(x, message_size); +} +``` + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify +how many inputs are there to your Poseidon function. + +```rust +// example for hash_1, hash_2 accepts an array of length 2, etc +fn hash_1(input: [Field; 1]) -> Field +``` + +example: + +```rust +fn main() +{ + let hash1 = std::hash::poseidon::bn254::hash_2([1, 2]); + assert(hash1 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a); +} +``` + +## mimc_bn254 and mimc + +`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by +providing an array of Fields, and it returns a Field with the hash. 
You can use the `mimc` method if +you're willing to input your own constants: + +```rust +fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field +``` + +otherwise, use the `mimc_bn254` method: + +```rust +fn mimc_bn254(array: [Field; N]) -> Field +``` + +example: + +```rust + +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::mimc::mimc_bn254(x); +} +``` + +## hash_to_field + +```rust +fn hash_to_field(_input : [Field; N]) -> Field {} +``` + +Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return +a value which can be represented as a `Field`. + + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/01_scalar.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/01_scalar.mdx new file mode 100644 index 00000000000..c7eed820a80 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/01_scalar.mdx @@ -0,0 +1,27 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplications over a fixed base in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## scalar_mul::fixed_base_embedded_curve + +Performs scalar multiplication over the embedded curve whose coordinates are defined by the +configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. + +```rust +fn fixed_base_embedded_curve(_input : Field) -> [Field; 2] +``` + +example + +```rust +fn main(x : Field) { + let scal = std::scalar_mul::fixed_base_embedded_curve(x); + std::println(scal); +} +``` + + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/02_schnorr.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/02_schnorr.mdx new file mode 100644 index 00000000000..c184ce28120 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/02_schnorr.mdx @@ -0,0 +1,37 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). + +```rust +fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool +``` + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... + +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... +const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... 
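+// The resulting `signature` byte array, together with the public key coordinates and the
+// message, is what the Noir `verify_signature` function above expects as inputs.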
+``` + + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx new file mode 100644 index 00000000000..72bce984821 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx @@ -0,0 +1,45 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. + +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md new file mode 100644 index 00000000000..6e6b19b6861 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md @@ -0,0 +1,101 @@ +--- +title: Elliptic Curve Primitives +keywords: [cryptographic primitives, Noir project] +--- + +Data structures and methods on them that allow you to carry out computations involving elliptic +curves over the (mathematical) field corresponding to `Field`. For the field currently at our +disposal, applications would involve a curve embedded in BN254, e.g. the +[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). + +## Data structures + +### Elliptic curve configurations + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic +curve you want to use, which would be specified using any one of the methods +`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the +defining equation together with a generator point as parameters. 
You can find more detail in the +comments in +[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but +the gist of it is that the elliptic curves of interest are usually expressed in one of the standard +forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, +you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly +together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates +requiring more coordinates but allowing for more efficient implementations of elliptic curve +operations). Conversions between all of these forms are provided, and under the hood these +conversions are done whenever an operation is more efficient in a different representation (or a +mixed coordinate representation is employed). + +### Points + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the +elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p` +does indeed lie on `c` by calling `c.contains(p1)`. + +## Methods + +(given a choice of curve representation, e.g. use `std::ec::tecurve::affine::Curve` and use +`std::ec::tecurve::affine::Point`) + +- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is + zero by calling `p.is_zero()`. +- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling + `p1.eq(p2)`. +- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two + points is accomplished by calling `c.add(p1,p2)`. +- **Negation**: For a point `p: Point`, `p.negate()` is its negation. +- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by + calling `c.subtract(p1,p2)`. +- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`, + scalar multiplication is given by `c.mul(n,p)`. If instead `n :: [u1; N]`, i.e. `n` is a bit + array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)` +- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`, + multi-scalar multiplication is given by `c.msm(n,p)`. +- **Coordinate representation conversions**: The `into_group` method converts a point or curve + configuration in the affine representation to one in the CurveGroup representation, and + `into_affine` goes in the other direction. +- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent + and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their + configurations or points. `swcurve` is more general and a curve c of one of the other two types + may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying + on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling + `c.map_into_swcurve(p)`. +- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a + `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. 
For all of + the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where + `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to + satisfy are specified in the comments + [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)). + +## Examples + +The +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more +interesting examples in Noir would be: + +Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key +from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub, +for example, this code would do: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; + +fn bjj_pub_key(priv_key: Field) -> Point +{ + + let bjj = Curve::new(168700, 168696, G::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); + + let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203); + + bjj.mul(priv_key,base_pt) +} +``` + +This would come in handy in a Merkle proof. + +- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash + function. See + [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for + the case of Baby Jubjub and the Poseidon hash function. diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/05_eddsa.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/05_eddsa.mdx new file mode 100644 index 00000000000..9a5beb55ee9 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/05_eddsa.mdx @@ -0,0 +1,17 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/logging.md b/docs/versioned_docs/version-v0.19.4/standard_library/logging.md new file mode 100644 index 00000000000..7e2fd9b9aff --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/logging.md @@ -0,0 +1,62 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides a familiar `println` statement you can use. Despite being a limited +implementation of rust's `println!` macro, this construct can be useful for debugging. 
+ +You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. + +The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: + +```rust +use dep::std; + +struct Person { + age : Field, + height : Field, +} + +fn main(age : Field, height : Field) { + let person = Person { age : age, height : height }; + std::println(person); + std::println(age + height); + std::println("Hello world!"); +} + +``` + +You can print multiple different types in the same statement and string as well as a new "fmtstr" type. A `fmtstr` can be specified in the same way as a normal string it just should be prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + std::println(fmt_str); + + let s = myStruct { y: x, x: y }; + std::println(s); + + std::println(f"i: {i}, s: {s}"); + + std::println(x); + std::println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + std::println(f"s: {s}, foo: {foo}"); +``` diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.19.4/standard_library/merkle_trees.md new file mode 100644 index 00000000000..dc383a1426b --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/merkle_trees.md @@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. +keywords: + [ + Merkle trees in Noir, + Noir programming language, + check membership, + computing root from leaf, + Noir Merkle tree implementation, + Merkle tree tutorial, + Merkle tree code examples, + Noir libraries, + pedersen hash., + ] +--- + +## compute_merkle_root + +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](cryptographic_primitives/00_hashes.mdx#pedersen_hash). + +```rust +fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field +``` + +example: + +```rust +/** + // these values are for this example only + index = "0" + priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" + secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" + note_hash_path = [ + "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", + "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", + "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + ] + */ +fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { + + let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); + let pubkey_x = pubkey[0]; + let pubkey_y = pubkey[1]; + let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); + + let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); + std::println(root); +} +``` + +To check merkle tree membership: + +1. Include a merkle root as a program input. +2. 
Compute the merkle root of a given leaf, index and hash path. +3. Assert the merkle roots are equal. + +For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/options.md b/docs/versioned_docs/version-v0.19.4/standard_library/options.md new file mode 100644 index 00000000000..3d3139fb98b --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/options.md @@ -0,0 +1,99 @@ +--- +title: Option Type +--- + +The `Option` type is a way to express that a value might be present (`Some(T))` or absent (`None`). It's a safer way to handle potential absence of values, compared to using nulls in many other languages. + +```rust +struct Option { + None, + Some(T), +} +``` + +You can import the Option type into your Noir program like so: + +```rust +use dep::std::option::Option; + +fn main() { + let none = Option::none(); + let some = Option::some(3); +} +``` + +See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below. + +## Methods + +### none + +Constructs a none value. + +### some + +Constructs a some wrapper around a given value. + +### is_none + +Returns true if the Option is None. + +### is_some + +Returns true of the Option is Some. + +### unwrap + +Asserts `self.is_some()` and returns the wrapped value. + +### unwrap_unchecked + +Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option`. + +### unwrap_or + +Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. + +### unwrap_or_else + +Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value. + +### map + +If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. + +### map_or + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value. + +### map_or_else + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`. + +### and + +Returns None if self is None. Otherwise, this returns `other`. + +### and_then + +If self is None, this returns None. Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`. + +### or + +If self is Some, return self. Otherwise, return `other`. + +### or_else + +If self is Some, return self. Otherwise, return `default()`. + +### xor + +If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned. + +### filter + +Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`. + +### flatten + +Flattens an `Option>` into a `Option`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option. 
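+
+A minimal usage sketch combining a few of the methods above (the values are illustrative only):
+
+```rust
+use dep::std::option::Option;
+
+fn main() {
+    let none = Option::none();
+    let some = Option::some(3);
+
+    // unwrap_or: fall back to a default when the value is absent.
+    assert(none.unwrap_or(0) == 0);
+    assert(some.unwrap_or(0) == 3);
+
+    // map: transform the wrapped value only when it is present.
+    let doubled = some.map(|x| x * 2);
+    assert(doubled.unwrap() == 6);
+
+    // or: chain fallbacks; the first Some wins.
+    assert(none.or(some).is_some());
+}
+```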
diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/recursion.md b/docs/versioned_docs/version-v0.19.4/standard_library/recursion.md new file mode 100644 index 00000000000..ff4c63acaa7 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/recursion.md @@ -0,0 +1,96 @@ +--- +title: Recursive Proofs +description: Learn about how to write recursive proofs in Noir. +keywords: [recursion, recursive proofs, verification_key, aggregation object, verify_proof] +--- + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. + +The `verify_proof` function takes a verification key, proof and public inputs for a zk program, as well as a key hash and an input aggregation object. The key hash is used to check the validity of the verification key and the input aggregation object is required by some proving systems. The `verify_proof` function returns an output aggregation object that can then be fed into future iterations of the proof verification if required. + +```rust +#[foreign(verify_proof)] +fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field, _input_aggregation_object : [Field]) -> [Field] {} +``` + +:::info + +This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. + +::: + +## Aggregation Object + +The purpose of the input aggregation object is a little less clear though (and the output aggregation object that is returned from the `std::verify_proof` method). Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. In the case of PLONK the recursive aggregation object is two G1 points (expressed as 16 witness values). The final verifier (in our case this is most often the smart contract verifier) has to be aware of this aggregation object to execute a pairing and check the validity of these points (thus completing the recursive verification). + +So for example in this circuit: + +```rust +use dep::std; + +fn main( + verification_key : [Field; 114], + proof : [Field; 94], + public_inputs : [Field; 1], + key_hash : Field, + input_aggregation_object : [Field; 16], + proof_b : [Field; 94], +) -> pub [Field; 16] { + let output_aggregation_object_a = std::verify_proof( + verification_key, + proof, + public_inputs, + key_hash, + input_aggregation_object + ); + + let output_aggregation_object = std::verify_proof( + verification_key, + proof_b, + public_inputs, + key_hash, + output_aggregation_object_a + ); + + let mut output = [0; 16]; + for i in 0..16 { + output[i] = output_aggregation_object[i]; + } + output +} +``` + +In this example we have a circuit, that generates proofs A and B, that is being verified in circuit C. Assuming that the proof being passed in is not already a recursive proof, the `input_aggregation_object` will be all zeros. It will then generate an `output_aggregation_object`. This blob of data then becomes the `input_aggregation_object` of the next recursive aggregation we wish to compute. We can see here as the same public inputs, verification key, and key hash are used that we are verifying two proofs generated from the same circuit in this single circuit. 
`std::verify_proof` returns a `[Field]` because the size of an aggregation object is proof system dependent--in barretenberg, aggregation objects are two G1 points, while in Halo2, the aggregation object is a list of G1 points that is log the circuit size. So for the final step we convert the slice into an array of size 16 because we are generating proofs using UltraPlonk. + +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. They should be checked against in the circuit after construction of a new aggregation state. + +### `key_hash` + +A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. + +### `input_aggregation_object` + +An aggregation object is blob of data that the top-level verifier must run some proof system specific algorithm on to complete verification. The size is proof system specific and will be set by the backend integrating this opcode. The input aggregation object is only not `None` when we are verifying a previous recursive aggregation in the current circuit. If this is the first recursive aggregation there is no input aggregation object. It is left to the backend to determine how to handle when there is no input aggregation object. + +## Return value + +### `output_aggregation_object` + +This is the result of a recursive aggregation and is what will be fed into the next verifier. +The next verifier can either perform a final verification (returning true or false) or perform another recursive aggregation where this output aggregation object will be the input aggregation object of the next recursive aggregation. + +## Example + +You can see an example of how to do recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/zeroed.md b/docs/versioned_docs/version-v0.19.4/standard_library/zeroed.md new file mode 100644 index 00000000000..97dab02dac2 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/zeroed.md @@ -0,0 +1,25 @@ +--- +title: Zeroed Function +description: + The zeroed function returns a zeroed value of any type. +keywords: + [ + zeroed + ] +--- + +Implements `fn zeroed() -> T` to return a zeroed value of any type. This function is generally unsafe to use as the zeroed bit pattern is not guaranteed to be valid for all types. It can however, be useful in cases when the value is guaranteed not to be used such as in a BoundedVec library implementing a growable vector, up to a certain length, backed by an array. The array can be initialized with zeroed values which are guaranteed to be inaccessible until the vector is pushed to. Similarly, enumerations in noir can be implemented using this method by providing zeroed values for the unused variants. + +You can access the function at `std::unsafe::zeroed`. + +This function currently supports the following types: + +- Field +- Bool +- Uint +- Array +- String +- Tuple +- Function + +Using it on other types could result in unexpected behavior. 
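+
+A minimal sketch of calling it (the variable and values are illustrative, not part of the standard library):
+
+```rust
+use dep::std;
+
+fn main() {
+    // Reserve a placeholder value that is guaranteed to be overwritten
+    // before it is ever read.
+    let mut placeholder: Field = std::unsafe::zeroed();
+    placeholder = 42;
+    assert(placeholder == 42);
+}
+```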
diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md index c4bc0545a1c..171b7d3dbda 100644 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md @@ -18,7 +18,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -26,7 +26,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md index c8ccb4f8b9f..a7240a54e5c 100644 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md @@ -39,7 +39,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md index 54c618599d2..8fd63293c13 100644 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md @@ -18,7 +18,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -26,7 +26,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md index 5631a322659..90a1f3379a2 100644 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. 
```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md index 54c618599d2..8fd63293c13 100644 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md @@ -18,7 +18,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -26,7 +26,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md index 69798c7a276..a9c93e61167 100644 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_sidebars/version-v0.19.4-sidebars.json b/docs/versioned_sidebars/version-v0.19.4-sidebars.json new file mode 100644 index 00000000000..a1675eca18d --- /dev/null +++ b/docs/versioned_sidebars/version-v0.19.4-sidebars.json @@ -0,0 +1,293 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index", + "label": "Noir" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "Examples", + "items": [ + { + "type": "autogenerated", + "dirName": "examples" + } + ] + }, + { + "type": "category", + "label": "Nargo", + "items": [ + { + "type": "autogenerated", + "dirName": "nargo" + } + ] + }, + { + "type": "category", + "label": "Language Concepts", + "items": [ + { + "type": "category", + "label": "Data Types", + "link": { + "type": "doc", + "id": "language_concepts/data_types" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "language_concepts/data_types" + } + ] + }, + "language_concepts/functions", + "language_concepts/control_flow", + "language_concepts/ops", + "language_concepts/assert", + "language_concepts/unconstrained", + "language_concepts/generics", + "language_concepts/mutability", + "language_concepts/lambdas", + "language_concepts/comments", + "language_concepts/distinct", + "language_concepts/shadowing" + ] + }, + { + "type": "category", + "label": "Noir Standard Library", + "items": [ + { + "type": "category", + "label": "Cryptographic Primitives", + "link": { + "type": "doc", + "id": "standard_library/cryptographic_primitives" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "standard_library/cryptographic_primitives" + } + ] + }, + "standard_library/recursion", + "standard_library/logging", + "standard_library/merkle_trees", + "standard_library/zeroed", + 
"standard_library/black_box_fns", + "standard_library/options" + ] + }, + { + "type": "category", + "label": "Modules, Packages and Crates", + "items": [ + { + "type": "autogenerated", + "dirName": "modules_packages_crates" + } + ] + }, + { + "type": "category", + "label": "NoirJS", + "link": { + "type": "doc", + "id": "noir_js/noir_js" + }, + "items": [ + { + "type": "category", + "label": "Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "noir_js/getting_started" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "category", + "label": "Noir JS", + "link": { + "type": "doc", + "id": "noir_js/reference/noir_js/index" + }, + "items": [ + { + "type": "category", + "label": "Classes", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/noir_js/classes/Noir", + "label": "Noir" + } + ] + }, + { + "type": "category", + "label": "Type Aliases", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/CompiledCircuit", + "label": "CompiledCircuit" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallHandler", + "label": "ForeignCallHandler" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallInput", + "label": "ForeignCallInput" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallOutput", + "label": "ForeignCallOutput" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/InputMap", + "label": "InputMap" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ProofData", + "label": "ProofData" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/WitnessMap", + "label": "WitnessMap" + } + ] + }, + { + "type": "category", + "label": "Functions", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/and", + "label": "and" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/blake2s256", + "label": "blake2s256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify", + "label": "ecdsa_secp256k1_verify" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify", + "label": "ecdsa_secp256r1_verify" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/keccak256", + "label": "keccak256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/sha256", + "label": "sha256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/xor", + "label": "xor" + } + ] + } + ] + }, + { + "type": "category", + "label": "Backend Barretenberg", + "link": { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/index" + }, + "items": [ + { + "type": "category", + "label": "Classes", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/classes/BarretenbergBackend", + "label": "BarretenbergBackend" + } + ] + }, + { + "type": "category", + "label": "Interfaces", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/interfaces/Backend", + "label": "Backend" + } + ] + }, + { + "type": "category", + "label": "Type Aliases", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/type-aliases/BackendOptions", + "label": "BackendOptions" + }, + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit", + "label": "CompiledCircuit" + }, + { + "type": "doc", + "id": 
"noir_js/reference/backend_barretenberg/type-aliases/ProofData", + "label": "ProofData" + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/docs/versions.json b/docs/versions.json index 7391f71ae5b..7f45b37e432 100644 --- a/docs/versions.json +++ b/docs/versions.json @@ -1,6 +1,6 @@ [ - "v0.19.3", "v0.19.2", - "v0.17.0", - "v0.10.5" -] + "v0.19.1", + "v0.19.0", + "v0.17.0" +] \ No newline at end of file diff --git a/noir_stdlib/src/slice.nr b/noir_stdlib/src/slice.nr index ca06e2aae44..a5a9a38ed53 100644 --- a/noir_stdlib/src/slice.nr +++ b/noir_stdlib/src/slice.nr @@ -21,28 +21,16 @@ impl [T] { #[builtin(slice_pop_front)] pub fn pop_front(_self: Self) -> (T, Self) { } - pub fn insert(self, _index: Field, _elem: T) -> Self { - // TODO(#2462): Slice insert with a dynamic index - crate::assert_constant(_index); - self.__slice_insert(_index, _elem) - } - /// Insert an element at a specified index, shifting all elements /// after it to the right #[builtin(slice_insert)] - fn __slice_insert(_self: Self, _index: Field, _elem: T) -> Self { } - - pub fn remove(self, _index: Field) -> (Self, T) { - // TODO(#2462): Slice remove with a dynamic index - crate::assert_constant(_index); - self.__slice_remove(_index) - } + pub fn insert(_self: Self, _index: Field, _elem: T) -> Self { } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the altered slice and /// the removed element #[builtin(slice_remove)] - fn __slice_remove(_self: Self, _index: Field) -> (Self, T) { } + pub fn remove(_self: Self, _index: Field) -> (Self, T) { } // Append each element of the `other` slice to the end of `self`. // This returns a new slice and leaves both input slices unchanged. diff --git a/release-please-config.json b/release-please-config.json index afc0bfd420d..7995ea37381 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -48,6 +48,11 @@ "type": "json", "path": "tooling/noirc_abi_wasm/package.json", "jsonpath": "$.version" + }, + { + "type": "json", + "path": "docs/docs/package.json", + "jsonpath": "$.version" } ] }, @@ -67,6 +72,46 @@ "brillig_vm/Cargo.toml", "stdlib/Cargo.toml", "flake.nix", + { + "type": "json", + "path": "acvm_js/package.json", + "jsonpath": "$.version" + }, + { + "type": "toml", + "path": "acir/Cargo.toml", + "jsonpath": "$.workspace.dependencies.acir.version" + }, + { + "type": "toml", + "path": "acir_field/Cargo.toml", + "jsonpath": "$.workspace.dependencies.acir_field.version" + }, + { + "type": "toml", + "path": "stdlib/Cargo.toml", + "jsonpath": "$.workspace.dependencies.stdlib.version" + }, + { + "type": "toml", + "path": "brillig/Cargo.toml", + "jsonpath": "$.workspace.dependencies.brillig.version" + }, + { + "type": "toml", + "path": "brillig_vm/Cargo.toml", + "jsonpath": "$.workspace.dependencies.brillig_vm.version" + }, + { + "type": "toml", + "path": "blackbox_solver/Cargo.toml", + "jsonpath": "$.workspace.dependencies.acvm_blackbox_solver.version" + }, + { + "type": "toml", + "path": "barretenberg_blackbox_solver/Cargo.toml", + "jsonpath": "$.workspace.dependencies.barretenberg_blackbox_solver.version" + }, { "type": "json", "path": "acvm_js/package.json", diff --git a/scripts/bootstrap_native.sh b/scripts/bootstrap_native.sh index 26cd44704aa..693a9d9678e 100755 --- a/scripts/bootstrap_native.sh +++ b/scripts/bootstrap_native.sh @@ -4,11 +4,13 @@ set -eu cd $(dirname "$0")/.. 
# If this project has been subrepod into another project, set build data manually. +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false if [ -f ".gitrepo" ]; then - export SOURCE_DATE_EPOCH=$(date +%s) - export GIT_DIRTY=false export GIT_COMMIT=$(awk '/commit =/ {print $3}' .gitrepo) +else + export GIT_COMMIT=$(git rev-parse --verify HEAD) fi # Build native. -cargo build --features="noirc_frontend/aztec" --release \ No newline at end of file +cargo build --features="noirc_driver/aztec" --release diff --git a/scripts/bootstrap_packages.sh b/scripts/bootstrap_packages.sh index 552ddd7597a..5fce2675037 100755 --- a/scripts/bootstrap_packages.sh +++ b/scripts/bootstrap_packages.sh @@ -6,13 +6,15 @@ cd $(dirname "$0")/.. ./scripts/install_wasm-bindgen.sh # If this project has been subrepod into another project, set build data manually. +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false if [ -f ".gitrepo" ]; then - export SOURCE_DATE_EPOCH=$(date +%s) - export GIT_DIRTY=false export GIT_COMMIT=$(awk '/commit =/ {print $3}' .gitrepo) +else + export GIT_COMMIT=$(git rev-parse --verify HEAD) fi -export cargoExtraArgs="--features noirc_frontend/aztec" +export cargoExtraArgs="--features noirc_driver/aztec" yarn yarn build diff --git a/test_programs/.gitignore b/test_programs/.gitignore new file mode 100644 index 00000000000..01a3426160c --- /dev/null +++ b/test_programs/.gitignore @@ -0,0 +1 @@ +acir_artifacts diff --git a/test_programs/acir_artifacts/1327_concrete_in_generic/target/acir.gz b/test_programs/acir_artifacts/1327_concrete_in_generic/target/acir.gz deleted file mode 100644 index cc92863a4a8..00000000000 Binary files a/test_programs/acir_artifacts/1327_concrete_in_generic/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/1327_concrete_in_generic/target/witness.gz b/test_programs/acir_artifacts/1327_concrete_in_generic/target/witness.gz deleted file mode 100644 index 454a9b75e04..00000000000 Binary files a/test_programs/acir_artifacts/1327_concrete_in_generic/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/1_mul/target/acir.gz b/test_programs/acir_artifacts/1_mul/target/acir.gz deleted file mode 100644 index 7572c9ac2cf..00000000000 Binary files a/test_programs/acir_artifacts/1_mul/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/1_mul/target/witness.gz b/test_programs/acir_artifacts/1_mul/target/witness.gz deleted file mode 100644 index 76f5c8a2fe2..00000000000 Binary files a/test_programs/acir_artifacts/1_mul/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/2_div/target/acir.gz b/test_programs/acir_artifacts/2_div/target/acir.gz deleted file mode 100644 index 46405fc2029..00000000000 Binary files a/test_programs/acir_artifacts/2_div/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/2_div/target/witness.gz b/test_programs/acir_artifacts/2_div/target/witness.gz deleted file mode 100644 index 3145b77d957..00000000000 Binary files a/test_programs/acir_artifacts/2_div/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/3_add/target/acir.gz b/test_programs/acir_artifacts/3_add/target/acir.gz deleted file mode 100644 index 42e66d90f73..00000000000 Binary files a/test_programs/acir_artifacts/3_add/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/3_add/target/witness.gz b/test_programs/acir_artifacts/3_add/target/witness.gz deleted file mode 100644 index 
0cfc48c525e..00000000000 Binary files a/test_programs/acir_artifacts/3_add/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/4_sub/target/acir.gz b/test_programs/acir_artifacts/4_sub/target/acir.gz deleted file mode 100644 index 633bec13563..00000000000 Binary files a/test_programs/acir_artifacts/4_sub/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/4_sub/target/witness.gz b/test_programs/acir_artifacts/4_sub/target/witness.gz deleted file mode 100644 index 68e9df80789..00000000000 Binary files a/test_programs/acir_artifacts/4_sub/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/5_over/target/acir.gz b/test_programs/acir_artifacts/5_over/target/acir.gz deleted file mode 100644 index 681a0290f75..00000000000 Binary files a/test_programs/acir_artifacts/5_over/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/5_over/target/witness.gz b/test_programs/acir_artifacts/5_over/target/witness.gz deleted file mode 100644 index b0a38188cab..00000000000 Binary files a/test_programs/acir_artifacts/5_over/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/6/target/acir.gz b/test_programs/acir_artifacts/6/target/acir.gz deleted file mode 100644 index 0cc489d8932..00000000000 Binary files a/test_programs/acir_artifacts/6/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/6/target/witness.gz b/test_programs/acir_artifacts/6/target/witness.gz deleted file mode 100644 index 5c060e1b469..00000000000 Binary files a/test_programs/acir_artifacts/6/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/6_array/target/acir.gz b/test_programs/acir_artifacts/6_array/target/acir.gz deleted file mode 100644 index 787db190b49..00000000000 Binary files a/test_programs/acir_artifacts/6_array/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/6_array/target/witness.gz b/test_programs/acir_artifacts/6_array/target/witness.gz deleted file mode 100644 index cc96fd18e00..00000000000 Binary files a/test_programs/acir_artifacts/6_array/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/7/target/acir.gz b/test_programs/acir_artifacts/7/target/acir.gz deleted file mode 100644 index 7f14d2a932c..00000000000 Binary files a/test_programs/acir_artifacts/7/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/7/target/witness.gz b/test_programs/acir_artifacts/7/target/witness.gz deleted file mode 100644 index d51356eb6c1..00000000000 Binary files a/test_programs/acir_artifacts/7/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/7_function/target/acir.gz b/test_programs/acir_artifacts/7_function/target/acir.gz deleted file mode 100644 index 5ddc1ba38e6..00000000000 Binary files a/test_programs/acir_artifacts/7_function/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/7_function/target/witness.gz b/test_programs/acir_artifacts/7_function/target/witness.gz deleted file mode 100644 index 0bb522d210e..00000000000 Binary files a/test_programs/acir_artifacts/7_function/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/arithmetic_binary_operations/target/acir.gz b/test_programs/acir_artifacts/arithmetic_binary_operations/target/acir.gz deleted file mode 100644 index fd31cc3bfa6..00000000000 Binary files 
a/test_programs/acir_artifacts/arithmetic_binary_operations/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/arithmetic_binary_operations/target/witness.gz b/test_programs/acir_artifacts/arithmetic_binary_operations/target/witness.gz deleted file mode 100644 index 450a83edc9c..00000000000 Binary files a/test_programs/acir_artifacts/arithmetic_binary_operations/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_dynamic/target/acir.gz b/test_programs/acir_artifacts/array_dynamic/target/acir.gz deleted file mode 100644 index e6111539302..00000000000 Binary files a/test_programs/acir_artifacts/array_dynamic/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_dynamic/target/witness.gz b/test_programs/acir_artifacts/array_dynamic/target/witness.gz deleted file mode 100644 index 102bb7ad178..00000000000 Binary files a/test_programs/acir_artifacts/array_dynamic/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_eq/target/acir.gz b/test_programs/acir_artifacts/array_eq/target/acir.gz deleted file mode 100644 index b274cd72649..00000000000 Binary files a/test_programs/acir_artifacts/array_eq/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_eq/target/witness.gz b/test_programs/acir_artifacts/array_eq/target/witness.gz deleted file mode 100644 index f000e986c3d..00000000000 Binary files a/test_programs/acir_artifacts/array_eq/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_len/target/acir.gz b/test_programs/acir_artifacts/array_len/target/acir.gz deleted file mode 100644 index 795d22712b2..00000000000 Binary files a/test_programs/acir_artifacts/array_len/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_len/target/witness.gz b/test_programs/acir_artifacts/array_len/target/witness.gz deleted file mode 100644 index c3763958eeb..00000000000 Binary files a/test_programs/acir_artifacts/array_len/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_neq/target/acir.gz b/test_programs/acir_artifacts/array_neq/target/acir.gz deleted file mode 100644 index 8d87f8bc575..00000000000 Binary files a/test_programs/acir_artifacts/array_neq/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_neq/target/witness.gz b/test_programs/acir_artifacts/array_neq/target/witness.gz deleted file mode 100644 index c56b373217d..00000000000 Binary files a/test_programs/acir_artifacts/array_neq/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_sort/target/acir.gz b/test_programs/acir_artifacts/array_sort/target/acir.gz deleted file mode 100644 index 42d701ede8a..00000000000 Binary files a/test_programs/acir_artifacts/array_sort/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/array_sort/target/witness.gz b/test_programs/acir_artifacts/array_sort/target/witness.gz deleted file mode 100644 index 8229809cc95..00000000000 Binary files a/test_programs/acir_artifacts/array_sort/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/assert/target/acir.gz b/test_programs/acir_artifacts/assert/target/acir.gz deleted file mode 100644 index c4e7f86f219..00000000000 Binary files a/test_programs/acir_artifacts/assert/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/assert/target/witness.gz 
b/test_programs/acir_artifacts/assert/target/witness.gz deleted file mode 100644 index 16880cedea2..00000000000 Binary files a/test_programs/acir_artifacts/assert/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/assert_statement/target/acir.gz b/test_programs/acir_artifacts/assert_statement/target/acir.gz deleted file mode 100644 index d71ac1b6b0e..00000000000 Binary files a/test_programs/acir_artifacts/assert_statement/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/assert_statement/target/witness.gz b/test_programs/acir_artifacts/assert_statement/target/witness.gz deleted file mode 100644 index 3e073aac635..00000000000 Binary files a/test_programs/acir_artifacts/assert_statement/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/assign_ex/target/acir.gz b/test_programs/acir_artifacts/assign_ex/target/acir.gz deleted file mode 100644 index a682df0b963..00000000000 Binary files a/test_programs/acir_artifacts/assign_ex/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/assign_ex/target/witness.gz b/test_programs/acir_artifacts/assign_ex/target/witness.gz deleted file mode 100644 index 35e05b7622b..00000000000 Binary files a/test_programs/acir_artifacts/assign_ex/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bit_and/target/acir.gz b/test_programs/acir_artifacts/bit_and/target/acir.gz deleted file mode 100644 index 5fb7041cdf1..00000000000 Binary files a/test_programs/acir_artifacts/bit_and/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bit_and/target/witness.gz b/test_programs/acir_artifacts/bit_and/target/witness.gz deleted file mode 100644 index 0c5dc12cf1c..00000000000 Binary files a/test_programs/acir_artifacts/bit_and/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bit_shifts_comptime/target/acir.gz b/test_programs/acir_artifacts/bit_shifts_comptime/target/acir.gz deleted file mode 100644 index d6c006fd102..00000000000 Binary files a/test_programs/acir_artifacts/bit_shifts_comptime/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bit_shifts_comptime/target/witness.gz b/test_programs/acir_artifacts/bit_shifts_comptime/target/witness.gz deleted file mode 100644 index 81bae695da1..00000000000 Binary files a/test_programs/acir_artifacts/bit_shifts_comptime/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bit_shifts_runtime/target/acir.gz b/test_programs/acir_artifacts/bit_shifts_runtime/target/acir.gz deleted file mode 100644 index 1a800a63a57..00000000000 Binary files a/test_programs/acir_artifacts/bit_shifts_runtime/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bit_shifts_runtime/target/witness.gz b/test_programs/acir_artifacts/bit_shifts_runtime/target/witness.gz deleted file mode 100644 index 2af844993dd..00000000000 Binary files a/test_programs/acir_artifacts/bit_shifts_runtime/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bool_not/target/acir.gz b/test_programs/acir_artifacts/bool_not/target/acir.gz deleted file mode 100644 index 233a1e25f33..00000000000 Binary files a/test_programs/acir_artifacts/bool_not/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bool_not/target/witness.gz b/test_programs/acir_artifacts/bool_not/target/witness.gz deleted file mode 100644 index 16880cedea2..00000000000 Binary files 
a/test_programs/acir_artifacts/bool_not/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bool_or/target/acir.gz b/test_programs/acir_artifacts/bool_or/target/acir.gz deleted file mode 100644 index 697832be207..00000000000 Binary files a/test_programs/acir_artifacts/bool_or/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/bool_or/target/witness.gz b/test_programs/acir_artifacts/bool_or/target/witness.gz deleted file mode 100644 index 10cffba7141..00000000000 Binary files a/test_programs/acir_artifacts/bool_or/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_acir_as_brillig/target/acir.gz b/test_programs/acir_artifacts/brillig_acir_as_brillig/target/acir.gz deleted file mode 100644 index 69cbde31d9d..00000000000 Binary files a/test_programs/acir_artifacts/brillig_acir_as_brillig/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_acir_as_brillig/target/witness.gz b/test_programs/acir_artifacts/brillig_acir_as_brillig/target/witness.gz deleted file mode 100644 index 844178f0430..00000000000 Binary files a/test_programs/acir_artifacts/brillig_acir_as_brillig/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_arrays/target/acir.gz b/test_programs/acir_artifacts/brillig_arrays/target/acir.gz deleted file mode 100644 index a093703d4b3..00000000000 Binary files a/test_programs/acir_artifacts/brillig_arrays/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_arrays/target/witness.gz b/test_programs/acir_artifacts/brillig_arrays/target/witness.gz deleted file mode 100644 index 2d9b4cf245b..00000000000 Binary files a/test_programs/acir_artifacts/brillig_arrays/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_assert/target/acir.gz b/test_programs/acir_artifacts/brillig_assert/target/acir.gz deleted file mode 100644 index 2cf3b7251e6..00000000000 Binary files a/test_programs/acir_artifacts/brillig_assert/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_assert/target/witness.gz b/test_programs/acir_artifacts/brillig_assert/target/witness.gz deleted file mode 100644 index 628e5fbc6d8..00000000000 Binary files a/test_programs/acir_artifacts/brillig_assert/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_blake2s/target/acir.gz b/test_programs/acir_artifacts/brillig_blake2s/target/acir.gz deleted file mode 100644 index be47506c42f..00000000000 Binary files a/test_programs/acir_artifacts/brillig_blake2s/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_blake2s/target/witness.gz b/test_programs/acir_artifacts/brillig_blake2s/target/witness.gz deleted file mode 100644 index d51356eb6c1..00000000000 Binary files a/test_programs/acir_artifacts/brillig_blake2s/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_calls/target/acir.gz b/test_programs/acir_artifacts/brillig_calls/target/acir.gz deleted file mode 100644 index b69e231774b..00000000000 Binary files a/test_programs/acir_artifacts/brillig_calls/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_calls/target/witness.gz b/test_programs/acir_artifacts/brillig_calls/target/witness.gz deleted file mode 100644 index 844178f0430..00000000000 Binary files a/test_programs/acir_artifacts/brillig_calls/target/witness.gz and /dev/null differ 
diff --git a/test_programs/acir_artifacts/brillig_calls_array/target/acir.gz b/test_programs/acir_artifacts/brillig_calls_array/target/acir.gz deleted file mode 100644 index 59b89c22bc3..00000000000 Binary files a/test_programs/acir_artifacts/brillig_calls_array/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_calls_array/target/witness.gz b/test_programs/acir_artifacts/brillig_calls_array/target/witness.gz deleted file mode 100644 index 266c94d043a..00000000000 Binary files a/test_programs/acir_artifacts/brillig_calls_array/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_calls_conditionals/target/acir.gz b/test_programs/acir_artifacts/brillig_calls_conditionals/target/acir.gz deleted file mode 100644 index e4155f58ead..00000000000 Binary files a/test_programs/acir_artifacts/brillig_calls_conditionals/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_calls_conditionals/target/witness.gz b/test_programs/acir_artifacts/brillig_calls_conditionals/target/witness.gz deleted file mode 100644 index 3e7c051ffc4..00000000000 Binary files a/test_programs/acir_artifacts/brillig_calls_conditionals/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_conditional/target/acir.gz b/test_programs/acir_artifacts/brillig_conditional/target/acir.gz deleted file mode 100644 index 9117511d800..00000000000 Binary files a/test_programs/acir_artifacts/brillig_conditional/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_conditional/target/witness.gz b/test_programs/acir_artifacts/brillig_conditional/target/witness.gz deleted file mode 100644 index 162d33a5fd3..00000000000 Binary files a/test_programs/acir_artifacts/brillig_conditional/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_ecdsa/target/acir.gz b/test_programs/acir_artifacts/brillig_ecdsa/target/acir.gz deleted file mode 100644 index cdc28517544..00000000000 Binary files a/test_programs/acir_artifacts/brillig_ecdsa/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_ecdsa/target/witness.gz b/test_programs/acir_artifacts/brillig_ecdsa/target/witness.gz deleted file mode 100644 index 5fe202b72f0..00000000000 Binary files a/test_programs/acir_artifacts/brillig_ecdsa/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_fns_as_values/target/acir.gz b/test_programs/acir_artifacts/brillig_fns_as_values/target/acir.gz deleted file mode 100644 index d1819212993..00000000000 Binary files a/test_programs/acir_artifacts/brillig_fns_as_values/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_fns_as_values/target/witness.gz b/test_programs/acir_artifacts/brillig_fns_as_values/target/witness.gz deleted file mode 100644 index f4a9c9f6dda..00000000000 Binary files a/test_programs/acir_artifacts/brillig_fns_as_values/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_hash_to_field/target/acir.gz b/test_programs/acir_artifacts/brillig_hash_to_field/target/acir.gz deleted file mode 100644 index 73c742a2dd1..00000000000 Binary files a/test_programs/acir_artifacts/brillig_hash_to_field/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_hash_to_field/target/witness.gz b/test_programs/acir_artifacts/brillig_hash_to_field/target/witness.gz deleted file mode 100644 index 
1529254d597..00000000000 Binary files a/test_programs/acir_artifacts/brillig_hash_to_field/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_identity_function/target/acir.gz b/test_programs/acir_artifacts/brillig_identity_function/target/acir.gz deleted file mode 100644 index 4e17ecc5d7b..00000000000 Binary files a/test_programs/acir_artifacts/brillig_identity_function/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_identity_function/target/witness.gz b/test_programs/acir_artifacts/brillig_identity_function/target/witness.gz deleted file mode 100644 index 9a911d62512..00000000000 Binary files a/test_programs/acir_artifacts/brillig_identity_function/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_keccak/target/acir.gz b/test_programs/acir_artifacts/brillig_keccak/target/acir.gz deleted file mode 100644 index 1a64fd03980..00000000000 Binary files a/test_programs/acir_artifacts/brillig_keccak/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_keccak/target/witness.gz b/test_programs/acir_artifacts/brillig_keccak/target/witness.gz deleted file mode 100644 index 95b5064a5a7..00000000000 Binary files a/test_programs/acir_artifacts/brillig_keccak/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_loop/target/acir.gz b/test_programs/acir_artifacts/brillig_loop/target/acir.gz deleted file mode 100644 index 04baae8d290..00000000000 Binary files a/test_programs/acir_artifacts/brillig_loop/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_loop/target/witness.gz b/test_programs/acir_artifacts/brillig_loop/target/witness.gz deleted file mode 100644 index 6e9e8ecd1d0..00000000000 Binary files a/test_programs/acir_artifacts/brillig_loop/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_nested_arrays/target/acir.gz b/test_programs/acir_artifacts/brillig_nested_arrays/target/acir.gz deleted file mode 100644 index f69df4781ec..00000000000 Binary files a/test_programs/acir_artifacts/brillig_nested_arrays/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_nested_arrays/target/witness.gz b/test_programs/acir_artifacts/brillig_nested_arrays/target/witness.gz deleted file mode 100644 index 87cf83430f7..00000000000 Binary files a/test_programs/acir_artifacts/brillig_nested_arrays/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_nested_slices/target/acir.gz b/test_programs/acir_artifacts/brillig_nested_slices/target/acir.gz deleted file mode 100644 index 100a208bcd8..00000000000 Binary files a/test_programs/acir_artifacts/brillig_nested_slices/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_nested_slices/target/witness.gz b/test_programs/acir_artifacts/brillig_nested_slices/target/witness.gz deleted file mode 100644 index 3530c6f59c1..00000000000 Binary files a/test_programs/acir_artifacts/brillig_nested_slices/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_not/target/acir.gz b/test_programs/acir_artifacts/brillig_not/target/acir.gz deleted file mode 100644 index 9702ca340a5..00000000000 Binary files a/test_programs/acir_artifacts/brillig_not/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_not/target/witness.gz 
b/test_programs/acir_artifacts/brillig_not/target/witness.gz deleted file mode 100644 index 3fbf07be37e..00000000000 Binary files a/test_programs/acir_artifacts/brillig_not/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_oracle/target/acir.gz b/test_programs/acir_artifacts/brillig_oracle/target/acir.gz deleted file mode 100644 index db158f61882..00000000000 Binary files a/test_programs/acir_artifacts/brillig_oracle/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_oracle/target/witness.gz b/test_programs/acir_artifacts/brillig_oracle/target/witness.gz deleted file mode 100644 index 3fead7f6b2e..00000000000 Binary files a/test_programs/acir_artifacts/brillig_oracle/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_pedersen/target/acir.gz b/test_programs/acir_artifacts/brillig_pedersen/target/acir.gz deleted file mode 100644 index 27f6f353d25..00000000000 Binary files a/test_programs/acir_artifacts/brillig_pedersen/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_pedersen/target/witness.gz b/test_programs/acir_artifacts/brillig_pedersen/target/witness.gz deleted file mode 100644 index b26110156a0..00000000000 Binary files a/test_programs/acir_artifacts/brillig_pedersen/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_recursion/target/acir.gz b/test_programs/acir_artifacts/brillig_recursion/target/acir.gz deleted file mode 100644 index c0c91d81546..00000000000 Binary files a/test_programs/acir_artifacts/brillig_recursion/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_recursion/target/witness.gz b/test_programs/acir_artifacts/brillig_recursion/target/witness.gz deleted file mode 100644 index 46e192995f3..00000000000 Binary files a/test_programs/acir_artifacts/brillig_recursion/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_references/target/acir.gz b/test_programs/acir_artifacts/brillig_references/target/acir.gz deleted file mode 100644 index 4069ca2cb5e..00000000000 Binary files a/test_programs/acir_artifacts/brillig_references/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_references/target/witness.gz b/test_programs/acir_artifacts/brillig_references/target/witness.gz deleted file mode 100644 index bf62ea672eb..00000000000 Binary files a/test_programs/acir_artifacts/brillig_references/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_scalar_mul/target/acir.gz b/test_programs/acir_artifacts/brillig_scalar_mul/target/acir.gz deleted file mode 100644 index 9f5f787c655..00000000000 Binary files a/test_programs/acir_artifacts/brillig_scalar_mul/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_scalar_mul/target/witness.gz b/test_programs/acir_artifacts/brillig_scalar_mul/target/witness.gz deleted file mode 100644 index 3204207ec63..00000000000 Binary files a/test_programs/acir_artifacts/brillig_scalar_mul/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_schnorr/target/acir.gz b/test_programs/acir_artifacts/brillig_schnorr/target/acir.gz deleted file mode 100644 index 625ae64a11d..00000000000 Binary files a/test_programs/acir_artifacts/brillig_schnorr/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_schnorr/target/witness.gz 
b/test_programs/acir_artifacts/brillig_schnorr/target/witness.gz deleted file mode 100644 index 17d93cc4d19..00000000000 Binary files a/test_programs/acir_artifacts/brillig_schnorr/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_sha256/target/acir.gz b/test_programs/acir_artifacts/brillig_sha256/target/acir.gz deleted file mode 100644 index 49f7e6afcf4..00000000000 Binary files a/test_programs/acir_artifacts/brillig_sha256/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_sha256/target/witness.gz b/test_programs/acir_artifacts/brillig_sha256/target/witness.gz deleted file mode 100644 index 118042d5841..00000000000 Binary files a/test_programs/acir_artifacts/brillig_sha256/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_slices/target/acir.gz b/test_programs/acir_artifacts/brillig_slices/target/acir.gz deleted file mode 100644 index 57ca1d59fed..00000000000 Binary files a/test_programs/acir_artifacts/brillig_slices/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_slices/target/witness.gz b/test_programs/acir_artifacts/brillig_slices/target/witness.gz deleted file mode 100644 index 3530c6f59c1..00000000000 Binary files a/test_programs/acir_artifacts/brillig_slices/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_to_be_bytes/target/acir.gz b/test_programs/acir_artifacts/brillig_to_be_bytes/target/acir.gz deleted file mode 100644 index 1249975b27c..00000000000 Binary files a/test_programs/acir_artifacts/brillig_to_be_bytes/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_to_be_bytes/target/witness.gz b/test_programs/acir_artifacts/brillig_to_be_bytes/target/witness.gz deleted file mode 100644 index 0d48d549824..00000000000 Binary files a/test_programs/acir_artifacts/brillig_to_be_bytes/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_to_bytes_integration/target/acir.gz b/test_programs/acir_artifacts/brillig_to_bytes_integration/target/acir.gz deleted file mode 100644 index fe9ac34ed17..00000000000 Binary files a/test_programs/acir_artifacts/brillig_to_bytes_integration/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_to_bytes_integration/target/witness.gz b/test_programs/acir_artifacts/brillig_to_bytes_integration/target/witness.gz deleted file mode 100644 index b3813a1f976..00000000000 Binary files a/test_programs/acir_artifacts/brillig_to_bytes_integration/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_to_le_bytes/target/acir.gz b/test_programs/acir_artifacts/brillig_to_le_bytes/target/acir.gz deleted file mode 100644 index 834f57e1ee7..00000000000 Binary files a/test_programs/acir_artifacts/brillig_to_le_bytes/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_to_le_bytes/target/witness.gz b/test_programs/acir_artifacts/brillig_to_le_bytes/target/witness.gz deleted file mode 100644 index d12168c557b..00000000000 Binary files a/test_programs/acir_artifacts/brillig_to_le_bytes/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_top_level/target/acir.gz b/test_programs/acir_artifacts/brillig_top_level/target/acir.gz deleted file mode 100644 index 4b2fbcd3462..00000000000 Binary files a/test_programs/acir_artifacts/brillig_top_level/target/acir.gz and /dev/null differ diff --git 
a/test_programs/acir_artifacts/brillig_top_level/target/witness.gz b/test_programs/acir_artifacts/brillig_top_level/target/witness.gz deleted file mode 100644 index 38bdf1f7263..00000000000 Binary files a/test_programs/acir_artifacts/brillig_top_level/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_unitialised_arrays/target/acir.gz b/test_programs/acir_artifacts/brillig_unitialised_arrays/target/acir.gz deleted file mode 100644 index bcaa4634aaf..00000000000 Binary files a/test_programs/acir_artifacts/brillig_unitialised_arrays/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/brillig_unitialised_arrays/target/witness.gz b/test_programs/acir_artifacts/brillig_unitialised_arrays/target/witness.gz deleted file mode 100644 index 9724de0f1d9..00000000000 Binary files a/test_programs/acir_artifacts/brillig_unitialised_arrays/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/cast_bool/target/acir.gz b/test_programs/acir_artifacts/cast_bool/target/acir.gz deleted file mode 100644 index 032b36d1629..00000000000 Binary files a/test_programs/acir_artifacts/cast_bool/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/cast_bool/target/witness.gz b/test_programs/acir_artifacts/cast_bool/target/witness.gz deleted file mode 100644 index fa79236ad55..00000000000 Binary files a/test_programs/acir_artifacts/cast_bool/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/closures_mut_ref/target/acir.gz b/test_programs/acir_artifacts/closures_mut_ref/target/acir.gz deleted file mode 100644 index 271b0ddd649..00000000000 Binary files a/test_programs/acir_artifacts/closures_mut_ref/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/closures_mut_ref/target/witness.gz b/test_programs/acir_artifacts/closures_mut_ref/target/witness.gz deleted file mode 100644 index 37c6d67fada..00000000000 Binary files a/test_programs/acir_artifacts/closures_mut_ref/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_1/target/acir.gz b/test_programs/acir_artifacts/conditional_1/target/acir.gz deleted file mode 100644 index 16ec8f28b53..00000000000 Binary files a/test_programs/acir_artifacts/conditional_1/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_1/target/witness.gz b/test_programs/acir_artifacts/conditional_1/target/witness.gz deleted file mode 100644 index 30cc2834841..00000000000 Binary files a/test_programs/acir_artifacts/conditional_1/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_2/target/acir.gz b/test_programs/acir_artifacts/conditional_2/target/acir.gz deleted file mode 100644 index 8b56f25b2cf..00000000000 Binary files a/test_programs/acir_artifacts/conditional_2/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_2/target/witness.gz b/test_programs/acir_artifacts/conditional_2/target/witness.gz deleted file mode 100644 index 310c2cba8c4..00000000000 Binary files a/test_programs/acir_artifacts/conditional_2/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_regression_421/target/acir.gz b/test_programs/acir_artifacts/conditional_regression_421/target/acir.gz deleted file mode 100644 index bb060b5ebcc..00000000000 Binary files a/test_programs/acir_artifacts/conditional_regression_421/target/acir.gz and /dev/null differ diff --git 
a/test_programs/acir_artifacts/conditional_regression_421/target/witness.gz b/test_programs/acir_artifacts/conditional_regression_421/target/witness.gz deleted file mode 100644 index 025b2d9ea44..00000000000 Binary files a/test_programs/acir_artifacts/conditional_regression_421/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_regression_661/target/acir.gz b/test_programs/acir_artifacts/conditional_regression_661/target/acir.gz deleted file mode 100644 index 51e30b8bbc1..00000000000 Binary files a/test_programs/acir_artifacts/conditional_regression_661/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_regression_661/target/witness.gz b/test_programs/acir_artifacts/conditional_regression_661/target/witness.gz deleted file mode 100644 index 2683a9ba4ae..00000000000 Binary files a/test_programs/acir_artifacts/conditional_regression_661/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_regression_short_circuit/target/acir.gz b/test_programs/acir_artifacts/conditional_regression_short_circuit/target/acir.gz deleted file mode 100644 index 75f2bcfdb0b..00000000000 Binary files a/test_programs/acir_artifacts/conditional_regression_short_circuit/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_regression_short_circuit/target/witness.gz b/test_programs/acir_artifacts/conditional_regression_short_circuit/target/witness.gz deleted file mode 100644 index b2aa4a0b23f..00000000000 Binary files a/test_programs/acir_artifacts/conditional_regression_short_circuit/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_regression_underflow/target/acir.gz b/test_programs/acir_artifacts/conditional_regression_underflow/target/acir.gz deleted file mode 100644 index df762d9205e..00000000000 Binary files a/test_programs/acir_artifacts/conditional_regression_underflow/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/conditional_regression_underflow/target/witness.gz b/test_programs/acir_artifacts/conditional_regression_underflow/target/witness.gz deleted file mode 100644 index 939eb503b6f..00000000000 Binary files a/test_programs/acir_artifacts/conditional_regression_underflow/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/custom_entry/target/acir.gz b/test_programs/acir_artifacts/custom_entry/target/acir.gz deleted file mode 100644 index c4e7f86f219..00000000000 Binary files a/test_programs/acir_artifacts/custom_entry/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/custom_entry/target/witness.gz b/test_programs/acir_artifacts/custom_entry/target/witness.gz deleted file mode 100644 index 16880cedea2..00000000000 Binary files a/test_programs/acir_artifacts/custom_entry/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/debug_logs/target/acir.gz b/test_programs/acir_artifacts/debug_logs/target/acir.gz deleted file mode 100644 index ea9187f4084..00000000000 Binary files a/test_programs/acir_artifacts/debug_logs/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/debug_logs/target/witness.gz b/test_programs/acir_artifacts/debug_logs/target/witness.gz deleted file mode 100644 index 3199dac0924..00000000000 Binary files a/test_programs/acir_artifacts/debug_logs/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/diamond_deps_0/target/acir.gz 
b/test_programs/acir_artifacts/diamond_deps_0/target/acir.gz deleted file mode 100644 index e73668fd86c..00000000000 Binary files a/test_programs/acir_artifacts/diamond_deps_0/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/diamond_deps_0/target/witness.gz b/test_programs/acir_artifacts/diamond_deps_0/target/witness.gz deleted file mode 100644 index d2a6bdba5c8..00000000000 Binary files a/test_programs/acir_artifacts/diamond_deps_0/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/distinct_keyword/target/acir.gz b/test_programs/acir_artifacts/distinct_keyword/target/acir.gz deleted file mode 100644 index b3411dc96a7..00000000000 Binary files a/test_programs/acir_artifacts/distinct_keyword/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/distinct_keyword/target/witness.gz b/test_programs/acir_artifacts/distinct_keyword/target/witness.gz deleted file mode 100644 index d79dfba9359..00000000000 Binary files a/test_programs/acir_artifacts/distinct_keyword/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/double_verify_proof/target/acir.gz b/test_programs/acir_artifacts/double_verify_proof/target/acir.gz deleted file mode 100644 index a2faad65143..00000000000 Binary files a/test_programs/acir_artifacts/double_verify_proof/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/double_verify_proof/target/witness.gz b/test_programs/acir_artifacts/double_verify_proof/target/witness.gz deleted file mode 100644 index 251984d6292..00000000000 Binary files a/test_programs/acir_artifacts/double_verify_proof/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/ecdsa_secp256k1/target/acir.gz b/test_programs/acir_artifacts/ecdsa_secp256k1/target/acir.gz deleted file mode 100644 index 9108d663e86..00000000000 Binary files a/test_programs/acir_artifacts/ecdsa_secp256k1/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/ecdsa_secp256k1/target/witness.gz b/test_programs/acir_artifacts/ecdsa_secp256k1/target/witness.gz deleted file mode 100644 index a094ba3246b..00000000000 Binary files a/test_programs/acir_artifacts/ecdsa_secp256k1/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/ecdsa_secp256r1/target/acir.gz b/test_programs/acir_artifacts/ecdsa_secp256r1/target/acir.gz deleted file mode 100644 index ec6bc2c73a0..00000000000 Binary files a/test_programs/acir_artifacts/ecdsa_secp256r1/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/ecdsa_secp256r1/target/witness.gz b/test_programs/acir_artifacts/ecdsa_secp256r1/target/witness.gz deleted file mode 100644 index 79d009caea4..00000000000 Binary files a/test_programs/acir_artifacts/ecdsa_secp256r1/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/eddsa/target/acir.gz b/test_programs/acir_artifacts/eddsa/target/acir.gz deleted file mode 100644 index b8577dbd3ac..00000000000 Binary files a/test_programs/acir_artifacts/eddsa/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/eddsa/target/witness.gz b/test_programs/acir_artifacts/eddsa/target/witness.gz deleted file mode 100644 index 7c125021d96..00000000000 Binary files a/test_programs/acir_artifacts/eddsa/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/field_attribute/target/acir.gz b/test_programs/acir_artifacts/field_attribute/target/acir.gz deleted file mode 100644 
index 9401237fd8c..00000000000 Binary files a/test_programs/acir_artifacts/field_attribute/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/field_attribute/target/witness.gz b/test_programs/acir_artifacts/field_attribute/target/witness.gz deleted file mode 100644 index 5f3c241de56..00000000000 Binary files a/test_programs/acir_artifacts/field_attribute/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/generics/target/acir.gz b/test_programs/acir_artifacts/generics/target/acir.gz deleted file mode 100644 index c9462cfa87f..00000000000 Binary files a/test_programs/acir_artifacts/generics/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/generics/target/witness.gz b/test_programs/acir_artifacts/generics/target/witness.gz deleted file mode 100644 index 4d120219b14..00000000000 Binary files a/test_programs/acir_artifacts/generics/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/global_consts/target/acir.gz b/test_programs/acir_artifacts/global_consts/target/acir.gz deleted file mode 100644 index 8b6a0d9db65..00000000000 Binary files a/test_programs/acir_artifacts/global_consts/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/global_consts/target/witness.gz b/test_programs/acir_artifacts/global_consts/target/witness.gz deleted file mode 100644 index 41fe927e809..00000000000 Binary files a/test_programs/acir_artifacts/global_consts/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/hash_to_field/target/acir.gz b/test_programs/acir_artifacts/hash_to_field/target/acir.gz deleted file mode 100644 index 9be98aef491..00000000000 Binary files a/test_programs/acir_artifacts/hash_to_field/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/hash_to_field/target/witness.gz b/test_programs/acir_artifacts/hash_to_field/target/witness.gz deleted file mode 100644 index 743d797096b..00000000000 Binary files a/test_programs/acir_artifacts/hash_to_field/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/higher_order_functions/target/acir.gz b/test_programs/acir_artifacts/higher_order_functions/target/acir.gz deleted file mode 100644 index eab354be13d..00000000000 Binary files a/test_programs/acir_artifacts/higher_order_functions/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/higher_order_functions/target/witness.gz b/test_programs/acir_artifacts/higher_order_functions/target/witness.gz deleted file mode 100644 index 329d15dfb17..00000000000 Binary files a/test_programs/acir_artifacts/higher_order_functions/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/if_else_chain/target/acir.gz b/test_programs/acir_artifacts/if_else_chain/target/acir.gz deleted file mode 100644 index 21cb3898a2d..00000000000 Binary files a/test_programs/acir_artifacts/if_else_chain/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/if_else_chain/target/witness.gz b/test_programs/acir_artifacts/if_else_chain/target/witness.gz deleted file mode 100644 index 4ab0b124e70..00000000000 Binary files a/test_programs/acir_artifacts/if_else_chain/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/import/target/acir.gz b/test_programs/acir_artifacts/import/target/acir.gz deleted file mode 100644 index ff6e6f6c394..00000000000 Binary files a/test_programs/acir_artifacts/import/target/acir.gz and /dev/null 
differ diff --git a/test_programs/acir_artifacts/import/target/witness.gz b/test_programs/acir_artifacts/import/target/witness.gz deleted file mode 100644 index 93c5b96bdf3..00000000000 Binary files a/test_programs/acir_artifacts/import/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/integer_array_indexing/target/acir.gz b/test_programs/acir_artifacts/integer_array_indexing/target/acir.gz deleted file mode 100644 index 1c4c50039eb..00000000000 Binary files a/test_programs/acir_artifacts/integer_array_indexing/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/integer_array_indexing/target/witness.gz b/test_programs/acir_artifacts/integer_array_indexing/target/witness.gz deleted file mode 100644 index b3d60e315ec..00000000000 Binary files a/test_programs/acir_artifacts/integer_array_indexing/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/keccak256/target/acir.gz b/test_programs/acir_artifacts/keccak256/target/acir.gz deleted file mode 100644 index cb74273e4d7..00000000000 Binary files a/test_programs/acir_artifacts/keccak256/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/keccak256/target/witness.gz b/test_programs/acir_artifacts/keccak256/target/witness.gz deleted file mode 100644 index 42bcc0ccbd1..00000000000 Binary files a/test_programs/acir_artifacts/keccak256/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/main_bool_arg/target/acir.gz b/test_programs/acir_artifacts/main_bool_arg/target/acir.gz deleted file mode 100644 index d054abe1df0..00000000000 Binary files a/test_programs/acir_artifacts/main_bool_arg/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/main_bool_arg/target/witness.gz b/test_programs/acir_artifacts/main_bool_arg/target/witness.gz deleted file mode 100644 index 80a779d4464..00000000000 Binary files a/test_programs/acir_artifacts/main_bool_arg/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/merkle_insert/target/acir.gz b/test_programs/acir_artifacts/merkle_insert/target/acir.gz deleted file mode 100644 index 75b1cbb5072..00000000000 Binary files a/test_programs/acir_artifacts/merkle_insert/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/merkle_insert/target/witness.gz b/test_programs/acir_artifacts/merkle_insert/target/witness.gz deleted file mode 100644 index 6351d29dd11..00000000000 Binary files a/test_programs/acir_artifacts/merkle_insert/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/mock_oracle/target/acir.gz b/test_programs/acir_artifacts/mock_oracle/target/acir.gz deleted file mode 100644 index c45cd40f28e..00000000000 Binary files a/test_programs/acir_artifacts/mock_oracle/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/mock_oracle/target/witness.gz b/test_programs/acir_artifacts/mock_oracle/target/witness.gz deleted file mode 100644 index 4e90289d5e1..00000000000 Binary files a/test_programs/acir_artifacts/mock_oracle/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/modules/target/acir.gz b/test_programs/acir_artifacts/modules/target/acir.gz deleted file mode 100644 index 05b5e23ae3e..00000000000 Binary files a/test_programs/acir_artifacts/modules/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/modules/target/witness.gz b/test_programs/acir_artifacts/modules/target/witness.gz deleted file mode 
100644 index 58c7b52ef85..00000000000 Binary files a/test_programs/acir_artifacts/modules/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/modules_more/target/acir.gz b/test_programs/acir_artifacts/modules_more/target/acir.gz deleted file mode 100644 index ff6e6f6c394..00000000000 Binary files a/test_programs/acir_artifacts/modules_more/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/modules_more/target/witness.gz b/test_programs/acir_artifacts/modules_more/target/witness.gz deleted file mode 100644 index 6f12eac202f..00000000000 Binary files a/test_programs/acir_artifacts/modules_more/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/modulus/target/acir.gz b/test_programs/acir_artifacts/modulus/target/acir.gz deleted file mode 100644 index a99124a5e3a..00000000000 Binary files a/test_programs/acir_artifacts/modulus/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/modulus/target/witness.gz b/test_programs/acir_artifacts/modulus/target/witness.gz deleted file mode 100644 index 02931c632ff..00000000000 Binary files a/test_programs/acir_artifacts/modulus/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/nested_array_dynamic/target/acir.gz b/test_programs/acir_artifacts/nested_array_dynamic/target/acir.gz deleted file mode 100644 index 762ace26416..00000000000 Binary files a/test_programs/acir_artifacts/nested_array_dynamic/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/nested_array_dynamic/target/witness.gz b/test_programs/acir_artifacts/nested_array_dynamic/target/witness.gz deleted file mode 100644 index e469a0ee7a7..00000000000 Binary files a/test_programs/acir_artifacts/nested_array_dynamic/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/nested_arrays_from_brillig/target/acir.gz b/test_programs/acir_artifacts/nested_arrays_from_brillig/target/acir.gz deleted file mode 100644 index 270cfcaf53c..00000000000 Binary files a/test_programs/acir_artifacts/nested_arrays_from_brillig/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/nested_arrays_from_brillig/target/witness.gz b/test_programs/acir_artifacts/nested_arrays_from_brillig/target/witness.gz deleted file mode 100644 index a3161ac44f9..00000000000 Binary files a/test_programs/acir_artifacts/nested_arrays_from_brillig/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/nested_slice_dynamic/target/acir.gz b/test_programs/acir_artifacts/nested_slice_dynamic/target/acir.gz deleted file mode 100644 index 3db0a495a9d..00000000000 Binary files a/test_programs/acir_artifacts/nested_slice_dynamic/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/nested_slice_dynamic/target/witness.gz b/test_programs/acir_artifacts/nested_slice_dynamic/target/witness.gz deleted file mode 100644 index 9c9e80efe8f..00000000000 Binary files a/test_programs/acir_artifacts/nested_slice_dynamic/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/pedersen_check/target/acir.gz b/test_programs/acir_artifacts/pedersen_check/target/acir.gz deleted file mode 100644 index 02c9f32e3c5..00000000000 Binary files a/test_programs/acir_artifacts/pedersen_check/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/pedersen_check/target/witness.gz b/test_programs/acir_artifacts/pedersen_check/target/witness.gz deleted file mode 100644 
index caf34e2b734..00000000000 Binary files a/test_programs/acir_artifacts/pedersen_check/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/poseidon_bn254_hash/target/acir.gz b/test_programs/acir_artifacts/poseidon_bn254_hash/target/acir.gz deleted file mode 100644 index 0327f600884..00000000000 Binary files a/test_programs/acir_artifacts/poseidon_bn254_hash/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/poseidon_bn254_hash/target/witness.gz b/test_programs/acir_artifacts/poseidon_bn254_hash/target/witness.gz deleted file mode 100644 index b3f3f1a3b24..00000000000 Binary files a/test_programs/acir_artifacts/poseidon_bn254_hash/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/poseidonsponge_x5_254/target/acir.gz b/test_programs/acir_artifacts/poseidonsponge_x5_254/target/acir.gz deleted file mode 100644 index dc260ce2aa7..00000000000 Binary files a/test_programs/acir_artifacts/poseidonsponge_x5_254/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/poseidonsponge_x5_254/target/witness.gz b/test_programs/acir_artifacts/poseidonsponge_x5_254/target/witness.gz deleted file mode 100644 index f61ba4ec0cf..00000000000 Binary files a/test_programs/acir_artifacts/poseidonsponge_x5_254/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/pred_eq/target/acir.gz b/test_programs/acir_artifacts/pred_eq/target/acir.gz deleted file mode 100644 index 032b36d1629..00000000000 Binary files a/test_programs/acir_artifacts/pred_eq/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/pred_eq/target/witness.gz b/test_programs/acir_artifacts/pred_eq/target/witness.gz deleted file mode 100644 index f1ea0249fe9..00000000000 Binary files a/test_programs/acir_artifacts/pred_eq/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/references/target/acir.gz b/test_programs/acir_artifacts/references/target/acir.gz deleted file mode 100644 index 0668e2eca25..00000000000 Binary files a/test_programs/acir_artifacts/references/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/references/target/witness.gz b/test_programs/acir_artifacts/references/target/witness.gz deleted file mode 100644 index bf62ea672eb..00000000000 Binary files a/test_programs/acir_artifacts/references/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/regression/target/acir.gz b/test_programs/acir_artifacts/regression/target/acir.gz deleted file mode 100644 index f92aa2603b4..00000000000 Binary files a/test_programs/acir_artifacts/regression/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/regression/target/witness.gz b/test_programs/acir_artifacts/regression/target/witness.gz deleted file mode 100644 index dc1f8e0e3f7..00000000000 Binary files a/test_programs/acir_artifacts/regression/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/regression_2854/target/acir.gz b/test_programs/acir_artifacts/regression_2854/target/acir.gz deleted file mode 100644 index 6f4ffaa488f..00000000000 Binary files a/test_programs/acir_artifacts/regression_2854/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/regression_2854/target/witness.gz b/test_programs/acir_artifacts/regression_2854/target/witness.gz deleted file mode 100644 index c0b900e8119..00000000000 Binary files 
a/test_programs/acir_artifacts/regression_2854/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/regression_mem_op_predicate/target/acir.gz b/test_programs/acir_artifacts/regression_mem_op_predicate/target/acir.gz deleted file mode 100644 index a408771e244..00000000000 Binary files a/test_programs/acir_artifacts/regression_mem_op_predicate/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/regression_mem_op_predicate/target/witness.gz b/test_programs/acir_artifacts/regression_mem_op_predicate/target/witness.gz deleted file mode 100644 index 2d0e8a4f685..00000000000 Binary files a/test_programs/acir_artifacts/regression_mem_op_predicate/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/regression_method_cannot_be_found/target/acir.gz b/test_programs/acir_artifacts/regression_method_cannot_be_found/target/acir.gz deleted file mode 100644 index f29fbef8d3f..00000000000 Binary files a/test_programs/acir_artifacts/regression_method_cannot_be_found/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/regression_method_cannot_be_found/target/witness.gz b/test_programs/acir_artifacts/regression_method_cannot_be_found/target/witness.gz deleted file mode 100644 index 4e90289d5e1..00000000000 Binary files a/test_programs/acir_artifacts/regression_method_cannot_be_found/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/scalar_mul/target/acir.gz b/test_programs/acir_artifacts/scalar_mul/target/acir.gz deleted file mode 100644 index 0bf8db7df70..00000000000 Binary files a/test_programs/acir_artifacts/scalar_mul/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/scalar_mul/target/witness.gz b/test_programs/acir_artifacts/scalar_mul/target/witness.gz deleted file mode 100644 index 637e61f60de..00000000000 Binary files a/test_programs/acir_artifacts/scalar_mul/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/schnorr/target/acir.gz b/test_programs/acir_artifacts/schnorr/target/acir.gz deleted file mode 100644 index 047e59422ee..00000000000 Binary files a/test_programs/acir_artifacts/schnorr/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/schnorr/target/witness.gz b/test_programs/acir_artifacts/schnorr/target/witness.gz deleted file mode 100644 index 91bf1aeb7ad..00000000000 Binary files a/test_programs/acir_artifacts/schnorr/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/sha256/target/acir.gz b/test_programs/acir_artifacts/sha256/target/acir.gz deleted file mode 100644 index a2de8064bb5..00000000000 Binary files a/test_programs/acir_artifacts/sha256/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/sha256/target/witness.gz b/test_programs/acir_artifacts/sha256/target/witness.gz deleted file mode 100644 index d5762dfc7d5..00000000000 Binary files a/test_programs/acir_artifacts/sha256/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/sha2_byte/target/acir.gz b/test_programs/acir_artifacts/sha2_byte/target/acir.gz deleted file mode 100644 index 571fde25f2b..00000000000 Binary files a/test_programs/acir_artifacts/sha2_byte/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/sha2_byte/target/witness.gz b/test_programs/acir_artifacts/sha2_byte/target/witness.gz deleted file mode 100644 index 08ca373be7e..00000000000 Binary files 
a/test_programs/acir_artifacts/sha2_byte/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/signed_arithmetic/target/acir.gz b/test_programs/acir_artifacts/signed_arithmetic/target/acir.gz deleted file mode 100644 index 82747c17417..00000000000 Binary files a/test_programs/acir_artifacts/signed_arithmetic/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/signed_arithmetic/target/witness.gz b/test_programs/acir_artifacts/signed_arithmetic/target/witness.gz deleted file mode 100644 index 6627fd7d53f..00000000000 Binary files a/test_programs/acir_artifacts/signed_arithmetic/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/signed_division/target/acir.gz b/test_programs/acir_artifacts/signed_division/target/acir.gz deleted file mode 100644 index 39a17a5a529..00000000000 Binary files a/test_programs/acir_artifacts/signed_division/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/signed_division/target/witness.gz b/test_programs/acir_artifacts/signed_division/target/witness.gz deleted file mode 100644 index a35e3011ee6..00000000000 Binary files a/test_programs/acir_artifacts/signed_division/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_2d_array/target/acir.gz b/test_programs/acir_artifacts/simple_2d_array/target/acir.gz deleted file mode 100644 index 59b62b9c99d..00000000000 Binary files a/test_programs/acir_artifacts/simple_2d_array/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_2d_array/target/witness.gz b/test_programs/acir_artifacts/simple_2d_array/target/witness.gz deleted file mode 100644 index 321a76492da..00000000000 Binary files a/test_programs/acir_artifacts/simple_2d_array/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_add_and_ret_arr/target/acir.gz b/test_programs/acir_artifacts/simple_add_and_ret_arr/target/acir.gz deleted file mode 100644 index d7ec1ebc477..00000000000 Binary files a/test_programs/acir_artifacts/simple_add_and_ret_arr/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_add_and_ret_arr/target/witness.gz b/test_programs/acir_artifacts/simple_add_and_ret_arr/target/witness.gz deleted file mode 100644 index 35e05b7622b..00000000000 Binary files a/test_programs/acir_artifacts/simple_add_and_ret_arr/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_bitwise/target/acir.gz b/test_programs/acir_artifacts/simple_bitwise/target/acir.gz deleted file mode 100644 index 84fc5cc5de2..00000000000 Binary files a/test_programs/acir_artifacts/simple_bitwise/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_bitwise/target/witness.gz b/test_programs/acir_artifacts/simple_bitwise/target/witness.gz deleted file mode 100644 index 2afa317a120..00000000000 Binary files a/test_programs/acir_artifacts/simple_bitwise/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_comparison/target/acir.gz b/test_programs/acir_artifacts/simple_comparison/target/acir.gz deleted file mode 100644 index 452780c4d30..00000000000 Binary files a/test_programs/acir_artifacts/simple_comparison/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_comparison/target/witness.gz b/test_programs/acir_artifacts/simple_comparison/target/witness.gz deleted file mode 100644 index 5896584fa8a..00000000000 Binary files 
a/test_programs/acir_artifacts/simple_comparison/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_mut/target/acir.gz b/test_programs/acir_artifacts/simple_mut/target/acir.gz deleted file mode 100644 index 9338b74aabd..00000000000 Binary files a/test_programs/acir_artifacts/simple_mut/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_mut/target/witness.gz b/test_programs/acir_artifacts/simple_mut/target/witness.gz deleted file mode 100644 index 9e7641cea1e..00000000000 Binary files a/test_programs/acir_artifacts/simple_mut/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_not/target/acir.gz b/test_programs/acir_artifacts/simple_not/target/acir.gz deleted file mode 100644 index a47defb9fe6..00000000000 Binary files a/test_programs/acir_artifacts/simple_not/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_not/target/witness.gz b/test_programs/acir_artifacts/simple_not/target/witness.gz deleted file mode 100644 index a8e277ea795..00000000000 Binary files a/test_programs/acir_artifacts/simple_not/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_print/target/acir.gz b/test_programs/acir_artifacts/simple_print/target/acir.gz deleted file mode 100644 index 5a2c524f26b..00000000000 Binary files a/test_programs/acir_artifacts/simple_print/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_print/target/witness.gz b/test_programs/acir_artifacts/simple_print/target/witness.gz deleted file mode 100644 index 35e05b7622b..00000000000 Binary files a/test_programs/acir_artifacts/simple_print/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_program_addition/target/acir.gz b/test_programs/acir_artifacts/simple_program_addition/target/acir.gz deleted file mode 100644 index d7ec1ebc477..00000000000 Binary files a/test_programs/acir_artifacts/simple_program_addition/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_program_addition/target/witness.gz b/test_programs/acir_artifacts/simple_program_addition/target/witness.gz deleted file mode 100644 index 94ea8c8f2b1..00000000000 Binary files a/test_programs/acir_artifacts/simple_program_addition/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_radix/target/acir.gz b/test_programs/acir_artifacts/simple_radix/target/acir.gz deleted file mode 100644 index 8c5fd3c0bd4..00000000000 Binary files a/test_programs/acir_artifacts/simple_radix/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_radix/target/witness.gz b/test_programs/acir_artifacts/simple_radix/target/witness.gz deleted file mode 100644 index 4b051d62ee2..00000000000 Binary files a/test_programs/acir_artifacts/simple_radix/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_shield/target/acir.gz b/test_programs/acir_artifacts/simple_shield/target/acir.gz deleted file mode 100644 index 1916c475919..00000000000 Binary files a/test_programs/acir_artifacts/simple_shield/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_shield/target/witness.gz b/test_programs/acir_artifacts/simple_shield/target/witness.gz deleted file mode 100644 index 171330f5142..00000000000 Binary files a/test_programs/acir_artifacts/simple_shield/target/witness.gz and /dev/null differ diff --git 
a/test_programs/acir_artifacts/simple_shift_left_right/target/acir.gz b/test_programs/acir_artifacts/simple_shift_left_right/target/acir.gz deleted file mode 100644 index bae747f46c6..00000000000 Binary files a/test_programs/acir_artifacts/simple_shift_left_right/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/simple_shift_left_right/target/witness.gz b/test_programs/acir_artifacts/simple_shift_left_right/target/witness.gz deleted file mode 100644 index 6bc0b91e147..00000000000 Binary files a/test_programs/acir_artifacts/simple_shift_left_right/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/slice_dynamic_index/target/acir.gz b/test_programs/acir_artifacts/slice_dynamic_index/target/acir.gz deleted file mode 100644 index 1bbc8ea075c..00000000000 Binary files a/test_programs/acir_artifacts/slice_dynamic_index/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/slice_dynamic_index/target/witness.gz b/test_programs/acir_artifacts/slice_dynamic_index/target/witness.gz deleted file mode 100644 index 8c7e5f4fb95..00000000000 Binary files a/test_programs/acir_artifacts/slice_dynamic_index/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/slice_struct_field/target/acir.gz b/test_programs/acir_artifacts/slice_struct_field/target/acir.gz deleted file mode 100644 index 6b1f189a331..00000000000 Binary files a/test_programs/acir_artifacts/slice_struct_field/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/slice_struct_field/target/witness.gz b/test_programs/acir_artifacts/slice_struct_field/target/witness.gz deleted file mode 100644 index f404e58ade3..00000000000 Binary files a/test_programs/acir_artifacts/slice_struct_field/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/slices/target/acir.gz b/test_programs/acir_artifacts/slices/target/acir.gz deleted file mode 100644 index 7a053fcb196..00000000000 Binary files a/test_programs/acir_artifacts/slices/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/slices/target/witness.gz b/test_programs/acir_artifacts/slices/target/witness.gz deleted file mode 100644 index 359b2f75601..00000000000 Binary files a/test_programs/acir_artifacts/slices/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/strings/target/acir.gz b/test_programs/acir_artifacts/strings/target/acir.gz deleted file mode 100644 index 424f4bd2d0f..00000000000 Binary files a/test_programs/acir_artifacts/strings/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/strings/target/witness.gz b/test_programs/acir_artifacts/strings/target/witness.gz deleted file mode 100644 index 72a93aabbfe..00000000000 Binary files a/test_programs/acir_artifacts/strings/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/struct/target/acir.gz b/test_programs/acir_artifacts/struct/target/acir.gz deleted file mode 100644 index e9de8adcb38..00000000000 Binary files a/test_programs/acir_artifacts/struct/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/struct/target/witness.gz b/test_programs/acir_artifacts/struct/target/witness.gz deleted file mode 100644 index a8e277ea795..00000000000 Binary files a/test_programs/acir_artifacts/struct/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/struct_array_inputs/target/acir.gz 
b/test_programs/acir_artifacts/struct_array_inputs/target/acir.gz deleted file mode 100644 index f66ed17a0cf..00000000000 Binary files a/test_programs/acir_artifacts/struct_array_inputs/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/struct_array_inputs/target/witness.gz b/test_programs/acir_artifacts/struct_array_inputs/target/witness.gz deleted file mode 100644 index 82307dcb96e..00000000000 Binary files a/test_programs/acir_artifacts/struct_array_inputs/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/struct_fields_ordering/target/acir.gz b/test_programs/acir_artifacts/struct_fields_ordering/target/acir.gz deleted file mode 100644 index 8ddb62e8799..00000000000 Binary files a/test_programs/acir_artifacts/struct_fields_ordering/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/struct_fields_ordering/target/witness.gz b/test_programs/acir_artifacts/struct_fields_ordering/target/witness.gz deleted file mode 100644 index e2eb3145306..00000000000 Binary files a/test_programs/acir_artifacts/struct_fields_ordering/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/struct_inputs/target/acir.gz b/test_programs/acir_artifacts/struct_inputs/target/acir.gz deleted file mode 100644 index b658b4111f6..00000000000 Binary files a/test_programs/acir_artifacts/struct_inputs/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/struct_inputs/target/witness.gz b/test_programs/acir_artifacts/struct_inputs/target/witness.gz deleted file mode 100644 index b52a8644265..00000000000 Binary files a/test_programs/acir_artifacts/struct_inputs/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/submodules/target/acir.gz b/test_programs/acir_artifacts/submodules/target/acir.gz deleted file mode 100644 index 697832be207..00000000000 Binary files a/test_programs/acir_artifacts/submodules/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/submodules/target/witness.gz b/test_programs/acir_artifacts/submodules/target/witness.gz deleted file mode 100644 index 10cffba7141..00000000000 Binary files a/test_programs/acir_artifacts/submodules/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/to_be_bytes/target/acir.gz b/test_programs/acir_artifacts/to_be_bytes/target/acir.gz deleted file mode 100644 index df6294bc970..00000000000 Binary files a/test_programs/acir_artifacts/to_be_bytes/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/to_be_bytes/target/witness.gz b/test_programs/acir_artifacts/to_be_bytes/target/witness.gz deleted file mode 100644 index b2ac9601bae..00000000000 Binary files a/test_programs/acir_artifacts/to_be_bytes/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/to_bytes_consistent/target/acir.gz b/test_programs/acir_artifacts/to_bytes_consistent/target/acir.gz deleted file mode 100644 index 2371186e8fc..00000000000 Binary files a/test_programs/acir_artifacts/to_bytes_consistent/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/to_bytes_consistent/target/witness.gz b/test_programs/acir_artifacts/to_bytes_consistent/target/witness.gz deleted file mode 100644 index 610802628c6..00000000000 Binary files a/test_programs/acir_artifacts/to_bytes_consistent/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/to_bytes_integration/target/acir.gz 
b/test_programs/acir_artifacts/to_bytes_integration/target/acir.gz deleted file mode 100644 index 4deef489b9c..00000000000 Binary files a/test_programs/acir_artifacts/to_bytes_integration/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/to_bytes_integration/target/witness.gz b/test_programs/acir_artifacts/to_bytes_integration/target/witness.gz deleted file mode 100644 index 71d29209eba..00000000000 Binary files a/test_programs/acir_artifacts/to_bytes_integration/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/to_le_bytes/target/acir.gz b/test_programs/acir_artifacts/to_le_bytes/target/acir.gz deleted file mode 100644 index 02d2bd105f1..00000000000 Binary files a/test_programs/acir_artifacts/to_le_bytes/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/to_le_bytes/target/witness.gz b/test_programs/acir_artifacts/to_le_bytes/target/witness.gz deleted file mode 100644 index 610802628c6..00000000000 Binary files a/test_programs/acir_artifacts/to_le_bytes/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/trait_as_return_type/target/acir.gz b/test_programs/acir_artifacts/trait_as_return_type/target/acir.gz deleted file mode 100644 index 1d34f5becaa..00000000000 Binary files a/test_programs/acir_artifacts/trait_as_return_type/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/trait_as_return_type/target/witness.gz b/test_programs/acir_artifacts/trait_as_return_type/target/witness.gz deleted file mode 100644 index c3b8e758662..00000000000 Binary files a/test_programs/acir_artifacts/trait_as_return_type/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/trait_impl_base_type/target/acir.gz b/test_programs/acir_artifacts/trait_impl_base_type/target/acir.gz deleted file mode 100644 index 531a1baf42e..00000000000 Binary files a/test_programs/acir_artifacts/trait_impl_base_type/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/trait_impl_base_type/target/witness.gz b/test_programs/acir_artifacts/trait_impl_base_type/target/witness.gz deleted file mode 100644 index c3b8e758662..00000000000 Binary files a/test_programs/acir_artifacts/trait_impl_base_type/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/traits_in_crates_1/target/acir.gz b/test_programs/acir_artifacts/traits_in_crates_1/target/acir.gz deleted file mode 100644 index 6eb630ce2ff..00000000000 Binary files a/test_programs/acir_artifacts/traits_in_crates_1/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/traits_in_crates_1/target/witness.gz b/test_programs/acir_artifacts/traits_in_crates_1/target/witness.gz deleted file mode 100644 index 60fc9526465..00000000000 Binary files a/test_programs/acir_artifacts/traits_in_crates_1/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/traits_in_crates_2/target/acir.gz b/test_programs/acir_artifacts/traits_in_crates_2/target/acir.gz deleted file mode 100644 index 6eb630ce2ff..00000000000 Binary files a/test_programs/acir_artifacts/traits_in_crates_2/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/traits_in_crates_2/target/witness.gz b/test_programs/acir_artifacts/traits_in_crates_2/target/witness.gz deleted file mode 100644 index 60fc9526465..00000000000 Binary files a/test_programs/acir_artifacts/traits_in_crates_2/target/witness.gz and /dev/null differ diff --git 
a/test_programs/acir_artifacts/tuple_inputs/target/acir.gz b/test_programs/acir_artifacts/tuple_inputs/target/acir.gz deleted file mode 100644 index 79ae7dccb3d..00000000000 Binary files a/test_programs/acir_artifacts/tuple_inputs/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/tuple_inputs/target/witness.gz b/test_programs/acir_artifacts/tuple_inputs/target/witness.gz deleted file mode 100644 index 0eb0d6f09f6..00000000000 Binary files a/test_programs/acir_artifacts/tuple_inputs/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/tuples/target/acir.gz b/test_programs/acir_artifacts/tuples/target/acir.gz deleted file mode 100644 index a053f565e5b..00000000000 Binary files a/test_programs/acir_artifacts/tuples/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/tuples/target/witness.gz b/test_programs/acir_artifacts/tuples/target/witness.gz deleted file mode 100644 index 10cffba7141..00000000000 Binary files a/test_programs/acir_artifacts/tuples/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/type_aliases/target/acir.gz b/test_programs/acir_artifacts/type_aliases/target/acir.gz deleted file mode 100644 index 7855747826f..00000000000 Binary files a/test_programs/acir_artifacts/type_aliases/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/type_aliases/target/witness.gz b/test_programs/acir_artifacts/type_aliases/target/witness.gz deleted file mode 100644 index 8137a9d31c5..00000000000 Binary files a/test_programs/acir_artifacts/type_aliases/target/witness.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/xor/target/acir.gz b/test_programs/acir_artifacts/xor/target/acir.gz deleted file mode 100644 index eda28c748c5..00000000000 Binary files a/test_programs/acir_artifacts/xor/target/acir.gz and /dev/null differ diff --git a/test_programs/acir_artifacts/xor/target/witness.gz b/test_programs/acir_artifacts/xor/target/witness.gz deleted file mode 100644 index 444c8e01cd1..00000000000 Binary files a/test_programs/acir_artifacts/xor/target/witness.gz and /dev/null differ diff --git a/test_programs/compile_failure/raw_string_huge/Nargo.toml b/test_programs/compile_failure/raw_string_huge/Nargo.toml new file mode 100644 index 00000000000..ecef0e2a07c --- /dev/null +++ b/test_programs/compile_failure/raw_string_huge/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "raw_string_huge" +type = "bin" +authors = [""] +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/raw_string_huge/src/main.nr b/test_programs/compile_failure/raw_string_huge/src/main.nr new file mode 100644 index 00000000000..7bca9942e7a --- /dev/null +++ b/test_programs/compile_failure/raw_string_huge/src/main.nr @@ -0,0 +1,4 @@ +fn main() { + // Fails because of too many hashes for raw string (256+ hashes) + let _a = r##############################################################################################################################################################################################################################################################################"hello"##############################################################################################################################################################################################################################################################################; +} diff --git a/test_programs/compile_success_empty/raw_string/Nargo.toml 
b/test_programs/compile_success_empty/raw_string/Nargo.toml new file mode 100644 index 00000000000..81147e65f34 --- /dev/null +++ b/test_programs/compile_success_empty/raw_string/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "raw_string" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_empty/raw_string/src/main.nr b/test_programs/compile_success_empty/raw_string/src/main.nr new file mode 100644 index 00000000000..ad8dfe82ae5 --- /dev/null +++ b/test_programs/compile_success_empty/raw_string/src/main.nr @@ -0,0 +1,13 @@ +global D = r#####"Hello "world""#####; + +fn main() { + let a = "Hello \"world\""; + let b = r#"Hello "world""#; + let c = r##"Hello "world""##; + assert(a == b); + assert(b == c); + assert(c == D); + let x = r#"Hello World"#; + let y = r"Hello World"; + assert(x == y); +} diff --git a/test_programs/execution_success/brillig_cow/Nargo.toml b/test_programs/execution_success/brillig_cow/Nargo.toml new file mode 100644 index 00000000000..d191eb53ddf --- /dev/null +++ b/test_programs/execution_success/brillig_cow/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_cow" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/brillig_cow/Prover.toml b/test_programs/execution_success/brillig_cow/Prover.toml new file mode 100644 index 00000000000..6533d218b15 --- /dev/null +++ b/test_programs/execution_success/brillig_cow/Prover.toml @@ -0,0 +1,7 @@ +original = [0, 1, 2, 3, 4] +index = 2 + +[expected_result] +original = [0, 1, 2, 3, 4] +modified_once = [0, 1, 27, 3, 4] +modified_twice = [0, 1, 27, 27, 4] diff --git a/test_programs/execution_success/brillig_cow/src/main.nr b/test_programs/execution_success/brillig_cow/src/main.nr new file mode 100644 index 00000000000..7d847e085fe --- /dev/null +++ b/test_programs/execution_success/brillig_cow/src/main.nr @@ -0,0 +1,54 @@ +// Tests the copy on write optimization for arrays. We look for cases where we are modifying an array in place when we shouldn't. 
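The invariant this test encodes can be stated outside Noir as well. Below is a minimal illustrative Rust sketch (the function names are hypothetical and the values mirror the Prover.toml above): writing through a copy of an array must never be observable through the original binding, however lazily a copy-on-write backend duplicates the underlying buffer.

// Illustrative sketch only; `modify_once` is a hypothetical helper.
fn modify_once(original: [u64; 5], index: usize) -> ([u64; 5], [u64; 5]) {
    let mut copy = original; // logically a fresh copy of `original`
    copy[index] = 27;        // a copy-on-write backend may defer the real copy until this write
    (original, copy)
}

fn main() {
    let original = [0, 1, 2, 3, 4];
    let (untouched, modified) = modify_once(original, 2);
    assert_eq!(untouched, [0, 1, 2, 3, 4]); // the original must be unaffected
    assert_eq!(modified, [0, 1, 27, 3, 4]);
}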
+ +global ARRAY_SIZE = 5; + +struct ExecutionResult { + original: [Field; ARRAY_SIZE], + modified_once: [Field; ARRAY_SIZE], + modified_twice: [Field; ARRAY_SIZE], +} + +impl ExecutionResult { + fn is_equal(self, other: ExecutionResult) -> bool { + (self.original == other.original) & + (self.modified_once == other.modified_once) & + (self.modified_twice == other.modified_twice) + } +} + +fn modify_in_inlined_constrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { + let mut modified = original; + + modified[index] = 27; + + let modified_once = modified; + + modified[index+1] = 27; + + ExecutionResult { + original, + modified_once, + modified_twice: modified + } +} + +unconstrained fn modify_in_unconstrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { + let mut modified = original; + + modified[index] = 27; + + let modified_once = modified; + + modified[index+1] = 27; + + ExecutionResult { + original, + modified_once, + modified_twice: modified + } +} + +unconstrained fn main(original: [Field; ARRAY_SIZE], index: u64, expected_result: ExecutionResult) { + assert(expected_result.is_equal(modify_in_unconstrained(original, index))); + assert(expected_result.is_equal(modify_in_inlined_constrained(original, index))); +} diff --git a/test_programs/execution_success/brillig_oracle/src/main.nr b/test_programs/execution_success/brillig_oracle/src/main.nr index 86cf6ff1498..490b7b605e3 100644 --- a/test_programs/execution_success/brillig_oracle/src/main.nr +++ b/test_programs/execution_success/brillig_oracle/src/main.nr @@ -1,25 +1,41 @@ use dep::std::slice; +use dep::std::test::OracleMock; + // Tests oracle usage in brillig/unconstrained functions fn main(x: Field) { - get_number_sequence_wrapper(20); + let size = 20; + // TODO: Add a method along the lines of `(0..size).to_array()`. + let mut mock_oracle_response = [0; 20]; + // TODO: Add an `array.reverse()` method. + let mut reversed_mock_oracle_response = [0; 20]; + for i in 0..size { + mock_oracle_response[i] = i; + reversed_mock_oracle_response[19 - i] = i; + } + + // TODO: this method of returning a slice feels hacky. 
+ let _ = OracleMock::mock("get_number_sequence").with_params(size).returns((20, mock_oracle_response)); + let _ = OracleMock::mock("get_reverse_number_sequence").with_params(size).returns((20, reversed_mock_oracle_response)); + + get_number_sequence_wrapper(size); } -// TODO(#1911): This function does not need to be an oracle but acts -// as a useful test while we finalize code generation for slices in Brillig + +// Define oracle functions which we have mocked above #[oracle(get_number_sequence)] unconstrained fn get_number_sequence(_size: Field) -> [Field] {} -// TODO(#1911) + #[oracle(get_reverse_number_sequence)] unconstrained fn get_reverse_number_sequence(_size: Field) -> [Field] {} unconstrained fn get_number_sequence_wrapper(size: Field) { let slice = get_number_sequence(size); - for i in 0..19 as u32 { + for i in 0..20 as u32 { assert(slice[i] == i as Field); } let reversed_slice = get_reverse_number_sequence(size); // Regression test that we have not overwritten memory - for i in 0..19 as u32 { + for i in 0..20 as u32 { assert(slice[i] == reversed_slice[19 - i]); } } diff --git a/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml b/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml new file mode 100644 index 00000000000..071254266f4 --- /dev/null +++ b/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_set_slice_of_slice" +type = "bin" +authors = [""] +compiler_version = ">=0.19.4" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr b/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr new file mode 100644 index 00000000000..c0e9c7d172f --- /dev/null +++ b/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr @@ -0,0 +1,51 @@ +struct Property +{ + key : [u8], + value : [u8], +} + +struct JSON +{ + doc : [Property] +} + +unconstrained fn slice_eq(self: [u8], other: [u8]) -> bool { + let mut equal = true; + for i in 0..self.len() { + if self[i] != other[i] { + equal = false; + } + } + equal +} + +// This test acts a regression for issue #3476 +unconstrained fn main() { + let mut json = JSON { doc: [] }; + let mut prop = Property { key: [], value:[] }; + + let other_prop = Property { key: [0, 1, 2], value:[10] }; + json.doc = json.doc.push_back(other_prop); + + for i in 0..3 { + prop.key = prop.key.push_back(i as u8); + } + prop.value = prop.value.push_back(5); + + // add property to json or replace existing + let len : Field = json.doc.len(); + let mut found = false; + for i in 0..len + { + if (!found) + { + if (slice_eq(prop.key, json.doc[i].key)) + { + json.doc[i].value = prop.value; + found = true; + } + } + } + assert(found == true); + assert(json.doc[0].value[0] == 5); +} \ No newline at end of file diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr index 2e5c0122dfb..374d2ba4c26 100644 --- a/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -123,13 +123,13 @@ fn dynamic_slice_merge_if(mut slice: [Field], x: Field) { let (first_elem, rest_of_slice) = popped_slice.pop_front(); assert(first_elem == 12); assert(rest_of_slice.len() == 6); - // TODO(#2462): SliceInsert and SliceRemove with a dynamic index are not yet implemented in ACIR gen - slice = rest_of_slice.insert(2, 20); + + slice = 
rest_of_slice.insert(x - 2, 20); assert(slice[2] == 20); assert(slice[6] == 30); assert(slice.len() == 7); - // TODO(#2462): SliceInsert and SliceRemove with a dynamic index are not yet implemented in ACIR gen - let (removed_slice, removed_elem) = slice.remove(3); + + let (removed_slice, removed_elem) = slice.remove(x - 1); // The deconstructed tuple assigns to the slice but is not seen outside of the if statement // without a direct assignment slice = removed_slice; diff --git a/test_programs/execution_success/slice_struct_field/src/main.nr b/test_programs/execution_success/slice_struct_field/src/main.nr index c00fdf85180..a5b971ada4b 100644 --- a/test_programs/execution_success/slice_struct_field/src/main.nr +++ b/test_programs/execution_success/slice_struct_field/src/main.nr @@ -23,12 +23,12 @@ fn main(y: pub Field) { let foo_two = Foo { a: 4, b: b_two, bar: Bar { inner: [103, 104, 105] } }; let foo_three = Foo { a: 7, b: [8, 9, 22], bar: Bar { inner: [106, 107, 108] } }; - let foo_four = Foo { a: 10, b: [11, 12, 23], bar: Bar { inner: [109, 110, 111] } }; + let mut foo_four = Foo { a: 10, b: [11, 12, 23], bar: Bar { inner: [109, 110, 111] } }; let mut x = [foo_one, foo_two]; x = x.push_back(foo_three); x = x.push_back(foo_four); - + assert(x[y - 3].a == 1); let struct_slice = x[y - 3].b; for i in 0..4 { @@ -60,6 +60,14 @@ fn main(y: pub Field) { assert(x[y].bar.inner == [109, 110, 111]); // Check that switching the lhs and rhs is still valid assert([109, 110, 111] == x[y].bar.inner); + + assert(x[y - 3].bar.inner == [100, 101, 102]); + assert(x[y - 2].bar.inner == [103, 104, 105]); + assert(x[y - 1].bar.inner == [106, 107, 108]); + assert(x[y].bar.inner == [109, 110, 111]); + // Check that switching the lhs and rhs is still valid + assert([109, 110, 111] == x[y].bar.inner); + // TODO: Enable merging nested slices // if y != 2 { // x[y].a = 50; @@ -75,12 +83,13 @@ fn main(y: pub Field) { // assert(x[2].b[0] == 100); // assert(x[2].b[1] == 101); // assert(x[2].b[2] == 102); + let q = x.push_back(foo_four); let foo_parent_one = FooParent { parent_arr: [0, 1, 2], foos: x }; let foo_parent_two = FooParent { parent_arr: [3, 4, 5], foos: q }; let mut foo_parents = [foo_parent_one]; foo_parents = foo_parents.push_back(foo_parent_two); - // TODO: make a separate test for compile time + // TODO: make a separate test for entirely compile time // foo_parents[1].foos.push_back(foo_four); // TODO: Merging nested slices is broken // if y == 3 { @@ -88,6 +97,7 @@ fn main(y: pub Field) { // } else { // foo_parents[y - 2].foos[y - 1].b[y - 1] = 1000; // } + assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 21); foo_parents[y - 2].foos[y - 2].b[y - 1] = 5000; assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 5000); @@ -108,11 +118,15 @@ fn main(y: pub Field) { assert(foo_parents[y - 2].foos[y - 1].a == 7); foo_parents[y - 2].foos[y - 1].a = 50; + assert(foo_parents[y - 2].foos[y - 1].a == 50); let b_array = foo_parents[y - 2].foos[y - 1].b; + assert(b_array[0] == 8); + assert(b_array[1] == 9); assert(b_array[2] == 22); assert(b_array.len() == 3); - // Test setting a nested array with non-dynamic + + // // Test setting a nested array with non-dynamic let x = [5, 6, 5000, 21, 100, 101].as_slice(); foo_parents[y - 2].foos[y - 1].b = x; @@ -120,16 +134,52 @@ fn main(y: pub Field) { assert(foo_parents[y - 2].foos[y - 1].b[4] == 100); assert(foo_parents[y - 2].foos[y - 1].b[5] == 101); + // Need to account for that foo_parents is not modified outside of this function 
test_basic_intrinsics_nested_slices(foo_parents, y); - // TODO(#3364): still have to enable slice intrinsics on dynamic nested slices - // assert(foo_parents[y - 2].foos.len() == 5); - // foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); - // assert(foo_parents[y - 2].foos.len() == 6); + test_complex_intrinsic_nested_slices(foo_parents, y); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[6] == 500); + + let (popped_slice, last_elem) = foo_parents[y - 2].foos[y - 1].b.pop_back(); + foo_parents[y - 2].foos[y - 1].b = popped_slice; + assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); + assert(last_elem == 500); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_front(11); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[0] == 11); + + assert(foo_parents[y - 2].foos.len() == 5); + foo_four.a = 40; + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 6); + assert(foo_parents[y - 2].foos[y + 2].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 7); + assert(foo_parents[y - 2].foos[6].a == 40); + assert(foo_parents[y - 2].foos[5].bar.inner == [109, 110, 111]); + assert(foo_parents[y - 2].foos[6].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 8); + assert(foo_parents[y - 2].foos[6].a == 40); + assert(foo_parents[y - 2].foos[5].bar.inner == [109, 110, 111]); + assert(foo_parents[y - 2].foos[6].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 9); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 10); + let b_array = foo_parents[y - 2].foos[y - 1].b; - assert(b_array[0] == 5); - assert(b_array[1] == 6); - assert(b_array[2] == 5000); - assert(b_array[3] == 21); + assert(b_array[0] == 11); + assert(b_array[1] == 5); + assert(b_array[2] == 6); + assert(b_array[3] == 5000); let b_array = foo_parents[y - 2].foos[y].b; assert(foo_parents[y - 2].foos[y].a == 10); @@ -175,3 +225,248 @@ fn test_basic_intrinsics_nested_slices(mut foo_parents: [FooParent], y: Field) { assert(foo_parents[y - 2].foos[y - 1].b[2] == 20); assert(foo_parents[y - 2].foos[y - 1].b[3] == 21); } + +// This method test intrinsics on nested slices with complex inputs such as +// pushing a `Foo` struct onto a slice in `FooParents`. 
+fn test_complex_intrinsic_nested_slices(mut foo_parents: [FooParent], y: Field) { + let mut foo = Foo { a: 13, b: [14, 15, 16], bar: Bar { inner: [109, 110, 111] } }; + assert(foo_parents[y - 2].foos.len() == 5); + foo.a = 40; + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo); + assert(foo_parents[1].foos.len() == 6); + assert(foo_parents[1].foos[5].a == 40); + assert(foo_parents[1].foos[5].b[0] == 14); + assert(foo_parents[1].foos[5].b[2] == 16); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); + assert(foo_parents[1].foos[2].b.len() == 7); + assert(foo_parents[1].foos[2].b[6] == 500); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + assert(foo_parents[1].foos[5].a == 40); + assert(foo_parents[1].foos[5].b[0] == 14); + assert(foo_parents[1].foos[5].b[2] == 16); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (popped_slice, last_foo) = foo_parents[y - 2].foos.pop_back(); + foo_parents[y - 2].foos = popped_slice; + assert(foo_parents[y - 2].foos.len() == 5); + assert(last_foo.a == 40); + assert(last_foo.b[0] == 14); + assert(last_foo.b[1] == 15); + assert(last_foo.b[2] == 16); + assert(last_foo.bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_front(foo); + assert(foo_parents[1].foos.len() == 6); + assert(foo_parents[1].foos[0].a == 40); + assert(foo_parents[1].foos[0].b[0] == 14); + assert(foo_parents[1].foos[0].b[1] == 15); + assert(foo_parents[1].foos[0].b[2] == 16); + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[1].a == 1); + assert(foo_parents[1].foos[1].bar.inner == [100, 101, 102]); + + let (first_foo, rest_of_slice) = foo_parents[y - 2].foos.pop_front(); + + foo_parents[y - 2].foos = rest_of_slice; + assert(first_foo.a == 40); + assert(first_foo.b[0] == 14); + assert(first_foo.b[1] == 15); + assert(first_foo.b[2] == 16); + assert(first_foo.bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + test_insert_remove_const_index(foo_parents, y, foo); + + // Check values before insertion + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos.len() == 5); + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.insert(y - 1, foo); + assert(foo_parents[1].foos.len() == 6); + + // Check 
values correctly moved after insertion + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 40); + assert(foo_parents[1].foos[2].b[0] == 14); + assert(foo_parents[1].foos[2].b[2] == 16); + assert(foo_parents[1].foos[2].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[3].a == 50); + assert(foo_parents[1].foos[3].b[0] == 5); + assert(foo_parents[1].foos[3].b[2] == 5000); + assert(foo_parents[1].foos[3].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[4].a == 10); + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos.remove(y - 1); + foo_parents[1].foos = rest_of_slice; + + // Check that the accurate element was removed + assert(removed_elem.a == 40); + assert(removed_elem.b[0] == 14); + assert(removed_elem.b[2] == 16); + assert(removed_elem.bar.inner == [109, 110, 111]); + + // Check that we have altered our slice accurately following a removal + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); +} + +fn test_insert_remove_const_index(mut foo_parents: [FooParent], y: Field, foo: Foo) { + // Check values before insertion + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos.len() == 5); + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.insert(2, foo); + 
assert(foo_parents[1].foos.len() == 6); + + // Check values correctly moved after insertion + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 40); + assert(foo_parents[1].foos[2].b[0] == 14); + assert(foo_parents[1].foos[2].b[2] == 16); + assert(foo_parents[1].foos[2].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[3].a == 50); + assert(foo_parents[1].foos[3].b[0] == 5); + assert(foo_parents[1].foos[3].b[2] == 5000); + assert(foo_parents[1].foos[3].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[4].a == 10); + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos.remove(2); + foo_parents[1].foos = rest_of_slice; + + // Check that the accurate element was removed + assert(removed_elem.a == 40); + assert(removed_elem.b[0] == 14); + assert(removed_elem.b[2] == 16); + assert(removed_elem.bar.inner == [109, 110, 111]); + + // Check that we have altered our slice accurately following a removal + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); +} diff --git a/test_programs/rebuild.sh b/test_programs/rebuild.sh index d89402fb1cf..dfc3dc5c967 100755 --- a/test_programs/rebuild.sh +++ b/test_programs/rebuild.sh @@ -1,56 +1,72 @@ #!/bin/bash set -e -excluded_dirs=("workspace" "workspace_default_member") +process_dir() { + local dir=$1 + local current_dir=$2 + local dir_name=$(basename "$dir") + + if [[ ! 
-d "$current_dir/acir_artifacts/$dir_name" ]]; then + mkdir -p $current_dir/acir_artifacts/$dir_name + fi + + cd $dir + if [ -d ./target/ ]; then + rm -r ./target/ + fi + nargo compile && nargo execute witness + + if [ -f ./target/witness.tr ]; then + mv ./target/witness.tr ./target/witness.gz + fi + + if [ -f ./target/${dir_name}.json ]; then + jq -r '.bytecode' ./target/${dir_name}.json | base64 -d > ./target/acir.gz + fi + + rm ./target/${dir_name}.json + + if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then + rm -r "$current_dir/acir_artifacts/$dir_name/target" + fi + mkdir $current_dir/acir_artifacts/$dir_name/target + + mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/ + cd $current_dir +} + +export -f process_dir + +excluded_dirs=("workspace" "workspace_default_member") current_dir=$(pwd) base_path="$current_dir/execution_success" -# Clear the acir_artifacts directory of any existing artifacts rm -rf $current_dir/acir_artifacts mkdir -p $current_dir/acir_artifacts -# Loop over every directory +# Gather directories to process. +dirs_to_process=() for dir in $base_path/*; do - if [[ ! -d $dir ]]; then - continue - fi - - dir_name=$(basename "$dir") - - if [[ ! " ${excluded_dirs[@]} " =~ " ${dir_name} " ]]; then - if [[ ! -d "$current_dir/acir_artifacts/$dir_name" ]]; then - mkdir -p $current_dir/acir_artifacts/$dir_name - fi - - cd $dir - if [ -d ./target/ ]; then - rm -r ./target/ - fi - nargo compile && nargo execute witness - - # Rename witness.tr to witness.gz - if [ -f ./target/witness.tr ]; then - mv ./target/witness.tr ./target/witness.gz - fi - - # Extract bytecode field from JSON, base64 decode it, and save it to the target directory - if [ -f ./target/${dir_name}.json ]; then - jq -r '.bytecode' ./target/${dir_name}.json | base64 -d > ./target/acir.gz - fi - - # Delete the JSON file after extracting bytecode field - rm ./target/${dir_name}.json - - # Clear the target directory in acir_artifacts - if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then - rm -r "$current_dir/acir_artifacts/$dir_name/target" - fi - mkdir $current_dir/acir_artifacts/$dir_name/target - - # Move the artifacts from the target directory to the corresponding directory in acir_artifacts - mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/ - - cd $base_path - fi + if [[ ! -d $dir ]] || [[ " ${excluded_dirs[@]} " =~ " $(basename "$dir") " ]]; then + continue + fi + dirs_to_process+=("$dir") +done + +# Process each directory in parallel +pids=() +for dir in "${dirs_to_process[@]}"; do + process_dir "$dir" "$current_dir" & + pids+=($!) +done + +# Check the exit status of each background job. +for pid in "${pids[@]}"; do + wait $pid || exit_status=$? done + +# Exit with a failure status if any job failed. +if [ ! 
diff --git a/tooling/nargo/src/ops/foreign_calls.rs b/tooling/nargo/src/ops/foreign_calls.rs
index 6cc78febab3..1ca270a5bf7 100644
--- a/tooling/nargo/src/ops/foreign_calls.rs
+++ b/tooling/nargo/src/ops/foreign_calls.rs
@@ -2,7 +2,6 @@ use acvm::{
     acir::brillig::{ForeignCallParam, ForeignCallResult, Value},
     pwg::ForeignCallWaitInfo,
 };
-use iter_extended::vecmap;
 use noirc_printable_type::{decode_string_value, ForeignCallError, PrintableValueDisplay};
 
 pub trait ForeignCallExecutor {
@@ -16,8 +15,6 @@ pub trait ForeignCallExecutor {
 /// After resolution of a foreign call, nargo will restart execution of the ACVM
 pub(crate) enum ForeignCall {
     Println,
-    Sequence,
-    ReverseSequence,
     CreateMock,
     SetMockParams,
     SetMockReturns,
@@ -35,8 +32,6 @@ impl ForeignCall {
     pub(crate) fn name(&self) -> &'static str {
         match self {
             ForeignCall::Println => "println",
-            ForeignCall::Sequence => "get_number_sequence",
-            ForeignCall::ReverseSequence => "get_reverse_number_sequence",
             ForeignCall::CreateMock => "create_mock",
             ForeignCall::SetMockParams => "set_mock_params",
             ForeignCall::SetMockReturns => "set_mock_returns",
@@ -48,8 +43,6 @@ impl ForeignCall {
     pub(crate) fn lookup(op_name: &str) -> Option<ForeignCall> {
         match op_name {
             "println" => Some(ForeignCall::Println),
-            "get_number_sequence" => Some(ForeignCall::Sequence),
-            "get_reverse_number_sequence" => Some(ForeignCall::ReverseSequence),
             "create_mock" => Some(ForeignCall::CreateMock),
             "set_mock_params" => Some(ForeignCall::SetMockParams),
             "set_mock_returns" => Some(ForeignCall::SetMockReturns),
@@ -147,30 +140,6 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor {
                 }
                 Ok(ForeignCallResult { values: vec![] })
             }
-            Some(ForeignCall::Sequence) => {
-                let sequence_length: u128 =
-                    foreign_call.inputs[0].unwrap_value().to_field().to_u128();
-                let sequence = vecmap(0..sequence_length, Value::from);
-
-                Ok(ForeignCallResult {
-                    values: vec![
-                        ForeignCallParam::Single(sequence_length.into()),
-                        ForeignCallParam::Array(sequence),
-                    ],
-                })
-            }
-            Some(ForeignCall::ReverseSequence) => {
-                let sequence_length: u128 =
-                    foreign_call.inputs[0].unwrap_value().to_field().to_u128();
-                let sequence = vecmap((0..sequence_length).rev(), Value::from);
-
-                Ok(ForeignCallResult {
-                    values: vec![
-                        ForeignCallParam::Single(sequence_length.into()),
-                        ForeignCallParam::Array(sequence),
-                    ],
-                })
-            }
             Some(ForeignCall::CreateMock) => {
                 let mock_oracle_name = Self::parse_string(&foreign_call.inputs[0]);
                 assert!(ForeignCall::lookup(&mock_oracle_name).is_none());
diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs
index 0e7579b0721..5204e0f122c 100644
--- a/tooling/nargo_cli/src/cli/debug_cmd.rs
+++ b/tooling/nargo_cli/src/cli/debug_cmd.rs
@@ -1,3 +1,5 @@
+use std::path::PathBuf;
+
 use acvm::acir::native_types::WitnessMap;
 
 use clap::Args;
@@ -64,27 +66,42 @@ pub(crate) fn run(
         &opcode_support,
     )?;
 
-    println!("[{}] Starting debugger", package.name);
-    let (return_value, solved_witness) =
-        debug_program_and_decode(compiled_program, package, &args.prover_name)?;
+    run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir)
+}
+
+fn run_async(
+    package: &Package,
+    program: CompiledProgram,
+    prover_name: &str,
+    witness_name: &Option<String>,
+    target_dir: &PathBuf,
+) -> Result<(), CliError> {
+    use tokio::runtime::Builder;
+    let runtime = Builder::new_current_thread().enable_all().build().unwrap();
+
+    runtime.block_on(async {
+        println!("[{}] Starting debugger", package.name);
+        let (return_value, solved_witness) =
+            debug_program_and_decode(program, package, prover_name)?;
 
-    if let Some(solved_witness) = solved_witness {
-        println!("[{}] Circuit witness successfully solved", package.name);
+        if let Some(solved_witness) = solved_witness {
+            println!("[{}] Circuit witness successfully solved", package.name);
 
-        if let Some(return_value) = return_value {
-            println!("[{}] Circuit output: {return_value:?}", package.name);
-        }
+            if let Some(return_value) = return_value {
+                println!("[{}] Circuit output: {return_value:?}", package.name);
+            }
 
-        if let Some(witness_name) = &args.witness_name {
-            let witness_path = save_witness_to_dir(solved_witness, witness_name, target_dir)?;
+            if let Some(witness_name) = witness_name {
+                let witness_path = save_witness_to_dir(solved_witness, witness_name, target_dir)?;
 
-            println!("[{}] Witness saved to {}", package.name, witness_path.display());
+                println!("[{}] Witness saved to {}", package.name, witness_path.display());
+            }
+        } else {
+            println!("Debugger execution halted.");
         }
-    } else {
-        println!("Debugger execution halted.");
-    }
 
-    Ok(())
+        Ok(())
+    })
 }
 
 fn debug_program_and_decode(
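The debug_cmd.rs change above follows a common pattern for calling async code from a synchronous CLI entry point: build a single-threaded tokio runtime and drive one async block to completion. A minimal, self-contained sketch of that pattern (not part of this patch; the function name and the String error type are illustrative):

// Sketch only: mirrors the runtime setup used above, with illustrative names.
use tokio::runtime::Builder;

fn run_debugger(package_name: &str) -> Result<(), String> {
    // A current-thread runtime avoids spinning up a worker pool for a one-shot command.
    let runtime = Builder::new_current_thread()
        .enable_all()
        .build()
        .map_err(|err| err.to_string())?;

    runtime.block_on(async {
        // Async work (for example, a foreign-call transport) can be awaited here
        // while the outer function keeps its synchronous signature.
        println!("[{package_name}] Starting debugger");
        Ok(())
    })
}

fn main() {
    run_debugger("my_package").unwrap();
}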
diff --git a/tooling/nargo_fmt/build.rs b/tooling/nargo_fmt/build.rs
index cd93866ece9..c356b403ae5 100644
--- a/tooling/nargo_fmt/build.rs
+++ b/tooling/nargo_fmt/build.rs
@@ -46,8 +46,8 @@ fn generate_formatter_tests(test_file: &mut File, test_data_dir: &Path) {
             .collect::<Vec<_>>()
             .join("\n");
 
-        let output_source_path = outputs_dir.join(file_name);
-        let output_source = std::fs::read_to_string(output_source_path).unwrap();
+        let output_source_path = outputs_dir.join(file_name).display().to_string();
+        let output_source = std::fs::read_to_string(output_source_path.clone()).unwrap();
 
         write!(
             test_file,
@@ -58,11 +58,14 @@ fn format_{test_name}() {{
 
     let expected_output = r#"{output_source}"#;
 
-    let (parsed_module, errors) = noirc_frontend::parse_program(&input);
+    let (parsed_module, _errors) = noirc_frontend::parse_program(&input);
 
     let config = nargo_fmt::Config::of("{config}").unwrap();
     let fmt_text = nargo_fmt::format(&input, parsed_module, &config);
 
+    if std::env::var("UPDATE_EXPECT").is_ok() {{
+        std::fs::write("{output_source_path}", fmt_text.clone()).unwrap();
+    }}
 
     similar_asserts::assert_eq!(fmt_text, expected_output);
 }}
diff --git a/tooling/nargo_fmt/src/rewrite.rs b/tooling/nargo_fmt/src/rewrite.rs
index 5a9baf0aa05..6a95eba8759 100644
--- a/tooling/nargo_fmt/src/rewrite.rs
+++ b/tooling/nargo_fmt/src/rewrite.rs
@@ -2,8 +2,10 @@ mod array;
 mod expr;
 mod infix;
 mod parenthesized;
+mod typ;
 
 pub(crate) use array::rewrite as array;
 pub(crate) use expr::{rewrite as expr, rewrite_sub_expr as sub_expr};
 pub(crate) use infix::rewrite as infix;
 pub(crate) use parenthesized::rewrite as parenthesized;
+pub(crate) use typ::rewrite as typ;
diff --git a/tooling/nargo_fmt/src/rewrite/expr.rs b/tooling/nargo_fmt/src/rewrite/expr.rs
index e026d515333..3c46319c1aa 100644
--- a/tooling/nargo_fmt/src/rewrite/expr.rs
+++ b/tooling/nargo_fmt/src/rewrite/expr.rs
@@ -110,9 +110,11 @@ pub(crate) fn rewrite(
             NewlineMode::Normal,
         ),
         ExpressionKind::Literal(literal) => match literal {
-            Literal::Integer(_) | Literal::Bool(_) | Literal::Str(_) | Literal::FmtStr(_) => {
-                visitor.slice(span).to_string()
-            }
+            Literal::Integer(_)
+            | Literal::Bool(_)
+            | Literal::Str(_)
+            | Literal::RawStr(..)
+            | Literal::FmtStr(_) => visitor.slice(span).to_string(),
             Literal::Array(ArrayLiteral::Repeated { repeated_element, length }) => {
                 let repeated = rewrite_sub_expr(visitor, shape, *repeated_element);
                 let length = rewrite_sub_expr(visitor, shape, *length);
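For readers unfamiliar with the generated formatter tests, the build.rs template above expands to one #[test] per input/expected file pair. Roughly, a single expanded test would look like the sketch below; the test name, snippet, and paths are illustrative, and the empty config string is assumed to fall back to the default formatter settings:

// Sketch of one expanded formatter test (names and paths are illustrative).
#[test]
fn format_fn() {
    let input = r#"fn main() -> distinct pub [Field;2] {}"#;

    let expected_output = r#"fn main() -> distinct pub [Field; 2] {}"#;

    let (parsed_module, _errors) = noirc_frontend::parse_program(&input);

    // Assumption: an empty config string means "use the defaults".
    let config = nargo_fmt::Config::of("").unwrap();
    let fmt_text = nargo_fmt::format(&input, parsed_module, &config);

    // With UPDATE_EXPECT set (e.g. `UPDATE_EXPECT=1 cargo test -p nargo_fmt`),
    // the expected-output file is rewritten instead of the test failing.
    if std::env::var("UPDATE_EXPECT").is_ok() {
        std::fs::write("tests/expected/fn.nr", fmt_text.clone()).unwrap();
    }

    similar_asserts::assert_eq!(fmt_text, expected_output);
}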
diff --git a/tooling/nargo_fmt/src/rewrite/typ.rs b/tooling/nargo_fmt/src/rewrite/typ.rs
new file mode 100644
index 00000000000..4c6411e92b8
--- /dev/null
+++ b/tooling/nargo_fmt/src/rewrite/typ.rs
@@ -0,0 +1,70 @@
+use noirc_frontend::{UnresolvedType, UnresolvedTypeData};
+
+use crate::{
+    utils::span_is_empty,
+    visitor::{FmtVisitor, Shape},
+};
+
+pub(crate) fn rewrite(visitor: &FmtVisitor, _shape: Shape, typ: UnresolvedType) -> String {
+    match typ.typ {
+        UnresolvedTypeData::Array(length, element) => {
+            let typ = rewrite(visitor, _shape, *element);
+            if let Some(length) = length {
+                let length = visitor.slice(length.span());
+                format!("[{typ}; {length}]")
+            } else {
+                format!("[{typ}]")
+            }
+        }
+        UnresolvedTypeData::Parenthesized(typ) => {
+            let typ = rewrite(visitor, _shape, *typ);
+            format!("({typ})")
+        }
+        UnresolvedTypeData::MutableReference(typ) => {
+            let typ = rewrite(visitor, _shape, *typ);
+            format!("&mut {typ}")
+        }
+        UnresolvedTypeData::Tuple(mut types) => {
+            if types.len() == 1 {
+                let typ = types.pop().unwrap();
+                let typ = rewrite(visitor, _shape, typ);
+
+                format!("({typ},)")
+            } else {
+                let types: Vec<_> =
+                    types.into_iter().map(|typ| rewrite(visitor, _shape, typ)).collect();
+                let types = types.join(", ");
+                format!("({types})")
+            }
+        }
+        UnresolvedTypeData::Function(args, return_type, env) => {
+            let env = if span_is_empty(env.span.unwrap()) {
+                "".into()
+            } else {
+                let ty = rewrite(visitor, _shape, *env);
+                format!("[{ty}]")
+            };
+
+            let args = args
+                .into_iter()
+                .map(|arg| rewrite(visitor, _shape, arg))
+                .collect::<Vec<_>>()
+                .join(", ");
+
+            let return_type = rewrite(visitor, _shape, *return_type);
+
+            format!("fn{env}({args}) -> {return_type}")
+        }
+        UnresolvedTypeData::Unspecified => todo!(),
+        UnresolvedTypeData::FieldElement
+        | UnresolvedTypeData::Integer(_, _)
+        | UnresolvedTypeData::Bool
+        | UnresolvedTypeData::Named(_, _)
+        | UnresolvedTypeData::Unit
+        | UnresolvedTypeData::Expression(_)
+        | UnresolvedTypeData::String(_)
+        | UnresolvedTypeData::FormatString(_, _)
+        | UnresolvedTypeData::TraitAsType(_, _) => visitor.slice(typ.span.unwrap()).into(),
+        UnresolvedTypeData::Error => unreachable!(),
+    }
+}
diff --git a/tooling/nargo_fmt/src/utils.rs b/tooling/nargo_fmt/src/utils.rs
index c02f1ee4725..1160f01972f 100644
--- a/tooling/nargo_fmt/src/utils.rs
+++ b/tooling/nargo_fmt/src/utils.rs
@@ -250,14 +250,14 @@ impl Item for Param {
         self.span
     }
 
-    fn format(self, visitor: &FmtVisitor, _shape: Shape) -> String {
+    fn format(self, visitor: &FmtVisitor, shape: Shape) -> String {
         let visibility = match self.visibility {
             Visibility::Public => "pub ",
             Visibility::Private => "",
             Visibility::DataBus => "call_data",
         };
         let pattern = visitor.slice(self.pattern.span());
-        let ty = visitor.slice(self.typ.span.unwrap());
+        let ty = rewrite::typ(visitor, shape, self.typ);
 
         format!("{pattern}: {visibility}{ty}")
     }
@@ -296,3 +296,7 @@ pub(crate) fn last_line_used_width(s: &str, offset: usize) -> usize {
         offset + s.chars().count()
     }
 }
+
+pub(crate) fn span_is_empty(span: Span) -> bool {
+    span.start() == span.end()
+}
diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs
index c0a255b7ef6..eb2086168ba 100644
--- a/tooling/nargo_fmt/src/visitor/item.rs
+++ b/tooling/nargo_fmt/src/visitor/item.rs
@@ -6,6 +6,7 @@ use noirc_frontend::{
 };
 
 use crate::{
+    rewrite,
     utils::{last_line_contains_single_line_comment, last_line_used_width, FindToken},
     visitor::expr::{format_seq, NewlineMode},
 };
@@ -122,7 +123,8 @@ impl super::FmtVisitor<'_> {
                 result.push_str("pub ");
             }
 
-            result.push_str(self.slice(span));
+            let typ = rewrite::typ(self, self.shape(), func.return_type());
+            result.push_str(&typ);
 
             let slice = self.slice(span.end()..func_span.start());
             if !slice.trim().is_empty() {
diff --git a/tooling/nargo_fmt/tests/expected/fn.nr b/tooling/nargo_fmt/tests/expected/fn.nr
index 7fd45648c67..0088dba6a8f 100644
--- a/tooling/nargo_fmt/tests/expected/fn.nr
+++ b/tooling/nargo_fmt/tests/expected/fn.nr
@@ -36,7 +36,21 @@ fn apply_binary_field_op(
     registers: &mut Registers
 ) -> bool {}
 
-fn main() -> distinct pub [Field;2] {}
+fn main() -> distinct pub [Field; 2] {}
+
+fn ret_normal_lambda1() -> ((fn() -> Field)) {}
+
+fn ret_normal_lambda1() -> fn() -> Field {}
+
+fn ret_closure1() -> fn[(Field,)]() -> Field {}
+
+fn ret_closure2() -> fn[(Field, Field)]() -> Field {}
+
+fn ret_closure3() -> fn[(u32, u64)]() -> u64 {}
+
+fn make_counter() -> fn[(&mut Field,)]() -> Field {}
+
+fn get_some(generator: fn[Env]() -> Field) -> [Field; 5] {}
 
 fn main(
     message: [u8; 10],
@@ -45,3 +59,5 @@ fn main(
     pub_key_y: Field,
     signature: [u8; 64]
 ) {}
+
+pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {}
diff --git a/tooling/nargo_fmt/tests/input/fn.nr b/tooling/nargo_fmt/tests/input/fn.nr
index 45dc3370f14..26ff5933802 100644
--- a/tooling/nargo_fmt/tests/input/fn.nr
+++ b/tooling/nargo_fmt/tests/input/fn.nr
@@ -25,6 +25,22 @@ fn apply_binary_field_op(lhs: RegisterIndex, rhs: RegisterIndex, result: Regi
 
 fn main() -> distinct pub [Field;2] {}
 
+fn ret_normal_lambda1() -> ((fn() -> Field)) {}
+
+fn ret_normal_lambda1() -> fn() -> Field {}
+
+fn ret_closure1() -> fn[(Field,)]() -> Field {}
+
+fn ret_closure2() -> fn[(Field,Field)]() -> Field {}
+
+fn ret_closure3() -> fn[(u32,u64)]() -> u64 {}
+
+fn make_counter() -> fn[(&mut Field,)]() -> Field {}
+
+fn get_some(generator: fn[Env]() -> Field) -> [Field;5] {}
+
 fn main(
     message: [u8; 10], message_field: Field, pub_key_x: Field, pub_key_y: Field, signature: [u8; 64]
 ) {}
+
+pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {}
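The closure-type cases added to fn.nr exercise the recursive strategy in rewrite/typ.rs: each composite type re-renders its children, then assembles its own syntax with normalized spacing, so inputs like `fn[(u32,u64)]() -> u64` come out as `fn[(u32, u64)]() -> u64`. A simplified, standalone model of that strategy (the types below are illustrative and are not the real nargo_fmt data structures):

// Self-contained sketch of recursive type pretty-printing, modeled on rewrite/typ.rs.
#[derive(Clone)]
enum Ty {
    Named(String),
    Array(Box<Ty>, Option<String>),
    Tuple(Vec<Ty>),
    Function { env: Vec<Ty>, args: Vec<Ty>, ret: Box<Ty> },
}

fn rewrite(ty: &Ty) -> String {
    match ty {
        Ty::Named(name) => name.clone(),
        // Arrays render their element first, then re-attach the optional length.
        Ty::Array(elem, len) => match len {
            Some(len) => format!("[{}; {len}]", rewrite(elem)),
            None => format!("[{}]", rewrite(elem)),
        },
        // One-element tuples keep the trailing comma, as in typ.rs.
        Ty::Tuple(items) if items.len() == 1 => format!("({},)", rewrite(&items[0])),
        Ty::Tuple(items) => {
            let items: Vec<_> = items.iter().map(rewrite).collect();
            format!("({})", items.join(", "))
        }
        // Function types rebuild the captured environment, arguments, and return type.
        Ty::Function { env, args, ret } => {
            let env = if env.is_empty() {
                String::new()
            } else {
                format!("[{}]", rewrite(&Ty::Tuple(env.clone())))
            };
            let args: Vec<_> = args.iter().map(rewrite).collect();
            format!("fn{env}({}) -> {}", args.join(", "), rewrite(ret))
        }
    }
}

fn main() {
    let closure = Ty::Function {
        env: vec![Ty::Named("u32".into()), Ty::Named("u64".into())],
        args: vec![],
        ret: Box::new(Ty::Named("u64".into())),
    };
    // Prints `fn[(u32, u64)]() -> u64`, matching the expected fn.nr output above.
    println!("{}", rewrite(&closure));
}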