diff --git a/.github/workflows/docs-new-version.yml b/.github/workflows/docs-new-version.yml
deleted file mode 100644
index 9b109e170bb..00000000000
--- a/.github/workflows/docs-new-version.yml
+++ /dev/null
@@ -1,112 +0,0 @@
-name: Cut a new version of the docs
-
-on:
-  workflow_dispatch:
-    inputs:
-      tag:
-        description: The tag to build Docs for
-        required: false
-
-jobs:
-  publish-docs:
-    runs-on: ubuntu-latest
-    if: ${{ inputs.tag != '' }}
-    permissions:
-      pull-requests: write
-      contents: write
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v4
-        with:
-          ref: ${{ inputs.tag }}
-
-      - name: Create new branch
-        run: |
-          git checkout -b new-docs-version-${{ github.event.inputs.tag }}
-
-      - name: Setup Node.js
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-
-      - name: Install wasm-bindgen-cli
-        uses: taiki-e/install-action@v2
-        with:
-          tool: wasm-bindgen-cli@0.2.86
-
-      - name: Install wasm-opt
-        run: |
-          npm i wasm-opt -g
-
-      - name: Install Yarn
-        run: npm install -g yarn
-
-      - name: Install Yarn dependencies
-        run: yarn
-
-      - name: Build acvm_js
-        run: yarn workspace @noir-lang/acvm_js build
-
-      - name: Build noirc_abi
-        run: yarn workspace @noir-lang/noirc_abi build
-
-      - name: Build noir_js_types
-        run: yarn workspace @noir-lang/types build
-
-      - name: Build barretenberg wrapper
-        run: yarn workspace @noir-lang/backend_barretenberg build
-
-      - name: Run noir_js
-        run: |
-          yarn workspace @noir-lang/noir_js build
-
-      - name: Build docs
-        run:
-          yarn workspace docs build
-
-      - name: Cut a new version
-        working-directory: ./docs
-        run: yarn docusaurus docs:version ${{ inputs.tag }}
-
-      - name: Remove pre-releases
-        id: get_version
-        run: |
-          cd docs && yarn setStable
-
-      - name: Commit new documentation version
-        run: |
-          git config --local user.name 'signorecello'
-          git config --local user.email 'github@zepedro.me'
-          git add .
-          git commit -m "chore(docs): cut new docs version for tag ${{ github.event.inputs.tag }}"
-
-      - name: Push changes to new branch
-        run: git push origin new-docs-version-${{ github.event.inputs.tag }}
-
-      - name: Create Pull Request
-        run: |
-          gh pr create \
-            --title "chore(docs): docs for ${{ github.event.inputs.tag }}" \
-            --body "Updates documentation to new version for tag ${{ github.event.inputs.tag }}." \
-            --base master \
-            --head new-docs-version-${{ github.event.inputs.tag }} \
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Build docs
-        run: yarn workspace docs build
-
-      - name: Deploy to Netlify
-        uses: nwtgck/actions-netlify@v2.1
-        with:
-          publish-dir: './docs/build'
-          production-branch: master
-          production-deploy: true
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          enable-github-deployment: false
-          deploy-message: "Deploy from GitHub Actions for tag ${{ inputs.tag }}"
-        env:
-          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
-          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
-        timeout-minutes: 1
-
diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml
index 2b304b72b6f..12526df80f0 100644
--- a/.github/workflows/docs-pr.yml
+++ b/.github/workflows/docs-pr.yml
@@ -74,12 +74,6 @@ jobs:
       - name: Install Yarn dependencies
        uses: ./.github/actions/setup
 
-      - name: Remove pre-releases
-        working-directory: docs
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: yarn setStable
-
       - name: Build docs
        working-directory: docs
        run:
diff --git a/.github/workflows/docs-release.yml b/.github/workflows/docs-release.yml
deleted file mode 100644
index 4cd9d9998cb..00000000000
--- a/.github/workflows/docs-release.yml
+++ /dev/null
@@ -1,72 +0,0 @@
-name: Rebuild docs with the latest release
-
-on:
-  release:
-    types: [released]
-  workflow_dispatch:
-
-jobs:
-  build_and_deploy:
-    runs-on: ubuntu-latest
-    permissions:
-      pull-requests: write
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-
-      - name: Setup Node.js
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-
-      - name: Install wasm-bindgen-cli
-        uses: taiki-e/install-action@v2
-        with:
-          tool: wasm-bindgen-cli@0.2.86
-
-      - name: Install wasm-opt
-        run: |
-          npm i wasm-opt -g
-
-      - name: Install dependencies
-        run: yarn
-
-      - name: Build acvm_js
-        run: yarn workspace @noir-lang/acvm_js build
-
-      - name: Build noirc_abi
-        run: yarn workspace @noir-lang/noirc_abi build
-
-      - name: Build noir_js_types
-        run: yarn workspace @noir-lang/types build
-
-      - name: Build barretenberg wrapper
-        run: yarn workspace @noir-lang/backend_barretenberg build
-
-      - name: Run noir_js
-        run: |
-          yarn workspace @noir-lang/noir_js build
-
-      - name: Remove pre-releases
-        working-directory: docs
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: yarn setStable
-
-      - name: Build docs
-        run:
-          yarn workspace docs build
-
-      - name: Deploy to Netlify
-        uses: nwtgck/actions-netlify@v2.1
-        with:
-          publish-dir: './docs/build'
-          production-branch: master
-          production-deploy: true
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          enable-github-deployment: false
-          deploy-message: "Deploy from GitHub Actions for release"
-        env:
-          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
-          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
-        timeout-minutes: 1
diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml
new file mode 100644
index 00000000000..4ef7dd89777
--- /dev/null
+++ b/.github/workflows/publish-docs.yml
@@ -0,0 +1,57 @@
+name: Publish documentation
+
+on:
+  workflow_dispatch:
+    inputs:
+      noir-ref:
+        description: The noir reference to checkout
+        required: false
+        default: 'master'
+
+jobs:
+  publish-docs:
+    name: Publish docs
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout release branch
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ inputs.noir-ref }}
+          token: ${{ secrets.NOIR_RELEASES_TOKEN }}
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: '18'
+
+      - name: Install wasm-bindgen-cli
+        uses: taiki-e/install-action@v2
+        with:
+          tool: wasm-bindgen-cli@0.2.86
+
+      - name: Install wasm-opt
+        run: |
+          npm i wasm-opt -g
+
+      - name: Install Yarn dependencies
+        uses: ./.github/actions/setup
+
+      - name: Build docs for deploying
+        working-directory: docs
+        run:
+          yarn workspaces foreach -Rt run build
+
+      - name: Deploy to Netlify
+        uses: nwtgck/actions-netlify@v2.1
+        with:
+          publish-dir: './docs/build'
+          production-branch: master
+          production-deploy: true
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          enable-github-deployment: false
+          deploy-message: "Deploy from GitHub Actions for tag ${{ inputs.noir-ref }}"
+        env:
+          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
+          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
+        timeout-minutes: 1
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 95da6792f04..744b4e3effc 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -50,6 +50,42 @@ jobs:
           git commit -m 'chore: Update lockfile'
           git push
+
+  update-docs:
+    name: Update docs
+    needs: [release-please, update-lockfile]
+    if: ${{ needs.release-please.outputs.release-pr }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout release branch
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ fromJSON(needs.release-please.outputs.release-pr).headBranchName }}
+          token: ${{ secrets.NOIR_RELEASES_TOKEN }}
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: '18'
+
+      - name: Install Yarn dependencies
+        uses: ./.github/actions/setup
+
+      - name: Cut a new version
+        working-directory: ./docs
+        run: yarn docusaurus docs:version ${{ needs.release-please.outputs.tag-name }}
+
+      - name: Configure git
+        run: |
+          git config --local user.name 'signorecello'
+          git config --local user.email 'github@zepedro.me'
+
+      - name: Commit new documentation version
+        run: |
+          git add .
+ git commit -m "chore(docs): cut new docs version for tag ${{ needs.release-please.outputs.tag-name }}" + git push + build-binaries: name: Build binaries needs: [release-please] @@ -78,19 +114,18 @@ jobs: ref: master token: ${{ secrets.NOIR_REPO_TOKEN }} inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}", "npm-tag": "latest" }' - publish-docs: name: Publish docs needs: [release-please] if: ${{ needs.release-please.outputs.tag-name }} runs-on: ubuntu-latest + steps: - - name: Dispatch to publish workflow + - name: Dispatch to publish-docs uses: benc-uk/workflow-dispatch@v1 with: - workflow: docs-new-version.yml - repo: noir-lang/noir + workflow: publish-docs.yml ref: master - token: ${{ secrets.GITHUB_TOKEN }} - inputs: '{ "tag": "${{ needs.release-please.outputs.tag-name }}"}' + token: ${{ secrets.NOIR_REPO_TOKEN }} + inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' diff --git a/Cargo.toml b/Cargo.toml index 1a37a4f53e1..5738fe94984 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -50,14 +50,14 @@ repository = "https://github.com/noir-lang/noir/" [workspace.dependencies] # ACVM workspace dependencies -acir = { path = "acvm-repo/acir", default-features = false } -acvm = { path = "acvm-repo/acvm" } -acir_field = { path = "acvm-repo/acir_field", default-features = false } -stdlib = { package = "acvm_stdlib", path = "acvm-repo/stdlib", default-features = false } -brillig = { path = "acvm-repo/brillig", default-features = false } -brillig_vm = { path = "acvm-repo/brillig_vm", default-features = false } -acvm_blackbox_solver = { path = "acvm-repo/blackbox_solver", default-features = false } -barretenberg_blackbox_solver = { path = "acvm-repo/barretenberg_blackbox_solver", default-features = false } +acir_field = { version = "0.35.0", path = "acvm-repo/acir_field", default-features = false } +acir = { version = "0.35.0", path = "acvm-repo/acir", default-features = false } +acvm = { version = "0.35.0", path = "acvm-repo/acvm" } +stdlib = { version = "0.35.0", package = "acvm_stdlib", path = "acvm-repo/stdlib", default-features = false } +brillig = { version = "0.35.0", path = "acvm-repo/brillig", default-features = false } +brillig_vm = { version = "0.35.0", path = "acvm-repo/brillig_vm", default-features = false } +acvm_blackbox_solver = { version = "0.35.0", path = "acvm-repo/blackbox_solver", default-features = false } +barretenberg_blackbox_solver = { version = "0.35.0", path = "acvm-repo/barretenberg_blackbox_solver", default-features = false } # Noir compiler workspace dependencies arena = { path = "compiler/utils/arena" } diff --git a/acvm-repo/acir_field/src/generic_ark.rs b/acvm-repo/acir_field/src/generic_ark.rs index 0f4be21ad54..e3e66fe7a92 100644 --- a/acvm-repo/acir_field/src/generic_ark.rs +++ b/acvm-repo/acir_field/src/generic_ark.rs @@ -143,6 +143,12 @@ impl From for FieldElement { } } +impl From for FieldElement { + fn from(a: usize) -> FieldElement { + FieldElement::from(a as u128) + } +} + impl From for FieldElement { fn from(boolean: bool) -> FieldElement { if boolean { diff --git a/compiler/integration-tests/circuits/recursion/src/main.nr b/compiler/integration-tests/circuits/recursion/src/main.nr index 7cf956d1950..e60e4e0b61a 100644 --- a/compiler/integration-tests/circuits/recursion/src/main.nr +++ b/compiler/integration-tests/circuits/recursion/src/main.nr @@ -1,14 +1,17 @@ use dep::std; fn main( - verification_key: [Field; 114], - proof: [Field; 94], - public_inputs: [Field; 1], - key_hash: Field, - input_aggregation_object: 
[Field; 16] -) -> pub [Field; 16] { - let vk : [Field] = verification_key; - let p : [Field] = proof; - let pi : [Field] = public_inputs; - std::verify_proof(vk, p, pi, key_hash, input_aggregation_object) + verification_key : [Field; 114], + proof : [Field; 94], + public_inputs : [Field; 1], + key_hash : Field, +) -> pub [Field;16]{ + let input_aggregation_object = [0; 16]; + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash, + input_aggregation_object + ) } diff --git a/compiler/integration-tests/hardhat.config.ts b/compiler/integration-tests/hardhat.config.ts index 2c68b3fae0f..af4728c68ec 100644 --- a/compiler/integration-tests/hardhat.config.ts +++ b/compiler/integration-tests/hardhat.config.ts @@ -12,6 +12,9 @@ const config: HardhatUserConfig = { }, }, }, + mocha: { + timeout: 5 * 60 * 1000, + }, }; export default config; diff --git a/compiler/integration-tests/scripts/codegen-verifiers.sh b/compiler/integration-tests/scripts/codegen-verifiers.sh index 13667038728..b3a52217271 100644 --- a/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -12,9 +12,15 @@ nargo --program-dir $mul_dir codegen-verifier assert_statement_dir=$repo_root/test_programs/execution_success/assert_statement nargo --program-dir $assert_statement_dir codegen-verifier +# Run codegen-verifier for recursion +recursion_dir=$repo_root/compiler/integration-tests/circuits/recursion +nargo --program-dir $recursion_dir codegen-verifier + # Copy compiled contracts from the root of compiler/integration-tests contracts_dir=$self_path/../contracts +rm -rf $contracts_dir mkdir $contracts_dir cp $mul_dir/contract/1_mul/plonk_vk.sol $contracts_dir/1_mul.sol cp $assert_statement_dir/contract/assert_statement/plonk_vk.sol $contracts_dir/assert_statement.sol +cp $recursion_dir/contract/recursion/plonk_vk.sol $contracts_dir/recursion.sol diff --git a/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts new file mode 100644 index 00000000000..353678b470b --- /dev/null +++ b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts @@ -0,0 +1,67 @@ +import { expect } from 'chai'; +import { ethers } from 'hardhat'; + +import { readFileSync } from 'node:fs'; +import { resolve } from 'path'; +import toml from 'toml'; + +import { compile, CompiledProgram, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm'; +import { Noir } from '@noir-lang/noir_js'; +import { BarretenbergBackend, flattenPublicInputs } from '@noir-lang/backend_barretenberg'; +import { Field, InputMap } from '@noir-lang/noirc_abi'; + +compilerLogLevel('INFO'); + +it(`smart contract can verify a recursive proof`, async () => { + const inner_source_path = resolve(`../../test_programs/execution_success/assert_statement/src/main.nr`); + const inner_program = (compile(inner_source_path) as { program: CompiledProgram }).program; + + const recursion_source_path = resolve(`./circuits/recursion/src/main.nr`); + const recursion_program = (compile(recursion_source_path) as { program: CompiledProgram }).program; + + // Intermediate proof + + const inner_backend = new BarretenbergBackend(inner_program); + const inner = new Noir(inner_program); + + const inner_prover_toml = readFileSync( + resolve(`../../test_programs/execution_success/assert_statement/Prover.toml`), + ).toString(); + const inner_inputs = 
toml.parse(inner_prover_toml); + + const { witness: main_witness } = await inner.execute(inner_inputs); + const intermediate_proof = await inner_backend.generateIntermediateProof(main_witness); + + expect(await inner_backend.verifyIntermediateProof(intermediate_proof)).to.be.true; + + const { proofAsFields, vkAsFields, vkHash } = await inner_backend.generateIntermediateProofArtifacts( + intermediate_proof, + 1, // 1 public input + ); + + // Final proof + + const recursion_backend = new BarretenbergBackend(recursion_program); + const recursion = new Noir(recursion_program, recursion_backend); + + const recursion_inputs: InputMap = { + verification_key: vkAsFields, + proof: proofAsFields, + public_inputs: [inner_inputs.y as Field], + key_hash: vkHash, + }; + + const recursion_proof = await recursion.generateFinalProof(recursion_inputs); + expect(await recursion.verifyFinalProof(recursion_proof)).to.be.true; + + // Smart contract verification + + const contract = await ethers.deployContract('contracts/recursion.sol:UltraVerifier', []); + + const result = await contract.verify.staticCall( + recursion_proof.proof, + flattenPublicInputs(recursion_proof.publicInputs), + ); + + expect(result).to.be.true; +}); diff --git a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts index 57199fc8667..7dafada0ffb 100644 --- a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts +++ b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts @@ -57,7 +57,7 @@ test_cases.forEach((testInfo) => { // Smart contract verification - const contract = await ethers.deployContract(testInfo.compiled, [], {}); + const contract = await ethers.deployContract(testInfo.compiled, []); const result = await contract.verify(proofData.proof, flattenPublicInputs(proofData.publicInputs)); diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 09a3bd4e44b..17f9b742625 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -130,8 +130,8 @@ impl AcirContext { } /// Adds a constant to the context and assigns a Variable to represent it - pub(crate) fn add_constant(&mut self, constant: FieldElement) -> AcirVar { - let constant_data = AcirVarData::Const(constant); + pub(crate) fn add_constant(&mut self, constant: impl Into) -> AcirVar { + let constant_data = AcirVarData::Const(constant.into()); self.add_data(constant_data) } @@ -353,7 +353,7 @@ impl AcirContext { // Check to see if equality can be determined at compile-time. 
if diff_expr.is_const() { - return Ok(self.add_constant(diff_expr.is_zero().into())); + return Ok(self.add_constant(diff_expr.is_zero())); } let is_equal_witness = self.acir_ir.is_equal(&lhs_expr, &rhs_expr); @@ -410,7 +410,7 @@ impl AcirContext { // max - ((max - a) AND (max -b)) // Subtracting from max flips the bits, so this is effectively: // (NOT a) NAND (NOT b) - let max = self.add_constant(FieldElement::from((1_u128 << bit_size) - 1)); + let max = self.add_constant((1_u128 << bit_size) - 1); let a = self.sub_var(max, lhs)?; let b = self.sub_var(max, rhs)?; let inputs = vec![AcirValue::Var(a, typ.clone()), AcirValue::Var(b, typ)]; @@ -553,7 +553,7 @@ impl AcirContext { pub(crate) fn not_var(&mut self, x: AcirVar, typ: AcirType) -> Result { let bit_size = typ.bit_size(); // Subtracting from max flips the bits - let max = self.add_constant(FieldElement::from((1_u128 << bit_size) - 1)); + let max = self.add_constant((1_u128 << bit_size) - 1); self.sub_var(max, x) } @@ -580,8 +580,8 @@ impl AcirContext { let quotient = lhs_const.to_u128() / rhs_const.to_u128(); let remainder = lhs_const.to_u128() - quotient * rhs_const.to_u128(); - let quotient_var = self.add_constant(FieldElement::from(quotient)); - let remainder_var = self.add_constant(FieldElement::from(remainder)); + let quotient_var = self.add_constant(quotient); + let remainder_var = self.add_constant(remainder); return Ok((quotient_var, remainder_var)); } @@ -778,7 +778,7 @@ impl AcirContext { // witness = lhs_offset + r assert!(bits + r_bit_size < FieldElement::max_num_bits()); //we need to ensure lhs_offset + r does not overflow - let r_var = self.add_constant(r.into()); + let r_var = self.add_constant(r); let aor = self.add_var(lhs_offset, r_var)?; // lhs_offset<=rhs_offset <=> lhs_offset + r < rhs_offset + r = 2^bit_size <=> witness < 2^bit_size self.range_constrain_var(aor, &NumericType::Unsigned { bit_size }, None)?; @@ -926,7 +926,7 @@ impl AcirContext { ) -> Result { // 2^{rhs} let divisor = - self.add_constant(FieldElement::from(2_i128).pow(&FieldElement::from(rhs as i128))); + self.add_constant(FieldElement::from(2_u128).pow(&FieldElement::from(rhs as u128))); let one = self.add_constant(FieldElement::one()); // Computes lhs = 2^{rhs} * q + r @@ -937,7 +937,7 @@ impl AcirContext { /// Returns an `AcirVar` which will be `1` if lhs >= rhs /// and `0` otherwise. - fn more_than_eq_var( + pub(crate) fn more_than_eq_var( &mut self, lhs: AcirVar, rhs: AcirVar, @@ -1150,10 +1150,7 @@ impl AcirContext { // `Intrinsic::ToRadix` returns slices which are represented // by tuples with the structure (length, slice contents) Ok(vec![ - AcirValue::Var( - self.add_constant(FieldElement::from(limb_vars.len() as u128)), - AcirType::field(), - ), + AcirValue::Var(self.add_constant(limb_vars.len()), AcirType::field()), AcirValue::Array(limb_vars.into()), ]) } @@ -1166,7 +1163,7 @@ impl AcirContext { limb_count_var: AcirVar, result_element_type: AcirType, ) -> Result, RuntimeError> { - let two_var = self.add_constant(FieldElement::from(2_u128)); + let two_var = self.add_constant(2_u128); self.radix_decompose(endian, input_var, two_var, limb_count_var, result_element_type) } @@ -1274,7 +1271,7 @@ impl AcirContext { AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. 
}) => { for i in 0..len { // We generate witnesses corresponding to the array values - let index_var = self.add_constant(FieldElement::from(i as u128)); + let index_var = self.add_constant(i); let value_read_var = self.read_from_memory(block_id, &index_var)?; let value_read = AcirValue::Var(value_read_var, AcirType::field()); diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 331c837a521..c41674778ad 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -133,6 +133,16 @@ impl AcirValue { } } + fn borrow_var(&self) -> Result { + match self { + AcirValue::Var(var, _) => Ok(*var), + AcirValue::DynamicArray(_) | AcirValue::Array(_) => Err(InternalError::General { + message: "Called AcirValue::borrow_var on an array".to_string(), + call_stack: CallStack::new(), + }), + } + } + fn flatten(self) -> Vec<(AcirVar, AcirType)> { match self { AcirValue::Var(var, typ) => vec![(var, typ)], @@ -441,8 +451,7 @@ impl Context { let mut read_dynamic_array_index = |block_id: BlockId, array_index: usize| -> Result { - let index_var = - self.acir_context.add_constant(FieldElement::from(array_index as u128)); + let index_var = self.acir_context.add_constant(array_index); self.acir_context.read_from_memory(block_id, &index_var) }; @@ -852,7 +861,7 @@ impl Context { ); let values = try_vecmap(0..*len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let read = self.acir_context.read_from_memory(*block_id, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) @@ -897,7 +906,7 @@ impl Context { // The first max size is going to be the length of the parent slice // As we are fetching from the parent slice we just want its internal - // slize sizes. + // slice sizes. let slice_sizes = slice_sizes[1..].to_vec(); let value = self.array_get_value(&res_typ, block_id, &mut var_index, &slice_sizes)?; @@ -1022,7 +1031,7 @@ impl Context { self.copy_dynamic_array(block_id, result_block_id, array_len)?; } - self.array_set_value(store_value, result_block_id, &mut var_index)?; + self.array_set_value(&store_value, result_block_id, &mut var_index)?; // Set new resulting array to have the same slice sizes as the instruction input if let Type::Slice(element_types) = &array_typ { @@ -1057,7 +1066,7 @@ impl Context { fn array_set_value( &mut self, - value: AcirValue, + value: &AcirValue, block_id: BlockId, var_index: &mut AcirVar, ) -> Result<(), RuntimeError> { @@ -1065,8 +1074,8 @@ impl Context { match value { AcirValue::Var(store_var, _) => { // Write the new value into the new array at the specified index - self.acir_context.write_to_memory(block_id, var_index, &store_var)?; - // Incremement the var_index in case of a nested array + self.acir_context.write_to_memory(block_id, var_index, store_var)?; + // Increment the var_index in case of a nested array *var_index = self.acir_context.add_var(*var_index, one)?; } AcirValue::Array(values) => { @@ -1075,13 +1084,13 @@ impl Context { } } AcirValue::DynamicArray(AcirDynamicArray { block_id: inner_block_id, len, .. 
}) => { - let values = try_vecmap(0..len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let values = try_vecmap(0..*len, |i| { + let index_var = self.acir_context.add_constant(i); - let read = self.acir_context.read_from_memory(inner_block_id, &index_var)?; + let read = self.acir_context.read_from_memory(*inner_block_id, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) })?; - self.array_set_value(AcirValue::Array(values.into()), block_id, var_index)?; + self.array_set_value(&AcirValue::Array(values.into()), block_id, var_index)?; } } Ok(()) @@ -1235,7 +1244,7 @@ impl Context { // The final array should will the flattened index at each outer array index let init_values = vecmap(flat_elem_type_sizes, |type_size| { - let var = self.acir_context.add_constant(FieldElement::from(type_size as u128)); + let var = self.acir_context.add_constant(type_size); AcirValue::Var(var, AcirType::field()) }); let element_type_sizes_len = init_values.len(); @@ -1292,7 +1301,7 @@ impl Context { array_len: usize, ) -> Result<(), RuntimeError> { let init_values = try_vecmap(0..array_len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let read = self.acir_context.read_from_memory(source, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) @@ -1653,8 +1662,7 @@ impl Context { ) { // Subtractions must first have the integer modulus added before truncation can be // applied. This is done in order to prevent underflow. - let integer_modulus = - self.acir_context.add_constant(FieldElement::from(2_u128.pow(bit_size))); + let integer_modulus = self.acir_context.add_constant(2_u128.pow(bit_size)); var = self.acir_context.add_var(var, integer_modulus)?; } } @@ -1760,45 +1768,72 @@ impl Context { Intrinsic::ArrayLen => { let len = match self.convert_value(arguments[0], dfg) { AcirValue::Var(_, _) => unreachable!("Non-array passed to array.len() method"), - AcirValue::Array(values) => (values.len() as u128).into(), - AcirValue::DynamicArray(array) => (array.len as u128).into(), + AcirValue::Array(values) => values.len(), + AcirValue::DynamicArray(array) => array.len, }; Ok(vec![AcirValue::Var(self.acir_context.add_constant(len), AcirType::field())]) } Intrinsic::SlicePushBack => { + // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let (array_id, array_typ, _) = + let (slice_contents, slice_typ, _) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(arguments[1], dfg); + let slice = self.convert_value(slice_contents, dfg); - // TODO(#3364): make sure that we have handled nested struct inputs - let element = self.convert_value(arguments[2], dfg); - let one = self.acir_context.add_constant(FieldElement::one()); - let new_slice_length = self.acir_context.add_var(slice_length, one)?; + let mut new_elem_size = Self::flattened_value_size(&slice); - // We attach the element no matter what in case len == capacity, as otherwise we - // may get an out of bounds error. let mut new_slice = Vector::new(); - self.slice_intrinsic_input(&mut new_slice, slice.clone())?; - new_slice.push_back(element.clone()); + self.slice_intrinsic_input(&mut new_slice, slice)?; + + let elements_to_push = &arguments[2..]; + // We only fill internal slices for nested slices (a slice inside of a slice). 
+ // So we must directly push back elements for slices which are not a nested slice. + if !slice_typ.is_nested_slice() { + for elem in elements_to_push { + let element = self.convert_value(*elem, dfg); + + new_elem_size += Self::flattened_value_size(&element); + new_slice.push_back(element); + } + } + + // Increase the slice length by one to enable accessing more elements in the slice. + let one = self.acir_context.add_constant(FieldElement::one()); + let new_slice_length = self.acir_context.add_var(slice_length, one)?; - // TODO(#3364): This works for non-nested outputs - let len = Self::flattened_value_size(&slice); - let new_elem_size = Self::flattened_value_size(&element); let new_slice_val = AcirValue::Array(new_slice); let result_block_id = self.block_id(&result_ids[1]); - self.initialize_array( - result_block_id, - len + new_elem_size, - Some(new_slice_val.clone()), - )?; + self.initialize_array(result_block_id, new_elem_size, Some(new_slice_val.clone()))?; + // The previous slice length represents the index we want to write into. let mut var_index = slice_length; - self.array_set_value(element, result_block_id, &mut var_index)?; + // Dynamic arrays are represented as flat memory. We must flatten the user facing index + // to a flattened index that matches the complex slice structure. + if slice_typ.is_nested_slice() { + let element_size = slice_typ.element_size(); + + // Multiply the element size against the var index before fetching the flattened index + // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, + // which is how `get_flattened_index` expects its index input. + let element_size_var = self.acir_context.add_constant(element_size); + var_index = self.acir_context.mul_var(slice_length, element_size_var)?; + var_index = + self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; + } - let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) { + // Write the elements we wish to push back directly. + // The slice's underlying array value should already be filled with dummy data + // to enable this write to be within bounds. + // The dummy data is either attached during SSA gen or in this match case for non-nested slices. + // These values can then be accessed due to the increased dynamic slice length. + for elem in elements_to_push { + let element = self.convert_value(*elem, dfg); + self.array_set_value(&element, result_block_id, &mut var_index)?; + } + + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { Some(self.init_element_type_sizes_array( - &array_typ, - array_id, + &slice_typ, + slice_contents, Some(new_slice_val), dfg, )?) 
@@ -1807,155 +1842,501 @@ impl Context { }; let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, - len: len + new_elem_size, + len: new_elem_size, element_type_sizes, }); Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) } Intrinsic::SlicePushFront => { + // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice: AcirValue = self.convert_value(arguments[1], dfg); - // TODO(#3364): make sure that we have handled nested struct inputs - let element = self.convert_value(arguments[2], dfg); + let (slice_contents, slice_typ, _) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice: AcirValue = self.convert_value(slice_contents, dfg); + + let mut new_slice_size = Self::flattened_value_size(&slice); + + // Increase the slice length by one to enable accessing more elements in the slice. let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.add_var(slice_length, one)?; let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - new_slice.push_front(element); - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + let elements_to_push = &arguments[2..]; + let mut elem_size = 0; + // We only fill internal slices for nested slices (a slice inside of a slice). + // So we must directly push front elements for slices which are not a nested slice. + if !slice_typ.is_nested_slice() { + for elem in elements_to_push.iter().rev() { + let element = self.convert_value(*elem, dfg); + + elem_size += Self::flattened_value_size(&element); + new_slice.push_front(element); + } + new_slice_size += elem_size; + } else { + // We have already filled the appropriate dummy values for nested slice during SSA gen. + // We need to account for that we do not go out of bounds by removing dummy data as we + // push elements to the front of our slice. + // Using this strategy we are able to avoid dynamic writes like we do for a SlicePushBack. + for elem in elements_to_push.iter().rev() { + let element = self.convert_value(*elem, dfg); + + let elem_size = Self::flattened_value_size(&element); + // Have to pop based off of the flattened value size as we read the + // slice intrinsic as a flat list of AcirValue::Var + for _ in 0..elem_size { + new_slice.pop_back(); + } + new_slice.push_front(element); + } + } + + let new_slice_val = AcirValue::Array(new_slice.clone()); + + let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array( + result_block_id, + new_slice_size, + Some(new_slice_val.clone()), + )?; + + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(new_slice_val), + dfg, + )?) 
+ } else { + None + }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: new_slice_size, + element_type_sizes, + }); + + Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) } Intrinsic::SlicePopBack => { + // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; - - let (_, _, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; + // For a pop back operation we want to fetch from the `length - 1` as this is the + // last valid index that can be accessed in a slice. After the pop back operation + // the elements stored at that index will no longer be able to be accessed. let mut var_index = new_slice_length; - let elem = self.array_get_value( - &dfg.type_of_value(result_ids[2]), - block_id, - &mut var_index, - &[], - )?; - // TODO(#3364): make sure that we have handled nested struct inputs + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice = self.convert_value(slice_contents, dfg); + + let element_size = slice_typ.element_size(); + + let mut popped_elements = Vec::new(); + // Fetch the values we are popping off of the slice. + // In the case of non-nested slice the logic is simple as we do not + // need to account for the internal slice sizes or flattening the index. + // + // The pop back operation results are of the format [slice length, slice contents, popped elements]. + // Thus, we look at the result ids at index 2 and onwards to determine the type of each popped element. + if !slice_typ.is_nested_slice() { + for res in &result_ids[2..] { + let elem = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &[], + )?; + popped_elements.push(elem); + } + } else { + // Fetch the slice sizes of the nested slice. + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + // Multiply the element size against the var index before fetching the flattened index + // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, + // which is how `get_flattened_index` expects its index input. + let element_size_var = self.acir_context.add_constant(element_size); + // We want to use an index one less than the slice length + var_index = self.acir_context.mul_var(var_index, element_size_var)?; + var_index = + self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; + + for res in &result_ids[2..] 
{ + let elem = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &slice_sizes, + )?; + popped_elements.push(elem); + } + } + let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - Ok(vec![ + let mut results = vec![ AcirValue::Var(new_slice_length, AcirType::field()), AcirValue::Array(new_slice), - elem, - ]) + ]; + results.append(&mut popped_elements); + + Ok(results) } Intrinsic::SlicePopFront => { + // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); + + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice = self.convert_value(slice_contents, dfg); let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - // TODO(#3364): make sure that we have handled nested struct inputs - let elem = new_slice - .pop_front() - .expect("There are no elements in this slice to be removed"); - Ok(vec![ - elem, - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + let element_size = slice_typ.element_size(); + + let mut popped_elements: Vec = Vec::new(); + let mut popped_elements_size = 0; + let mut var_index = self.acir_context.add_constant(FieldElement::zero()); + // Fetch the values we are popping off of the slice. + // In the case of non-nested slice the logic is simple as we do not + // need to account for the internal slice sizes or flattening the index. + // + // The pop front operation results are of the format [popped elements, slice length, slice contents]. + // Thus, we look at the result ids up to the element size to determine the type of each popped element. + if !slice_typ.is_nested_slice() { + for res in &result_ids[..element_size] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &[], + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } else { + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + for res in &result_ids[..element_size] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &slice_sizes, + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } + // It is expected that the `popped_elements_size` is the flattened size of the elements, + // as the input slice should be a dynamic array which is represented by flat memory. 
+ new_slice = new_slice.slice(popped_elements_size..); + + popped_elements.push(AcirValue::Var(new_slice_length, AcirType::field())); + popped_elements.push(AcirValue::Array(new_slice)); + + Ok(popped_elements) } Intrinsic::SliceInsert => { - // Slice insert with a constant index + // arguments = [slice_length, slice_contents, insert_index, ...elements_to_insert] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); - let index = self.convert_value(arguments[2], dfg).into_var()?; - let element = self.convert_value(arguments[3], dfg); + + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + + let slice = self.convert_value(slice_contents, dfg); + let insert_index = self.convert_value(arguments[2], dfg).into_var()?; let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.add_var(slice_length, one)?; - // TODO(#2462): Slice insert is a little less obvious on how to implement due to the case - // of having a dynamic index - // The slice insert logic will need a more involved codegen - let index = self.acir_context.var_to_expression(index)?.to_const(); - let index = index - .expect("ICE: slice length should be fully tracked and constant by ACIR gen"); - let index = index.to_u128() as usize; + let slice_size = Self::flattened_value_size(&slice); + + // Fetch the flattened index from the user provided index argument. + let element_size = slice_typ.element_size(); + let element_size_var = self.acir_context.add_constant(element_size); + let flat_insert_index = + self.acir_context.mul_var(insert_index, element_size_var)?; + let flat_user_index = + self.get_flattened_index(&slice_typ, slice_contents, flat_insert_index, dfg)?; + + let elements_to_insert = &arguments[3..]; + // Determine the elements we need to write into our resulting dynamic array. + // We need to a fully flat list of AcirVar's as a dynamic array is represented with flat memory. + let mut inner_elem_size_usize = 0; + let mut flattened_elements = Vec::new(); + for elem in elements_to_insert { + let element = self.convert_value(*elem, dfg); + let elem_size = Self::flattened_value_size(&element); + inner_elem_size_usize += elem_size; + let mut flat_elem = element.flatten().into_iter().map(|(var, _)| var).collect(); + flattened_elements.append(&mut flat_elem); + } + let inner_elem_size = self.acir_context.add_constant(inner_elem_size_usize); + // Set the maximum flattened index at which a new element should be inserted. + let max_flat_user_index = + self.acir_context.add_var(flat_user_index, inner_elem_size)?; + + // Go through the entire slice argument and determine what value should be written to the new slice. + // 1. If we are below the starting insertion index we should insert the value that was already + // in the original slice. + // 2. If we are above the starting insertion index but below the max insertion index we should insert + // the flattened element arguments. + // 3. If we are above the max insertion index we should insert the previous value from the original slice, + // as during an insertion we want to shift all elements after the insertion up an index. 
+ let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array(result_block_id, slice_size, None)?; + let mut current_insert_index = 0; + for i in 0..slice_size { + let current_index = self.acir_context.add_constant(i); + + // Check that we are above the lower bound of the insertion index + let greater_eq_than_idx = self.acir_context.more_than_eq_var( + current_index, + flat_user_index, + 64, + self.current_side_effects_enabled_var, + )?; + // Check that we are below the upper bound of the insertion index + let less_than_idx = self.acir_context.less_than_var( + current_index, + max_flat_user_index, + 64, + self.current_side_effects_enabled_var, + )?; + + // Read from the original slice the value we want to insert into our new slice. + // We need to make sure of two things: + // 1. That we read the previous element for when we have an index greater than insertion index. + // 2. That the index we are reading from is within the array bounds + let shifted_index = if i < inner_elem_size_usize { + current_index + } else { + let index_minus_elem_size = + self.acir_context.add_constant(i - inner_elem_size_usize); - let mut new_slice = Vector::new(); - self.slice_intrinsic_input(&mut new_slice, slice)?; + let use_shifted_index_pred = self + .acir_context + .mul_var(index_minus_elem_size, greater_eq_than_idx)?; - // We do not return an index out of bounds error directly here - // as the length of the slice is dynamic, and length of `new_slice` - // represents the capacity of the slice, not the actual length. - // - // Constraints should be generated during SSA gen to tell the user - // they are attempting to insert at too large of an index. - // This check prevents a panic inside of the im::Vector insert method. - if index <= new_slice.len() { - // TODO(#3364): make sure that we have handled nested struct inputs - new_slice.insert(index, element); + let not_pred = self.acir_context.sub_var(one, greater_eq_than_idx)?; + let use_current_index_pred = + self.acir_context.mul_var(not_pred, current_index)?; + + self.acir_context.add_var(use_shifted_index_pred, use_current_index_pred)? + }; + + let value_shifted_index = + self.acir_context.read_from_memory(block_id, &shifted_index)?; + + // Final predicate to determine whether we are within the insertion bounds + let should_insert_value_pred = + self.acir_context.mul_var(greater_eq_than_idx, less_than_idx)?; + let insert_value_pred = self.acir_context.mul_var( + flattened_elements[current_insert_index], + should_insert_value_pred, + )?; + + let not_pred = self.acir_context.sub_var(one, should_insert_value_pred)?; + let shifted_value_pred = + self.acir_context.mul_var(not_pred, value_shifted_index)?; + + let new_value = + self.acir_context.add_var(insert_value_pred, shifted_value_pred)?; + + self.acir_context.write_to_memory( + result_block_id, + ¤t_index, + &new_value, + )?; + + current_insert_index += 1; + if inner_elem_size_usize == current_insert_index { + current_insert_index = 0; + } } - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + // let new_slice_val = AcirValue::Array(new_slice); + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(slice), + dfg, + )?) 
+ } else { + None + }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: slice_size, + element_type_sizes, + }); + + Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) } Intrinsic::SliceRemove => { - // Slice insert with a constant index + // arguments = [slice_length, slice_contents, remove_index] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); - let index = self.convert_value(arguments[2], dfg).into_var()?; + + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + + let slice = self.convert_value(slice_contents, dfg); + let remove_index = self.convert_value(arguments[2], dfg).into_var()?; let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; - // TODO(#2462): allow slice remove with a constant index - // Slice remove is a little less obvious on how to implement due to the case - // of having a dynamic index - // The slice remove logic will need a more involved codegen - let index = self.acir_context.var_to_expression(index)?.to_const(); - let index = index - .expect("ICE: slice length should be fully tracked and constant by ACIR gen"); - let index = index.to_u128() as usize; + let slice_size = Self::flattened_value_size(&slice); let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - // We do not return an index out of bounds error directly here - // as the length of the slice is dynamic, and length of `new_slice` - // represents the capacity of the slice, not the actual length. - // - // Constraints should be generated during SSA gen to tell the user - // they are attempting to remove at too large of an index. - // This check prevents a panic inside of the im::Vector remove method. - let removed_elem = if index < new_slice.len() { - // TODO(#3364): make sure that we have handled nested struct inputs - new_slice.remove(index) + // Compiler sanity check + assert_eq!( + new_slice.len(), + slice_size, + "ICE: The read flattened slice should match the computed size" + ); + + // Fetch the flattened index from the user provided index argument. + let element_size = slice_typ.element_size(); + let element_size_var = self.acir_context.add_constant(element_size); + let flat_remove_index = + self.acir_context.mul_var(remove_index, element_size_var)?; + let flat_user_index = + self.get_flattened_index(&slice_typ, slice_contents, flat_remove_index, dfg)?; + + // Fetch the values we are remove from the slice. + // In the case of non-nested slice the logic is simple as we do not + // need to account for the internal slice sizes or flattening the index. + // As we fetch the values we can determine the size of the removed values + // which we will later use for writing the correct resulting slice. + let mut popped_elements = Vec::new(); + let mut popped_elements_size = 0; + // Set a temp index just for fetching from the original slice as `array_get_value` mutates + // the index internally. 
+ let mut temp_index = flat_user_index; + if !slice_typ.is_nested_slice() { + for res in &result_ids[2..(2 + element_size)] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut temp_index, + &[], + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } } else { - // This is a dummy value which should never be used if the appropriate - // slice access checks are generated before this slice remove call. - AcirValue::Var(slice_length, AcirType::field()) + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + for res in &result_ids[2..(2 + element_size)] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut temp_index, + &slice_sizes, + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } + + // Go through the entire slice argument and determine what value should be written to the new slice. + // 1. If the current index is greater than the removal index we must write the next value + // from the original slice to the current index + // 2. At the end of the slice reading from the next value of the original slice + // can lead to a potential out of bounds error. In this case we just fetch from the original slice + // at the current index. As we are decreasing the slice in length, this is a safe operation. + let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array(result_block_id, slice_size, None)?; + for i in 0..slice_size { + let current_index = self.acir_context.add_constant(i); + + let shifted_index = if (i + popped_elements_size) >= slice_size { + current_index + } else { + self.acir_context.add_constant(i + popped_elements_size) + }; + + let value_shifted_index = + self.acir_context.read_from_memory(block_id, &shifted_index)?; + let value_current_index = new_slice[i].borrow_var()?; + + let use_shifted_value = self.acir_context.more_than_eq_var( + current_index, + flat_user_index, + 64, + self.current_side_effects_enabled_var, + )?; + + let shifted_value_pred = + self.acir_context.mul_var(value_shifted_index, use_shifted_value)?; + let not_pred = self.acir_context.sub_var(one, use_shifted_value)?; + let current_value_pred = + self.acir_context.mul_var(not_pred, value_current_index)?; + + let new_value = + self.acir_context.add_var(shifted_value_pred, current_value_pred)?; + + self.acir_context.write_to_memory( + result_block_id, + ¤t_index, + &new_value, + )?; + } + + let new_slice_val = AcirValue::Array(new_slice); + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(new_slice_val), + dfg, + )?) + } else { + None }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: slice_size, + element_type_sizes, + }); - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - removed_elem, - ]) + let mut result = vec![AcirValue::Var(new_slice_length, AcirType::field()), result]; + result.append(&mut popped_elements); + + Ok(result) } _ => todo!("expected a black box function"), } @@ -1978,7 +2359,7 @@ impl Context { AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. 
}) => { for i in 0..len { // We generate witnesses corresponding to the array values - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let value_read_var = self.acir_context.read_from_memory(block_id, &index_var)?; diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index 8f2fe2d236b..fbc95a16387 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -104,6 +104,14 @@ impl Type { } } + pub(crate) fn is_nested_slice(&self) -> bool { + if let Type::Slice(element_types) = self { + element_types.as_ref().iter().any(|typ| typ.contains_slice_element()) + } else { + false + } + } + /// True if this type is an array (or slice) or internally contains an array (or slice) pub(crate) fn contains_an_array(&self) -> bool { match self { diff --git a/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs b/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs index d40ef1f996e..fede0e540e2 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs @@ -174,6 +174,9 @@ impl<'f> Context<'f> { panic!("ICE: should have inner slice set for {slice_value}") }); slice_sizes.insert(results[0], inner_slice.clone()); + if slice_value != results[0] { + self.mapped_slice_values.insert(slice_value, results[0]); + } } } } @@ -198,17 +201,11 @@ impl<'f> Context<'f> { let inner_sizes = slice_sizes.get_mut(array).expect("ICE expected slice sizes"); inner_sizes.1.push(*value); - - let value_parent = self.resolve_slice_parent(*value); - if slice_values.contains(&value_parent) { - // Map the value parent to the current array in case nested slices - // from the current array are set to larger values later in the program - self.mapped_slice_values.insert(value_parent, *array); - } } if let Some(inner_sizes) = slice_sizes.get_mut(array) { let inner_sizes = inner_sizes.clone(); + slice_sizes.insert(results[0], inner_sizes); self.mapped_slice_values.insert(*array, results[0]); @@ -224,14 +221,27 @@ impl<'f> Context<'f> { | Intrinsic::SlicePopBack | Intrinsic::SliceInsert | Intrinsic::SliceRemove => (1, 1), - Intrinsic::SlicePopFront => (1, 2), + // `pop_front` returns the popped element, and then the respective slice. + // This means in the case of a slice with structs, the result index of the popped slice + // will change depending on the number of elements in the struct. + // For example, a slice with four elements will look as such in SSA: + // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) + // where v7 is the slice length and v8 is the popped slice itself. + Intrinsic::SlicePopFront => (1, results.len() - 1), _ => return, }; + let slice_contents = arguments[argument_index]; match intrinsic { Intrinsic::SlicePushBack | Intrinsic::SlicePushFront | Intrinsic::SliceInsert => { - let slice_contents = arguments[argument_index]; + for arg in &arguments[(argument_index + 1)..] 
{ + let element_typ = self.inserter.function.dfg.type_of_value(*arg); + if element_typ.contains_slice_element() { + slice_values.push(*arg); + self.compute_slice_sizes(*arg, slice_sizes); + } + } if let Some(inner_sizes) = slice_sizes.get_mut(&slice_contents) { inner_sizes.0 += 1; @@ -240,12 +250,12 @@ impl<'f> Context<'f> { self.mapped_slice_values .insert(slice_contents, results[result_index]); + self.slice_parents.insert(results[result_index], slice_contents); } } Intrinsic::SlicePopBack - | Intrinsic::SlicePopFront - | Intrinsic::SliceRemove => { - let slice_contents = arguments[argument_index]; + | Intrinsic::SliceRemove + | Intrinsic::SlicePopFront => { // We do not decrement the size on intrinsics that could remove values from a slice. // This is because we could potentially go back to the smaller slice and not fill in dummies. // This pass should be tracking the potential max that a slice ***could be*** @@ -255,6 +265,7 @@ impl<'f> Context<'f> { self.mapped_slice_values .insert(slice_contents, results[result_index]); + self.slice_parents.insert(results[result_index], slice_contents); } } _ => {} @@ -277,7 +288,6 @@ impl<'f> Context<'f> { if slice_values.contains(array) { let (new_array_op_instr, call_stack) = self.get_updated_array_op_instr(*array, slice_sizes, instruction); - self.inserter.push_instruction_value( new_array_op_instr, instruction, @@ -288,6 +298,55 @@ impl<'f> Context<'f> { self.inserter.push_instruction(instruction, block); } } + Instruction::Call { func: _, arguments } => { + let mut args_to_replace = Vec::new(); + for (i, arg) in arguments.iter().enumerate() { + let element_typ = self.inserter.function.dfg.type_of_value(*arg); + if slice_values.contains(arg) && element_typ.contains_slice_element() { + args_to_replace.push((i, *arg)); + } + } + if args_to_replace.is_empty() { + self.inserter.push_instruction(instruction, block); + } else { + // Using the original slice is ok to do as during collection of slice information + // we guarantee that only the arguments to slice intrinsic calls can be replaced. 
+ let slice_contents = arguments[1]; + + let element_typ = self.inserter.function.dfg.type_of_value(arguments[1]); + let elem_depth = Self::compute_nested_slice_depth(&element_typ); + + let mut max_sizes = Vec::new(); + max_sizes.resize(elem_depth, 0); + // We want the max for the parent of the argument + let parent = self.resolve_slice_parent(slice_contents); + self.compute_slice_max_sizes(parent, slice_sizes, &mut max_sizes, 0); + + for (index, arg) in args_to_replace { + let element_typ = self.inserter.function.dfg.type_of_value(arg); + max_sizes.remove(0); + let new_array = + self.attach_slice_dummies(&element_typ, Some(arg), false, &max_sizes); + + let instruction_id = instruction; + let (instruction, call_stack) = + self.inserter.map_instruction(instruction_id); + let new_call_instr = match instruction { + Instruction::Call { func, mut arguments } => { + arguments[index] = new_array; + Instruction::Call { func, arguments } + } + _ => panic!("Expected call instruction"), + }; + self.inserter.push_instruction_value( + new_call_instr, + instruction_id, + block, + call_stack, + ); + } + } + } _ => { self.inserter.push_instruction(instruction, block); } @@ -314,6 +373,7 @@ impl<'f> Context<'f> { let typ = self.inserter.function.dfg.type_of_value(array_id); let depth = Self::compute_nested_slice_depth(&typ); max_sizes.resize(depth, 0); + max_sizes[0] = *current_size; self.compute_slice_max_sizes(array_id, slice_sizes, &mut max_sizes, 1); @@ -370,9 +430,12 @@ impl<'f> Context<'f> { if let Some(value) = value { let mut slice = im::Vector::new(); - let array = match self.inserter.function.dfg[value].clone() { + let value = self.inserter.function.dfg[value].clone(); + let array = match value { Value::Array { array, .. } => array, - _ => panic!("Expected an array value"), + _ => { + panic!("Expected an array value"); + } }; if is_parent_slice { @@ -487,7 +550,6 @@ impl<'f> Context<'f> { self.compute_slice_max_sizes(*inner_slice, slice_sizes, max_sizes, depth + 1); } - max_sizes[depth] = max; if max > max_sizes[depth] { max_sizes[depth] = max; } diff --git a/cspell.json b/cspell.json index 90d3963566d..fc8d8d7e82c 100644 --- a/cspell.json +++ b/cspell.json @@ -82,6 +82,7 @@ "nixpkgs", "noirc", "noirup", + "nomicfoundation", "pedersen", "peekable", "plonkc", diff --git a/docs/docs/language_concepts/01_functions.md b/docs/docs/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/docs/language_concepts/01_functions.md +++ b/docs/docs/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/docs/language_concepts/07_mutability.md b/docs/docs/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/docs/language_concepts/07_mutability.md +++ b/docs/docs/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/package.json b/docs/package.json index db0efbe7543..ee211065683 100644 --- a/docs/package.json +++ b/docs/package.json @@ -4,8 +4,8 @@ "private": true, "scripts": { "start": "docusaurus start", - "build": "docusaurus build", - "setStable": "node ./scripts/setStable.js" + "build": "yarn version::stables && docusaurus build", + "version::stables": "node ./scripts/setStable.js" }, "dependencies": { "@docusaurus/core": "^2.4.0", diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md index 7cb43c4c5f2..069d86c46d0 100644 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md @@ -18,7 +18,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -26,7 +26,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md index 4641521b1d9..ad902c42c9b 100644 --- a/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md index c4bc0545a1c..171b7d3dbda 100644 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md @@ -18,7 +18,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -26,7 +26,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md index c8ccb4f8b9f..a7240a54e5c 100644 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md @@ -39,7 +39,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md index 54c618599d2..8fd63293c13 100644 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md @@ -18,7 +18,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -26,7 +26,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md index 5631a322659..90a1f3379a2 100644 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md index 54c618599d2..8fd63293c13 100644 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md @@ -18,7 +18,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -26,7 +26,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. 
```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md index 69798c7a276..a9c93e61167 100644 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 diff --git a/noir_stdlib/src/slice.nr b/noir_stdlib/src/slice.nr index ca06e2aae44..a5a9a38ed53 100644 --- a/noir_stdlib/src/slice.nr +++ b/noir_stdlib/src/slice.nr @@ -21,28 +21,16 @@ impl [T] { #[builtin(slice_pop_front)] pub fn pop_front(_self: Self) -> (T, Self) { } - pub fn insert(self, _index: Field, _elem: T) -> Self { - // TODO(#2462): Slice insert with a dynamic index - crate::assert_constant(_index); - self.__slice_insert(_index, _elem) - } - /// Insert an element at a specified index, shifting all elements /// after it to the right #[builtin(slice_insert)] - fn __slice_insert(_self: Self, _index: Field, _elem: T) -> Self { } - - pub fn remove(self, _index: Field) -> (Self, T) { - // TODO(#2462): Slice remove with a dynamic index - crate::assert_constant(_index); - self.__slice_remove(_index) - } + pub fn insert(_self: Self, _index: Field, _elem: T) -> Self { } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the altered slice and /// the removed element #[builtin(slice_remove)] - fn __slice_remove(_self: Self, _index: Field) -> (Self, T) { } + pub fn remove(_self: Self, _index: Field) -> (Self, T) { } // Append each element of the `other` slice to the end of `self`. // This returns a new slice and leaves both input slices unchanged. 
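The `slice.nr` hunk above removes the `assert_constant` wrappers, so `insert` and `remove` are now exposed directly as builtins and accept indices that are only known at runtime. A minimal usage sketch (the concrete values are illustrative and assume the index stays in bounds):

```rust
fn main(x: Field) {
    // `x` is a runtime value; before this change the index had to be a compile-time constant.
    let mut slice = [10, 20, 30].as_slice();

    // Insert 15 at position `x`, shifting later elements to the right.
    slice = slice.insert(x, 15);
    assert(slice.len() == 4);

    // Remove the element at position `x`, getting back the shortened slice and the removed value.
    let (rest, removed) = slice.remove(x);
    assert(removed == 15);
    assert(rest.len() == 3);
}
```

This mirrors the updated `slice_dynamic_index` test, which now calls `insert` and `remove` with indices derived from the program input instead of constants.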
diff --git a/release-please-config.json b/release-please-config.json index afc0bfd420d..7995ea37381 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -48,6 +48,11 @@ "type": "json", "path": "tooling/noirc_abi_wasm/package.json", "jsonpath": "$.version" + }, + { + "type": "json", + "path": "docs/docs/package.json", + "jsonpath": "$.version" } ] }, @@ -67,6 +72,46 @@ "brillig_vm/Cargo.toml", "stdlib/Cargo.toml", "flake.nix", + { + "type": "json", + "path": "acvm_js/package.json", + "jsonpath": "$.version" + }, + { + "type": "toml", + "path": "acir/Cargo.toml", + "jsonpath": "$.workspace.dependencies.acir.version" + }, + { + "type": "toml", + "path": "acir_field/Cargo.toml", + "jsonpath": "$.workspace.dependencies.acir_field.version" + }, + { + "type": "toml", + "path": "stdlib/Cargo.toml", + "jsonpath": "$.workspace.dependencies.stdlib.version" + }, + { + "type": "toml", + "path": "brillig/Cargo.toml", + "jsonpath": "$.workspace.dependencies.brillig.version" + }, + { + "type": "toml", + "path": "brillig_vm/Cargo.toml", + "jsonpath": "$.workspace.dependencies.brillig_vm.version" + }, + { + "type": "toml", + "path": "blackbox_solver/Cargo.toml", + "jsonpath": "$.workspace.dependencies.acvm_blackbox_solver.version" + }, + { + "type": "toml", + "path": "barretenberg_blackbox_solver/Cargo.toml", + "jsonpath": "$.workspace.dependencies.barretenberg_blackbox_solver.version" + }, { "type": "json", "path": "acvm_js/package.json", diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr index 2e5c0122dfb..374d2ba4c26 100644 --- a/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -123,13 +123,13 @@ fn dynamic_slice_merge_if(mut slice: [Field], x: Field) { let (first_elem, rest_of_slice) = popped_slice.pop_front(); assert(first_elem == 12); assert(rest_of_slice.len() == 6); - // TODO(#2462): SliceInsert and SliceRemove with a dynamic index are not yet implemented in ACIR gen - slice = rest_of_slice.insert(2, 20); + + slice = rest_of_slice.insert(x - 2, 20); assert(slice[2] == 20); assert(slice[6] == 30); assert(slice.len() == 7); - // TODO(#2462): SliceInsert and SliceRemove with a dynamic index are not yet implemented in ACIR gen - let (removed_slice, removed_elem) = slice.remove(3); + + let (removed_slice, removed_elem) = slice.remove(x - 1); // The deconstructed tuple assigns to the slice but is not seen outside of the if statement // without a direct assignment slice = removed_slice; diff --git a/test_programs/execution_success/slice_struct_field/src/main.nr b/test_programs/execution_success/slice_struct_field/src/main.nr index c00fdf85180..a5b971ada4b 100644 --- a/test_programs/execution_success/slice_struct_field/src/main.nr +++ b/test_programs/execution_success/slice_struct_field/src/main.nr @@ -23,12 +23,12 @@ fn main(y: pub Field) { let foo_two = Foo { a: 4, b: b_two, bar: Bar { inner: [103, 104, 105] } }; let foo_three = Foo { a: 7, b: [8, 9, 22], bar: Bar { inner: [106, 107, 108] } }; - let foo_four = Foo { a: 10, b: [11, 12, 23], bar: Bar { inner: [109, 110, 111] } }; + let mut foo_four = Foo { a: 10, b: [11, 12, 23], bar: Bar { inner: [109, 110, 111] } }; let mut x = [foo_one, foo_two]; x = x.push_back(foo_three); x = x.push_back(foo_four); - + assert(x[y - 3].a == 1); let struct_slice = x[y - 3].b; for i in 0..4 { @@ -60,6 +60,14 @@ fn main(y: pub Field) { assert(x[y].bar.inner == [109, 110, 
111]); // Check that switching the lhs and rhs is still valid assert([109, 110, 111] == x[y].bar.inner); + + assert(x[y - 3].bar.inner == [100, 101, 102]); + assert(x[y - 2].bar.inner == [103, 104, 105]); + assert(x[y - 1].bar.inner == [106, 107, 108]); + assert(x[y].bar.inner == [109, 110, 111]); + // Check that switching the lhs and rhs is still valid + assert([109, 110, 111] == x[y].bar.inner); + // TODO: Enable merging nested slices // if y != 2 { // x[y].a = 50; @@ -75,12 +83,13 @@ fn main(y: pub Field) { // assert(x[2].b[0] == 100); // assert(x[2].b[1] == 101); // assert(x[2].b[2] == 102); + let q = x.push_back(foo_four); let foo_parent_one = FooParent { parent_arr: [0, 1, 2], foos: x }; let foo_parent_two = FooParent { parent_arr: [3, 4, 5], foos: q }; let mut foo_parents = [foo_parent_one]; foo_parents = foo_parents.push_back(foo_parent_two); - // TODO: make a separate test for compile time + // TODO: make a separate test for entirely compile time // foo_parents[1].foos.push_back(foo_four); // TODO: Merging nested slices is broken // if y == 3 { @@ -88,6 +97,7 @@ fn main(y: pub Field) { // } else { // foo_parents[y - 2].foos[y - 1].b[y - 1] = 1000; // } + assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 21); foo_parents[y - 2].foos[y - 2].b[y - 1] = 5000; assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 5000); @@ -108,11 +118,15 @@ fn main(y: pub Field) { assert(foo_parents[y - 2].foos[y - 1].a == 7); foo_parents[y - 2].foos[y - 1].a = 50; + assert(foo_parents[y - 2].foos[y - 1].a == 50); let b_array = foo_parents[y - 2].foos[y - 1].b; + assert(b_array[0] == 8); + assert(b_array[1] == 9); assert(b_array[2] == 22); assert(b_array.len() == 3); - // Test setting a nested array with non-dynamic + + // // Test setting a nested array with non-dynamic let x = [5, 6, 5000, 21, 100, 101].as_slice(); foo_parents[y - 2].foos[y - 1].b = x; @@ -120,16 +134,52 @@ fn main(y: pub Field) { assert(foo_parents[y - 2].foos[y - 1].b[4] == 100); assert(foo_parents[y - 2].foos[y - 1].b[5] == 101); + // Need to account for that foo_parents is not modified outside of this function test_basic_intrinsics_nested_slices(foo_parents, y); - // TODO(#3364): still have to enable slice intrinsics on dynamic nested slices - // assert(foo_parents[y - 2].foos.len() == 5); - // foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); - // assert(foo_parents[y - 2].foos.len() == 6); + test_complex_intrinsic_nested_slices(foo_parents, y); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[6] == 500); + + let (popped_slice, last_elem) = foo_parents[y - 2].foos[y - 1].b.pop_back(); + foo_parents[y - 2].foos[y - 1].b = popped_slice; + assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); + assert(last_elem == 500); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_front(11); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[0] == 11); + + assert(foo_parents[y - 2].foos.len() == 5); + foo_four.a = 40; + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 6); + assert(foo_parents[y - 2].foos[y + 2].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 7); + assert(foo_parents[y - 2].foos[6].a == 40); + assert(foo_parents[y - 2].foos[5].bar.inner == [109, 
110, 111]); + assert(foo_parents[y - 2].foos[6].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 8); + assert(foo_parents[y - 2].foos[6].a == 40); + assert(foo_parents[y - 2].foos[5].bar.inner == [109, 110, 111]); + assert(foo_parents[y - 2].foos[6].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 9); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 10); + let b_array = foo_parents[y - 2].foos[y - 1].b; - assert(b_array[0] == 5); - assert(b_array[1] == 6); - assert(b_array[2] == 5000); - assert(b_array[3] == 21); + assert(b_array[0] == 11); + assert(b_array[1] == 5); + assert(b_array[2] == 6); + assert(b_array[3] == 5000); let b_array = foo_parents[y - 2].foos[y].b; assert(foo_parents[y - 2].foos[y].a == 10); @@ -175,3 +225,248 @@ fn test_basic_intrinsics_nested_slices(mut foo_parents: [FooParent], y: Field) { assert(foo_parents[y - 2].foos[y - 1].b[2] == 20); assert(foo_parents[y - 2].foos[y - 1].b[3] == 21); } + +// This method test intrinsics on nested slices with complex inputs such as +// pushing a `Foo` struct onto a slice in `FooParents`. +fn test_complex_intrinsic_nested_slices(mut foo_parents: [FooParent], y: Field) { + let mut foo = Foo { a: 13, b: [14, 15, 16], bar: Bar { inner: [109, 110, 111] } }; + assert(foo_parents[y - 2].foos.len() == 5); + foo.a = 40; + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo); + assert(foo_parents[1].foos.len() == 6); + assert(foo_parents[1].foos[5].a == 40); + assert(foo_parents[1].foos[5].b[0] == 14); + assert(foo_parents[1].foos[5].b[2] == 16); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); + assert(foo_parents[1].foos[2].b.len() == 7); + assert(foo_parents[1].foos[2].b[6] == 500); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + assert(foo_parents[1].foos[5].a == 40); + assert(foo_parents[1].foos[5].b[0] == 14); + assert(foo_parents[1].foos[5].b[2] == 16); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (popped_slice, last_foo) = foo_parents[y - 2].foos.pop_back(); + foo_parents[y - 2].foos = popped_slice; + assert(foo_parents[y - 2].foos.len() == 5); + assert(last_foo.a == 40); + assert(last_foo.b[0] == 14); + assert(last_foo.b[1] == 15); + assert(last_foo.b[2] == 16); + assert(last_foo.bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_front(foo); + assert(foo_parents[1].foos.len() == 6); + assert(foo_parents[1].foos[0].a == 40); + assert(foo_parents[1].foos[0].b[0] == 14); + assert(foo_parents[1].foos[0].b[1] == 15); + assert(foo_parents[1].foos[0].b[2] == 16); + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[1].a == 1); + assert(foo_parents[1].foos[1].bar.inner == [100, 101, 102]); + + let (first_foo, rest_of_slice) = foo_parents[y - 2].foos.pop_front(); + + foo_parents[y - 2].foos = rest_of_slice; + assert(first_foo.a == 40); + assert(first_foo.b[0] == 14); + 
assert(first_foo.b[1] == 15); + assert(first_foo.b[2] == 16); + assert(first_foo.bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + test_insert_remove_const_index(foo_parents, y, foo); + + // Check values before insertion + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos.len() == 5); + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.insert(y - 1, foo); + assert(foo_parents[1].foos.len() == 6); + + // Check values correctly moved after insertion + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 40); + assert(foo_parents[1].foos[2].b[0] == 14); + assert(foo_parents[1].foos[2].b[2] == 16); + assert(foo_parents[1].foos[2].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[3].a == 50); + assert(foo_parents[1].foos[3].b[0] == 5); + assert(foo_parents[1].foos[3].b[2] == 5000); + assert(foo_parents[1].foos[3].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[4].a == 10); + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos.remove(y - 1); + foo_parents[1].foos = rest_of_slice; + + // Check that the accurate element was removed + assert(removed_elem.a == 40); + assert(removed_elem.b[0] == 14); + assert(removed_elem.b[2] == 16); + assert(removed_elem.bar.inner == [109, 110, 111]); + + // Check that we have altered our slice accurately following a removal + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); 
+ assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); +} + +fn test_insert_remove_const_index(mut foo_parents: [FooParent], y: Field, foo: Foo) { + // Check values before insertion + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos.len() == 5); + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.insert(2, foo); + assert(foo_parents[1].foos.len() == 6); + + // Check values correctly moved after insertion + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 40); + assert(foo_parents[1].foos[2].b[0] == 14); + assert(foo_parents[1].foos[2].b[2] == 16); + assert(foo_parents[1].foos[2].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[3].a == 50); + assert(foo_parents[1].foos[3].b[0] == 5); + assert(foo_parents[1].foos[3].b[2] == 5000); + assert(foo_parents[1].foos[3].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[4].a == 10); + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos.remove(2); + foo_parents[1].foos = rest_of_slice; + + // Check that the accurate element was removed + assert(removed_elem.a == 40); + assert(removed_elem.b[0] == 14); + assert(removed_elem.b[2] == 16); + assert(removed_elem.bar.inner == [109, 110, 111]); + + // Check that we have altered our slice accurately following a removal + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + 
assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); +} diff --git a/tooling/nargo_fmt/build.rs b/tooling/nargo_fmt/build.rs index fba6a1730a8..c356b403ae5 100644 --- a/tooling/nargo_fmt/build.rs +++ b/tooling/nargo_fmt/build.rs @@ -58,12 +58,14 @@ fn format_{test_name}() {{ let expected_output = r#"{output_source}"#; - let (parsed_module, errors) = noirc_frontend::parse_program(&input); + let (parsed_module, _errors) = noirc_frontend::parse_program(&input); let config = nargo_fmt::Config::of("{config}").unwrap(); let fmt_text = nargo_fmt::format(&input, parsed_module, &config); - std::fs::write("{output_source_path}", fmt_text.clone()); + if std::env::var("UPDATE_EXPECT").is_ok() {{ + std::fs::write("{output_source_path}", fmt_text.clone()).unwrap(); + }} similar_asserts::assert_eq!(fmt_text, expected_output); }}
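The `nargo_fmt` build script change gates regeneration of the expected-output files behind an `UPDATE_EXPECT` environment variable, so a plain test run can no longer silently rewrite them. A standalone sketch of that pattern, assuming a hypothetical `check_golden` helper and golden-file path (the generated tests themselves embed the expected output at build time and compare with `similar_asserts::assert_eq!`):

```rust
use std::{env, fs};

/// Compare `actual` against a checked-in golden file, rewriting the file only
/// when the developer explicitly opts in with `UPDATE_EXPECT=1`.
fn check_golden(actual: &str, golden_path: &str) {
    if env::var("UPDATE_EXPECT").is_ok() {
        fs::write(golden_path, actual).unwrap();
    }
    let expected = fs::read_to_string(golden_path).unwrap();
    assert_eq!(actual, expected);
}

fn main() {
    // Hypothetical usage: regenerate with `UPDATE_EXPECT=1 cargo run`, then verify with `cargo run`.
    check_golden("fn main() {}\n", "tests/expected/empty_fn.nr");
}
```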