diff --git a/.aztec-sync-commit b/.aztec-sync-commit index 6e1f033e292..8cfda4ff013 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -07d6dc29db2eb04154b8f0c66bd1efa74c0e8b9d +d9de430e4a01d6908a9b1fe5e6ede9309aa8a10d diff --git a/.github/workflows/gates_report_brillig_execution.yml b/.github/workflows/gates_report_brillig_execution.yml new file mode 100644 index 00000000000..0ef98f5045b --- /dev/null +++ b/.github/workflows/gates_report_brillig_execution.yml @@ -0,0 +1,92 @@ +name: Report Brillig opcodes executed diff + +on: + push: + branches: + - master + pull_request: + +jobs: + build-nargo: + runs-on: ubuntu-latest + strategy: + matrix: + target: [x86_64-unknown-linux-gnu] + + steps: + - name: Checkout Noir repo + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.74.1 + + - uses: Swatinem/rust-cache@v2 + with: + key: ${{ matrix.target }} + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Build Nargo + run: cargo build --package nargo_cli --release + + - name: Package artifacts + run: | + mkdir dist + cp ./target/release/nargo ./dist/nargo + 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: nargo + path: ./dist/* + retention-days: 3 + + compare_brillig_execution_reports: + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate Brillig execution report + working-directory: ./test_programs + run: | + chmod +x gates_report_brillig_execution.sh + 
./gates_report_brillig_execution.sh + mv gates_report_brillig_execution.json ../gates_report_brillig_execution.json + + - name: Compare Brillig execution reports + id: brillig_execution_diff + uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 + with: + report: gates_report_brillig_execution.json + header: | + # Changes to number of Brillig opcodes executed + brillig_report: true + summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) + + - name: Add bytecode size diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: brillig_execution + # delete the comment in case changes no longer impact brillig bytecode sizes + delete: ${{ !steps.brillig_execution_diff.outputs.markdown }} + message: ${{ steps.brillig_execution_diff.outputs.markdown }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index aeb7d8757c4..f1f0ea47bcf 100644 --- a/.gitignore +++ b/.gitignore @@ -34,6 +34,7 @@ tooling/noir_js/lib gates_report.json gates_report_brillig.json +gates_report_brillig_execution.json # Github Actions scratch space # This gives a location to download artifacts into the repository in CI without making git dirty. 
diff --git a/Cargo.lock b/Cargo.lock index 3add28d3939..22b684fc13a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2743,14 +2743,12 @@ dependencies = [ "nargo", "nargo_fmt", "nargo_toml", - "noirc_artifacts", "noirc_driver", "noirc_errors", "noirc_frontend", "rayon", "serde", "serde_json", - "serde_with", "strum", "thiserror", "tokio", @@ -2776,6 +2774,7 @@ dependencies = [ "noirc_artifacts", "noirc_driver", "noirc_errors", + "noirc_evaluator", "serde", "serde_json", "tempfile", diff --git a/acvm-repo/acir/codegen/acir.cpp b/acvm-repo/acir/codegen/acir.cpp index 1bb8931c642..0880b5a0cbe 100644 --- a/acvm-repo/acir/codegen/acir.cpp +++ b/acvm-repo/acir/codegen/acir.cpp @@ -694,7 +694,7 @@ namespace Program { }; struct Trap { - Program::HeapArray revert_data; + Program::HeapVector revert_data; friend bool operator==(const Trap&, const Trap&); std::vector bincodeSerialize() const; diff --git a/acvm-repo/acvm/tests/solver.rs b/acvm-repo/acvm/tests/solver.rs index efa8de289e5..2828ea3d79e 100644 --- a/acvm-repo/acvm/tests/solver.rs +++ b/acvm-repo/acvm/tests/solver.rs @@ -1,10 +1,10 @@ use std::collections::{BTreeMap, HashSet}; use std::sync::Arc; -use acir::brillig::{BitSize, IntegerBitSize}; +use acir::brillig::{BitSize, HeapVector, IntegerBitSize}; use acir::{ acir_field::GenericFieldElement, - brillig::{BinaryFieldOp, HeapArray, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray}, + brillig::{BinaryFieldOp, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray}, circuit::{ brillig::{BrilligBytecode, BrilligFunctionId, BrilligInputs, BrilligOutputs}, opcodes::{BlackBoxFuncCall, BlockId, BlockType, FunctionInput, MemOp}, @@ -667,7 +667,12 @@ fn unsatisfied_opcode_resolved_brillig() { let jmp_if_opcode = BrilligOpcode::JumpIf { condition: MemoryAddress::direct(2), location: location_of_stop }; - let trap_opcode = BrilligOpcode::Trap { revert_data: HeapArray::default() }; + let trap_opcode = BrilligOpcode::Trap { + revert_data: HeapVector { + pointer: 
MemoryAddress::direct(0), + size: MemoryAddress::direct(3), + }, + }; let stop_opcode = BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }; let brillig_bytecode = BrilligBytecode { @@ -682,6 +687,11 @@ fn unsatisfied_opcode_resolved_brillig() { bit_size: BitSize::Integer(IntegerBitSize::U32), value: FieldElement::from(0u64), }, + BrilligOpcode::Const { + destination: MemoryAddress::direct(3), + bit_size: BitSize::Integer(IntegerBitSize::U32), + value: FieldElement::from(0u64), + }, calldata_copy_opcode, equal_opcode, jmp_if_opcode, @@ -739,7 +749,7 @@ fn unsatisfied_opcode_resolved_brillig() { ACVMStatus::Failure(OpcodeResolutionError::BrilligFunctionFailed { function_id: BrilligFunctionId(0), payload: None, - call_stack: vec![OpcodeLocation::Brillig { acir_index: 0, brillig_index: 5 }] + call_stack: vec![OpcodeLocation::Brillig { acir_index: 0, brillig_index: 6 }] }), "The first opcode is not satisfiable, expected an error indicating this" ); diff --git a/acvm-repo/brillig/src/opcodes.rs b/acvm-repo/brillig/src/opcodes.rs index 69ca9ed379a..0d87c5b9410 100644 --- a/acvm-repo/brillig/src/opcodes.rs +++ b/acvm-repo/brillig/src/opcodes.rs @@ -305,7 +305,7 @@ pub enum BrilligOpcode { BlackBox(BlackBoxOp), /// Used to denote execution failure, returning data after the offset Trap { - revert_data: HeapArray, + revert_data: HeapVector, }, /// Stop execution, returning data after the offset Stop { diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index 04dd85d9324..0d90a4c8502 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -330,18 +330,18 @@ pub(crate) fn evaluate_black_box let mut input = BigUint::from_bytes_be(&input.to_be_bytes()); let radix = BigUint::from_bytes_be(&radix.to_be_bytes()); - let mut limbs: Vec> = Vec::with_capacity(output.size); + let mut limbs: Vec> = vec![MemoryValue::default(); output.size]; - for _ in 0..output.size { + for i in 
(0..output.size).rev() { let limb = &input % &radix; if *output_bits { - limbs.push(MemoryValue::new_integer( + limbs[i] = MemoryValue::new_integer( if limb.is_zero() { 0 } else { 1 }, IntegerBitSize::U1, - )); + ); } else { let limb: u8 = limb.try_into().unwrap(); - limbs.push(MemoryValue::new_integer(limb as u128, IntegerBitSize::U8)); + limbs[i] = MemoryValue::new_integer(limb as u128, IntegerBitSize::U8); }; input /= &radix; } diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index 4d5ce4203f9..89d72c4614b 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -347,10 +347,11 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> VM<'a, F, B> { self.increment_program_counter() } Opcode::Trap { revert_data } => { - if revert_data.size > 0 { + let revert_data_size = self.memory.read(revert_data.size).to_usize(); + if revert_data_size > 0 { self.trap( self.memory.read_ref(revert_data.pointer).unwrap_direct(), - revert_data.size, + revert_data_size, ) } else { self.trap(0, 0) @@ -937,8 +938,18 @@ mod tests { size_address: MemoryAddress::direct(0), offset_address: MemoryAddress::direct(1), }, - Opcode::Jump { location: 5 }, - Opcode::Trap { revert_data: HeapArray::default() }, + Opcode::Jump { location: 6 }, + Opcode::Const { + destination: MemoryAddress::direct(0), + bit_size: BitSize::Integer(IntegerBitSize::U32), + value: FieldElement::from(0u64), + }, + Opcode::Trap { + revert_data: HeapVector { + pointer: MemoryAddress::direct(0), + size: MemoryAddress::direct(0), + }, + }, Opcode::BinaryFieldOp { op: BinaryFieldOp::Equals, lhs: MemoryAddress::direct(0), @@ -966,6 +977,8 @@ mod tests { assert_eq!(status, VMStatus::InProgress); let status = vm.process_opcode(); assert_eq!(status, VMStatus::InProgress); + let status = vm.process_opcode(); + assert_eq!(status, VMStatus::InProgress); let output_cmp_value = vm.memory.read(MemoryAddress::direct(2)); assert_eq!(output_cmp_value.to_field(), false.into()); @@ 
-978,7 +991,7 @@ mod tests { status, VMStatus::Failure { reason: FailureReason::Trap { revert_data_offset: 0, revert_data_size: 0 }, - call_stack: vec![4] + call_stack: vec![5] } ); diff --git a/compiler/integration-tests/package.json b/compiler/integration-tests/package.json index 62ae699fc4e..a438c2965c3 100644 --- a/compiler/integration-tests/package.json +++ b/compiler/integration-tests/package.json @@ -13,7 +13,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.60.0", + "@aztec/bb.js": "0.61.0", "@noir-lang/noir_js": "workspace:*", "@noir-lang/noir_wasm": "workspace:*", "@nomicfoundation/hardhat-chai-matchers": "^2.0.0", diff --git a/compiler/noirc_errors/src/debug_info.rs b/compiler/noirc_errors/src/debug_info.rs index b480d20fde4..77028f739bd 100644 --- a/compiler/noirc_errors/src/debug_info.rs +++ b/compiler/noirc_errors/src/debug_info.rs @@ -12,7 +12,6 @@ use serde::Serializer; use serde_with::serde_as; use serde_with::DisplayFromStr; use std::collections::BTreeMap; -use std::collections::HashMap; use std::io::Read; use std::io::Write; use std::mem; @@ -48,6 +47,9 @@ pub type DebugVariables = BTreeMap; pub type DebugFunctions = BTreeMap; pub type DebugTypes = BTreeMap; +#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] +pub struct ProcedureDebugId(pub u32); + #[derive(Default, Debug, Clone, Deserialize, Serialize)] pub struct ProgramDebugInfo { pub debug_infos: Vec, @@ -104,14 +106,9 @@ pub struct DebugInfo { pub variables: DebugVariables, pub functions: DebugFunctions, pub types: DebugTypes, -} - -/// Holds OpCodes Counts for Acir and Brillig Opcodes -/// To be printed with `nargo info --profile-info` -#[derive(Default, Debug, Serialize, Deserialize, Clone)] -pub struct OpCodesCount { - pub acir_size: usize, - pub brillig_size: usize, + /// This is a map per brillig function representing the range of opcodes where a procedure 
is activated. + pub brillig_procedure_locs: + BTreeMap>, } impl DebugInfo { @@ -124,8 +121,12 @@ impl DebugInfo { variables: DebugVariables, functions: DebugFunctions, types: DebugTypes, + brillig_procedure_locs: BTreeMap< + BrilligFunctionId, + BTreeMap, + >, ) -> Self { - Self { locations, brillig_locations, variables, functions, types } + Self { locations, brillig_locations, variables, functions, types, brillig_procedure_locs } } /// Updates the locations map when the [`Circuit`][acvm::acir::circuit::Circuit] is modified. @@ -147,38 +148,4 @@ impl DebugInfo { pub fn opcode_location(&self, loc: &OpcodeLocation) -> Option> { self.locations.get(loc).cloned() } - - pub fn count_span_opcodes(&self) -> HashMap { - let mut accumulator: HashMap> = HashMap::new(); - - for (opcode_location, locations) in self.locations.iter() { - for location in locations.iter() { - let opcodes = accumulator.entry(*location).or_default(); - opcodes.push(opcode_location); - } - } - - let counted_opcodes = accumulator - .iter() - .map(|(location, opcodes)| { - let acir_opcodes: Vec<_> = opcodes - .iter() - .filter(|opcode_location| matches!(opcode_location, OpcodeLocation::Acir(_))) - .collect(); - let brillig_opcodes: Vec<_> = opcodes - .iter() - .filter(|opcode_location| { - matches!(opcode_location, OpcodeLocation::Brillig { .. 
}) - }) - .collect(); - let opcodes_count = OpCodesCount { - acir_size: acir_opcodes.len(), - brillig_size: brillig_opcodes.len(), - }; - (*location, opcodes_count) - }) - .collect(); - - counted_opcodes - } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index e720bf14a4c..23c802e3a14 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -548,7 +548,7 @@ impl<'block> BrilligBlock<'block> { source, target_array, radix, - matches!(endianness, Endian::Big), + matches!(endianness, Endian::Little), false, ); } @@ -573,7 +573,7 @@ impl<'block> BrilligBlock<'block> { source, target_array, two, - matches!(endianness, Endian::Big), + matches!(endianness, Endian::Little), true, ); @@ -583,7 +583,31 @@ impl<'block> BrilligBlock<'block> { // `Intrinsic::AsWitness` is used to provide hints to acir-gen on optimal expression splitting. 
// It is then useless in the brillig runtime and so we can ignore it Value::Intrinsic(Intrinsic::AsWitness) => (), + Value::Intrinsic(Intrinsic::FieldLessThan) => { + let lhs = self.convert_ssa_single_addr_value(arguments[0], dfg); + assert!(lhs.bit_size == FieldElement::max_num_bits()); + let rhs = self.convert_ssa_single_addr_value(arguments[1], dfg); + assert!(rhs.bit_size == FieldElement::max_num_bits()); + let results = dfg.instruction_results(instruction_id); + let destination = self + .variables + .define_variable( + self.function_context, + self.brillig_context, + results[0], + dfg, + ) + .extract_single_addr(); + assert!(destination.bit_size == 1); + + self.brillig_context.binary_instruction( + lhs, + rhs, + destination, + BrilligBinaryOp::LessThan, + ); + } _ => { unreachable!("unsupported function call type {:?}", dfg[*func]) } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs index f066d967e0d..49e259e0ce5 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs @@ -67,9 +67,8 @@ pub(crate) fn directive_invert() -> GeneratedBrillig { }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 1 }, ], - assert_messages: Default::default(), - locations: Default::default(), name: "directive_invert".to_string(), + ..Default::default() } } @@ -132,8 +131,7 @@ pub(crate) fn directive_quotient() -> GeneratedBrillig { }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 2 }, ], - assert_messages: Default::default(), - locations: Default::default(), name: "directive_integer_quotient".to_string(), + ..Default::default() } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 416f0300508..3633700a795 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ 
b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -36,6 +36,8 @@ use acvm::{ }; use debug_show::DebugShow; +use super::ProcedureId; + /// The Brillig VM does not apply a limit to the memory address space, /// As a convention, we take use 32 bits. This means that we assume that /// memory has 2^32 memory slots. @@ -111,9 +113,14 @@ impl BrilligContext { /// Special brillig context to codegen compiler intrinsic shared procedures impl BrilligContext { - pub(crate) fn new_for_procedure(enable_debug_trace: bool) -> BrilligContext { + pub(crate) fn new_for_procedure( + enable_debug_trace: bool, + procedure_id: ProcedureId, + ) -> BrilligContext { + let mut obj = BrilligArtifact::default(); + obj.procedure = Some(procedure_id); BrilligContext { - obj: BrilligArtifact::default(), + obj, registers: ScratchSpace::new(), context_label: Label::entrypoint(), current_section: 0, @@ -146,8 +153,8 @@ pub(crate) mod tests { use std::vec; use acvm::acir::brillig::{ - BitSize, ForeignCallParam, ForeignCallResult, HeapArray, HeapVector, IntegerBitSize, - MemoryAddress, ValueOrArray, + BitSize, ForeignCallParam, ForeignCallResult, HeapVector, IntegerBitSize, MemoryAddress, + ValueOrArray, }; use acvm::brillig_vm::brillig::HeapValueType; use acvm::brillig_vm::{VMStatus, VM}; @@ -289,8 +296,18 @@ pub(crate) mod tests { // We push a JumpIf and Trap opcode directly as the constrain instruction // uses unresolved jumps which requires a block to be constructed in SSA and // we don't need this for Brillig IR tests - context.push_opcode(BrilligOpcode::JumpIf { condition: r_equality, location: 8 }); - context.push_opcode(BrilligOpcode::Trap { revert_data: HeapArray::default() }); + context.push_opcode(BrilligOpcode::JumpIf { condition: r_equality, location: 9 }); + context.push_opcode(BrilligOpcode::Const { + destination: MemoryAddress::direct(0), + bit_size: BitSize::Integer(IntegerBitSize::U32), + value: FieldElement::from(0u64), + }); + context.push_opcode(BrilligOpcode::Trap { + 
revert_data: HeapVector { + pointer: MemoryAddress::direct(0), + size: MemoryAddress::direct(0), + }, + }); context.stop_instruction(); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs index d0bd91e185c..276456fc40d 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs @@ -1,9 +1,10 @@ use acvm::acir::brillig::Opcode as BrilligOpcode; use std::collections::{BTreeMap, HashMap}; -use crate::brillig::brillig_ir::procedures::ProcedureId; use crate::ssa::ir::{basic_block::BasicBlockId, dfg::CallStack, function::FunctionId}; +use super::procedures::ProcedureId; + /// Represents a parameter or a return value of an entry point function. #[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)] pub(crate) enum BrilligParameter { @@ -24,6 +25,7 @@ pub(crate) struct GeneratedBrillig { pub(crate) locations: BTreeMap, pub(crate) assert_messages: BTreeMap, pub(crate) name: String, + pub(crate) procedure_locations: HashMap, } #[derive(Default, Debug, Clone)] @@ -53,6 +55,14 @@ pub(crate) struct BrilligArtifact { call_stack: CallStack, /// Name of the function, only used for debugging purposes. pub(crate) name: String, + + /// This field contains the given procedure id if this artifact originates from a procedure + pub(crate) procedure: Option, + /// Procedure ID mapped to the range of its opcode locations + /// This is created as artifacts are linked together and allows us to determine + /// which opcodes originate from reusable procedures. + /// The range is inclusive for both start and end opcode locations. + pub(crate) procedure_locations: HashMap, } /// A pointer to a location in the opcode. 
@@ -149,6 +159,7 @@ impl BrilligArtifact { locations: self.locations, assert_messages: self.assert_messages, name: self.name, + procedure_locations: self.procedure_locations, } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs index b74154c16be..540d957d5be 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs @@ -1,5 +1,5 @@ use acvm::{ - acir::brillig::{HeapArray, MemoryAddress}, + acir::brillig::{HeapVector, MemoryAddress}, AcirField, }; @@ -192,12 +192,12 @@ impl BrilligContext< assert!(condition.bit_size == 1); self.codegen_if_not(condition.address, |ctx| { - let revert_data = HeapArray { - pointer: ctx.allocate_register(), - // + 1 due to the revert data id being the first item returned - size: Self::flattened_tuple_size(&revert_data_types) + 1, - }; - ctx.codegen_allocate_immediate_mem(revert_data.pointer, revert_data.size); + // + 1 due to the revert data id being the first item returned + let revert_data_size = Self::flattened_tuple_size(&revert_data_types) + 1; + let revert_data_size_var = ctx.make_usize_constant_instruction(revert_data_size.into()); + let revert_data = + HeapVector { pointer: ctx.allocate_register(), size: revert_data_size_var.address }; + ctx.codegen_allocate_immediate_mem(revert_data.pointer, revert_data_size); let current_revert_data_pointer = ctx.allocate_register(); ctx.mov_instruction(current_revert_data_pointer, revert_data.pointer); @@ -243,6 +243,7 @@ impl BrilligContext< ); } ctx.trap_instruction(revert_data); + ctx.deallocate_single_addr(revert_data_size_var); ctx.deallocate_register(revert_data.pointer); ctx.deallocate_register(current_revert_data_pointer); }); @@ -258,7 +259,12 @@ impl BrilligContext< assert!(condition.bit_size == 1); self.codegen_if_not(condition.address, |ctx| { - 
ctx.trap_instruction(HeapArray::default()); + let revert_data_size_var = ctx.make_usize_constant_instruction(F::zero()); + ctx.trap_instruction(HeapVector { + pointer: MemoryAddress::direct(0), + size: revert_data_size_var.address, + }); + ctx.deallocate_single_addr(revert_data_size_var); if let Some(assert_message) = assert_message { ctx.obj.add_assert_message_to_last_opcode(assert_message); } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs index 5f4781788f5..ba89823ef13 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs @@ -69,7 +69,7 @@ impl BrilligContext< source_field: SingleAddrVariable, target_array: BrilligArray, radix: SingleAddrVariable, - big_endian: bool, + little_endian: bool, output_bits: bool, // If true will generate bit limbs, if false will generate byte limbs ) { assert!(source_field.bit_size == F::max_num_bits()); @@ -79,6 +79,7 @@ impl BrilligContext< let heap_array = self.codegen_brillig_array_to_heap_array(target_array); + // Perform big-endian ToRadix self.black_box_op_instruction(BlackBoxOp::ToRadix { input: source_field.address, radix: radix.address, @@ -86,7 +87,7 @@ impl BrilligContext< output_bits, }); - if big_endian { + if little_endian { let items_len = self.make_usize_constant_instruction(target_array.size.into()); self.codegen_array_reverse(heap_array.pointer, items_len.address); self.deallocate_single_addr(items_len); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 2a46a04cc91..4e82a0d3af5 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -117,7 +117,7 @@ impl DebugShow { } /// Emits a `trap` instruction. 
- pub(crate) fn trap_instruction(&self, revert_data: HeapArray) { + pub(crate) fn trap_instruction(&self, revert_data: HeapVector) { debug_println!(self.enable_debug_trace, " TRAP {}", revert_data); } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs index 1ac672687f3..5f0aedb9c5e 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs @@ -1,7 +1,7 @@ use acvm::{ acir::{ brillig::{ - BinaryFieldOp, BinaryIntOp, BitSize, BlackBoxOp, HeapArray, HeapValueType, + BinaryFieldOp, BinaryIntOp, BitSize, BlackBoxOp, HeapValueType, HeapVector, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray, }, AcirField, @@ -425,7 +425,7 @@ impl BrilligContext< self.deallocate_single_addr(offset_var); } - pub(super) fn trap_instruction(&mut self, revert_data: HeapArray) { + pub(super) fn trap_instruction(&mut self, revert_data: HeapVector) { self.debug_show.trap_instruction(revert_data); self.push_opcode(BrilligOpcode::Trap { revert_data }); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/mod.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/mod.rs index 1c3d1f4d0ad..e17b4ac9359 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/mod.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/mod.rs @@ -13,8 +13,10 @@ use array_copy::compile_array_copy_procedure; use array_reverse::compile_array_reverse_procedure; use check_max_stack_depth::compile_check_max_stack_depth_procedure; use mem_copy::compile_mem_copy_procedure; +use noirc_errors::debug_info::ProcedureDebugId; use prepare_vector_insert::compile_prepare_vector_insert_procedure; use prepare_vector_push::compile_prepare_vector_push_procedure; +use serde::{Deserialize, Serialize}; use vector_copy::compile_vector_copy_procedure; use vector_pop_back::compile_vector_pop_back_procedure; 
use vector_pop_front::compile_vector_pop_front_procedure; @@ -31,8 +33,8 @@ use super::{ /// Procedures are a set of complex operations that are common in the noir language. /// Extracting them to reusable procedures allows us to reduce the size of the generated Brillig. /// Procedures receive their arguments on scratch space to avoid stack dumping&restoring. -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub(crate) enum ProcedureId { +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, PartialOrd, Ord, Deserialize, Serialize)] +pub enum ProcedureId { ArrayCopy, ArrayReverse, VectorCopy, @@ -45,10 +47,63 @@ pub(crate) enum ProcedureId { CheckMaxStackDepth, } +impl ProcedureId { + pub(crate) fn to_debug_id(self) -> ProcedureDebugId { + ProcedureDebugId(match self { + ProcedureId::ArrayCopy => 0, + ProcedureId::ArrayReverse => 1, + ProcedureId::VectorCopy => 2, + ProcedureId::MemCopy => 3, + ProcedureId::PrepareVectorPush(true) => 4, + ProcedureId::PrepareVectorPush(false) => 5, + ProcedureId::VectorPopFront => 6, + ProcedureId::VectorPopBack => 7, + ProcedureId::PrepareVectorInsert => 8, + ProcedureId::VectorRemove => 9, + ProcedureId::CheckMaxStackDepth => 10, + }) + } + + pub fn from_debug_id(debug_id: ProcedureDebugId) -> Self { + let inner = debug_id.0; + match inner { + 0 => ProcedureId::ArrayCopy, + 1 => ProcedureId::ArrayReverse, + 2 => ProcedureId::VectorCopy, + 3 => ProcedureId::MemCopy, + 4 => ProcedureId::PrepareVectorPush(true), + 5 => ProcedureId::PrepareVectorPush(false), + 6 => ProcedureId::VectorPopFront, + 7 => ProcedureId::VectorPopBack, + 8 => ProcedureId::PrepareVectorInsert, + 9 => ProcedureId::VectorRemove, + 10 => ProcedureId::CheckMaxStackDepth, + _ => panic!("Unsupported procedure debug ID of {inner} was supplied"), + } + } +} + +impl std::fmt::Display for ProcedureId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ProcedureId::ArrayCopy => write!(f, "ArrayCopy"), + ProcedureId::ArrayReverse => 
write!(f, "ArrayReverse"), + ProcedureId::VectorCopy => write!(f, "VectorCopy"), + ProcedureId::MemCopy => write!(f, "MemCopy"), + ProcedureId::PrepareVectorPush(flag) => write!(f, "PrepareVectorPush({flag})"), + ProcedureId::VectorPopFront => write!(f, "VectorPopFront"), + ProcedureId::VectorPopBack => write!(f, "VectorPopBack"), + ProcedureId::PrepareVectorInsert => write!(f, "PrepareVectorInsert"), + ProcedureId::VectorRemove => write!(f, "VectorRemove"), + ProcedureId::CheckMaxStackDepth => write!(f, "CheckMaxStackDepth"), + } + } +} + pub(crate) fn compile_procedure( procedure_id: ProcedureId, ) -> BrilligArtifact { - let mut brillig_context = BrilligContext::new_for_procedure(false); + let mut brillig_context = BrilligContext::new_for_procedure(false, procedure_id); brillig_context.enter_context(Label::procedure(procedure_id)); match procedure_id { diff --git a/compiler/noirc_evaluator/src/brillig/mod.rs b/compiler/noirc_evaluator/src/brillig/mod.rs index f1da76669cd..1b61ae1a864 100644 --- a/compiler/noirc_evaluator/src/brillig/mod.rs +++ b/compiler/noirc_evaluator/src/brillig/mod.rs @@ -18,6 +18,8 @@ use crate::ssa::{ use fxhash::FxHashMap as HashMap; use std::{borrow::Cow, collections::BTreeSet}; +pub use self::brillig_ir::procedures::ProcedureId; + /// Context structure for the brillig pass. /// It stores brillig-related data required for brillig generation. #[derive(Default)] diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index ea41b0cfb32..c993ec291dd 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -291,6 +291,7 @@ fn convert_generated_acir_into_circuit( assertion_payloads: assert_messages, warnings, name, + brillig_procedure_locs, .. 
} = generated_acir; @@ -328,8 +329,14 @@ fn convert_generated_acir_into_circuit( }) .collect(); - let mut debug_info = - DebugInfo::new(locations, brillig_locations, debug_variables, debug_functions, debug_types); + let mut debug_info = DebugInfo::new( + locations, + brillig_locations, + debug_variables, + debug_functions, + debug_types, + brillig_procedure_locs, + ); // Perform any ACIR-level optimizations let (optimized_circuit, transformation_map) = acvm::compiler::optimize(circuit); diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 01fcaef9042..9a12fbe2441 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -20,7 +20,9 @@ use acvm::{ acir::AcirField, acir::{circuit::directives::Directive, native_types::Expression}, }; + use iter_extended::vecmap; +use noirc_errors::debug_info::ProcedureDebugId; use num_bigint::BigUint; /// Brillig calls such as for the Brillig std lib are resolved only after code generation is finished. @@ -72,6 +74,11 @@ pub(crate) struct GeneratedAcir { /// As to avoid passing the ACIR gen shared context into each individual ACIR /// we can instead keep this map and resolve the Brillig calls at the end of code generation. pub(crate) brillig_stdlib_func_locations: BTreeMap, + + /// Brillig function id -> Brillig procedure locations map + /// This map allows a profiler to determine which Brillig opcodes + /// originated from a reusable procedure. 
+ pub(crate) brillig_procedure_locs: BTreeMap, } /// Correspondence between an opcode index (in opcodes) and the source code call stack which generated it @@ -79,6 +86,8 @@ pub(crate) type OpcodeToLocationsMap = BTreeMap; pub(crate) type BrilligOpcodeToLocationsMap = BTreeMap; +pub(crate) type BrilligProcedureRangeMap = BTreeMap; + #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub(crate) enum BrilligStdlibFunc { Inverse, @@ -591,6 +600,14 @@ impl GeneratedAcir { return; } + for (procedure_id, (start_index, end_index)) in generated_brillig.procedure_locations.iter() + { + self.brillig_procedure_locs + .entry(brillig_function_index) + .or_default() + .insert(procedure_id.to_debug_id(), (*start_index, *end_index)); + } + for (brillig_index, call_stack) in generated_brillig.locations.iter() { self.brillig_locations .entry(brillig_function_index) diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index a5c51392114..ea44ffeb7fa 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -1003,6 +1003,15 @@ impl<'a> Context<'a> { } }; entry_point.link_with(artifact); + // Insert the range of opcode locations occupied by a procedure + if let Some(procedure_id) = artifact.procedure { + let num_opcodes = entry_point.byte_code.len(); + let previous_num_opcodes = entry_point.byte_code.len() - artifact.byte_code.len(); + // We subtract one as to keep the range inclusive on both ends + entry_point + .procedure_locations + .insert(procedure_id, (previous_num_opcodes, num_opcodes - 1)); + } } // Generate the final bytecode Ok(entry_point.finish()) @@ -2789,6 +2798,9 @@ impl<'a> Context<'a> { Intrinsic::DerivePedersenGenerators => { unreachable!("DerivePedersenGenerators can only be called with constants") } + Intrinsic::FieldLessThan => { + unreachable!("FieldLessThan can only be called in unconstrained") + } } } diff --git 
a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs index 7bee18d24a0..90eb79ccb69 100644 --- a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs +++ b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs @@ -218,7 +218,8 @@ impl Context { | Intrinsic::StaticAssert | Intrinsic::StrAsBytes | Intrinsic::ToBits(..) - | Intrinsic::ToRadix(..) => { + | Intrinsic::ToRadix(..) + | Intrinsic::FieldLessThan => { self.value_sets.push(instruction_arguments_and_results); } }, diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index d8dba499a43..f187a279b9b 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -72,6 +72,7 @@ pub(crate) enum Intrinsic { AsWitness, IsUnconstrained, DerivePedersenGenerators, + FieldLessThan, } impl std::fmt::Display for Intrinsic { @@ -100,6 +101,7 @@ impl std::fmt::Display for Intrinsic { Intrinsic::AsWitness => write!(f, "as_witness"), Intrinsic::IsUnconstrained => write!(f, "is_unconstrained"), Intrinsic::DerivePedersenGenerators => write!(f, "derive_pedersen_generators"), + Intrinsic::FieldLessThan => write!(f, "field_less_than"), } } } @@ -131,7 +133,8 @@ impl Intrinsic { | Intrinsic::FromField | Intrinsic::AsField | Intrinsic::IsUnconstrained - | Intrinsic::DerivePedersenGenerators => false, + | Intrinsic::DerivePedersenGenerators + | Intrinsic::FieldLessThan => false, // Some black box functions have side-effects Intrinsic::BlackBox(func) => matches!( @@ -169,6 +172,8 @@ impl Intrinsic { "as_witness" => Some(Intrinsic::AsWitness), "is_unconstrained" => Some(Intrinsic::IsUnconstrained), "derive_pedersen_generators" => Some(Intrinsic::DerivePedersenGenerators), + "field_less_than" => Some(Intrinsic::FieldLessThan), + other => 
BlackBoxFunc::lookup(other).map(Intrinsic::BlackBox), } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 0bf7fe6a146..9dbd2c56993 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -355,6 +355,16 @@ pub(super) fn simplify_call( unreachable!("Derive Pedersen Generators must return an array"); } } + Intrinsic::FieldLessThan => { + if let Some(constants) = constant_args { + let lhs = constants[0]; + let rhs = constants[1]; + let result = dfg.make_constant((lhs < rhs).into(), Type::bool()); + SimplifyResult::SimplifiedTo(result) + } else { + SimplifyResult::None + } + } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 222ae0aaf29..012f6e6b27d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -178,7 +178,8 @@ impl Context { | Intrinsic::AsSlice | Intrinsic::AsWitness | Intrinsic::IsUnconstrained - | Intrinsic::DerivePedersenGenerators => false, + | Intrinsic::DerivePedersenGenerators + | Intrinsic::FieldLessThan => false, }, // We must assume that functions contain a side effect as we cannot inspect more deeply. 
diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs index bfcfada2d94..c387e0b6234 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -237,6 +237,7 @@ fn slice_capacity_change( | Intrinsic::IsUnconstrained | Intrinsic::DerivePedersenGenerators | Intrinsic::ToBits(_) - | Intrinsic::ToRadix(_) => SizeChange::None, + | Intrinsic::ToRadix(_) + | Intrinsic::FieldLessThan => SizeChange::None, } } diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index 64edae8322f..67ddffe3277 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -802,8 +802,8 @@ impl Display for AsTraitPath { impl Display for TypePath { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}::{}", self.typ, self.item)?; - if !self.turbofish.is_empty() { - write!(f, "::{}", self.turbofish)?; + if let Some(turbofish) = &self.turbofish { + write!(f, "::{}", turbofish)?; } Ok(()) } diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index a25b87b7395..49f585894d9 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -408,7 +408,7 @@ pub struct AsTraitPath { pub struct TypePath { pub typ: UnresolvedType, pub item: Ident, - pub turbofish: GenericTypeArgs, + pub turbofish: Option, } // Note: Path deliberately doesn't implement Recoverable. 
diff --git a/compiler/noirc_frontend/src/ast/visitor.rs b/compiler/noirc_frontend/src/ast/visitor.rs index 16ee0fc4c02..9e12b29677c 100644 --- a/compiler/noirc_frontend/src/ast/visitor.rs +++ b/compiler/noirc_frontend/src/ast/visitor.rs @@ -1245,7 +1245,9 @@ impl TypePath { pub fn accept_children(&self, visitor: &mut impl Visitor) { self.typ.accept(visitor); - self.turbofish.accept(visitor); + if let Some(turbofish) = &self.turbofish { + turbofish.accept(visitor); + } } } diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs index 65cb6072c62..13f51abe6ba 100644 --- a/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -91,6 +91,7 @@ impl<'context> Elaborator<'context> { let mut elaborator = Elaborator::new( self.interner, self.def_maps, + self.usage_tracker, self.crate_id, self.debug_comptime_in_file, self.interpreter_call_stack.clone(), @@ -412,6 +413,7 @@ impl<'context> Elaborator<'context> { if let Some(id) = dc_mod::collect_function( self.interner, self.def_maps.get_mut(&self.crate_id).unwrap(), + self.usage_tracker, &function, module_id, self.file, @@ -461,6 +463,7 @@ impl<'context> Elaborator<'context> { let (global, error) = dc_mod::collect_global( self.interner, self.def_maps.get_mut(&self.crate_id).unwrap(), + self.usage_tracker, Documented::new(global, item.doc_comments), visibility, self.file, @@ -477,6 +480,7 @@ impl<'context> Elaborator<'context> { if let Some((type_id, the_struct)) = dc_mod::collect_struct( self.interner, self.def_maps.get_mut(&self.crate_id).unwrap(), + self.usage_tracker, Documented::new(struct_def, item.doc_comments), self.file, self.local_module, diff --git a/compiler/noirc_frontend/src/elaborator/expressions.rs b/compiler/noirc_frontend/src/elaborator/expressions.rs index 6dfd295db6f..ff482dca4fb 100644 --- a/compiler/noirc_frontend/src/elaborator/expressions.rs +++ 
b/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -591,7 +591,7 @@ impl<'context> Elaborator<'context> { pub(super) fn mark_struct_as_constructed(&mut self, struct_type: Shared) { let struct_type = struct_type.borrow(); let parent_module_id = struct_type.id.parent_module_id(self.def_maps); - self.interner.usage_tracker.mark_as_used(parent_module_id, &struct_type.name); + self.usage_tracker.mark_as_used(parent_module_id, &struct_type.name); } /// Resolve all the fields of a struct constructor expression. diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs index 2a723286d8b..5af435f9201 100644 --- a/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -5,7 +5,8 @@ use std::{ use crate::{ ast::ItemVisibility, hir::resolution::import::PathResolutionItem, - hir_def::traits::ResolvedTraitBound, StructField, StructType, TypeBindings, + hir_def::traits::ResolvedTraitBound, usage_tracker::UsageTracker, StructField, StructType, + TypeBindings, }; use crate::{ ast::{ @@ -84,8 +85,8 @@ pub struct Elaborator<'context> { pub(crate) errors: Vec<(CompilationError, FileId)>, pub(crate) interner: &'context mut NodeInterner, - pub(crate) def_maps: &'context mut DefMaps, + pub(crate) usage_tracker: &'context mut UsageTracker, pub(crate) file: FileId, @@ -183,6 +184,7 @@ impl<'context> Elaborator<'context> { pub fn new( interner: &'context mut NodeInterner, def_maps: &'context mut DefMaps, + usage_tracker: &'context mut UsageTracker, crate_id: CrateId, debug_comptime_in_file: Option, interpreter_call_stack: im::Vector, @@ -192,6 +194,7 @@ impl<'context> Elaborator<'context> { errors: Vec::new(), interner, def_maps, + usage_tracker, file: FileId::dummy(), in_unsafe_block: false, nested_loops: 0, @@ -221,6 +224,7 @@ impl<'context> Elaborator<'context> { Self::new( &mut context.def_interner, &mut context.def_maps, + &mut context.usage_tracker, crate_id, debug_comptime_in_file, 
im::Vector::new(), diff --git a/compiler/noirc_frontend/src/elaborator/patterns.rs b/compiler/noirc_frontend/src/elaborator/patterns.rs index b45f455633b..a953c54dc4b 100644 --- a/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -17,7 +17,7 @@ use crate::{ stmt::HirPattern, }, node_interner::{DefinitionId, DefinitionKind, ExprId, FuncId, GlobalId, TraitImplKind}, - Kind, ResolvedGeneric, Shared, StructType, Type, TypeBindings, + Kind, ResolvedGeneric, Shared, StructType, Type, TypeAlias, TypeBindings, }; use super::{Elaborator, ResolverMeta}; @@ -453,6 +453,30 @@ impl<'context> Elaborator<'context> { self.resolve_turbofish_generics(&struct_type.generics, turbofish_generics) } + pub(super) fn resolve_alias_turbofish_generics( + &mut self, + type_alias: &TypeAlias, + generics: Vec, + unresolved_turbofish: Option>, + span: Span, + ) -> Vec { + let Some(turbofish_generics) = unresolved_turbofish else { + return generics; + }; + + if turbofish_generics.len() != generics.len() { + self.push_err(TypeCheckError::GenericCountMismatch { + item: format!("alias {}", type_alias.name), + expected: generics.len(), + found: turbofish_generics.len(), + span, + }); + return generics; + } + + self.resolve_turbofish_generics(&type_alias.generics, turbofish_generics) + } + pub(super) fn resolve_turbofish_generics( &mut self, generics: &[ResolvedGeneric], @@ -526,10 +550,36 @@ impl<'context> Elaborator<'context> { generics.span, ) } - PathResolutionItem::TypeAliasFunction(_type_alias_id, Some(generics), _func_id) => { - // TODO: https://github.com/noir-lang/noir/issues/6311 - self.push_err(TypeCheckError::UnsupportedTurbofishUsage { span: generics.span }); - Vec::new() + PathResolutionItem::TypeAliasFunction(type_alias_id, generics, _func_id) => { + let type_alias = self.interner.get_type_alias(type_alias_id); + let type_alias = type_alias.borrow(); + let alias_generics = vecmap(&type_alias.generics, |generic| { + 
self.interner.next_type_variable_with_kind(generic.kind()) + }); + + // First solve the generics on the alias, if any + let generics = if let Some(generics) = generics { + self.resolve_alias_turbofish_generics( + &type_alias, + alias_generics, + Some(generics.generics), + generics.span, + ) + } else { + alias_generics + }; + + // Now instantiate the underlying struct with those generics, the struct might + // have more generics than those in the alias, like in this example: + // + // type Alias = Struct; + let typ = type_alias.get_type(&generics); + let Type::Struct(_, generics) = typ else { + // See https://github.com/noir-lang/noir/issues/6398 + panic!("Expected type alias to point to struct") + }; + + generics } PathResolutionItem::TraitFunction(_trait_id, Some(generics), _func_id) => { // TODO: https://github.com/noir-lang/noir/issues/6310 @@ -762,8 +812,8 @@ impl<'context> Elaborator<'context> { .func_id(self.interner) .expect("Expected trait function to be a DefinitionKind::Function"); - let generics = self.resolve_type_args(path.turbofish, func_id, span).0; - let generics = (!generics.is_empty()).then_some(generics); + let generics = + path.turbofish.map(|turbofish| self.resolve_type_args(turbofish, func_id, span).0); let location = Location::new(span, self.file); let id = self.interner.function_definition_id(func_id); diff --git a/compiler/noirc_frontend/src/elaborator/scope.rs b/compiler/noirc_frontend/src/elaborator/scope.rs index 8e033c914be..33e97c1df22 100644 --- a/compiler/noirc_frontend/src/elaborator/scope.rs +++ b/compiler/noirc_frontend/src/elaborator/scope.rs @@ -87,9 +87,10 @@ impl<'context> Elaborator<'context> { if !self.interner.lsp_mode { return resolver.resolve( + self.interner, self.def_maps, path, - &mut self.interner.usage_tracker, + self.usage_tracker, &mut None, ); } @@ -100,9 +101,10 @@ impl<'context> Elaborator<'context> { let mut references: Vec<_> = Vec::new(); let path_resolution = resolver.resolve( + 
self.interner, self.def_maps, path.clone(), - &mut self.interner.usage_tracker, + self.usage_tracker, &mut Some(&mut references), ); diff --git a/compiler/noirc_frontend/src/hir/comptime/display.rs b/compiler/noirc_frontend/src/hir/comptime/display.rs index 9f753f11e4b..bfbe39df241 100644 --- a/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -620,7 +620,9 @@ fn remove_interned_in_expression_kind( } ExpressionKind::TypePath(mut path) => { path.typ = remove_interned_in_unresolved_type(interner, path.typ); - path.turbofish = remove_interned_in_generic_type_args(interner, path.turbofish); + path.turbofish = path + .turbofish + .map(|turbofish| remove_interned_in_generic_type_args(interner, turbofish)); ExpressionKind::TypePath(path) } ExpressionKind::Resolved(id) => { diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 273f34a8a5e..d8842215a29 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -109,6 +109,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { "expr_is_continue" => expr_is_continue(interner, arguments, location), "expr_resolve" => expr_resolve(self, arguments, location), "is_unconstrained" => Ok(Value::Bool(true)), + "field_less_than" => field_less_than(arguments, location), "fmtstr_as_ctstring" => fmtstr_as_ctstring(interner, arguments, location), "fmtstr_quoted_contents" => fmtstr_quoted_contents(interner, arguments, location), "fresh_type_variable" => fresh_type_variable(interner), @@ -2849,3 +2850,12 @@ fn derive_generators( Ok(Value::Array(results, return_type)) } + +fn field_less_than(arguments: Vec<(Value, Location)>, location: Location) -> IResult { + let (lhs, rhs) = check_two_arguments(arguments, location)?; + + let lhs = get_field(lhs)?; + let rhs = get_field(rhs)?; + + 
Ok(Value::Bool(lhs < rhs)) +} diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 20c162fbd3a..9917207d217 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -353,8 +353,9 @@ impl DefCollector { let resolved_import = resolve_import( crate_id, &collected_import, + &context.def_interner, &context.def_maps, - &mut context.def_interner.usage_tracker, + &mut context.usage_tracker, &mut Some(&mut references), ); @@ -375,8 +376,9 @@ impl DefCollector { resolve_import( crate_id, &collected_import, + &context.def_interner, &context.def_maps, - &mut context.def_interner.usage_tracker, + &mut context.usage_tracker, &mut None, ) }; @@ -421,7 +423,7 @@ impl DefCollector { krate: crate_id, local_id: resolved_import.module_scope, }; - context.def_interner.usage_tracker.add_unused_item( + context.usage_tracker.add_unused_item( module_id, name.clone(), UnusedItem::Import, @@ -501,7 +503,7 @@ impl DefCollector { crate_id: CrateId, errors: &mut Vec<(CompilationError, FileId)>, ) { - let unused_imports = context.def_interner.unused_items().iter(); + let unused_imports = context.usage_tracker.unused_items().iter(); let unused_imports = unused_imports.filter(|(module_id, _)| module_id.krate == crate_id); errors.extend(unused_imports.flat_map(|(module_id, usage_tracker)| { @@ -557,11 +559,12 @@ fn inject_prelude( }; if let Ok(PathResolution { item, errors }) = path_resolver::resolve_path( + &context.def_interner, &context.def_maps, ModuleId { krate: crate_id, local_id: crate_root }, None, path, - &mut context.def_interner.usage_tracker, + &mut context.usage_tracker, &mut None, ) { assert!(errors.is_empty(), "Tried to add private item to prelude"); diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 825a8414fe0..94c091f9791 100644 --- 
a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -18,7 +18,7 @@ use crate::ast::{ use crate::hir::resolution::errors::ResolverError; use crate::node_interner::{ModuleAttributes, NodeInterner, ReferenceId, StructId}; use crate::token::SecondaryAttribute; -use crate::usage_tracker::UnusedItem; +use crate::usage_tracker::{UnusedItem, UsageTracker}; use crate::{ graph::CrateId, hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait}, @@ -147,6 +147,7 @@ impl<'a> ModCollector<'a> { let (global, error) = collect_global( &mut context.def_interner, &mut self.def_collector.def_map, + &mut context.usage_tracker, global, visibility, self.file_id, @@ -246,6 +247,7 @@ impl<'a> ModCollector<'a> { let Some(func_id) = collect_function( &mut context.def_interner, &mut self.def_collector.def_map, + &mut context.usage_tracker, &function.item, module, self.file_id, @@ -282,6 +284,7 @@ impl<'a> ModCollector<'a> { if let Some((id, the_struct)) = collect_struct( &mut context.def_interner, &mut self.def_collector.def_map, + &mut context.usage_tracker, struct_definition, self.file_id, self.module_id, @@ -336,7 +339,7 @@ impl<'a> ModCollector<'a> { ); let parent_module_id = ModuleId { krate, local_id: self.module_id }; - context.def_interner.usage_tracker.add_unused_item( + context.usage_tracker.add_unused_item( parent_module_id, name.clone(), UnusedItem::TypeAlias(type_alias_id), @@ -412,7 +415,7 @@ impl<'a> ModCollector<'a> { ); let parent_module_id = ModuleId { krate, local_id: self.module_id }; - context.def_interner.usage_tracker.add_unused_item( + context.usage_tracker.add_unused_item( parent_module_id, name.clone(), UnusedItem::Trait(trait_id), @@ -897,9 +900,11 @@ fn push_child_module( Ok(mod_id) } +#[allow(clippy::too_many_arguments)] pub fn collect_function( interner: &mut NodeInterner, def_map: &mut CrateDefMap, + usage_tracker: &mut UsageTracker, function: &NoirFunction, module: ModuleId, file: 
FileId, @@ -932,7 +937,7 @@ pub fn collect_function( if !is_test && !is_entry_point_function { let item = UnusedItem::Function(func_id); - interner.usage_tracker.add_unused_item(module, name.clone(), item, visibility); + usage_tracker.add_unused_item(module, name.clone(), item, visibility); } interner.set_doc_comments(ReferenceId::Function(func_id), doc_comments); @@ -950,9 +955,11 @@ pub fn collect_function( Some(func_id) } +#[allow(clippy::too_many_arguments)] pub fn collect_struct( interner: &mut NodeInterner, def_map: &mut CrateDefMap, + usage_tracker: &mut UsageTracker, struct_definition: Documented, file_id: FileId, module_id: LocalModuleId, @@ -1015,7 +1022,7 @@ pub fn collect_struct( let parent_module_id = ModuleId { krate, local_id: module_id }; if !unresolved.struct_def.is_abi() { - interner.usage_tracker.add_unused_item( + usage_tracker.add_unused_item( parent_module_id, name.clone(), UnusedItem::Struct(id), @@ -1191,9 +1198,11 @@ pub(crate) fn collect_trait_impl_items( (unresolved_functions, associated_types, associated_constants) } +#[allow(clippy::too_many_arguments)] pub(crate) fn collect_global( interner: &mut NodeInterner, def_map: &mut CrateDefMap, + usage_tracker: &mut UsageTracker, global: Documented, visibility: ItemVisibility, file_id: FileId, @@ -1221,7 +1230,7 @@ pub(crate) fn collect_global( if !is_abi { let parent_module_id = ModuleId { krate: crate_id, local_id: module_id }; - interner.usage_tracker.add_unused_item( + usage_tracker.add_unused_item( parent_module_id, name, UnusedItem::Global(global_id), diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index 015b7deb6e0..2bd1a852f05 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -11,6 +11,7 @@ use crate::graph::{CrateGraph, CrateId}; use crate::hir_def::function::FuncMeta; use crate::node_interner::{FuncId, NodeInterner, StructId}; use crate::parser::ParserError; +use 
crate::usage_tracker::UsageTracker; use crate::{Generics, Kind, ParsedModule, ResolvedGeneric, TypeVariable}; use def_collector::dc_crate::CompilationError; use def_map::{Contract, CrateDefMap}; @@ -33,6 +34,7 @@ pub struct Context<'file_manager, 'parsed_files> { pub def_interner: NodeInterner, pub crate_graph: CrateGraph, pub def_maps: BTreeMap, + pub usage_tracker: UsageTracker, // In the WASM context, we take ownership of the file manager, // which is why this needs to be a Cow. In all use-cases, the file manager // is read-only however, once it has been passed to the Context. @@ -64,6 +66,7 @@ impl Context<'_, '_> { Context { def_interner: NodeInterner::default(), def_maps: BTreeMap::new(), + usage_tracker: UsageTracker::default(), visited_files: BTreeMap::new(), crate_graph: CrateGraph::default(), file_manager: Cow::Owned(file_manager), @@ -80,6 +83,7 @@ impl Context<'_, '_> { Context { def_interner: NodeInterner::default(), def_maps: BTreeMap::new(), + usage_tracker: UsageTracker::default(), visited_files: BTreeMap::new(), crate_graph: CrateGraph::default(), file_manager: Cow::Borrowed(file_manager), diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index 58a3a841801..7afb056cbaf 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -4,8 +4,11 @@ use thiserror::Error; use crate::graph::CrateId; use crate::hir::def_collector::dc_crate::CompilationError; -use crate::node_interner::{FuncId, GlobalId, ReferenceId, StructId, TraitId, TypeAliasId}; +use crate::node_interner::{ + FuncId, GlobalId, NodeInterner, ReferenceId, StructId, TraitId, TypeAliasId, +}; use crate::usage_tracker::UsageTracker; +use crate::Type; use std::collections::BTreeMap; @@ -100,6 +103,7 @@ pub struct Turbofish { enum IntermediatePathResolutionItem { Module(ModuleId), Struct(StructId, Option), + TypeAlias(TypeAliasId, Option), Trait(TraitId, 
Option), } @@ -162,6 +166,7 @@ impl<'a> From<&'a PathResolutionError> for CustomDiagnostic { pub fn resolve_import( crate_id: CrateId, import_directive: &ImportDirective, + interner: &NodeInterner, def_maps: &BTreeMap, usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, @@ -176,6 +181,7 @@ pub fn resolve_import( import_directive, crate_id, crate_id, + interner, def_maps, usage_tracker, path_references, @@ -215,6 +221,7 @@ fn resolve_path_to_ns( import_directive: &ImportDirective, crate_id: CrateId, importing_crate: CrateId, + interner: &NodeInterner, def_maps: &BTreeMap, usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, @@ -228,6 +235,7 @@ fn resolve_path_to_ns( crate_id, importing_crate, import_path, + interner, def_maps, usage_tracker, path_references, @@ -242,6 +250,7 @@ fn resolve_path_to_ns( importing_crate, import_path, import_directive.module_id, + interner, def_maps, true, // plain or crate usage_tracker, @@ -260,6 +269,7 @@ fn resolve_path_to_ns( crate_id, // def_map, import_directive, + interner, def_maps, usage_tracker, path_references, @@ -272,6 +282,7 @@ fn resolve_path_to_ns( importing_crate, import_path, import_directive.module_id, + interner, def_maps, true, // plain or crate usage_tracker, @@ -282,6 +293,7 @@ fn resolve_path_to_ns( crate::ast::PathKind::Dep => resolve_external_dep( crate_id, import_directive, + interner, def_maps, usage_tracker, path_references, @@ -297,6 +309,7 @@ fn resolve_path_to_ns( importing_crate, import_path, parent_module_id, + interner, def_maps, false, // plain or crate usage_tracker, @@ -314,8 +327,8 @@ fn resolve_path_to_ns( fn resolve_path_from_crate_root( crate_id: CrateId, importing_crate: CrateId, - import_path: &[PathSegment], + interner: &NodeInterner, def_maps: &BTreeMap, usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, @@ -326,6 +339,7 @@ fn resolve_path_from_crate_root( importing_crate, import_path, starting_mod, + interner, def_maps, true, 
// plain or crate usage_tracker, @@ -339,6 +353,7 @@ fn resolve_name_in_module( importing_crate: CrateId, import_path: &[PathSegment], starting_mod: LocalModuleId, + interner: &NodeInterner, def_maps: &BTreeMap, plain_or_crate: bool, usage_tracker: &mut UsageTracker, @@ -416,7 +431,37 @@ fn resolve_name_in_module( ), ) } - ModuleDefId::TypeAliasId(_) => panic!("type aliases cannot be used in type namespace"), + ModuleDefId::TypeAliasId(id) => { + if let Some(path_references) = path_references { + path_references.push(ReferenceId::Alias(id)); + } + + let type_alias = interner.get_type_alias(id); + let type_alias = type_alias.borrow(); + + let module_id = match &type_alias.typ { + Type::Struct(struct_id, _generics) => struct_id.borrow().id.module_id(), + Type::Error => { + return Err(PathResolutionError::Unresolved(last_ident.clone())); + } + _ => { + // For now we only allow type aliases that point to structs. + // The more general case is captured here: https://github.com/noir-lang/noir/issues/6398 + panic!("Type alias in path not pointing to struct not yet supported") + } + }; + + ( + module_id, + IntermediatePathResolutionItem::TypeAlias( + id, + last_segment_generics.as_ref().map(|generics| Turbofish { + generics: generics.clone(), + span: last_segment.turbofish_span(), + }), + ), + ) + } ModuleDefId::TraitId(id) => { if let Some(path_references) = path_references { path_references.push(ReferenceId::Trait(id)); @@ -485,6 +530,7 @@ fn resolve_path_name(import_directive: &ImportDirective) -> Ident { fn resolve_external_dep( crate_id: CrateId, directive: &ImportDirective, + interner: &NodeInterner, def_maps: &BTreeMap, usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, @@ -529,6 +575,7 @@ fn resolve_external_dep( &dep_directive, dep_module.krate, importing_crate, + interner, def_maps, usage_tracker, path_references, @@ -552,6 +599,9 @@ fn merge_intermediate_path_resolution_item_with_module_def_id( 
IntermediatePathResolutionItem::Struct(struct_id, generics) => { PathResolutionItem::StructFunction(struct_id, generics, func_id) } + IntermediatePathResolutionItem::TypeAlias(alias_id, generics) => { + PathResolutionItem::TypeAliasFunction(alias_id, generics, func_id) + } IntermediatePathResolutionItem::Trait(trait_id, generics) => { PathResolutionItem::TraitFunction(trait_id, generics, func_id) } diff --git a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs index 705820e9101..98d4aee3f5c 100644 --- a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs @@ -1,6 +1,6 @@ use super::import::{resolve_import, ImportDirective, PathResolution, PathResolutionResult}; use crate::ast::{ItemVisibility, Path}; -use crate::node_interner::ReferenceId; +use crate::node_interner::{NodeInterner, ReferenceId}; use crate::usage_tracker::UsageTracker; use std::collections::BTreeMap; @@ -15,6 +15,7 @@ pub trait PathResolver { /// a module or type). fn resolve( &self, + interner: &NodeInterner, def_maps: &BTreeMap, path: Path, usage_tracker: &mut UsageTracker, @@ -42,12 +43,14 @@ impl StandardPathResolver { impl PathResolver for StandardPathResolver { fn resolve( &self, + interner: &NodeInterner, def_maps: &BTreeMap, path: Path, usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> PathResolutionResult { resolve_path( + interner, def_maps, self.module_id, self.self_type_module_id, @@ -69,6 +72,7 @@ impl PathResolver for StandardPathResolver { /// Resolve the given path to a function or a type. 
/// In the case of a conflict, functions are given priority pub fn resolve_path( + interner: &NodeInterner, def_maps: &BTreeMap, module_id: ModuleId, self_type_module_id: Option, @@ -85,8 +89,14 @@ pub fn resolve_path( alias: None, is_prelude: false, }; - let resolved_import = - resolve_import(module_id.krate, &import, def_maps, usage_tracker, path_references)?; + let resolved_import = resolve_import( + module_id.krate, + &import, + interner, + def_maps, + usage_tracker, + path_references, + )?; Ok(PathResolution { item: resolved_import.item, errors: resolved_import.errors }) } diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 77030b0e048..3d4f3e77792 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -241,11 +241,9 @@ impl Kind { /// during monomorphization. pub(crate) fn default_type(&self) -> Option { match self { - Kind::Any => None, Kind::IntegerOrField => Some(Type::default_int_or_field_type()), Kind::Integer => Some(Type::default_int_type()), - Kind::Normal => None, - Kind::Numeric(typ) => Some(*typ.clone()), + Kind::Any | Kind::Normal | Kind::Numeric(..) => None, } } diff --git a/compiler/noirc_frontend/src/lexer/errors.rs b/compiler/noirc_frontend/src/lexer/errors.rs index 66f79bd444b..8d799ef35d1 100644 --- a/compiler/noirc_frontend/src/lexer/errors.rs +++ b/compiler/noirc_frontend/src/lexer/errors.rs @@ -20,6 +20,8 @@ pub enum LexerErrorKind { IntegerLiteralTooLarge { span: Span, limit: String }, #[error("{:?} is not a valid attribute", found)] MalformedFuncAttribute { span: Span, found: String }, + #[error("Malformed test attribute")] + MalformedTestAttribute { span: Span }, #[error("{:?} is not a valid inner attribute", found)] InvalidInnerAttribute { span: Span, found: String }, #[error("Logical and used instead of bitwise and")] @@ -61,6 +63,7 @@ impl LexerErrorKind { LexerErrorKind::InvalidIntegerLiteral { span, .. 
} => *span, LexerErrorKind::IntegerLiteralTooLarge { span, .. } => *span, LexerErrorKind::MalformedFuncAttribute { span, .. } => *span, + LexerErrorKind::MalformedTestAttribute { span, .. } => *span, LexerErrorKind::InvalidInnerAttribute { span, .. } => *span, LexerErrorKind::LogicalAnd { span } => *span, LexerErrorKind::UnterminatedBlockComment { span } => *span, @@ -109,6 +112,11 @@ impl LexerErrorKind { format!(" {found} is not a valid attribute"), *span, ), + LexerErrorKind::MalformedTestAttribute { span } => ( + "Malformed test attribute".to_string(), + "The test attribute can be written in one of these forms: `#[test]`, `#[test(should_fail)]` or `#[test(should_fail_with = \"message\")]`".to_string(), + *span, + ), LexerErrorKind::InvalidInnerAttribute { span, found } => ( "Invalid inner attribute".to_string(), format!(" {found} is not a valid inner attribute"), diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs index 904ce41fbf0..91ae544ddf0 100644 --- a/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/compiler/noirc_frontend/src/lexer/lexer.rs @@ -935,13 +935,7 @@ mod tests { Err(err) => err, }; - // Check if error is MalformedFuncAttribute and found is "foo" - let sub_string = match err { - LexerErrorKind::MalformedFuncAttribute { found, .. } => found, - _ => panic!("expected malformed func attribute error"), - }; - - assert_eq!(sub_string, "test(invalid_scope)"); + assert!(matches!(err, LexerErrorKind::MalformedTestAttribute { .. 
})); } #[test] diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index daf59445982..593e78d01a0 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -751,10 +751,18 @@ impl Attribute { contents_span: Span, is_tag: bool, ) -> Result { - let word_segments: Vec<&str> = word - .split(|c| c == '(' || c == ')') - .filter(|string_segment| !string_segment.is_empty()) - .collect(); + // See if we can parse the word into "name ( contents )". + // We first split into "first_segment ( rest". + let word_segments = if let Some((first_segment, rest)) = word.trim().split_once('(') { + // Now we try to remove the final ")" (it must be at the end, if it exists) + if let Some(middle) = rest.strip_suffix(')') { + vec![first_segment.trim(), middle.trim()] + } else { + vec![word] + } + } else { + vec![word] + }; let validate = |slice: &str| { let is_valid = slice @@ -799,11 +807,9 @@ impl Attribute { ["inline_always"] => Attribute::Function(FunctionAttribute::InlineAlways), ["test", name] => { validate(name)?; - let malformed_scope = - LexerErrorKind::MalformedFuncAttribute { span, found: word.to_owned() }; match TestScope::lookup_str(name) { Some(scope) => Attribute::Function(FunctionAttribute::Test(scope)), - None => return Err(malformed_scope), + None => return Err(LexerErrorKind::MalformedTestAttribute { span }), } } ["field", name] => { diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 68510f2ffbd..c743d564339 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -25,8 +25,6 @@ use crate::hir::def_map::{LocalModuleId, ModuleDefId, ModuleId}; use crate::hir::type_check::generics::TraitGenerics; use crate::hir_def::traits::NamedType; use crate::hir_def::traits::ResolvedTraitBound; -use crate::usage_tracker::UnusedItem; -use crate::usage_tracker::UsageTracker; use 
crate::QuotedType; use crate::ast::{BinaryOpKind, FunctionDefinition, ItemVisibility}; @@ -271,8 +269,6 @@ pub struct NodeInterner { /// share the same global values. pub(crate) comptime_scopes: Vec>, - pub(crate) usage_tracker: UsageTracker, - /// Captures the documentation comments for each module, struct, trait, function, etc. pub(crate) doc_comments: HashMap>, } @@ -680,7 +676,6 @@ impl Default for NodeInterner { auto_import_names: HashMap::default(), comptime_scopes: vec![HashMap::default()], trait_impl_associated_types: HashMap::default(), - usage_tracker: UsageTracker::default(), doc_comments: HashMap::default(), } } @@ -2320,12 +2315,6 @@ impl NodeInterner { pub fn doc_comments(&self, id: ReferenceId) -> Option<&Vec> { self.doc_comments.get(&id) } - - pub fn unused_items( - &self, - ) -> &std::collections::HashMap> { - self.usage_tracker.unused_items() - } } impl Methods { diff --git a/compiler/noirc_frontend/src/parser/parser/expression.rs b/compiler/noirc_frontend/src/parser/parser/expression.rs index 93bb4980801..06f51b16842 100644 --- a/compiler/noirc_frontend/src/parser/parser/expression.rs +++ b/compiler/noirc_frontend/src/parser/parser/expression.rs @@ -4,7 +4,7 @@ use noirc_errors::Span; use crate::{ ast::{ ArrayLiteral, BlockExpression, CallExpression, CastExpression, ConstructorExpression, - Expression, ExpressionKind, GenericTypeArgs, Ident, IfExpression, IndexExpression, Literal, + Expression, ExpressionKind, Ident, IfExpression, IndexExpression, Literal, MemberAccessExpression, MethodCallExpression, Statement, TypePath, UnaryOp, UnresolvedType, }, parser::{labels::ParsingRuleLabel, parser::parse_many::separated_by_comma, ParserErrorReason}, @@ -548,15 +548,13 @@ impl<'a> Parser<'a> { Ident::new(String::new(), self.span_at_previous_token_end()) }; - let turbofish = if self.eat_double_colon() { + let turbofish = self.eat_double_colon().then(|| { let generics = self.parse_generic_type_args(); if generics.is_empty() { 
self.expected_token(Token::Less); } generics - } else { - GenericTypeArgs::default() - }; + }); Some(ExpressionKind::TypePath(TypePath { typ, item, turbofish })) } @@ -1587,7 +1585,7 @@ mod tests { }; assert_eq!(type_path.typ.to_string(), "Field"); assert_eq!(type_path.item.to_string(), "foo"); - assert!(type_path.turbofish.is_empty()); + assert!(type_path.turbofish.is_none()); } #[test] @@ -1599,7 +1597,7 @@ mod tests { }; assert_eq!(type_path.typ.to_string(), "Field"); assert_eq!(type_path.item.to_string(), "foo"); - assert!(!type_path.turbofish.is_empty()); + assert!(type_path.turbofish.is_some()); } #[test] diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 5ce3ec6686c..17accbd8366 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -3548,16 +3548,77 @@ fn uses_self_in_import() { } #[test] -fn alias_in_let_pattern() { +fn does_not_error_on_return_values_after_block_expression() { + // Regression test for https://github.com/noir-lang/noir/issues/4372 let src = r#" - struct Foo { x: T } + fn case1() -> [Field] { + if true { + } + &[1] + } + + fn case2() -> [u8] { + let mut var: u8 = 1; + { + var += 1; + } + &[var] + } + + fn main() { + let _ = case1(); + let _ = case2(); + } + "#; + assert_no_errors(src); +} + +#[test] +fn use_type_alias_in_method_call() { + let src = r#" + pub struct Foo { + } + + impl Foo { + fn new() -> Self { + Foo {} + } + } + + type Bar = Foo; - type Bar = Foo; + fn foo() -> Foo { + Bar::new() + } + + fn main() { + let _ = foo(); + } + "#; + assert_no_errors(src); +} + +#[test] +fn use_type_alias_to_generic_concrete_type_in_method_call() { + let src = r#" + pub struct Foo { + x: T, + } + + impl Foo { + fn new(x: T) -> Self { + Foo { x } + } + } + + type Bar = Foo; + + fn foo() -> Bar { + Bar::new(1) + } fn main() { - let Bar { x } = Foo { x: [0] }; - // This is just to show the compiler knows this is an array. 
- let _: [Field; 1] = x; + let _ = foo(); } "#; assert_no_errors(src); diff --git a/compiler/noirc_frontend/src/tests/aliases.rs b/compiler/noirc_frontend/src/tests/aliases.rs index 5239abcb366..8d3433299f6 100644 --- a/compiler/noirc_frontend/src/tests/aliases.rs +++ b/compiler/noirc_frontend/src/tests/aliases.rs @@ -31,3 +31,19 @@ fn allows_usage_of_type_alias_as_return_type() { "#; assert_no_errors(src); } + +#[test] +fn alias_in_let_pattern() { + let src = r#" + struct Foo { x: T } + + type Bar = Foo; + + fn main() { + let Bar { x } = Foo { x: [0] }; + // This is just to show the compiler knows this is an array. + let _: [Field; 1] = x; + } + "#; + assert_no_errors(src); +} diff --git a/compiler/noirc_frontend/src/tests/turbofish.rs b/compiler/noirc_frontend/src/tests/turbofish.rs index 3ded243a280..3bde3e17c80 100644 --- a/compiler/noirc_frontend/src/tests/turbofish.rs +++ b/compiler/noirc_frontend/src/tests/turbofish.rs @@ -269,3 +269,134 @@ fn turbofish_in_type_before_call_errors() { assert_eq!(expected_typ, "i32"); assert_eq!(expr_typ, "bool"); } + +#[test] +fn use_generic_type_alias_with_turbofish_in_method_call_does_not_error() { + let src = r#" + pub struct Foo { + } + + impl Foo { + fn new() -> Self { + Foo {} + } + } + + type Bar = Foo; + + fn foo() -> Foo { + Bar::::new() + } + + fn main() { + let _ = foo(); + } + "#; + assert_no_errors(src); +} + +#[test] +fn use_generic_type_alias_with_turbofish_in_method_call_errors() { + let src = r#" + pub struct Foo { + x: T, + } + + impl Foo { + fn new(x: T) -> Self { + Foo { x } + } + } + + type Bar = Foo; + + fn main() { + let _ = Bar::::new(true); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) = &errors[0].0 + else { + panic!("Expected a type mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(expected_typ, "i32"); + assert_eq!(expr_typ, "bool"); +} + 
+#[test] +fn use_generic_type_alias_with_partial_generics_with_turbofish_in_method_call_does_not_error() { + let src = r#" + pub struct Foo { + x: T, + y: U, + } + + impl Foo { + fn new(x: T, y: U) -> Self { + Foo { x, y } + } + } + + type Bar = Foo; + + fn main() { + let _ = Bar::::new(true, 1); + } + "#; + assert_no_errors(src); +} + +#[test] +fn use_generic_type_alias_with_partial_generics_with_turbofish_in_method_call_errors_first_type() { + let src = r#" + pub struct Foo { + x: T, + y: U, + } + + impl Foo { + fn new(x: T, y: U) -> Self { + Foo { x, y } + } + } + + type Bar = Foo; + + fn main() { + let _ = Bar::::new(1, 1); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); +} + +#[test] +fn use_generic_type_alias_with_partial_generics_with_turbofish_in_method_call_errors_second_type() { + let src = r#" + pub struct Foo { + x: T, + y: U, + } + + impl Foo { + fn new(x: T, y: U) -> Self { + Foo { x, y } + } + } + + type Bar = Foo; + + fn main() { + let _ = Bar::::new(true, true); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); +} diff --git a/compiler/noirc_frontend/src/usage_tracker.rs b/compiler/noirc_frontend/src/usage_tracker.rs index 0a112c6937d..fa87ca6961b 100644 --- a/compiler/noirc_frontend/src/usage_tracker.rs +++ b/compiler/noirc_frontend/src/usage_tracker.rs @@ -73,7 +73,7 @@ impl UsageTracker { }; } - pub(crate) fn unused_items(&self) -> &HashMap> { + pub fn unused_items(&self) -> &HashMap> { &self.unused_items } } diff --git a/noir_stdlib/src/field/bn254.nr b/noir_stdlib/src/field/bn254.nr index 9642c2aa1b8..a7ca7d77373 100644 --- a/noir_stdlib/src/field/bn254.nr +++ b/noir_stdlib/src/field/bn254.nr @@ -1,3 +1,4 @@ +use crate::field::field_less_than; use crate::runtime::is_unconstrained; // The low and high decomposition of the field modulus @@ -25,47 +26,20 @@ pub(crate) unconstrained fn decompose_hint(x: Field) -> (Field, Field) { compute_decomposition(x) } -fn compute_lt(x: Field, y: 
Field, num_bytes: u32) -> bool { - let x_bytes: [u8; 32] = x.to_le_bytes(); - let y_bytes: [u8; 32] = y.to_le_bytes(); - let mut x_is_lt = false; - let mut done = false; - for i in 0..num_bytes { - if (!done) { - let x_byte = x_bytes[num_bytes - 1 - i]; - let y_byte = y_bytes[num_bytes - 1 - i]; - let bytes_match = x_byte == y_byte; - if !bytes_match { - x_is_lt = x_byte < y_byte; - done = true; - } - } - } - x_is_lt -} - -fn compute_lte(x: Field, y: Field, num_bytes: u32) -> bool { +unconstrained fn lte_hint(x: Field, y: Field) -> bool { if x == y { true } else { - compute_lt(x, y, num_bytes) + field_less_than(x, y) } } -unconstrained fn lt_32_hint(x: Field, y: Field) -> bool { - compute_lt(x, y, 32) -} - -unconstrained fn lte_16_hint(x: Field, y: Field) -> bool { - compute_lte(x, y, 16) -} - // Assert that (alo > blo && ahi >= bhi) || (alo <= blo && ahi > bhi) fn assert_gt_limbs(a: (Field, Field), b: (Field, Field)) { let (alo, ahi) = a; let (blo, bhi) = b; unsafe { - let borrow = lte_16_hint(alo, blo); + let borrow = lte_hint(alo, blo); let rlo = alo - blo - 1 + (borrow as Field) * TWO_POW_128; let rhi = ahi - bhi - (borrow as Field); @@ -100,7 +74,7 @@ pub fn decompose(x: Field) -> (Field, Field) { pub fn assert_gt(a: Field, b: Field) { if is_unconstrained() { - assert(compute_lt(b, a, 32)); + assert(unsafe { field_less_than(b, a) }); } else { // Decompose a and b let a_limbs = decompose(a); @@ -117,13 +91,15 @@ pub fn assert_lt(a: Field, b: Field) { pub fn gt(a: Field, b: Field) -> bool { if is_unconstrained() { - compute_lt(b, a, 32) + unsafe { + field_less_than(b, a) + } } else if a == b { false } else { // Take a hint of the comparison and verify it unsafe { - if lt_32_hint(a, b) { + if field_less_than(a, b) { assert_gt(b, a); false } else { @@ -140,9 +116,7 @@ pub fn lt(a: Field, b: Field) -> bool { mod tests { // TODO: Allow imports from "super" - use crate::field::bn254::{ - assert_gt, compute_lt, compute_lte, decompose, gt, PHI, PLO, TWO_POW_128, - }; + 
use crate::field::bn254::{assert_gt, decompose, gt, lte_hint, PHI, PLO, TWO_POW_128}; #[test] fn check_decompose() { @@ -159,24 +133,15 @@ mod tests { } #[test] - fn check_compute_lt() { - assert(compute_lt(0, 1, 16)); - assert(compute_lt(0, 0x100, 16)); - assert(compute_lt(0x100, TWO_POW_128 - 1, 16)); - assert(!compute_lt(0, TWO_POW_128, 16)); - } - - #[test] - fn check_compute_lte() { - assert(compute_lte(0, 1, 16)); - assert(compute_lte(0, 0x100, 16)); - assert(compute_lte(0x100, TWO_POW_128 - 1, 16)); - assert(!compute_lte(0, TWO_POW_128, 16)); - - assert(compute_lte(0, 0, 16)); - assert(compute_lte(0x100, 0x100, 16)); - assert(compute_lte(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); - assert(compute_lte(TWO_POW_128, TWO_POW_128, 16)); + unconstrained fn check_lte_hint() { + assert(lte_hint(0, 1)); + assert(lte_hint(0, 0x100)); + assert(lte_hint(0x100, TWO_POW_128 - 1)); + assert(!lte_hint(0 - 1, 0)); + + assert(lte_hint(0, 0)); + assert(lte_hint(0x100, 0x100)); + assert(lte_hint(0 - 1, 0 - 1)); } #[test] diff --git a/noir_stdlib/src/field/mod.nr b/noir_stdlib/src/field/mod.nr index b632cf1f7a2..4b89cae4f30 100644 --- a/noir_stdlib/src/field/mod.nr +++ b/noir_stdlib/src/field/mod.nr @@ -211,6 +211,14 @@ pub comptime fn modulus_be_bytes() -> [u8] {} #[builtin(modulus_le_bytes)] pub comptime fn modulus_le_bytes() -> [u8] {} +/// An unconstrained only built in to efficiently compare fields. 
+#[builtin(field_less_than)] +unconstrained fn __field_less_than(x: Field, y: Field) -> bool {} + +pub(crate) unconstrained fn field_less_than(x: Field, y: Field) -> bool { + __field_less_than(x, y) +} + // Convert a 32 byte array to a field element by modding pub fn bytes32_to_field(bytes32: [u8; 32]) -> Field { // Convert it to a field element @@ -228,25 +236,33 @@ pub fn bytes32_to_field(bytes32: [u8; 32]) -> Field { } fn lt_fallback(x: Field, y: Field) -> bool { - let x_bytes: [u8; 32] = x.to_le_bytes(); - let y_bytes: [u8; 32] = y.to_le_bytes(); - let mut x_is_lt = false; - let mut done = false; - for i in 0..32 { - if (!done) { - let x_byte = x_bytes[32 - 1 - i] as u8; - let y_byte = y_bytes[32 - 1 - i] as u8; - let bytes_match = x_byte == y_byte; - if !bytes_match { - x_is_lt = x_byte < y_byte; - done = true; + if is_unconstrained() { + unsafe { + field_less_than(x, y) + } + } else { + let x_bytes: [u8; 32] = x.to_le_bytes(); + let y_bytes: [u8; 32] = y.to_le_bytes(); + let mut x_is_lt = false; + let mut done = false; + for i in 0..32 { + if (!done) { + let x_byte = x_bytes[32 - 1 - i] as u8; + let y_byte = y_bytes[32 - 1 - i] as u8; + let bytes_match = x_byte == y_byte; + if !bytes_match { + x_is_lt = x_byte < y_byte; + done = true; + } } } + x_is_lt } - x_is_lt } mod tests { + use super::field_less_than; + #[test] // docs:start:to_be_bits_example fn test_to_be_bits() { @@ -304,4 +320,12 @@ mod tests { assert_eq(Field::from_le_bytes::<8>(bits), field); } // docs:end:to_le_radix_example + + #[test] + unconstrained fn test_field_less_than() { + assert(field_less_than(0, 1)); + assert(field_less_than(0, 0x100)); + assert(field_less_than(0x100, 0 - 1)); + assert(!field_less_than(0 - 1, 0)); + } } diff --git a/noir_stdlib/src/hash/sha256.nr b/noir_stdlib/src/hash/sha256.nr index d3ab1b7f1b8..d55044907ac 100644 --- a/noir_stdlib/src/hash/sha256.nr +++ b/noir_stdlib/src/hash/sha256.nr @@ -508,9 +508,9 @@ fn hash_final_block(msg_block: MSG_BLOCK, mut state: STATE) 
-> HASH { // Return final hash as byte array for j in 0..8 { - let h_bytes: [u8; 4] = (state[7 - j] as Field).to_le_bytes(); + let h_bytes: [u8; 4] = (state[j] as Field).to_be_bytes(); for k in 0..4 { - out_h[31 - 4 * j - k] = h_bytes[k]; + out_h[4 * j + k] = h_bytes[k]; } } diff --git a/scripts/install_bb.sh b/scripts/install_bb.sh index 64baf78c182..cff81eedbac 100755 --- a/scripts/install_bb.sh +++ b/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.60.0" +VERSION="0.61.0" BBUP_PATH=~/.bb/bbup diff --git a/test_programs/compile_failure/cannot_deduce_numeric_generic/Nargo.toml b/test_programs/compile_failure/cannot_deduce_numeric_generic/Nargo.toml new file mode 100644 index 00000000000..e87c464f6c7 --- /dev/null +++ b/test_programs/compile_failure/cannot_deduce_numeric_generic/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "cannot_deduce_numeric_generic" +type = "bin" +authors = [""] +compiler_version = ">=0.33.0" + +[dependencies] diff --git a/test_programs/compile_failure/cannot_deduce_numeric_generic/src/main.nr b/test_programs/compile_failure/cannot_deduce_numeric_generic/src/main.nr new file mode 100644 index 00000000000..ee889e805f7 --- /dev/null +++ b/test_programs/compile_failure/cannot_deduce_numeric_generic/src/main.nr @@ -0,0 +1,8 @@ +fn foo() -> u32 { + N +} + +fn main() { + let _ = foo(); +} + diff --git a/test_programs/compile_success_empty/type_path/src/main.nr b/test_programs/compile_success_empty/type_path/src/main.nr index 92f04104868..ca9a9008f6c 100644 --- a/test_programs/compile_success_empty/type_path/src/main.nr +++ b/test_programs/compile_success_empty/type_path/src/main.nr @@ -5,6 +5,11 @@ fn main() { $foo::static() } } + + // Make sure this call works fine: in the past we used to not distinguish + // whether a TypePath had generics or not, always resolved them, filling them + // up with Type::Error, and eventually leading to an ICE. 
+ let _ = Field::from_be_bytes([1]); } pub struct Foo {} diff --git a/test_programs/gates_report_brillig_execution.sh b/test_programs/gates_report_brillig_execution.sh new file mode 100644 index 00000000000..024c7612541 --- /dev/null +++ b/test_programs/gates_report_brillig_execution.sh @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +set -e + +# These tests are incompatible with gas reporting +excluded_dirs=( + "workspace" + "workspace_default_member" + "double_verify_nested_proof" + "overlapping_dep_and_mod" + "comptime_println" + # Takes a very long time to execute as large loops do not get simplified. + "regression_4709" + # bit sizes for bigint operation doesn't match up. + "bigint" + # Expected to fail as test asserts on which runtime it is in. + "is_unconstrained" +) + +current_dir=$(pwd) +base_path="$current_dir/execution_success" +test_dirs=$(ls $base_path) + +# We generate a Noir workspace which contains all of the test cases +# This allows us to generate a gates report using `nargo info` for all of them at once. 
+ +echo "[workspace]" > Nargo.toml +echo "members = [" >> Nargo.toml + +for dir in $test_dirs; do + if [[ " ${excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + if [[ ${CI-false} = "true" ]] && [[ " ${ci_excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + echo " \"execution_success/$dir\"," >> Nargo.toml +done + +echo "]" >> Nargo.toml + +nargo info --profile-execution --json > gates_report_brillig_execution.json + +rm Nargo.toml \ No newline at end of file diff --git a/tooling/debugger/src/source_code_printer.rs b/tooling/debugger/src/source_code_printer.rs index d1c82ad96ba..b3682c9016f 100644 --- a/tooling/debugger/src/source_code_printer.rs +++ b/tooling/debugger/src/source_code_printer.rs @@ -275,6 +275,7 @@ mod tests { BTreeMap::default(), BTreeMap::default(), BTreeMap::default(), + BTreeMap::default(), )]; let debug_artifact = DebugArtifact::new(debug_symbols, &fm); diff --git a/tooling/lsp/Cargo.toml b/tooling/lsp/Cargo.toml index c15895d801f..04c8edf7ea9 100644 --- a/tooling/lsp/Cargo.toml +++ b/tooling/lsp/Cargo.toml @@ -22,13 +22,11 @@ nargo_toml.workspace = true noirc_driver.workspace = true noirc_errors.workspace = true noirc_frontend.workspace = true -noirc_artifacts.workspace = true serde.workspace = true serde_json.workspace = true strum = "0.24" tower.workspace = true async-lsp = { workspace = true, features = ["omni-trait"] } -serde_with = "3.2.0" thiserror.workspace = true fm.workspace = true rayon.workspace = true diff --git a/tooling/lsp/src/attribute_reference_finder.rs b/tooling/lsp/src/attribute_reference_finder.rs index 22afa086303..660ee8ffdfb 100644 --- a/tooling/lsp/src/attribute_reference_finder.rs +++ b/tooling/lsp/src/attribute_reference_finder.rs @@ -18,7 +18,7 @@ use noirc_frontend::{ path_resolver::{PathResolver, StandardPathResolver}, }, }, - node_interner::ReferenceId, + node_interner::{NodeInterner, ReferenceId}, parser::{ParsedSubModule, Parser}, token::CustomAttribute, usage_tracker::UsageTracker, @@ 
-30,6 +30,7 @@ pub(crate) struct AttributeReferenceFinder<'a> { /// The module ID in scope. This might change as we traverse the AST /// if we are analyzing something inside an inline module declaration. module_id: ModuleId, + interner: &'a NodeInterner, def_maps: &'a BTreeMap, reference_id: Option, } @@ -40,6 +41,7 @@ impl<'a> AttributeReferenceFinder<'a> { file: FileId, byte_index: usize, krate: CrateId, + interner: &'a NodeInterner, def_maps: &'a BTreeMap, ) -> Self { // Find the module the current file belongs to @@ -52,7 +54,7 @@ impl<'a> AttributeReferenceFinder<'a> { def_map.root() }; let module_id = ModuleId { krate, local_id }; - Self { byte_index, module_id, def_maps, reference_id: None } + Self { byte_index, module_id, interner, def_maps, reference_id: None } } pub(crate) fn find(&mut self, parsed_module: &ParsedModule) -> Option { @@ -102,7 +104,8 @@ impl<'a> Visitor for AttributeReferenceFinder<'a> { let resolver = StandardPathResolver::new(self.module_id, None); let mut usage_tracker = UsageTracker::default(); - let Ok(result) = resolver.resolve(self.def_maps, path, &mut usage_tracker, &mut None) + let Ok(result) = + resolver.resolve(self.interner, self.def_maps, path, &mut usage_tracker, &mut None) else { return; }; diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index a85b9d043b9..a152cc17c7b 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -42,6 +42,7 @@ use noirc_frontend::{ }, node_interner::NodeInterner, parser::ParserError, + usage_tracker::UsageTracker, ParsedModule, }; use rayon::prelude::*; @@ -54,9 +55,9 @@ use requests::{ on_code_action_request, on_code_lens_request, on_completion_request, on_document_symbol_request, on_formatting, on_goto_declaration_request, on_goto_definition_request, on_goto_type_definition_request, on_hover_request, on_initialize, - on_inlay_hint_request, on_prepare_rename_request, on_profile_run_request, - on_references_request, on_rename_request, on_shutdown, 
on_signature_help_request, - on_test_run_request, on_tests_request, LspInitializationOptions, + on_inlay_hint_request, on_prepare_rename_request, on_references_request, on_rename_request, + on_shutdown, on_signature_help_request, on_test_run_request, on_tests_request, + LspInitializationOptions, }; use serde_json::Value as JsonValue; use thiserror::Error; @@ -113,6 +114,7 @@ struct PackageCacheData { crate_graph: CrateGraph, node_interner: NodeInterner, def_maps: BTreeMap, + usage_tracker: UsageTracker, } impl LspState { @@ -154,7 +156,6 @@ impl NargoLspService { .request::(on_code_lens_request) .request::(on_tests_request) .request::(on_test_run_request) - .request::(on_profile_run_request) .request::(on_goto_definition_request) .request::(on_goto_declaration_request) .request::(on_goto_type_definition_request) diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs index d228eb564e6..f6fdd082d0b 100644 --- a/tooling/lsp/src/notifications/mod.rs +++ b/tooling/lsp/src/notifications/mod.rs @@ -166,6 +166,7 @@ pub(crate) fn process_workspace_for_noir_document( crate_graph: context.crate_graph, node_interner: context.def_interner, def_maps: context.def_maps, + usage_tracker: context.usage_tracker, }, ); diff --git a/tooling/lsp/src/requests/code_action.rs b/tooling/lsp/src/requests/code_action.rs index 5c2831be7e9..38cc6bddf64 100644 --- a/tooling/lsp/src/requests/code_action.rs +++ b/tooling/lsp/src/requests/code_action.rs @@ -19,6 +19,7 @@ use noirc_frontend::{ graph::CrateId, hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}, node_interner::NodeInterner, + usage_tracker::UsageTracker, }; use noirc_frontend::{ parser::{Item, ItemKind, ParsedSubModule}, @@ -62,6 +63,7 @@ pub(crate) fn on_code_action_request( args.crate_id, args.def_maps, args.interner, + args.usage_tracker, ); finder.find(&parsed_module) }) @@ -82,6 +84,7 @@ struct CodeActionFinder<'a> { module_id: ModuleId, def_maps: &'a BTreeMap, interner: &'a NodeInterner, + 
usage_tracker: &'a UsageTracker, /// How many nested `mod` we are in deep nesting: usize, /// The line where an auto_import must be inserted @@ -103,6 +106,7 @@ impl<'a> CodeActionFinder<'a> { krate: CrateId, def_maps: &'a BTreeMap, interner: &'a NodeInterner, + usage_tracker: &'a UsageTracker, ) -> Self { // Find the module the current file belongs to let def_map = &def_maps[&krate]; @@ -124,6 +128,7 @@ impl<'a> CodeActionFinder<'a> { module_id, def_maps, interner, + usage_tracker, nesting: 0, auto_import_line: 0, use_segment_positions: UseSegmentPositions::default(), diff --git a/tooling/lsp/src/requests/code_action/remove_unused_import.rs b/tooling/lsp/src/requests/code_action/remove_unused_import.rs index c1a3c87d142..4822a9d61ec 100644 --- a/tooling/lsp/src/requests/code_action/remove_unused_import.rs +++ b/tooling/lsp/src/requests/code_action/remove_unused_import.rs @@ -24,7 +24,7 @@ impl<'a> CodeActionFinder<'a> { return; } - let Some(unused_items) = self.interner.unused_items().get(&self.module_id) else { + let Some(unused_items) = self.usage_tracker.unused_items().get(&self.module_id) else { return; }; diff --git a/tooling/lsp/src/requests/code_lens_request.rs b/tooling/lsp/src/requests/code_lens_request.rs index 42f2af3a7bf..0c1877c156d 100644 --- a/tooling/lsp/src/requests/code_lens_request.rs +++ b/tooling/lsp/src/requests/code_lens_request.rs @@ -24,9 +24,6 @@ const EXECUTE_CODELENS_TITLE: &str = "Execute"; const DEBUG_COMMAND: &str = "nargo.debug.dap"; const DEBUG_CODELENS_TITLE: &str = "Debug"; -const PROFILE_COMMAND: &str = "nargo.profile"; -const PROFILE_CODELENS_TITLE: &str = "Profile"; - fn with_arrow(title: &str) -> String { format!("{ARROW} {title}") } @@ -162,7 +159,6 @@ pub(crate) fn collect_lenses_for_package( let internal_command_lenses = [ (INFO_CODELENS_TITLE, INFO_COMMAND), (EXECUTE_CODELENS_TITLE, EXECUTE_COMMAND), - (PROFILE_CODELENS_TITLE, PROFILE_COMMAND), (DEBUG_CODELENS_TITLE, DEBUG_COMMAND), ] .map(|(title, command)| { @@ -214,16 
+210,6 @@ pub(crate) fn collect_lenses_for_package( let info_lens = CodeLens { range, command: Some(info_command), data: None }; lenses.push(info_lens); - - let profile_command = Command { - title: PROFILE_CODELENS_TITLE.to_string(), - command: PROFILE_COMMAND.into(), - arguments: Some(package_selection_args(workspace, package)), - }; - - let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; - - lenses.push(profile_lens); } } diff --git a/tooling/lsp/src/requests/completion/builtins.rs b/tooling/lsp/src/requests/completion/builtins.rs index 078e2faf036..c2c561ced32 100644 --- a/tooling/lsp/src/requests/completion/builtins.rs +++ b/tooling/lsp/src/requests/completion/builtins.rs @@ -107,6 +107,15 @@ impl<'a> NodeFinder<'a> { let one_argument_attributes = &["abi", "field", "foreign", "oracle"]; self.suggest_one_argument_attributes(prefix, one_argument_attributes); + if name_matches("test", prefix) || name_matches("should_fail", prefix) { + self.completion_items.push(snippet_completion_item( + "test(should_fail)", + CompletionItemKind::METHOD, + "test(should_fail)", + None, + )); + } + if name_matches("test", prefix) || name_matches("should_fail_with", prefix) { self.completion_items.push(snippet_completion_item( "test(should_fail_with = \"...\")", diff --git a/tooling/lsp/src/requests/goto_definition.rs b/tooling/lsp/src/requests/goto_definition.rs index a2443ea165d..9380209da5c 100644 --- a/tooling/lsp/src/requests/goto_definition.rs +++ b/tooling/lsp/src/requests/goto_definition.rs @@ -46,6 +46,7 @@ fn on_goto_definition_inner( file_id, byte_index, args.crate_id, + args.interner, args.def_maps, ); finder.find(&parsed_module) diff --git a/tooling/lsp/src/requests/hover.rs b/tooling/lsp/src/requests/hover.rs index 64974a42133..eb066b53b78 100644 --- a/tooling/lsp/src/requests/hover.rs +++ b/tooling/lsp/src/requests/hover.rs @@ -47,6 +47,7 @@ pub(crate) fn on_hover_request( file_id, byte_index, args.crate_id, + args.interner, 
args.def_maps, ); finder.find(&parsed_module) diff --git a/tooling/lsp/src/requests/mod.rs b/tooling/lsp/src/requests/mod.rs index 0ee66f1f618..22bdda8d7d7 100644 --- a/tooling/lsp/src/requests/mod.rs +++ b/tooling/lsp/src/requests/mod.rs @@ -18,6 +18,7 @@ use nargo_fmt::Config; use noirc_frontend::graph::CrateId; use noirc_frontend::hir::def_map::CrateDefMap; +use noirc_frontend::usage_tracker::UsageTracker; use noirc_frontend::{graph::Dependency, node_interner::NodeInterner}; use serde::{Deserialize, Serialize}; @@ -44,7 +45,6 @@ mod goto_declaration; mod goto_definition; mod hover; mod inlay_hint; -mod profile_run; mod references; mod rename; mod signature_help; @@ -56,8 +56,7 @@ pub(crate) use { code_lens_request::on_code_lens_request, completion::on_completion_request, document_symbol::on_document_symbol_request, goto_declaration::on_goto_declaration_request, goto_definition::on_goto_definition_request, goto_definition::on_goto_type_definition_request, - hover::on_hover_request, inlay_hint::on_inlay_hint_request, - profile_run::on_profile_run_request, references::on_references_request, + hover::on_hover_request, inlay_hint::on_inlay_hint_request, references::on_references_request, rename::on_prepare_rename_request, rename::on_rename_request, signature_help::on_signature_help_request, test_run::on_test_run_request, tests::on_tests_request, @@ -416,6 +415,7 @@ pub(crate) struct ProcessRequestCallbackArgs<'a> { crate_name: String, dependencies: &'a Vec, def_maps: &'a BTreeMap, + usage_tracker: &'a UsageTracker, } pub(crate) fn process_request( @@ -452,6 +452,7 @@ where let file_manager = &workspace_cache_data.file_manager; let interner = &package_cache_data.node_interner; let def_maps = &package_cache_data.def_maps; + let usage_tracker = &package_cache_data.usage_tracker; let crate_graph = &package_cache_data.crate_graph; let crate_id = package_cache_data.crate_id; @@ -472,6 +473,7 @@ where crate_name: package.name.to_string(), dependencies: 
&crate_graph[crate_id].dependencies, def_maps, + usage_tracker, })) } @@ -506,14 +508,17 @@ where let interner; let def_maps; + let usage_tracker; if let Some(package_cache) = state.package_cache.get(&package.root_dir) { interner = &package_cache.node_interner; def_maps = &package_cache.def_maps; + usage_tracker = &package_cache.usage_tracker; } else { // We ignore the warnings and errors produced by compilation while resolving the definition let _ = noirc_driver::check_crate(&mut context, crate_id, &Default::default()); interner = &context.def_interner; def_maps = &context.def_maps; + usage_tracker = &context.usage_tracker; } let files = workspace_file_manager.as_file_map(); @@ -533,6 +538,7 @@ where crate_name: package.name.to_string(), dependencies: &context.crate_graph[crate_id].dependencies, def_maps, + usage_tracker, })) } diff --git a/tooling/lsp/src/requests/profile_run.rs b/tooling/lsp/src/requests/profile_run.rs deleted file mode 100644 index a7362300adc..00000000000 --- a/tooling/lsp/src/requests/profile_run.rs +++ /dev/null @@ -1,122 +0,0 @@ -use std::{ - collections::{BTreeMap, HashMap}, - future::{self, Future}, -}; - -use crate::insert_all_files_for_workspace_into_file_manager; -use acvm::acir::circuit::ExpressionWidth; -use async_lsp::{ErrorCode, ResponseError}; -use nargo::ops::report_errors; -use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_artifacts::debug::DebugArtifact; -use noirc_driver::{CompileOptions, DebugFile, NOIR_ARTIFACT_VERSION_STRING}; -use noirc_errors::{debug_info::OpCodesCount, Location}; - -use crate::{ - parse_diff, - types::{NargoProfileRunParams, NargoProfileRunResult}, - LspState, -}; -use fm::FileId; - -pub(crate) fn on_profile_run_request( - state: &mut LspState, - params: NargoProfileRunParams, -) -> impl Future> { - future::ready(on_profile_run_request_inner(state, params)) -} - -fn on_profile_run_request_inner( - state: &mut LspState, - params: NargoProfileRunParams, -) 
-> Result { - let root_path = state.root_path.as_deref().ok_or_else(|| { - ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find project root") - })?; - - let toml_path = find_package_manifest(root_path, root_path).map_err(|err| { - // If we cannot find a manifest, we can't run the test - ResponseError::new(ErrorCode::REQUEST_FAILED, err) - })?; - - let crate_name = params.package; - - let workspace = resolve_workspace_from_toml( - &toml_path, - PackageSelection::DefaultOrAll, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) - .map_err(|err| { - // If we found a manifest, but the workspace is invalid, we raise an error about it - ResponseError::new(ErrorCode::REQUEST_FAILED, err) - })?; - - let mut workspace_file_manager = workspace.new_file_manager(); - insert_all_files_for_workspace_into_file_manager( - state, - &workspace, - &mut workspace_file_manager, - ); - let parsed_files = parse_diff(&workspace_file_manager, state); - - // Since we filtered on crate name, this should be the only item in the iterator - match workspace.into_iter().next() { - Some(_package) => { - let expression_width = ExpressionWidth::Bounded { width: 3 }; - - let compiled_workspace = nargo::ops::compile_workspace( - &workspace_file_manager, - &parsed_files, - &workspace, - &CompileOptions::default(), - ); - - let (compiled_programs, compiled_contracts) = report_errors( - compiled_workspace, - &workspace_file_manager, - CompileOptions::default().deny_warnings, - CompileOptions::default().silence_warnings, - ) - .map_err(|err| ResponseError::new(ErrorCode::REQUEST_FAILED, err))?; - - let mut opcodes_counts: HashMap = HashMap::new(); - let mut file_map: BTreeMap = BTreeMap::new(); - for compiled_program in compiled_programs { - let compiled_program = - nargo::ops::transform_program(compiled_program, expression_width); - - for function_debug in compiled_program.debug.iter() { - let span_opcodes = function_debug.count_span_opcodes(); - opcodes_counts.extend(span_opcodes); - } - let 
debug_artifact: DebugArtifact = compiled_program.into(); - file_map.extend(debug_artifact.file_map); - } - - for compiled_contract in compiled_contracts { - let compiled_contract = - nargo::ops::transform_contract(compiled_contract, expression_width); - - let function_debug_info = compiled_contract - .functions - .iter() - .flat_map(|func| &func.debug) - .collect::>(); - for contract_function_debug in function_debug_info { - let span_opcodes = contract_function_debug.count_span_opcodes(); - opcodes_counts.extend(span_opcodes); - } - let debug_artifact: DebugArtifact = compiled_contract.into(); - file_map.extend(debug_artifact.file_map); - } - - let result = NargoProfileRunResult { file_map, opcodes_counts }; - - Ok(result) - } - None => Err(ResponseError::new( - ErrorCode::REQUEST_FAILED, - format!("Could not locate package named: {crate_name}"), - )), - } -} diff --git a/tooling/lsp/src/types.rs b/tooling/lsp/src/types.rs index 043c50a87fd..b49377787e8 100644 --- a/tooling/lsp/src/types.rs +++ b/tooling/lsp/src/types.rs @@ -1,15 +1,10 @@ -use fm::FileId; use lsp_types::{ CodeActionOptions, CompletionOptions, DeclarationCapability, DefinitionOptions, DocumentSymbolOptions, HoverOptions, InlayHintOptions, OneOf, ReferencesOptions, RenameOptions, SignatureHelpOptions, TypeDefinitionProviderCapability, }; -use noirc_driver::DebugFile; -use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; use serde::{Deserialize, Serialize}; -use serde_with::serde_as; -use std::collections::{BTreeMap, HashMap}; // Re-providing lsp_types that we don't need to override pub(crate) use lsp_types::{ @@ -23,8 +18,8 @@ pub(crate) mod request { use lsp_types::{request::Request, InitializeParams}; use super::{ - InitializeResult, NargoProfileRunParams, NargoProfileRunResult, NargoTestRunParams, - NargoTestRunResult, NargoTestsParams, NargoTestsResult, + InitializeResult, NargoTestRunParams, NargoTestRunResult, NargoTestsParams, + NargoTestsResult, }; // 
Re-providing lsp_types that we don't need to override @@ -56,14 +51,6 @@ pub(crate) mod request { type Result = NargoTestsResult; const METHOD: &'static str = "nargo/tests"; } - - #[derive(Debug)] - pub(crate) struct NargoProfileRun; - impl Request for NargoProfileRun { - type Params = NargoProfileRunParams; - type Result = NargoProfileRunResult; - const METHOD: &'static str = "nargo/profile/run"; - } } pub(crate) mod notification { @@ -253,17 +240,6 @@ pub(crate) struct NargoTestRunResult { pub(crate) result: String, pub(crate) message: Option, } -#[derive(Debug, Serialize, Deserialize)] -pub(crate) struct NargoProfileRunParams { - pub(crate) package: CrateName, -} -#[serde_as] -#[derive(Debug, Serialize, Deserialize)] -pub(crate) struct NargoProfileRunResult { - pub(crate) file_map: BTreeMap, - #[serde_as(as = "Vec<(_, _)>")] - pub(crate) opcodes_counts: HashMap, -} pub(crate) type CodeLensResult = Option>; pub(crate) type GotoDefinitionResult = Option; diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 0ad07c91ff4..18fb407d413 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -192,7 +192,6 @@ fn compile_programs( let target_width = get_target_width(package.expression_width, compile_options.expression_width); let program = nargo::ops::transform_program(program, target_width); - save_program_to_file(&program.into(), &package.name, workspace.target_directory_path()); Ok(((), warnings)) diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index f2e44fa893c..cf416b1fa5f 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,18 +1,21 @@ -use std::collections::HashMap; - use acvm::acir::circuit::ExpressionWidth; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use iter_extended::vecmap; -use nargo::package::{CrateName, Package}; +use nargo::{ + 
constants::PROVER_INPUT_FILE, + ops::DefaultForeignCallExecutor, + package::{CrateName, Package}, +}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_artifacts::{debug::DebugArtifact, program::ProgramArtifact}; +use noirc_abi::input_parser::Format; +use noirc_artifacts::program::ProgramArtifact; use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; -use noirc_errors::{debug_info::OpCodesCount, Location}; use prettytable::{row, table, Row}; use rayon::prelude::*; use serde::Serialize; -use crate::errors::CliError; +use crate::{cli::fs::inputs::read_inputs_from_file, errors::CliError}; use super::{ compile_cmd::{compile_workspace_full, get_target_width}, @@ -40,14 +43,18 @@ pub(crate) struct InfoCommand { #[clap(long, hide = true)] json: bool, - #[clap(long, hide = true)] - profile_info: bool, + #[clap(long)] + profile_execution: bool, + + /// The name of the toml file which contains the inputs for the prover + #[clap(long, short, default_value = PROVER_INPUT_FILE)] + prover_name: String, #[clap(flatten)] compile_options: CompileOptions, } -pub(crate) fn run(args: InfoCommand, config: NargoConfig) -> Result<(), CliError> { +pub(crate) fn run(mut args: InfoCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -58,6 +65,11 @@ pub(crate) fn run(args: InfoCommand, config: NargoConfig) -> Result<(), CliError Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + if args.profile_execution { + // Execution profiling is only relevant with the Brillig VM + // as a constrained circuit should have totally flattened control flow (e.g. loops and if statements). + args.compile_options.force_brillig = true; + } // Compile the full workspace in order to generate any build artifacts. 
compile_workspace_full(&workspace, &args.compile_options)?; @@ -71,25 +83,29 @@ pub(crate) fn run(args: InfoCommand, config: NargoConfig) -> Result<(), CliError }) .collect::>()?; - if args.profile_info { - for (_, compiled_program) in &binary_packages { - let debug_artifact = DebugArtifact::from(compiled_program.clone()); - for function_debug in compiled_program.debug_symbols.debug_infos.iter() { - let span_opcodes = function_debug.count_span_opcodes(); - print_span_opcodes(span_opcodes, &debug_artifact); - } - } - } - - let program_info = binary_packages - .into_iter() - .par_bridge() - .map(|(package, program)| { - let target_width = - get_target_width(package.expression_width, args.compile_options.expression_width); - count_opcodes_and_gates_in_program(program, &package, target_width) - }) - .collect(); + let program_info = if args.profile_execution { + assert!( + args.compile_options.force_brillig, + "Internal CLI Error: --force-brillig must be active when --profile-execution is active" + ); + profile_brillig_execution( + binary_packages, + &args.prover_name, + args.compile_options.expression_width, + )? 
+ } else { + binary_packages + .into_iter() + .par_bridge() + .map(|(package, program)| { + let target_width = get_target_width( + package.expression_width, + args.compile_options.expression_width, + ); + count_opcodes_and_gates_in_program(program, &package, target_width) + }) + .collect() + }; let info_report = InfoReport { programs: program_info }; @@ -114,55 +130,6 @@ pub(crate) fn run(args: InfoCommand, config: NargoConfig) -> Result<(), CliError Ok(()) } -/// Provides profiling information on -/// -/// Number of OpCodes in relation to Noir source file -/// and line number information -fn print_span_opcodes( - span_opcodes_map: HashMap, - debug_artifact: &DebugArtifact, -) { - let mut pairs: Vec<(&Location, &OpCodesCount)> = span_opcodes_map.iter().collect(); - - pairs.sort_by(|a, b| { - a.1.acir_size.cmp(&b.1.acir_size).then_with(|| a.1.brillig_size.cmp(&b.1.brillig_size)) - }); - - for (location, opcodes_count) in pairs { - let debug_file = debug_artifact.file_map.get(&location.file).unwrap(); - - let start_byte = byte_index(&debug_file.source, location.span.start() + 1); - let end_byte = byte_index(&debug_file.source, location.span.end() + 1); - let range = start_byte..end_byte; - let span_content = &debug_file.source[range]; - let line = debug_artifact.location_line_index(*location).unwrap() + 1; - println!( - "Ln. 
{}: {} (ACIR:{}, Brillig:{} opcode|s) in file: {}", - line, - span_content, - opcodes_count.acir_size, - opcodes_count.brillig_size, - debug_file.path.to_str().unwrap() - ); - } -} -fn byte_index(string: &str, index: u32) -> usize { - let mut byte_index = 0; - let mut char_index = 0; - - #[allow(clippy::explicit_counter_loop)] - for (byte_offset, _) in string.char_indices() { - if char_index == index { - return byte_index; - } - - byte_index = byte_offset; - char_index += 1; - } - - byte_index -} - #[derive(Debug, Default, Serialize)] struct InfoReport { programs: Vec, @@ -275,3 +242,42 @@ fn count_opcodes_and_gates_in_program( unconstrained_functions: unconstrained_info, } } + +fn profile_brillig_execution( + binary_packages: Vec<(Package, ProgramArtifact)>, + prover_name: &str, + expression_width: Option, +) -> Result, CliError> { + let mut program_info = Vec::new(); + for (package, program_artifact) in binary_packages.iter() { + // Parse the initial witness values from Prover.toml + let (inputs_map, _) = read_inputs_from_file( + &package.root_dir, + prover_name, + Format::Toml, + &program_artifact.abi, + )?; + let initial_witness = program_artifact.abi.encode(&inputs_map, None)?; + + let (_, profiling_samples) = nargo::ops::execute_program_with_profiling( + &program_artifact.bytecode, + initial_witness, + &Bn254BlackBoxSolver, + &mut DefaultForeignCallExecutor::new(false, None, None, None), + )?; + + let expression_width = get_target_width(package.expression_width, expression_width); + + program_info.push(ProgramInfo { + package_name: package.name.to_string(), + expression_width, + functions: vec![FunctionInfo { name: "main".to_string(), opcodes: 0 }], + unconstrained_functions_opcodes: profiling_samples.len(), + unconstrained_functions: vec![FunctionInfo { + name: "main".to_string(), + opcodes: profiling_samples.len(), + }], + }); + } + Ok(program_info) +} diff --git a/tooling/nargo_fmt/src/formatter/expression.rs b/tooling/nargo_fmt/src/formatter/expression.rs 
index 239b52b7d04..bff1799a298 100644 --- a/tooling/nargo_fmt/src/formatter/expression.rs +++ b/tooling/nargo_fmt/src/formatter/expression.rs @@ -382,9 +382,9 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { formatter.format_type(type_path.typ); formatter.write_token(Token::DoubleColon); formatter.write_identifier(type_path.item); - if !type_path.turbofish.is_empty() { + if let Some(turbofish) = type_path.turbofish { formatter.write_token(Token::DoubleColon); - formatter.format_generic_type_args(type_path.turbofish); + formatter.format_generic_type_args(turbofish); } })); group diff --git a/tooling/noirc_artifacts/src/debug.rs b/tooling/noirc_artifacts/src/debug.rs index 8e2add70ae7..5c47f1f2652 100644 --- a/tooling/noirc_artifacts/src/debug.rs +++ b/tooling/noirc_artifacts/src/debug.rs @@ -264,6 +264,7 @@ mod tests { BTreeMap::default(), BTreeMap::default(), BTreeMap::default(), + BTreeMap::default(), )]; let debug_artifact = DebugArtifact::new(debug_symbols, &fm); diff --git a/tooling/profiler/Cargo.toml b/tooling/profiler/Cargo.toml index c76b4c73e65..604208b5a54 100644 --- a/tooling/profiler/Cargo.toml +++ b/tooling/profiler/Cargo.toml @@ -33,6 +33,7 @@ acir.workspace = true nargo.workspace = true noirc_errors.workspace = true noirc_abi.workspace = true +noirc_evaluator.workspace = true # Logs tracing-subscriber.workspace = true diff --git a/tooling/profiler/src/flamegraph.rs b/tooling/profiler/src/flamegraph.rs index a72168a20af..7882ac903ef 100644 --- a/tooling/profiler/src/flamegraph.rs +++ b/tooling/profiler/src/flamegraph.rs @@ -11,6 +11,7 @@ use inferno::flamegraph::{from_lines, Options, TextTruncateDirection}; use noirc_errors::debug_info::DebugInfo; use noirc_errors::reporter::line_and_column_from_span; use noirc_errors::Location; +use noirc_evaluator::brillig::ProcedureId; use crate::opcode_formatter::AcirOrBrilligOpcode; @@ -111,6 +112,7 @@ fn generate_folded_sorted_lines<'files, F: AcirField>( if let Some(opcode) = &sample.opcode { 
location_names.push(format_opcode(opcode)); } + add_locations_to_folded_stack_items(&mut folded_stack_items, location_names, sample.count); } @@ -123,10 +125,20 @@ fn find_callsite_labels<'files>( brillig_function_id: Option, files: &'files impl Files<'files, FileId = fm::FileId>, ) -> Vec { + let mut procedure_id = None; let source_locations = debug_symbols.opcode_location(opcode_location).unwrap_or_else(|| { if let (Some(brillig_function_id), Some(brillig_location)) = (brillig_function_id, opcode_location.to_brillig_location()) { + let procedure_locs = debug_symbols.brillig_procedure_locs.get(&brillig_function_id); + if let Some(procedure_locs) = procedure_locs { + for (procedure, range) in procedure_locs.iter() { + if brillig_location.0 >= range.0 && brillig_location.0 <= range.1 { + procedure_id = Some(*procedure); + break; + } + } + } let brillig_locations = debug_symbols.brillig_locations.get(&brillig_function_id); if let Some(brillig_locations) = brillig_locations { brillig_locations.get(&brillig_location).cloned().unwrap_or_default() @@ -137,10 +149,16 @@ fn find_callsite_labels<'files>( vec![] } }); - let callsite_labels: Vec<_> = source_locations + + let mut callsite_labels: Vec<_> = source_locations .into_iter() .map(|location| location_to_callsite_label(location, files)) .collect(); + + if let Some(procedure_id) = procedure_id { + callsite_labels.push(format!("procedure::{}", ProcedureId::from_debug_id(procedure_id))); + } + callsite_labels } @@ -317,6 +335,7 @@ mod tests { BTreeMap::default(), BTreeMap::default(), BTreeMap::default(), + BTreeMap::default(), ); let samples: Vec> = vec![ diff --git a/yarn.lock b/yarn.lock index b39f9c257c6..748a6d18ecb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,9 +221,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.60.0": - version: 0.60.0 - resolution: "@aztec/bb.js@npm:0.60.0" +"@aztec/bb.js@npm:0.61.0": + version: 0.61.0 + resolution: "@aztec/bb.js@npm:0.61.0" dependencies: comlink: ^4.4.1 
commander: ^10.0.1 @@ -232,7 +232,7 @@ __metadata: tslib: ^2.4.0 bin: bb.js: dest/node/main.js - checksum: 74ab79d060624362e9d44dda11dc2445973460577b93dc0c16801158db7efb71cd612966d6169b46bfbbb4fd7c9c1b95ad8b6198b3c69d9f6de0ab0fb92387aa + checksum: db544eeb1378e121554fda7c5b0161aa5b8ed92217eb47740e1c0d68d423084e05c5136acb75720cb39ef9f79aff9ac1d3037be86837d2e2a8d02ff500e175ae languageName: node linkType: hard @@ -14123,7 +14123,7 @@ __metadata: version: 0.0.0-use.local resolution: "integration-tests@workspace:compiler/integration-tests" dependencies: - "@aztec/bb.js": 0.60.0 + "@aztec/bb.js": 0.61.0 "@noir-lang/noir_js": "workspace:*" "@noir-lang/noir_wasm": "workspace:*" "@nomicfoundation/hardhat-chai-matchers": ^2.0.0