diff --git a/.noir-sync-commit b/.noir-sync-commit index b58185944f8..29560ec9797 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -2f46f5a6832067dd1cfbba89c866e6c119cb5438 \ No newline at end of file +6d0f86ba389a5b59b1d7fdcadcbce3e40eecaa48 diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index 6a9a918b955..36ece11b1bf 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -521,8 +521,27 @@ jobs: working-directory: ./examples/codegen_verifier run: ./test.sh + critical-library-list: + name: Load critical library list + runs-on: ubuntu-latest + outputs: + libraries: ${{ steps.get_critical_libraries.outputs.libraries }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Build list of libraries + id: get_critical_libraries + run: | + LIBRARIES=$(grep -Po "^https://github.com/\K.+" ./CRITICAL_NOIR_LIBRARIES | jq -R -s -c 'split("\n") | map(select(. != "")) | map({ repo: ., path: "./"})') + echo "libraries=$LIBRARIES" + echo "libraries=$LIBRARIES" >> $GITHUB_OUTPUT + env: + GH_TOKEN: ${{ github.token }} + external-repo-checks: - needs: [build-nargo] + needs: [build-nargo, critical-library-list] runs-on: ubuntu-latest # Only run when 'run-external-checks' label is present if: contains(github.event.pull_request.labels.*.name, 'run-external-checks') @@ -530,25 +549,15 @@ jobs: strategy: fail-fast: false matrix: - project: - - { repo: noir-lang/ec, path: ./ } - - { repo: noir-lang/eddsa, path: ./ } - - { repo: noir-lang/mimc, path: ./ } - - { repo: noir-lang/noir_sort, path: ./ } - - { repo: noir-lang/noir-edwards, path: ./ } - - { repo: noir-lang/noir-bignum, path: ./ } - - { repo: noir-lang/noir_bigcurve, path: ./ } - - { repo: noir-lang/noir_base64, path: ./ } - - { repo: noir-lang/noir_string_search, path: ./ } - - { repo: noir-lang/sparse_array, path: ./ } - - { repo: noir-lang/noir_rsa, path: ./lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/aztec-nr } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/parity-lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/private-kernel-lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/reset-kernel-lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/rollup-lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/types } + project: ${{ fromJson( needs.critical-library-list.outputs.libraries )}} + include: + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/aztec-nr } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/parity-lib } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/private-kernel-lib } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/reset-kernel-lib } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/rollup-lib } + - project: { repo: AztecProtocol/aztec-packages, path: 
./noir-projects/noir-protocol-circuits/crates/types } name: Check external repo - ${{ matrix.project.repo }} steps: diff --git a/noir/noir-repo/CRITICAL_NOIR_LIBRARIES b/noir/noir-repo/CRITICAL_NOIR_LIBRARIES new file mode 100644 index 00000000000..c753b76a4fc --- /dev/null +++ b/noir/noir-repo/CRITICAL_NOIR_LIBRARIES @@ -0,0 +1,13 @@ +https://github.com/noir-lang/ec +https://github.com/noir-lang/eddsa +https://github.com/noir-lang/mimc +https://github.com/noir-lang/schnorr +https://github.com/noir-lang/noir_sort +https://github.com/noir-lang/noir-edwards +https://github.com/noir-lang/noir-bignum +https://github.com/noir-lang/noir_bigcurve +https://github.com/noir-lang/noir_base64 +https://github.com/noir-lang/noir_string_search +https://github.com/noir-lang/sparse_array +https://github.com/noir-lang/noir_rsa +https://github.com/noir-lang/noir_json_parser diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 96ceb94fcdd..e8226d5fc58 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -2882,6 +2882,7 @@ dependencies = [ "noirc_frontend", "semver", "serde", + "test-case", "thiserror", "toml 0.7.8", "url 2.5.3", @@ -3206,7 +3207,6 @@ dependencies = [ "proptest", "proptest-derive 0.5.0", "rangemap", - "regex", "rustc-hash", "serde", "serde_json", @@ -3225,7 +3225,6 @@ dependencies = [ "acvm", "iter-extended", "jsonrpc", - "regex", "serde", "serde_json", "thiserror", diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs index 0a55e4ca17c..f49cd61e813 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs @@ -12,26 +12,36 @@ use acir::{ use crate::compiler::CircuitSimulator; -pub(crate) struct MergeExpressionsOptimizer { +pub(crate) struct MergeExpressionsOptimizer { resolved_blocks: HashMap>, + modified_gates: HashMap>, + deleted_gates: BTreeSet, } -impl MergeExpressionsOptimizer { +impl MergeExpressionsOptimizer { pub(crate) fn new() -> Self { - MergeExpressionsOptimizer { resolved_blocks: HashMap::new() } + MergeExpressionsOptimizer { + resolved_blocks: HashMap::new(), + modified_gates: HashMap::new(), + deleted_gates: BTreeSet::new(), + } } /// This pass analyzes the circuit and identifies intermediate variables that are /// only used in two gates. 
It then merges the gate that produces the /// intermediate variable into the second one that uses it /// Note: This pass is only relevant for backends that can handle unlimited width - pub(crate) fn eliminate_intermediate_variable( + pub(crate) fn eliminate_intermediate_variable( &mut self, circuit: &Circuit, acir_opcode_positions: Vec, ) -> (Vec>, Vec) { + // Initialization + self.modified_gates.clear(); + self.deleted_gates.clear(); + self.resolved_blocks.clear(); + // Keep track, for each witness, of the gates that use it let circuit_inputs = circuit.circuit_arguments(); - self.resolved_blocks = HashMap::new(); let mut used_witness: BTreeMap> = BTreeMap::new(); for (i, opcode) in circuit.opcodes.iter().enumerate() { let witnesses = self.witness_inputs(opcode); @@ -46,80 +56,89 @@ impl MergeExpressionsOptimizer { } } - let mut modified_gates: HashMap> = HashMap::new(); - let mut new_circuit = Vec::new(); - let mut new_acir_opcode_positions = Vec::new(); // For each opcode, try to get a target opcode to merge with - for (i, (opcode, opcode_position)) in - circuit.opcodes.iter().zip(acir_opcode_positions).enumerate() - { + for (i, opcode) in circuit.opcodes.iter().enumerate() { if !matches!(opcode, Opcode::AssertZero(_)) { - new_circuit.push(opcode.clone()); - new_acir_opcode_positions.push(opcode_position); continue; } - let opcode = modified_gates.get(&i).unwrap_or(opcode).clone(); - let mut to_keep = true; - let input_witnesses = self.witness_inputs(&opcode); - for w in input_witnesses { - let Some(gates_using_w) = used_witness.get(&w) else { - continue; - }; - // We only consider witness which are used in exactly two arithmetic gates - if gates_using_w.len() == 2 { - let first = *gates_using_w.first().expect("gates_using_w.len == 2"); - let second = *gates_using_w.last().expect("gates_using_w.len == 2"); - let b = if second == i { - first - } else { - // sanity check - assert!(i == first); - second + if let Some(opcode) = self.get_opcode(i, circuit) { + let input_witnesses = self.witness_inputs(&opcode); + for w in input_witnesses { + let Some(gates_using_w) = used_witness.get(&w) else { + continue; }; - - let second_gate = modified_gates.get(&b).unwrap_or(&circuit.opcodes[b]); - if let (Opcode::AssertZero(expr_define), Opcode::AssertZero(expr_use)) = - (&opcode, second_gate) - { - // We cannot merge an expression into an earlier opcode, because this - // would break the 'execution ordering' of the opcodes - // This case can happen because a previous merge would change an opcode - // and eliminate a witness from it, giving new opportunities for this - // witness to be used in only two expressions - // TODO: the missed optimization for the i>b case can be handled by - // - doing this pass again until there is no change, or - // - merging 'b' into 'i' instead - if i < b { - if let Some(expr) = Self::merge(expr_use, expr_define, w) { - modified_gates.insert(b, Opcode::AssertZero(expr)); - to_keep = false; - // Update the 'used_witness' map to account for the merge. 
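As an illustrative aside (not part of the diff): the merge step described in the comments above eliminates the shared witness by Gaussian elimination — the defining expression is scaled so its coefficient on `w` cancels the one in the using expression, and the two are summed, removing the intermediate witness. A minimal sketch of that idea over a toy linear form; `LinearExpr` and its `f64` coefficients are hypothetical stand-ins for ACIR's `Expression` over a prime field (which also carries multiplication terms):

```rust
use std::collections::BTreeMap;

/// Toy linear expression: witness index -> coefficient, plus a constant term.
/// Stands in for ACIR's `Expression`; real field arithmetic is replaced by f64.
#[derive(Clone, Debug, PartialEq)]
struct LinearExpr {
    coefficients: BTreeMap<usize, f64>,
    constant: f64,
}

impl LinearExpr {
    /// Eliminate witness `w` from `target` using `define` (both must mention `w`).
    /// Computes `target - (target[w] / define[w]) * define`, i.e. Gaussian
    /// elimination on `w`, and drops any cancelled terms.
    fn eliminate(target: &LinearExpr, define: &LinearExpr, w: usize) -> Option<LinearExpr> {
        let target_coef = *target.coefficients.get(&w)?;
        let define_coef = *define.coefficients.get(&w)?;
        let k = target_coef / define_coef;

        let mut result = target.clone();
        for (witness, coef) in &define.coefficients {
            *result.coefficients.entry(*witness).or_insert(0.0) -= k * coef;
        }
        result.constant -= k * define.constant;
        result.coefficients.retain(|_, c| *c != 0.0);
        Some(result)
    }
}

fn main() {
    // Gate A defines the intermediate witness w2: w0 + w1 - w2 = 0.
    let define = LinearExpr {
        coefficients: BTreeMap::from([(0, 1.0), (1, 1.0), (2, -1.0)]),
        constant: 0.0,
    };
    // Gate B is the only other user of w2: w2 + w3 = 0.
    let target = LinearExpr {
        coefficients: BTreeMap::from([(2, 1.0), (3, 1.0)]),
        constant: 0.0,
    };
    // Merging yields w0 + w1 + w3 = 0, with the intermediate witness w2 gone.
    println!("{:?}", LinearExpr::eliminate(&target, &define, 2));
}
```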
- for w2 in CircuitSimulator::expr_wit(expr_define) { - if !circuit_inputs.contains(&w2) { - let v = used_witness.entry(w2).or_default(); - v.insert(b); - v.remove(&i); + // We only consider witness which are used in exactly two arithmetic gates + if gates_using_w.len() == 2 { + let first = *gates_using_w.first().expect("gates_using_w.len == 2"); + let second = *gates_using_w.last().expect("gates_using_w.len == 2"); + let b = if second == i { + first + } else { + // sanity check + assert!(i == first); + second + }; + // Merge the opcode with smaller index into the other one + // by updating modified_gates/deleted_gates/used_witness + // returns false if it could not merge them + let mut merge_opcodes = |op1, op2| -> bool { + if op1 == op2 { + return false; + } + let (source, target) = if op1 < op2 { (op1, op2) } else { (op2, op1) }; + let source_opcode = self.get_opcode(source, circuit); + let target_opcode = self.get_opcode(target, circuit); + if let ( + Some(Opcode::AssertZero(expr_use)), + Some(Opcode::AssertZero(expr_define)), + ) = (target_opcode, source_opcode) + { + if let Some(expr) = + Self::merge_expression(&expr_use, &expr_define, w) + { + self.modified_gates.insert(target, Opcode::AssertZero(expr)); + self.deleted_gates.insert(source); + // Update the 'used_witness' map to account for the merge. + let mut witness_list = CircuitSimulator::expr_wit(&expr_use); + witness_list.extend(CircuitSimulator::expr_wit(&expr_define)); + for w2 in witness_list { + if !circuit_inputs.contains(&w2) { + used_witness.entry(w2).and_modify(|v| { + v.insert(target); + v.remove(&source); + }); + } } + return true; } - // We need to stop here and continue with the next opcode - // because the merge invalidates the current opcode. - break; } + false + }; + + if merge_opcodes(b, i) { + // We need to stop here and continue with the next opcode + // because the merge invalidates the current opcode. 
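Another aside on the bookkeeping introduced above: rather than pushing kept opcodes into the new circuit as it scans, the pass now records merges in `modified_gates` and `deleted_gates` and always reads the "current view" of an opcode through `get_opcode`, rebuilding the circuit (and the parallel position list) in a single final pass. A hedged sketch of that pattern with hypothetical simplified types (`String` opcodes instead of `Opcode<F>`):

```rust
use std::collections::{BTreeSet, HashMap};

/// Toy stand-in for the optimizer's merge bookkeeping.
struct MergeState {
    /// Opcodes rewritten by an earlier merge, keyed by their original index.
    modified: HashMap<usize, String>,
    /// Opcodes whose contents were folded into another opcode.
    deleted: BTreeSet<usize>,
}

impl MergeState {
    /// Current view of opcode `i`: deleted opcodes are gone, modified ones
    /// shadow the originals, everything else comes from the input circuit.
    fn get_opcode(&self, i: usize, circuit: &[String]) -> Option<String> {
        if self.deleted.contains(&i) {
            return None;
        }
        self.modified.get(&i).cloned().or_else(|| circuit.get(i).cloned())
    }

    /// Rebuild the circuit and opcode positions once all merges are recorded.
    fn rebuild(&self, circuit: &[String], positions: &[usize]) -> (Vec<String>, Vec<usize>) {
        let mut new_circuit = Vec::new();
        let mut new_positions = Vec::new();
        for (i, position) in positions.iter().enumerate() {
            if let Some(op) = self.get_opcode(i, circuit) {
                new_circuit.push(op);
                new_positions.push(*position);
            }
        }
        (new_circuit, new_positions)
    }
}

fn main() {
    let circuit = vec!["g0".to_string(), "g1".to_string(), "g2".to_string()];
    let positions = vec![0, 1, 2];
    let state = MergeState {
        modified: HashMap::from([(2, "g1_merged_into_g2".to_string())]),
        deleted: BTreeSet::from([1]),
    };
    // g1 disappears and g2 is replaced by the merged opcode.
    println!("{:?}", state.rebuild(&circuit, &positions));
}
```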
+ break; } } } } + } + + // Construct the new circuit from modified/deleted gates + let mut new_circuit = Vec::new(); + let mut new_acir_opcode_positions = Vec::new(); - if to_keep { - let opcode = modified_gates.get(&i).cloned().unwrap_or(opcode); - new_circuit.push(opcode); - new_acir_opcode_positions.push(opcode_position); + for (i, opcode_position) in acir_opcode_positions.iter().enumerate() { + if let Some(op) = self.get_opcode(i, circuit) { + new_circuit.push(op); + new_acir_opcode_positions.push(*opcode_position); } } (new_circuit, new_acir_opcode_positions) } - fn brillig_input_wit(&self, input: &BrilligInputs) -> BTreeSet { + fn brillig_input_wit(&self, input: &BrilligInputs) -> BTreeSet { let mut result = BTreeSet::new(); match input { BrilligInputs::Single(expr) => { @@ -152,7 +171,7 @@ impl MergeExpressionsOptimizer { } // Returns the input witnesses used by the opcode - fn witness_inputs(&self, opcode: &Opcode) -> BTreeSet { + fn witness_inputs(&self, opcode: &Opcode) -> BTreeSet { match opcode { Opcode::AssertZero(expr) => CircuitSimulator::expr_wit(expr), Opcode::BlackBoxFuncCall(bb_func) => { @@ -198,7 +217,7 @@ impl MergeExpressionsOptimizer { // Merge 'expr' into 'target' via Gaussian elimination on 'w' // Returns None if the expressions cannot be merged - fn merge( + fn merge_expression( target: &Expression, expr: &Expression, w: Witness, @@ -226,6 +245,13 @@ impl MergeExpressionsOptimizer { } None } + + fn get_opcode(&self, g: usize, circuit: &Circuit) -> Option> { + if self.deleted_gates.contains(&g) { + return None; + } + self.modified_gates.get(&g).or(circuit.opcodes.get(g)).cloned() + } } #[cfg(test)] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs b/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs index 8127e3d03ef..75ea557d3de 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs @@ -12,8 +12,7 @@ pub mod ssa; pub use ssa::create_program; pub use ssa::ir::instruction::ErrorType; -/// Trims leading whitespace from each line of the input string, according to -/// how much leading whitespace there is on the first non-empty line. 
+/// Trims leading whitespace from each line of the input string #[cfg(test)] pub(crate) fn trim_leading_whitespace_from_lines(src: &str) -> String { let mut lines = src.trim_end().lines(); @@ -21,11 +20,10 @@ pub(crate) fn trim_leading_whitespace_from_lines(src: &str) -> String { while first_line.is_empty() { first_line = lines.next().unwrap(); } - let indent = first_line.len() - first_line.trim_start().len(); let mut result = first_line.trim_start().to_string(); for line in lines { result.push('\n'); - result.push_str(&line[indent..]); + result.push_str(line.trim_start()); } result } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs index b9faf1c46ec..016d7ffa25b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs @@ -2,10 +2,11 @@ use std::sync::Arc; use acvm::{acir::AcirField, BlackBoxFunctionSolver, BlackBoxResolutionError, FieldElement}; +use crate::ssa::ir::instruction::BlackBoxFunc; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::{CallStack, DataFlowGraph}, - instruction::{Instruction, SimplifyResult}, + instruction::{Instruction, Intrinsic, SimplifyResult}, types::Type, value::ValueId, }; @@ -70,52 +71,125 @@ pub(super) fn simplify_msm( block: BasicBlockId, call_stack: &CallStack, ) -> SimplifyResult { - // TODO: Handle MSMs where a subset of the terms are constant. + let mut is_constant; + match (dfg.get_array_constant(arguments[0]), dfg.get_array_constant(arguments[1])) { (Some((points, _)), Some((scalars, _))) => { - let Some(points) = points - .into_iter() - .map(|id| dfg.get_numeric_constant(id)) - .collect::>>() - else { - return SimplifyResult::None; - }; - - let Some(scalars) = scalars - .into_iter() - .map(|id| dfg.get_numeric_constant(id)) - .collect::>>() - else { - return SimplifyResult::None; - }; + // We decompose points and scalars into constant and non-constant parts in order to simplify MSMs where a subset of the terms are constant. 
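As an aside on the `simplify_msm` rewrite above: the pass now splits the (scalar, point) pairs into a constant part, which is folded at compile time via the blackbox solver, and a variable part, which is re-emitted as a smaller `multi_scalar_mul` call (with the folded constant point appended back when it is not the point at infinity). A minimal sketch of just the partitioning step, using hypothetical simplified types (`Option<f64>` standing in for "known constant or not"), without the zero-scalar and infinity special cases:

```rust
/// Toy model of one MSM term: `Some(_)` means the value is a compile-time
/// constant, `None` means it is only known at runtime.
#[derive(Clone, Copy, Debug)]
struct Term {
    scalar: Option<f64>,
    point: Option<(f64, f64)>,
}

/// Split MSM terms into those that can be folded now and those that must stay
/// in the circuit, mirroring the constant/variable split in the pass above.
fn partition(terms: &[Term]) -> (Vec<(f64, (f64, f64))>, Vec<Term>) {
    let mut constant = Vec::new();
    let mut variable = Vec::new();
    for term in terms {
        match (term.scalar, term.point) {
            (Some(scalar), Some(point)) => constant.push((scalar, point)),
            _ => variable.push(*term),
        }
    }
    (constant, variable)
}

fn main() {
    let terms = [
        Term { scalar: Some(2.0), point: Some((1.0, 3.0)) }, // fully constant: foldable
        Term { scalar: None, point: Some((5.0, 7.0)) },      // runtime scalar: kept
    ];
    let (constant, variable) = partition(&terms);
    // The constant part would be evaluated by the solver and its result appended
    // (with scalar 1) to the variable part before emitting the reduced MSM.
    println!("fold {} term(s), keep {} term(s)", constant.len(), variable.len());
}
```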
+ let mut constant_points = vec![]; + let mut constant_scalars_lo = vec![]; + let mut constant_scalars_hi = vec![]; + let mut var_points = vec![]; + let mut var_scalars = vec![]; + let len = scalars.len() / 2; + for i in 0..len { + match ( + dfg.get_numeric_constant(scalars[2 * i]), + dfg.get_numeric_constant(scalars[2 * i + 1]), + dfg.get_numeric_constant(points[3 * i]), + dfg.get_numeric_constant(points[3 * i + 1]), + dfg.get_numeric_constant(points[3 * i + 2]), + ) { + (Some(lo), Some(hi), _, _, _) if lo.is_zero() && hi.is_zero() => { + is_constant = true; + constant_scalars_lo.push(lo); + constant_scalars_hi.push(hi); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::one()); + } + (_, _, _, _, Some(infinity)) if infinity.is_one() => { + is_constant = true; + constant_scalars_lo.push(FieldElement::zero()); + constant_scalars_hi.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::one()); + } + (Some(lo), Some(hi), Some(x), Some(y), Some(infinity)) => { + is_constant = true; + constant_scalars_lo.push(lo); + constant_scalars_hi.push(hi); + constant_points.push(x); + constant_points.push(y); + constant_points.push(infinity); + } + _ => { + is_constant = false; + } + } - let mut scalars_lo = Vec::new(); - let mut scalars_hi = Vec::new(); - for (i, scalar) in scalars.into_iter().enumerate() { - if i % 2 == 0 { - scalars_lo.push(scalar); - } else { - scalars_hi.push(scalar); + if !is_constant { + var_points.push(points[3 * i]); + var_points.push(points[3 * i + 1]); + var_points.push(points[3 * i + 2]); + var_scalars.push(scalars[2 * i]); + var_scalars.push(scalars[2 * i + 1]); } } - let Ok((result_x, result_y, result_is_infinity)) = - solver.multi_scalar_mul(&points, &scalars_lo, &scalars_hi) - else { + // If there are no constant terms, we can't simplify + if constant_scalars_lo.is_empty() { + return SimplifyResult::None; + } + let Ok((result_x, result_y, result_is_infinity)) = solver.multi_scalar_mul( + &constant_points, + &constant_scalars_lo, + &constant_scalars_hi, + ) else { return SimplifyResult::None; }; - let result_x = dfg.make_constant(result_x, Type::field()); - let result_y = dfg.make_constant(result_y, Type::field()); - let result_is_infinity = dfg.make_constant(result_is_infinity, Type::field()); + // If there are no variable term, we can directly return the constant result + if var_scalars.is_empty() { + let result_x = dfg.make_constant(result_x, Type::field()); + let result_y = dfg.make_constant(result_y, Type::field()); + let result_is_infinity = dfg.make_constant(result_is_infinity, Type::field()); - let elements = im::vector![result_x, result_y, result_is_infinity]; - let typ = Type::Array(Arc::new(vec![Type::field()]), 3); - let instruction = Instruction::MakeArray { elements, typ }; - let result_array = - dfg.insert_instruction_and_results(instruction, block, None, call_stack.clone()); + let elements = im::vector![result_x, result_y, result_is_infinity]; + let typ = Type::Array(Arc::new(vec![Type::field()]), 3); + let instruction = Instruction::MakeArray { elements, typ }; + let result_array = dfg.insert_instruction_and_results( + instruction, + block, + None, + call_stack.clone(), + ); - SimplifyResult::SimplifiedTo(result_array.first()) + return SimplifyResult::SimplifiedTo(result_array.first()); + } + // If there is only one non-null constant term, we cannot simplify + if 
constant_scalars_lo.len() == 1 && result_is_infinity != FieldElement::one() { + return SimplifyResult::None; + } + // Add the constant part back to the non-constant part, if it is not null + let one = dfg.make_constant(FieldElement::one(), Type::field()); + let zero = dfg.make_constant(FieldElement::zero(), Type::field()); + if result_is_infinity.is_zero() { + var_scalars.push(one); + var_scalars.push(zero); + let result_x = dfg.make_constant(result_x, Type::field()); + let result_y = dfg.make_constant(result_y, Type::field()); + let result_is_infinity = dfg.make_constant(result_is_infinity, Type::bool()); + var_points.push(result_x); + var_points.push(result_y); + var_points.push(result_is_infinity); + } + // Construct the simplified MSM expression + let typ = Type::Array(Arc::new(vec![Type::field()]), var_scalars.len() as u32); + let scalars = Instruction::MakeArray { elements: var_scalars.into(), typ }; + let scalars = dfg + .insert_instruction_and_results(scalars, block, None, call_stack.clone()) + .first(); + let typ = Type::Array(Arc::new(vec![Type::field()]), var_points.len() as u32); + let points = Instruction::MakeArray { elements: var_points.into(), typ }; + let points = + dfg.insert_instruction_and_results(points, block, None, call_stack.clone()).first(); + let msm = dfg.import_intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)); + SimplifyResult::SimplifiedToInstruction(Instruction::Call { + func: msm, + arguments: vec![points, scalars], + }) } _ => SimplifyResult::None, } @@ -228,3 +302,93 @@ pub(super) fn simplify_signature( _ => SimplifyResult::None, } } + +#[cfg(feature = "bn254")] +#[cfg(test)] +mod test { + use crate::ssa::opt::assert_normalized_ssa_equals; + use crate::ssa::Ssa; + + #[cfg(feature = "bn254")] + #[test] + fn full_constant_folding() { + let src = r#" + acir(inline) fn main f0 { + b0(): + v0 = make_array [Field 2, Field 3, Field 5, Field 5] : [Field; 4] + v1 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 6] + v2 = call multi_scalar_mul (v1, v0) -> [Field; 3] + return v2 + }"#; + let ssa = Ssa::from_str(src).unwrap(); + + let expected_src = r#" + acir(inline) fn main f0 { + b0(): + v3 = make_array [Field 2, Field 3, Field 5, Field 5] : [Field; 4] + v7 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 6] + v10 = make_array [Field 1478523918288173385110236399861791147958001875200066088686689589556927843200, Field 700144278551281040379388961242974992655630750193306467120985766322057145630, Field 0] : [Field; 3] + return v10 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } + + #[cfg(feature = "bn254")] + #[test] + fn simplify_zero() { + let src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v2 = make_array [v0, Field 0, Field 0, Field 0, v0, Field 0] : [Field; 6] + v3 = make_array [ + Field 0, Field 0, Field 1, v0, v1, Field 0, Field 1, v0, Field 0] : [Field; 9] + v4 = call multi_scalar_mul (v3, v2) -> [Field; 3] + + return v4 + + }"#; + let ssa = Ssa::from_str(src).unwrap(); + //First point is zero, second scalar is zero, so we should be left with the scalar mul of the last point. 
+ let expected_src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = make_array [v0, Field 0, Field 0, Field 0, v0, Field 0] : [Field; 6] + v5 = make_array [Field 0, Field 0, Field 1, v0, v1, Field 0, Field 1, v0, Field 0] : [Field; 9] + v6 = make_array [v0, Field 0] : [Field; 2] + v7 = make_array [Field 1, v0, Field 0] : [Field; 3] + v9 = call multi_scalar_mul(v7, v6) -> [Field; 3] + return v9 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } + + #[cfg(feature = "bn254")] + #[test] + fn partial_constant_folding() { + let src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v2 = make_array [Field 1, Field 0, v0, Field 0, Field 2, Field 0] : [Field; 6] + v3 = make_array [ + Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, v0, v1, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 9] + v4 = call multi_scalar_mul (v3, v2) -> [Field; 3] + return v4 + }"#; + let ssa = Ssa::from_str(src).unwrap(); + //First and last scalar/point are constant, so we should be left with the msm of the middle point and the folded constant point + let expected_src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v5 = make_array [Field 1, Field 0, v0, Field 0, Field 2, Field 0] : [Field; 6] + v7 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, v0, v1, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 9] + v8 = make_array [v0, Field 0, Field 1, Field 0] : [Field; 4] + v12 = make_array [v0, v1, Field 0, Field -3227352362257037263902424173275354266044964400219754872043023745437788450996, Field 8902249110305491597038405103722863701255802573786510474664632793109847672620, u1 0] : [Field; 6] + v14 = call multi_scalar_mul(v12, v8) -> [Field; 3] + return v14 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs index aa2952d5abc..29e79728303 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -1,8 +1,5 @@ //! This file is for pretty-printing the SSA IR in a human-readable form for debugging. -use std::{ - collections::HashSet, - fmt::{Formatter, Result}, -}; +use std::fmt::{Formatter, Result}; use acvm::acir::AcirField; use im::Vector; @@ -21,28 +18,10 @@ use super::{ /// Helper function for Function's Display impl to pretty-print the function with the given formatter. pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { writeln!(f, "{} fn {} {} {{", function.runtime(), function.name(), function.id())?; - display_block_with_successors(function, function.entry_block(), &mut HashSet::new(), f)?; - write!(f, "}}") -} - -/// Displays a block followed by all of its successors recursively. -/// This uses a HashSet to keep track of the visited blocks. Otherwise -/// there would be infinite recursion for any loops in the IR. 
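A note on the printer change below and the block reordering visible in the SSA snapshots throughout this diff: printing now iterates `function.reachable_blocks()` instead of recursing over each block's successors with a visited set, so the test expectations list blocks in a stable, id-ordered sequence (b1, b2, b3, …) rather than in traversal order. A small sketch of that idea over a toy CFG; `successors` and the set-based `reachable_blocks` here are illustrative, not the `noirc_evaluator` API:

```rust
use std::collections::{BTreeSet, VecDeque};

/// Toy CFG: block id -> successor ids.
fn successors(block: usize) -> Vec<usize> {
    match block {
        0 => vec![1],
        1 => vec![3, 2], // jmpif: "then" target listed before "else"
        3 => vec![1],    // loop back-edge
        _ => vec![],
    }
}

/// Collect every block reachable from the entry. Using a set avoids the
/// unbounded recursion the old recursive printer had to guard against, and
/// iterating a BTreeSet yields an id-ordered listing.
fn reachable_blocks(entry: usize) -> BTreeSet<usize> {
    let mut visited = BTreeSet::new();
    let mut queue = VecDeque::from([entry]);
    while let Some(block) = queue.pop_front() {
        if visited.insert(block) {
            queue.extend(successors(block));
        }
    }
    visited
}

fn main() {
    // Prints b0..b3 in id order, which is why the expected SSA above shows b2
    // before b3 even though b3 is the jmpif "then" target.
    for block in reachable_blocks(0) {
        println!("b{block}():");
    }
}
```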
-pub(crate) fn display_block_with_successors( - function: &Function, - block_id: BasicBlockId, - visited: &mut HashSet, - f: &mut Formatter, -) -> Result { - display_block(function, block_id, f)?; - visited.insert(block_id); - - for successor in function.dfg[block_id].successors() { - if !visited.contains(&successor) { - display_block_with_successors(function, successor, visited, f)?; - } + for block_id in function.reachable_blocks() { + display_block(function, block_id, f)?; } - Ok(()) + write!(f, "}}") } /// Display a single block. This will not display the block's successors. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs index 96de22600a4..09339cf0797 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs @@ -209,6 +209,8 @@ mod tests { b1(v0: u32): v8 = lt v0, u32 5 jmpif v8 then: b3, else: b2 + b2(): + return b3(): v9 = eq v0, u32 5 jmpif v9 then: b4, else: b5 @@ -224,8 +226,6 @@ mod tests { store v15 at v4 v17 = add v0, u32 1 jmp b1(v17) - b2(): - return } "; let ssa = Ssa::from_str(src).unwrap(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 93ca428c6d0..e039b8f0f9e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -1521,18 +1521,18 @@ mod test { b0(v0: u32): v2 = eq v0, u32 0 jmpif v2 then: b4, else: b1 - b4(): - v5 = sub v0, u32 1 - jmp b5() - b5(): - return b1(): jmpif v0 then: b3, else: b2 + b2(): + jmp b5() b3(): v4 = sub v0, u32 1 // We can't hoist this because v0 is zero here and it will lead to an underflow jmp b5() - b2(): + b4(): + v5 = sub v0, u32 1 jmp b5() + b5(): + return } "; let ssa = Ssa::from_str(src).unwrap(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs index 290d2a33846..87e7f8bcff3 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs @@ -251,13 +251,13 @@ mod test { b1(v2: u32): v5 = lt v2, u32 4 jmpif v5 then: b3, else: b2 + b2(): + return b3(): v6 = mul v0, v1 constrain v6 == u32 6 v8 = add v2, u32 1 jmp b1(v8) - b2(): - return } "; @@ -276,12 +276,12 @@ mod test { b1(v2: u32): v6 = lt v2, u32 4 jmpif v6 then: b3, else: b2 + b2(): + return b3(): constrain v3 == u32 6 v9 = add v2, u32 1 jmp b1(v9) - b2(): - return } "; @@ -300,21 +300,21 @@ mod test { b1(v2: u32): v6 = lt v2, u32 4 jmpif v6 then: b3, else: b2 + b2(): + return b3(): jmp b4(u32 0) b4(v3: u32): v7 = lt v3, u32 4 jmpif v7 then: b6, else: b5 + b5(): + v9 = add v2, u32 1 + jmp b1(v9) b6(): v10 = mul v0, v1 constrain v10 == u32 6 v12 = add v3, u32 1 jmp b4(v12) - b5(): - v9 = add v2, u32 1 - jmp b1(v9) - b2(): - return } "; @@ -333,20 +333,20 @@ mod test { b1(v2: u32): v7 = lt v2, u32 4 jmpif v7 then: b3, else: b2 + b2(): + return b3(): jmp b4(u32 0) b4(v3: u32): v8 = lt v3, u32 4 jmpif v8 then: b6, else: b5 + b5(): + v10 = add v2, u32 1 + jmp b1(v10) b6(): constrain v4 == u32 6 v12 = add v3, u32 1 jmp b4(v12) - b5(): - v10 = add v2, u32 1 - jmp b1(v10) - b2(): - return } "; @@ -374,6 +374,8 @@ mod test { b1(v2: u32): v5 = lt v2, u32 4 jmpif v5 then: b3, else: b2 + 
b2(): + return b3(): v6 = mul v0, v1 v7 = mul v6, v0 @@ -381,8 +383,6 @@ mod test { constrain v7 == u32 12 v9 = add v2, u32 1 jmp b1(v9) - b2(): - return } "; @@ -402,12 +402,12 @@ mod test { b1(v2: u32): v9 = lt v2, u32 4 jmpif v9 then: b3, else: b2 + b2(): + return b3(): constrain v4 == u32 12 v11 = add v2, u32 1 jmp b1(v11) - b2(): - return } "; @@ -431,17 +431,17 @@ mod test { b1(v2: u32): v7 = lt v2, u32 4 jmpif v7 then: b3, else: b2 + b2(): + v8 = load v5 -> [u32; 5] + v10 = array_get v8, index u32 2 -> u32 + constrain v10 == u32 3 + return b3(): v12 = load v5 -> [u32; 5] v13 = array_set v12, index v0, value v1 store v13 at v5 v15 = add v2, u32 1 jmp b1(v15) - b2(): - v8 = load v5 -> [u32; 5] - v10 = array_get v8, index u32 2 -> u32 - constrain v10 == u32 3 - return } "; @@ -485,16 +485,24 @@ mod test { b1(v2: u32): v9 = lt v2, u32 4 jmpif v9 then: b3, else: b2 + b2(): + return b3(): jmp b4(u32 0) b4(v3: u32): v10 = lt v3, u32 4 jmpif v10 then: b6, else: b5 + b5(): + v12 = add v2, u32 1 + jmp b1(v12) b6(): jmp b7(u32 0) b7(v4: u32): v13 = lt v4, u32 4 jmpif v13 then: b9, else: b8 + b8(): + v14 = add v3, u32 1 + jmp b4(v14) b9(): v15 = array_get v6, index v2 -> u32 v16 = eq v15, v0 @@ -504,14 +512,6 @@ mod test { constrain v17 == v0 v19 = add v4, u32 1 jmp b7(v19) - b8(): - v14 = add v3, u32 1 - jmp b4(v14) - b5(): - v12 = add v2, u32 1 - jmp b1(v12) - b2(): - return } "; @@ -526,6 +526,8 @@ mod test { b1(v2: u32): v9 = lt v2, u32 4 jmpif v9 then: b3, else: b2 + b2(): + return b3(): v10 = array_get v6, index v2 -> u32 v11 = eq v10, v0 @@ -533,6 +535,9 @@ mod test { b4(v3: u32): v12 = lt v3, u32 4 jmpif v12 then: b6, else: b5 + b5(): + v14 = add v2, u32 1 + jmp b1(v14) b6(): v15 = array_get v6, index v3 -> u32 v16 = eq v15, v0 @@ -540,19 +545,14 @@ mod test { b7(v4: u32): v17 = lt v4, u32 4 jmpif v17 then: b9, else: b8 + b8(): + v18 = add v3, u32 1 + jmp b4(v18) b9(): constrain v10 == v0 constrain v15 == v0 v19 = add v4, u32 1 jmp b7(v19) - b8(): - v18 = add v3, u32 1 - jmp b4(v18) - b5(): - v14 = add v2, u32 1 - jmp b1(v14) - b2(): - return } "; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 53a31ae57c1..77ad53df9cf 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -1121,11 +1121,6 @@ mod tests { b1(v0: Field): v4 = eq v0, Field 0 jmpif v4 then: b3, else: b2 - b3(): - v11 = load v3 -> &mut Field - store Field 2 at v11 - v13 = add v0, Field 1 - jmp b1(v13) b2(): v5 = load v1 -> Field v7 = eq v5, Field 2 @@ -1135,6 +1130,11 @@ mod tests { v10 = eq v9, Field 2 constrain v9 == Field 2 return + b3(): + v11 = load v3 -> &mut Field + store Field 2 at v11 + v13 = add v0, Field 1 + jmp b1(v13) } "; @@ -1157,11 +1157,6 @@ mod tests { b1(v0: Field): v4 = eq v0, Field 0 jmpif v4 then: b3, else: b2 - b3(): - v13 = load v3 -> &mut Field - store Field 2 at v13 - v15 = add v0, Field 1 - jmp b1(v15) b2(): v5 = load v1 -> Field v7 = eq v5, Field 2 @@ -1173,6 +1168,11 @@ mod tests { v12 = eq v11, Field 2 constrain v11 == Field 2 return + b3(): + v13 = load v3 -> &mut Field + store Field 2 at v13 + v15 = add v0, Field 1 + jmp b1(v15) } acir(inline) fn foo f1 { b0(v0: &mut Field): @@ -1195,6 +1195,10 @@ mod tests { acir(inline) fn main f0 { b0(v0: u1): jmpif v0 then: b2, else: b1 + b1(): + v4 = allocate -> &mut Field + store Field 1 at v4 + jmp b3(v4, v4, v4) b2(): v6 = allocate -> &mut Field store Field 0 at 
v6 @@ -1212,10 +1216,6 @@ mod tests { constrain v11 == Field 1 constrain v13 == Field 3 return - b1(): - v4 = allocate -> &mut Field - store Field 1 at v4 - jmp b3(v4, v4, v4) } "; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs index c282e2df451..e7f8d227d28 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs @@ -442,14 +442,14 @@ mod test { store Field 0 at v1 v3 = not v0 jmpif v0 then: b2, else: b1 + b1(): + store Field 2 at v1 + jmp b2() b2(): v5 = load v1 -> Field v6 = eq v5, Field 2 constrain v5 == Field 2 return - b1(): - store Field 2 at v1 - jmp b2() }"; assert_normalized_ssa_equals(ssa.simplify_cfg(), expected); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 1a13acc5435..22daba1de45 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -1337,12 +1337,15 @@ mod tests { b2(): v7 = eq v0, u32 2 jmpif v7 then: b7, else: b3 - b7(): - v18 = add v0, u32 1 - jmp b1(v18) b3(): v9 = eq v0, u32 5 jmpif v9 then: b5, else: b4 + b4(): + v10 = load v1 -> Field + v12 = add v10, Field 1 + store v12 at v1 + v14 = add v0, u32 1 + jmp b1(v14) b5(): jmp b6() b6(): @@ -1350,12 +1353,9 @@ mod tests { v17 = eq v15, Field 4 constrain v15 == Field 4 return - b4(): - v10 = load v1 -> Field - v12 = add v10, Field 1 - store v12 at v1 - v14 = add v0, u32 1 - jmp b1(v14) + b7(): + v18 = add v0, u32 1 + jmp b1(v18) } "; let ssa = Ssa::from_str(src).unwrap(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs index 6318f9dc56e..dab96dfa04f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs @@ -143,10 +143,10 @@ fn test_jmpif() { acir(inline) fn main f0 { b0(v0: Field): jmpif v0 then: b2, else: b1 - b2(): - return b1(): return + b2(): + return } "; assert_ssa_roundtrip(src); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 2fe0a38af00..91a49018f76 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -2,6 +2,7 @@ pub(crate) mod context; mod program; mod value; +use noirc_frontend::token::FmtStrFragment; pub(crate) use program::Ssa; use context::SharedContext; @@ -230,10 +231,26 @@ impl<'a> FunctionContext<'a> { Ok(self.builder.numeric_constant(*value as u128, Type::bool()).into()) } ast::Literal::Str(string) => Ok(self.codegen_string(string)), - ast::Literal::FmtStr(string, number_of_fields, fields) => { + ast::Literal::FmtStr(fragments, number_of_fields, fields) => { + let mut string = String::new(); + for fragment in fragments { + match fragment { + FmtStrFragment::String(value) => { + // Escape curly braces in non-interpolations + let value = value.replace('{', "{{").replace('}', "}}"); + string.push_str(&value); + } + FmtStrFragment::Interpolation(value, _span) => { + string.push('{'); + string.push_str(value); + string.push('}'); + } + } + } + // A caller needs multiple pieces of information to make use of a format string // The 
message string, the number of fields to be formatted, and the fields themselves - let string = self.codegen_string(string); + let string = self.codegen_string(&string); let field_count = self.builder.length_constant(*number_of_fields as u128); let fields = self.codegen_expression(fields)?; diff --git a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml index 5d1520af54f..5f8f02689c8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml @@ -25,7 +25,6 @@ num-bigint.workspace = true num-traits.workspace = true rustc-hash = "1.1.0" small-ord-set = "0.1.3" -regex = "1.9.1" cfg-if.workspace = true tracing.workspace = true petgraph = "0.6" diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index 2c8a9b6508d..ae622f46686 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -10,7 +10,7 @@ use crate::ast::{ use crate::node_interner::{ ExprId, InternedExpressionKind, InternedStatementKind, QuotedTypeId, StructId, }; -use crate::token::{Attributes, FunctionAttribute, Token, Tokens}; +use crate::token::{Attributes, FmtStrFragment, FunctionAttribute, Token, Tokens}; use crate::{Kind, Type}; use acvm::{acir::AcirField, FieldElement}; use iter_extended::vecmap; @@ -210,8 +210,8 @@ impl ExpressionKind { ExpressionKind::Literal(Literal::RawStr(contents, hashes)) } - pub fn format_string(contents: String) -> ExpressionKind { - ExpressionKind::Literal(Literal::FmtStr(contents)) + pub fn format_string(fragments: Vec, length: u32) -> ExpressionKind { + ExpressionKind::Literal(Literal::FmtStr(fragments, length)) } pub fn constructor( @@ -434,7 +434,7 @@ pub enum Literal { Integer(FieldElement, /*sign*/ bool), // false for positive integer and true for negative Str(String), RawStr(String, u8), - FmtStr(String), + FmtStr(Vec, u32 /* length */), Unit, } @@ -669,7 +669,13 @@ impl Display for Literal { std::iter::once('#').cycle().take(*num_hashes as usize).collect(); write!(f, "r{hashes}\"{string}\"{hashes}") } - Literal::FmtStr(string) => write!(f, "f\"{string}\""), + Literal::FmtStr(fragments, _length) => { + write!(f, "f\"")?; + for fragment in fragments { + fragment.fmt(f)?; + } + write!(f, "\"") + } Literal::Unit => write!(f, "()"), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs index f149c998eca..2f60532980a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs @@ -16,7 +16,7 @@ use crate::{ InternedUnresolvedTypeData, QuotedTypeId, }, parser::{Item, ItemKind, ParsedSubModule}, - token::{MetaAttribute, SecondaryAttribute, Tokens}, + token::{FmtStrFragment, MetaAttribute, SecondaryAttribute, Tokens}, ParsedModule, QuotedType, }; @@ -172,7 +172,7 @@ pub trait Visitor { fn visit_literal_raw_str(&mut self, _: &str, _: u8) {} - fn visit_literal_fmt_str(&mut self, _: &str) {} + fn visit_literal_fmt_str(&mut self, _: &[FmtStrFragment], _length: u32) {} fn visit_literal_unit(&mut self) {} @@ -900,7 +900,7 @@ impl Literal { Literal::Integer(value, negative) => visitor.visit_literal_integer(*value, *negative), Literal::Str(str) => visitor.visit_literal_str(str), Literal::RawStr(str, length) => visitor.visit_literal_raw_str(str, *length), - Literal::FmtStr(str) => 
visitor.visit_literal_fmt_str(str), + Literal::FmtStr(fragments, length) => visitor.visit_literal_fmt_str(fragments, *length), Literal::Unit => visitor.visit_literal_unit(), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index f801c1817ef..b5fab6faf9b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -1,7 +1,6 @@ use acvm::{AcirField, FieldElement}; use iter_extended::vecmap; use noirc_errors::{Location, Span}; -use regex::Regex; use rustc_hash::FxHashSet as HashSet; use crate::{ @@ -29,7 +28,7 @@ use crate::{ traits::{ResolvedTraitBound, TraitConstraint}, }, node_interner::{DefinitionKind, ExprId, FuncId, InternedStatementKind, TraitMethodId}, - token::Tokens, + token::{FmtStrFragment, Tokens}, Kind, QuotedType, Shared, StructType, Type, }; @@ -167,7 +166,7 @@ impl<'context> Elaborator<'context> { let len = Type::Constant(str.len().into(), Kind::u32()); (Lit(HirLiteral::Str(str)), Type::String(Box::new(len))) } - Literal::FmtStr(str) => self.elaborate_fmt_string(str, span), + Literal::FmtStr(fragments, length) => self.elaborate_fmt_string(fragments, length), Literal::Array(array_literal) => { self.elaborate_array_literal(array_literal, span, true) } @@ -234,53 +233,50 @@ impl<'context> Elaborator<'context> { (HirExpression::Literal(constructor(expr)), typ) } - fn elaborate_fmt_string(&mut self, str: String, call_expr_span: Span) -> (HirExpression, Type) { - let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}") - .expect("ICE: an invalid regex pattern was used for checking format strings"); - + fn elaborate_fmt_string( + &mut self, + fragments: Vec, + length: u32, + ) -> (HirExpression, Type) { let mut fmt_str_idents = Vec::new(); let mut capture_types = Vec::new(); - for field in re.find_iter(&str) { - let matched_str = field.as_str(); - let ident_name = &matched_str[1..(matched_str.len() - 1)]; - - let scope_tree = self.scopes.current_scope_tree(); - let variable = scope_tree.find(ident_name); - - let hir_ident = if let Some((old_value, _)) = variable { - old_value.num_times_used += 1; - old_value.ident.clone() - } else if let Ok((definition_id, _)) = - self.lookup_global(Path::from_single(ident_name.to_string(), call_expr_span)) - { - HirIdent::non_trait_method(definition_id, Location::new(call_expr_span, self.file)) - } else if ident_name.parse::().is_ok() { - self.push_err(ResolverError::NumericConstantInFormatString { - name: ident_name.to_owned(), - span: call_expr_span, - }); - continue; - } else { - self.push_err(ResolverError::VariableNotDeclared { - name: ident_name.to_owned(), - span: call_expr_span, - }); - continue; - }; + for fragment in &fragments { + if let FmtStrFragment::Interpolation(ident_name, string_span) = fragment { + let scope_tree = self.scopes.current_scope_tree(); + let variable = scope_tree.find(ident_name); + + let hir_ident = if let Some((old_value, _)) = variable { + old_value.num_times_used += 1; + old_value.ident.clone() + } else if let Ok((definition_id, _)) = + self.lookup_global(Path::from_single(ident_name.to_string(), *string_span)) + { + HirIdent::non_trait_method( + definition_id, + Location::new(*string_span, self.file), + ) + } else { + self.push_err(ResolverError::VariableNotDeclared { + name: ident_name.to_owned(), + span: *string_span, + }); + continue; + }; - let hir_expr = HirExpression::Ident(hir_ident.clone(), None); - let expr_id = 
self.interner.push_expr(hir_expr); - self.interner.push_expr_location(expr_id, call_expr_span, self.file); - let typ = self.type_check_variable(hir_ident, expr_id, None); - self.interner.push_expr_type(expr_id, typ.clone()); - capture_types.push(typ); - fmt_str_idents.push(expr_id); + let hir_expr = HirExpression::Ident(hir_ident.clone(), None); + let expr_id = self.interner.push_expr(hir_expr); + self.interner.push_expr_location(expr_id, *string_span, self.file); + let typ = self.type_check_variable(hir_ident, expr_id, None); + self.interner.push_expr_type(expr_id, typ.clone()); + capture_types.push(typ); + fmt_str_idents.push(expr_id); + } } - let len = Type::Constant(str.len().into(), Kind::u32()); + let len = Type::Constant(length.into(), Kind::u32()); let typ = Type::FmtString(Box::new(len), Box::new(Type::Tuple(capture_types))); - (HirExpression::Literal(HirLiteral::FmtStr(str, fmt_str_idents)), typ) + (HirExpression::Literal(HirLiteral::FmtStr(fragments, fmt_str_idents, length)), typ) } fn elaborate_prefix(&mut self, prefix: PrefixExpression, span: Span) -> (ExprId, Type) { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs index 560d11cfa2e..29d1448f07e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -661,7 +661,7 @@ fn remove_interned_in_literal(interner: &NodeInterner, literal: Literal) -> Lite | Literal::Integer(_, _) | Literal::Str(_) | Literal::RawStr(_, _) - | Literal::FmtStr(_) + | Literal::FmtStr(_, _) | Literal::Unit => literal, } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs index 446c4dae2d3..3df20b39209 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -240,6 +240,9 @@ pub enum InterpreterError { err: Box, location: Location, }, + CannotInterpretFormatStringWithErrors { + location: Location, + }, // These cases are not errors, they are just used to prevent us from running more code // until the loop can be resumed properly. These cases will never be displayed to users. @@ -315,7 +318,8 @@ impl InterpreterError { | InterpreterError::TypeAnnotationsNeededForMethodCall { location } | InterpreterError::CannotResolveExpression { location, .. } | InterpreterError::CannotSetFunctionBody { location, .. } - | InterpreterError::UnknownArrayLength { location, .. } => *location, + | InterpreterError::UnknownArrayLength { location, .. } + | InterpreterError::CannotInterpretFormatStringWithErrors { location } => *location, InterpreterError::FailedToParseMacro { error, file, .. 
} => { Location::new(error.span(), *file) @@ -664,6 +668,12 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let secondary = format!("Evaluating the length failed with: `{err}`"); CustomDiagnostic::simple_error(msg, secondary, location.span) } + InterpreterError::CannotInterpretFormatStringWithErrors { location } => { + let msg = "Cannot interpret format string with errors".to_string(); + let secondary = + "Some of the variables to interpolate could not be evaluated".to_string(); + CustomDiagnostic::simple_error(msg, secondary, location.span) + } } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index 5540a199cec..9338c0fc37f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -121,9 +121,9 @@ impl HirExpression { HirExpression::Literal(HirLiteral::Str(string)) => { ExpressionKind::Literal(Literal::Str(string.clone())) } - HirExpression::Literal(HirLiteral::FmtStr(string, _exprs)) => { + HirExpression::Literal(HirLiteral::FmtStr(fragments, _exprs, length)) => { // TODO: Is throwing away the exprs here valid? - ExpressionKind::Literal(Literal::FmtStr(string.clone())) + ExpressionKind::Literal(Literal::FmtStr(fragments.clone(), *length)) } HirExpression::Literal(HirLiteral::Unit) => ExpressionKind::Literal(Literal::Unit), HirExpression::Block(expr) => ExpressionKind::Block(expr.to_display_ast(interner)), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 49fd86b73bb..dfa55a9d79b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -20,7 +20,7 @@ use crate::monomorphization::{ perform_impl_bindings, perform_instantiation_bindings, resolve_trait_method, undo_instantiation_bindings, }; -use crate::token::Tokens; +use crate::token::{FmtStrFragment, Tokens}; use crate::TypeVariable; use crate::{ hir_def::{ @@ -623,8 +623,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.evaluate_integer(value, is_negative, id) } HirLiteral::Str(string) => Ok(Value::String(Rc::new(string))), - HirLiteral::FmtStr(string, captures) => { - self.evaluate_format_string(string, captures, id) + HirLiteral::FmtStr(fragments, captures, _length) => { + self.evaluate_format_string(fragments, captures, id) } HirLiteral::Array(array) => self.evaluate_array(array, id), HirLiteral::Slice(array) => self.evaluate_slice(array, id), @@ -633,7 +633,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { fn evaluate_format_string( &mut self, - string: String, + fragments: Vec, captures: Vec, id: ExprId, ) -> IResult { @@ -644,13 +644,12 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let mut values: VecDeque<_> = captures.into_iter().map(|capture| self.evaluate(capture)).collect::>()?; - for character in string.chars() { - match character { - '\\' => escaped = true, - '{' if !escaped => consuming = true, - '}' if !escaped && consuming => { - consuming = false; - + for fragment in fragments { + match fragment { + FmtStrFragment::String(string) => { + result.push_str(&string); + } + FmtStrFragment::Interpolation(_, span) => { if let Some(value) = values.pop_front() { // When interpolating a quoted value inside a format 
string, we don't include the // surrounding `quote {` ... `}` as if we are unquoting the quoted value inside the string. @@ -665,13 +664,15 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } else { result.push_str(&value.display(self.elaborator.interner).to_string()); } + } else { + // If we can't find a value for this fragment it means the interpolated value was not + // found or it errored. In this case we error here as well. + let location = self.elaborator.interner.expr_location(&id); + return Err(InterpreterError::CannotInterpretFormatStringWithErrors { + location, + }); } } - other if !consuming => { - escaped = false; - result.push(other); - } - _ => (), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index 5c8e0a1b53e..774836f8992 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -77,8 +77,6 @@ pub enum ResolverError { MutableReferenceToImmutableVariable { variable: String, span: Span }, #[error("Mutable references to array indices are unsupported")] MutableReferenceToArrayElement { span: Span }, - #[error("Numeric constants should be printed without formatting braces")] - NumericConstantInFormatString { name: String, span: Span }, #[error("Closure environment must be a tuple or unit type")] InvalidClosureEnvironment { typ: Type, span: Span }, #[error("Nested slices, i.e. slices within an array or slice, are not supported")] @@ -378,11 +376,6 @@ impl<'a> From<&'a ResolverError> for Diagnostic { ResolverError::MutableReferenceToArrayElement { span } => { Diagnostic::simple_error("Mutable references to array elements are currently unsupported".into(), "Try storing the element in a fresh variable first".into(), *span) }, - ResolverError::NumericConstantInFormatString { name, span } => Diagnostic::simple_error( - format!("cannot find `{name}` in this scope "), - "Numeric constants should be printed without formatting braces".to_string(), - *span, - ), ResolverError::InvalidClosureEnvironment { span, typ } => Diagnostic::simple_error( format!("{typ} is not a valid closure environment type"), "Closure environment must be a tuple or unit type".to_string(), *span), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs index 5d3fe632a74..e243fc88cff 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs @@ -7,7 +7,7 @@ use crate::hir::type_check::generics::TraitGenerics; use crate::node_interner::{ DefinitionId, DefinitionKind, ExprId, FuncId, NodeInterner, StmtId, TraitMethodId, }; -use crate::token::Tokens; +use crate::token::{FmtStrFragment, Tokens}; use crate::Shared; use super::stmt::HirPattern; @@ -114,7 +114,7 @@ pub enum HirLiteral { Bool(bool), Integer(FieldElement, bool), //true for negative integer and false for positive Str(String), - FmtStr(String, Vec), + FmtStr(Vec, Vec, u32 /* length */), Unit, } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs index 8d799ef35d1..f95ccba061a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs @@ -30,6 +30,10 @@ pub enum LexerErrorKind { UnterminatedBlockComment { span: Span }, 
#[error("Unterminated string literal")] UnterminatedStringLiteral { span: Span }, + #[error("Invalid format string: expected '}}', found {found:?}")] + InvalidFormatString { found: char, span: Span }, + #[error("Invalid format string: expected letter or underscore, found '}}'")] + EmptyFormatStringInterpolation { span: Span }, #[error( "'\\{escaped}' is not a valid escape sequence. Use '\\' for a literal backslash character." )] @@ -68,6 +72,8 @@ impl LexerErrorKind { LexerErrorKind::LogicalAnd { span } => *span, LexerErrorKind::UnterminatedBlockComment { span } => *span, LexerErrorKind::UnterminatedStringLiteral { span } => *span, + LexerErrorKind::InvalidFormatString { span, .. } => *span, + LexerErrorKind::EmptyFormatStringInterpolation { span, .. } => *span, LexerErrorKind::InvalidEscape { span, .. } => *span, LexerErrorKind::InvalidQuoteDelimiter { delimiter } => delimiter.to_span(), LexerErrorKind::NonAsciiComment { span, .. } => *span, @@ -130,6 +136,32 @@ impl LexerErrorKind { LexerErrorKind::UnterminatedBlockComment { span } => ("Unterminated block comment".to_string(), "Unterminated block comment".to_string(), *span), LexerErrorKind::UnterminatedStringLiteral { span } => ("Unterminated string literal".to_string(), "Unterminated string literal".to_string(), *span), + LexerErrorKind::InvalidFormatString { found, span } => { + if found == &'}' { + ( + "Invalid format string: unmatched '}}' found".to_string(), + "If you intended to print '}', you can escape it using '}}'".to_string(), + *span, + ) + } else { + ( + format!("Invalid format string: expected '}}', found {found:?}"), + if found == &'.' { + "Field access isn't supported in format strings".to_string() + } else { + "If you intended to print '{', you can escape it using '{{'".to_string() + }, + *span, + ) + } + } + LexerErrorKind::EmptyFormatStringInterpolation { span } => { + ( + "Invalid format string: expected letter or underscore, found '}}'".to_string(), + "If you intended to print '{' or '}', you can escape them using '{{' and '}}' respectively".to_string(), + *span, + ) + } LexerErrorKind::InvalidEscape { escaped, span } => (format!("'\\{escaped}' is not a valid escape sequence. 
Use '\\' for a literal backslash character."), "Invalid escape sequence".to_string(), *span), LexerErrorKind::InvalidQuoteDelimiter { delimiter } => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs index 68dc142ff10..a5c4b2cd772 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs @@ -2,7 +2,7 @@ use crate::token::DocStyle; use super::{ errors::LexerErrorKind, - token::{IntType, Keyword, SpannedToken, Token, Tokens}, + token::{FmtStrFragment, IntType, Keyword, SpannedToken, Token, Tokens}, }; use acvm::{AcirField, FieldElement}; use noirc_errors::{Position, Span}; @@ -411,51 +411,190 @@ impl<'a> Lexer<'a> { let start = self.position; let mut string = String::new(); - while let Some(next) = self.next_char() { - let char = match next { - '"' => break, - '\\' => match self.next_char() { - Some('r') => '\r', - Some('n') => '\n', - Some('t') => '\t', - Some('0') => '\0', - Some('"') => '"', - Some('\\') => '\\', - Some(escaped) => { - let span = Span::inclusive(start, self.position); - return Err(LexerErrorKind::InvalidEscape { escaped, span }); - } - None => { - let span = Span::inclusive(start, self.position); - return Err(LexerErrorKind::UnterminatedStringLiteral { span }); - } - }, - other => other, - }; + loop { + if let Some(next) = self.next_char() { + let char = match next { + '"' => break, + '\\' => match self.next_char() { + Some('r') => '\r', + Some('n') => '\n', + Some('t') => '\t', + Some('0') => '\0', + Some('"') => '"', + Some('\\') => '\\', + Some(escaped) => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::InvalidEscape { escaped, span }); + } + None => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + }, + other => other, + }; - string.push(char); + string.push(char); + } else { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } } let str_literal_token = Token::Str(string); - let end = self.position; Ok(str_literal_token.into_span(start, end)) } - // This differs from `eat_string_literal` in that we want the leading `f` to be captured in the Span fn eat_fmt_string(&mut self) -> SpannedTokenResult { let start = self.position; - self.next_char(); - let str_literal = self.eat_while(None, |ch| ch != '"'); + let mut fragments = Vec::new(); + let mut length = 0; + + loop { + // String fragment until '{' or '"' + let mut string = String::new(); + let mut found_curly = false; + + loop { + if let Some(next) = self.next_char() { + let char = match next { + '"' => break, + '\\' => match self.next_char() { + Some('r') => '\r', + Some('n') => '\n', + Some('t') => '\t', + Some('0') => '\0', + Some('"') => '"', + Some('\\') => '\\', + Some(escaped) => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::InvalidEscape { escaped, span }); + } + None => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + }, + '{' if self.peek_char_is('{') => { + self.next_char(); + '{' + } + '}' if self.peek_char_is('}') => { + self.next_char(); + '}' + } + '}' => { + let error_position = self.position; + + // Keep consuming chars until we find the closing double quote + self.skip_until_string_end(); + + let span = Span::inclusive(error_position, error_position); + 
return Err(LexerErrorKind::InvalidFormatString { found: '}', span }); + } + '{' => { + found_curly = true; + break; + } + other => other, + }; + + string.push(char); + length += 1; + + if char == '{' || char == '}' { + // This might look a bit strange, but if there's `{{` or `}}` in the format string + // then it will be `{` and `}` in the string fragment respectively, but on the codegen + // phase it will be translated back to `{{` and `}}` to avoid executing an interpolation, + // thus the actual length of the codegen'd string will be one more than what we get here. + // + // We could just make the fragment include the double curly braces, but then the interpreter + // would need to undo the curly braces, so it's simpler to add them during codegen. + length += 1; + } + } else { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + } + + if !string.is_empty() { + fragments.push(FmtStrFragment::String(string)); + } + + if !found_curly { + break; + } + + length += 1; // for the curly brace + + // Interpolation fragment until '}' or '"' + let mut string = String::new(); + let interpolation_start = self.position + 1; // + 1 because we are at '{' + let mut first_char = true; + while let Some(next) = self.next_char() { + let char = match next { + '}' => { + if string.is_empty() { + let error_position = self.position; + + // Keep consuming chars until we find the closing double quote + self.skip_until_string_end(); + + let span = Span::inclusive(error_position, error_position); + return Err(LexerErrorKind::EmptyFormatStringInterpolation { span }); + } + + break; + } + other => { + let is_valid_char = if first_char { + other.is_ascii_alphabetic() || other == '_' + } else { + other.is_ascii_alphanumeric() || other == '_' + }; + if !is_valid_char { + let error_position = self.position; + + // Keep consuming chars until we find the closing double quote + // (unless we bumped into a double quote now, in which case we are done) + if other != '"' { + self.skip_until_string_end(); + } - let str_literal_token = Token::FmtStr(str_literal); + let span = Span::inclusive(error_position, error_position); + return Err(LexerErrorKind::InvalidFormatString { found: other, span }); + } + first_char = false; + other + } + }; + length += 1; + string.push(char); + } + + length += 1; // for the closing curly brace - self.next_char(); // Advance past the closing quote + let interpolation_span = Span::from(interpolation_start..self.position); + fragments.push(FmtStrFragment::Interpolation(string, interpolation_span)); + } + let token = Token::FmtStr(fragments, length); let end = self.position; - Ok(str_literal_token.into_span(start, end)) + Ok(token.into_span(start, end)) + } + + fn skip_until_string_end(&mut self) { + while let Some(next) = self.next_char() { + if next == '\'' && self.peek_char_is('"') { + self.next_char(); + } else if next == '"' { + break; + } + } } fn eat_format_string_or_alpha_numeric(&mut self) -> SpannedTokenResult { @@ -962,6 +1101,155 @@ mod tests { } } + #[test] + fn test_eat_string_literal_with_escapes() { + let input = "let _word = \"hello\\n\\t\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::Str("hello\n\t".to_string()), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_string_literal_missing_double_quote() { + let 
input = "\"hello"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::UnterminatedStringLiteral { .. }) + )); + } + + #[test] + fn test_eat_fmt_string_literal_without_interpolations() { + let input = "let _word = f\"hello\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr(vec![FmtStrFragment::String("hello".to_string())], 5), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_with_escapes_without_interpolations() { + let input = "let _word = f\"hello\\n\\t{{x}}\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr(vec![FmtStrFragment::String("hello\n\t{x}".to_string())], 12), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_with_interpolations() { + let input = "let _word = f\"hello {world} and {_another} {vAr_123}\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr( + vec![ + FmtStrFragment::String("hello ".to_string()), + FmtStrFragment::Interpolation("world".to_string(), Span::from(21..26)), + FmtStrFragment::String(" and ".to_string()), + FmtStrFragment::Interpolation("_another".to_string(), Span::from(33..41)), + FmtStrFragment::String(" ".to_string()), + FmtStrFragment::Interpolation("vAr_123".to_string(), Span::from(44..51)), + ], + 38, + ), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap().into_token(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_missing_double_quote() { + let input = "f\"hello"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::UnterminatedStringLiteral { .. }) + )); + } + + #[test] + fn test_eat_fmt_string_literal_invalid_char_in_interpolation() { + let input = "f\"hello {foo.bar}\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. }))); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_double_quote_inside_interpolation() { + let input = "f\"hello {world\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. }))); + + // Make sure the lexer stopped parsing the string literal when it found \" inside the interpolation + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_unmatched_closing_curly() { + let input = "f\"hello }\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. 
}))); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_empty_interpolation() { + let input = "f\"{}\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::EmptyFormatStringInterpolation { .. }) + )); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + #[test] fn test_eat_integer_literals() { let test_cases: Vec<(&str, Token)> = vec![ @@ -1151,7 +1439,7 @@ mod tests { format!("let s = r#####\"{s}\"#####;"), ], ), - (Some(Token::FmtStr("".to_string())), vec![format!("assert(x == y, f\"{s}\");")]), + (Some(Token::FmtStr(vec![], 0)), vec![format!("assert(x == y, f\"{s}\");")]), // expected token not found // (Some(Token::LineComment("".to_string(), None)), vec![ (None, vec![format!("//{s}"), format!("// {s}")]), @@ -1196,11 +1484,16 @@ mod tests { Err(LexerErrorKind::InvalidIntegerLiteral { .. }) | Err(LexerErrorKind::UnexpectedCharacter { .. }) | Err(LexerErrorKind::NonAsciiComment { .. }) - | Err(LexerErrorKind::UnterminatedBlockComment { .. }) => { + | Err(LexerErrorKind::UnterminatedBlockComment { .. }) + | Err(LexerErrorKind::UnterminatedStringLiteral { .. }) + | Err(LexerErrorKind::InvalidFormatString { .. }) => { expected_token_found = true; } Err(err) => { - panic!("Unexpected lexer error found: {:?}", err) + panic!( + "Unexpected lexer error found {:?} for input string {:?}", + err, blns_program_str + ) } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs index 836161c7c9f..f35515045db 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs @@ -25,7 +25,7 @@ pub enum BorrowedToken<'input> { Str(&'input str), /// the u8 is the number of hashes, i.e. r###.. RawStr(&'input str, u8), - FmtStr(&'input str), + FmtStr(&'input [FmtStrFragment], u32 /* length */), Keyword(Keyword), IntType(IntType), AttributeStart { @@ -136,7 +136,7 @@ pub enum Token { Str(String), /// the u8 is the number of hashes, i.e. r###.. 
RawStr(String, u8), - FmtStr(String), + FmtStr(Vec<FmtStrFragment>, u32 /* length */), Keyword(Keyword), IntType(IntType), AttributeStart { @@ -255,7 +255,7 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { Token::Int(n) => BorrowedToken::Int(*n), Token::Bool(b) => BorrowedToken::Bool(*b), Token::Str(ref b) => BorrowedToken::Str(b), - Token::FmtStr(ref b) => BorrowedToken::FmtStr(b), + Token::FmtStr(ref b, length) => BorrowedToken::FmtStr(b, *length), Token::RawStr(ref b, hashes) => BorrowedToken::RawStr(b, *hashes), Token::Keyword(k) => BorrowedToken::Keyword(*k), Token::AttributeStart { is_inner, is_tag } => { @@ -312,6 +312,35 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { } } +#[derive(Clone, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] +pub enum FmtStrFragment { + String(String), + Interpolation(String, Span), +} + +impl Display for FmtStrFragment { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + FmtStrFragment::String(string) => { + // Undo the escapes when displaying the fmt string + let string = string + .replace('{', "{{") + .replace('}', "}}") + .replace('\r', "\\r") + .replace('\n', "\\n") + .replace('\t', "\\t") + .replace('\0', "\\0") + .replace('\'', "\\'") + .replace('\"', "\\\""); + write!(f, "{}", string) + } + FmtStrFragment::Interpolation(string, _span) => { + write!(f, "{{{}}}", string) + } + } + } +} + #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] pub enum DocStyle { Outer, @@ -375,7 +404,7 @@ impl fmt::Display for Token { Token::Int(n) => write!(f, "{}", n), Token::Bool(b) => write!(f, "{b}"), Token::Str(ref b) => write!(f, "{b:?}"), - Token::FmtStr(ref b) => write!(f, "f{b:?}"), + Token::FmtStr(ref b, _length) => write!(f, "f{b:?}"), Token::RawStr(ref b, hashes) => { let h: String = std::iter::once('#').cycle().take(hashes as usize).collect(); write!(f, "r{h}{b:?}{h}") @@ -515,7 +544,7 @@ impl Token { | Token::Bool(_) | Token::Str(_) | Token::RawStr(..)
- | Token::FmtStr(_) => TokenKind::Literal, + | Token::FmtStr(_, _) => TokenKind::Literal, Token::Keyword(_) => TokenKind::Keyword, Token::UnquoteMarker(_) => TokenKind::UnquoteMarker, Token::Quote(_) => TokenKind::Quote, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs index 5d9b66f4f96..c9ae3438e42 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -7,11 +7,11 @@ use noirc_errors::{ Location, }; -use crate::hir_def::function::FunctionSignature; use crate::{ ast::{BinaryOpKind, IntegerBitSize, Signedness, Visibility}, token::{Attributes, FunctionAttribute}, }; +use crate::{hir_def::function::FunctionSignature, token::FmtStrFragment}; use serde::{Deserialize, Serialize}; use super::HirType; @@ -112,7 +112,7 @@ pub enum Literal { Bool(bool), Unit, Str(String), - FmtStr(String, u64, Box<Expression>), + FmtStr(Vec<FmtStrFragment>, u64, Box<Expression>), } #[derive(Debug, Clone, Hash)] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index 050f844146a..b31a5744d09 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -12,6 +12,7 @@ use crate::ast::{FunctionKind, IntegerBitSize, Signedness, UnaryOp, Visibility}; use crate::hir::comptime::InterpreterError; use crate::hir::type_check::{NoMatchingImplFoundError, TypeCheckError}; use crate::node_interner::{ExprId, ImplSearchErrorKind}; +use crate::token::FmtStrFragment; use crate::{ debug::DebugInstrumenter, hir_def::{ @@ -417,10 +418,10 @@ impl<'interner> Monomorphizer<'interner> { let expr = match self.interner.expression(&expr) { HirExpression::Ident(ident, generics) => self.ident(ident, expr, generics)?, HirExpression::Literal(HirLiteral::Str(contents)) => Literal(Str(contents)), - HirExpression::Literal(HirLiteral::FmtStr(contents, idents)) => { + HirExpression::Literal(HirLiteral::FmtStr(fragments, idents, _length)) => { let fields = try_vecmap(idents, |ident| self.expr(ident))?; Literal(FmtStr( - contents, + fragments, fields.len() as u64, Box::new(ast::Expression::Tuple(fields)), )) @@ -1846,7 +1847,7 @@ impl<'interner> Monomorphizer<'interner> { _ => unreachable!("ICE: format string fields should be structured in a tuple, but got a {zeroed_tuple}"), }; ast::Expression::Literal(ast::Literal::FmtStr( - "\0".repeat(*length as usize), + vec![FmtStrFragment::String("\0".repeat(*length as usize))], fields_len, Box::new(zeroed_tuple), )) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs index b6421b26a03..9c1072a4117 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs @@ -105,9 +105,11 @@ impl AstPrinter { super::ast::Literal::Integer(x, _, _, _) => x.fmt(f), super::ast::Literal::Bool(x) => x.fmt(f), super::ast::Literal::Str(s) => s.fmt(f), - super::ast::Literal::FmtStr(s, _, _) => { + super::ast::Literal::FmtStr(fragments, _, _) => { write!(f, "f\"")?; - s.fmt(f)?; + for fragment in fragments { + fragment.fmt(f)?; + } write!(f, "\"") } super::ast::Literal::Unit => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs
b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index c2f7b781873..fcc58c5d833 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -5,7 +5,7 @@ use noirc_errors::Span; use crate::{ ast::{Ident, ItemVisibility}, lexer::{Lexer, SpannedTokenResult}, - token::{IntType, Keyword, SpannedToken, Token, TokenKind, Tokens}, + token::{FmtStrFragment, IntType, Keyword, SpannedToken, Token, TokenKind, Tokens}, }; use super::{labels::ParsingRuleLabel, ParsedModule, ParserError, ParserErrorReason}; @@ -294,11 +294,11 @@ impl<'a> Parser<'a> { } } - fn eat_fmt_str(&mut self) -> Option<String> { + fn eat_fmt_str(&mut self) -> Option<(Vec<FmtStrFragment>, u32)> { if matches!(self.token.token(), Token::FmtStr(..)) { let token = self.bump(); match token.into_token() { - Token::FmtStr(string) => Some(string), + Token::FmtStr(fragments, length) => Some((fragments, length)), _ => unreachable!(), } } else { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs index e1ecc972eeb..526a0c3dd6e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs @@ -577,7 +577,7 @@ impl<'a> Parser<'a> { /// BlockExpression = Block fn parse_literal(&mut self) -> Option<ExpressionKind> { if let Some(bool) = self.eat_bool() { - return Some(ExpressionKind::Literal(Literal::Bool(bool))); + return Some(ExpressionKind::boolean(bool)); } if let Some(int) = self.eat_int() { @@ -585,15 +585,15 @@ } if let Some(string) = self.eat_str() { - return Some(ExpressionKind::Literal(Literal::Str(string))); + return Some(ExpressionKind::string(string)); } if let Some((string, n)) = self.eat_raw_str() { - return Some(ExpressionKind::Literal(Literal::RawStr(string, n))); + return Some(ExpressionKind::raw_string(string, n)); } - if let Some(string) = self.eat_fmt_str() { - return Some(ExpressionKind::Literal(Literal::FmtStr(string))); + if let Some((fragments, length)) = self.eat_fmt_str() { + return Some(ExpressionKind::format_string(fragments, length)); } if let Some(tokens) = self.eat_quote() { @@ -865,10 +865,11 @@ mod tests { fn parses_fmt_str() { let src = "f\"hello\""; let expr = parse_expression_no_errors(src); - let ExpressionKind::Literal(Literal::FmtStr(string)) = expr.kind else { + let ExpressionKind::Literal(Literal::FmtStr(fragments, length)) = expr.kind else { panic!("Expected format string literal"); }; - assert_eq!(string, "hello"); + assert_eq!(fragments[0].to_string(), "hello"); + assert_eq!(length, 5); } #[test] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index cba29d58ea3..8ddf1b571e6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -1209,8 +1209,6 @@ fn resolve_fmt_strings() { let string = f"this is i: {i}"; println(string); - println(f"I want to print {0}"); - let new_val = 10; println(f"random_string{new_val}{new_val}"); } @@ -1220,7 +1218,7 @@ "#; let errors = get_program_errors(src); - assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); + assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); for (err, _file_id) in errors { match &err { -
CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { - name, - .. - }) => { - assert_eq!(name, "0"); - } CompilationError::TypeError(TypeCheckError::UnusedResultError { expr_type: _, expr_span, }) => { let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); assert!( - a == "println(string)" - || a == "println(f\"I want to print {0}\")" - || a == "println(f\"random_string{new_val}{new_val}\")" + a == "println(string)" || a == "println(f\"random_string{new_val}{new_val}\")" ); } _ => unimplemented!(), diff --git a/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml b/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml index 8bb56703e8a..8d0574aad64 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml @@ -14,7 +14,6 @@ workspace = true [dependencies] acvm.workspace = true iter-extended.workspace = true -regex = "1.9.1" serde.workspace = true serde_json.workspace = true thiserror.workspace = true diff --git a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs index 838a2472125..d46b37c4ea2 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs @@ -2,7 +2,7 @@ use std::{collections::BTreeMap, str}; use acvm::{acir::AcirField, brillig_vm::brillig::ForeignCallParam}; use iter_extended::vecmap; -use regex::{Captures, Regex}; + use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -253,24 +253,6 @@ fn to_string<F: AcirField>(value: &PrintableValue<F>, typ: &PrintableType) -> Option<String> { Some(output) } -// Taken from Regex docs directly -fn replace_all<E>( - re: &Regex, - haystack: &str, - mut replacement: impl FnMut(&Captures) -> Result<String, E>, -) -> Result<String, E> { - let mut new = String::with_capacity(haystack.len()); - let mut last_match = 0; - for caps in re.captures_iter(haystack) { - let m = caps.get(0).unwrap(); - new.push_str(&haystack[last_match..m.start()]); - new.push_str(&replacement(&caps)?); - last_match = m.end(); - } - new.push_str(&haystack[last_match..]); - Ok(new) -} - impl<F: AcirField> std::fmt::Display for PrintableValueDisplay<F> { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -279,18 +261,56 @@ impl<F: AcirField> std::fmt::Display for PrintableValueDisplay<F> { write!(fmt, "{output_string}") } Self::FmtString(template, values) => { - let mut display_iter = values.iter(); - let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}").map_err(|_| std::fmt::Error)?; + let mut values_iter = values.iter(); + write_template_replacing_interpolations(template, fmt, || { + values_iter.next().and_then(|(value, typ)| to_string(value, typ)) + }) + } + } + } +} + +fn write_template_replacing_interpolations( + template: &str, + fmt: &mut std::fmt::Formatter<'_>, + mut replacement: impl FnMut() -> Option<String>, +) -> std::fmt::Result { + let mut last_index = 0; // How far we've written from the template + let mut char_indices = template.char_indices().peekable(); + while let Some((char_index, char)) = char_indices.next() { + // Keep going forward until we find a '{' + if char != '{' { + continue; + } - let formatted_str = replace_all(&re, template, |_: &Captures| { - let (value, typ) = display_iter.next().ok_or(std::fmt::Error)?; - to_string(value, typ).ok_or(std::fmt::Error) - })?; + // We'll either have to write an interpolation or '{{' if it's an escape, + // so let's write what we've seen so far in the template.
+ write!(fmt, "{}", &template[last_index..char_index])?; - write!(fmt, "{formatted_str}") + // If it's '{{', write '{' and keep going + if char_indices.peek().map(|(_, char)| char) == Some(&'{') { + write!(fmt, "{{")?; + (last_index, _) = char_indices.next().unwrap(); + continue; + } + + // Write the interpolation + if let Some(string) = replacement() { + write!(fmt, "{}", string)?; + } else { + return Err(std::fmt::Error); + } + + // Whatever was inside '{...}' doesn't matter, so skip until we find '}' + while let Some((_, char)) = char_indices.next() { + if char == '}' { + last_index = char_indices.peek().map(|(index, _)| *index).unwrap_or(template.len()); + break; } } } + + write!(fmt, "{}", &template[last_index..]) } /// This trims any leading zeroes. @@ -390,3 +410,40 @@ pub fn decode_string_value(field_elements: &[F]) -> String { let final_string = str::from_utf8(&string_as_slice).unwrap(); final_string.to_owned() } + +#[cfg(test)] +mod tests { + use acvm::FieldElement; + + use crate::{PrintableType, PrintableValue, PrintableValueDisplay}; + + #[test] + fn printable_value_display_to_string_without_interpolations() { + let template = "hello"; + let display = + PrintableValueDisplay::::FmtString(template.to_string(), vec![]); + assert_eq!(display.to_string(), template); + } + + #[test] + fn printable_value_display_to_string_with_curly_escapes() { + let template = "hello {{world}} {{{{double_escape}}}}"; + let display = + PrintableValueDisplay::::FmtString(template.to_string(), vec![]); + assert_eq!(display.to_string(), template); + } + + #[test] + fn printable_value_display_to_string_with_interpolations() { + let template = "hello {one} {{no}} {two} {{not_again}} {three} world"; + let values = vec![ + (PrintableValue::String("ONE".to_string()), PrintableType::String { length: 3 }), + (PrintableValue::String("TWO".to_string()), PrintableType::String { length: 3 }), + (PrintableValue::String("THREE".to_string()), PrintableType::String { length: 5 }), + ]; + let expected = "hello ONE {{no}} TWO {{not_again}} THREE world"; + let display = + PrintableValueDisplay::::FmtString(template.to_string(), values); + assert_eq!(display.to_string(), expected); + } +} diff --git a/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md b/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md index 55d2d244344..90501e05fa4 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md @@ -142,7 +142,7 @@ Represents a binary operator. 
One of `+`, `-`, `*`, `/`, `%`, `==`, `!=`, `<`, ` #### is_shift_left -#include_code is_shift_right noir_stdlib/src/meta/op.nr rust +#include_code is_shift_left noir_stdlib/src/meta/op.nr rust `true` if this operator is `<<` diff --git a/noir/noir-repo/noir_stdlib/src/collections/map.nr b/noir/noir-repo/noir_stdlib/src/collections/map.nr index bcce08faab4..2b0da1b90ec 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/map.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/map.nr @@ -201,7 +201,10 @@ impl HashMap { } } - let msg = f"Amount of valid elements should have been {self._len} times, but got {entries.len()}."; + let self_len = self._len; + let entries_len = entries.len(); + let msg = + f"Amount of valid elements should have been {self_len} times, but got {entries_len}."; assert(entries.len() == self._len, msg); entries @@ -236,8 +239,10 @@ impl HashMap { } } + let self_len = self._len; + let keys_len = keys.len(); let msg = - f"Amount of valid elements should have been {self._len} times, but got {keys.len()}."; + f"Amount of valid elements should have been {self_len} times, but got {keys_len}."; assert(keys.len() == self._len, msg); keys @@ -271,8 +276,10 @@ impl HashMap { } } + let self_len = self._len; + let values_len = values.len(); let msg = - f"Amount of valid elements should have been {self._len} times, but got {values.len()}."; + f"Amount of valid elements should have been {self_len} times, but got {values_len}."; assert(values.len() == self._len, msg); values diff --git a/noir/noir-repo/noir_stdlib/src/collections/umap.nr b/noir/noir-repo/noir_stdlib/src/collections/umap.nr index 3e074551e9d..7aebeb437cf 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/umap.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/umap.nr @@ -138,7 +138,10 @@ impl UHashMap { } } - let msg = f"Amount of valid elements should have been {self._len} times, but got {entries.len()}."; + let self_len = self._len; + let entries_len = entries.len(); + let msg = + f"Amount of valid elements should have been {self_len} times, but got {entries_len}."; assert(entries.len() == self._len, msg); entries @@ -158,8 +161,10 @@ impl UHashMap { } } + let self_len = self._len; + let keys_len = keys.len(); let msg = - f"Amount of valid elements should have been {self._len} times, but got {keys.len()}."; + f"Amount of valid elements should have been {self_len} times, but got {keys_len}."; assert(keys.len() == self._len, msg); keys @@ -179,8 +184,10 @@ impl UHashMap { } } + let self_len = self._len; + let values_len = values.len(); let msg = - f"Amount of valid elements should have been {self._len} times, but got {values.len()}."; + f"Amount of valid elements should have been {self_len} times, but got {values_len}."; assert(values.len() == self._len, msg); values diff --git a/noir/noir-repo/scripts/check-critical-libraries.sh b/noir/noir-repo/scripts/check-critical-libraries.sh new file mode 100755 index 00000000000..b492cf1d4bc --- /dev/null +++ b/noir/noir-repo/scripts/check-critical-libraries.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +set -e + +# Run relative to repo root +cd $(dirname "$0")/../ + +if [[ -z $1 ]]; then + echo "Must specify Noir release to test against" >&2 + echo "usage: ./check-critical-libraries.sh " >&2 + exit 1 +fi +noirup -v $1 + +CRITICAL_LIBRARIES=$(grep -v "^#\|^$" ./CRITICAL_NOIR_LIBRARIES) +readarray -t REPOS_TO_CHECK < <(echo "$CRITICAL_LIBRARIES") + +getLatestReleaseTagForRepo() { + REPO_NAME=$1 + TAG=$(gh release list -R $REPO_NAME --json 'tagName,isLatest' -q '.[] | 
select(.isLatest == true).tagName') + if [[ -z $TAG ]]; then + echo "$REPO_NAME has no valid release" >&2 + exit 1 + fi + echo $TAG +} + +for REPO in ${REPOS_TO_CHECK[@]}; do + echo $REPO + TMP_DIR=$(mktemp -d) + + TAG=$(getLatestReleaseTagForRepo $REPO) + git clone $REPO -c advice.detachedHead=false --depth 1 --branch $TAG $TMP_DIR + + nargo test --program-dir $TMP_DIR + + rm -rf $TMP_DIR +done diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr index ca337c822d8..8cdd15aaa0e 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr @@ -6,7 +6,7 @@ fn main() { // Can't print these at compile-time here since printing to stdout while // compiling breaks the test runner. - let s1 = f"x is {x}, fake interpolation: \{y}, y is {y}"; + let s1 = f"x is {x}, fake interpolation: {{y}}, y is {y}"; let s2 = std::mem::zeroed::>(); (s1, s2) }; diff --git a/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr index e69184b9c96..85cf60dc796 100644 --- a/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr @@ -20,4 +20,22 @@ fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice assert(double.x == res.x); + + // Tests for #6549 + let const_scalar1 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 23, hi: 0 }; + let const_scalar2 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 0, hi: 23 }; + let const_scalar3 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 13, hi: 4 }; + let partial_mul = std::embedded_curve_ops::multi_scalar_mul( + [g1, double, pub_point, g1, g1], + [scalar, const_scalar1, scalar, const_scalar2, const_scalar3], + ); + assert(partial_mul.x == 0x2024c4eebfbc8a20018f8c95c7aab77c6f34f10cf785a6f04e97452d8708fda7); + // Check simplification by zero + let zero_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true }; + let const_zero = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 0, hi: 0 }; + let partial_mul = std::embedded_curve_ops::multi_scalar_mul( + [zero_point, double, g1], + [scalar, const_zero, scalar], + ); + assert(partial_mul == g1); } diff --git a/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr b/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr index cfd4e4a9136..aab531ea559 100644 --- a/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr @@ -104,10 +104,11 @@ fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { hashmap.insert(entry.key, entry.value); } - assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input lenght."); + assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input length."); for entry in input { - assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + let entry_key = entry.key; + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry_key}."); } hashmap.clear(); diff --git a/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr 
b/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr index fbee6956dfa..b13b6c25a7e 100644 --- a/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr @@ -9,7 +9,7 @@ fn main(x: Field) { value += term2; value.assert_max_bit_size::<1>(); - // Regression test for Aztec Packages issue #6451 + // Regression test for #6447 (Aztec Packages issue #9488) let y = unsafe { empty(x + 1) }; let z = y + x + 1; let z1 = z + y; diff --git a/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr b/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr index b56a4fe1747..689ba9d4a04 100644 --- a/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr @@ -104,7 +104,8 @@ unconstrained fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input length."); for entry in input { - assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + let entry_key = entry.key; + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry_key}."); } hashmap.clear(); diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs index 0730d06ad72..ecc9fab18ce 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs @@ -104,11 +104,12 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { formatter.write_left_paren(); formatter.write_right_paren(); })), - Literal::Bool(_) | Literal::Str(_) | Literal::FmtStr(_) | Literal::RawStr(..) => group - .text(self.chunk(|formatter| { + Literal::Bool(_) | Literal::Str(_) | Literal::FmtStr(_, _) | Literal::RawStr(..) => { + group.text(self.chunk(|formatter| { formatter.write_current_token_as_in_source(); formatter.bump(); - })), + })); + } Literal::Integer(..) => group.text(self.chunk(|formatter| { if formatter.is_at(Token::Minus) { formatter.write_token(Token::Minus); diff --git a/noir/noir-repo/tooling/nargo_toml/Cargo.toml b/noir/noir-repo/tooling/nargo_toml/Cargo.toml index e4766e44859..2bc24153836 100644 --- a/noir/noir-repo/tooling/nargo_toml/Cargo.toml +++ b/noir/noir-repo/tooling/nargo_toml/Cargo.toml @@ -25,3 +25,4 @@ noirc_driver.workspace = true semver = "1.0.20" [dev-dependencies] +test-case.workspace = true diff --git a/noir/noir-repo/tooling/nargo_toml/src/git.rs b/noir/noir-repo/tooling/nargo_toml/src/git.rs index 80e57247ae6..efaed4fabb9 100644 --- a/noir/noir-repo/tooling/nargo_toml/src/git.rs +++ b/noir/noir-repo/tooling/nargo_toml/src/git.rs @@ -3,16 +3,20 @@ use std::path::PathBuf; /// Creates a unique folder name for a GitHub repo /// by using its URL and tag fn resolve_folder_name(base: &url::Url, tag: &str) -> String { - let mut folder_name = base.domain().unwrap().to_owned(); - folder_name.push_str(base.path()); - folder_name.push_str(tag); - folder_name + let mut folder = PathBuf::from(""); + for part in [base.domain().unwrap(), base.path(), tag] { + folder.push(part.trim_start_matches('/')); + } + folder.to_string_lossy().into_owned() } +/// Path to the `nargo` directory under `$HOME`. fn nargo_crates() -> PathBuf { dirs::home_dir().unwrap().join("nargo") } +/// Target directory to download dependencies into, e.g. 
+/// `$HOME/nargo/github.com/noir-lang/noir-bignum/v0.1.2` fn git_dep_location(base: &url::Url, tag: &str) -> PathBuf { let folder_name = resolve_folder_name(base, tag); @@ -53,3 +57,19 @@ pub(crate) fn clone_git_repo(url: &str, tag: &str) -> Result { Ok(loc) } + +#[cfg(test)] +mod tests { + use test_case::test_case; + use url::Url; + + use super::resolve_folder_name; + + #[test_case("https://github.com/noir-lang/noir-bignum/"; "with slash")] + #[test_case("https://github.com/noir-lang/noir-bignum"; "without slash")] + fn test_resolve_folder_name(url: &str) { + let tag = "v0.4.2"; + let dir = resolve_folder_name(&Url::parse(url).unwrap(), tag); + assert_eq!(dir, "github.com/noir-lang/noir-bignum/v0.4.2"); + } +}
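For context on the `resolve_folder_name` change above: the removed implementation concatenated `base.path()` and `tag` directly, so a dependency URL written without a trailing slash would presumably yield a cache folder like `github.com/noir-lang/noir-bignumv0.4.2`, fusing the tag onto the repository name. The standalone sketch below is not part of the patch; it assumes only the `url` crate already used by `nargo_toml`, and mirrors both the old and new bodies shown in the diff to contrast the behaviours that the two `test_case` cases pin down.

    // Standalone sketch, not part of the patch: compares the old string-concatenation
    // logic with the new PathBuf-based join used by `resolve_folder_name`.
    use std::path::PathBuf;
    use url::Url;

    // Mirrors the removed implementation: domain + path + tag, with no separator handling.
    fn old_folder_name(base: &Url, tag: &str) -> String {
        let mut folder_name = base.domain().unwrap().to_owned();
        folder_name.push_str(base.path());
        folder_name.push_str(tag);
        folder_name
    }

    // Mirrors the new implementation: each component is pushed onto a PathBuf after
    // trimming leading slashes, so exactly one separator lands between components.
    fn new_folder_name(base: &Url, tag: &str) -> String {
        let mut folder = PathBuf::from("");
        for part in [base.domain().unwrap(), base.path(), tag] {
            folder.push(part.trim_start_matches('/'));
        }
        folder.to_string_lossy().into_owned()
    }

    fn main() {
        let tag = "v0.4.2";
        let no_slash = Url::parse("https://github.com/noir-lang/noir-bignum").unwrap();
        // Old behaviour without a trailing slash: the tag is fused onto the repo name.
        assert_eq!(old_folder_name(&no_slash, tag), "github.com/noir-lang/noir-bignumv0.4.2");
        // New behaviour: a clean path for both URL spellings (on a Unix-style path separator).
        assert_eq!(new_folder_name(&no_slash, tag), "github.com/noir-lang/noir-bignum/v0.4.2");
    }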
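Returning to the lexer change at the core of this patch: per the comment in `eat_fmt_string`, a fragment stores a single `{` or `}` for an escaped brace, while the reported `u32` length counts the doubled form that codegen re-emits. The sketch below is only an illustration of that accounting rule; `Fragment` and `fragment_lengths` are made-up names, not APIs from this patch, and the two assertions reproduce the lengths (12 and 38) asserted in the lexer tests above.

    // Standalone sketch of the fmtstr length rule, using hypothetical names.
    enum Fragment {
        String(String),
        Interpolation(String),
    }

    fn fragment_lengths(fragments: &[Fragment]) -> u32 {
        fragments
            .iter()
            .map(|fragment| match fragment {
                // '{' and '}' inside a string fragment came from "{{" / "}}" escapes,
                // and codegen writes them back doubled, so each counts twice.
                Fragment::String(s) => {
                    s.chars().map(|c| if c == '{' || c == '}' { 2 } else { 1 }).sum::<u32>()
                }
                // An interpolation "{ident}" contributes the identifier plus both braces.
                Fragment::Interpolation(ident) => ident.chars().count() as u32 + 2,
            })
            .sum()
    }

    fn main() {
        // f"hello\n\t{{x}}" lexes to one string fragment "hello\n\t{x}" with length 12.
        let escaped = vec![Fragment::String("hello\n\t{x}".to_string())];
        assert_eq!(fragment_lengths(&escaped), 12);

        // f"hello {world} and {_another} {vAr_123}" lexes to six fragments with length 38.
        let interpolated = vec![
            Fragment::String("hello ".to_string()),
            Fragment::Interpolation("world".to_string()),
            Fragment::String(" and ".to_string()),
            Fragment::Interpolation("_another".to_string()),
            Fragment::String(" ".to_string()),
            Fragment::Interpolation("vAr_123".to_string()),
        ];
        assert_eq!(fragment_lengths(&interpolated), 38);
    }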