diff --git a/.noir-sync-commit b/.noir-sync-commit index 29560ec9797..d1057d7263f 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -6d0f86ba389a5b59b1d7fdcadcbce3e40eecaa48 +852155dc1c4a910bf9cd4e7af334f3856c1c4643 diff --git a/noir/bb-version b/noir/bb-version index af9c31e0aa9..e40e4fc339c 100644 --- a/noir/bb-version +++ b/noir/bb-version @@ -1 +1 @@ -portal:../../../../barretenberg/ts +0.66.0 diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index e8226d5fc58..ea520517348 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -48,6 +48,7 @@ dependencies = [ "ark-bn254", "bn254_blackbox_solver", "brillig_vm", + "fxhash", "indexmap 1.9.3", "num-bigint", "proptest", diff --git a/noir/noir-repo/acvm-repo/acvm/Cargo.toml b/noir/noir-repo/acvm-repo/acvm/Cargo.toml index e513ae4e727..ba01ac8ec16 100644 --- a/noir/noir-repo/acvm-repo/acvm/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acvm/Cargo.toml @@ -17,7 +17,7 @@ workspace = true thiserror.workspace = true tracing.workspace = true serde.workspace = true - +fxhash.workspace = true acir.workspace = true brillig_vm.workspace = true acvm_blackbox_solver.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/mod.rs index 8829f77e50b..e32c0665c0f 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/mod.rs @@ -16,6 +16,10 @@ pub use simulator::CircuitSimulator; use transformers::transform_internal; pub use transformers::{transform, MIN_EXPRESSION_WIDTH}; +/// We need multiple passes to stabilize the output. +/// The value was determined by running tests. +const MAX_OPTIMIZER_PASSES: usize = 3; + /// This module moves and decomposes acir opcodes. The transformation map allows consumers of this module to map /// metadata they had about the opcodes to the new opcode structure generated after the transformation. #[derive(Debug)] @@ -28,9 +32,9 @@ impl AcirTransformationMap { /// Builds a map from a vector of pointers to the old acir opcodes. /// The index of the vector is the new opcode index. /// The value of the vector is the old opcode index pointed. - fn new(acir_opcode_positions: Vec) -> Self { + fn new(acir_opcode_positions: &[usize]) -> Self { let mut old_indices_to_new_indices = HashMap::with_capacity(acir_opcode_positions.len()); - for (new_index, old_index) in acir_opcode_positions.into_iter().enumerate() { + for (new_index, old_index) in acir_opcode_positions.iter().copied().enumerate() { old_indices_to_new_indices.entry(old_index).or_insert_with(Vec::new).push(new_index); } AcirTransformationMap { old_indices_to_new_indices } @@ -72,17 +76,42 @@ fn transform_assert_messages( } /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. +/// +/// Runs multiple passes until the output stabilizes. pub fn compile( acir: Circuit, expression_width: ExpressionWidth, ) -> (Circuit, AcirTransformationMap) { - let (acir, acir_opcode_positions) = optimize_internal(acir); + let mut pass = 0; + let mut prev_opcodes_hash = fxhash::hash64(&acir.opcodes); + let mut prev_acir = acir; + + // For most test programs it would be enough to only loop `transform_internal`, + // but some of them don't stabilize unless we also repeat the backend agnostic optimizations. 
+ let (mut acir, acir_opcode_positions) = loop { + let (acir, acir_opcode_positions) = optimize_internal(prev_acir); + + // Stop if we have already done at least one transform and an extra optimization changed nothing. + if pass > 0 && prev_opcodes_hash == fxhash::hash64(&acir.opcodes) { + break (acir, acir_opcode_positions); + } - let (mut acir, acir_opcode_positions) = - transform_internal(acir, expression_width, acir_opcode_positions); + let (acir, acir_opcode_positions) = + transform_internal(acir, expression_width, acir_opcode_positions); + + let opcodes_hash = fxhash::hash64(&acir.opcodes); + + // Stop if the output hasn't changed in this loop or we have reached the maximum number of passes. + if pass == MAX_OPTIMIZER_PASSES - 1 || prev_opcodes_hash == opcodes_hash { + break (acir, acir_opcode_positions); + } - let transformation_map = AcirTransformationMap::new(acir_opcode_positions); + pass += 1; + prev_acir = acir; + prev_opcodes_hash = opcodes_hash; + }; + let transformation_map = AcirTransformationMap::new(&acir_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); (acir, transformation_map) diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs index 1947a80dc35..f86bf500998 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs @@ -23,7 +23,7 @@ use super::{transform_assert_messages, AcirTransformationMap}; pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformationMap) { let (mut acir, new_opcode_positions) = optimize_internal(acir); - let transformation_map = AcirTransformationMap::new(new_opcode_positions); + let transformation_map = AcirTransformationMap::new(&new_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs index c9ce4ac7895..a499aec1b30 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -1,5 +1,10 @@ use acir::{ - circuit::{brillig::BrilligOutputs, Circuit, ExpressionWidth, Opcode}, + circuit::{ + self, + brillig::{BrilligInputs, BrilligOutputs}, + opcodes::{BlackBoxFuncCall, FunctionInput, MemOp}, + Circuit, ExpressionWidth, Opcode, + }, native_types::{Expression, Witness}, AcirField, }; @@ -12,6 +17,7 @@ pub use csat::MIN_EXPRESSION_WIDTH; use super::{ optimizers::MergeExpressionsOptimizer, transform_assert_messages, AcirTransformationMap, + MAX_OPTIMIZER_PASSES, }; /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. @@ -26,7 +32,7 @@ pub fn transform( let (mut acir, acir_opcode_positions) = transform_internal(acir, expression_width, acir_opcode_positions); - let transformation_map = AcirTransformationMap::new(acir_opcode_positions); + let transformation_map = AcirTransformationMap::new(&acir_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); @@ -36,9 +42,52 @@ pub fn transform( /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. /// /// Accepts an injected `acir_opcode_positions` to allow transformations to be applied directly after optimizations. +/// +/// Does multiple passes until the output stabilizes.
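// Illustrative sketch (not part of the patch above): the fixed-point pattern that `compile`
// and `transform_internal` rely on, shown on a generic step function. Only `fxhash::hash64`
// is taken from the real dependencies; `run_until_stable` is a hypothetical helper.
fn run_until_stable<T: std::hash::Hash>(mut value: T, max_passes: usize, step: impl Fn(T) -> T) -> T {
    let mut prev_hash = fxhash::hash64(&value);
    for _ in 0..max_passes {
        value = step(value);
        let hash = fxhash::hash64(&value);
        if hash == prev_hash {
            // The step no longer changes the output, so further passes are redundant.
            break;
        }
        prev_hash = hash;
    }
    value
}
// Usage sketch: collapse consecutive duplicates until the vector stops changing.
// let stable = run_until_stable(vec![1, 1, 2, 2, 3], 3, |mut v| { v.dedup(); v });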
#[tracing::instrument(level = "trace", name = "transform_acir", skip(acir, acir_opcode_positions))] pub(super) fn transform_internal( - acir: Circuit, + mut acir: Circuit, + expression_width: ExpressionWidth, + mut acir_opcode_positions: Vec, +) -> (Circuit, Vec) { + // Allow multiple passes until we have stable output. + let mut prev_opcodes_hash = fxhash::hash64(&acir.opcodes); + + // For most test programs it would be enough to loop here, but some of them + // don't stabilize unless we also repeat the backend agnostic optimizations. + for _ in 0..MAX_OPTIMIZER_PASSES { + let (new_acir, new_acir_opcode_positions) = + transform_internal_once(acir, expression_width, acir_opcode_positions); + + acir = new_acir; + acir_opcode_positions = new_acir_opcode_positions; + + let new_opcodes_hash = fxhash::hash64(&acir.opcodes); + + if new_opcodes_hash == prev_opcodes_hash { + break; + } + prev_opcodes_hash = new_opcodes_hash; + } + // After the elimination of intermediate variables the `current_witness_index` is potentially higher than it needs to be, + // which would cause gaps if we ran the optimization a second time, making it look like new variables were added. + acir.current_witness_index = max_witness(&acir).witness_index(); + + (acir, acir_opcode_positions) +} + +/// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. +/// +/// Accepts an injected `acir_opcode_positions` to allow transformations to be applied directly after optimizations. +/// +/// Does a single optimization pass. +#[tracing::instrument( + level = "trace", + name = "transform_acir_once", + skip(acir, acir_opcode_positions) +)] +fn transform_internal_once( + mut acir: Circuit, expression_width: ExpressionWidth, acir_opcode_positions: Vec, ) -> (Circuit, Vec) { @@ -79,8 +128,6 @@ pub(super) fn transform_internal( &mut next_witness_index, ); - // Update next_witness counter - next_witness_index += (intermediate_variables.len() - len) as u32; let mut new_opcodes = Vec::new(); for (g, (norm, w)) in intermediate_variables.iter().skip(len) { // de-normalize @@ -150,23 +197,275 @@ pub(super) fn transform_internal( let current_witness_index = next_witness_index - 1; - let acir = Circuit { + acir = Circuit { current_witness_index, expression_width, opcodes: transformed_opcodes, // The transformer does not add new public inputs ..acir }; + let mut merge_optimizer = MergeExpressionsOptimizer::new(); + let (opcodes, new_acir_opcode_positions) = merge_optimizer.eliminate_intermediate_variable(&acir, new_acir_opcode_positions); - // n.b. we do not update current_witness_index after the eliminate_intermediate_variable pass, the real index could be less. - let acir = Circuit { - current_witness_index, - expression_width, + + // n.b. if we do not update current_witness_index after the eliminate_intermediate_variable pass, the real index could be less. + acir = Circuit { opcodes, // The optimizer does not add new public inputs ..acir }; + (acir, new_acir_opcode_positions) } + +/// Find the witness with the highest ID in the circuit. +fn max_witness(circuit: &Circuit) -> Witness { + let mut witnesses = WitnessFolder::new(Witness::default(), |state, witness| { + *state = witness.max(*state); + }); + witnesses.fold_circuit(circuit); + witnesses.into_state() +} + +/// Fold all witnesses in a circuit. +struct WitnessFolder { + state: S, + accumulate: A, +} + +impl WitnessFolder +where + A: Fn(&mut S, Witness), +{ + /// Create the folder with some initial state and an accumulator function. 
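// Illustrative sketch (not part of the patch above): besides `max_witness`, the same folder
// can accumulate any state, e.g. the set of distinct witnesses referenced by a circuit.
// `distinct_witnesses` is a hypothetical helper mirroring `max_witness`.
fn distinct_witnesses<F: AcirField>(circuit: &Circuit<F>) -> std::collections::BTreeSet<Witness> {
    let mut folder = WitnessFolder::new(std::collections::BTreeSet::<Witness>::new(), |set, witness| {
        // Insert every witness we encounter; duplicates are ignored by the set.
        set.insert(witness);
    });
    folder.fold_circuit(circuit);
    folder.into_state()
}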
+ fn new(init: S, accumulate: A) -> Self { + Self { state: init, accumulate } + } + + /// Take the accumulated state. + fn into_state(self) -> S { + self.state + } + + /// Add all witnesses from the circuit. + fn fold_circuit(&mut self, circuit: &Circuit) { + self.fold_many(circuit.private_parameters.iter()); + self.fold_many(circuit.public_parameters.0.iter()); + self.fold_many(circuit.return_values.0.iter()); + for opcode in &circuit.opcodes { + self.fold_opcode(opcode); + } + } + + /// Fold a witness into the state. + fn fold(&mut self, witness: Witness) { + (self.accumulate)(&mut self.state, witness); + } + + /// Fold many witnesses into the state. + fn fold_many<'w, I: Iterator>(&mut self, witnesses: I) { + for w in witnesses { + self.fold(*w); + } + } + + /// Add witnesses from the opcode. + fn fold_opcode(&mut self, opcode: &Opcode) { + match opcode { + Opcode::AssertZero(expr) => { + self.fold_expr(expr); + } + Opcode::BlackBoxFuncCall(call) => self.fold_blackbox(call), + Opcode::MemoryOp { block_id: _, op, predicate } => { + let MemOp { operation, index, value } = op; + self.fold_expr(operation); + self.fold_expr(index); + self.fold_expr(value); + if let Some(pred) = predicate { + self.fold_expr(pred); + } + } + Opcode::MemoryInit { block_id: _, init, block_type: _ } => { + for w in init { + self.fold(*w); + } + } + // We keep the display for a BrilligCall and circuit Call separate as they + // are distinct in their functionality and we should maintain this separation for debugging. + Opcode::BrilligCall { id: _, inputs, outputs, predicate } => { + if let Some(pred) = predicate { + self.fold_expr(pred); + } + self.fold_brillig_inputs(inputs); + self.fold_brillig_outputs(outputs); + } + Opcode::Call { id: _, inputs, outputs, predicate } => { + if let Some(pred) = predicate { + self.fold_expr(pred); + } + self.fold_many(inputs.iter()); + self.fold_many(outputs.iter()); + } + } + } + + fn fold_expr(&mut self, expr: &Expression) { + for i in &expr.mul_terms { + self.fold(i.1); + self.fold(i.2); + } + for i in &expr.linear_combinations { + self.fold(i.1); + } + } + + fn fold_brillig_inputs(&mut self, inputs: &[BrilligInputs]) { + for input in inputs { + match input { + BrilligInputs::Single(expr) => { + self.fold_expr(expr); + } + BrilligInputs::Array(exprs) => { + for expr in exprs { + self.fold_expr(expr); + } + } + BrilligInputs::MemoryArray(_) => {} + } + } + } + + fn fold_brillig_outputs(&mut self, outputs: &[BrilligOutputs]) { + for output in outputs { + match output { + BrilligOutputs::Simple(w) => { + self.fold(*w); + } + BrilligOutputs::Array(ws) => self.fold_many(ws.iter()), + } + } + } + + fn fold_blackbox(&mut self, call: &BlackBoxFuncCall) { + match call { + BlackBoxFuncCall::AES128Encrypt { inputs, iv, key, outputs } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_function_inputs(iv.as_slice()); + self.fold_function_inputs(key.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::AND { lhs, rhs, output } => { + self.fold_function_input(lhs); + self.fold_function_input(rhs); + self.fold(*output); + } + BlackBoxFuncCall::XOR { lhs, rhs, output } => { + self.fold_function_input(lhs); + self.fold_function_input(rhs); + self.fold(*output); + } + BlackBoxFuncCall::RANGE { input } => { + self.fold_function_input(input); + } + BlackBoxFuncCall::Blake2s { inputs, outputs } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::Blake3 { inputs, outputs } => { + 
self.fold_function_inputs(inputs.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::EcdsaSecp256k1 { + public_key_x, + public_key_y, + signature, + hashed_message, + output, + } => { + self.fold_function_inputs(public_key_x.as_slice()); + self.fold_function_inputs(public_key_y.as_slice()); + self.fold_function_inputs(signature.as_slice()); + self.fold_function_inputs(hashed_message.as_slice()); + self.fold(*output); + } + BlackBoxFuncCall::EcdsaSecp256r1 { + public_key_x, + public_key_y, + signature, + hashed_message, + output, + } => { + self.fold_function_inputs(public_key_x.as_slice()); + self.fold_function_inputs(public_key_y.as_slice()); + self.fold_function_inputs(signature.as_slice()); + self.fold_function_inputs(hashed_message.as_slice()); + self.fold(*output); + } + BlackBoxFuncCall::MultiScalarMul { points, scalars, outputs } => { + self.fold_function_inputs(points.as_slice()); + self.fold_function_inputs(scalars.as_slice()); + let (x, y, i) = outputs; + self.fold(*x); + self.fold(*y); + self.fold(*i); + } + BlackBoxFuncCall::EmbeddedCurveAdd { input1, input2, outputs } => { + self.fold_function_inputs(input1.as_slice()); + self.fold_function_inputs(input2.as_slice()); + let (x, y, i) = outputs; + self.fold(*x); + self.fold(*y); + self.fold(*i); + } + BlackBoxFuncCall::Keccakf1600 { inputs, outputs } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::RecursiveAggregation { + verification_key, + proof, + public_inputs, + key_hash, + proof_type: _, + } => { + self.fold_function_inputs(verification_key.as_slice()); + self.fold_function_inputs(proof.as_slice()); + self.fold_function_inputs(public_inputs.as_slice()); + self.fold_function_input(key_hash); + } + BlackBoxFuncCall::BigIntAdd { .. } + | BlackBoxFuncCall::BigIntSub { .. } + | BlackBoxFuncCall::BigIntMul { .. } + | BlackBoxFuncCall::BigIntDiv { .. 
} => {} + BlackBoxFuncCall::BigIntFromLeBytes { inputs, modulus: _, output: _ } => { + self.fold_function_inputs(inputs.as_slice()); + } + BlackBoxFuncCall::BigIntToLeBytes { input: _, outputs } => { + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::Poseidon2Permutation { inputs, outputs, len: _ } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::Sha256Compression { inputs, hash_values, outputs } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_function_inputs(hash_values.as_slice()); + self.fold_many(outputs.iter()); + } + } + } + + fn fold_function_input(&mut self, input: &FunctionInput) { + if let circuit::opcodes::ConstantOrWitnessEnum::Witness(witness) = input.input() { + self.fold(witness); + } + } + + fn fold_function_inputs(&mut self, inputs: &[FunctionInput]) { + for input in inputs { + self.fold_function_input(input); + } + } +} diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs index 8bf239eec8a..fc566b70a26 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs @@ -2,8 +2,7 @@ use criterion::{criterion_group, criterion_main, Criterion}; use std::{hint::black_box, time::Duration}; use acir::{AcirField, FieldElement}; -use acvm_blackbox_solver::BlackBoxFunctionSolver; -use bn254_blackbox_solver::{poseidon2_permutation, Bn254BlackBoxSolver}; +use bn254_blackbox_solver::poseidon2_permutation; use pprof::criterion::{Output, PProfProfiler}; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs index 962356d6dd9..fe8c8338b32 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -39,6 +39,8 @@ struct AttributeContext { attribute_module: LocalModuleId, } +type CollectedAttributes = Vec<(FuncId, Value, Vec, AttributeContext, Span)>; + impl AttributeContext { fn new(file: FileId, module: LocalModuleId) -> Self { Self { file, module, attribute_file: file, attribute_module: module } @@ -131,41 +133,37 @@ impl<'context> Elaborator<'context> { } } - fn run_comptime_attributes_on_item( + fn collect_comptime_attributes_on_item( &mut self, attributes: &[SecondaryAttribute], item: Value, - span: Span, attribute_context: AttributeContext, - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) { for attribute in attributes { - self.run_comptime_attribute_on_item( + self.collect_comptime_attribute_on_item( attribute, &item, - span, attribute_context, - generated_items, + attributes_to_run, ); } } - fn run_comptime_attribute_on_item( + fn collect_comptime_attribute_on_item( &mut self, attribute: &SecondaryAttribute, item: &Value, - span: Span, attribute_context: AttributeContext, - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) { if let SecondaryAttribute::Meta(attribute) = attribute { self.elaborate_in_comptime_context(|this| { - if let Err(error) = this.run_comptime_attribute_name_on_item( + if let Err(error) = this.collect_comptime_attribute_name_on_item( attribute, item.clone(), - span, attribute_context, - generated_items, + attributes_to_run, ) { this.errors.push(error); } @@ -173,22 +171,19 @@ impl<'context> Elaborator<'context> { } } - fn 
run_comptime_attribute_name_on_item( + /// Resolve an attribute to the function it refers to and add it to `attributes_to_run` + fn collect_comptime_attribute_name_on_item( &mut self, attribute: &MetaAttribute, item: Value, - span: Span, attribute_context: AttributeContext, - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) -> Result<(), (CompilationError, FileId)> { self.file = attribute_context.attribute_file; self.local_module = attribute_context.attribute_module; + let span = attribute.span; - let location = Location::new(attribute.span, self.file); - let function = Expression { - kind: ExpressionKind::Variable(attribute.name.clone()), - span: attribute.span, - }; + let function = Expression { kind: ExpressionKind::Variable(attribute.name.clone()), span }; let arguments = attribute.arguments.clone(); // Elaborate the function, rolling back any errors generated in case it is unknown @@ -200,32 +195,34 @@ impl<'context> Elaborator<'context> { let definition_id = match self.interner.expression(&function) { HirExpression::Ident(ident, _) => ident.id, _ => { - return Err(( - ResolverError::AttributeFunctionIsNotAPath { - function: function_string, - span: attribute.span, - } - .into(), - self.file, - )) + let error = + ResolverError::AttributeFunctionIsNotAPath { function: function_string, span }; + return Err((error.into(), self.file)); } }; let Some(definition) = self.interner.try_definition(definition_id) else { - return Err(( - ResolverError::AttributeFunctionNotInScope { - name: function_string, - span: attribute.span, - } - .into(), - self.file, - )); + let error = ResolverError::AttributeFunctionNotInScope { name: function_string, span }; + return Err((error.into(), self.file)); }; let DefinitionKind::Function(function) = definition.kind else { return Err((ResolverError::NonFunctionInAnnotation { span }.into(), self.file)); }; + attributes_to_run.push((function, item, arguments, attribute_context, span)); + Ok(()) + } + + fn run_attribute( + &mut self, + attribute_context: AttributeContext, + function: FuncId, + arguments: Vec, + item: Value, + location: Location, + generated_items: &mut CollectedItems, + ) -> Result<(), (CompilationError, FileId)> { self.file = attribute_context.file; self.local_module = attribute_context.module; @@ -237,10 +234,7 @@ impl<'context> Elaborator<'context> { arguments, location, ) - .map_err(|error| { - let file = error.get_location().file; - (error.into(), file) - })?; + .map_err(|error| error.into_compilation_error_pair())?; arguments.insert(0, (item, location)); @@ -496,65 +490,91 @@ impl<'context> Elaborator<'context> { } } - /// Run all the attributes on each item. The ordering is unspecified to users but currently - /// we run trait attributes first to (e.g.) register derive handlers before derive is - /// called on structs. - /// Returns any new items generated by attributes. + /// Run all the attributes on each item in the crate in source-order. + /// Source-order is defined as running all child modules before their parent modules are run. + /// Child modules of a parent are run in order of their `mod foo;` declarations in the parent. 
pub(super) fn run_attributes( &mut self, traits: &BTreeMap, types: &BTreeMap, functions: &[UnresolvedFunctions], module_attributes: &[ModuleAttribute], - ) -> CollectedItems { - let mut generated_items = CollectedItems::default(); + ) { + let mut attributes_to_run = Vec::new(); for (trait_id, trait_) in traits { let attributes = &trait_.trait_def.attributes; let item = Value::TraitDefinition(*trait_id); - let span = trait_.trait_def.span; let context = AttributeContext::new(trait_.file_id, trait_.module_id); - self.run_comptime_attributes_on_item( + self.collect_comptime_attributes_on_item( attributes, item, - span, context, - &mut generated_items, + &mut attributes_to_run, ); } for (struct_id, struct_def) in types { let attributes = &struct_def.struct_def.attributes; let item = Value::StructDefinition(*struct_id); - let span = struct_def.struct_def.span; let context = AttributeContext::new(struct_def.file_id, struct_def.module_id); - self.run_comptime_attributes_on_item( + self.collect_comptime_attributes_on_item( attributes, item, - span, context, - &mut generated_items, + &mut attributes_to_run, ); } - self.run_attributes_on_functions(functions, &mut generated_items); + self.collect_attributes_on_functions(functions, &mut attributes_to_run); + self.collect_attributes_on_modules(module_attributes, &mut attributes_to_run); + + self.sort_attributes_by_run_order(&mut attributes_to_run); - self.run_attributes_on_modules(module_attributes, &mut generated_items); + // run + for (attribute, item, args, context, span) in attributes_to_run { + let location = Location::new(span, context.attribute_file); - generated_items + let mut generated_items = CollectedItems::default(); + self.elaborate_in_comptime_context(|this| { + if let Err(error) = this.run_attribute( + context, + attribute, + args, + item, + location, + &mut generated_items, + ) { + this.errors.push(error); + } + }); + + if !generated_items.is_empty() { + self.elaborate_items(generated_items); + } + } } - fn run_attributes_on_modules( + fn sort_attributes_by_run_order(&self, attributes: &mut CollectedAttributes) { + let module_order = self.def_maps[&self.crate_id].get_module_topological_order(); + + // Sort each attribute by (module, location in file) so that we can execute in + // the order they were defined in, running attributes in child modules first. 
+ attributes.sort_by_key(|(_, _, _, ctx, span)| { + (module_order[&ctx.attribute_module], span.start()) + }); + } + + fn collect_attributes_on_modules( &mut self, module_attributes: &[ModuleAttribute], - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) { for module_attribute in module_attributes { let local_id = module_attribute.module_id; let module_id = ModuleId { krate: self.crate_id, local_id }; let item = Value::ModuleDefinition(module_id); let attribute = &module_attribute.attribute; - let span = Span::default(); let context = AttributeContext { file: module_attribute.file_id, @@ -563,14 +583,14 @@ impl<'context> Elaborator<'context> { attribute_module: module_attribute.attribute_module_id, }; - self.run_comptime_attribute_on_item(attribute, &item, span, context, generated_items); + self.collect_comptime_attribute_on_item(attribute, &item, context, attributes_to_run); } } - fn run_attributes_on_functions( + fn collect_attributes_on_functions( &mut self, function_sets: &[UnresolvedFunctions], - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) { for function_set in function_sets { self.self_type = function_set.self_type.clone(); @@ -579,13 +599,11 @@ impl<'context> Elaborator<'context> { let context = AttributeContext::new(function_set.file_id, *local_module); let attributes = function.secondary_attributes(); let item = Value::FunctionDefinition(*function_id); - let span = function.span(); - self.run_comptime_attributes_on_item( + self.collect_comptime_attributes_on_item( attributes, item, - span, context, - generated_items, + attributes_to_run, ); } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index 478504a79be..fe1d1e38e1a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -307,23 +307,13 @@ impl<'context> Elaborator<'context> { // We have to run any comptime attributes on functions before the function is elaborated // since the generated items are checked beforehand as well. - let generated_items = self.run_attributes( + self.run_attributes( &items.traits, &items.types, &items.functions, &items.module_attributes, ); - // After everything is collected, we can elaborate our generated items. - // It may be better to inline these within `items` entirely since elaborating them - // all here means any globals will not see these. Inlining them completely within `items` - // means we must be more careful about missing any additional items that need to be already - // elaborated. E.g. if a new struct is created, we've already passed the code path to - // elaborate them. 
- if !generated_items.is_empty() { - self.elaborate_items(generated_items); - } - for functions in items.functions { self.elaborate_functions(functions); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index 0404ae3c2c0..2e4809f3511 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -576,7 +576,7 @@ impl<'context> Elaborator<'context> { fn resolve_trait_static_method(&mut self, path: &Path) -> Option { let path_resolution = self.resolve_path(path.clone()).ok()?; let func_id = path_resolution.item.function_id()?; - let meta = self.interner.function_meta(&func_id); + let meta = self.interner.try_function_meta(&func_id)?; let the_trait = self.interner.get_trait(meta.trait_id?); let method = the_trait.find_method(path.last_name())?; let constraint = the_trait.as_constraint(path.span); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 51e62599b05..33dab802b21 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -119,9 +119,11 @@ pub struct ModuleAttribute { pub file_id: FileId, // The module this attribute is attached to pub module_id: LocalModuleId, + // The file where the attribute exists (it could be the same as `file_id` - // or a different one if it's an inner attribute in a different file) + // or a different one if it is an outer attribute in the parent of the module it applies to) pub attribute_file_id: FileId, + // The module where the attribute is defined (similar to `attribute_file_id`, // it could be different than `module_id` for inner attributes) pub attribute_module_id: LocalModuleId, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs index 3bb16a92fdb..d9d6e150a7a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -289,6 +289,29 @@ impl CrateDefMap { String::new() } } + + /// Return a topological ordering of each module such that any child modules + /// are before their parent modules. Sibling modules will respect the ordering + /// declared from their parent module (the `mod foo; mod bar;` declarations). 
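// Illustrative sketch (not part of the patch above): the same post-order numbering shown on a
// toy tree, where `children[i]` lists node i's children in declaration order. The names and
// the Vec-based representation are hypothetical; the real code walks `LocalModuleId`s.
fn post_order_numbering(node: usize, children: &[Vec<usize>], next: &mut usize, order: &mut Vec<usize>) {
    // Number every child subtree first, in declaration order...
    for &child in &children[node] {
        post_order_numbering(child, children, next, order);
    }
    // ...then the node itself, so children always sort before their parent.
    order[node] = *next;
    *next += 1;
}
// For `children = vec![vec![1, 2], vec![3, 4], vec![], vec![], vec![]]` (the root declares
// modules 1 and 2; module 1 declares 3 and 4) and `order = vec![0; 5]`, the result is
// `order == [4, 2, 3, 0, 1]`: grandchildren first, then their parent, then its sibling,
// and the root last.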
+ pub fn get_module_topological_order(&self) -> HashMap { + let mut ordering = HashMap::default(); + self.topologically_sort_modules(self.root, &mut 0, &mut ordering); + ordering + } + + fn topologically_sort_modules( + &self, + current: LocalModuleId, + index: &mut usize, + ordering: &mut HashMap, + ) { + for child in &self.modules[current.0].child_declaration_order { + self.topologically_sort_modules(*child, index, ordering); + } + + ordering.insert(current, *index); + *index += 1; + } } /// Specifies a contract function and extra metadata that diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs index fe6fe8285d3..06188f3920b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs @@ -14,6 +14,11 @@ pub struct ModuleData { pub parent: Option, pub children: HashMap, + /// Each child in the order they were declared in the parent module. + /// E.g. for a module containing `mod foo; mod bar; mod baz` this would + /// be `vec![foo, bar, baz]`. + pub child_declaration_order: Vec, + /// Contains all definitions visible to the current module. This includes /// all definitions in self.definitions as well as all imported definitions. scope: ItemScope, @@ -47,6 +52,7 @@ impl ModuleData { ModuleData { parent, children: HashMap::new(), + child_declaration_order: Vec::new(), scope: ItemScope::default(), definitions: ItemScope::default(), location, @@ -73,6 +79,10 @@ impl ModuleData { ) -> Result<(), (Ident, Ident)> { self.scope.add_definition(name.clone(), visibility, item_id, trait_id)?; + if let ModuleDefId::ModuleId(child) = item_id { + self.child_declaration_order.push(child.local_id); + } + // definitions is a subset of self.scope so it is expected if self.scope.define_func_def // returns without error, so will self.definitions.define_func_def. self.definitions.add_definition(name, visibility, item_id, trait_id) diff --git a/noir/noir-repo/docs/docs/noir/concepts/comptime.md b/noir/noir-repo/docs/docs/noir/concepts/comptime.md index 37457d47b46..9661dc1a6ca 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/comptime.md +++ b/noir/noir-repo/docs/docs/noir/concepts/comptime.md @@ -41,7 +41,7 @@ Note that while in a `comptime` context, any runtime variables _local to the cur Evaluation rules of `comptime` follows the normal unconstrained evaluation rules for other Noir code. There are a few things to note though: - Certain built-in functions may not be available, although more may be added over time. -- Evaluation order of global items is currently unspecified. For example, given the following two functions we can't guarantee +- Evaluation order of `comptime {}` blocks within global items is currently unspecified. For example, given the following two functions we can't guarantee which `println` will execute first. The ordering of the two printouts will be arbitrary, but should be stable across multiple compilations with the same `nargo` version as long as the program is also unchanged. ```rust @@ -56,11 +56,14 @@ fn two() { - Since evaluation order is unspecified, care should be taken when using mutable globals so that they do not rely on a particular ordering. For example, using globals to generate unique ids should be fine but relying on certain ids always being produced (especially after edits to the program) should be avoided. 
-- Although most ordering of globals is unspecified, two are: +- Although the ordering of comptime code is usually unspecified, there are cases where it is: - Dependencies of a crate will always be evaluated before the dependent crate. - - Any annotations on a function will be run before the function itself is resolved. This is to allow the annotation to modify the function if necessary. Note that if the + - Any attributes on a function will be run before the function body is resolved. This is to allow the attribute to modify the function if necessary. Note that if the function itself was called at compile-time previously, it will already be resolved and cannot be modified. To prevent accidentally calling functions you wish to modify - at compile-time, it may be helpful to sort your `comptime` annotation functions into a different crate along with any dependencies they require. + at compile-time, it may be helpful to sort your `comptime` annotation functions into a different submodule crate along with any dependencies they require. + - Unlike raw `comptime {}` blocks, attributes on top-level items in the program do have a set evaluation order. Attributes within a module are evaluated top-down, and attributes + in different modules are evaluated submodule-first. Sibling modules to the same parent module are evaluated in order of the module declarations (`mod foo; mod bar;`) in their + parent module. ### Lowering @@ -89,7 +92,7 @@ fn main() { } ``` -Not all types of values can be lowered. For example, `Type`s and `TypeDefinition`s (among other types) cannot be lowered at all. +Not all types of values can be lowered. For example, references, `Type`s, and `TypeDefinition`s (among other types) cannot be lowered at all. ```rust fn main() { @@ -100,6 +103,19 @@ fn main() { comptime fn get_type() -> Type { ... } ``` +Values of certain types may also change type when they are lowered. For example, a comptime format string will already be +formatted, and thus lowers into a runtime string instead: + +```rust +fn main() { + let foo = comptime { + let i = 2; + f"i = {i}" + }; + assert_eq(foo, "i = 2"); +} +``` + --- ## (Quasi) Quote @@ -121,6 +137,21 @@ Calling such a function at compile-time without `!` will just return the `Quoted For those familiar with quoting from other languages (primarily lisps), Noir's `quote` is actually a _quasiquote_. This means we can escape the quoting by using the unquote operator to splice values in the middle of quoted code. +In addition to curly braces, you can also use square braces for the quote operator: + +```rust +comptime { + let q1 = quote { 1 }; + let q2 = quote [ 2 ]; + assert_eq(q1, q2); + + // Square braces can be used to quote mismatched curly braces if needed + let _ = quote[}]; +} +``` + +--- + ## Unquote The unquote operator `$` is usable within a `quote` expression. @@ -149,7 +180,7 @@ If it is an expression (even a parenthesized one), it will do nothing. Most like Unquoting can also be avoided by escaping the `$` with a backslash: -``` +```rust comptime { let x = quote { 1 + 2 }; @@ -158,26 +189,48 @@ comptime { } ``` +### Combining Tokens + +Note that `Quoted` is internally a series of separate tokens, and that all unquoting does is combine these token vectors. 
+This means that code which appears to append like a string actually appends like a vector internally: + +```rust +comptime { + let x = 3; + let q = quote { foo$x }; // This is [foo, 3], not [foo3] + + // Spaces are ignored in general, they're never part of a token + assert_eq(q, quote { foo 3 }); +} +``` + +If you do want string semantics, you can use format strings then convert back to a `Quoted` value with `.quoted_contents()`. +Note that formatting a quoted value with multiple tokens will always insert a space between each token. If this is +undesired, you'll need to only operate on quoted values containing a single token. To do this, you can iterate +over each token of a larger quoted value with `.tokens()`: + +#include_code concatenate-example noir_stdlib/src/meta/mod.nr rust + --- -## Annotations +## Attributes -Annotations provide a way to run a `comptime` function on an item in the program. -When you use an annotation, the function with the same name will be called with that item as an argument: +Attributes provide a way to run a `comptime` function on an item in the program. +When you use an attribute, the function with the same name will be called with that item as an argument: ```rust -#[my_struct_annotation] +#[my_struct_attribute] struct Foo {} -comptime fn my_struct_annotation(s: StructDefinition) { - println("Called my_struct_annotation!"); +comptime fn my_struct_attribute(s: StructDefinition) { + println("Called my_struct_attribute!"); } -#[my_function_annotation] +#[my_function_attribute] fn foo() {} -comptime fn my_function_annotation(f: FunctionDefinition) { - println("Called my_function_annotation!"); +comptime fn my_function_attribute(f: FunctionDefinition) { + println("Called my_function_attribute!"); } ``` @@ -190,15 +243,47 @@ For example, this is the mechanism used to insert additional trait implementatio ### Calling annotations with additional arguments -Arguments may optionally be given to annotations. -When this is done, these additional arguments are passed to the annotation function after the item argument. +Arguments may optionally be given to attributes. +When this is done, these additional arguments are passed to the attribute function after the item argument. #include_code annotation-arguments-example noir_stdlib/src/meta/mod.nr rust -We can also take any number of arguments by adding the `varargs` annotation: +We can also take any number of arguments by adding the `varargs` attribute: #include_code annotation-varargs-example noir_stdlib/src/meta/mod.nr rust +### Attribute Evaluation Order + +Unlike the evaluation order of stray `comptime {}` blocks within functions, attributes have a well-defined evaluation +order. Within a module, attributes are evaluated top to bottom. Between modules, attributes in child modules are evaluated +first. Attributes in sibling modules are resolved following the `mod foo; mod bar;` declaration order within their parent +modules. + +```rust +mod foo; // attributes in foo are run first +mod bar; // followed by attributes in bar + +// followed by any attributes in the parent module +#[derive(Eq)] +struct Baz {} +``` + +Note that because of this evaluation order, you may get an error trying to derive a trait for a struct whose fields +have not yet had the trait derived already: + +```rust +// Error! `Bar` field of `Foo` does not (yet) implement Eq! +#[derive(Eq)] +struct Foo { + bar: Bar +} + +#[derive(Eq)] +struct Bar {} +``` + +In this case, the issue can be resolved by rearranging the structs. 
+ --- ## Comptime API diff --git a/noir/noir-repo/noir_stdlib/src/meta/mod.nr b/noir/noir-repo/noir_stdlib/src/meta/mod.nr index 5102f0cf1fd..5d2164a510d 100644 --- a/noir/noir-repo/noir_stdlib/src/meta/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/meta/mod.nr @@ -252,6 +252,41 @@ mod tests { fn do_nothing(_: Self) {} } + // docs:start:concatenate-example + comptime fn concatenate(q1: Quoted, q2: Quoted) -> Quoted { + assert(q1.tokens().len() <= 1); + assert(q2.tokens().len() <= 1); + + f"{q1}{q2}".quoted_contents() + } + + // The CtString type is also useful for a compile-time string of unbounded size + // so that you can append to it in a loop. + comptime fn double_spaced(q: Quoted) -> CtString { + let mut result = "".as_ctstring(); + + for token in q.tokens() { + if result != "".as_ctstring() { + result = result.append_str(" "); + } + result = result.append_fmtstr(f"{token}"); + } + + result + } + + #[test] + fn concatenate_test() { + comptime { + let result = concatenate(quote {foo}, quote {bar}); + assert_eq(result, quote {foobar}); + + let result = double_spaced(quote {foo bar 3}).as_quoted_str!(); + assert_eq(result, "foo bar 3"); + } + } + // docs:end:concatenate-example + // This function is just to remove unused warnings fn remove_unused_warnings() { let _: Bar = Bar { x: 1, y: [2, 3] }; diff --git a/noir/noir-repo/scripts/install_bb.sh b/noir/noir-repo/scripts/install_bb.sh index db98f17c503..3d1dc038ab8 100755 --- a/noir/noir-repo/scripts/install_bb.sh +++ b/noir/noir-repo/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.63.0" +VERSION="0.66.0" BBUP_PATH=~/.bb/bbup diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/Nargo.toml new file mode 100644 index 00000000000..4471bed86dc --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "attribute_order" +type = "bin" +authors = [""] +compiler_version = ">=0.36.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a.nr new file mode 100644 index 00000000000..663643f1288 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a.nr @@ -0,0 +1,13 @@ +// This is run fifth, after each child module finishes even though +// the function comes before the module declarations in the source. 
+#[crate::assert_run_order_function(4)] +pub fn foo() {} + +mod a_child1; +mod a_child2; + +#[crate::assert_run_order_struct(5)] +pub struct Bar {} + +#[crate::assert_run_order_trait(6)] +pub trait Foo {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child1.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child1.nr new file mode 100644 index 00000000000..834bbd43fb5 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child1.nr @@ -0,0 +1,5 @@ +#[crate::assert_run_order_function(0)] +pub fn foo() {} + +#[crate::assert_run_order_struct(1)] +pub struct Foo {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child2.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child2.nr new file mode 100644 index 00000000000..3548f4450a5 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child2.nr @@ -0,0 +1,5 @@ +#[crate::assert_run_order_trait(2)] +pub trait Foo {} + +#[crate::assert_run_order_function(3)] +pub fn foo() {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child1.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child1.nr new file mode 100644 index 00000000000..a8e7e898ec1 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child1.nr @@ -0,0 +1,4 @@ +#[crate::assert_run_order_function(8)] +pub fn foo() {} + +#![crate::assert_run_order_module(9)] diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child2.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child2.nr new file mode 100644 index 00000000000..8e6d967707a --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child2.nr @@ -0,0 +1,2 @@ +#[crate::assert_run_order_function(7)] +pub fn foo() {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/mod.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/mod.nr new file mode 100644 index 00000000000..77df04e15a9 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/mod.nr @@ -0,0 +1,25 @@ +// Declare these in reverse order to ensure the declaration +// order here is respected +mod b_child2; + +#[crate::assert_run_order_module(12)] +mod b_child1; + +#[crate::assert_run_order_function(13)] +pub fn foo() {} + +#![crate::assert_run_order_module(14)] + +// Test inline submodules as well +#[crate::assert_run_order_module(15)] +mod b_child3 { + #![crate::assert_run_order_module(10)] + + #[crate::assert_run_order_function(11)] + pub fn foo() {} +} + +#[crate::assert_run_order_function(16)] +pub fn bar() {} + +#![crate::assert_run_order_module(17)] diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/main.nr new file mode 100644 index 00000000000..9d5ba32b58e --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/main.nr @@ -0,0 +1,29 @@ +mod a; +mod b; + +#[assert_run_order_function(18)] +fn main() { + assert_eq(attributes_run, 19); +} + +comptime mut global attributes_run: Field = 0; + +pub comptime fn assert_run_order_function(_f: FunctionDefinition, order: Field) { + assert_eq(order, attributes_run); + 
attributes_run += 1; +} + +pub comptime fn assert_run_order_struct(_s: StructDefinition, order: Field) { + assert_eq(order, attributes_run); + attributes_run += 1; +} + +pub comptime fn assert_run_order_trait(_t: TraitDefinition, order: Field) { + assert_eq(order, attributes_run); + attributes_run += 1; +} + +pub comptime fn assert_run_order_module(_m: Module, order: Field) { + assert_eq(order, attributes_run); + attributes_run += 1; +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/Nargo.toml new file mode 100644 index 00000000000..ad71067a58f --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "attribute_sees_results_of_previous_attribute" +type = "bin" +authors = [""] +compiler_version = ">=0.34.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/src/main.nr new file mode 100644 index 00000000000..561c3e12e0c --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/src/main.nr @@ -0,0 +1,11 @@ +#[derive(Eq)] +pub struct Foo {} + +#[check_eq_derived_for_foo] +fn main() {} + +comptime fn check_eq_derived_for_foo(_f: FunctionDefinition) { + let foo = quote {Foo}.as_type(); + let eq = quote {Eq}.as_trait_constraint(); + assert(foo.implements(eq)); +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr index db3f78a35c6..4396169235d 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr @@ -20,15 +20,17 @@ comptime fn derive_default(typ: StructDefinition) -> Quoted { } } +// Bar needs to be defined before Foo so that its impls are defined before +// Foo's are. +#[derive_default] +struct Bar {} + #[derive_default] struct Foo { x: Field, y: Bar, } -#[derive_default] -struct Bar {} - comptime fn make_field_exprs(fields: [(Quoted, Type)]) -> [Quoted] { let mut result = &[]; for my_field in fields { diff --git a/noir/noir-repo/test_programs/execution_success/derive/src/main.nr b/noir/noir-repo/test_programs/execution_success/derive/src/main.nr index 6900aa6aead..f7d4f6b607a 100644 --- a/noir/noir-repo/test_programs/execution_success/derive/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/derive/src/main.nr @@ -25,6 +25,14 @@ comptime fn derive_do_nothing(s: StructDefinition) -> Quoted { // Test stdlib derive fns & multiple traits // - We can derive Ord and Hash even though std::cmp::Ordering and std::hash::Hasher aren't imported +// - We need to define MyOtherOtherStruct first since MyOtherStruct references it as a field and +// attributes are run in reading order. If it were defined afterward, the derived Eq impl for MyOtherStruct +// would error that MyOtherOtherStruct doesn't (yet) implement Eq. 
+#[derive(Eq, Default, Hash, Ord)] +struct MyOtherOtherStruct { + x: T, +} + #[derive(Eq, Default, Hash, Ord)] struct MyOtherStruct { field1: A, @@ -32,11 +40,6 @@ struct MyOtherStruct { field3: MyOtherOtherStruct, } -#[derive(Eq, Default, Hash, Ord)] -struct MyOtherOtherStruct { - x: T, -} - #[derive(Eq, Default, Hash, Ord)] struct EmptyStruct {} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index ff6009981c7..f134374f89e 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -8,7 +8,9 @@ use nargo::ops::{collect_errors, compile_contract, compile_program, report_error use nargo::package::{CrateName, Package}; use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use nargo_toml::{ + get_package_manifest, resolve_workspace_from_toml, ManifestError, PackageSelection, +}; use noirc_driver::DEFAULT_EXPRESSION_WIDTH; use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract}; @@ -44,16 +46,11 @@ pub(crate) struct CompileCommand { } pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; let selection = args.package.map_or(default_selection, PackageSelection::Selected); - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), - )?; + let workspace = read_workspace(&config.program_dir, selection)?; if args.watch { watch_workspace(&workspace, &args.compile_options) @@ -65,6 +62,22 @@ pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliEr Ok(()) } +/// Read a given program directory into a workspace. +fn read_workspace( + program_dir: &Path, + selection: PackageSelection, +) -> Result { + let toml_path = get_package_manifest(program_dir)?; + + let workspace = resolve_workspace_from_toml( + &toml_path, + selection, + Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), + )?; + + Ok(workspace) +} + /// Continuously recompile the workspace on any Noir file change event. fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> notify::Result<()> { let (tx, rx) = std::sync::mpsc::channel(); @@ -109,15 +122,21 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } +/// Parse all files in the workspace. +fn parse_workspace(workspace: &Workspace) -> (FileManager, ParsedFiles) { + let mut file_manager = workspace.new_file_manager(); + insert_all_files_for_workspace_into_file_manager(workspace, &mut file_manager); + let parsed_files = parse_all(&file_manager); + (file_manager, parsed_files) +} + /// Parse and compile the entire workspace, then report errors. /// This is the main entry point used by all other commands that need compilation. 
pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let mut workspace_file_manager = workspace.new_file_manager(); - insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + let (workspace_file_manager, parsed_files) = parse_workspace(workspace); let compiled_workspace = compile_workspace(&workspace_file_manager, &parsed_files, workspace, compile_options); @@ -150,7 +169,7 @@ fn compile_workspace( let program_warnings_or_errors: CompilationResult<()> = compile_programs(file_manager, parsed_files, workspace, &binary_packages, compile_options); - let contract_warnings_or_errors: CompilationResult<()> = compiled_contracts( + let contract_warnings_or_errors: CompilationResult<()> = compile_contracts( file_manager, parsed_files, &contract_packages, @@ -244,7 +263,7 @@ fn compile_programs( } /// Compile the given contracts in the workspace. -fn compiled_contracts( +fn compile_contracts( file_manager: &FileManager, parsed_files: &ParsedFiles, contract_packages: &[Package], @@ -296,3 +315,138 @@ pub(crate) fn get_target_width( compile_options_width.unwrap_or(DEFAULT_EXPRESSION_WIDTH) } } + +#[cfg(test)] +mod tests { + use std::{ + path::{Path, PathBuf}, + str::FromStr, + }; + + use clap::Parser; + use nargo::ops::compile_program; + use nargo_toml::PackageSelection; + use noirc_driver::{CompileOptions, CrateName}; + use rayon::prelude::*; + + use crate::cli::compile_cmd::{get_target_width, parse_workspace, read_workspace}; + + /// Try to find the directory that Cargo sets when it is running; + /// otherwise fallback to assuming the CWD is the root of the repository + /// and append the crate path. + fn test_programs_dir() -> PathBuf { + let root_dir = match std::env::var("CARGO_MANIFEST_DIR") { + Ok(dir) => PathBuf::from(dir).parent().unwrap().parent().unwrap().to_path_buf(), + Err(_) => std::env::current_dir().unwrap(), + }; + root_dir.join("test_programs") + } + + /// Collect the test programs under a sub-directory. + fn read_test_program_dirs( + test_programs_dir: &Path, + test_sub_dir: &str, + ) -> impl Iterator { + let test_case_dir = test_programs_dir.join(test_sub_dir); + std::fs::read_dir(test_case_dir) + .unwrap() + .flatten() + .filter(|c| c.path().is_dir()) + .map(|c| c.path()) + } + + #[derive(Parser, Debug)] + #[command(ignore_errors = true)] + struct Options { + /// Test name to filter for. + /// + /// For example: + /// ```text + /// cargo test -p nargo_cli -- test_transform_program_is_idempotent slice_loop + /// ``` + args: Vec, + } + + impl Options { + fn package_selection(&self) -> PackageSelection { + match self.args.as_slice() { + [_test_name, test_program] => { + PackageSelection::Selected(CrateName::from_str(test_program).unwrap()) + } + _ => PackageSelection::DefaultOrAll, + } + } + } + + /// Check that `nargo::ops::transform_program` is idempotent by compiling the + /// test programs and running them through the optimizer twice. + /// + /// This test is here purely because of the convenience of having access to + /// the utility functions to process workspaces. 
+ #[test] + fn test_transform_program_is_idempotent() { + let opts = Options::parse(); + + let sel = opts.package_selection(); + let verbose = matches!(sel, PackageSelection::Selected(_)); + + let test_workspaces = read_test_program_dirs(&test_programs_dir(), "execution_success") + .filter_map(|dir| read_workspace(&dir, sel.clone()).ok()) + .collect::>(); + + assert!(!test_workspaces.is_empty(), "should find some test workspaces"); + + test_workspaces.par_iter().for_each(|workspace| { + let (file_manager, parsed_files) = parse_workspace(workspace); + let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); + + for package in binary_packages { + let (program_0, _warnings) = compile_program( + &file_manager, + &parsed_files, + workspace, + package, + &CompileOptions::default(), + None, + ) + .expect("failed to compile"); + + let width = get_target_width(package.expression_width, None); + + let program_1 = nargo::ops::transform_program(program_0, width); + let program_2 = nargo::ops::transform_program(program_1.clone(), width); + + if verbose { + // Compare where the most likely difference is. + similar_asserts::assert_eq!( + format!("{}", program_1.program), + format!("{}", program_2.program), + "optimization not idempotent for test program '{}'", + package.name + ); + assert_eq!( + program_1.program, program_2.program, + "optimization not idempotent for test program '{}'", + package.name + ); + + // Compare the whole content. + similar_asserts::assert_eq!( + serde_json::to_string_pretty(&program_1).unwrap(), + serde_json::to_string_pretty(&program_2).unwrap(), + "optimization not idempotent for test program '{}'", + package.name + ); + } else { + // Just compare hashes, which will only state that a program failed. + // Then we can use the filter option to zoom in on one program to see why. + assert!( + fxhash::hash64(&program_1) == fxhash::hash64(&program_2), + "optimization not idempotent for test program '{}'", + package.name + ); + } + } + }); + } +} diff --git a/noir/noir-repo/tooling/nargo_toml/src/lib.rs b/noir/noir-repo/tooling/nargo_toml/src/lib.rs index c0d8c7997fd..c1990dab4a6 100644 --- a/noir/noir-repo/tooling/nargo_toml/src/lib.rs +++ b/noir/noir-repo/tooling/nargo_toml/src/lib.rs @@ -469,7 +469,7 @@ fn resolve_package_from_toml( result } -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Clone)] pub enum PackageSelection { Selected(CrateName), DefaultOrAll,