diff --git a/noir/.github/workflows/docs-dead-links.yml b/noir/.github/workflows/docs-dead-links.yml index ffb18fa0eb28..40e948fe2c17 100644 --- a/noir/.github/workflows/docs-dead-links.yml +++ b/noir/.github/workflows/docs-dead-links.yml @@ -29,7 +29,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} WORKFLOW_NAME: ${{ github.workflow }} - WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/job/${{ github.job }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} with: update_existing: true filename: .github/DEAD_LINKS_IN_DOCS.md diff --git a/noir/.gitrepo b/noir/.gitrepo index 5fcea33359ce..0987ce805a70 100644 --- a/noir/.gitrepo +++ b/noir/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/noir-lang/noir branch = aztec-packages - commit = 13f93d523342daf478e08e8ccc0f00962c7fbe05 - parent = 41ae75cdee6285729551965972e8cb039ff3045a + commit = 727473cb5268d85cc524a9f4662cf8a7d5bd8996 + parent = 7c076653169771223a378f6c01bd9d3e3aafb682 method = merge cmdver = 0.4.6 diff --git a/noir/Cargo.lock b/noir/Cargo.lock index 79f1934059f5..6afa4fd2db49 100644 --- a/noir/Cargo.lock +++ b/noir/Cargo.lock @@ -115,14 +115,15 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if 1.0.0", "getrandom 0.2.10", "once_cell", "version_check", + "zerocopy", ] [[package]] @@ -845,7 +846,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -1295,7 +1296,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -1306,7 +1307,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -1556,7 +1557,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -1774,7 +1775,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -1944,9 +1945,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.20" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" dependencies = [ "bytes", "fnv", @@ -1954,7 +1955,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 1.9.3", + "indexmap 2.0.0", "slab", "tokio", "tokio-util 0.7.8", @@ -1991,7 +1992,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", ] [[package]] @@ -2254,7 +2255,7 @@ version = "0.11.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fb7c1b80a1dfa604bb4a649a5c5aeef3d913f7c520cb42b40e534e8a61bcdfc" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "indexmap 1.9.3", "is-terminal", "itoa", @@ -3844,7 +3845,7 @@ dependencies = [ "quote", "rust-embed-utils",
"shellexpand", - "syn 2.0.26", + "syn 2.0.32", "walkdir", ] @@ -4154,7 +4155,7 @@ checksum = "741e124f5485c7e60c03b043f79f320bff3527f4bbf12cf3831750dc46a0ec2c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4176,7 +4177,7 @@ checksum = "1d89a8107374290037607734c0b73a85db7ed80cae314b3c5791f192a496e731" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4226,7 +4227,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4251,7 +4252,7 @@ checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4527,9 +4528,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.26" +version = "2.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "239814284fd6f1a4ffe4ca893952cdd93c224b6a1571c9a9eadd670295c0c9e2" dependencies = [ "proc-macro2", "quote", @@ -4649,7 +4650,7 @@ checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4740,7 +4741,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4891,7 +4892,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -5163,7 +5164,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", "wasm-bindgen-shared", ] @@ -5197,7 +5198,7 @@ checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5678,6 +5679,26 @@ dependencies = [ "libc", ] +[[package]] +name = "zerocopy" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.32", +] + [[package]] name = "zeroize" version = "1.6.0" @@ -5695,5 +5716,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] diff --git a/noir/aztec_macros/src/lib.rs b/noir/aztec_macros/src/lib.rs index c985bbc367a2..7c62f8f81692 100644 --- a/noir/aztec_macros/src/lib.rs +++ b/noir/aztec_macros/src/lib.rs @@ -40,6 +40,7 @@ pub enum AztecMacroError { AztecComputeNoteHashAndNullifierNotFound { span: Span }, AztecContractHasTooManyFunctions { span: Span }, AztecContractConstructorMissing { span: Span }, + UnsupportedFunctionArgumentType { span: Span, typ: UnresolvedTypeData }, } impl From for MacroError { @@ -65,6 +66,11 @@ impl From for MacroError { secondary_message: None, span: Some(span), }, + AztecMacroError::UnsupportedFunctionArgumentType { span, typ } => MacroError { + primary_message: format!("Provided parameter type `{typ:?}` 
is not supported in Aztec contract interface"), + secondary_message: None, + span: Some(span), + }, } } } @@ -341,11 +347,14 @@ fn transform_module( for func in module.functions.iter_mut() { for secondary_attribute in func.def.attributes.secondary.clone() { + let crate_graph = &context.crate_graph[crate_id]; if is_custom_attribute(&secondary_attribute, "aztec(private)") { - transform_function("Private", func, storage_defined); + transform_function("Private", func, storage_defined) + .map_err(|err| (err.into(), crate_graph.root_file_id))?; has_transformed_module = true; } else if is_custom_attribute(&secondary_attribute, "aztec(public)") { - transform_function("Public", func, storage_defined); + transform_function("Public", func, storage_defined) + .map_err(|err| (err.into(), crate_graph.root_file_id))?; has_transformed_module = true; } } @@ -384,7 +393,11 @@ fn transform_module( /// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs /// - Hashes all of the function input variables /// - This instantiates a helper function -fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) { +fn transform_function( + ty: &str, + func: &mut NoirFunction, + storage_defined: bool, +) -> Result<(), AztecMacroError> { let context_name = format!("{}Context", ty); let inputs_name = format!("{}ContextInputs", ty); let return_type_name = format!("{}CircuitPublicInputs", ty); @@ -396,7 +409,7 @@ fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) } // Insert the context creation as the first action - let create_context = create_context(&context_name, &func.def.parameters); + let create_context = create_context(&context_name, &func.def.parameters)?; func.def.body.0.splice(0..0, (create_context).iter().cloned()); // Add the inputs to the params @@ -423,6 +436,8 @@ fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) "Public" => func.def.is_open = true, _ => (), } + + Ok(()) } /// Transform Unconstrained @@ -621,7 +636,7 @@ fn create_inputs(ty: &str) -> Param { /// let mut context = PrivateContext::new(inputs, hasher.hash()); /// } /// ``` -fn create_context(ty: &str, params: &[Param]) -> Vec { +fn create_context(ty: &str, params: &[Param]) -> Result, AztecMacroError> { let mut injected_expressions: Vec = vec![]; // `let mut hasher = Hasher::new();` @@ -637,7 +652,7 @@ fn create_context(ty: &str, params: &[Param]) -> Vec { injected_expressions.push(let_hasher); // Iterate over each of the function parameters, adding to them to the hasher - params.iter().for_each(|Param { pattern, typ, span: _, visibility: _ }| { + for Param { pattern, typ, span, .. } in params { match pattern { Pattern::Identifier(identifier) => { // Match the type to determine the padding to do @@ -666,16 +681,18 @@ fn create_context(ty: &str, params: &[Param]) -> Vec { }, ) } - _ => panic!( - "[Aztec Noir] Provided parameter type: {:?} is not supported", - unresolved_type - ), + _ => { + return Err(AztecMacroError::UnsupportedFunctionArgumentType { + typ: unresolved_type.clone(), + span: *span, + }) + } }; injected_expressions.push(expression); } _ => todo!(), // Maybe unreachable? 
} - }); + } // Create the inputs to the context let inputs_expression = variable("inputs"); @@ -697,7 +714,7 @@ injected_expressions.push(let_context); // Return all expressions that will be injected by the hasher - injected_expressions + Ok(injected_expressions) } /// Abstract Return Type diff --git a/noir/bootstrap_cache.sh b/noir/bootstrap_cache.sh index ac919f3ca658..672702416bd4 100755 --- a/noir/bootstrap_cache.sh +++ b/noir/bootstrap_cache.sh @@ -8,3 +8,4 @@ echo -e "\033[1mRetrieving noir packages from remote cache...\033[0m" extract_repo noir-packages /usr/src/noir/packages ./ echo -e "\033[1mRetrieving nargo from remote cache...\033[0m" extract_repo noir /usr/src/noir/target/release ./target/ + diff --git a/noir/compiler/fm/src/file_map.rs b/noir/compiler/fm/src/file_map.rs index c4d7002a0820..50412d352ec4 100644 --- a/noir/compiler/fm/src/file_map.rs +++ b/noir/compiler/fm/src/file_map.rs @@ -75,6 +75,10 @@ impl FileMap { pub fn get_file_id(&self, file_name: &PathString) -> Option<FileId> { self.name_to_id.get(file_name).cloned() } + + pub fn all_file_ids(&self) -> impl Iterator<Item = &FileId> { + self.name_to_id.values() + } } impl Default for FileMap { fn default() -> Self { diff --git a/noir/compiler/noirc_driver/tests/stdlib_warnings.rs b/noir/compiler/noirc_driver/tests/stdlib_warnings.rs index e9153ec2f99f..6f4376211237 100644 --- a/noir/compiler/noirc_driver/tests/stdlib_warnings.rs +++ b/noir/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -1,7 +1,7 @@ use std::path::Path; use noirc_driver::{file_manager_with_stdlib, prepare_crate, ErrorsAndWarnings}; -use noirc_frontend::hir::Context; +use noirc_frontend::hir::{def_map::parse_file, Context}; #[test] fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> { @@ -15,8 +15,13 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> file_manager.add_file_with_source(file_name, source.to_owned()).expect( "Adding source buffer to file manager should never fail when file manager is empty", ); + let parsed_files = file_manager + .as_file_map() + .all_file_ids() + .map(|&file_id| (file_id, parse_file(&file_manager, file_id))) + .collect(); - let mut context = Context::new(file_manager); + let mut context = Context::new(file_manager, parsed_files); let root_crate_id = prepare_crate(&mut context, file_name); let ((), warnings) = noirc_driver::check_crate(&mut context, root_crate_id, false, false)?; diff --git a/noir/compiler/noirc_evaluator/src/ssa.rs b/noir/compiler/noirc_evaluator/src/ssa.rs index f11c077d49a5..e2da5652faf6 100644 --- a/noir/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/compiler/noirc_evaluator/src/ssa.rs @@ -45,7 +45,7 @@ pub(crate) fn optimize_into_acir( let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); let ssa_gen_span_guard = ssa_gen_span.enter(); - let ssa_builder = SsaBuilder::new(program, print_ssa_passes)? + let ssa = SsaBuilder::new(program, print_ssa_passes)? 
.run_pass(Ssa::defunctionalize, "After Defunctionalization:") .run_pass(Ssa::inline_functions, "After Inlining:") // Run mem2reg with the CFG separated into blocks @@ -62,16 +62,12 @@ // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores .run_pass(Ssa::mem2reg, "After Mem2Reg:") .run_pass(Ssa::fold_constants, "After Constant Folding:") - .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:"); + .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:") + .run_pass(Ssa::bubble_up_constrains, "After Constraint Bubbling:") + .finish(); - let brillig = ssa_builder.to_brillig(print_brillig_trace); + let brillig = ssa.to_brillig(print_brillig_trace); - // Split off any passes the are not necessary for Brillig generation but are necessary for ACIR generation. - // We only need to fill out nested slices as we need to have a known length when dealing with memory operations - // in ACIR gen while this is not necessary in the Brillig IR. - let ssa = ssa_builder - .run_pass(Ssa::fill_internal_slices, "After Fill Internal Slice Dummy Data:") - .finish(); drop(ssa_gen_span_guard); let last_array_uses = ssa.find_last_array_uses(); diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index c0e3ed1ff661..dfb8c20df723 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -88,10 +88,6 @@ struct Context { /// a new BlockId max_block_id: u32, - /// Maps SSA array values to their slice size and any nested slices internal to the parent slice. - /// This enables us to maintain the slice structure of a slice when performing an array get. - slice_sizes: HashMap<ValueId, Vec<usize>>, - data_bus: DataBus, } @@ -202,7 +198,6 @@ impl Context { internal_memory_blocks: HashMap::default(), internal_mem_block_lengths: HashMap::default(), max_block_id: 0, - slice_sizes: HashMap::default(), data_bus: DataBus::default(), } } @@ -683,21 +678,15 @@ ) -> Result<bool, RuntimeError> { let index_const = dfg.get_numeric_constant(index); let value_type = dfg.type_of_value(array); - let (Type::Array(element_types, _) | Type::Slice(element_types)) = &value_type else { + // Compiler sanity checks + assert!( + !value_type.is_nested_slice(), + "ICE: Nested slice type has reached ACIR generation" + ); + let (Type::Array(_, _) | Type::Slice(_)) = &value_type else { unreachable!("ICE: expected array or slice type"); - }; - // TODO(#3188): Need to be able to handle constant index for slices to seriously reduce - // constraint sizes of nested slices - // This can only be done if we accurately flatten nested slices as otherwise we will reach - // index out of bounds errors. If the slice is already flat then we can treat them similarly to arrays. 
- if matches!(value_type, Type::Slice(_)) - && element_types.iter().any(|element| element.contains_slice_element()) - { - return Ok(false); - } - match self.convert_value(array, dfg) { AcirValue::Var(acir_var, _) => { return Err(RuntimeError::InternalError(InternalError::Unexpected { @@ -788,24 +777,8 @@ let mut dummy_predicate_index = predicate_index; // We must setup the dummy value to match the type of the value we wish to store - let slice_sizes = if store_type.contains_slice_element() { - self.compute_slice_sizes(store, None, dfg); - self.slice_sizes.get(&store).cloned().ok_or_else(|| { - InternalError::Unexpected { - expected: "Store value should have slice sizes computed".to_owned(), - found: "Missing key in slice sizes map".to_owned(), - call_stack: self.acir_context.get_call_stack(), - } - })? - } else { - vec![] - }; - let dummy = self.array_get_value( - &store_type, - block_id, - &mut dummy_predicate_index, - &slice_sizes, - )?; + let dummy = + self.array_get_value(&store_type, block_id, &mut dummy_predicate_index)?; Some(self.convert_array_set_store_value(&store_value, &dummy)?) } @@ -922,26 +895,12 @@ } } - let value = if !res_typ.contains_slice_element() { - self.array_get_value(&res_typ, block_id, &mut var_index, &[])? - } else { - let slice_sizes = self - .slice_sizes - .get(&array_id) - .expect("ICE: Array with slices should have associated slice sizes"); - - // The first max size is going to be the length of the parent slice - // As we are fetching from the parent slice we just want its internal - // slice sizes. - let slice_sizes = slice_sizes[1..].to_vec(); - - let value = self.array_get_value(&res_typ, block_id, &mut var_index, &slice_sizes)?; - - // Insert the resulting slice sizes - self.slice_sizes.insert(results[0], slice_sizes); - - value - }; + // Compiler sanity check + assert!( + !res_typ.contains_slice_element(), + "ICE: Nested slice result found during ACIR generation" + ); + let value = self.array_get_value(&res_typ, block_id, &mut var_index)?; self.define_result(dfg, instruction, value.clone()); @@ -953,7 +912,6 @@ ssa_type: &Type, block_id: BlockId, var_index: &mut AcirVar, - slice_sizes: &[usize], ) -> Result<AcirValue, RuntimeError> { let one = self.acir_context.add_constant(FieldElement::one()); match ssa_type.clone() { @@ -971,33 +929,12 @@ let mut values = Vector::new(); for _ in 0..len { for typ in element_types.as_ref() { - values.push_back(self.array_get_value( - typ, - block_id, - var_index, - slice_sizes, - )?); + values.push_back(self.array_get_value(typ, block_id, var_index)?); } } Ok(AcirValue::Array(values)) } - Type::Slice(element_types) => { - // It is not enough to execute this loop and simply pass the size from the parent definition. - // We need the internal sizes of each type in case of a nested slice. 
- let mut values = Vector::new(); - - let (current_size, new_sizes) = - slice_sizes.split_first().expect("should be able to split"); - - for _ in 0..*current_size { - for typ in element_types.as_ref() { - values - .push_back(self.array_get_value(typ, block_id, var_index, new_sizes)?); - } - } - Ok(AcirValue::Array(values)) - } - _ => unreachable!("ICE - expected an array or slice"), + _ => unreachable!("ICE: Expected an array or numeric but got {ssa_type:?}"), } } @@ -1059,23 +996,6 @@ self.array_set_value(&store_value, result_block_id, &mut var_index)?; - // Set new resulting array to have the same slice sizes as the instruction input - if let Type::Slice(element_types) = &array_typ { - let has_internal_slices = - element_types.as_ref().iter().any(|typ| typ.contains_slice_element()); - if has_internal_slices { - let slice_sizes = self - .slice_sizes - .get(&array_id) - .expect( - "ICE: Expected array with internal slices to have associated slice sizes", - ) - .clone(); - let results = dfg.instruction_results(instruction); - self.slice_sizes.insert(results[0], slice_sizes); - } - } - let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) { Some(self.init_element_type_sizes_array(&array_typ, array_id, None, dfg)?) } else { @@ -1184,8 +1104,6 @@ Type::Array(_, _) | Type::Slice(_) => { match &dfg[array_id] { Value::Array { array, .. } => { - self.compute_slice_sizes(array_id, None, dfg); - for (i, value) in array.iter().enumerate() { flat_elem_type_sizes.push( self.flattened_slice_size(*value, dfg) + flat_elem_type_sizes[i], @@ -1285,41 +1203,6 @@ Ok(element_type_sizes) } - fn compute_slice_sizes( - &mut self, - current_array_id: ValueId, - parent_array: Option<ValueId>, - dfg: &DataFlowGraph, - ) { - let (array, typ) = match &dfg[current_array_id] { - Value::Array { array, typ } => (array, typ.clone()), - _ => return, - }; - - if !matches!(typ, Type::Slice(_)) { - return; - } - - let element_size = typ.element_size(); - let true_len = array.len() / element_size; - if let Some(parent_array) = parent_array { - let sizes_list = - self.slice_sizes.get_mut(&parent_array).expect("ICE: expected size list"); - sizes_list.push(true_len); - for value in array { - self.compute_slice_sizes(*value, Some(parent_array), dfg); - } - } else { - // This means the current_array_id is the parent array - // The slice sizes should follow the parent array's type structure - // thus we start our sizes list with the parent array size. - self.slice_sizes.insert(current_array_id, vec![true_len]); - for value in array { - self.compute_slice_sizes(*value, Some(current_array_id), dfg); - } - } - } - fn copy_dynamic_array( &mut self, source: BlockId, @@ -1772,23 +1655,21 @@ let slice_length = self.convert_value(arguments[0], dfg).into_var()?; let (slice_contents, slice_typ, _) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(slice_contents, dfg); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); + let slice = self.convert_value(slice_contents, dfg); let mut new_elem_size = Self::flattened_value_size(&slice); let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; let elements_to_push = &arguments[2..]; - // We only fill internal slices for nested slices (a slice inside of a slice). - // So we must directly push back elements for slices which are not a nested slice. 
- if !slice_typ.is_nested_slice() { - for elem in elements_to_push { - let element = self.convert_value(*elem, dfg); - - new_elem_size += Self::flattened_value_size(&element); - new_slice.push_back(element); - } + // We must directly push back elements for non-nested slices + for elem in elements_to_push { + let element = self.convert_value(*elem, dfg); + + new_elem_size += Self::flattened_value_size(&element); + new_slice.push_back(element); } // Increase the slice length by one to enable accessing more elements in the slice. @@ -1800,20 +1681,6 @@ impl Context { self.initialize_array(result_block_id, new_elem_size, Some(new_slice_val.clone()))?; // The previous slice length represents the index we want to write into. let mut var_index = slice_length; - // Dynamic arrays are represented as flat memory. We must flatten the user facing index - // to a flattened index that matches the complex slice structure. - if slice_typ.is_nested_slice() { - let element_size = slice_typ.element_size(); - - // Multiply the element size against the var index before fetching the flattened index - // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, - // which is how `get_flattened_index` expects its index input. - let element_size_var = self.acir_context.add_constant(element_size); - var_index = self.acir_context.mul_var(slice_length, element_size_var)?; - var_index = - self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; - } - // Write the elements we wish to push back directly. // The slice's underlying array value should already be filled with dummy data // to enable this write to be within bounds. @@ -1847,8 +1714,9 @@ impl Context { let (slice_contents, slice_typ, _) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice: AcirValue = self.convert_value(slice_contents, dfg); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); + let slice: AcirValue = self.convert_value(slice_contents, dfg); let mut new_slice_size = Self::flattened_value_size(&slice); // Increase the slice length by one to enable accessing more elements in the slice. @@ -1860,33 +1728,14 @@ impl Context { let elements_to_push = &arguments[2..]; let mut elem_size = 0; - // We only fill internal slices for nested slices (a slice inside of a slice). - // So we must directly push front elements for slices which are not a nested slice. - if !slice_typ.is_nested_slice() { - for elem in elements_to_push.iter().rev() { - let element = self.convert_value(*elem, dfg); - - elem_size += Self::flattened_value_size(&element); - new_slice.push_front(element); - } - new_slice_size += elem_size; - } else { - // We have already filled the appropriate dummy values for nested slice during SSA gen. - // We need to account for that we do not go out of bounds by removing dummy data as we - // push elements to the front of our slice. - // Using this strategy we are able to avoid dynamic writes like we do for a SlicePushBack. 
- for elem in elements_to_push.iter().rev() { - let element = self.convert_value(*elem, dfg); - - let elem_size = Self::flattened_value_size(&element); - // Have to pop based off of the flattened value size as we read the - // slice intrinsic as a flat list of AcirValue::Var - for _ in 0..elem_size { - new_slice.pop_back(); - } - new_slice.push_front(element); - } + // We must directly push front elements for non-nested slices + for elem in elements_to_push.iter().rev() { + let element = self.convert_value(*elem, dfg); + + elem_size += Self::flattened_value_size(&element); + new_slice.push_front(element); } + new_slice_size += elem_size; let new_slice_val = AcirValue::Array(new_slice.clone()); @@ -1928,55 +1777,16 @@ impl Context { let (slice_contents, slice_typ, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(slice_contents, dfg); - - let element_size = slice_typ.element_size(); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let mut popped_elements = Vec::new(); - // Fetch the values we are popping off of the slice. - // In the case of non-nested slice the logic is simple as we do not - // need to account for the internal slice sizes or flattening the index. - // - // The pop back operation results are of the format [slice length, slice contents, popped elements]. - // Thus, we look at the result ids at index 2 and onwards to determine the type of each popped element. - if !slice_typ.is_nested_slice() { - for res in &result_ids[2..] { - let elem = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut var_index, - &[], - )?; - popped_elements.push(elem); - } - } else { - // Fetch the slice sizes of the nested slice. - let slice_sizes = self.slice_sizes.get(&slice_contents); - let mut slice_sizes = - slice_sizes.expect("ICE: should have slice sizes").clone(); - // We want to remove the parent size as we are fetching the child - slice_sizes.remove(0); - - // Multiply the element size against the var index before fetching the flattened index - // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, - // which is how `get_flattened_index` expects its index input. - let element_size_var = self.acir_context.add_constant(element_size); - // We want to use an index one less than the slice length - var_index = self.acir_context.mul_var(var_index, element_size_var)?; - var_index = - self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; - - for res in &result_ids[2..] { - let elem = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut var_index, - &slice_sizes, - )?; - popped_elements.push(elem); - } + for res in &result_ids[2..] 
{ + let elem = + self.array_get_value(&dfg.type_of_value(*res), block_id, &mut var_index)?; + popped_elements.push(elem); } + let slice = self.convert_value(slice_contents, dfg); let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; @@ -1994,11 +1804,13 @@ impl Context { let (slice_contents, slice_typ, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(slice_contents, dfg); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; + let slice = self.convert_value(slice_contents, dfg); + let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; @@ -2010,40 +1822,14 @@ impl Context { // Fetch the values we are popping off of the slice. // In the case of non-nested slice the logic is simple as we do not // need to account for the internal slice sizes or flattening the index. - // - // The pop front operation results are of the format [popped elements, slice length, slice contents]. - // Thus, we look at the result ids up to the element size to determine the type of each popped element. - if !slice_typ.is_nested_slice() { - for res in &result_ids[..element_size] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut var_index, - &[], - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } - } else { - let slice_sizes = self.slice_sizes.get(&slice_contents); - let mut slice_sizes = - slice_sizes.expect("ICE: should have slice sizes").clone(); - // We want to remove the parent size as we are fetching the child - slice_sizes.remove(0); - - for res in &result_ids[..element_size] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut var_index, - &slice_sizes, - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } + for res in &result_ids[..element_size] { + let element = + self.array_get_value(&dfg.type_of_value(*res), block_id, &mut var_index)?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); } + // It is expected that the `popped_elements_size` is the flattened size of the elements, // as the input slice should be a dynamic array which is represented by flat memory. 
new_slice = new_slice.slice(popped_elements_size..); @@ -2059,6 +1845,7 @@ impl Context { let (slice_contents, slice_typ, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); let insert_index = self.convert_value(arguments[2], dfg).into_var()?; @@ -2164,7 +1951,6 @@ impl Context { } } - // let new_slice_val = AcirValue::Array(new_slice); let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { Some(self.init_element_type_sizes_array( &slice_typ, @@ -2189,6 +1975,7 @@ impl Context { let (slice_contents, slice_typ, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); let remove_index = self.convert_value(arguments[2], dfg).into_var()?; @@ -2217,8 +2004,6 @@ impl Context { self.get_flattened_index(&slice_typ, slice_contents, flat_remove_index, dfg)?; // Fetch the values we are remove from the slice. - // In the case of non-nested slice the logic is simple as we do not - // need to account for the internal slice sizes or flattening the index. // As we fetch the values we can determine the size of the removed values // which we will later use for writing the correct resulting slice. let mut popped_elements = Vec::new(); @@ -2226,36 +2011,12 @@ impl Context { // Set a temp index just for fetching from the original slice as `array_get_value` mutates // the index internally. let mut temp_index = flat_user_index; - if !slice_typ.is_nested_slice() { - for res in &result_ids[2..(2 + element_size)] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut temp_index, - &[], - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } - } else { - let slice_sizes = self.slice_sizes.get(&slice_contents); - let mut slice_sizes = - slice_sizes.expect("ICE: should have slice sizes").clone(); - // We want to remove the parent size as we are fetching the child - slice_sizes.remove(0); - - for res in &result_ids[2..(2 + element_size)] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut temp_index, - &slice_sizes, - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } + for res in &result_ids[2..(2 + element_size)] { + let element = + self.array_get_value(&dfg.type_of_value(*res), block_id, &mut temp_index)?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); } // Go through the entire slice argument and determine what value should be written to the new slice. 
diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs index ae53c7705c25..f412def1e760 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -139,7 +139,7 @@ impl Type { } pub(crate) fn is_nested_slice(&self) -> bool { - if let Type::Slice(element_types) = self { + if let Type::Slice(element_types) | Type::Array(element_types, _) = self { element_types.as_ref().iter().any(|typ| typ.contains_slice_element()) } else { false diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs new file mode 100644 index 000000000000..cb9519eeb2ec --- /dev/null +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs @@ -0,0 +1,43 @@ +use crate::ssa::{ir::instruction::Instruction, ssa_gen::Ssa}; + +impl Ssa { + /// A simple SSA pass to go through each instruction and move every `Instruction::Constrain` to immediately + /// after when all of its inputs are available. + #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn bubble_up_constrains(mut self) -> Ssa { + for function in self.functions.values_mut() { + for block in function.reachable_blocks() { + let instructions = function.dfg[block].take_instructions(); + let mut filtered_instructions = Vec::with_capacity(instructions.len()); + + let dfg = &function.dfg; + for instruction in instructions { + let (lhs, rhs) = match dfg[instruction] { + Instruction::Constrain(lhs, rhs, ..) => (lhs, rhs), + _ => { + filtered_instructions.push(instruction); + continue; + } + }; + + let index = filtered_instructions + .iter() + .rev() + .position(|instruction_id| { + let results = dfg.instruction_results(*instruction_id).to_vec(); + results.contains(&lhs) || results.contains(&rhs) + }) + // We iterate through the previous instructions in reverse order so the index is from the + // back of the vector. Subtract from vector length to get correct index. + .map(|reversed_index| filtered_instructions.len() - reversed_index) + .unwrap_or(0); + + filtered_instructions.insert(index, instruction); + } + + *function.dfg[block].instructions_mut() = filtered_instructions; + } + } + self + } +} diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs deleted file mode 100644 index 5ee8e42fe3ac..000000000000 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs +++ /dev/null @@ -1,765 +0,0 @@ -//! This module defines the internal slices data fill pass. -//! The purpose of this pass is to fill out nested slice values represented by SSA array values. -//! "Filling out" a nested slice specifically refers to making a nested slice's internal slice types -//! match up in their size. This pass is necessary for dynamic array operations to work in ACIR gen -//! as we need to have a known size for any memory operations. As slice types do not carry a size we -//! need to make sure all nested internal slices have the same size in order to accurately -//! read from or write to a nested slice. This pass ultimately attaches dummy data to any smaller internal slice types. -//! -//! A simple example: -//! If we have a slice of the type [[Field]] which is of length 2. The internal slices themselves -//! could be of different sizes, such as 3 and 4. An array operation on this nested slice would look -//! something like below: -//! 
array_get [Field 3, [Field 1, Field 1, Field 1], Field 4, [Field 2, Field 2, Field 2, Field 2]], index Field v0 -//! Will get translated into a new instruction like such: -//! array_get [Field 3, [Field 1, Field 1, Field 1, Field 0], Field 4, [Field 2, Field 2, Field 2, Field 2]], index Field v0 -//! -//! -//! TODO(#3188): Currently the pass only works on a single flattened block. This should be updated in followup work. -//! The steps of the pass are as follows: -//! - Process each instruction of the block to collect relevant slice size information. We want to find the maximum size that a nested slice -//! potentially could be. Slices can potentially be set to larger array values or used in intrinsics that increase or shorten their size. -//! - Track all array constants and compute an initial map of their nested slice sizes. The slice sizes map is simply a map of an SSA array value -//! to its array size and then any child slice values that may exist. -//! - We also track a map to resolve a starting array constant to its final possible array value. This map is updated on the appropriate instructions -//! such as ArraySet or any slice intrinsics. -//! - On an ArrayGet operation add the resulting value as a possible child of the original slice. In SSA we will reuse the same memory block -//! for the nested slice and must account for an internal slice being fetched and set to a larger value, otherwise we may have an out of bounds error. -//! Also set the resulting fetched value to have the same internal slice size map as the children of the original array used in the operation. -//! - On an ArraySet operation we set the resulting value to have the same slice sizes map as the original array used in the operation. Like the result of -//! an ArrayGet we need to also add the `value` for an ArraySet as a possible child slice of the original array. -//! - For slice intrinsics we set the resulting value to have the same slice sizes map as the original array the same way as we do in an ArraySet. -//! However, with a slice intrinsic we also increase the size for the respective slice intrinsics. -//! We do not decrement the size on intrinsics that could remove values from a slice. This is because we could potentially go back to the smaller slice size, -//! not fill in the appropriate dummies and then get an out of bounds error later when executing the ACIR. We always want to compute -//! what a slice maximum size could be. -//! - Now we need to add each instruction back except with the updated original array values. -//! - Resolve the original slice value to what its final value would be using the previously computed map. -//! - Find the max size as each layer of the recursive nested slice type. -//! For instance in the example above we have a slice of depth 2 with the max sizes of [2, 4]. -//! - Follow the slice type to check whether the SSA value is under the specified max size. If a slice value -//! is under the max size we then attach dummy data. -//! - Construct a final nested slice with the now attached dummy data and replace the original array in the previously -//! saved ArrayGet and ArraySet instructions. 
- -use crate::ssa::{ - ir::{ - basic_block::BasicBlockId, - dfg::CallStack, - function::{Function, RuntimeType}, - function_inserter::FunctionInserter, - instruction::{Instruction, InstructionId, Intrinsic}, - post_order::PostOrder, - types::Type, - value::{Value, ValueId}, - }, - ssa_gen::Ssa, -}; - -use acvm::FieldElement; -use fxhash::FxHashMap as HashMap; - -impl Ssa { - #[tracing::instrument(level = "trace", skip(self))] - pub(crate) fn fill_internal_slices(mut self) -> Ssa { - for function in self.functions.values_mut() { - // This pass is only necessary for generating ACIR and thus we should not - // process Brillig functions. - // The pass is also currently only setup to handle a function with a single flattened block. - // For complex Brillig functions we can expect this pass to panic. - if function.runtime() == RuntimeType::Acir { - let databus = function.dfg.data_bus.clone(); - let mut context = Context::new(function); - context.process_blocks(); - // update the databus with the new array instructions - function.dfg.data_bus = databus.map_values(|t| context.inserter.resolve(t)); - } - } - self - } -} - -struct Context<'f> { - post_order: PostOrder, - inserter: FunctionInserter<'f>, - - /// Maps SSA array values representing a slice's contents to its updated array value - /// after an array set or a slice intrinsic operation. - /// Maps original value -> result - mapped_slice_values: HashMap<ValueId, ValueId>, - - /// Maps an updated array value following an array operation to its previous value. - /// When used in conjunction with `mapped_slice_values` we form a two way map of all array - /// values being used in array operations. - /// Maps result -> original value - slice_parents: HashMap<ValueId, ValueId>, -} - -impl<'f> Context<'f> { - fn new(function: &'f mut Function) -> Self { - let post_order = PostOrder::with_function(function); - let inserter = FunctionInserter::new(function); - - Context { - post_order, - inserter, - mapped_slice_values: HashMap::default(), - slice_parents: HashMap::default(), - } - } - - fn process_blocks(&mut self) { - let mut block_order = PostOrder::with_function(self.inserter.function).into_vec(); - block_order.reverse(); - for block in block_order { - self.process_block(block); - } - } - - fn process_block(&mut self, block: BasicBlockId) { - // Fetch SSA values potentially with internal slices - let instructions = self.inserter.function.dfg[block].take_instructions(); - - // Values containing nested slices to be replaced - let mut slice_values = Vec::new(); - // Maps SSA array ID representing slice contents to its length and a list of its potential internal slices - // This map is constructed once for an array constant and is then updated - // according to the rules in `collect_slice_information`. - let mut slice_sizes: HashMap<ValueId, (usize, Vec<ValueId>)> = HashMap::default(); - - // Update the slice sizes map to help find the potential max size of each nested slice. - for instruction in instructions.iter() { - self.collect_slice_information(*instruction, &mut slice_values, &mut slice_sizes); - } - - // Add back every instruction with the updated nested slices. - for instruction in instructions { - self.push_updated_instruction(instruction, &slice_values, &slice_sizes, block); - } - - self.inserter.map_terminator_in_place(block); - } - - /// Determine how the slice sizes map needs to be updated according to the provided instruction. 
- fn collect_slice_information( - &mut self, - instruction: InstructionId, - slice_values: &mut Vec<ValueId>, - slice_sizes: &mut HashMap<ValueId, (usize, Vec<ValueId>)>, - ) { - let results = self.inserter.function.dfg.instruction_results(instruction); - match &self.inserter.function.dfg[instruction] { - Instruction::ArrayGet { array, .. } => { - let array_typ = self.inserter.function.dfg.type_of_value(*array); - let array_value = &self.inserter.function.dfg[*array]; - // If we have an SSA value containing nested slices we should mark it - // as a slice that potentially requires to be filled with dummy data. - if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() - { - slice_values.push(*array); - // Initial insertion into the slice sizes map - // Any other insertions should only occur if the value is already - // a part of the map. - self.compute_slice_sizes(*array, slice_sizes); - } - - let res_typ = self.inserter.function.dfg.type_of_value(results[0]); - if res_typ.contains_slice_element() { - if let Some(inner_sizes) = slice_sizes.get_mut(array) { - // Include the result in the parent array potential children - // If the result has internal slices and is called in an array set - // we could potentially have a new larger slice which we need to account for - inner_sizes.1.push(results[0]); - self.slice_parents.insert(results[0], *array); - - let inner_sizes_iter = inner_sizes.1.clone(); - for slice_value in inner_sizes_iter { - let inner_slice = slice_sizes.get(&slice_value).unwrap_or_else(|| { - panic!("ICE: should have inner slice set for {slice_value}") - }); - slice_sizes.insert(results[0], inner_slice.clone()); - if slice_value != results[0] { - self.mapped_slice_values.insert(slice_value, results[0]); - } - } - } - } - } - Instruction::ArraySet { array, value, .. } => { - let array_typ = self.inserter.function.dfg.type_of_value(*array); - let array_value = &self.inserter.function.dfg[*array]; - // If we have an SSA value containing nested slices we should mark it - // as a slice that potentially requires to be filled with dummy data. - if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() - { - slice_values.push(*array); - // Initial insertion into the slice sizes map - // Any other insertions should only occur if the value is already - // a part of the map. - self.compute_slice_sizes(*array, slice_sizes); - } - - let value_typ = self.inserter.function.dfg.type_of_value(*value); - if value_typ.contains_slice_element() { - self.compute_slice_sizes(*value, slice_sizes); - - let inner_sizes = slice_sizes.get_mut(array).expect("ICE expected slice sizes"); - inner_sizes.1.push(*value); - } - - if let Some(inner_sizes) = slice_sizes.get_mut(array) { - let inner_sizes = inner_sizes.clone(); - - slice_sizes.insert(results[0], inner_sizes); - - self.mapped_slice_values.insert(*array, results[0]); - self.slice_parents.insert(results[0], *array); - } - } - Instruction::Call { func, arguments } => { - let func = &self.inserter.function.dfg[*func]; - if let Value::Intrinsic(intrinsic) = func { - let (argument_index, result_index) = match intrinsic { - Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SlicePopBack - | Intrinsic::SliceInsert - | Intrinsic::SliceRemove => (1, 1), - // `pop_front` returns the popped element, and then the respective slice. - // This means in the case of a slice with structs, the result index of the popped slice - // will change depending on the number of elements in the struct. 
- // For example, a slice with four elements will look as such in SSA: - // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) - // where v7 is the slice length and v8 is the popped slice itself. - Intrinsic::SlicePopFront => (1, results.len() - 1), - _ => return, - }; - let slice_contents = arguments[argument_index]; - match intrinsic { - Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SliceInsert => { - for arg in &arguments[(argument_index + 1)..] { - let element_typ = self.inserter.function.dfg.type_of_value(*arg); - if element_typ.contains_slice_element() { - slice_values.push(*arg); - self.compute_slice_sizes(*arg, slice_sizes); - } - } - if let Some(inner_sizes) = slice_sizes.get_mut(&slice_contents) { - inner_sizes.0 += 1; - - let inner_sizes = inner_sizes.clone(); - slice_sizes.insert(results[result_index], inner_sizes); - - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - self.slice_parents.insert(results[result_index], slice_contents); - } - } - Intrinsic::SlicePopBack - | Intrinsic::SliceRemove - | Intrinsic::SlicePopFront => { - // We do not decrement the size on intrinsics that could remove values from a slice. - // This is because we could potentially go back to the smaller slice and not fill in dummies. - // This pass should be tracking the potential max that a slice ***could be*** - if let Some(inner_sizes) = slice_sizes.get(&slice_contents) { - let inner_sizes = inner_sizes.clone(); - slice_sizes.insert(results[result_index], inner_sizes); - - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - self.slice_parents.insert(results[result_index], slice_contents); - } - } - _ => {} - } - } - } - _ => {} - } - } - - fn push_updated_instruction( - &mut self, - instruction: InstructionId, - slice_values: &[ValueId], - slice_sizes: &HashMap<ValueId, (usize, Vec<ValueId>)>, - block: BasicBlockId, - ) { - match &self.inserter.function.dfg[instruction] { - Instruction::ArrayGet { array, .. } | Instruction::ArraySet { array, .. } => { - if slice_values.contains(array) { - let (new_array_op_instr, call_stack) = - self.get_updated_array_op_instr(*array, slice_sizes, instruction); - self.inserter.push_instruction_value( - new_array_op_instr, - instruction, - block, - call_stack, - ); - } else { - self.inserter.push_instruction(instruction, block); - } - } - Instruction::Call { func: _, arguments } => { - let mut args_to_replace = Vec::new(); - for (i, arg) in arguments.iter().enumerate() { - let element_typ = self.inserter.function.dfg.type_of_value(*arg); - if slice_values.contains(arg) && element_typ.contains_slice_element() { - args_to_replace.push((i, *arg)); - } - } - if args_to_replace.is_empty() { - self.inserter.push_instruction(instruction, block); - } else { - // Using the original slice is ok to do as during collection of slice information - // we guarantee that only the arguments to slice intrinsic calls can be replaced. 
- let slice_contents = arguments[1]; - - let element_typ = self.inserter.function.dfg.type_of_value(arguments[1]); - let elem_depth = Self::compute_nested_slice_depth(&element_typ); - - let mut max_sizes = Vec::new(); - max_sizes.resize(elem_depth, 0); - // We want the max for the parent of the argument - let parent = self.resolve_slice_parent(slice_contents); - self.compute_slice_max_sizes(parent, slice_sizes, &mut max_sizes, 0); - - for (index, arg) in args_to_replace { - let element_typ = self.inserter.function.dfg.type_of_value(arg); - max_sizes.remove(0); - let new_array = - self.attach_slice_dummies(&element_typ, Some(arg), false, &max_sizes); - - let instruction_id = instruction; - let (instruction, call_stack) = - self.inserter.map_instruction(instruction_id); - let new_call_instr = match instruction { - Instruction::Call { func, mut arguments } => { - arguments[index] = new_array; - Instruction::Call { func, arguments } - } - _ => panic!("Expected call instruction"), - }; - self.inserter.push_instruction_value( - new_call_instr, - instruction_id, - block, - call_stack, - ); - } - } - } - _ => { - self.inserter.push_instruction(instruction, block); - } - } - } - - /// Construct an updated ArrayGet or ArraySet instruction where the array value - /// has been replaced by a newly filled in array according to the max internal - /// slice sizes. - fn get_updated_array_op_instr( - &mut self, - array_id: ValueId, - slice_sizes: &HashMap<ValueId, (usize, Vec<ValueId>)>, - instruction: InstructionId, - ) -> (Instruction, CallStack) { - let mapped_slice_value = self.resolve_slice_value(array_id); - - let (current_size, _) = slice_sizes - .get(&mapped_slice_value) - .unwrap_or_else(|| panic!("should have slice sizes: {mapped_slice_value}")); - - let mut max_sizes = Vec::new(); - - let typ = self.inserter.function.dfg.type_of_value(array_id); - let depth = Self::compute_nested_slice_depth(&typ); - max_sizes.resize(depth, 0); - - max_sizes[0] = *current_size; - self.compute_slice_max_sizes(array_id, slice_sizes, &mut max_sizes, 1); - - let new_array = self.attach_slice_dummies(&typ, Some(array_id), true, &max_sizes); - - let instruction_id = instruction; - let (instruction, call_stack) = self.inserter.map_instruction(instruction_id); - let new_array_op_instr = match instruction { - Instruction::ArrayGet { index, .. } => { - Instruction::ArrayGet { array: new_array, index } - } - Instruction::ArraySet { index, value, .. 
} => { - Instruction::ArraySet { array: new_array, index, value } - } - _ => panic!("Expected array set"), - }; - - (new_array_op_instr, call_stack) - } - - fn attach_slice_dummies( - &mut self, - typ: &Type, - value: Option<ValueId>, - is_parent_slice: bool, - max_sizes: &[usize], - ) -> ValueId { - match typ { - Type::Numeric(_) => { - if let Some(value) = value { - self.inserter.resolve(value) - } else { - let zero = FieldElement::zero(); - self.inserter.function.dfg.make_constant(zero, Type::field()) - } - } - Type::Array(element_types, len) => { - if let Some(value) = value { - self.inserter.resolve(value) - } else { - let mut array = im::Vector::new(); - for _ in 0..*len { - for typ in element_types.iter() { - array.push_back(self.attach_slice_dummies(typ, None, false, max_sizes)); - } - } - self.inserter.function.dfg.make_array(array, typ.clone()) - } - } - Type::Slice(element_types) => { - let (current_size, max_sizes) = - max_sizes.split_first().expect("ICE: Missing internal slice max size"); - let mut max_size = *current_size; - if let Some(value) = value { - let mut slice = im::Vector::new(); - - let value = self.inserter.function.dfg[value].clone(); - let array = match value { - Value::Array { array, .. } => array, - _ => { - panic!("Expected an array value"); - } - }; - - if is_parent_slice { - max_size = array.len() / element_types.len(); - } - for i in 0..max_size { - for (element_index, element_type) in element_types.iter().enumerate() { - let index_usize = i * element_types.len() + element_index; - let valid_index = index_usize < array.len(); - let maybe_value = - if valid_index { Some(array[index_usize]) } else { None }; - slice.push_back(self.attach_slice_dummies( - element_type, - maybe_value, - false, - max_sizes, - )); - } - } - - self.inserter.function.dfg.make_array(slice, typ.clone()) - } else { - let mut slice = im::Vector::new(); - for _ in 0..max_size { - for typ in element_types.iter() { - slice.push_back(self.attach_slice_dummies(typ, None, false, max_sizes)); - } - } - self.inserter.function.dfg.make_array(slice, typ.clone()) - } - } - Type::Reference(_) => { - unreachable!("ICE: Generating dummy data for references is unsupported") - } - Type::Function => { - unreachable!("ICE: Generating dummy data for functions is unsupported") - } - } - } - - // This methods computes a map representing a nested slice. - // The method also automatically computes the given max slice size - // at each depth of the recursive type. 
- // For example if we had a next slice - fn compute_slice_sizes( - &self, - array_id: ValueId, - slice_sizes: &mut HashMap<ValueId, (usize, Vec<ValueId>)>, - ) { - if let Value::Array { array, typ } = &self.inserter.function.dfg[array_id].clone() { - if let Type::Slice(_) = typ { - let element_size = typ.element_size(); - let len = array.len() / element_size; - let mut slice_value = (len, vec![]); - for value in array { - let typ = self.inserter.function.dfg.type_of_value(*value); - if let Type::Slice(_) = typ { - slice_value.1.push(*value); - self.compute_slice_sizes(*value, slice_sizes); - } - } - // Mark the correct max size based upon an array values internal structure - let mut max_size = 0; - for inner_value in slice_value.1.iter() { - let inner_slice = - slice_sizes.get(inner_value).expect("ICE: should have inner slice set"); - if inner_slice.0 > max_size { - max_size = inner_slice.0; - } - } - for inner_value in slice_value.1.iter() { - let inner_slice = - slice_sizes.get_mut(inner_value).expect("ICE: should have inner slice set"); - if inner_slice.0 < max_size { - inner_slice.0 = max_size; - } - } - slice_sizes.insert(array_id, slice_value); - } - } - } - - /// Determine the maximum possible size of an internal slice at each - /// layer of a nested slice. - /// - /// If the slice map is incorrectly formed the function will exceed - /// the type's nested slice depth and panic. - fn compute_slice_max_sizes( - &self, - array_id: ValueId, - slice_sizes: &HashMap<ValueId, (usize, Vec<ValueId>)>, - max_sizes: &mut Vec<usize>, - depth: usize, - ) { - let array_id = self.resolve_slice_value(array_id); - let (current_size, inner_slices) = slice_sizes - .get(&array_id) - .unwrap_or_else(|| panic!("should have slice sizes: {array_id}")); - - if inner_slices.is_empty() { - return; - } - - let mut max = *current_size; - for inner_slice in inner_slices.iter() { - let inner_slice = &self.resolve_slice_value(*inner_slice); - - let (inner_size, _) = slice_sizes[inner_slice]; - if inner_size > max { - max = inner_size; - } - self.compute_slice_max_sizes(*inner_slice, slice_sizes, max_sizes, depth + 1); - } - - if max > max_sizes[depth] { - max_sizes[depth] = max; - } - } - - /// Compute the depth of nested slices in a given Type. - /// The depth follows the recursive type structure of a slice. - fn compute_nested_slice_depth(typ: &Type) -> usize { - let mut depth = 0; - if let Type::Slice(element_types) = typ { - depth += 1; - for typ in element_types.as_ref() { - depth += Self::compute_nested_slice_depth(typ); - } - } - depth - } - - /// Resolves a ValueId representing a slice's contents to its updated value. - /// If there is no resolved value for the supplied value, the value which - /// was passed to the method is returned. - fn resolve_slice_value(&self, array_id: ValueId) -> ValueId { - match self.mapped_slice_values.get(&array_id) { - Some(value) => self.resolve_slice_value(*value), - None => array_id, - } - } - - /// Resolves a ValueId representing a slice's contents to its previous value. - /// If there is no resolved parent value it means we have the original slice value - /// and the value which was passed to the method is returned. 
- fn resolve_slice_parent(&self, array_id: ValueId) -> ValueId { - match self.slice_parents.get(&array_id) { - Some(value) => self.resolve_slice_parent(*value), - None => array_id, - } - } -} - -#[cfg(test)] -mod tests { - - use std::rc::Rc; - - use acvm::FieldElement; - use im::vector; - - use crate::ssa::{ - function_builder::FunctionBuilder, - ir::{ - dfg::DataFlowGraph, - function::RuntimeType, - instruction::{BinaryOp, Instruction}, - map::Id, - types::Type, - value::ValueId, - }, - }; - - #[test] - fn test_simple_nested_slice() { - // We want to test that a nested slice with two internal slices of primitive types - // fills the smaller internal slice with dummy data to match the length of the - // larger internal slice. - - // Note that slices are represented by a tuple of (length, contents). - // The type of the nested slice in this test is [[Field]]. - // - // This is the original SSA: - // acir fn main f0 { - // b0(v0: Field): - // v2 = lt v0, Field 2 - // constrain v2 == Field 1 'Index out of bounds' - // v11 = array_get [[Field 3, [Field 1, Field 1, Field 1]], [Field 4, [Field 2, Field 2, Field 2, Field 2]]], index Field v0 - // constrain v11 == Field 4 - // return - // } - - let main_id = Id::test_new(0); - let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); - - let main_v0 = builder.add_parameter(Type::field()); - - let two = builder.field_constant(2_u128); - // Every slice access checks against the dynamic slice length - let slice_access_check = builder.insert_binary(main_v0, BinaryOp::Lt, two); - let one = builder.field_constant(1_u128); - builder.insert_constrain(slice_access_check, one, Some("Index out of bounds".to_owned())); - - let field_element_type = Rc::new(vec![Type::field()]); - let inner_slice_contents_type = Type::Slice(field_element_type); - - let inner_slice_small_len = builder.field_constant(3_u128); - let inner_slice_small_contents = - builder.array_constant(vector![one, one, one], inner_slice_contents_type.clone()); - - let inner_slice_big_len = builder.field_constant(4_u128); - let inner_slice_big_contents = - builder.array_constant(vector![two, two, two, two], inner_slice_contents_type.clone()); - - let outer_slice_element_type = Rc::new(vec![Type::field(), inner_slice_contents_type]); - let outer_slice_type = Type::Slice(outer_slice_element_type); - - let outer_slice_contents = builder.array_constant( - vector![ - inner_slice_small_len, - inner_slice_small_contents, - inner_slice_big_len, - inner_slice_big_contents - ], - outer_slice_type, - ); - // Fetching the length of the second nested slice - // We must use a parameter to main as we do not want the array operation to be simplified out during SSA gen. The filling of internal slices - // is necessary for dynamic nested slices and thus we want to generate the SSA that ACIR gen would be converting. - let array_get_res = builder.insert_array_get(outer_slice_contents, main_v0, Type::field()); - - let four = builder.field_constant(4_u128); - builder.insert_constrain(array_get_res, four, None); - builder.terminate_with_return(vec![]); - - // Note that now the smaller internal slice should have extra dummy data that matches the larger internal slice's size.
- // - // Expected SSA: - // acir fn main f0 { - // b0(v0: Field): - // v10 = lt v0, Field 2 - // constrain v10 == Field 1 'Index out of bounds' - // v18 = array_get [Field 3, [Field 1, Field 1, Field 1, Field 0], Field 4, [Field 2, Field 2, Field 2, Field 2]], index v0 - // constrain v18 == Field 4 - // return - // } - - let ssa = builder.finish().fill_internal_slices(); - - let func = ssa.main(); - let block_id = func.entry_block(); - - // Check the array get expression has replaced its nested slice with a new slice - // where the internal slice has dummy data attached to it. - let instructions = func.dfg[block_id].instructions(); - let array_id = instructions - .iter() - .find_map(|instruction| { - if let Instruction::ArrayGet { array, .. } = func.dfg[*instruction] { - Some(array) - } else { - None - } - }) - .expect("Should find array_get instruction"); - - let (array_constant, _) = - func.dfg.get_array_constant(array_id).expect("should have an array constant"); - - let inner_slice_small_len = func - .dfg - .get_numeric_constant(array_constant[0]) - .expect("should have a numeric constant"); - assert_eq!( - inner_slice_small_len, - FieldElement::from(3u128), - "The length of the smaller internal slice should be unchanged" - ); - - let (inner_slice_small_contents, _) = - func.dfg.get_array_constant(array_constant[1]).expect("should have an array constant"); - let small_capacity = inner_slice_small_contents.len(); - assert_eq!(small_capacity, 4, "The inner slice contents should contain dummy element"); - - compare_array_constants(&inner_slice_small_contents, &[1, 1, 1, 0], &func.dfg); - - let inner_slice_big_len = func - .dfg - .get_numeric_constant(array_constant[2]) - .expect("should have a numeric constant"); - assert_eq!( - inner_slice_big_len, - FieldElement::from(4u128), - "The length of the larger internal slice should be unchanged" - ); - - let (inner_slice_big_contents, _) = - func.dfg.get_array_constant(array_constant[3]).expect("should have an array constant"); - let big_capacity = inner_slice_big_contents.len(); - assert_eq!( - small_capacity, big_capacity, - "The length of both internal slice contents should be the same" - ); - - compare_array_constants(&inner_slice_big_contents, &[2u128; 4], &func.dfg); - } - - fn compare_array_constants( - got_list: &im::Vector<ValueId>, - expected_list: &[u128], - dfg: &DataFlowGraph, - ) { - for i in 0..got_list.len() { - let got_value = - dfg.get_numeric_constant(got_list[i]).expect("should have a numeric constant"); - assert_eq!( - got_value, - FieldElement::from(expected_list[i]), - "Value is different than expected" - ); - } - } -} diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 95784194d284..71725422a7a9 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -5,10 +5,10 @@ //! Generally, these passes are also expected to minimize the final amount of instructions.
mod array_use; mod assert_constant; +mod bubble_up_constrains; mod constant_folding; mod defunctionalize; mod die; -mod fill_internal_slices; pub(crate) mod flatten_cfg; mod inlining; mod mem2reg; diff --git a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index f0fc482cae0c..c768ea96f8f3 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -491,7 +491,7 @@ pub(crate) fn check_methods_signatures( } // We also need to bind the traits generics to the trait's generics on the impl - for ((_, generic), binding) in the_trait.generics.iter().zip(trait_generics) { + for (generic, binding) in the_trait.generics.iter().zip(trait_generics) { generic.bind(binding); } @@ -599,7 +599,7 @@ pub(crate) fn check_methods_signatures( the_trait.set_methods(trait_methods); the_trait.self_type_typevar.unbind(the_trait.self_type_typevar_id); - for (old_id, generic) in &the_trait.generics { - generic.unbind(*old_id); + for generic in &the_trait.generics { + generic.unbind(generic.id()); } } diff --git a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 2e6eb3992ff4..3cd60c33b8b0 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -20,7 +20,7 @@ use super::{ }, errors::{DefCollectorErrorKind, DuplicateType}, }; -use crate::hir::def_map::{parse_file, LocalModuleId, ModuleData, ModuleId}; +use crate::hir::def_map::{LocalModuleId, ModuleData, ModuleId}; use crate::hir::resolution::import::ImportDirective; use crate::hir::Context; @@ -555,7 +555,7 @@ impl<'a> ModCollector<'a> { context.visited_files.insert(child_file_id, location); // Parse the AST for the module we just found and then recursively look for its defs - let (ast, parsing_errors) = parse_file(&context.file_manager, child_file_id); + let (ast, parsing_errors) = context.parsed_file_results(child_file_id); let ast = ast.into_sorted(); errors.extend( diff --git a/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs index d60ceffa9af0..8c985e88e0b5 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -87,7 +87,7 @@ impl CrateDefMap { // First parse the root file.
let root_file_id = context.crate_graph[crate_id].root_file_id; - let (ast, parsing_errors) = parse_file(&context.file_manager, root_file_id); + let (ast, parsing_errors) = context.parsed_file_results(root_file_id); let mut ast = ast.into_sorted(); for macro_processor in &macro_processors { diff --git a/noir/compiler/noirc_frontend/src/hir/mod.rs b/noir/compiler/noirc_frontend/src/hir/mod.rs index c62f357167f0..2124b5281f4f 100644 --- a/noir/compiler/noirc_frontend/src/hir/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/mod.rs @@ -7,18 +7,22 @@ pub mod type_check; use crate::graph::{CrateGraph, CrateId}; use crate::hir_def::function::FuncMeta; use crate::node_interner::{FuncId, NodeInterner, StructId}; +use crate::parser::ParserError; +use crate::ParsedModule; use def_map::{Contract, CrateDefMap}; use fm::FileManager; use noirc_errors::Location; use std::borrow::Cow; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap}; use self::def_map::TestFunction; +pub type ParsedFiles = HashMap<fm::FileId, (ParsedModule, Vec<ParserError>)>; + /// Helper object which groups together several useful context objects used /// during name resolution. Once name resolution is finished, only the /// def_interner is required for type inference and monomorphization. -pub struct Context<'file_manager> { +pub struct Context<'file_manager, 'parsed_files> { pub def_interner: NodeInterner, pub crate_graph: CrateGraph, pub(crate) def_maps: BTreeMap<CrateId, CrateDefMap>, @@ -30,6 +34,11 @@ pub struct Context<'file_manager> { /// A map of each file that already has been visited from a prior `mod foo;` declaration. /// This is used to issue an error if a second `mod foo;` is declared to the same file. pub visited_files: BTreeMap<fm::FileId, Location>, + + // A map of all parsed files. + // Same as the file manager, we take ownership of the parsed files in the WASM context. + // Parsed files is also read only. + pub parsed_files: Cow<'parsed_files, ParsedFiles>, } #[derive(Debug, Copy, Clone)] @@ -39,27 +48,36 @@ pub enum FunctionNameMatch<'a> { Anything, Exact(&'a str), Contains(&'a str), } -impl Context<'_> { - pub fn new(file_manager: FileManager) -> Context<'static> { +impl Context<'_, '_> { + pub fn new(file_manager: FileManager, parsed_files: ParsedFiles) -> Context<'static, 'static> { Context { def_interner: NodeInterner::default(), def_maps: BTreeMap::new(), visited_files: BTreeMap::new(), crate_graph: CrateGraph::default(), file_manager: Cow::Owned(file_manager), + parsed_files: Cow::Owned(parsed_files), } } - pub fn from_ref_file_manager(file_manager: &FileManager) -> Context<'_> { + pub fn from_ref_file_manager<'file_manager, 'parsed_files>( + file_manager: &'file_manager FileManager, + parsed_files: &'parsed_files ParsedFiles, + ) -> Context<'file_manager, 'parsed_files> { Context { def_interner: NodeInterner::default(), def_maps: BTreeMap::new(), visited_files: BTreeMap::new(), crate_graph: CrateGraph::default(), file_manager: Cow::Borrowed(file_manager), + parsed_files: Cow::Borrowed(parsed_files), } } + pub fn parsed_file_results(&self, file_id: fm::FileId) -> (ParsedModule, Vec<ParserError>) { + self.parsed_files.get(&file_id).expect("noir file wasn't parsed").clone() + } + /// Returns the CrateDefMap for a given CrateId. /// It is perfectly valid for the compiler to look /// up a CrateDefMap and it is not available.
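The hir/mod.rs hunk above is the heart of this change: every file is parsed once into a read-only ParsedFiles map, and each Context then either owns that map (Cow::Owned, used by WASM) or borrows it (Cow::Borrowed, used by nargo and the LSP). Below is a minimal sketch of the intended call pattern, using only items introduced in this diff; the helper function itself is illustrative and not part of the patch.

use fm::FileManager;
use noirc_frontend::hir::{Context, ParsedFiles};

// Illustrative only: two contexts sharing a single parse of the workspace.
// Both fields are Cow::Borrowed, so no file is lexed or parsed twice, and
// Context::parsed_file_results becomes a plain map lookup.
fn two_contexts<'a>(
    fm: &'a FileManager,
    parsed_files: &'a ParsedFiles,
) -> (Context<'a, 'a>, Context<'a, 'a>) {
    (
        Context::from_ref_file_manager(fm, parsed_files),
        Context::from_ref_file_manager(fm, parsed_files),
    )
}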
diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs index bb7acf1037b1..8f656751fe17 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -39,7 +39,7 @@ use crate::{ use crate::{ ArrayLiteral, ContractFunctionType, Distinctness, ForRange, FunctionDefinition, FunctionReturnType, FunctionVisibility, Generics, LValue, NoirStruct, NoirTypeAlias, Param, - Path, PathKind, Pattern, Shared, StructType, Type, TypeAliasType, TypeBinding, TypeVariable, + Path, PathKind, Pattern, Shared, StructType, Type, TypeAliasType, TypeVariable, TypeVariableKind, UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, Visibility, ERROR_IDENT, }; @@ -643,7 +643,7 @@ impl<'a> Resolver<'a> { None => { let id = self.interner.next_type_variable_id(); let typevar = TypeVariable::unbound(id); - new_variables.push((id, typevar.clone())); + new_variables.push(typevar.clone()); // 'Named'Generic is a bit of a misnomer here, we want a type variable that // won't be bound over but this one has no name since we do not currently @@ -773,7 +773,7 @@ impl<'a> Resolver<'a> { self.generics.push((name, typevar.clone(), span)); } - (id, typevar) + typevar }) } @@ -783,7 +783,7 @@ pub fn add_existing_generics(&mut self, names: &UnresolvedGenerics, generics: &Generics) { assert_eq!(names.len(), generics.len()); - for (name, (_id, typevar)) in names.iter().zip(generics) { + for (name, typevar) in names.iter().zip(generics) { self.add_existing_generic(&name.0.contents, name.0.span(), typevar.clone()); } } @@ -851,14 +851,7 @@ impl<'a> Resolver<'a> { let attributes = func.attributes().clone(); - let mut generics = - vecmap(self.generics.clone(), |(name, typevar, _)| match &*typevar.borrow() { - TypeBinding::Unbound(id) => (*id, typevar.clone()), - TypeBinding::Bound(binding) => { - unreachable!("Expected {} to be unbound, but it is bound to {}", name, binding) - } - }); - + let mut generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); let mut parameters = vec![]; let mut parameter_types = vec![]; diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs b/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs index f08d9c50c847..8f966be312ba 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -18,7 +18,7 @@ use crate::{ }, hir_def::traits::{TraitConstant, TraitFunction, TraitImpl, TraitType}, node_interner::{FuncId, NodeInterner, TraitId}, - Generics, Path, Shared, TraitItem, Type, TypeBinding, TypeVariable, TypeVariableKind, + Generics, Path, Shared, TraitItem, Type, TypeVariable, TypeVariableKind, }; use super::{ @@ -42,8 +42,7 @@ pub(crate) fn resolve_traits( for (trait_id, unresolved_trait) in traits { let generics = vecmap(&unresolved_trait.trait_def.generics, |_| { - let id = context.def_interner.next_type_variable_id(); - (id, TypeVariable::unbound(id)) + TypeVariable::unbound(context.def_interner.next_type_variable_id()) }); // Resolve order @@ -142,17 +141,7 @@ fn resolve_trait_methods( let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); let return_type = resolver.resolve_type(return_type.get_type().into_owned()); - let generics = - vecmap(resolver.get_generics(), |(_, type_var, _)| match &*type_var.borrow() { -
TypeBinding::Unbound(id) => (*id, type_var.clone()), - TypeBinding::Bound(binding) => { - unreachable!("Trait generic was bound to {binding}") - } - }); - - // Ensure the trait is generic over the Self type as well - // let the_trait = resolver.interner.get_trait(trait_id); - // generics.push((the_trait.self_type_typevar_id, the_trait.self_type_typevar.clone())); + let generics = vecmap(resolver.get_generics(), |(_, type_var, _)| type_var.clone()); let default_impl_list: Vec<_> = unresolved_trait .fns_with_default_impl @@ -465,8 +454,7 @@ pub(crate) fn resolve_trait_impls( methods: vecmap(&impl_methods, |(_, func_id)| *func_id), }); - let impl_generics = - vecmap(impl_generics, |(_, type_variable, _)| (type_variable.id(), type_variable)); + let impl_generics = vecmap(impl_generics, |(_, type_variable, _)| type_variable); if let Err((prev_span, prev_file)) = interner.add_trait_implementation( self_type.clone(), diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs index b583959bfb1a..58cf4e7b289f 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -595,7 +595,7 @@ impl<'interner> TypeChecker<'interner> { .generics .iter() .zip(generics) - .map(|((id, var), arg)| (*id, (var.clone(), arg))) + .map(|(var, arg)| (var.id(), (var.clone(), arg))) .collect(); (method.typ.clone(), method.arguments().len(), generic_bindings) diff --git a/noir/compiler/noirc_frontend/src/hir_def/traits.rs b/noir/compiler/noirc_frontend/src/hir_def/traits.rs index 85c292ac5f3b..16b9899039f9 100644 --- a/noir/compiler/noirc_frontend/src/hir_def/traits.rs +++ b/noir/compiler/noirc_frontend/src/hir_def/traits.rs @@ -147,7 +147,7 @@ impl TraitFunction { } } - pub fn generics(&self) -> &[(TypeVariableId, TypeVariable)] { + pub fn generics(&self) -> &[TypeVariable] { match &self.typ { Type::Function(..) => &[], Type::Forall(generics, _) => generics, diff --git a/noir/compiler/noirc_frontend/src/hir_def/types.rs b/noir/compiler/noirc_frontend/src/hir_def/types.rs index f59341e5b1ca..d63bec27bdc6 100644 --- a/noir/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/compiler/noirc_frontend/src/hir_def/types.rs @@ -207,11 +207,8 @@ pub struct StructType { pub location: Location, } -/// Corresponds to generic lists such as `<A, B>` in the source -/// program. The `TypeVariableId` portion is used to match two -/// type variables to check for equality, while the `TypeVariable` is -/// the actual part that can be mutated to bind it to another type. -pub type Generics = Vec<(TypeVariableId, TypeVariable)>; +/// Corresponds to generic lists such as `<A, B>` in the source program.
+pub type Generics = Vec<TypeVariable>; impl std::hash::Hash for StructType { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { @@ -260,7 +257,7 @@ impl StructType { .generics .iter() .zip(generic_args) - .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone()))) + .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) .collect(); (typ.substitute(&substitutions), i) @@ -276,7 +273,7 @@ .generics .iter() .zip(generic_args) - .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone()))) + .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) .collect(); vecmap(&self.fields, |(name, typ)| { @@ -337,7 +334,7 @@ impl std::fmt::Display for TypeAliasType { write!(f, "{}", self.name)?; if !self.generics.is_empty() { - let generics = vecmap(&self.generics, |(_, binding)| binding.borrow().to_string()); + let generics = vecmap(&self.generics, |binding| binding.borrow().to_string()); write!(f, "{}", generics.join(", "))?; } @@ -369,7 +366,7 @@ impl TypeAliasType { .generics .iter() .zip(generic_args) - .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone()))) + .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) .collect(); self.typ.substitute(&substitutions) @@ -707,7 +704,7 @@ impl Type { /// Takes a monomorphic type and generalizes it over each of the type variables in the /// given type bindings, ignoring what each type variable is bound to in the TypeBindings. pub(crate) fn generalize_from_substitutions(self, type_bindings: TypeBindings) -> Type { - let polymorphic_type_vars = vecmap(type_bindings, |(id, (type_var, _))| (id, type_var)); + let polymorphic_type_vars = vecmap(type_bindings, |(_, (type_var, _))| type_var); Type::Forall(polymorphic_type_vars, Box::new(self)) } @@ -801,7 +798,7 @@ impl std::fmt::Display for Type { }, Type::Constant(x) => x.fmt(f), Type::Forall(typevars, typ) => { - let typevars = vecmap(typevars, |(var, _)| var.to_string()); + let typevars = vecmap(typevars, |var| var.id().to_string()); write!(f, "forall {}. {}", typevars.join(" "), typ) } Type::Function(args, ret, env) => { @@ -1307,9 +1304,9 @@ ) -> (Type, TypeBindings) { match self { Type::Forall(typevars, typ) => { - for (id, var) in typevars { + for var in typevars { bindings - .entry(*id) + .entry(var.id()) .or_insert_with(|| (var.clone(), interner.next_type_variable())); } @@ -1328,9 +1325,9 @@ Type::Forall(typevars, typ) => { let replacements = typevars .iter() - .map(|(id, var)| { + .map(|var| { let new = interner.next_type_variable(); - (*id, (var.clone(), new)) + (var.id(), (var.clone(), new)) }) .collect(); @@ -1428,8 +1425,8 @@ Type::Forall(typevars, typ) => { // Trying to substitute a variable defined within a nested Forall // is usually impossible and indicative of an error in the type checker somewhere.
- for (var, _) in typevars { - assert!(!type_bindings.contains_key(var)); + for var in typevars { + assert!(!type_bindings.contains_key(&var.id())); } let typ = Box::new(typ.substitute_helper(type_bindings, substitute_bound_typevars)); Type::Forall(typevars.clone(), typ) @@ -1476,7 +1473,7 @@ } } Type::Forall(typevars, typ) => { - !typevars.iter().any(|(id, _)| *id == target_id) && typ.occurs(target_id) + !typevars.iter().any(|var| var.id() == target_id) && typ.occurs(target_id) } Type::Function(args, ret, env) => { args.iter().any(|arg| arg.occurs(target_id)) @@ -1549,7 +1546,7 @@ } pub fn from_generics(generics: &Generics) -> Vec<Type> { - vecmap(generics, |(_, var)| Type::TypeVariable(var.clone(), TypeVariableKind::Normal)) + vecmap(generics, |var| Type::TypeVariable(var.clone(), TypeVariableKind::Normal)) } } diff --git a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs index ac11e00ad201..75a5f1c63612 100644 --- a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -27,7 +27,7 @@ use crate::{ node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariable, - TypeVariableId, TypeVariableKind, UnaryOp, Visibility, + TypeVariableKind, UnaryOp, Visibility, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -1533,8 +1533,8 @@ impl<'interner> Monomorphizer<'interner> { let (generics, impl_method_type) = self.interner.function_meta(&impl_method).typ.unwrap_forall(); - let replace_type_variable = |(id, var): &(TypeVariableId, TypeVariable)| { - (*id, (var.clone(), Type::TypeVariable(var.clone(), TypeVariableKind::Normal))) + let replace_type_variable = |var: &TypeVariable| { + (var.id(), (var.clone(), Type::TypeVariable(var.clone(), TypeVariableKind::Normal))) }; // Replace each NamedGeneric with a TypeVariable containing the same internal type variable diff --git a/noir/compiler/noirc_frontend/src/node_interner.rs b/noir/compiler/noirc_frontend/src/node_interner.rs index 173bac958771..193f3fe04363 100644 --- a/noir/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/compiler/noirc_frontend/src/node_interner.rs @@ -499,8 +499,7 @@ impl NodeInterner { // This lets us record how many arguments the type expects so that other types // can refer to it with generic arguments before the generic parameters themselves // are resolved. - let id = TypeVariableId(0); - (id, TypeVariable::unbound(id)) + TypeVariable::unbound(TypeVariableId(0)) }), self_type_typevar_id, self_type_typevar: TypeVariable::unbound(self_type_typevar_id), @@ -530,8 +529,7 @@ impl NodeInterner { // This lets us record how many arguments the type expects so that other types // can refer to it with generic arguments before the generic parameters themselves // are resolved.
- let id = TypeVariableId(0); - (id, TypeVariable::unbound(id)) + TypeVariable::unbound(TypeVariableId(0)) }); let location = Location::new(typ.struct_def.span, file_id); @@ -549,10 +547,7 @@ impl NodeInterner { typ.type_alias_def.name.clone(), typ.type_alias_def.span, Type::Error, - vecmap(&typ.type_alias_def.generics, |_| { - let id = TypeVariableId(0); - (id, TypeVariable::unbound(id)) - }), + vecmap(&typ.type_alias_def.generics, |_| TypeVariable::unbound(TypeVariableId(0))), )); type_id @@ -1195,19 +1190,18 @@ impl NodeInterner { self.trait_implementations.push(trait_impl.clone()); - // Ignoring overlapping TraitImplKind::Assumed impls here is perfectly fine. - // It should never happen since impls are defined at global scope, but even - // if they were, we should never prevent defining a new impl because a where - // clause already assumes it exists. - // Replace each generic with a fresh type variable let substitutions = impl_generics .into_iter() - .map(|(id, typevar)| (id, (typevar, self.next_type_variable()))) + .map(|typevar| (typevar.id(), (typevar, self.next_type_variable()))) .collect(); let instantiated_object_type = object_type.substitute(&substitutions); + // Ignoring overlapping `TraitImplKind::Assumed` impls here is perfectly fine. + // It should never happen since impls are defined at global scope, but even + // if they were, we should never prevent defining a new impl because a 'where' + // clause already assumes it exists. if let Ok((TraitImplKind::Normal(existing), _)) = self.try_lookup_trait_implementation( &instantiated_object_type, trait_id, diff --git a/noir/compiler/noirc_frontend/src/resolve_locations.rs b/noir/compiler/noirc_frontend/src/resolve_locations.rs index 95ced906984e..c6834a4361e0 100644 --- a/noir/compiler/noirc_frontend/src/resolve_locations.rs +++ b/noir/compiler/noirc_frontend/src/resolve_locations.rs @@ -33,9 +33,13 @@ impl NodeInterner { /// Returns the [Location] of the definition of the given Ident found at [Span] of the given [FileId]. /// Returns [None] when definition is not found. 
- pub fn get_definition_location_from(&self, location: Location) -> Option<Location> { + pub fn get_definition_location_from( + &self, + location: Location, + return_type_location_instead: bool, + ) -> Option<Location> { self.find_location_index(location) - .and_then(|index| self.resolve_location(index)) + .and_then(|index| self.resolve_location(index, return_type_location_instead)) .or_else(|| self.try_resolve_trait_impl_location(location)) .or_else(|| self.try_resolve_trait_method_declaration(location)) } @@ -43,7 +47,7 @@ impl NodeInterner { pub fn get_declaration_location_from(&self, location: Location) -> Option<Location> { self.try_resolve_trait_method_declaration(location).or_else(|| { self.find_location_index(location) - .and_then(|index| self.resolve_location(index)) + .and_then(|index| self.resolve_location(index, false)) .and_then(|found_impl_location| { self.try_resolve_trait_method_declaration(found_impl_location) }) @@ -53,12 +57,31 @@ /// For a given [Index] we return the [Location] to which we resolved /// We currently return None for features not yet implemented /// TODO(#3659): LSP goto def should error when Ident at Location could not resolve - fn resolve_location(&self, index: impl Into<Index>) -> Option<Location> { + fn resolve_location( + &self, + index: impl Into<Index>, + return_type_location_instead: bool, + ) -> Option<Location> { + if return_type_location_instead { + return self.get_type_location_from_index(index); + } + let node = self.nodes.get(index.into())?; match node { - Node::Function(func) => self.resolve_location(func.as_expr()), - Node::Expression(expression) => self.resolve_expression_location(expression), + Node::Function(func) => { + self.resolve_location(func.as_expr(), return_type_location_instead) + } + Node::Expression(expression) => { + self.resolve_expression_location(expression, return_type_location_instead) + } + _ => None, + } + } + + fn get_type_location_from_index(&self, index: impl Into<Index>) -> Option<Location> { + match self.id_type(index.into()) { + Type::Struct(struct_type, _) => Some(struct_type.borrow().location), _ => None, } } @@ -66,7 +89,11 @@ /// Resolves the [Location] of the definition for a given [HirExpression] /// /// Note: currently the code returns None because some expressions are not yet implemented.
- fn resolve_expression_location(&self, expression: &HirExpression) -> Option<Location> { + fn resolve_expression_location( + &self, + expression: &HirExpression, + return_type_location_instead: bool, + ) -> Option<Location> { match expression { HirExpression::Ident(ident) => { let definition_info = self.definition(ident.id); @@ -88,7 +115,7 @@ } HirExpression::Call(expr_call) => { let func = expr_call.func; - self.resolve_location(func) + self.resolve_location(func, return_type_location_instead) } _ => None, diff --git a/noir/compiler/noirc_frontend/src/tests.rs b/noir/compiler/noirc_frontend/src/tests.rs index a56c3a7755f6..9ccbddab9eca 100644 --- a/noir/compiler/noirc_frontend/src/tests.rs +++ b/noir/compiler/noirc_frontend/src/tests.rs @@ -52,7 +52,7 @@ mod test { ) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { let root = std::path::Path::new("/"); let fm = FileManager::new(root); - let mut context = Context::new(fm); + let mut context = Context::new(fm, Default::default()); context.def_interner.populate_dummy_operator_traits(); let root_file_id = FileId::dummy(); let root_crate_id = context.crate_graph.add_crate_root(root_file_id); diff --git a/noir/compiler/wasm/src/compile.rs b/noir/compiler/wasm/src/compile.rs index 351f9ae8a86c..498ffe447cef 100644 --- a/noir/compiler/wasm/src/compile.rs +++ b/noir/compiler/wasm/src/compile.rs @@ -13,7 +13,7 @@ use noirc_driver::{ use noirc_evaluator::errors::SsaReport; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::Context, + hir::{def_map::parse_file, Context, ParsedFiles}, }; use serde::Deserialize; use std::{collections::HashMap, path::Path}; @@ -140,6 +140,10 @@ impl PathToFileSourceMap { } } +pub(crate) fn parse_all(fm: &FileManager) -> ParsedFiles { + fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect() +} + pub enum CompileResult { Contract { contract: ContractArtifact, warnings: Vec<SsaReport> }, Program { program: ProgramArtifact, warnings: Vec<SsaReport> }, } @@ -162,8 +166,8 @@ pub fn compile( }; let fm = file_manager_with_source_map(file_source_map); - - let mut context = Context::new(fm); + let parsed_files = parse_all(&fm); + let mut context = Context::new(fm, parsed_files); let path = Path::new(&entry_point); let crate_id = prepare_crate(&mut context, path); @@ -291,15 +295,18 @@ mod test { use crate::compile::PathToFileSourceMap; - use super::{file_manager_with_source_map, process_dependency_graph, DependencyGraph}; + use super::{ + file_manager_with_source_map, parse_all, process_dependency_graph, DependencyGraph, + }; use std::{collections::HashMap, path::Path}; - fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static> { + fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static, 'static> { let mut fm = file_manager_with_source_map(source_map); // Add this due to us calling prepare_crate on "/main.nr" below fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); + let parsed_files = parse_all(&fm); - let mut context = Context::new(fm); + let mut context = Context::new(fm, parsed_files); prepare_crate(&mut context, Path::new("/main.nr")); context diff --git a/noir/compiler/wasm/src/compile_new.rs b/noir/compiler/wasm/src/compile_new.rs index 3cb20bd0b5c9..6476f6d29bc9 100644 --- a/noir/compiler/wasm/src/compile_new.rs +++ b/noir/compiler/wasm/src/compile_new.rs @@ -1,5 +1,5 @@ use crate::compile::{ - file_manager_with_source_map, generate_contract_artifact, generate_program_artifact, + file_manager_with_source_map,
generate_contract_artifact, generate_program_artifact, parse_all, JsCompileResult, PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; @@ -20,7 +20,7 @@ use wasm_bindgen::prelude::wasm_bindgen; pub struct CompilerContext { // `wasm_bindgen` currently doesn't allow lifetime parameters on structs so we must use a `'static` lifetime. // `Context` must then own the `FileManager` to satisfy this lifetime. - context: Context<'static>, + context: Context<'static, 'static>, } #[wasm_bindgen(js_name = "CrateId")] @@ -34,7 +34,9 @@ impl CompilerContext { console_error_panic_hook::set_once(); let fm = file_manager_with_source_map(source_map); - CompilerContext { context: Context::new(fm) } + let parsed_files = parse_all(&fm); + + CompilerContext { context: Context::new(fm, parsed_files) } } #[cfg(test)] @@ -231,7 +233,7 @@ mod test { use noirc_driver::prepare_crate; use noirc_frontend::hir::Context; - use crate::compile::{file_manager_with_source_map, PathToFileSourceMap}; + use crate::compile::{file_manager_with_source_map, parse_all, PathToFileSourceMap}; use std::path::Path; @@ -241,8 +243,9 @@ mod test { let mut fm = file_manager_with_source_map(source_map); // Add this due to us calling prepare_crate on "/main.nr" below fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); + let parsed_files = parse_all(&fm); - let mut context = Context::new(fm); + let mut context = Context::new(fm, parsed_files); prepare_crate(&mut context, Path::new("/main.nr")); CompilerContext { context } diff --git a/noir/test_programs/execution_success/bit_and/Prover.toml b/noir/test_programs/execution_success/bit_and/Prover.toml index 40ce2b0bc273..34a5b63e5b16 100644 --- a/noir/test_programs/execution_success/bit_and/Prover.toml +++ b/noir/test_programs/execution_success/bit_and/Prover.toml @@ -1,2 +1,4 @@ x = "0x00" y = "0x10" +a = "0x00" +b = "0x10" diff --git a/noir/test_programs/execution_success/bit_and/src/main.nr b/noir/test_programs/execution_success/bit_and/src/main.nr index 0bc1d9a49bde..5a0aa17e3ede 100644 --- a/noir/test_programs/execution_success/bit_and/src/main.nr +++ b/noir/test_programs/execution_success/bit_and/src/main.nr @@ -1,6 +1,6 @@ // You can only do bit operations with integers. 
// (Kobi/Daira/Circom/#37) https://github.com/iden3/circom/issues/37 -fn main(x: Field, y: Field) { +fn main(x: Field, y: Field, a: Field, b: Field) { let x_as_u8 = x as u8; let y_as_u8 = y as u8; @@ -9,8 +9,8 @@ fn main(x: Field, y: Field) { let flag = (x == 0) & (y == 16); assert(flag); //bitwise and with odd bits: - let x_as_u11 = x as u11; - let y_as_u11 = y as u11; - assert((x_as_u11 & y_as_u11) == x_as_u11); + let a_as_u8 = a as u8; + let b_as_u8 = b as u8; + assert((a_as_u8 & b_as_u8) == a_as_u8); } diff --git a/noir/test_programs/execution_success/nested_slice_dynamic/Nargo.toml b/noir/test_programs/execution_success/nested_array_in_slice/Nargo.toml similarity index 62% rename from noir/test_programs/execution_success/nested_slice_dynamic/Nargo.toml rename to noir/test_programs/execution_success/nested_array_in_slice/Nargo.toml index c8925ed97b4d..4f0748f79be3 100644 --- a/noir/test_programs/execution_success/nested_slice_dynamic/Nargo.toml +++ b/noir/test_programs/execution_success/nested_array_in_slice/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "nested_slice_dynamic" +name = "nested_array_in_slice" type = "bin" authors = [""] [dependencies] \ No newline at end of file diff --git a/noir/test_programs/execution_success/nested_slice_dynamic/Prover.toml b/noir/test_programs/execution_success/nested_array_in_slice/Prover.toml similarity index 100% rename from noir/test_programs/execution_success/nested_slice_dynamic/Prover.toml rename to noir/test_programs/execution_success/nested_array_in_slice/Prover.toml diff --git a/noir/test_programs/execution_success/nested_slice_dynamic/src/main.nr b/noir/test_programs/execution_success/nested_array_in_slice/src/main.nr similarity index 100% rename from noir/test_programs/execution_success/nested_slice_dynamic/src/main.nr rename to noir/test_programs/execution_success/nested_array_in_slice/src/main.nr diff --git a/noir/tooling/lsp/src/lib.rs b/noir/tooling/lsp/src/lib.rs index 1099ad602699..6f47dfc3791a 100644 --- a/noir/tooling/lsp/src/lib.rs +++ b/noir/tooling/lsp/src/lib.rs @@ -19,7 +19,7 @@ use async_lsp::{ }; use fm::codespan_files as files; use lsp_types::CodeLens; -use nargo::workspace::Workspace; +use nargo::{parse_all, workspace::Workspace}; use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::{ @@ -34,7 +34,8 @@ use notifications::{ }; use requests::{ on_code_lens_request, on_formatting, on_goto_declaration_request, on_goto_definition_request, - on_initialize, on_profile_run_request, on_shutdown, on_test_run_request, on_tests_request, + on_goto_type_definition_request, on_initialize, on_profile_run_request, on_shutdown, + on_test_run_request, on_tests_request, }; use serde_json::Value as JsonValue; use thiserror::Error; @@ -98,6 +99,7 @@ impl NargoLspService { .request::(on_profile_run_request) .request::(on_goto_definition_request) .request::(on_goto_declaration_request) + .request::(on_goto_type_definition_request) .notification::(on_initialized) .notification::(on_did_change_configuration) .notification::(on_did_open_text_document) @@ -225,15 +227,16 @@ pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result (Context<'static>, CrateId) { +fn prepare_source(source: String) -> (Context<'static, 'static>, CrateId) { let root = Path::new(""); let file_name = Path::new("main.nr"); let mut file_manager = file_manager_with_stdlib(root); 
file_manager.add_file_with_source(file_name, source).expect( "Adding source buffer to file manager should never fail when file manager is empty", ); + let parsed_files = parse_all(&file_manager); - let mut context = Context::new(file_manager); + let mut context = Context::new(file_manager, parsed_files); let root_crate_id = prepare_crate(&mut context, file_name); (context, root_crate_id) diff --git a/noir/tooling/lsp/src/notifications/mod.rs b/noir/tooling/lsp/src/notifications/mod.rs index 0cd86803efad..769cfbdfed76 100644 --- a/noir/tooling/lsp/src/notifications/mod.rs +++ b/noir/tooling/lsp/src/notifications/mod.rs @@ -1,7 +1,7 @@ use std::ops::ControlFlow; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use nargo::{insert_all_files_for_workspace_into_file_manager, prepare_package}; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all, prepare_package}; use noirc_driver::{check_crate, file_manager_with_stdlib}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; @@ -130,11 +130,13 @@ fn process_noir_document( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let diagnostics: Vec<_> = workspace .into_iter() .flat_map(|package| -> Vec<Diagnostic> { - let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); + let (mut context, crate_id) = + prepare_package(&workspace_file_manager, &parsed_files, package); let file_diagnostics = match check_crate(&mut context, crate_id, false, false) { Ok(((), warnings)) => warnings, diff --git a/noir/tooling/lsp/src/requests/goto_declaration.rs b/noir/tooling/lsp/src/requests/goto_declaration.rs index 6e3664804f6a..59636192d870 100644 --- a/noir/tooling/lsp/src/requests/goto_declaration.rs +++ b/noir/tooling/lsp/src/requests/goto_declaration.rs @@ -7,7 +7,7 @@ use async_lsp::{ErrorCode, ResponseError}; use lsp_types::request::{GotoDeclarationParams, GotoDeclarationResponse}; -use nargo::insert_all_files_for_workspace_into_file_manager; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use noirc_driver::file_manager_with_stdlib; use super::{position_to_byte_index, to_lsp_location}; @@ -36,8 +36,10 @@ fn on_goto_definition_inner( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); - let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package); + let (mut context, crate_id) = + nargo::prepare_package(&workspace_file_manager, &parsed_files, package); let interner; if let Some(def_interner) = _state.cached_definitions.get(&package_root_path) { diff --git a/noir/tooling/lsp/src/requests/goto_definition.rs b/noir/tooling/lsp/src/requests/goto_definition.rs index 277bbf013f97..73a04c02a4d9 100644 --- a/noir/tooling/lsp/src/requests/goto_definition.rs +++ b/noir/tooling/lsp/src/requests/goto_definition.rs @@ -4,8 +4,9 @@ use crate::resolve_workspace_for_source_path; use crate::{types::GotoDefinitionResult, LspState}; use async_lsp::{ErrorCode, ResponseError}; +use lsp_types::request::GotoTypeDefinitionParams; use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; -use nargo::insert_all_files_for_workspace_into_file_manager; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use
noirc_driver::file_manager_with_stdlib; use super::{position_to_byte_index, to_lsp_location}; @@ -14,13 +15,22 @@ pub(crate) fn on_goto_definition_request( state: &mut LspState, params: GotoDefinitionParams, ) -> impl Future<Output = Result<GotoDefinitionResult, ResponseError>> { - let result = on_goto_definition_inner(state, params); + let result = on_goto_definition_inner(state, params, false); + future::ready(result) +} + +pub(crate) fn on_goto_type_definition_request( + state: &mut LspState, + params: GotoTypeDefinitionParams, +) -> impl Future<Output = Result<GotoDefinitionResult, ResponseError>> { + let result = on_goto_definition_inner(state, params, true); future::ready(result) } fn on_goto_definition_inner( _state: &mut LspState, params: GotoDefinitionParams, + return_type_location_instead: bool, ) -> Result<GotoDefinitionResult, ResponseError> { let file_path = params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| { @@ -34,8 +44,10 @@ fn on_goto_definition_inner( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); - let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package); + let (mut context, crate_id) = + nargo::prepare_package(&workspace_file_manager, &parsed_files, package); let interner; if let Some(def_interner) = _state.cached_definitions.get(&package_root_path) { @@ -65,8 +77,9 @@ fn on_goto_definition_inner( span: noirc_errors::Span::single_char(byte_index as u32), }; - let goto_definition_response = - interner.get_definition_location_from(search_for_location).and_then(|found_location| { + let goto_definition_response = interner + .get_definition_location_from(search_for_location, return_type_location_instead) + .and_then(|found_location| { let file_id = found_location.file; let definition_position = to_lsp_location(files, file_id, found_location.span)?; let response: GotoDefinitionResponse = diff --git a/noir/tooling/lsp/src/requests/mod.rs b/noir/tooling/lsp/src/requests/mod.rs index 9a4738e1985c..a03ee829e44d 100644 --- a/noir/tooling/lsp/src/requests/mod.rs +++ b/noir/tooling/lsp/src/requests/mod.rs @@ -5,7 +5,7 @@ use async_lsp::ResponseError; use fm::codespan_files::Error; use lsp_types::{ DeclarationCapability, Location, Position, TextDocumentSyncCapability, TextDocumentSyncKind, - Url, + TypeDefinitionProviderCapability, Url, }; use nargo_fmt::Config; use serde::{Deserialize, Serialize}; @@ -35,7 +35,8 @@ mod tests; pub(crate) use { code_lens_request::collect_lenses_for_package, code_lens_request::on_code_lens_request, goto_declaration::on_goto_declaration_request, goto_definition::on_goto_definition_request, - profile_run::on_profile_run_request, test_run::on_test_run_request, tests::on_tests_request, + goto_definition::on_goto_type_definition_request, profile_run::on_profile_run_request, + test_run::on_test_run_request, tests::on_tests_request, }; /// LSP client will send initialization request after the server has started.
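The two handlers above funnel into the same inner function and differ only in the flag that eventually reaches NodeInterner::get_definition_location_from. A hedged sketch of that resolution step follows; the wrapper function is hypothetical and not part of the patch.

use noirc_errors::Location;
use noirc_frontend::node_interner::NodeInterner;

// Illustrative only: passing `true` asks the interner for the location of the
// identifier's type (currently structs only, per get_type_location_from_index)
// rather than the identifier's own definition.
fn goto_type_location(interner: &NodeInterner, at: Location) -> Option<Location> {
    interner.get_definition_location_from(at, true)
}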
@@ -94,6 +95,7 @@ pub(crate) fn on_initialize( nargo: Some(nargo), definition_provider: Some(lsp_types::OneOf::Left(true)), declaration_provider: Some(DeclarationCapability::Simple(true)), + type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)), }, server_info: None, }) diff --git a/noir/tooling/lsp/src/requests/profile_run.rs b/noir/tooling/lsp/src/requests/profile_run.rs index 6664475a68c4..65e6a936e201 100644 --- a/noir/tooling/lsp/src/requests/profile_run.rs +++ b/noir/tooling/lsp/src/requests/profile_run.rs @@ -5,7 +5,9 @@ use std::{ use acvm::ExpressionWidth; use async_lsp::{ErrorCode, ResponseError}; -use nargo::{artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager}; +use nargo::{ + artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, parse_all, +}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ file_manager_with_stdlib, CompileOptions, DebugFile, NOIR_ARTIFACT_VERSION_STRING, @@ -52,6 +54,7 @@ fn on_profile_run_request_inner( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); // Since we filtered on crate name, this should be the only item in the iterator match workspace.into_iter().next() { @@ -66,6 +69,7 @@ fn on_profile_run_request_inner( let (compiled_programs, compiled_contracts) = nargo::ops::compile_workspace( &workspace_file_manager, + &parsed_files, &workspace, &binary_packages, &contract_packages, diff --git a/noir/tooling/lsp/src/requests/test_run.rs b/noir/tooling/lsp/src/requests/test_run.rs index c2181d7839d0..8da4d74a654a 100644 --- a/noir/tooling/lsp/src/requests/test_run.rs +++ b/noir/tooling/lsp/src/requests/test_run.rs @@ -4,7 +4,7 @@ use async_lsp::{ErrorCode, ResponseError}; use nargo::{ insert_all_files_for_workspace_into_file_manager, ops::{run_test, TestStatus}, - prepare_package, + parse_all, prepare_package, }; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ @@ -52,11 +52,13 @@ fn on_test_run_request_inner( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); // Since we filtered on crate name, this should be the only item in the iterator match workspace.into_iter().next() { Some(package) => { - let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); + let (mut context, crate_id) = + prepare_package(&workspace_file_manager, &parsed_files, package); if check_crate(&mut context, crate_id, false, false).is_err() { let result = NargoTestRunResult { id: params.id.clone(), diff --git a/noir/tooling/lsp/src/requests/tests.rs b/noir/tooling/lsp/src/requests/tests.rs index 0f717b9fb9ee..20098685a901 100644 --- a/noir/tooling/lsp/src/requests/tests.rs +++ b/noir/tooling/lsp/src/requests/tests.rs @@ -2,7 +2,7 @@ use std::future::{self, Future}; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; use lsp_types::{LogMessageParams, MessageType}; -use nargo::{insert_all_files_for_workspace_into_file_manager, prepare_package}; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all, prepare_package}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, 
PackageSelection}; use noirc_driver::{check_crate, file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; @@ -52,11 +52,13 @@ fn on_tests_request_inner( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let package_tests: Vec<_> = workspace .into_iter() .filter_map(|package| { - let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); + let (mut context, crate_id) = + prepare_package(&workspace_file_manager, &parsed_files, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails let _ = check_crate(&mut context, crate_id, false, false); diff --git a/noir/tooling/lsp/src/types.rs b/noir/tooling/lsp/src/types.rs index 8dbc51ec83c5..e3492f21346d 100644 --- a/noir/tooling/lsp/src/types.rs +++ b/noir/tooling/lsp/src/types.rs @@ -1,5 +1,7 @@ use fm::FileId; -use lsp_types::{DeclarationCapability, DefinitionOptions, OneOf}; +use lsp_types::{ + DeclarationCapability, DefinitionOptions, OneOf, TypeDefinitionProviderCapability, +}; use noirc_driver::DebugFile; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; @@ -25,7 +27,8 @@ pub(crate) mod request { // Re-providing lsp_types that we don't need to override pub(crate) use lsp_types::request::{ - CodeLensRequest as CodeLens, Formatting, GotoDeclaration, GotoDefinition, Shutdown, + CodeLensRequest as CodeLens, Formatting, GotoDeclaration, GotoDefinition, + GotoTypeDefinition, Shutdown, }; #[derive(Debug)] @@ -118,6 +121,10 @@ pub(crate) struct ServerCapabilities { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) definition_provider: Option<OneOf<bool, DefinitionOptions>>, + /// The server provides goto type definition support. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) type_definition_provider: Option<TypeDefinitionProviderCapability>, + /// The server provides code lens.
#[serde(skip_serializing_if = "Option::is_none")] pub(crate) code_lens_provider: Option<CodeLensOptions>, diff --git a/noir/tooling/nargo/src/lib.rs b/noir/tooling/nargo/src/lib.rs index 62ff4325a230..0fdff8b202f8 100644 --- a/noir/tooling/nargo/src/lib.rs +++ b/noir/tooling/nargo/src/lib.rs @@ -20,9 +20,10 @@ use fm::FileManager; use noirc_driver::{add_dep, prepare_crate, prepare_dependency}; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::Context, + hir::{def_map::parse_file, Context, ParsedFiles}, }; use package::{Dependency, Package}; +use rayon::prelude::*; pub use self::errors::NargoError; @@ -95,11 +96,27 @@ fn insert_all_files_for_packages_dependencies_into_file_manager( } } -pub fn prepare_package<'file_manager>( +pub fn parse_all(file_manager: &FileManager) -> ParsedFiles { + file_manager + .as_file_map() + .all_file_ids() + .par_bridge() + .filter(|&&file_id| { + let file_path = file_manager.path(file_id).expect("expected file to exist"); + let file_extension = + file_path.extension().expect("expected all file paths to have an extension"); + file_extension == "nr" + }) + .map(|&file_id| (file_id, parse_file(file_manager, file_id))) + .collect() +} + +pub fn prepare_package<'file_manager, 'parsed_files>( file_manager: &'file_manager FileManager, + parsed_files: &'parsed_files ParsedFiles, package: &Package, -) -> (Context<'file_manager>, CrateId) { - let mut context = Context::from_ref_file_manager(file_manager); +) -> (Context<'file_manager, 'parsed_files>, CrateId) { + let mut context = Context::from_ref_file_manager(file_manager, parsed_files); let crate_id = prepare_crate(&mut context, &package.entry_path); diff --git a/noir/tooling/nargo/src/ops/compile.rs b/noir/tooling/nargo/src/ops/compile.rs index 043e2a367a5b..c048dbd288a7 100644 --- a/noir/tooling/nargo/src/ops/compile.rs +++ b/noir/tooling/nargo/src/ops/compile.rs @@ -1,6 +1,7 @@ use acvm::ExpressionWidth; use fm::FileManager; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram}; +use noirc_frontend::hir::ParsedFiles; use crate::errors::CompileError; use crate::prepare_package; @@ -15,6 +16,7 @@ use rayon::prelude::*; /// This function will return an error if there are any compilation errors reported. pub fn compile_workspace( file_manager: &FileManager, + parsed_files: &ParsedFiles, workspace: &Workspace, binary_packages: &[Package], contract_packages: &[Package], @@ -25,12 +27,21 @@ let program_results: Vec<CompilationResult<CompiledProgram>> = binary_packages .par_iter() .map(|package| { - compile_program(file_manager, workspace, package, compile_options, expression_width) + compile_program( + file_manager, + parsed_files, + workspace, + package, + compile_options, + expression_width, + ) }) .collect(); let contract_results: Vec<CompilationResult<CompiledContract>> = contract_packages .par_iter() - .map(|package| compile_contract(file_manager, package, compile_options, expression_width)) + .map(|package| { + compile_contract(file_manager, parsed_files, package, compile_options, expression_width) + }) .collect(); // Report any warnings/errors which were encountered during compilation.
@@ -62,12 +73,13 @@ pub fn compile_workspace( pub fn compile_program( file_manager: &FileManager, + parsed_files: &ParsedFiles, workspace: &Workspace, package: &Package, compile_options: &CompileOptions, expression_width: ExpressionWidth, ) -> CompilationResult<CompiledProgram> { - let (mut context, crate_id) = prepare_package(file_manager, package); + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); let program_artifact_path = workspace.package_build_path(package); let mut debug_artifact_path = program_artifact_path.clone(); @@ -84,11 +96,12 @@ pub fn compile_program( fn compile_contract( file_manager: &FileManager, + parsed_files: &ParsedFiles, package: &Package, compile_options: &CompileOptions, expression_width: ExpressionWidth, ) -> CompilationResult<CompiledContract> { - let (mut context, crate_id) = prepare_package(file_manager, package); + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); let (contract, warnings) = match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { Ok(contracts_and_warnings) => contracts_and_warnings, diff --git a/noir/tooling/nargo_cli/build.rs b/noir/tooling/nargo_cli/build.rs index 9a0492c99adb..3dfeb4ec01f8 100644 --- a/noir/tooling/nargo_cli/build.rs +++ b/noir/tooling/nargo_cli/build.rs @@ -75,7 +75,7 @@ fn execution_success_{test_name}() {{ let mut cmd = Command::cargo_bin("nargo").unwrap(); cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute"); + cmd.arg("execute").arg("--force"); cmd.assert().success(); }} @@ -194,6 +194,7 @@ fn compile_success_empty_{test_name}() {{ cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("info"); cmd.arg("--json"); + cmd.arg("--force"); let output = cmd.output().expect("Failed to execute command"); @@ -242,7 +243,7 @@ fn compile_success_contract_{test_name}() {{ let mut cmd = Command::cargo_bin("nargo").unwrap(); cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile"); + cmd.arg("compile").arg("--force"); cmd.assert().success(); }} @@ -280,7 +281,7 @@ fn compile_failure_{test_name}() {{ let mut cmd = Command::cargo_bin("nargo").unwrap(); cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute"); + cmd.arg("execute").arg("--force"); cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); }} diff --git a/noir/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/tooling/nargo_cli/src/cli/check_cmd.rs index e2db492fe9c1..a8b9dbdeeb26 100644 --- a/noir/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/check_cmd.rs @@ -6,7 +6,7 @@ use fm::FileManager; use iter_extended::btree_map; use nargo::{ errors::CompileError, insert_all_files_for_workspace_into_file_manager, package::Package, - prepare_package, + parse_all, prepare_package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; @@ -16,7 +16,7 @@ use noirc_driver::{ }; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::Context, + hir::{Context, ParsedFiles}, }; use super::fs::write_to_file; @@ -54,9 +54,10 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = 
parse_all(&workspace_file_manager); for package in &workspace { - check_package(&workspace_file_manager, package, &args.compile_options)?; + check_package(&workspace_file_manager, &parsed_files, package, &args.compile_options)?; println!("[{}] Constraint system successfully built!", package.name); } Ok(()) @@ -64,10 +65,11 @@ pub(crate) fn run( fn check_package( file_manager: &FileManager, + parsed_files: &ParsedFiles, package: &Package, compile_options: &CompileOptions, ) -> Result<(), CompileError> { - let (mut context, crate_id) = prepare_package(file_manager, package); + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); check_crate_and_report_errors( &mut context, crate_id, diff --git a/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 1eb8153ce9bb..02cbdb8b914a 100644 --- a/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -9,12 +9,13 @@ use crate::errors::CliError; use acvm::ExpressionWidth; use clap::Args; use fm::FileManager; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; +use noirc_frontend::hir::ParsedFiles; /// Generates a Solidity verifier smart contract for the program #[derive(Debug, Clone, Args)] @@ -48,11 +49,13 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let expression_width = backend.get_backend_info()?; for package in &workspace { let smart_contract_string = smart_contract_for_package( &workspace_file_manager, + &parsed_files, &workspace, backend, package, @@ -73,14 +76,21 @@ pub(crate) fn run( fn smart_contract_for_package( file_manager: &FileManager, + parsed_files: &ParsedFiles, workspace: &Workspace, backend: &Backend, package: &Package, compile_options: &CompileOptions, expression_width: ExpressionWidth, ) -> Result<String, CliError> { - let program = - compile_bin_package(file_manager, workspace, package, compile_options, expression_width)?; + let program = compile_bin_package( + file_manager, + parsed_files, + workspace, + package, + compile_options, + expression_width, + )?; Ok(backend.eth_contract(&program.circuit)?)
 }
 
diff --git a/noir/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/tooling/nargo_cli/src/cli/compile_cmd.rs
index 9e739f5c818c..febd73c967d8 100644
--- a/noir/tooling/nargo_cli/src/cli/compile_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/compile_cmd.rs
@@ -5,10 +5,10 @@ use acvm::ExpressionWidth;
 use fm::FileManager;
 use nargo::artifacts::program::ProgramArtifact;
 use nargo::errors::CompileError;
-use nargo::insert_all_files_for_workspace_into_file_manager;
 use nargo::package::Package;
 use nargo::prepare_package;
 use nargo::workspace::Workspace;
+use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all};
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_driver::file_manager_with_stdlib;
 use noirc_driver::NOIR_ARTIFACT_VERSION_STRING;
@@ -17,6 +17,7 @@ use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, Compiled
 use noirc_frontend::graph::CrateName;
 
 use clap::Args;
+use noirc_frontend::hir::ParsedFiles;
 
 use crate::backends::Backend;
 use crate::errors::CliError;
@@ -60,6 +61,7 @@ pub(crate) fn run(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace
         .into_iter()
@@ -70,6 +72,7 @@ pub(crate) fn run(
     let expression_width = backend.get_backend_info_or_default();
     let (_, compiled_contracts) = compile_workspace(
         &workspace_file_manager,
+        &parsed_files,
         &workspace,
         &binary_packages,
         &contract_packages,
@@ -87,6 +90,7 @@ pub(crate) fn run(
 
 pub(super) fn compile_workspace(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     workspace: &Workspace,
     binary_packages: &[Package],
     contract_packages: &[Package],
@@ -97,12 +101,21 @@ pub(super) fn compile_workspace(
     let program_results: Vec<CompilationResult<CompiledProgram>> = binary_packages
         .par_iter()
         .map(|package| {
-            compile_program(file_manager, workspace, package, compile_options, expression_width)
+            compile_program(
+                file_manager,
+                parsed_files,
+                workspace,
+                package,
+                compile_options,
+                expression_width,
+            )
         })
         .collect();
     let contract_results: Vec<CompilationResult<CompiledContract>> = contract_packages
         .par_iter()
-        .map(|package| compile_contract(file_manager, package, compile_options, expression_width))
+        .map(|package| {
+            compile_contract(file_manager, parsed_files, package, compile_options, expression_width)
+        })
         .collect();
 
     // Report any warnings/errors which were encountered during compilation.
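
The compile_workspace hunks above are the heart of this refactor: the workspace is parsed once via parse_all, and each per-package closure only borrows the shared parse results, which is what lets rayon's par_iter fan packages out across threads without re-parsing any sources. A minimal, self-contained sketch of that borrow pattern follows; the placeholder structs and the compile_one helper are illustrative stand-ins, not the real nargo API.

use rayon::prelude::*;

// Placeholder types; the real ones are fm::FileManager,
// noirc_frontend::hir::ParsedFiles and nargo::package::Package.
struct FileManager;
struct ParsedFiles;
struct Package(String);

// Hypothetical stand-in for compile_program / compile_contract.
fn compile_one(_fm: &FileManager, _parsed: &ParsedFiles, pkg: &Package) -> Result<String, String> {
    Ok(format!("compiled {}", pkg.0))
}

fn main() {
    let file_manager = FileManager;
    let parsed_files = ParsedFiles; // stands in for the single parse_all pass
    let packages = vec![Package("a".into()), Package("b".into())];

    // Mirrors the `binary_packages.par_iter().map(...)` shape above: the
    // closure captures only shared borrows of Sync data, so every package
    // compiles in parallel against one parse result, with no cloning or locks.
    let results: Vec<Result<String, String>> = packages
        .par_iter()
        .map(|package| compile_one(&file_manager, &parsed_files, package))
        .collect();

    assert!(results.iter().all(Result::is_ok));
}
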
@@ -134,6 +147,7 @@ pub(super) fn compile_workspace(
 
 pub(crate) fn compile_bin_package(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     workspace: &Workspace,
     package: &Package,
     compile_options: &CompileOptions,
@@ -143,8 +157,14 @@ pub(crate) fn compile_bin_package(
         return Err(CompileError::LibraryCrate(package.name.clone()).into());
     }
 
-    let compilation_result =
-        compile_program(file_manager, workspace, package, compile_options, expression_width);
+    let compilation_result = compile_program(
+        file_manager,
+        parsed_files,
+        workspace,
+        package,
+        compile_options,
+        expression_width,
+    );
 
     let program = report_errors(
         compilation_result,
@@ -158,16 +178,16 @@ pub(crate) fn compile_bin_package(
 
 fn compile_program(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     workspace: &Workspace,
     package: &Package,
     compile_options: &CompileOptions,
     expression_width: ExpressionWidth,
 ) -> CompilationResult<CompiledProgram> {
-    let (mut context, crate_id) = prepare_package(file_manager, package);
+    let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package);
 
     let program_artifact_path = workspace.package_build_path(package);
-    let cached_program: Option<CompiledProgram> =
-        read_program_from_file(program_artifact_path)
+    let cached_program: Option<CompiledProgram> = read_program_from_file(program_artifact_path)
         .ok()
         .filter(|p| p.noir_version == NOIR_ARTIFACT_VERSION_STRING)
         .map(|p| p.into());
@@ -185,11 +205,12 @@ fn compile_program(
 
 fn compile_contract(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     package: &Package,
     compile_options: &CompileOptions,
     expression_width: ExpressionWidth,
 ) -> CompilationResult<CompiledContract> {
-    let (mut context, crate_id) = prepare_package(file_manager, package);
+    let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package);
     let (contract, warnings) =
         match noirc_driver::compile_contract(&mut context, crate_id, compile_options) {
             Ok(contracts_and_warnings) => contracts_and_warnings,
diff --git a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs
index 29e696ea6086..fe418a81ffd0 100644
--- a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs
@@ -2,8 +2,8 @@ use acvm::acir::native_types::WitnessMap;
 use backend_interface::Backend;
 use clap::Args;
 use nargo::constants::PROVER_INPUT_FILE;
-use nargo::insert_all_files_for_workspace_into_file_manager;
 use nargo::workspace::Workspace;
+use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all};
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_abi::input_parser::Format;
 use noirc_driver::{
@@ -70,9 +70,11 @@ fn load_and_compile_project(
 
     let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new(""));
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let compiled_program = compile_bin_package(
         &workspace_file_manager,
+        &parsed_files,
         &workspace,
         package,
         &CompileOptions::default(),
diff --git a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs
index f78a683aa8f9..7207513c3668 100644
--- a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs
@@ -6,8 +6,8 @@ use clap::Args;
 
 use nargo::artifacts::debug::DebugArtifact;
 use nargo::constants::PROVER_INPUT_FILE;
-use nargo::insert_all_files_for_workspace_into_file_manager;
 use nargo::package::Package;
+use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all};
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_abi::input_parser::{Format, InputValue};
 use noirc_abi::InputMap;
@@ -57,6 +57,7 @@ pub(crate) fn run(
 
     let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new(""));
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else {
         println!(
@@ -67,6 +68,7 @@ pub(crate) fn run(
 
     let compiled_program = compile_bin_package(
         &workspace_file_manager,
+        &parsed_files,
         &workspace,
         package,
         &args.compile_options,
diff --git a/noir/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/tooling/nargo_cli/src/cli/execute_cmd.rs
index 7f695c42fa4c..aa230613eafe 100644
--- a/noir/tooling/nargo_cli/src/cli/execute_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/execute_cmd.rs
@@ -5,9 +5,9 @@ use clap::Args;
 use nargo::artifacts::debug::DebugArtifact;
 use nargo::constants::PROVER_INPUT_FILE;
 use nargo::errors::try_to_diagnose_runtime_error;
-use nargo::insert_all_files_for_workspace_into_file_manager;
 use nargo::ops::DefaultForeignCallExecutor;
 use nargo::package::Package;
+use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all};
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_abi::input_parser::{Format, InputValue};
 use noirc_abi::InputMap;
@@ -66,11 +66,13 @@ pub(crate) fn run(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let expression_width = backend.get_backend_info_or_default();
     for package in &workspace {
         let compiled_program = compile_bin_package(
             &workspace_file_manager,
+            &parsed_files,
             &workspace,
             package,
             &args.compile_options,
diff --git a/noir/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/tooling/nargo_cli/src/cli/export_cmd.rs
index ac3e93e09b7f..feaa55857e56 100644
--- a/noir/tooling/nargo_cli/src/cli/export_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/export_cmd.rs
@@ -1,13 +1,14 @@
 use nargo::errors::CompileError;
 use noirc_errors::FileDiagnostic;
+use noirc_frontend::hir::ParsedFiles;
 use rayon::prelude::*;
 
 use fm::FileManager;
 use iter_extended::try_vecmap;
-use nargo::insert_all_files_for_workspace_into_file_manager;
 use nargo::package::Package;
 use nargo::prepare_package;
 use nargo::workspace::Workspace;
+use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all};
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_driver::{
     compile_no_check, file_manager_with_stdlib, CompileOptions, CompiledProgram,
@@ -60,6 +61,7 @@ pub(crate) fn run(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let library_packages: Vec<_> =
         workspace.into_iter().filter(|package| package.is_library()).collect();
@@ -69,6 +71,7 @@ pub(crate) fn run(
         .map(|package| {
             compile_exported_functions(
                 &workspace_file_manager,
+                &parsed_files,
                 &workspace,
                 package,
                 &args.compile_options,
@@ -79,11 +82,12 @@ pub(crate) fn run(
 
 fn compile_exported_functions(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     workspace: &Workspace,
     package: &Package,
     compile_options: &CompileOptions,
 ) -> Result<(), CliError> {
-    let (mut context, crate_id) = prepare_package(file_manager, package);
+    let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package);
     check_crate_and_report_errors(
         &mut context,
         crate_id,
diff --git a/noir/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/tooling/nargo_cli/src/cli/info_cmd.rs
index f983a19c0fdb..5f7dcbfbb6e1 100644
--- a/noir/tooling/nargo_cli/src/cli/info_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/info_cmd.rs
@@ -6,7 +6,7 @@ use clap::Args;
 use iter_extended::vecmap;
 use nargo::{
     artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager,
-    package::Package,
+    package::Package, parse_all,
 };
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_driver::{
@@ -67,6 +67,7 @@ pub(crate) fn run(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace
         .into_iter()
@@ -77,6 +78,7 @@ pub(crate) fn run(
     let expression_width = backend.get_backend_info_or_default();
     let (compiled_programs, compiled_contracts) = compile_workspace(
         &workspace_file_manager,
+        &parsed_files,
         &workspace,
         &binary_packages,
         &contract_packages,
diff --git a/noir/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/tooling/nargo_cli/src/cli/prove_cmd.rs
index 167ab541bc5f..fb565b691b61 100644
--- a/noir/tooling/nargo_cli/src/cli/prove_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/prove_cmd.rs
@@ -1,8 +1,8 @@
 use clap::Args;
 use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE};
-use nargo::insert_all_files_for_workspace_into_file_manager;
 use nargo::package::Package;
 use nargo::workspace::Workspace;
+use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all};
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_abi::input_parser::Format;
 use noirc_driver::{
@@ -66,11 +66,13 @@ pub(crate) fn run(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let expression_width = backend.get_backend_info()?;
     for package in &workspace {
         let program = compile_bin_package(
             &workspace_file_manager,
+            &parsed_files,
             &workspace,
             package,
             &args.compile_options,
diff --git a/noir/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/tooling/nargo_cli/src/cli/test_cmd.rs
index 69f03b49cbde..5db842609e58 100644
--- a/noir/tooling/nargo_cli/src/cli/test_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/test_cmd.rs
@@ -8,11 +8,14 @@ use nargo::{
     insert_all_files_for_workspace_into_file_manager,
     ops::{run_test, TestStatus},
     package::Package,
-    prepare_package,
+    parse_all, prepare_package,
 };
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING};
-use noirc_frontend::{graph::CrateName, hir::FunctionNameMatch};
+use noirc_frontend::{
+    graph::CrateName,
+    hir::{FunctionNameMatch, ParsedFiles},
+};
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
 
 use crate::{backends::Backend, cli::check_cmd::check_crate_and_report_errors, errors::CliError};
@@ -66,6 +69,7 @@ pub(crate) fn run(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let pattern = match &args.test_name {
         Some(name) => {
@@ -84,6 +88,7 @@ pub(crate) fn run(
             // TODO: We should run the whole suite even if there are failures in a package
             run_tests(
                 &workspace_file_manager,
+                &parsed_files,
                 &blackbox_solver,
                 package,
                 pattern,
@@ -96,8 +101,10 @@ pub(crate) fn run(
     Ok(())
 }
 
+#[allow(clippy::too_many_arguments)]
 fn run_tests<S: BlackBoxFunctionSolver>(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     blackbox_solver: &S,
     package: &Package,
     fn_name: FunctionNameMatch,
@@ -105,7 +112,7 @@ fn run_tests<S: BlackBoxFunctionSolver>(
     foreign_call_resolver_url: Option<&str>,
     compile_options: &CompileOptions,
 ) -> Result<(), CliError> {
-    let (mut context, crate_id) = prepare_package(file_manager, package);
+    let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package);
     check_crate_and_report_errors(
         &mut context,
         crate_id,
diff --git a/noir/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/tooling/nargo_cli/src/cli/verify_cmd.rs
index 86d5e774cbe9..3d6aeb2e4d5f 100644
--- a/noir/tooling/nargo_cli/src/cli/verify_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/verify_cmd.rs
@@ -7,9 +7,9 @@ use crate::{backends::Backend, errors::CliError};
 
 use clap::Args;
 use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE};
-use nargo::insert_all_files_for_workspace_into_file_manager;
 use nargo::package::Package;
 use nargo::workspace::Workspace;
+use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all};
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_abi::input_parser::Format;
 use noirc_driver::{
@@ -53,11 +53,13 @@ pub(crate) fn run(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_all(&workspace_file_manager);
 
     let expression_width = backend.get_backend_info()?;
     for package in &workspace {
         let program = compile_bin_package(
             &workspace_file_manager,
+            &parsed_files,
             &workspace,
             package,
             &args.compile_options,
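
Every command touched by this diff performs the same three-step setup before its per-package work. The sketch below condenses that shared flow using only names that appear in the hunks above; error handling and CLI plumbing are elided, so read it as a sketch of the calling convention rather than a drop-in function.

use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all, prepare_package};
use nargo::workspace::Workspace;
use noirc_driver::file_manager_with_stdlib;

fn prepare_workspace_once(workspace: &Workspace) {
    // 1. One FileManager holding the stdlib plus every workspace source file.
    let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
    insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager);

    // 2. One parse pass over every file in that manager.
    let parsed_files = parse_all(&workspace_file_manager);

    // 3. Each per-package entry point borrows both instead of re-parsing.
    for package in workspace {
        let (mut _context, _crate_id) =
            prepare_package(&workspace_file_manager, &parsed_files, package);
        // ...then check_crate / compile_program / compile_contract as before.
    }
}

The --force flag added to the generated tests in build.rs complements this change: compile_program can now serve a cached artifact via read_program_from_file, and forcing a fresh compilation in the test harness presumably keeps stale artifacts from masking regressions.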