From d31ae7f85072fcda37fdc024bf9d1169f57ad218 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Thu, 24 Aug 2023 10:38:23 -0700 Subject: [PATCH 1/8] chore: Add stdlib to every crate as it is added to graph (#2392) --- crates/noirc_driver/src/lib.rs | 50 ++++++++++---------------- crates/noirc_frontend/src/graph/mod.rs | 7 ++++ crates/noirc_frontend/src/hir/mod.rs | 4 +++ crates/wasm/src/compile.rs | 35 ++++++++++++++---- 4 files changed, 57 insertions(+), 39 deletions(-) diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index 3511fbeabb6..d719c29d7bd 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -22,6 +22,8 @@ mod program; pub use contract::{CompiledContract, ContractFunction, ContractFunctionType}; pub use program::CompiledProgram; +const STD_CRATE_NAME: &str = "std"; + #[derive(Args, Clone, Debug, Default, Serialize, Deserialize)] pub struct CompileOptions { /// Emit debug information for the intermediate SSA IR @@ -58,16 +60,30 @@ pub fn compile_file( /// Adds the file from the file system at `Path` to the crate graph as a root file pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { + let path_to_std_lib_file = Path::new(STD_CRATE_NAME).join("lib.nr"); + let std_file_id = context.file_manager.add_file(&path_to_std_lib_file).unwrap(); + let std_crate_id = context.crate_graph.add_stdlib(std_file_id); + let root_file_id = context.file_manager.add_file(file_name).unwrap(); - context.crate_graph.add_crate_root(root_file_id) + let root_crate_id = context.crate_graph.add_crate_root(root_file_id); + + add_dep(context, root_crate_id, std_crate_id, STD_CRATE_NAME.parse().unwrap()); + + root_crate_id } // Adds the file from the file system at `Path` to the crate graph pub fn prepare_dependency(context: &mut Context, file_name: &Path) -> CrateId { let root_file_id = context.file_manager.add_file(file_name).unwrap(); - context.crate_graph.add_crate(root_file_id) + let crate_id = 
context.crate_graph.add_crate(root_file_id); + + // Every dependency has access to stdlib + let std_crate_id = context.stdlib_crate_id(); + add_dep(context, crate_id, *std_crate_id, STD_CRATE_NAME.parse().unwrap()); + + crate_id } /// Adds a edge in the crate graph for two crates @@ -83,23 +99,6 @@ pub fn add_dep( .expect("cyclic dependency triggered"); } -/// Propagates a given dependency to every other crate. -pub fn propagate_dep( - context: &mut Context, - dep_to_propagate: CrateId, - dep_to_propagate_name: &CrateName, -) { - let crate_ids: Vec<_> = - context.crate_graph.iter_keys().filter(|crate_id| *crate_id != dep_to_propagate).collect(); - - for crate_id in crate_ids { - context - .crate_graph - .add_dep(crate_id, dep_to_propagate_name.clone(), dep_to_propagate) - .expect("ice: cyclic error triggered with std library"); - } -} - /// Run the lexing, parsing, name resolution, and type checking passes. /// /// This returns a (possibly empty) vector of any warnings found on success. @@ -109,19 +108,6 @@ pub fn check_crate( crate_id: CrateId, deny_warnings: bool, ) -> Result { - // Add the stdlib before we check the crate - // TODO: This should actually be done when constructing the driver and then propagated to each dependency when added; - // however, the `create_non_local_crate` panics if you add the stdlib as the first crate in the graph and other - // parts of the code expect the `0` FileID to be the crate root. 
See also #1681 - let std_crate_name = "std"; - let path_to_std_lib_file = Path::new(std_crate_name).join("lib.nr"); - let root_file_id = context.file_manager.add_file(&path_to_std_lib_file).unwrap(); - - // You can add any crate type to the crate graph - // but you cannot depend on Binaries - let std_crate = context.crate_graph.add_stdlib(root_file_id); - propagate_dep(context, std_crate, &std_crate_name.parse().unwrap()); - let mut errors = vec![]; CrateDefMap::collect_defs(crate_id, context, &mut errors); diff --git a/crates/noirc_frontend/src/graph/mod.rs b/crates/noirc_frontend/src/graph/mod.rs index 0305854ca32..6a21f2dead9 100644 --- a/crates/noirc_frontend/src/graph/mod.rs +++ b/crates/noirc_frontend/src/graph/mod.rs @@ -130,6 +130,13 @@ impl CrateGraph { .expect("ICE: A root crate should exist in the CrateGraph") } + pub fn stdlib_crate_id(&self) -> &CrateId { + self.arena + .keys() + .find(|crate_id| crate_id.is_stdlib()) + .expect("ICE: The stdlib should exist in the CrateGraph") + } + pub fn add_crate_root(&mut self, file_id: FileId) -> CrateId { for (crate_id, crate_data) in self.arena.iter() { if crate_id.is_root() { diff --git a/crates/noirc_frontend/src/hir/mod.rs b/crates/noirc_frontend/src/hir/mod.rs index d0b24e90a76..707f2b20251 100644 --- a/crates/noirc_frontend/src/hir/mod.rs +++ b/crates/noirc_frontend/src/hir/mod.rs @@ -63,6 +63,10 @@ impl Context { self.crate_graph.root_crate_id() } + pub fn stdlib_crate_id(&self) -> &CrateId { + self.crate_graph.stdlib_crate_id() + } + // TODO: Decide if we actually need `function_name` and `fully_qualified_function_name` pub fn function_name(&self, id: &FuncId) -> &str { self.def_interner.function_name(id) diff --git a/crates/wasm/src/compile.rs b/crates/wasm/src/compile.rs index 01f286f924f..69a405d5e28 100644 --- a/crates/wasm/src/compile.rs +++ b/crates/wasm/src/compile.rs @@ -3,8 +3,8 @@ use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use log::debug; use noirc_driver::{ - check_crate, 
compile_contracts, compile_no_check, prepare_crate, prepare_dependency, - propagate_dep, CompileOptions, CompiledContract, + add_dep, check_crate, compile_contracts, compile_no_check, prepare_crate, prepare_dependency, + CompileOptions, CompiledContract, }; use noirc_frontend::{graph::CrateGraph, hir::Context}; use serde::{Deserialize, Serialize}; @@ -58,11 +58,32 @@ impl Default for WASMCompileOptions { } } -fn add_noir_lib(context: &mut Context, crate_name: &str) { - let path_to_lib = Path::new(&crate_name).join("lib.nr"); - let library_crate = prepare_dependency(context, &path_to_lib); - - propagate_dep(context, library_crate, &crate_name.parse().unwrap()); +fn add_noir_lib(context: &mut Context, library_name: &str) { + let path_to_lib = Path::new(&library_name).join("lib.nr"); + let library_crate_id = prepare_dependency(context, &path_to_lib); + + add_dep(context, *context.root_crate_id(), library_crate_id, library_name.parse().unwrap()); + + // TODO: Remove this code that attaches every crate to every other crate as a dependency + let root_crate_id = context.root_crate_id(); + let stdlib_crate_id = context.stdlib_crate_id(); + let other_crate_ids: Vec<_> = context + .crate_graph + .iter_keys() + .filter(|crate_id| { + // We don't want to attach this crate to itself or stdlib, nor re-attach it to the root crate + crate_id != &library_crate_id + && crate_id != root_crate_id + && crate_id != stdlib_crate_id + }) + .collect(); + + for crate_id in other_crate_ids { + context + .crate_graph + .add_dep(crate_id, library_name.parse().unwrap(), library_crate_id) + .expect(&format!("ICE: Cyclic error triggered by {} library", library_name)); + } } #[wasm_bindgen] From ff62377d2993e0d12c24713db41897c5c91d1f5f Mon Sep 17 00:00:00 2001 From: kek kek kek Date: Thu, 24 Aug 2023 11:26:52 -0700 Subject: [PATCH 2/8] chore: improve error message for InternalError (#2429) --- crates/noirc_evaluator/src/errors.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/crates/noirc_evaluator/src/errors.rs b/crates/noirc_evaluator/src/errors.rs index 4be94148b71..c3959fcf8a6 100644 --- a/crates/noirc_evaluator/src/errors.rs +++ b/crates/noirc_evaluator/src/errors.rs @@ -94,11 +94,11 @@ impl From for FileDiagnostic { impl RuntimeError { fn into_diagnostic(self) -> Diagnostic { match self { - RuntimeError::InternalError(_) => { + RuntimeError::InternalError(cause) => { Diagnostic::simple_error( "Internal Consistency Evaluators Errors: \n This is likely a bug. Consider Opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), - "".to_string(), + cause.to_string(), noirc_errors::Span::inclusive(0, 0) ) } From 74af99d7230abf453e00ef4a48a79e4f0ed17a10 Mon Sep 17 00:00:00 2001 From: Ethan-000 Date: Thu, 24 Aug 2023 22:41:36 +0100 Subject: [PATCH 3/8] feat: Add `test(should_fail)` attribute for tests that are meant to fail (#2418) Co-authored-by: Blaine Bublitz Co-authored-by: kevaundray --- crates/lsp/src/lib.rs | 4 +- crates/nargo_cli/src/cli/test_cmd.rs | 81 ++- crates/noirc_frontend/src/ast/function.rs | 2 +- crates/noirc_frontend/src/hir/def_map/mod.rs | 43 +- crates/noirc_frontend/src/hir/mod.rs | 22 +- .../src/hir/resolution/resolver.rs | 3 +- crates/noirc_frontend/src/lexer/lexer.rs | 624 ++++++++++-------- crates/noirc_frontend/src/lexer/token.rs | 49 +- 8 files changed, 494 insertions(+), 334 deletions(-) diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 802ecab5798..7e48f4b4d58 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -199,8 +199,8 @@ fn on_code_lens_request( let tests = context .get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Anything); - for (func_name, func_id) in tests { - let location = context.function_meta(&func_id).name.location; + for (func_name, test_function) in tests { + let location = context.function_meta(&test_function.get_id()).name.location; let file_id = location.file; // Ignore diagnostics for any file that wasn't the 
file we saved diff --git a/crates/nargo_cli/src/cli/test_cmd.rs b/crates/nargo_cli/src/cli/test_cmd.rs index 94c8ff86dcd..ddf34933722 100644 --- a/crates/nargo_cli/src/cli/test_cmd.rs +++ b/crates/nargo_cli/src/cli/test_cmd.rs @@ -7,8 +7,7 @@ use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelec use noirc_driver::{compile_no_check, CompileOptions}; use noirc_frontend::{ graph::CrateName, - hir::{Context, FunctionNameMatch}, - node_interner::FuncId, + hir::{def_map::TestFunction, Context, FunctionNameMatch}, }; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; @@ -123,30 +122,70 @@ fn run_tests( fn run_test( backend: &B, test_name: &str, - main: FuncId, + test_function: TestFunction, context: &Context, show_output: bool, config: &CompileOptions, ) -> Result<(), CliError> { - let mut program = compile_no_check(context, config, main).map_err(|err| { + let report_error = |err| { noirc_errors::reporter::report_all(&context.file_manager, &[err], config.deny_warnings); - CliError::Generic(format!("Test '{test_name}' failed to compile")) - })?; - - // Note: We could perform this test using the unoptimized ACIR as generated by `compile_no_check`. - program.circuit = optimize_circuit(backend, program.circuit).unwrap().0; - - // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, - // otherwise constraints involving these expressions will not error. 
- match execute_circuit(backend, program.circuit, WitnessMap::new(), show_output) { - Ok(_) => Ok(()), - Err(error) => { - let writer = StandardStream::stderr(ColorChoice::Always); - let mut writer = writer.lock(); - writer.set_color(ColorSpec::new().set_fg(Some(Color::Red))).ok(); - writeln!(writer, "failed").ok(); - writer.reset().ok(); - Err(error.into()) + Err(CliError::Generic(format!("Test '{test_name}' failed to compile"))) + }; + let write_error = |err| { + let writer = StandardStream::stderr(ColorChoice::Always); + let mut writer = writer.lock(); + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red))).ok(); + writeln!(writer, "failed").ok(); + writer.reset().ok(); + Err(err) + }; + + let program = compile_no_check(context, config, test_function.get_id()); + match program { + Ok(mut program) => { + // Note: We don't need to use the optimized ACIR here + program.circuit = optimize_circuit(backend, program.circuit).unwrap().0; + + // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, + // otherwise constraints involving these expressions will not error. + let circuit_execution = + execute_circuit(backend, program.circuit, WitnessMap::new(), show_output); + + if test_function.should_fail() { + match circuit_execution { + Ok(_) => { + write_error(CliError::Generic(format!("Test '{test_name}' should fail"))) + } + Err(_) => Ok(()), + } + } else { + match circuit_execution { + Ok(_) => Ok(()), + Err(error) => write_error(error.into()), + } + } + } + // Test function failed to compile + // + // Note: This could be because the compiler was able to deduce + // that a constraint was never satisfiable. + // An example of this is the program `assert(false)` + // In that case, we check if the test function should fail, and if so, we return Ok. + Err(err) => { + // The test has failed compilation, but it should never fail. Report error. 
+ if !test_function.should_fail() { + return report_error(err); + } + + // The test has failed compilation, check if it is because the program is never satisfiable. + // If it is never satisfiable, then this is the expected behavior. + let program_is_never_satisfiable = err.diagnostic.message.contains("Failed constraint"); + if program_is_never_satisfiable { + return Ok(()); + } + + // The test has failed compilation, but its a compilation error. Report error + report_error(err) } } } diff --git a/crates/noirc_frontend/src/ast/function.rs b/crates/noirc_frontend/src/ast/function.rs index 377e6aa77ad..cedb48b1714 100644 --- a/crates/noirc_frontend/src/ast/function.rs +++ b/crates/noirc_frontend/src/ast/function.rs @@ -83,7 +83,7 @@ impl From for NoirFunction { let kind = match fd.attribute { Some(Attribute::Builtin(_)) => FunctionKind::Builtin, Some(Attribute::Foreign(_)) => FunctionKind::LowLevel, - Some(Attribute::Test) => FunctionKind::Normal, + Some(Attribute::Test { .. }) => FunctionKind::Normal, Some(Attribute::Oracle(_)) => FunctionKind::Oracle, Some(Attribute::Deprecated(_)) | None => FunctionKind::Normal, Some(Attribute::Custom(_)) => FunctionKind::Normal, diff --git a/crates/noirc_frontend/src/hir/def_map/mod.rs b/crates/noirc_frontend/src/hir/def_map/mod.rs index 2dc8c5ec96f..f97d7e9bf5d 100644 --- a/crates/noirc_frontend/src/hir/def_map/mod.rs +++ b/crates/noirc_frontend/src/hir/def_map/mod.rs @@ -3,7 +3,7 @@ use crate::hir::def_collector::dc_crate::DefCollector; use crate::hir::Context; use crate::node_interner::{FuncId, NodeInterner}; use crate::parser::{parse_program, ParsedModule}; -use crate::token::Attribute; +use crate::token::{Attribute, TestScope}; use arena::{Arena, Index}; use fm::{FileId, FileManager}; use noirc_errors::{FileDiagnostic, Location}; @@ -129,12 +129,18 @@ impl CrateDefMap { pub fn get_all_test_functions<'a>( &'a self, interner: &'a NodeInterner, - ) -> impl Iterator + 'a { + ) -> impl Iterator + 'a { 
self.modules.iter().flat_map(|(_, module)| { - module - .value_definitions() - .filter_map(|id| id.as_function()) - .filter(|id| interner.function_meta(id).attributes == Some(Attribute::Test)) + module.value_definitions().filter_map(|id| { + if let Some(func_id) = id.as_function() { + match interner.function_meta(&func_id).attributes { + Some(Attribute::Test(scope)) => Some(TestFunction::new(func_id, scope)), + _ => None, + } + } else { + None + } + }) }) } @@ -221,3 +227,28 @@ impl std::ops::IndexMut for CrateDefMap { &mut self.modules[local_module_id.0] } } + +pub struct TestFunction { + id: FuncId, + scope: TestScope, +} + +impl TestFunction { + fn new(id: FuncId, scope: TestScope) -> Self { + TestFunction { id, scope } + } + + /// Returns the function id of the test function + pub fn get_id(&self) -> FuncId { + self.id + } + + /// Returns true if the test function has been specified to fail + /// This is done by annotating the function with `#[test(should_fail)]` + pub fn should_fail(&self) -> bool { + match self.scope { + TestScope::ShouldFail => true, + TestScope::None => false, + } + } +} diff --git a/crates/noirc_frontend/src/hir/mod.rs b/crates/noirc_frontend/src/hir/mod.rs index 707f2b20251..3cc9c91bdd9 100644 --- a/crates/noirc_frontend/src/hir/mod.rs +++ b/crates/noirc_frontend/src/hir/mod.rs @@ -11,6 +11,8 @@ use def_map::{Contract, CrateDefMap}; use fm::FileManager; use std::collections::HashMap; +use self::def_map::TestFunction; + /// Helper object which groups together several useful context objects used /// during name resolution. Once name resolution is finished, only the /// def_interner is required for type inference and monomorphization. 
@@ -111,22 +113,22 @@ impl Context { &self, crate_id: &CrateId, pattern: FunctionNameMatch, - ) -> Vec<(String, FuncId)> { + ) -> Vec<(String, TestFunction)> { let interner = &self.def_interner; let def_map = self.def_map(crate_id).expect("The local crate should be analyzed already"); def_map .get_all_test_functions(interner) - .filter_map(|id| { - let fully_qualified_name = self.fully_qualified_function_name(crate_id, &id); + .filter_map(|test_function| { + let fully_qualified_name = + self.fully_qualified_function_name(crate_id, &test_function.get_id()); match &pattern { - FunctionNameMatch::Anything => Some((fully_qualified_name, id)), - FunctionNameMatch::Exact(pattern) => { - (&fully_qualified_name == pattern).then_some((fully_qualified_name, id)) - } - FunctionNameMatch::Contains(pattern) => { - fully_qualified_name.contains(pattern).then_some((fully_qualified_name, id)) - } + FunctionNameMatch::Anything => Some((fully_qualified_name, test_function)), + FunctionNameMatch::Exact(pattern) => (&fully_qualified_name == pattern) + .then_some((fully_qualified_name, test_function)), + FunctionNameMatch::Contains(pattern) => fully_qualified_name + .contains(pattern) + .then_some((fully_qualified_name, test_function)), } }) .collect() diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index 6ce06d4a1ae..47d624e028e 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -700,7 +700,8 @@ impl<'a> Resolver<'a> { self.push_err(ResolverError::DistinctNotAllowed { ident: func.name_ident().clone() }); } - if attributes == Some(Attribute::Test) && !parameters.is_empty() { + if matches!(attributes, Some(Attribute::Test { .. 
})) && !parameters.is_empty() + { self.push_err(ResolverError::TestFunctionHasParameters { span: func.name_ident().span(), }); diff --git a/crates/noirc_frontend/src/lexer/lexer.rs b/crates/noirc_frontend/src/lexer/lexer.rs index 3265249b13e..18adc0d51d7 100644 --- a/crates/noirc_frontend/src/lexer/lexer.rs +++ b/crates/noirc_frontend/src/lexer/lexer.rs @@ -408,297 +408,344 @@ impl<'a> Iterator for Lexer<'a> { } } -#[test] -fn test_single_double_char() { - let input = "! != + ( ) { } [ ] | , ; : :: < <= > >= & - -> . .. % / * = == << >>"; - - let expected = vec![ - Token::Bang, - Token::NotEqual, - Token::Plus, - Token::LeftParen, - Token::RightParen, - Token::LeftBrace, - Token::RightBrace, - Token::LeftBracket, - Token::RightBracket, - Token::Pipe, - Token::Comma, - Token::Semicolon, - Token::Colon, - Token::DoubleColon, - Token::Less, - Token::LessEqual, - Token::Greater, - Token::GreaterEqual, - Token::Ampersand, - Token::Minus, - Token::Arrow, - Token::Dot, - Token::DoubleDot, - Token::Percent, - Token::Slash, - Token::Star, - Token::Assign, - Token::Equal, - Token::ShiftLeft, - Token::Greater, - Token::Greater, - Token::EOF, - ]; - - let mut lexer = Lexer::new(input); - - for token in expected.into_iter() { - let got = lexer.next_token().unwrap(); - assert_eq!(got, token); +#[cfg(test)] +mod tests { + use super::*; + use crate::token::TestScope; + #[test] + fn test_single_double_char() { + let input = "! != + ( ) { } [ ] | , ; : :: < <= > >= & - -> . .. 
% / * = == << >>"; + + let expected = vec![ + Token::Bang, + Token::NotEqual, + Token::Plus, + Token::LeftParen, + Token::RightParen, + Token::LeftBrace, + Token::RightBrace, + Token::LeftBracket, + Token::RightBracket, + Token::Pipe, + Token::Comma, + Token::Semicolon, + Token::Colon, + Token::DoubleColon, + Token::Less, + Token::LessEqual, + Token::Greater, + Token::GreaterEqual, + Token::Ampersand, + Token::Minus, + Token::Arrow, + Token::Dot, + Token::DoubleDot, + Token::Percent, + Token::Slash, + Token::Star, + Token::Assign, + Token::Equal, + Token::ShiftLeft, + Token::Greater, + Token::Greater, + Token::EOF, + ]; + + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } } -} -#[test] -fn invalid_attribute() { - let input = "#"; - let mut lexer = Lexer::new(input); + #[test] + fn invalid_attribute() { + let input = "#"; + let mut lexer = Lexer::new(input); - let token = lexer.next().unwrap(); - assert!(token.is_err()); -} + let token = lexer.next().unwrap(); + assert!(token.is_err()); + } -#[test] -fn deprecated_attribute() { - let input = r#"#[deprecated]"#; - let mut lexer = Lexer::new(input); + #[test] + fn deprecated_attribute() { + let input = r#"#[deprecated]"#; + let mut lexer = Lexer::new(input); - let token = lexer.next().unwrap().unwrap(); - assert_eq!(token.token(), &Token::Attribute(Attribute::Deprecated(None))); -} + let token = lexer.next().unwrap().unwrap(); + assert_eq!(token.token(), &Token::Attribute(Attribute::Deprecated(None))); + } -#[test] -fn deprecated_attribute_with_note() { - let input = r#"#[deprecated("hello")]"#; - let mut lexer = Lexer::new(input); + #[test] + fn deprecated_attribute_with_note() { + let input = r#"#[deprecated("hello")]"#; + let mut lexer = Lexer::new(input); - let token = lexer.next().unwrap().unwrap(); - assert_eq!(token.token(), &Token::Attribute(Attribute::Deprecated("hello".to_string().into()))); -} + let token = 
lexer.next().unwrap().unwrap(); + assert_eq!( + token.token(), + &Token::Attribute(Attribute::Deprecated("hello".to_string().into())) + ); + } -#[test] -fn custom_attribute() { - let input = r#"#[custom(hello)]"#; - let mut lexer = Lexer::new(input); + #[test] + fn custom_attribute() { + let input = r#"#[custom(hello)]"#; + let mut lexer = Lexer::new(input); - let token = lexer.next().unwrap().unwrap(); - assert_eq!(token.token(), &Token::Attribute(Attribute::Custom("custom(hello)".to_string()))); -} + let token = lexer.next().unwrap().unwrap(); + assert_eq!( + token.token(), + &Token::Attribute(Attribute::Custom("custom(hello)".to_string())) + ); + } -#[test] -fn test_custom_gate_syntax() { - let input = "#[foreign(sha256)]#[foreign(blake2s)]#[builtin(sum)]"; + #[test] + fn test_attribute() { + let input = r#"#[test]"#; + let mut lexer = Lexer::new(input); - let expected = vec![ - Token::Attribute(Attribute::Foreign("sha256".to_string())), - Token::Attribute(Attribute::Foreign("blake2s".to_string())), - Token::Attribute(Attribute::Builtin("sum".to_string())), - ]; + let token = lexer.next().unwrap().unwrap(); + assert_eq!(token.token(), &Token::Attribute(Attribute::Test(TestScope::None))); + } + #[test] + fn test_attribute_with_valid_scope() { + let input = r#"#[test(should_fail)]"#; + let mut lexer = Lexer::new(input); - let mut lexer = Lexer::new(input); - for token in expected.into_iter() { - let got = lexer.next_token().unwrap(); - assert_eq!(got, token); + let token = lexer.next().unwrap().unwrap(); + assert_eq!(token.token(), &Token::Attribute(Attribute::Test(TestScope::ShouldFail))); } -} -#[test] -fn test_int_type() { - let input = "u16 i16 i108 u104.5"; + #[test] + fn test_attribute_with_invalid_scope() { + let input = r#"#[test(invalid_scope)]"#; + let mut lexer = Lexer::new(input); - let expected = vec![ - Token::IntType(IntType::Unsigned(16)), - Token::IntType(IntType::Signed(16)), - Token::IntType(IntType::Signed(108)), - 
Token::IntType(IntType::Unsigned(104)), - Token::Dot, - Token::Int(5_i128.into()), - ]; + let token = lexer.next().unwrap(); + let err = match token { + Ok(_) => panic!("test has an invalid scope, so expected an error"), + Err(err) => err, + }; + + // Check if error is MalformedFuncAttribute and found is "foo" + let sub_string = match err { + LexerErrorKind::MalformedFuncAttribute { found, .. } => found, + _ => panic!("expected malformed func attribute error"), + }; - let mut lexer = Lexer::new(input); - for token in expected.into_iter() { - let got = lexer.next_token().unwrap(); - assert_eq!(got, token); + assert_eq!(sub_string, "test(invalid_scope)"); } -} -#[test] -fn test_arithmetic_sugar() { - let input = "+= -= *= /= %="; - - let expected = vec![ - Token::Plus, - Token::Assign, - Token::Minus, - Token::Assign, - Token::Star, - Token::Assign, - Token::Slash, - Token::Assign, - Token::Percent, - Token::Assign, - ]; - - let mut lexer = Lexer::new(input); - for token in expected.into_iter() { - let got = lexer.next_token().unwrap(); - assert_eq!(got, token); + #[test] + fn test_custom_gate_syntax() { + let input = "#[foreign(sha256)]#[foreign(blake2s)]#[builtin(sum)]"; + + let expected = vec![ + Token::Attribute(Attribute::Foreign("sha256".to_string())), + Token::Attribute(Attribute::Foreign("blake2s".to_string())), + Token::Attribute(Attribute::Builtin("sum".to_string())), + ]; + + let mut lexer = Lexer::new(input); + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } } -} -#[test] -fn unterminated_block_comment() { - let input = "/*/"; + #[test] + fn test_int_type() { + let input = "u16 i16 i108 u104.5"; - let mut lexer = Lexer::new(input); - let token = lexer.next().unwrap(); + let expected = vec![ + Token::IntType(IntType::Unsigned(16)), + Token::IntType(IntType::Signed(16)), + Token::IntType(IntType::Signed(108)), + Token::IntType(IntType::Unsigned(104)), + Token::Dot, + Token::Int(5_i128.into()), + ]; 
- assert!(token.is_err()); -} + let mut lexer = Lexer::new(input); + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } -#[test] -fn test_comment() { - let input = "// hello + #[test] + fn test_arithmetic_sugar() { + let input = "+= -= *= /= %="; + + let expected = vec![ + Token::Plus, + Token::Assign, + Token::Minus, + Token::Assign, + Token::Star, + Token::Assign, + Token::Slash, + Token::Assign, + Token::Percent, + Token::Assign, + ]; + + let mut lexer = Lexer::new(input); + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn unterminated_block_comment() { + let input = "/*/"; + + let mut lexer = Lexer::new(input); + let token = lexer.next().unwrap(); + + assert!(token.is_err()); + } + + #[test] + fn test_comment() { + let input = "// hello let x = 5 "; - let expected = vec![ - Token::Keyword(Keyword::Let), - Token::Ident("x".to_string()), - Token::Assign, - Token::Int(FieldElement::from(5_i128)), - ]; - - let mut lexer = Lexer::new(input); - for token in expected.into_iter() { - let first_lexer_output = lexer.next_token().unwrap(); - assert_eq!(first_lexer_output, token); + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("x".to_string()), + Token::Assign, + Token::Int(FieldElement::from(5_i128)), + ]; + + let mut lexer = Lexer::new(input); + for token in expected.into_iter() { + let first_lexer_output = lexer.next_token().unwrap(); + assert_eq!(first_lexer_output, token); + } } -} -#[test] -fn test_block_comment() { - let input = " + #[test] + fn test_block_comment() { + let input = " /* comment */ let x = 5 /* comment */ "; - let expected = vec![ - Token::Keyword(Keyword::Let), - Token::Ident("x".to_string()), - Token::Assign, - Token::Int(FieldElement::from(5_i128)), - ]; - - let mut lexer = Lexer::new(input); - for token in expected.into_iter() { - let first_lexer_output = lexer.next_token().unwrap(); - 
assert_eq!(first_lexer_output, token); + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("x".to_string()), + Token::Assign, + Token::Int(FieldElement::from(5_i128)), + ]; + + let mut lexer = Lexer::new(input); + for token in expected.into_iter() { + let first_lexer_output = lexer.next_token().unwrap(); + assert_eq!(first_lexer_output, token); + } } -} -#[test] -fn test_nested_block_comments() { - let input = " + #[test] + fn test_nested_block_comments() { + let input = " /* /* */ /** */ /*! */ */ let x = 5 /* /* */ /** */ /*! */ */ "; - let expected = vec![ - Token::Keyword(Keyword::Let), - Token::Ident("x".to_string()), - Token::Assign, - Token::Int(FieldElement::from(5_i128)), - ]; - - let mut lexer = Lexer::new(input); - for token in expected.into_iter() { - let first_lexer_output = lexer.next_token().unwrap(); - assert_eq!(first_lexer_output, token); + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("x".to_string()), + Token::Assign, + Token::Int(FieldElement::from(5_i128)), + ]; + + let mut lexer = Lexer::new(input); + for token in expected.into_iter() { + let first_lexer_output = lexer.next_token().unwrap(); + assert_eq!(first_lexer_output, token); + } } -} -#[test] -fn test_eat_string_literal() { - let input = "let _word = \"hello\""; - - let expected = vec![ - Token::Keyword(Keyword::Let), - Token::Ident("_word".to_string()), - Token::Assign, - Token::Str("hello".to_string()), - ]; - let mut lexer = Lexer::new(input); - - for token in expected.into_iter() { - let got = lexer.next_token().unwrap(); - assert_eq!(got, token); + #[test] + fn test_eat_string_literal() { + let input = "let _word = \"hello\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::Str("hello".to_string()), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } } -} -#[test] -fn 
test_eat_hex_int() { - let input = "0x05"; + #[test] + fn test_eat_hex_int() { + let input = "0x05"; - let expected = vec![Token::Int(5_i128.into())]; - let mut lexer = Lexer::new(input); + let expected = vec![Token::Int(5_i128.into())]; + let mut lexer = Lexer::new(input); - for token in expected.into_iter() { - let got = lexer.next_token().unwrap(); - assert_eq!(got, token); + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } } -} -#[test] -fn test_span() { - let input = "let x = 5"; + #[test] + fn test_span() { + let input = "let x = 5"; - // Let - let start_position = Position::default(); - let let_position = start_position + 2; - let let_token = Token::Keyword(Keyword::Let).into_span(start_position, let_position); + // Let + let start_position = Position::default(); + let let_position = start_position + 2; + let let_token = Token::Keyword(Keyword::Let).into_span(start_position, let_position); - // Skip whitespace - let whitespace_position = let_position + 1; + // Skip whitespace + let whitespace_position = let_position + 1; - // Identifier position - let ident_position = whitespace_position + 1; - let ident_token = Token::Ident("x".to_string()).into_single_span(ident_position); + // Identifier position + let ident_position = whitespace_position + 1; + let ident_token = Token::Ident("x".to_string()).into_single_span(ident_position); - // Skip whitespace - let whitespace_position = ident_position + 1; + // Skip whitespace + let whitespace_position = ident_position + 1; - // Assign position - let assign_position = whitespace_position + 1; - let assign_token = Token::Assign.into_single_span(assign_position); + // Assign position + let assign_position = whitespace_position + 1; + let assign_token = Token::Assign.into_single_span(assign_position); - // Skip whitespace - let whitespace_position = assign_position + 1; + // Skip whitespace + let whitespace_position = assign_position + 1; - // Int position - let 
int_position = whitespace_position + 1; - let int_token = Token::Int(5_i128.into()).into_single_span(int_position); + // Int position + let int_position = whitespace_position + 1; + let int_token = Token::Int(5_i128.into()).into_single_span(int_position); - let expected = vec![let_token, ident_token, assign_token, int_token]; - let mut lexer = Lexer::new(input); + let expected = vec![let_token, ident_token, assign_token, int_token]; + let mut lexer = Lexer::new(input); - for spanned_token in expected.into_iter() { - let got = lexer.next_token().unwrap(); - assert_eq!(got.to_span(), spanned_token.to_span()); - assert_eq!(got, spanned_token); + for spanned_token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got.to_span(), spanned_token.to_span()); + assert_eq!(got, spanned_token); + } } -} -#[test] -fn test_basic_language_syntax() { - let input = " + #[test] + fn test_basic_language_syntax() { + let input = " let five = 5; let ten : Field = 10; let mul = fn(x, y) { @@ -708,60 +755,61 @@ fn test_basic_language_syntax() { assert(ten + five == 15); "; - let expected = vec![ - Token::Keyword(Keyword::Let), - Token::Ident("five".to_string()), - Token::Assign, - Token::Int(5_i128.into()), - Token::Semicolon, - Token::Keyword(Keyword::Let), - Token::Ident("ten".to_string()), - Token::Colon, - Token::Keyword(Keyword::Field), - Token::Assign, - Token::Int(10_i128.into()), - Token::Semicolon, - Token::Keyword(Keyword::Let), - Token::Ident("mul".to_string()), - Token::Assign, - Token::Keyword(Keyword::Fn), - Token::LeftParen, - Token::Ident("x".to_string()), - Token::Comma, - Token::Ident("y".to_string()), - Token::RightParen, - Token::LeftBrace, - Token::Ident("x".to_string()), - Token::Star, - Token::Ident("y".to_string()), - Token::Semicolon, - Token::RightBrace, - Token::Semicolon, - Token::Keyword(Keyword::Constrain), - Token::Ident("mul".to_string()), - Token::LeftParen, - Token::Ident("five".to_string()), - Token::Comma, - 
Token::Ident("ten".to_string()), - Token::RightParen, - Token::Equal, - Token::Int(50_i128.into()), - Token::Semicolon, - Token::Keyword(Keyword::Assert), - Token::LeftParen, - Token::Ident("ten".to_string()), - Token::Plus, - Token::Ident("five".to_string()), - Token::Equal, - Token::Int(15_i128.into()), - Token::RightParen, - Token::Semicolon, - Token::EOF, - ]; - let mut lexer = Lexer::new(input); - - for token in expected.into_iter() { - let got = lexer.next_token().unwrap(); - assert_eq!(got, token); + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("five".to_string()), + Token::Assign, + Token::Int(5_i128.into()), + Token::Semicolon, + Token::Keyword(Keyword::Let), + Token::Ident("ten".to_string()), + Token::Colon, + Token::Keyword(Keyword::Field), + Token::Assign, + Token::Int(10_i128.into()), + Token::Semicolon, + Token::Keyword(Keyword::Let), + Token::Ident("mul".to_string()), + Token::Assign, + Token::Keyword(Keyword::Fn), + Token::LeftParen, + Token::Ident("x".to_string()), + Token::Comma, + Token::Ident("y".to_string()), + Token::RightParen, + Token::LeftBrace, + Token::Ident("x".to_string()), + Token::Star, + Token::Ident("y".to_string()), + Token::Semicolon, + Token::RightBrace, + Token::Semicolon, + Token::Keyword(Keyword::Constrain), + Token::Ident("mul".to_string()), + Token::LeftParen, + Token::Ident("five".to_string()), + Token::Comma, + Token::Ident("ten".to_string()), + Token::RightParen, + Token::Equal, + Token::Int(50_i128.into()), + Token::Semicolon, + Token::Keyword(Keyword::Assert), + Token::LeftParen, + Token::Ident("ten".to_string()), + Token::Plus, + Token::Ident("five".to_string()), + Token::Equal, + Token::Int(15_i128.into()), + Token::RightParen, + Token::Semicolon, + Token::EOF, + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } } } diff --git a/crates/noirc_frontend/src/lexer/token.rs 
b/crates/noirc_frontend/src/lexer/token.rs index 6291ac4de12..5a47d4ece73 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -316,6 +316,36 @@ impl IntType { } } +/// TestScope is used to specify additional annotations for test functions +#[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)] +pub enum TestScope { + /// If a test has a scope of ShouldFail, then it is expected to fail + ShouldFail, + /// No scope is applied and so the test must pass + None, +} + +impl TestScope { + fn lookup_str(string: &str) -> Option { + match string { + "should_fail" => Some(TestScope::ShouldFail), + _ => None, + } + } + fn as_str(&self) -> &'static str { + match self { + TestScope::ShouldFail => "should_fail", + TestScope::None => "", + } + } +} + +impl fmt::Display for TestScope { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "({})", self.as_str()) + } +} + #[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)] // Attributes are special language markers in the target language // An example of one is `#[SHA256]` . 
Currently only Foreign attributes are supported @@ -325,17 +355,17 @@ pub enum Attribute { Builtin(String), Oracle(String), Deprecated(Option), - Test, + Test(TestScope), Custom(String), } impl fmt::Display for Attribute { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { + match self { Attribute::Foreign(ref k) => write!(f, "#[foreign({k})]"), Attribute::Builtin(ref k) => write!(f, "#[builtin({k})]"), Attribute::Oracle(ref k) => write!(f, "#[oracle({k})]"), - Attribute::Test => write!(f, "#[test]"), + Attribute::Test(scope) => write!(f, "#[test{}]", scope), Attribute::Deprecated(None) => write!(f, "#[deprecated]"), Attribute::Deprecated(Some(ref note)) => write!(f, r#"#[deprecated("{note}")]"#), Attribute::Custom(ref k) => write!(f, "#[{k}]"), @@ -391,7 +421,16 @@ impl Attribute { Attribute::Deprecated(name.trim_matches('"').to_string().into()) } - ["test"] => Attribute::Test, + ["test"] => Attribute::Test(TestScope::None), + ["test", name] => { + validate(name)?; + let malformed_scope = + LexerErrorKind::MalformedFuncAttribute { span, found: word.to_owned() }; + match TestScope::lookup_str(name) { + Some(scope) => Attribute::Test(scope), + None => return Err(malformed_scope), + } + } tokens => { tokens.iter().try_for_each(|token| validate(token))?; Attribute::Custom(word.to_owned()) @@ -431,7 +470,7 @@ impl AsRef for Attribute { Attribute::Builtin(string) => string, Attribute::Oracle(string) => string, Attribute::Deprecated(Some(string)) => string, - Attribute::Test | Attribute::Deprecated(None) => "", + Attribute::Test { .. 
} | Attribute::Deprecated(None) => "", Attribute::Custom(string) => string, } } From 41b568d1950f45049a322e316fd9acfa52a43208 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Thu, 24 Aug 2023 15:14:27 -0700 Subject: [PATCH 4/8] fix(lsp): Remove duplicated creation of lenses (#2433) --- crates/lsp/src/lib.rs | 67 ------------------------------------------- 1 file changed, 67 deletions(-) diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 7e48f4b4d58..a1bef82e4ca 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -313,73 +313,6 @@ fn on_code_lens_request( lenses.push(compile_lens); } } - - if package.is_binary() { - if let Some(main_func_id) = context.get_main_function(&crate_id) { - let location = context.function_meta(&main_func_id).name.location; - let file_id = location.file; - - // Ignore diagnostics for any file that wasn't the file we saved - // TODO: In the future, we could create "related" diagnostics for these files - // TODO: This currently just appends the `.nr` file extension that we store as a constant, - // but that won't work if we accept other extensions - if fm.path(file_id).with_extension(FILE_EXTENSION) != file_path { - continue; - } - - let range = byte_span_to_range(files, file_id.as_usize(), location.span.into()) - .unwrap_or_default(); - - let command = Command { - title: format!("{ARROW} {COMPILE_CODELENS_TITLE}"), - command: COMPILE_COMMAND.into(), - arguments: Some(vec![ - "--program-dir".into(), - format!("{}", workspace.root_dir.display()).into(), - "--package".into(), - format!("{}", package.name).into(), - ]), - }; - - let lens = CodeLens { range, command: command.into(), data: None }; - - lenses.push(lens); - } - } - - if package.is_contract() { - // Currently not looking to deduplicate this since we don't have a clear decision on if the Contract stuff is staying - for contract in context.get_all_contracts(&crate_id) { - let location = contract.location; - let file_id = location.file; - - // Ignore 
diagnostics for any file that wasn't the file we saved - // TODO: In the future, we could create "related" diagnostics for these files - // TODO: This currently just appends the `.nr` file extension that we store as a constant, - // but that won't work if we accept other extensions - if fm.path(file_id).with_extension(FILE_EXTENSION) != file_path { - continue; - } - - let range = byte_span_to_range(files, file_id.as_usize(), location.span.into()) - .unwrap_or_default(); - - let command = Command { - title: format!("{ARROW} {COMPILE_CODELENS_TITLE}"), - command: COMPILE_COMMAND.into(), - arguments: Some(vec![ - "--program-dir".into(), - format!("{}", workspace.root_dir.display()).into(), - "--package".into(), - format!("{}", package.name).into(), - ]), - }; - - let lens = CodeLens { range, command: command.into(), data: None }; - - lenses.push(lens); - } - } } let res = if lenses.is_empty() { Ok(None) } else { Ok(Some(lenses)) }; From 1435a86b0ae315abf7553e140dd091d0161ed7b5 Mon Sep 17 00:00:00 2001 From: Alex Vitkov <44268717+alexvitkov@users.noreply.github.com> Date: Fri, 25 Aug 2023 17:28:00 +0300 Subject: [PATCH 5/8] fix(ssa): codegen missing check for unary minus (#2413) --- .../unary_operators/Nargo.toml | 7 ++++ .../unary_operators/src/main.nr | 7 ++++ .../src/ssa/ssa_gen/context.rs | 1 + crates/noirc_evaluator/src/ssa/ssa_gen/mod.rs | 2 +- .../noirc_frontend/src/hir/type_check/expr.rs | 6 +++ .../src/monomorphization/ast.rs | 1 + .../src/monomorphization/mod.rs | 39 ++++++++++++------- 7 files changed, 47 insertions(+), 16 deletions(-) create mode 100644 crates/nargo_cli/tests/execution_success/unary_operators/Nargo.toml create mode 100644 crates/nargo_cli/tests/execution_success/unary_operators/src/main.nr diff --git a/crates/nargo_cli/tests/execution_success/unary_operators/Nargo.toml b/crates/nargo_cli/tests/execution_success/unary_operators/Nargo.toml new file mode 100644 index 00000000000..65ad7c1c70c --- /dev/null +++ 
b/crates/nargo_cli/tests/execution_success/unary_operators/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "unary_operators" +type = "bin" +authors = [""] +compiler_version = "0.10.3" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/execution_success/unary_operators/src/main.nr b/crates/nargo_cli/tests/execution_success/unary_operators/src/main.nr new file mode 100644 index 00000000000..1c9145fd81f --- /dev/null +++ b/crates/nargo_cli/tests/execution_success/unary_operators/src/main.nr @@ -0,0 +1,7 @@ +fn main() { + let x = -1; + assert(x == 1 - 2); + + let y: i32 = -1; + assert(y == 1 - 2); +} \ No newline at end of file diff --git a/crates/noirc_evaluator/src/ssa/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa/ssa_gen/context.rs index 0b3ee5b9acf..5e29ed60d42 100644 --- a/crates/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -306,6 +306,7 @@ impl<'a> FunctionContext<'a> { if operator_requires_swapped_operands(operator) { std::mem::swap(&mut lhs, &mut rhs); } + self.builder.set_location(location).insert_binary(lhs, op, rhs) } }; diff --git a/crates/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 1367b1753af..59bdf7ff0e6 100644 --- a/crates/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -207,7 +207,7 @@ impl<'a> FunctionContext<'a> { let rhs = rhs.into_leaf().eval(self); let typ = self.builder.type_of_value(rhs); let zero = self.builder.numeric_constant(0u128, typ); - self.builder.insert_binary(zero, BinaryOp::Sub, rhs).into() + self.insert_binary(zero, noirc_frontend::BinaryOpKind::Subtract, rhs, unary.location) } noirc_frontend::UnaryOp::MutableReference => { self.codegen_reference(&unary.rhs).map(|rhs| { diff --git a/crates/noirc_frontend/src/hir/type_check/expr.rs b/crates/noirc_frontend/src/hir/type_check/expr.rs index 9f00f4b61da..7a0c0eb2c7d 100--- 
a/crates/noirc_frontend/src/hir/type_check/expr.rs +++ b/crates/noirc_frontend/src/hir/type_check/expr.rs @@ -335,12 +335,15 @@ impl<'interner> TypeChecker<'interner> { // inserted by a field access expression `foo.bar` on a mutable reference `foo`. if self.try_remove_implicit_dereference(method_call.object).is_none() { // If that didn't work, then wrap the whole expression in an `&mut` + let location = self.interner.id_location(method_call.object); + method_call.object = self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { operator: UnaryOp::MutableReference, rhs: method_call.object, })); self.interner.push_expr_type(&method_call.object, new_type); + self.interner.push_expr_location(method_call.object, location.span, location.file); } } } @@ -567,6 +570,9 @@ impl<'interner> TypeChecker<'interner> { })); this.interner.push_expr_type(&old_lhs, lhs_type); this.interner.push_expr_type(access_lhs, element); + + let old_location = this.interner.id_location(old_lhs); + this.interner.push_expr_location(*access_lhs, span, old_location.file); }; match self.check_field_access(&lhs_type, &access.rhs.0.contents, span, dereference_lhs) { diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index d648e181865..2324624b1df 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -92,6 +92,7 @@ pub struct Unary { pub operator: crate::UnaryOp, pub rhs: Box, pub result_type: Type, + pub location: Location, } pub type BinaryOp = BinaryOpKind; diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index 2ef980176d3..555f4b89275 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -292,11 +292,15 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Literal(HirLiteral::Unit) => ast::Expression::Block(vec![]), 
HirExpression::Block(block) => self.block(block.0), - HirExpression::Prefix(prefix) => ast::Expression::Unary(ast::Unary { - operator: prefix.operator, - rhs: Box::new(self.expr(prefix.rhs)), - result_type: Self::convert_type(&self.interner.id_type(expr)), - }), + HirExpression::Prefix(prefix) => { + let location = self.interner.expr_location(&expr); + ast::Expression::Unary(ast::Unary { + operator: prefix.operator, + rhs: Box::new(self.expr(prefix.rhs)), + result_type: Self::convert_type(&self.interner.id_type(expr)), + location, + }) + } HirExpression::Infix(infix) => { let lhs = Box::new(self.expr(infix.lhs)); @@ -817,7 +821,7 @@ impl<'interner> Monomorphizer<'interner> { }; let call = self - .try_evaluate_call(&func, &return_type) + .try_evaluate_call(&func, &id, &return_type) .unwrap_or(ast::Expression::Call(ast::Call { func, arguments, return_type, location })); if !block_expressions.is_empty() { @@ -897,6 +901,7 @@ impl<'interner> Monomorphizer<'interner> { fn try_evaluate_call( &mut self, func: &ast::Expression, + expr_id: &node_interner::ExprId, result_type: &ast::Type, ) -> Option { if let ast::Expression::Ident(ident) = func { @@ -907,7 +912,10 @@ impl<'interner> Monomorphizer<'interner> { (FieldElement::max_num_bits() as u128).into(), ast::Type::Field, ))), - "zeroed" => Some(self.zeroed_value_of_type(result_type)), + "zeroed" => { + let location = self.interner.expr_location(expr_id); + Some(self.zeroed_value_of_type(result_type, location)) + } "modulus_le_bits" => { let bits = FieldElement::modulus().to_radix_le(2); Some(self.modulus_array_literal(bits, 1)) @@ -1179,7 +1187,7 @@ impl<'interner> Monomorphizer<'interner> { /// Implements std::unsafe::zeroed by returning an appropriate zeroed /// ast literal or collection node for the given type. Note that for functions /// there is no obvious zeroed value so this should be considered unsafe to use. 
- fn zeroed_value_of_type(&mut self, typ: &ast::Type) -> ast::Expression { + fn zeroed_value_of_type(&mut self, typ: &ast::Type, location: noirc_errors::Location) -> ast::Expression { match typ { ast::Type::Field | ast::Type::Integer(..) => { ast::Expression::Literal(ast::Literal::Integer(0_u128.into(), typ.clone())) @@ -1189,7 +1197,7 @@ impl<'interner> Monomorphizer<'interner> { // anyway. ast::Type::Unit => ast::Expression::Literal(ast::Literal::Bool(false)), ast::Type::Array(length, element_type) => { - let element = self.zeroed_value_of_type(element_type.as_ref()); + let element = self.zeroed_value_of_type(element_type.as_ref(), location); ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents: vec![element; *length as usize], typ: ast::Type::Array(*length, element_type.clone()), @@ -1199,7 +1207,7 @@ impl<'interner> Monomorphizer<'interner> { ast::Expression::Literal(ast::Literal::Str("\0".repeat(*length as usize))) } ast::Type::FmtString(length, fields) => { - let zeroed_tuple = self.zeroed_value_of_type(fields); + let zeroed_tuple = self.zeroed_value_of_type(fields, location); let fields_len = match &zeroed_tuple { ast::Expression::Tuple(fields) => fields.len() as u64, _ => unreachable!("ICE: format string fields should be structured in a tuple, but got a {zeroed_tuple}"), @@ -1211,10 +1219,10 @@ impl<'interner> Monomorphizer<'interner> { )) } ast::Type::Tuple(fields) => { - ast::Expression::Tuple(vecmap(fields, |field| self.zeroed_value_of_type(field))) + ast::Expression::Tuple(vecmap(fields, |field| self.zeroed_value_of_type(field, location))) } ast::Type::Function(parameter_types, ret_type, env) => { - self.create_zeroed_function(parameter_types, ret_type, env) + self.create_zeroed_function(parameter_types, ret_type, env, location) } ast::Type::Slice(element_type) => { ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { @@ -1224,9 +1232,9 @@ impl<'interner> Monomorphizer<'interner> { } ast::Type::MutableReference(element) 
=> { use crate::UnaryOp::MutableReference; - let rhs = Box::new(self.zeroed_value_of_type(element)); + let rhs = Box::new(self.zeroed_value_of_type(element, location)); let result_type = typ.clone(); - ast::Expression::Unary(ast::Unary { rhs, result_type, operator: MutableReference }) + ast::Expression::Unary(ast::Unary { rhs, result_type, operator: MutableReference, location }) } } } @@ -1242,6 +1250,7 @@ impl<'interner> Monomorphizer<'interner> { parameter_types: &[ast::Type], ret_type: &ast::Type, env_type: &ast::Type, + location: noirc_errors::Location, ) -> ast::Expression { let lambda_name = "zeroed_lambda"; @@ -1249,7 +1258,7 @@ impl<'interner> Monomorphizer<'interner> { (self.next_local_id(), false, "_".into(), parameter_type.clone()) }); - let body = self.zeroed_value_of_type(ret_type); + let body = self.zeroed_value_of_type(ret_type, location); let id = self.next_function_id(); let return_type = ret_type.clone(); From 054642b0daa325476bb085f5a03b55fc63a8e5fc Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Fri, 25 Aug 2023 17:14:45 +0100 Subject: [PATCH 6/8] fix(acir_gen): Pass accurate contents to slice inputs for bb func calls (#2435) --- .../brillig_schnorr/Prover.toml | 1 + .../brillig_schnorr/src/main.nr | 13 +- .../execution_success/schnorr/Prover.toml | 1 + .../execution_success/schnorr/src/main.nr | 13 +- .../brillig/brillig_gen/brillig_black_box.rs | 151 +++++++++++++++--- .../ssa/acir_gen/acir_ir/generated_acir.rs | 130 ++++++++++----- .../src/monomorphization/mod.rs | 4 +- 7 files changed, 241 insertions(+), 72 deletions(-) diff --git a/crates/nargo_cli/tests/execution_success/brillig_schnorr/Prover.toml b/crates/nargo_cli/tests/execution_success/brillig_schnorr/Prover.toml index c5c3ab5101a..5fe6bd2546f 100644 --- a/crates/nargo_cli/tests/execution_success/brillig_schnorr/Prover.toml +++ b/crates/nargo_cli/tests/execution_success/brillig_schnorr/Prover.toml @@ -1,4 +1,5 @@ message = [0,1,2,3,4,5,6,7,8,9] +message_field = 
"0x010203040506070809" pub_key_x = "0x17cbd3ed3151ccfd170efe1d54280a6a4822640bf5c369908ad74ea21518a9c5" pub_key_y = "0x0e0456e3795c1a31f20035b741cd6158929eeccd320d299cfcac962865a6bc74" signature = [ diff --git a/crates/nargo_cli/tests/execution_success/brillig_schnorr/src/main.nr b/crates/nargo_cli/tests/execution_success/brillig_schnorr/src/main.nr index 0ffd6af6fcd..4212839601f 100644 --- a/crates/nargo_cli/tests/execution_success/brillig_schnorr/src/main.nr +++ b/crates/nargo_cli/tests/execution_success/brillig_schnorr/src/main.nr @@ -2,9 +2,20 @@ use dep::std; // Note: If main has any unsized types, then the verifier will never be able // to figure out the circuit instance -unconstrained fn main(message: [u8; 10], pub_key_x: Field, pub_key_y: Field, signature: [u8; 64]) { +unconstrained fn main(message: [u8; 10], message_field: Field, pub_key_x: Field, pub_key_y: Field, signature: [u8; 64]) { + // Regression for issue #2421 + // We want to make sure that we can accurately verify a signature whose message is a slice vs. an array + let message_field_bytes = message_field.to_be_bytes(10); + for i in 0..10 { + assert(message[i] == message_field_bytes[i]); + } // Is there ever a situation where someone would want // to ensure that a signature was invalid? 
+ // Check that passing a slice as the message is valid + let valid_signature = std::schnorr::verify_signature(pub_key_x,pub_key_y,signature, message_field_bytes); + assert(valid_signature); + + // Check that passing an array as the message is valid let valid_signature = std::schnorr::verify_signature(pub_key_x,pub_key_y,signature, message); assert(valid_signature); } diff --git a/crates/nargo_cli/tests/execution_success/schnorr/Prover.toml b/crates/nargo_cli/tests/execution_success/schnorr/Prover.toml index c5c3ab5101a..5fe6bd2546f 100644 --- a/crates/nargo_cli/tests/execution_success/schnorr/Prover.toml +++ b/crates/nargo_cli/tests/execution_success/schnorr/Prover.toml @@ -1,4 +1,5 @@ message = [0,1,2,3,4,5,6,7,8,9] +message_field = "0x010203040506070809" pub_key_x = "0x17cbd3ed3151ccfd170efe1d54280a6a4822640bf5c369908ad74ea21518a9c5" pub_key_y = "0x0e0456e3795c1a31f20035b741cd6158929eeccd320d299cfcac962865a6bc74" signature = [ diff --git a/crates/nargo_cli/tests/execution_success/schnorr/src/main.nr b/crates/nargo_cli/tests/execution_success/schnorr/src/main.nr index c0933b23029..3c8881b2f39 100644 --- a/crates/nargo_cli/tests/execution_success/schnorr/src/main.nr +++ b/crates/nargo_cli/tests/execution_success/schnorr/src/main.nr @@ -2,9 +2,20 @@ use dep::std; // Note: If main has any unsized types, then the verifier will never be able // to figure out the circuit instance -fn main(message: [u8; 10], pub_key_x: Field, pub_key_y: Field, signature: [u8; 64]) { +fn main(message: [u8; 10], message_field: Field, pub_key_x: Field, pub_key_y: Field, signature: [u8; 64]) { + // Regression for issue #2421 + // We want to make sure that we can accurately verify a signature whose message is a slice vs. an array + let message_field_bytes = message_field.to_be_bytes(10); + for i in 0..10 { + assert(message[i] == message_field_bytes[i]); + } // Is there ever a situation where someone would want // to ensure that a signature was invalid? 
+ // Check that passing a slice as the message is valid + let valid_signature = std::schnorr::verify_signature(pub_key_x,pub_key_y,signature, message_field_bytes); + assert(valid_signature); + + // Check that passing an array as the message is valid let valid_signature = std::schnorr::verify_signature(pub_key_x,pub_key_y,signature, message); assert(valid_signature); } diff --git a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 1ee323e4c09..7180467aff7 100644 --- a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -16,12 +16,24 @@ pub(crate) fn convert_black_box_call( ) { match bb_func { BlackBoxFunc::SHA256 => { - if let ( - [RegisterOrMemory::HeapArray(message_array)], - [RegisterOrMemory::HeapArray(result_array)], - ) = (function_arguments, function_results) + if let ([..], [RegisterOrMemory::HeapArray(result_array)]) = + (function_arguments, function_results) { - let message_vector = brillig_context.array_to_vector(message_array); + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let message = if function_arguments.len() > 1 { + &function_arguments[1] + } else { + &function_arguments[0] + }; + let message_vector = match message { + RegisterOrMemory::HeapArray(message_array) => { + brillig_context.array_to_vector(message_array) + } + RegisterOrMemory::HeapVector(message_vector) => *message_vector, + _ => unreachable!("ICE: SHA256 expects the message to be an array or a vector"), + }; brillig_context.black_box_op_instruction(BlackBoxOp::Sha256 { message: message_vector, output: *result_array, @@ -31,12 +43,26 @@ pub(crate) fn convert_black_box_call( } } BlackBoxFunc::Blake2s => { - if let ( - [RegisterOrMemory::HeapArray(message_array)], - [RegisterOrMemory::HeapArray(result_array)], - ) = (function_arguments, function_results) + if let ([..], [RegisterOrMemory::HeapArray(result_array)]) = + (function_arguments, function_results) { - let message_vector = brillig_context.array_to_vector(message_array); + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let message = if function_arguments.len() > 1 { + &function_arguments[1] + } else { + &function_arguments[0] + }; + let message_vector = match message { + RegisterOrMemory::HeapArray(message_array) => { + brillig_context.array_to_vector(message_array) + } + RegisterOrMemory::HeapVector(message_vector) => *message_vector, + _ => { + unreachable!("ICE: Blake2s expects the message to be an array or a vector") + } + }; brillig_context.black_box_op_instruction(BlackBoxOp::Blake2s { message: message_vector, output: *result_array, @@ -47,12 +73,27 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Keccak256 => { if let ( - [RegisterOrMemory::HeapArray(message_array), RegisterOrMemory::RegisterIndex(array_size)], + [.., RegisterOrMemory::RegisterIndex(array_size)], [RegisterOrMemory::HeapArray(result_array)], ) = (function_arguments, function_results) { - let message_vector = - HeapVector { size: *array_size, pointer: message_array.pointer }; + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let message = if function_arguments.len() > 2 { + &function_arguments[1] + } else { + &function_arguments[0] + }; + let message_vector = match message { + RegisterOrMemory::HeapArray(message_array) => { + HeapVector { size: *array_size, pointer: message_array.pointer } + } + RegisterOrMemory::HeapVector(message_vector) => *message_vector, + _ => unreachable!( + "ICE: Keccak256 expects the message to be an array or a vector" + ), + }; brillig_context.black_box_op_instruction(BlackBoxOp::Keccak256 { message: message_vector, output: *result_array, @@ -62,12 +103,26 @@ pub(crate) fn convert_black_box_call( } } BlackBoxFunc::HashToField128Security => { - if let ( - [RegisterOrMemory::HeapArray(message_array)], - [RegisterOrMemory::RegisterIndex(result_register)], - ) = (function_arguments, function_results) + if let ([..], [RegisterOrMemory::RegisterIndex(result_register)]) = + (function_arguments, function_results) { - let message_vector = brillig_context.array_to_vector(message_array); + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let message = if function_arguments.len() > 1 { + &function_arguments[1] + } else { + &function_arguments[0] + }; + let message_vector = match message { + RegisterOrMemory::HeapArray(message_array) => { + brillig_context.array_to_vector(message_array) + } + RegisterOrMemory::HeapVector(message_vector) => { + *message_vector + } + _ => unreachable!("ICE: HashToField128Security expects the message to be an array or a vector"), + }; brillig_context.black_box_op_instruction(BlackBoxOp::HashToField128Security { message: message_vector, output: *result_register, @@ -78,11 +133,27 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::EcdsaSecp256k1 => { if let ( - [RegisterOrMemory::HeapArray(public_key_x), RegisterOrMemory::HeapArray(public_key_y), RegisterOrMemory::HeapArray(signature), RegisterOrMemory::HeapArray(message_hash)], + [RegisterOrMemory::HeapArray(public_key_x), RegisterOrMemory::HeapArray(public_key_y), RegisterOrMemory::HeapArray(signature), ..], [RegisterOrMemory::RegisterIndex(result_register)], ) = (function_arguments, function_results) { - let message_hash_vector = brillig_context.array_to_vector(message_hash); + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let message = if function_arguments.len() > 4 { + &function_arguments[4] + } else { + &function_arguments[3] + }; + let message_hash_vector = match message { + RegisterOrMemory::HeapArray(message_hash) => { + brillig_context.array_to_vector(message_hash) + } + RegisterOrMemory::HeapVector(message_hash_vector) => *message_hash_vector, + _ => unreachable!( + "ICE: EcdsaSecp256k1 expects the message to be an array or a vector" + ), + }; brillig_context.black_box_op_instruction(BlackBoxOp::EcdsaSecp256k1 { hashed_msg: message_hash_vector, public_key_x: *public_key_x, @@ -98,11 +169,27 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Pedersen => { if let ( - [RegisterOrMemory::HeapArray(message_array), RegisterOrMemory::RegisterIndex(domain_separator)], + [.., RegisterOrMemory::RegisterIndex(domain_separator)], [RegisterOrMemory::HeapArray(result_array)], ) = (function_arguments, function_results) { - let message_vector = brillig_context.array_to_vector(message_array); + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let message = if function_arguments.len() > 2 { + &function_arguments[1] + } else { + &function_arguments[0] + }; + let message_vector = match message { + RegisterOrMemory::HeapArray(message_array) => { + brillig_context.array_to_vector(message_array) + } + RegisterOrMemory::HeapVector(message_vector) => *message_vector, + _ => { + unreachable!("ICE: Pedersen expects the message to be an array or a vector") + } + }; brillig_context.black_box_op_instruction(BlackBoxOp::Pedersen { inputs: message_vector, domain_separator: *domain_separator, @@ -114,11 +201,27 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::SchnorrVerify => { if let ( - [RegisterOrMemory::RegisterIndex(public_key_x), RegisterOrMemory::RegisterIndex(public_key_y), RegisterOrMemory::HeapArray(signature), RegisterOrMemory::HeapArray(message_hash)], + [RegisterOrMemory::RegisterIndex(public_key_x), RegisterOrMemory::RegisterIndex(public_key_y), RegisterOrMemory::HeapArray(signature), ..], [RegisterOrMemory::RegisterIndex(result_register)], ) = (function_arguments, function_results) { - let message_hash = brillig_context.array_to_vector(message_hash); + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let message = if function_arguments.len() > 4 { + &function_arguments[4] + } else { + &function_arguments[3] + }; + let message_hash = match message { + RegisterOrMemory::HeapArray(message_hash) => { + brillig_context.array_to_vector(message_hash) + } + RegisterOrMemory::HeapVector(message_hash) => *message_hash, + _ => unreachable!( + "ICE: Schnorr verify expects the message to be an array or a vector" + ), + }; let signature = brillig_context.array_to_vector(signature); brillig_context.black_box_op_instruction(BlackBoxOp::SchnorrVerify { public_key_x: *public_key_x, diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 29ca4fa3892..3f12bda8da2 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -147,47 +147,86 @@ impl GeneratedAcir { BlackBoxFuncCall::XOR { lhs: inputs[0][0], rhs: inputs[1][0], output: outputs[0] } } BlackBoxFunc::RANGE => BlackBoxFuncCall::RANGE { input: inputs[0][0] }, - BlackBoxFunc::SHA256 => BlackBoxFuncCall::SHA256 { inputs: inputs[0].clone(), outputs }, + BlackBoxFunc::SHA256 => { + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. + let inputs = if inputs.len() > 1 { inputs[1].clone() } else { inputs[0].clone() }; + BlackBoxFuncCall::SHA256 { inputs, outputs } + } BlackBoxFunc::Blake2s => { - BlackBoxFuncCall::Blake2s { inputs: inputs[0].clone(), outputs } + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let inputs = if inputs.len() > 1 { inputs[1].clone() } else { inputs[0].clone() }; + BlackBoxFuncCall::Blake2s { inputs, outputs } + } + BlackBoxFunc::HashToField128Security => { + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. + let inputs = if inputs.len() > 1 { inputs[1].clone() } else { inputs[0].clone() }; + BlackBoxFuncCall::HashToField128Security { inputs, output: outputs[0] } + } + BlackBoxFunc::SchnorrVerify => { + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. + let message = if inputs.len() > 4 { inputs[4].clone() } else { inputs[3].clone() }; + BlackBoxFuncCall::SchnorrVerify { + public_key_x: inputs[0][0], + public_key_y: inputs[1][0], + // Schnorr signature is an r & s, 32 bytes each + signature: inputs[2].clone(), + message, + output: outputs[0], + } + } + BlackBoxFunc::Pedersen => { + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. + let inputs = if inputs.len() > 1 { inputs[1].clone() } else { inputs[0].clone() }; + BlackBoxFuncCall::Pedersen { + inputs, + outputs: (outputs[0], outputs[1]), + domain_separator: constants[0].to_u128() as u32, + } + } + BlackBoxFunc::EcdsaSecp256k1 => { + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. 
+ let hashed_message = + if inputs.len() > 4 { inputs[4].clone() } else { inputs[3].clone() }; + BlackBoxFuncCall::EcdsaSecp256k1 { + // 32 bytes for each public key co-ordinate + public_key_x: inputs[0].clone(), + public_key_y: inputs[1].clone(), + // (r,s) are both 32 bytes each, so signature + // takes up 64 bytes + signature: inputs[2].clone(), + hashed_message, + output: outputs[0], + } + } + BlackBoxFunc::EcdsaSecp256r1 => { + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. + let hashed_message = + if inputs.len() > 4 { inputs[4].clone() } else { inputs[3].clone() }; + BlackBoxFuncCall::EcdsaSecp256r1 { + // 32 bytes for each public key co-ordinate + public_key_x: inputs[0].clone(), + public_key_y: inputs[1].clone(), + // (r,s) are both 32 bytes each, so signature + // takes up 64 bytes + signature: inputs[2].clone(), + hashed_message, + output: outputs[0], + } } - BlackBoxFunc::HashToField128Security => BlackBoxFuncCall::HashToField128Security { - inputs: inputs[0].clone(), - output: outputs[0], - }, - BlackBoxFunc::SchnorrVerify => BlackBoxFuncCall::SchnorrVerify { - public_key_x: inputs[0][0], - public_key_y: inputs[1][0], - // Schnorr signature is an r & s, 32 bytes each - signature: inputs[2].clone(), - message: inputs[3].clone(), - output: outputs[0], - }, - BlackBoxFunc::Pedersen => BlackBoxFuncCall::Pedersen { - inputs: inputs[0].clone(), - outputs: (outputs[0], outputs[1]), - domain_separator: constants[0].to_u128() as u32, - }, - BlackBoxFunc::EcdsaSecp256k1 => BlackBoxFuncCall::EcdsaSecp256k1 { - // 32 bytes for each public key co-ordinate - public_key_x: inputs[0].clone(), - public_key_y: inputs[1].clone(), - // (r,s) are both 32 bytes each, so signature - // takes up 64 bytes - signature: inputs[2].clone(), - hashed_message: inputs[3].clone(), - output: outputs[0], - }, - 
BlackBoxFunc::EcdsaSecp256r1 => BlackBoxFuncCall::EcdsaSecp256r1 { - // 32 bytes for each public key co-ordinate - public_key_x: inputs[0].clone(), - public_key_y: inputs[1].clone(), - // (r,s) are both 32 bytes each, so signature - // takes up 64 bytes - signature: inputs[2].clone(), - hashed_message: inputs[3].clone(), - output: outputs[0], - }, BlackBoxFunc::FixedBaseScalarMul => BlackBoxFuncCall::FixedBaseScalarMul { input: inputs[0][0], outputs: (outputs[0], outputs[1]), @@ -203,11 +242,14 @@ impl GeneratedAcir { }); } }; - BlackBoxFuncCall::Keccak256VariableLength { - inputs: inputs[0].clone(), - var_message_size, - outputs, - } + + // Slices are represented as a tuple of (length, slice contents). + // We must check the number of inputs to differentiate between arrays and slices + // and make sure that we pass the correct inputs to the function call. + // `inputs` is cloned into a vector before being popped to find the `var_message_size` + // so we still check `inputs` against its original size passed into `call_black_box` + let inputs = if inputs.len() > 2 { inputs[1].clone() } else { inputs[0].clone() }; + BlackBoxFuncCall::Keccak256VariableLength { inputs, var_message_size, outputs } } BlackBoxFunc::RecursiveAggregation => { let has_previous_aggregation = self.opcodes.iter().any(|op| { diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index 555f4b89275..86a0ca1f20c 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -886,8 +886,8 @@ impl<'interner> Monomorphizer<'interner> { } } let printable_type: PrintableType = typ.into(); - let abi_as_string = - serde_json::to_string(&printable_type).expect("ICE: expected PrintableType to serialize"); + let abi_as_string = serde_json::to_string(&printable_type) + .expect("ICE: expected PrintableType to serialize"); 
arguments.push(ast::Expression::Literal(ast::Literal::Str(abi_as_string))); } From 84fdc55a635ea6198e877621f0ca97be558bda77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Fri, 25 Aug 2023 18:36:42 +0200 Subject: [PATCH 7/8] feat(nargo): Support optional directory in git dependencies (#2436) --- crates/nargo_toml/src/errors.rs | 3 +++ crates/nargo_toml/src/lib.rs | 18 +++++++++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/crates/nargo_toml/src/errors.rs b/crates/nargo_toml/src/errors.rs index 2b68f681f92..9abeab97b61 100644 --- a/crates/nargo_toml/src/errors.rs +++ b/crates/nargo_toml/src/errors.rs @@ -44,6 +44,9 @@ pub enum ManifestError { #[error("{} found in {toml}", if name.is_empty() { "Empty dependency name".into() } else { format!("Invalid dependency name `{name}`") })] InvalidDependencyName { toml: PathBuf, name: String }, + #[error("Invalid directory path {directory} in {toml}: It must point to a subdirectory")] + InvalidDirectory { toml: PathBuf, directory: PathBuf }, + /// Encountered error while downloading git repository. 
#[error("{0}")] GitError(String), diff --git a/crates/nargo_toml/src/lib.rs b/crates/nargo_toml/src/lib.rs index 8372942931b..1dd6ac0e695 100644 --- a/crates/nargo_toml/src/lib.rs +++ b/crates/nargo_toml/src/lib.rs @@ -238,16 +238,28 @@ struct PackageMetadata { /// Enum representing the different types of ways to /// supply a source for the dependency enum DependencyConfig { - Github { git: String, tag: String }, + Github { git: String, tag: String, directory: Option }, Path { path: String }, } impl DependencyConfig { fn resolve_to_dependency(&self, pkg_root: &Path) -> Result { let dep = match self { - Self::Github { git, tag } => { + Self::Github { git, tag, directory } => { let dir_path = clone_git_repo(git, tag).map_err(ManifestError::GitError)?; - let toml_path = dir_path.join("Nargo.toml"); + let project_path = if let Some(directory) = directory { + let internal_path = dir_path.join(directory).normalize(); + if !internal_path.starts_with(&dir_path) { + return Err(ManifestError::InvalidDirectory { + toml: pkg_root.join("Nargo.toml"), + directory: directory.into(), + }); + } + internal_path + } else { + dir_path + }; + let toml_path = project_path.join("Nargo.toml"); let package = resolve_package_from_toml(&toml_path)?; Dependency::Remote { package } } From 7714cd01858d816d67b5b1319022ef849977d0da Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 25 Aug 2023 13:25:45 -0500 Subject: [PATCH 8/8] fix: Implement new mem2reg pass (#2420) Co-authored-by: Maxim Vezenov --- .../execution_success/references/src/main.nr | 2 +- .../references_aliasing/Nargo.toml | 7 + .../references_aliasing/Prover.toml | 0 .../references_aliasing/src/main.nr | 10 + .../target/references.bytecode | 1 + .../references_aliasing/target/witness.tr | Bin 0 -> 788 bytes crates/noirc_evaluator/src/ssa/opt/mem2reg.rs | 738 ++++++++++-------- .../noirc_evaluator/src/ssa/opt/unrolling.rs | 8 +- 8 files changed, 457 insertions(+), 309 deletions(-) create mode 100644 
crates/nargo_cli/tests/execution_success/references_aliasing/Nargo.toml create mode 100644 crates/nargo_cli/tests/execution_success/references_aliasing/Prover.toml create mode 100644 crates/nargo_cli/tests/execution_success/references_aliasing/src/main.nr create mode 100644 crates/nargo_cli/tests/execution_success/references_aliasing/target/references.bytecode create mode 100644 crates/nargo_cli/tests/execution_success/references_aliasing/target/witness.tr diff --git a/crates/nargo_cli/tests/execution_success/references/src/main.nr b/crates/nargo_cli/tests/execution_success/references/src/main.nr index ec23f2f3a38..70de5cada3f 100644 --- a/crates/nargo_cli/tests/execution_success/references/src/main.nr +++ b/crates/nargo_cli/tests/execution_success/references/src/main.nr @@ -229,4 +229,4 @@ fn regression_2218_loop(x: Field, y: Field) { assert(*q1 == 20); } assert(*q1 == 2); -} \ No newline at end of file +} diff --git a/crates/nargo_cli/tests/execution_success/references_aliasing/Nargo.toml b/crates/nargo_cli/tests/execution_success/references_aliasing/Nargo.toml new file mode 100644 index 00000000000..b52fdcf77f0 --- /dev/null +++ b/crates/nargo_cli/tests/execution_success/references_aliasing/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "references" +type = "bin" +authors = [""] +compiler_version = "0.5.1" + +[dependencies] diff --git a/crates/nargo_cli/tests/execution_success/references_aliasing/Prover.toml b/crates/nargo_cli/tests/execution_success/references_aliasing/Prover.toml new file mode 100644 index 00000000000..e69de29bb2d diff --git a/crates/nargo_cli/tests/execution_success/references_aliasing/src/main.nr b/crates/nargo_cli/tests/execution_success/references_aliasing/src/main.nr new file mode 100644 index 00000000000..4582444c8f7 --- /dev/null +++ b/crates/nargo_cli/tests/execution_success/references_aliasing/src/main.nr @@ -0,0 +1,10 @@ +fn increment(mut r: &mut Field) { + *r = *r + 1; +} + +fn main() { + let mut x = 100; + let mut xref = &mut x; + 
increment(xref); + assert(*xref == 101); +} diff --git a/crates/nargo_cli/tests/execution_success/references_aliasing/target/references.bytecode b/crates/nargo_cli/tests/execution_success/references_aliasing/target/references.bytecode new file mode 100644 index 00000000000..3e521923327 --- /dev/null +++ b/crates/nargo_cli/tests/execution_success/references_aliasing/target/references.bytecode @@ -0,0 +1 @@ +H4sIAAAAAAAA/+1c8ZONVRh+WCKykYqIiFRE37f37u5dkY2IEEJEtLv2bkJKQkpCSkJKIjLGNE3TmJqpmZqpmZrRH9L/Uu907szZzfjlPO+Z98yc88u+w8yz3/O853ue93zf3fsXgL/x/zXY/ex0P4uwVQ7ysCpFW7Vab2+pl5Wyu2jp6Km1FtXWnrZaWStba629LbVKpV6r1to7ejrai46yWqmXfa0dlT4HNpiAVe/7bzX9izHoJvwHkfkP5mEV/vU2efWQAb3z//82BU4Y8HsG6th8k3+j/nKNJjUp4A4Bb/Nr8R7C71HhQZrXtLEsG99QpGd8Q6FjfLd5dTa+QMyhSkINg23jE97D+D1SNT62po1l2fiGIz3jGw4d47vdq7PxBWIOd4KycUfAtvEJ7xH8HqkaH1vTxrJsfCORnvGNhI7x3eHV2fgCMUc6Qdm4o2Db+IT3KH6PVI2PrWljWTa+ZtCMrxbL+JqhY3x3enU2vkDMZicoG3c0bBuf8B7N71EyhjIGLEOp12MZyhjoGMpdXp0NJRBzjBOUjTsWtg1FeI/l9+iWhhKqA9Ok74bOTcHmzNxH9yTCmdnneyNxLsJWL7PP43jX1SIYbO+Rnoy7CW4o76vQ8bEmKv+yytzf44l9uQrWkBXvcRWRf78h6z6vzkNWIOZ4JygbdwJsD1nCewK/R6qPq9iaNhZ7SGCeLifS9CsrsYyPd839je9+r87GF4g50QnKxp0E28YnvCfxe6T5uKqVqelkIudYhsK8Zv96H/DqbCiBmJOdoGzcKbBtKMJ7Cr9HqpPUFMSZpIqwRX1OP5WAFfsIORU6xvegV2fjC8Sc6gRl406DbeMT3tP4PUrGUKaDZSjxXvxNh46hPOTV2VACMac7Qdm4M2DbUIT3DH6PVJ/1ME36YejcFGzOzH30SCKcmX1+NBLnImz1Mvs8k3ddKi/+pCczwX/xdw06PsZ+8cfc37OIfbkG2pDVG2vIIvLvN2Q95tV5yArEnOUEZePOhu0hS3jP5vdI9XEVW9PGsny6nAOW8cX7nPoc6Bjf416djS8Qc44TlI1bwLbxCe+C3yNFQylbmJqWRM6xDIV5zf71tnh1NpRAzNIJysatwLahCO8Kv0eqkxRb08ayPElVkd4kVYWO8bV6dTa+QMyqE5SN2wbbxie82/g9SsZQ2pGeobRDx1BqXp0NJRCz3QnKxu2AbUMR3h38HiVjKHORnqHMhY6hPOHV2VACMec6Qdm482DbUIT3PH6PkjGU+UjPUOZDx1Ce9OpsKIGY852gbNwFsG0ownsBv0fJGEon0jOUTugYylNenQ0lELPTCcrGXQjbhiI4C/k9SsZQFiE9Q1kEHUN52quzoQRiLnKCsnEXw7ahCO/F/B4lYyhLwDKUeH+NsQQ6hvKMV2dDCcRc4gRl4y6FbUMR3kv5PUrGUJYhPUNZBh1Dedars6EEYi5zgrJxl8O2oQjv5fweqf55F9OkV/A4t9yKcxG2qCa6EumZ6EromOhzXp1NNBBzpROUjbsKtk1UeK/i90jlWsXsV4D/N3XfQCc8mkjX2fiWEGYgryb2hahfMkG0BukF0RroBN
HzXp2DKBBzjROUjbsWtoNIeK/l90jlWiUwV4MfRN8ijSBiDjXriH0h6pdMEK1HekG0HjpB9IJX5yAKxFzvBGXjboDtIBLeG/g9UrlWCcx14AfRd0gjiJhDzUZiX4j63TKIQjkzX0K/CNt+Jvf0RoV75Xukca8wfXcTsS9E/VS+yUn29SaFfXM90r4pglZZZXrEZmJfrhPvjViDL5F/v8H3Ja/Og28g5mYnKBt3C2wPvsJ7C79Hqt8/wNa0sSyf+LeCZnzRPuq6FTrG97JXZ+MLxNzqBGXjdsG28QnvLn6PkjGUbqRnKN3QMZQer86GEojZ7QRl426DbUMR3tv4PVL9ZFoXEasXOjcFmzNzH9UjcS7CFjU4+sAKjnjvnvqgExyveHUOjkDMPicoG3c7bAeH8N7O75FqcDBN9FWkERzMfbSDx7nfuyLLz4F3Evus9RzdcvDuAit4453YdkEneF/z6hy8gZi7nKBs3N2wHbzCeze/R8mc2F5HGsHL3EdvROJchC1qcOwBKzjindj2QCc43vTqHByBmHucoGzcvbAdHMJ7L79HqsHBNNG3kEZwMPfRPh5nlU8syYl8J/ifWPoB3P3N5i1PD3Yo8P4ROvd1E/k69xO1JPa61NLP8qB1AKxBK94J/QB0Bq23vToPWoGYB5ygbNyDsD1oCe+D/B4lc0J/Bzo3BZszcx+9G4lzEbaowXEIrOCId0I/BJ3geM+rc3AEYh5ygrJxD8N2cAjvw/weqQYH00TfRxrBwdxHR3icVU7o8gRmP/gn1Z/A3d9s3vK0aJ8C75+hc1+zT+hHiVoSe11q6Wd50DoG1qAV74R+DDqD1gdenQetQMxjTlA27nHYHrSE93F+j5I5oX8InZuCzZm5jz6KxLkIW9TgOAFWcMQ7oZ+ATnB87NU5OAIxTzhB2bgnYTs4hPdJfo9Ug4Npop8gjeBg7qNTPM4qJ3R5AnMU/JPqL+DubzZveVp0RIH3r9C5r9kn9NNELYm9LrX0szxonQFr0Ip3Qj8DnUHrU6/Og1Yg5hknKBv3LGwPWsL7LL9HyZzQP4POTcHmzNxHn0fiXIQtanCcAys44p3Qz0EnOL7w6hwcgZjnnKBs3POwHRzC+zy/R6rBwTTRL5FGcDD30QUeZ5UTujyBOQ3+SfU3cPc3m7c8LTqlwPt36NzX7BP6RaKWxF6XWvpZHqi/gu28lgy4qHCv/AHbHiF5dUGB959IwyMuEbUk9rpk68feN3I/X1LYNzcQZ98UQausdhE5Xyb25QbhumJ/1zWRf78D7ddenQ+0gZiXnaBs3CuwfaAV3lf4PVL9rmumpoMGXKO//gGokXOuaucAAA== diff --git a/crates/nargo_cli/tests/execution_success/references_aliasing/target/witness.tr b/crates/nargo_cli/tests/execution_success/references_aliasing/target/witness.tr new file mode 100644 index 0000000000000000000000000000000000000000..22a1c7fe1098dfe81652877dc26d99dc3aae67ec GIT binary patch literal 788 zcmV+v1MB=BiwFP!00002|E-o)&l5oyg>iRxciq{U9YH_b-JPB84DRmk?ixY}A%p;d z1QJLf;iu7laMAnk*|te9nkVO+_dV13T<_mgzx=8B|2?VOlQ!U3Nkfp;h+~Pypspq$ zt10NQ8K|o{$Z7$4YzgXW1+rR$9@|v0WTKS{CJjQ0JV~9mS)wwpt*|1qS&>>PgH_fz z?@Z!^l0n4G$q;LwyloXLbtbnZof;E_R$eFGr&fjXubniBiiOLBNGbp+qz}2w)2LO+ 
z#8kQRTebu9YR`F0I#jVlR55C+o$%VHp(unavRv7a>$H@3KXx_(IY`!qd?YZF#9p!{c9|E|4P8T1jtH3mIUXhz&U2%95tAi z0Xj0p_(5%xe{x*J?1Y zHQ+sZEvRc9nAducwE@g)BX|ec1l|EQgL&nkM-O@|fgTHRCj!Wd;90l@JPWsi*>3~$ z+76yyJ3!V>utU4R4($f#xCfl$UXZm9?9hI&LkGa@4}y6e0{8wf$T|Xc=qT8sW8fT* zgL6CqvQB~>It6y}^6cIW}vp@(4hkHEYhgM0r3WIY8vJ_B_<2j}<# zWW5CQdIje78ua)EWW5Dh??Bdjko5ta<3~`}Cs5ZHP}f&b*EdktcTm?4P}fgT*Dp}l SZ&24Cko6Z+MM~zdGXMaV18<%H literal 0 HcmV?d00001 diff --git a/crates/noirc_evaluator/src/ssa/opt/mem2reg.rs b/crates/noirc_evaluator/src/ssa/opt/mem2reg.rs index d83cda4a8b1..3e63aec63b4 100644 --- a/crates/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/crates/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -1,69 +1,142 @@ -//! mem2reg implements a pass for promoting values stored in memory to values in registers where -//! possible. This is particularly important for converting our memory-based representation of -//! mutable variables into values that are easier to manipulate. -use std::collections::{BTreeMap, HashSet}; +//! The goal of the mem2reg SSA optimization pass is to replace any `Load` instructions to known +//! addresses with the value stored at that address, if it is also known. This pass will also remove +//! any `Store` instructions within a block that are no longer needed because no more loads occur in +//! between the Store in question and the next Store. +//! +//! The pass works as follows: +//! - Each block in each function is iterated in forward-order. +//! - The starting value of each reference in the block is the unification of the same references +//! at the end of each direct predecessor block to the current block. +//! - At each step, the value of each reference is either Known(ValueId) or Unknown. +//! - Two reference values unify to each other if they are exactly equal, or to Unknown otherwise. +//! - If a block has no predecessors, the starting value of each reference is Unknown. +//! - Throughout this pass, aliases of each reference are also tracked. +//! 
- References typically have 1 alias - themselves. +//! - A reference with multiple aliases means we will not be able to optimize out loads if the +//! reference is stored to. Note that this means we can still optimize out loads if these +//! aliased references are never stored to, or the store occurs after a load. +//! - A reference with 0 aliases means we were unable to find which reference this reference +//! refers to. If such a reference is stored to, we must conservatively invalidate every +//! reference in the current block. +//! +//! From there, to figure out the value of each reference at the end of block, iterate each instruction: +//! - On `Instruction::Allocate`: +//! - Register a new reference was made with itself as its only alias +//! - On `Instruction::Load { address }`: +//! - If `address` is known to only have a single alias (including itself) and if the value of +//! that alias is known, replace the value of the load with the known value. +//! - Furthermore, if the result of the load is a reference, mark the result as an alias +//! of the reference it dereferences to (if known). +//! - If which reference it dereferences to is not known, this load result has no aliases. +//! - On `Instruction::Store { address, value }`: +//! - If the address of the store is `Known(value)`: +//! - If the address has exactly 1 alias: +//! - Set the value of the address to `Known(value)`. +//! - If the address has more than 1 alias: +//! - Set the value of every possible alias to `Unknown`. +//! - If the address has 0 aliases: +//! - Conservatively mark every alias in the block to `Unknown`. +//! - If the address of the store is not known: +//! - Conservatively mark every alias in the block to `Unknown`. +//! - Additionally, if there were no Loads to any alias of the address between this Store and +//! the previous Store to the same address, the previous store can be removed. +//! - On `Instruction::Call { arguments }`: +//! 
- If any argument of the call is a reference, set the value of each alias of that +//! reference to `Unknown` +//! - Any builtin functions that may return aliases if their input also contains a +//! reference should be tracked. Examples: `slice_push_back`, `slice_insert`, `slice_remove`, etc. +//! +//! On a terminator instruction: +//! - If the terminator is a `Jmp`: +//! - For each reference argument of the jmp, mark the corresponding block parameter it is passed +//! to as an alias for the jmp argument. +//! +//! Finally, if this is the only block in the function, we can remove any Stores that were not +//! referenced by the terminator instruction. +//! +//! Repeating this algorithm for each block in the function in program order should result in +//! optimizing out most known loads. However, identifying all aliases correctly has been proven +//! undecidable in general (Landi, 1992). So this pass will not always optimize out all loads +//! that could theoretically be optimized out. This pass can be performed at any time in the +//! SSA optimization pipeline, although it will be more successful the simpler the program's CFG is. +//! This pass is currently performed several times to enable other passes - most notably being +//! performed before loop unrolling to try to allow for mutable variables used for loop indices. +use std::collections::{BTreeMap, BTreeSet}; use crate::ssa::{ ir::{ basic_block::BasicBlockId, cfg::ControlFlowGraph, - dfg::DataFlowGraph, dom::DominatorTree, function::Function, instruction::{Instruction, InstructionId, TerminatorInstruction}, post_order::PostOrder, - value::{Value, ValueId}, + types::Type, + value::ValueId, }, ssa_gen::Ssa, }; -use super::unrolling::{find_all_loops, Loops}; - impl Ssa { /// Attempts to remove any load instructions that recover values that are already available in /// scope, and attempts to remove stores that are subsequently redundant. 
- /// As long as they are not stores on memory used inside of loops pub(crate) fn mem2reg(mut self) -> Ssa { for function in self.functions.values_mut() { - let mut all_protected_allocations = HashSet::new(); - let mut context = PerFunctionContext::new(function); - - for block in function.reachable_blocks() { - // Maps Load instruction id -> value to replace the result of the load with - let mut loads_to_substitute_per_block = BTreeMap::new(); - - // Maps Load result id -> value to replace the result of the load with - let mut load_values_to_substitute = BTreeMap::new(); - - let allocations_protected_by_block = context - .analyze_allocations_and_eliminate_known_loads( - &mut function.dfg, - &mut loads_to_substitute_per_block, - &mut load_values_to_substitute, - block, - ); - all_protected_allocations.extend(allocations_protected_by_block.into_iter()); - } - - for block in function.reachable_blocks() { - context.remove_unused_stores(&mut function.dfg, &all_protected_allocations, block); - } + context.mem2reg(function); + context.remove_instructions(function); } self } } struct PerFunctionContext { - last_stores_with_block: BTreeMap<(AllocId, BasicBlockId), ValueId>, - // Maps Load result id -> (value, block_id) - // Used to replace the result of a load with the appropriate block - load_values_to_substitute_per_func: BTreeMap, - store_ids: Vec, cfg: ControlFlowGraph, post_order: PostOrder, dom_tree: DominatorTree, - loops: Loops, + + blocks: BTreeMap, + + /// Load and Store instructions that should be removed at the end of the pass. + /// + /// We avoid removing individual instructions as we go since removing elements + /// from the middle of Vecs many times will be slower than a single call to `retain`. + instructions_to_remove: BTreeSet, +} + +#[derive(Debug, Default, Clone)] +struct Block { + /// Maps a ValueId to the Expression it represents. + /// Multiple ValueIds can map to the same Expression, e.g. + /// dereferences to the same allocation. 
+ expressions: BTreeMap, + + /// Each expression is tracked as to how many aliases it + /// may have. If there is only 1, we can attempt to optimize + /// out any known loads to that alias. Note that "alias" here + /// includes the original reference as well. + aliases: BTreeMap>, + + /// Each allocate instruction result (and some reference block parameters) + /// will map to a Reference value which tracks whether the last value stored + /// to the reference is known. + references: BTreeMap, +} + +/// An `Expression` here is used to represent a canonical key +/// into the aliases map since otherwise two dereferences of the +/// same address will be given different ValueIds. +#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] +enum Expression { + Dereference(Box), + Other(ValueId), +} + +/// Every reference's value is either Known and can be optimized away, or Unknown. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +enum ReferenceValue { + Unknown, + Known(ValueId), } impl PerFunctionContext { @@ -71,308 +144,360 @@ impl PerFunctionContext { let cfg = ControlFlowGraph::with_function(function); let post_order = PostOrder::with_function(function); let dom_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order); + PerFunctionContext { - last_stores_with_block: BTreeMap::new(), - load_values_to_substitute_per_func: BTreeMap::new(), - store_ids: Vec::new(), cfg, post_order, dom_tree, - loops: find_all_loops(function), + blocks: BTreeMap::new(), + instructions_to_remove: BTreeSet::new(), } } -} -/// An AllocId is the ValueId returned from an allocate instruction. E.g. v0 in v0 = allocate. -/// This type alias is used to help signal where the only valid ValueIds are those that are from -/// an allocate instruction. -type AllocId = ValueId; + /// Apply the mem2reg pass to the given function. + /// + /// This function is expected to be the same one that the internal cfg, post_order, and + /// dom_tree were created from. 
+ fn mem2reg(&mut self, function: &mut Function) { + // Iterate each block in reverse post order = forward order + let mut block_order = PostOrder::with_function(function).into_vec(); + block_order.reverse(); + + for block in block_order { + let references = self.find_starting_references(block); + self.analyze_block(function, block, references); + } + } -impl PerFunctionContext { - // Attempts to remove load instructions for which the result is already known from previous - // store instructions to the same address in the same block. - fn analyze_allocations_and_eliminate_known_loads( - &mut self, - dfg: &mut DataFlowGraph, - loads_to_substitute: &mut BTreeMap, - load_values_to_substitute_per_block: &mut BTreeMap, - block_id: BasicBlockId, - ) -> HashSet { - let mut protected_allocations = HashSet::new(); - let block = &dfg[block_id]; - - // Check whether the block has a common successor here to avoid analyzing - // the CFG for every block instruction. - let has_common_successor = self.has_common_successor(block_id); - - for instruction_id in block.instructions() { - match &dfg[*instruction_id] { - Instruction::Store { mut address, value } => { - if has_common_successor { - protected_allocations.insert(address); - } - address = self.fetch_load_value_to_substitute(block_id, address); + /// The value of each reference at the start of the given block is the unification + /// of the value of the same reference at the end of its predecessor blocks. + fn find_starting_references(&mut self, block: BasicBlockId) -> Block { + let mut predecessors = self.cfg.predecessors(block); + + if let Some(first_predecessor) = predecessors.next() { + let first = self.blocks.get(&first_predecessor).cloned().unwrap_or_default(); + + // Note that we have to start folding with the first block as the accumulator. + // If we started with an empty block, an empty block union'd with any other block + // is always also empty so we'd never be able to track any references across blocks. 
+ predecessors.fold(first, |block, predecessor| { + let predecessor = self.blocks.entry(predecessor).or_default(); + block.unify(predecessor) + }) + } else { + Block::default() + } + } - self.last_stores_with_block.insert((address, block_id), *value); - self.store_ids.push(*instruction_id); + /// Analyze a block with the given starting reference values. + /// + /// This will remove any known loads in the block and track the value of references + /// as they are stored to. When this function is finished, the value of each reference + /// at the end of this block will be remembered in `self.blocks`. + fn analyze_block( + &mut self, + function: &mut Function, + block: BasicBlockId, + mut references: Block, + ) { + let instructions = function.dfg[block].instructions().to_vec(); + let mut last_stores = BTreeMap::new(); - if has_common_successor { - protected_allocations.insert(address); - } - } - Instruction::Load { mut address } => { - address = self.fetch_load_value_to_substitute(block_id, address); - - let found_last_value = self.find_load_to_substitute( - block_id, - address, - dfg, - instruction_id, - loads_to_substitute, - load_values_to_substitute_per_block, - ); - if !found_last_value { - // We want to protect allocations that do not have a load to substitute - protected_allocations.insert(address); - // We also want to check for allocations that share a value - // with the one we are protecting. - // This check prevents the pass from removing stores to a value that - // is used by reference aliases in different blocks - protected_allocations.insert(dfg.resolve(address)); - } - } - Instruction::Call { arguments, .. 
} => { - for arg in arguments { - if Self::value_is_from_allocation(*arg, dfg) { - protected_allocations.insert(*arg); - } - } - } - _ => { - // Nothing to do - } - } + for instruction in instructions { + self.analyze_instruction(function, &mut references, instruction, &mut last_stores); } - // Identify any arrays that are returned from this function - if let TerminatorInstruction::Return { return_values } = block.unwrap_terminator() { - for value in return_values { - if Self::value_is_from_allocation(*value, dfg) { - protected_allocations.insert(*value); - } - } - } + self.handle_terminator(function, block, &mut references, &mut last_stores); - // Substitute load result values - for (result_value, new_value) in load_values_to_substitute_per_block { - let result_value = dfg.resolve(*result_value); - dfg.set_value_from_id(result_value, *new_value); + // If there's only 1 block in the function total, we can remove any remaining last stores + // as well. We can't do this if there are multiple blocks since subsequent blocks may + // reference these stores. + if self.post_order.as_slice().len() == 1 { + self.remove_stores_that_do_not_alias_parameters(function, &references, last_stores); } - // Delete load instructions - // Even though we could let DIE handle this, doing it here makes the debug output easier - // to read. - dfg[block_id] - .instructions_mut() - .retain(|instruction| !loads_to_substitute.contains_key(instruction)); - - protected_allocations + self.blocks.insert(block, references); } - // This method will fetch already saved load values to substitute for a given address. - // The search starts at the block supplied as a parameter. If there is not a load to substitute - // the CFG is analyzed to determine whether a predecessor block has a load value to substitute. - // If there is no load value to substitute the original address is returned. 
- fn fetch_load_value_to_substitute( + /// Add all instructions in `last_stores` to `self.instructions_to_remove` which do not + /// possibly alias any parameters of the given function. + fn remove_stores_that_do_not_alias_parameters( &mut self, - block_id: BasicBlockId, - address: ValueId, - ) -> ValueId { - let mut stack = vec![block_id]; - let mut visited = HashSet::new(); - - while let Some(block) = stack.pop() { - visited.insert(block); - - // We do not want to substitute loads that take place within loops or yet to be simplified branches - // as this pass can occur before loop unrolling and flattening. - // The mem2reg pass should be ran again following all optimization passes as new values - // may be able to be promoted - for l in self.loops.yet_to_unroll.iter() { - // We do not want to substitute loads that take place within loops as this pass - // can occur before loop unrolling - // The pass should be ran again following loop unrolling as new values - if l.blocks.contains(&block) { - return address; - } - } + function: &Function, + references: &Block, + last_stores: BTreeMap, + ) { + let reference_parameters = function + .parameters() + .iter() + .filter(|param| function.dfg.type_of_value(**param) == Type::Reference) + .collect::>(); - // Check whether there is a load value to substitute in the current block. - // Return the value if found. - if let Some((value, load_block_id)) = - self.load_values_to_substitute_per_func.get(&address) - { - if *load_block_id == block { - return *value; - } - } + for (allocation, instruction) in last_stores { + if let Some(expression) = references.expressions.get(&allocation) { + if let Some(aliases) = references.aliases.get(expression) { + let allocation_aliases_parameter = + aliases.iter().any(|alias| reference_parameters.contains(alias)); - // If no load values to substitute have been found in the current block, check the block's predecessors. 
- if let Some(predecessor) = self.block_has_predecessor(block, &visited) { - stack.push(predecessor); + if !aliases.is_empty() && !allocation_aliases_parameter { + self.instructions_to_remove.insert(instruction); + } + } + } else { + self.instructions_to_remove.insert(instruction); } } - address } - // This method determines which loads should be substituted and saves them - // to be substituted further in the pass. - // Starting at the block supplied as a parameter, we check whether a store has occurred with the given address. - // If no store has occurred in the supplied block, the CFG is analyzed to determine whether - // a predecessor block has a store at the given address. - fn find_load_to_substitute( + fn analyze_instruction( &mut self, - block_id: BasicBlockId, - address: ValueId, - dfg: &DataFlowGraph, - instruction_id: &InstructionId, - loads_to_substitute: &mut BTreeMap, - load_values_to_substitute_per_block: &mut BTreeMap, - ) -> bool { - let mut stack = vec![block_id]; - let mut visited = HashSet::new(); - - while let Some(block) = stack.pop() { - visited.insert(block); - - // We do not want to substitute loads that take place within loops or yet to be simplified branches - // as this pass can occur before loop unrolling and flattening. 
- // The mem2reg pass should be ran again following all optimization passes as new values - // may be able to be promoted - for l in self.loops.yet_to_unroll.iter() { - // We do not want to substitute loads that take place within loops as this pass - // can occur before loop unrolling - // The pass should be ran again following loop unrolling as new values - if l.blocks.contains(&block) { - return false; + function: &mut Function, + references: &mut Block, + instruction: InstructionId, + last_stores: &mut BTreeMap, + ) { + match &function.dfg[instruction] { + Instruction::Load { address } => { + let address = function.dfg.resolve(*address); + + let result = function.dfg.instruction_results(instruction)[0]; + references.remember_dereference(function, address, result); + + // If the load is known, replace it with the known value and remove the load + if let Some(value) = references.get_known_value(address) { + let result = function.dfg.instruction_results(instruction)[0]; + function.dfg.set_value_from_id(result, value); + + self.instructions_to_remove.insert(instruction); + } else { + last_stores.remove(&address); } } + Instruction::Store { address, value } => { + let address = function.dfg.resolve(*address); + let value = function.dfg.resolve(*value); + + // If there was another store to this instruction without any (unremoved) loads or + // function calls in-between, we can remove the previous store. + if let Some(last_store) = last_stores.get(&address) { + self.instructions_to_remove.insert(*last_store); + } - // Check whether there has been a store instruction in the current block - // If there has been a store, add a load to be substituted. 
- if let Some(last_value) = self.last_stores_with_block.get(&(address, block)) { - let result_value = *dfg - .instruction_results(*instruction_id) - .first() - .expect("ICE: Load instructions should have single result"); - - loads_to_substitute.insert(*instruction_id, *last_value); - load_values_to_substitute_per_block.insert(result_value, *last_value); - self.load_values_to_substitute_per_func.insert(result_value, (*last_value, block)); - return true; + references.set_known_value(address, value, last_stores); + last_stores.insert(address, instruction); } - - // If no load values to substitute have been found in the current block, check the block's predecessors. - if let Some(predecessor) = self.block_has_predecessor(block, &visited) { - stack.push(predecessor); + Instruction::Call { arguments, .. } => { + self.mark_all_unknown(arguments, function, references, last_stores); } + Instruction::Allocate => { + // Register the new reference + let result = function.dfg.instruction_results(instruction)[0]; + references.expressions.insert(result, Expression::Other(result)); + + let mut aliases = BTreeSet::new(); + aliases.insert(result); + references.aliases.insert(Expression::Other(result), aliases); + } + _ => (), } - false } - // If no loads or stores have been found in the current block, check the block's predecessors. - // Only check blocks with one predecessor as otherwise a constant value could be propogated - // through successor blocks with multiple branches that rely on other simplification passes - // such as loop unrolling or flattening of the CFG. 
- fn block_has_predecessor( - &mut self, - block_id: BasicBlockId, - visited: &HashSet, - ) -> Option { - let mut predecessors = self.cfg.predecessors(block_id); - if predecessors.len() == 1 { - let predecessor = predecessors.next().unwrap(); - if self.dom_tree.is_reachable(predecessor) - && self.dom_tree.dominates(predecessor, block_id) - && !visited.contains(&predecessor) - { - return Some(predecessor); + fn mark_all_unknown( + &self, + values: &[ValueId], + function: &Function, + references: &mut Block, + last_stores: &mut BTreeMap, + ) { + for value in values { + if function.dfg.type_of_value(*value) == Type::Reference { + let value = function.dfg.resolve(*value); + references.set_unknown(value, last_stores); + last_stores.remove(&value); } } - None } - fn has_common_successor(&mut self, block_id: BasicBlockId) -> bool { - let mut predecessors = self.cfg.predecessors(block_id); - if let Some(predecessor) = predecessors.next() { - let pred_successors = self.find_all_successors(predecessor); - let current_successors: HashSet<_> = self.cfg.successors(block_id).collect(); - return pred_successors.into_iter().any(|b| current_successors.contains(&b)); + /// Remove any instructions in `self.instructions_to_remove` from the current function. + /// This is expected to contain any loads which were replaced and any stores which are + /// no longer needed. 
+ fn remove_instructions(&self, function: &mut Function) { + // The order we iterate blocks in is not important + for block in self.post_order.as_slice() { + function.dfg[*block] + .instructions_mut() + .retain(|instruction| !self.instructions_to_remove.contains(instruction)); } - false } - fn find_all_successors(&self, block_id: BasicBlockId) -> HashSet { - let mut stack = vec![]; - let mut visited = HashSet::new(); - - // Fetch initial block successors - let successors = self.cfg.successors(block_id); - for successor in successors { - if !visited.contains(&successor) { - stack.push(successor); + fn handle_terminator( + &self, + function: &mut Function, + block: BasicBlockId, + references: &mut Block, + last_stores: &mut BTreeMap, + ) { + match function.dfg[block].unwrap_terminator() { + TerminatorInstruction::JmpIf { .. } => (), // Nothing to do + TerminatorInstruction::Jmp { destination, arguments, .. } => { + let destination_parameters = function.dfg[*destination].parameters(); + assert_eq!(destination_parameters.len(), arguments.len()); + + // Add an alias for each reference parameter + for (parameter, argument) in destination_parameters.iter().zip(arguments) { + if function.dfg.type_of_value(*parameter) == Type::Reference { + let argument = function.dfg.resolve(*argument); + + if let Some(expression) = references.expressions.get(&argument) { + if let Some(aliases) = references.aliases.get_mut(expression) { + // The argument reference is possibly aliased by this block parameter + aliases.insert(*parameter); + } + } + } + } + } + TerminatorInstruction::Return { return_values } => { + // Removing all `last_stores` for each returned reference is more important here + // than setting them all to ReferenceValue::Unknown since no other block should + // have a block with a Return terminator as a predecessor anyway. 
+ self.mark_all_unknown(return_values, function, references, last_stores); } } + } +} - // Follow the CFG to fetch the remaining successors - while let Some(block) = stack.pop() { - visited.insert(block); - let successors = self.cfg.successors(block); - for successor in successors { - if !visited.contains(&successor) { - stack.push(successor); +impl Block { + /// If the given reference id points to a known value, return the value + fn get_known_value(&self, address: ValueId) -> Option { + if let Some(expression) = self.expressions.get(&address) { + if let Some(aliases) = self.aliases.get(expression) { + // We could allow multiple aliases if we check that the reference + // value in each is equal. + if aliases.len() == 1 { + let alias = aliases.first().expect("There should be exactly 1 alias"); + + if let Some(ReferenceValue::Known(value)) = self.references.get(alias) { + return Some(*value); + } } } } - visited + None + } + + /// If the given address is known, set its value to `ReferenceValue::Known(value)`. + fn set_known_value( + &mut self, + address: ValueId, + value: ValueId, + last_stores: &mut BTreeMap, + ) { + self.set_value(address, ReferenceValue::Known(value), last_stores); + } + + fn set_unknown( + &mut self, + address: ValueId, + last_stores: &mut BTreeMap, + ) { + self.set_value(address, ReferenceValue::Unknown, last_stores); } - /// Checks whether the given value id refers to an allocation. - fn value_is_from_allocation(value: ValueId, dfg: &DataFlowGraph) -> bool { - match &dfg[value] { - Value::Instruction { instruction, .. } => { - matches!(&dfg[*instruction], Instruction::Allocate) + fn set_value( + &mut self, + address: ValueId, + value: ReferenceValue, + last_stores: &mut BTreeMap, + ) { + if let Some(expression) = self.expressions.get(&address) { + if let Some(aliases) = self.aliases.get(expression) { + if aliases.is_empty() { + // uh-oh, we don't know at all what this reference refers to, could be anything. 
+ // Now we have to invalidate every reference we know of + self.invalidate_all_references(last_stores); + } else if aliases.len() == 1 { + let alias = aliases.first().expect("There should be exactly 1 alias"); + self.references.insert(*alias, value); + } else { + // More than one alias. We're not sure which it refers to so we have to + // conservatively invalidate all references it may refer to. + for alias in aliases.iter() { + if let Some(reference_value) = self.references.get_mut(alias) { + *reference_value = ReferenceValue::Unknown; + } + } + } } - _ => false, } } - /// Removes all store instructions identified during analysis that aren't present in the - /// provided `protected_allocations` `HashSet`. - fn remove_unused_stores( - &self, - dfg: &mut DataFlowGraph, - protected_allocations: &HashSet, - block_id: BasicBlockId, - ) { - // Scan for unused stores - let mut stores_to_remove = HashSet::new(); + fn invalidate_all_references(&mut self, last_stores: &mut BTreeMap) { + for reference_value in self.references.values_mut() { + *reference_value = ReferenceValue::Unknown; + } - for instruction_id in &self.store_ids { - let address = match &dfg[*instruction_id] { - Instruction::Store { address, .. 
} => *address, - _ => unreachable!("store_ids should contain only store instructions"), - }; + last_stores.clear(); + } + + fn unify(mut self, other: &Self) -> Self { + for (value_id, expression) in &other.expressions { + if let Some(existing) = self.expressions.get(value_id) { + assert_eq!(existing, expression, "Expected expressions for {value_id} to be equal"); + } else { + self.expressions.insert(*value_id, expression.clone()); + } + } + + for (expression, new_aliases) in &other.aliases { + let expression = expression.clone(); + + self.aliases + .entry(expression) + .and_modify(|aliases| { + for alias in new_aliases { + aliases.insert(*alias); + } + }) + .or_insert_with(|| new_aliases.clone()); + } - if !protected_allocations.contains(&address) { - stores_to_remove.insert(*instruction_id); + // Keep only the references present in both maps. + let mut intersection = BTreeMap::new(); + for (value_id, reference) in &other.references { + if let Some(existing) = self.references.get(value_id) { + intersection.insert(*value_id, existing.unify(*reference)); } } + self.references = intersection; - // Delete unused stores - dfg[block_id] - .instructions_mut() - .retain(|instruction| !stores_to_remove.contains(instruction)); + self + } + + /// Remember that `result` is the result of dereferencing `address`. This is important to + /// track aliasing when references are stored within other references. + fn remember_dereference(&mut self, function: &Function, address: ValueId, result: ValueId) { + if function.dfg.type_of_value(result) == Type::Reference { + if let Some(known_address) = self.get_known_value(address) { + self.expressions.insert(result, Expression::Other(known_address)); + } else { + let expression = Expression::Dereference(Box::new(Expression::Other(address))); + self.expressions.insert(result, expression); + // No known aliases to insert for this expression... can we find an alias + // even if we don't have a known address? 
If not we'll have to invalidate all + // known references if this reference is ever stored to. + } + } + } +} + +impl ReferenceValue { + fn unify(self, other: Self) -> Self { + if self == other { + self + } else { + ReferenceValue::Unknown + } } } @@ -423,8 +548,6 @@ mod tests { let ssa = builder.finish().mem2reg().fold_constants(); - println!("{ssa}"); - let func = ssa.main(); let block_id = func.entry_block(); @@ -495,7 +618,7 @@ mod tests { let func = ssa.main(); let block_id = func.entry_block(); - // Store affects outcome of returned array, and can't be removed + // Store is needed by the return value, and can't be removed assert_eq!(count_stores(block_id, &func.dfg), 1); let ret_val_id = match func.dfg[block_id].terminator().unwrap() { @@ -562,11 +685,13 @@ mod tests { assert_eq!(ssa.main().reachable_blocks().len(), 2); // Expected result: - // fn main { + // acir fn main f0 { // b0(): // v0 = allocate + // store Field 5 at v0 // jmp b1(Field 5) // b1(v3: Field): + // store Field 6 at v0 // return v3, Field 5, Field 6 // Optimized to constants 5 and 6 // } let ssa = ssa.mem2reg(); @@ -578,9 +703,9 @@ mod tests { assert_eq!(count_loads(main.entry_block(), &main.dfg), 0); assert_eq!(count_loads(b1, &main.dfg), 0); - // All stores should be removed - assert_eq!(count_stores(main.entry_block(), &main.dfg), 0); - assert_eq!(count_stores(b1, &main.dfg), 0); + // Neither store is removed since they are each the last in the block and there are multiple blocks + assert_eq!(count_stores(main.entry_block(), &main.dfg), 1); + assert_eq!(count_stores(b1, &main.dfg), 1); // The jmp to b1 should also be a constant 5 now match main.dfg[main.entry_block()].terminator() { @@ -597,7 +722,7 @@ mod tests { // Test that a load in a predecessor block has been removed if the value // is later stored in a successor block #[test] - fn store_with_load_in_predecessor_block() { + fn load_aliases_in_predecessor_block() { // fn main { // b0(): // v0 = allocate @@ -647,14 +772,17 @@ mod 
tests { assert_eq!(ssa.main().reachable_blocks().len(), 2); // Expected result: - // fn main { - // b0(): - // v0 = allocate - // v2 = allocate - // jmp b1() - // b1(): - // v8 = eq Field 2, Field 2 - // return + // acir fn main f0 { + // b0(): + // v0 = allocate + // store Field 0 at v0 + // v2 = allocate + // store v0 at v2 + // jmp b1() + // b1(): + // store Field 2 at v0 + // v8 = eq Field 1, Field 2 + // return // } let ssa = ssa.mem2reg(); @@ -665,13 +793,15 @@ mod tests { assert_eq!(count_loads(main.entry_block(), &main.dfg), 0); assert_eq!(count_loads(b1, &main.dfg), 0); - // All stores should be removed - assert_eq!(count_stores(main.entry_block(), &main.dfg), 0); - assert_eq!(count_stores(b1, &main.dfg), 0); + // Only the first store in b1 is removed since there is another store to the same reference + // in the same block, and the store is not needed before the later store. + assert_eq!(count_stores(main.entry_block(), &main.dfg), 2); + assert_eq!(count_stores(b1, &main.dfg), 1); let b1_instructions = main.dfg[b1].instructions(); - // The first instruction should be a binary operation - match &main.dfg[b1_instructions[0]] { + + // The last instruction in b1 should be a binary operation + match &main.dfg[*b1_instructions.last().unwrap()] { Instruction::Binary(binary) => { let lhs = main.dfg.get_numeric_constant(binary.lhs).expect("Expected constant value"); diff --git a/crates/noirc_evaluator/src/ssa/opt/unrolling.rs b/crates/noirc_evaluator/src/ssa/opt/unrolling.rs index 9bf62bda1cb..f9c92f8335e 100644 --- a/crates/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/crates/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -44,7 +44,7 @@ impl Ssa { } } -pub(crate) struct Loop { +struct Loop { /// The header block of a loop is the block which dominates all the /// other blocks in the loop. 
header: BasicBlockId, @@ -57,12 +57,12 @@ pub(crate) struct Loop { pub(crate) blocks: HashSet, } -pub(crate) struct Loops { +struct Loops { /// The loops that failed to be unrolled so that we do not try to unroll them again. /// Each loop is identified by its header block id. failed_to_unroll: HashSet, - pub(crate) yet_to_unroll: Vec, + yet_to_unroll: Vec, modified_blocks: HashSet, cfg: ControlFlowGraph, dom_tree: DominatorTree, @@ -70,7 +70,7 @@ pub(crate) struct Loops { /// Find a loop in the program by finding a node that dominates any predecessor node. /// The edge where this happens will be the back-edge of the loop. -pub(crate) fn find_all_loops(function: &Function) -> Loops { +fn find_all_loops(function: &Function) -> Loops { let cfg = ControlFlowGraph::with_function(function); let post_order = PostOrder::with_function(function); let mut dom_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order);