From 1fb27566ca85fb3c5912308b99edb7a379a8b792 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 6 Apr 2023 17:06:19 +0100 Subject: [PATCH 01/63] feat(nargo): add skeleton of composite types in template input tomls (#1104) * chore(nargo): return string rather than map when constructing template tomls * feat(nargo): generate structure for composite abi types in toml --- crates/nargo_cli/src/cli/check_cmd.rs | 94 ++++++++++++++++++++------- 1 file changed, 72 insertions(+), 22 deletions(-) diff --git a/crates/nargo_cli/src/cli/check_cmd.rs b/crates/nargo_cli/src/cli/check_cmd.rs index a10ccf71a11..557093444a1 100644 --- a/crates/nargo_cli/src/cli/check_cmd.rs +++ b/crates/nargo_cli/src/cli/check_cmd.rs @@ -4,10 +4,7 @@ use clap::Args; use iter_extended::btree_map; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; use noirc_driver::CompileOptions; -use std::{ - collections::BTreeMap, - path::{Path, PathBuf}, -}; +use std::path::{Path, PathBuf}; use super::fs::write_to_file; use super::NargoConfig; @@ -43,19 +40,16 @@ fn check_from_path>(p: P, compile_options: &CompileOptions) -> Re let path_to_prover_input = path_to_root.join(format!("{PROVER_INPUT_FILE}.toml")); let path_to_verifier_input = path_to_root.join(format!("{VERIFIER_INPUT_FILE}.toml")); - // If they are not available, then create them and - // populate them based on the ABI + // If they are not available, then create them and populate them based on the ABI if !path_to_prover_input.exists() { - let toml = - toml::to_string(&build_placeholder_input_map(parameters.clone(), None)).unwrap(); - write_to_file(toml.as_bytes(), &path_to_prover_input); + let prover_toml = create_input_toml_template(parameters.clone(), None); + write_to_file(prover_toml.as_bytes(), &path_to_prover_input); } if !path_to_verifier_input.exists() { let public_inputs = parameters.into_iter().filter(|param| param.is_public()).collect(); - let toml = - 
toml::to_string(&build_placeholder_input_map(public_inputs, return_type)).unwrap(); - write_to_file(toml.as_bytes(), &path_to_verifier_input); + let verifier_toml = create_input_toml_template(public_inputs, return_type); + write_to_file(verifier_toml.as_bytes(), &path_to_verifier_input); } } else { // This means that this is a library. Libraries do not have ABIs. @@ -63,17 +57,30 @@ fn check_from_path>(p: P, compile_options: &CompileOptions) -> Re Ok(()) } -fn build_placeholder_input_map( +/// Generates the contents of a toml file with fields for each of the passed parameters. +fn create_input_toml_template( parameters: Vec, return_type: Option, -) -> BTreeMap { - let default_value = |typ: AbiType| -> toml::Value { - if matches!(typ, AbiType::Array { .. }) { - toml::Value::Array(Vec::new()) - } else { - toml::Value::String("".to_owned()) +) -> String { + /// Returns a default placeholder `toml::Value` for `typ` which + /// complies with the structure of the specified `AbiType`. + fn default_value(typ: AbiType) -> toml::Value { + match typ { + AbiType::Array { length, typ } => { + let default_value_vec = std::iter::repeat(default_value(*typ)) + .take(length.try_into().unwrap()) + .collect(); + toml::Value::Array(default_value_vec) + } + AbiType::Struct { fields } => { + let default_value_map = toml::map::Map::from_iter( + fields.into_iter().map(|(name, typ)| (name, default_value(typ))), + ); + toml::Value::Table(default_value_map) + } + _ => toml::Value::String("".to_owned()), } - }; + } let mut map = btree_map(parameters, |AbiParameter { name, typ, .. 
}| (name, default_value(typ))); @@ -82,17 +89,60 @@ fn build_placeholder_input_map( map.insert(MAIN_RETURN_NAME.to_owned(), default_value(typ)); } - map + toml::to_string(&map).unwrap() } #[cfg(test)] mod tests { - use std::path::PathBuf; + use std::{collections::BTreeMap, path::PathBuf}; + use noirc_abi::{AbiParameter, AbiType, AbiVisibility, Sign}; use noirc_driver::CompileOptions; + use super::create_input_toml_template; + const TEST_DATA_DIR: &str = "tests/target_tests_data"; + #[test] + fn valid_toml_template() { + let typed_param = |name: &str, typ: AbiType| AbiParameter { + name: name.to_string(), + typ, + visibility: AbiVisibility::Public, + }; + let parameters = vec![ + typed_param("a", AbiType::Field), + typed_param("b", AbiType::Integer { sign: Sign::Unsigned, width: 32 }), + typed_param("c", AbiType::Array { length: 2, typ: Box::new(AbiType::Field) }), + typed_param( + "d", + AbiType::Struct { + fields: BTreeMap::from([ + (String::from("d1"), AbiType::Field), + ( + String::from("d2"), + AbiType::Array { length: 3, typ: Box::new(AbiType::Field) }, + ), + ]), + }, + ), + typed_param("e", AbiType::Boolean), + ]; + + let toml_str = create_input_toml_template(parameters, None); + + let expected_toml_str = r#"a = "" +b = "" +c = ["", ""] +e = "" + +[d] +d1 = "" +d2 = ["", "", ""] +"#; + assert_eq!(toml_str, expected_toml_str); + } + #[test] fn pass() { let pass_dir = From e12515778913164a0a9673c3f0eb98b3c5b73a7b Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 6 Apr 2023 15:51:24 -0500 Subject: [PATCH 02/63] feat: Add new `Vec` type to frontend (#1103) * Add Vec keyword * Add Vec to frontend * Add doc comments * Update noir_stdlib/src/collections/vec.nr Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- crates/noirc_evaluator/src/ssa/context.rs | 1 + crates/noirc_evaluator/src/ssa/value.rs | 1 + crates/noirc_frontend/src/ast/mod.rs | 12 ++++++++++ 
.../src/hir/resolution/errors.rs | 11 +++------ .../src/hir/resolution/resolver.rs | 19 +++++++++++++-- crates/noirc_frontend/src/hir_def/types.rs | 17 +++++++++++++ crates/noirc_frontend/src/lexer/token.rs | 3 +++ .../src/monomorphization/ast.rs | 2 ++ .../src/monomorphization/mod.rs | 16 +++++++++---- crates/noirc_frontend/src/node_interner.rs | 2 ++ crates/noirc_frontend/src/parser/parser.rs | 11 +++++++-- noir_stdlib/src/collections.nr | 1 + noir_stdlib/src/collections/vec.nr | 24 +++++++++++++++++++ noir_stdlib/src/lib.nr | 1 + 14 files changed, 104 insertions(+), 17 deletions(-) create mode 100644 noir_stdlib/src/collections.nr create mode 100644 noir_stdlib/src/collections/vec.nr diff --git a/crates/noirc_evaluator/src/ssa/context.rs b/crates/noirc_evaluator/src/ssa/context.rs index da528e21d05..0154b36debc 100644 --- a/crates/noirc_evaluator/src/ssa/context.rs +++ b/crates/noirc_evaluator/src/ssa/context.rs @@ -1205,6 +1205,7 @@ impl SsaContext { Type::Function(..) => ObjectType::Function, Type::Tuple(_) => todo!("Conversion to ObjectType is unimplemented for tuples"), Type::String(_) => todo!("Conversion to ObjectType is unimplemented for strings"), + Type::Vec(_) => todo!("Conversion to ObjectType is unimplemented for Vecs"), } } diff --git a/crates/noirc_evaluator/src/ssa/value.rs b/crates/noirc_evaluator/src/ssa/value.rs index 30c07ccd567..915effe480b 100644 --- a/crates/noirc_evaluator/src/ssa/value.rs +++ b/crates/noirc_evaluator/src/ssa/value.rs @@ -96,6 +96,7 @@ impl Value { Type::Unit | Type::Function(..) | Type::Array(..) + | Type::Vec(..) | Type::String(..) | Type::Integer(..) 
| Type::Bool diff --git a/crates/noirc_frontend/src/ast/mod.rs b/crates/noirc_frontend/src/ast/mod.rs index 2e6592e3c87..6bd5c148d66 100644 --- a/crates/noirc_frontend/src/ast/mod.rs +++ b/crates/noirc_frontend/src/ast/mod.rs @@ -35,6 +35,14 @@ pub enum UnresolvedType { /// A Named UnresolvedType can be a struct type or a type variable Named(Path, Vec), + /// A vector of some element type. + /// It is expected the length of the generics is 1 so the inner Vec is technically unnecessary, + /// but we keep them all around to verify generic count after parsing for better error messages. + /// + /// The Span here encompasses the entire type and is used to issue an error if exactly 1 + /// generic argument is not given. + Vec(Vec, Span), + // Note: Tuples have no visibility, instead each of their elements may have one. Tuple(Vec), @@ -100,6 +108,10 @@ impl std::fmt::Display for UnresolvedType { let args = vecmap(args, ToString::to_string); write!(f, "fn({}) -> {ret}", args.join(", ")) } + Vec(args, _span) => { + let args = vecmap(args, ToString::to_string); + write!(f, "Vec<{}>", args.join(", ")) + } Unit => write!(f, "()"), Error => write!(f, "error"), Unspecified => write!(f, "unspecified"), diff --git a/crates/noirc_frontend/src/hir/resolution/errors.rs b/crates/noirc_frontend/src/hir/resolution/errors.rs index 3ce7d9d0249..9406474a226 100644 --- a/crates/noirc_frontend/src/hir/resolution/errors.rs +++ b/crates/noirc_frontend/src/hir/resolution/errors.rs @@ -2,7 +2,7 @@ pub use noirc_errors::Span; use noirc_errors::{CustomDiagnostic as Diagnostic, FileDiagnostic}; use thiserror::Error; -use crate::{parser::ParserError, Ident, Shared, StructType, Type}; +use crate::{parser::ParserError, Ident, Type}; use super::import::PathResolutionError; @@ -53,12 +53,7 @@ pub enum ResolverError { #[error("Cannot apply generics on Self type")] GenericsOnSelfType { span: Span }, #[error("Incorrect amount of arguments to generic type constructor")] - IncorrectGenericCount { - span: 
Span, - struct_type: Shared, - actual: usize, - expected: usize, - }, + IncorrectGenericCount { span: Span, struct_type: String, actual: usize, expected: usize }, #[error("{0}")] ParserError(ParserError), #[error("Function is not defined in a contract yet sets its contract visibility")] @@ -238,7 +233,7 @@ impl From for Diagnostic { let actual_plural = if actual == 1 { "is" } else { "are" }; Diagnostic::simple_error( - format!("The struct type {} has {expected} generic{expected_plural} but {actual} {actual_plural} given here", struct_type.borrow()), + format!("The struct type {struct_type} has {expected} generic{expected_plural} but {actual} {actual_plural} given here"), "Incorrect number of generic arguments".into(), span, ) diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index d01952f7c49..e0e282e0e02 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -347,6 +347,20 @@ impl<'a> Resolver<'a> { let ret = Box::new(self.resolve_type_inner(*ret, new_variables)); Type::Function(args, ret) } + UnresolvedType::Vec(mut args, span) => { + let arg = if args.len() != 1 { + self.push_err(ResolverError::IncorrectGenericCount { + span, + struct_type: "Vec".into(), + actual: args.len(), + expected: 1, + }); + Type::Error + } else { + self.resolve_type_inner(args.remove(0), new_variables) + }; + Type::Vec(Box::new(arg)) + } } } @@ -390,13 +404,13 @@ impl<'a> Resolver<'a> { if args.len() != expected_generic_count { self.push_err(ResolverError::IncorrectGenericCount { span, - struct_type: struct_type.clone(), + struct_type: struct_type.borrow().to_string(), actual: args.len(), expected: expected_generic_count, }); // Fix the generic count so we can continue typechecking - args.resize_with(expected_generic_count, || self.interner.next_type_variable()) + args.resize_with(expected_generic_count, || Type::Error) } 
Type::Struct(struct_type, args) @@ -751,6 +765,7 @@ impl<'a> Resolver<'a> { } } } + Type::Vec(element) => Self::find_numeric_generics_in_type(element, found), } } diff --git a/crates/noirc_frontend/src/hir_def/types.rs b/crates/noirc_frontend/src/hir_def/types.rs index f22ebb621d7..ff113f83c51 100644 --- a/crates/noirc_frontend/src/hir_def/types.rs +++ b/crates/noirc_frontend/src/hir_def/types.rs @@ -70,6 +70,11 @@ pub enum Type { /// A functions with arguments, and a return type. Function(Vec, Box), + /// A variable-sized Vector type. + /// Unlike arrays, this type can have a dynamic size and can grow/shrink dynamically via .push, + /// .pop, and similar methods. + Vec(Box), + /// A type generic over the given type variables. /// Storing both the TypeVariableId and TypeVariable isn't necessary /// but it makes handling them both easier. The TypeVariableId should @@ -589,6 +594,7 @@ impl Type { } }) } + Type::Vec(element) => element.contains_numeric_typevar(target_id), } } } @@ -645,6 +651,9 @@ impl std::fmt::Display for Type { let args = vecmap(args, ToString::to_string); write!(f, "fn({}) -> {}", args.join(", "), ret) } + Type::Vec(element) => { + write!(f, "Vec<{}>", element) + } } } } @@ -975,6 +984,8 @@ impl Type { } } + (Vec(elem_a), Vec(elem_b)) => elem_a.try_unify(elem_b, span), + (other_a, other_b) => { if other_a == other_b { Ok(()) @@ -1106,6 +1117,8 @@ impl Type { } } + (Vec(elem_a), Vec(elem_b)) => elem_a.is_subtype_of(elem_b, span), + (other_a, other_b) => { if other_a == other_b { Ok(()) @@ -1176,6 +1189,7 @@ impl Type { Type::NamedGeneric(..) => unreachable!(), Type::Forall(..) 
=> unreachable!(), Type::Function(_, _) => unreachable!(), + Type::Vec(_) => unreachable!("Vecs cannot be used in the abi"), } } @@ -1289,6 +1303,7 @@ impl Type { let ret = Box::new(ret.substitute(type_bindings)); Type::Function(args, ret) } + Type::Vec(element) => Type::Vec(Box::new(element.substitute(type_bindings))), Type::FieldElement(_) | Type::Integer(_, _, _) @@ -1318,6 +1333,7 @@ impl Type { Type::Function(args, ret) => { args.iter().any(|arg| arg.occurs(target_id)) || ret.occurs(target_id) } + Type::Vec(element) => element.occurs(target_id), Type::FieldElement(_) | Type::Integer(_, _, _) @@ -1359,6 +1375,7 @@ impl Type { let ret = Box::new(ret.follow_bindings()); Function(args, ret) } + Vec(element) => Vec(Box::new(element.follow_bindings())), // Expect that this function should only be called on instantiated types Forall(..) => unreachable!(), diff --git a/crates/noirc_frontend/src/lexer/token.rs b/crates/noirc_frontend/src/lexer/token.rs index 84939641bd7..73ff6bfdedb 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -439,6 +439,7 @@ pub enum Keyword { Struct, Unconstrained, Use, + Vec, While, } @@ -471,6 +472,7 @@ impl fmt::Display for Keyword { Keyword::Struct => write!(f, "struct"), Keyword::Unconstrained => write!(f, "unconstrained"), Keyword::Use => write!(f, "use"), + Keyword::Vec => write!(f, "Vec"), Keyword::While => write!(f, "while"), } } @@ -506,6 +508,7 @@ impl Keyword { "struct" => Keyword::Struct, "unconstrained" => Keyword::Unconstrained, "use" => Keyword::Use, + "Vec" => Keyword::Vec, "while" => Keyword::While, "true" => return Some(Token::Bool(true)), diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index 7920676aa7d..938a937405c 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -202,6 +202,7 @@ pub enum Type { String(/*len:*/ u64), // String(4) = 
str[4] Unit, Tuple(Vec), + Vec(Box), Function(/*args:*/ Vec, /*ret:*/ Box), } @@ -301,6 +302,7 @@ impl std::fmt::Display for Type { let args = vecmap(args, ToString::to_string); write!(f, "fn({}) -> {}", args.join(", "), ret) } + Type::Vec(element) => write!(f, "Vec<{element}>"), } } } diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index 26009c0227e..bfce292d2eb 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -382,8 +382,8 @@ impl<'interner> Monomorphizer<'interner> { }, )), - ast::Type::Array(_, _) | ast::Type::String(_) => { - unreachable!("Nested arrays and arrays of strings are not supported") + ast::Type::Array(_, _) | ast::Type::String(_) | ast::Type::Vec(_) => { + unreachable!("Nested arrays, arrays of strings, and Vecs are not supported") } } } @@ -425,8 +425,8 @@ impl<'interner> Monomorphizer<'interner> { })) } - ast::Type::Array(_, _) | ast::Type::String(_) => { - unreachable!("Nested arrays and arrays of strings are not supported") + ast::Type::Array(_, _) | ast::Type::String(_) | ast::Type::Vec(_) => { + unreachable!("Nested arrays and arrays of strings or Vecs are not supported") } } } @@ -663,6 +663,11 @@ impl<'interner> Monomorphizer<'interner> { ast::Type::Function(args, ret) } + HirType::Vec(element) => { + let element = Self::convert_type(element); + ast::Type::Vec(Box::new(element)) + } + HirType::Forall(_, _) | HirType::Constant(_) | HirType::Error => { unreachable!("Unexpected type {} found", typ) } @@ -683,7 +688,7 @@ impl<'interner> Monomorphizer<'interner> { ast::Type::Tuple(vecmap(elements, |typ| Self::aos_to_soa_type(length, typ))) } - ast::Type::Array(_, _) | ast::Type::String(_) => { + ast::Type::Array(_, _) | ast::Type::String(_) | ast::Type::Vec(_) => { unreachable!("Nested arrays and arrays of strings are not supported") } } @@ -941,6 +946,7 @@ impl<'interner> Monomorphizer<'interner> { 
ast::Type::Function(parameter_types, ret_type) => { self.create_zeroed_function(parameter_types, ret_type) } + ast::Type::Vec(_) => panic!("Cannot create a zeroed Vec value. This type is currently unimplemented and meant to be unusable outside of unconstrained functions"), } } diff --git a/crates/noirc_frontend/src/node_interner.rs b/crates/noirc_frontend/src/node_interner.rs index 417c7bedc66..4479fe91135 100644 --- a/crates/noirc_frontend/src/node_interner.rs +++ b/crates/noirc_frontend/src/node_interner.rs @@ -654,6 +654,7 @@ enum TypeMethodKey { Unit, Tuple, Function, + Vec, } fn get_type_method_key(typ: &Type) -> Option { @@ -669,6 +670,7 @@ fn get_type_method_key(typ: &Type) -> Option { Type::Unit => Some(Unit), Type::Tuple(_) => Some(Tuple), Type::Function(_, _) => Some(Function), + Type::Vec(_) => Some(Vec), // We do not support adding methods to these types Type::TypeVariable(_) diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 463fbee248e..62824023daf 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -537,11 +537,12 @@ fn parse_type_inner( choice(( field_type(), int_type(), + bool_type(), + string_type(), named_type(recursive_type_parser.clone()), array_type(recursive_type_parser.clone()), tuple_type(recursive_type_parser.clone()), - bool_type(), - string_type(), + vec_type(recursive_type_parser.clone()), function_type(recursive_type_parser), )) } @@ -593,6 +594,12 @@ fn named_type(type_parser: impl NoirParser) -> impl NoirParser) -> impl NoirParser { + keyword(Keyword::Vec) + .ignore_then(generic_type_args(type_parser)) + .map_with_span(UnresolvedType::Vec) +} + fn generic_type_args( type_parser: impl NoirParser, ) -> impl NoirParser> { diff --git a/noir_stdlib/src/collections.nr b/noir_stdlib/src/collections.nr new file mode 100644 index 00000000000..e06c662e658 --- /dev/null +++ b/noir_stdlib/src/collections.nr @@ -0,0 +1 @@ +mod vec; diff 
--git a/noir_stdlib/src/collections/vec.nr b/noir_stdlib/src/collections/vec.nr new file mode 100644 index 00000000000..130dfdfc2a6 --- /dev/null +++ b/noir_stdlib/src/collections/vec.nr @@ -0,0 +1,24 @@ + +// These methods are all stubs currently and aren't implemented internally yet. +// For a similar reason, no constructor for Vec is exposed yet since the type +// is still in-progress. +impl Vec { + /// Get an element from the vector at the given index. + /// Fails with a constraint error if the given index + /// points beyond the end of the vector. + #[builtin(vec_get)] + fn get(_self: Self, _index: Field) -> T { } + + /// Push a new element to the end of the vector, returning a + /// new vector with a length one greater than the + /// original unmodified vector. + #[builtin(vec_push)] + fn push(_self: Self, _elem: T) -> Self { } + + /// Pop an element from the end of the given vector, returning + /// a new vector with a length of one less than the given vector, + /// as well as the popped element. + /// Fails with a constraint error if the given vector's length is zero. 
+ #[builtin(vec_pop)] + fn pop(_self: Self) -> (Self, T) { } +} diff --git a/noir_stdlib/src/lib.nr b/noir_stdlib/src/lib.nr index 16383c2c704..f0af06b97ba 100644 --- a/noir_stdlib/src/lib.nr +++ b/noir_stdlib/src/lib.nr @@ -9,6 +9,7 @@ mod sha512; mod field; mod ec; mod unsafe; +mod collections; #[builtin(println)] fn println(_input : T) {} From 61c38d2fd946697296905f267c49d18609835fcb Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 7 Apr 2023 05:41:35 -0500 Subject: [PATCH 03/63] fix: compiler identifying imported functions as being part of a contract (#1112) * Separate items defined within a module from those just imported into the module * Fix test --- .../src/hir/def_collector/dc_crate.rs | 12 ++-- .../src/hir/def_collector/dc_mod.rs | 15 ++--- .../src/hir/def_map/item_scope.rs | 33 ++-------- crates/noirc_frontend/src/hir/def_map/mod.rs | 17 +++-- .../src/hir/def_map/module_data.rs | 62 ++++++++++++++++++- .../src/hir/resolution/import.rs | 4 +- crates/noirc_frontend/src/main.rs | 7 +-- 7 files changed, 86 insertions(+), 64 deletions(-) diff --git a/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs b/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs index e932f91f75a..b61376f39ae 100644 --- a/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -138,8 +138,7 @@ impl DefCollector { let name = resolved_import.name; for ns in resolved_import.resolved_namespace.iter_defs() { let result = current_def_map.modules[resolved_import.module_scope.0] - .scope - .add_item_to_namespace(name.clone(), ns); + .import(name.clone(), ns); if let Err((first_def, second_def)) = result { let err = DefCollectorErrorKind::DuplicateImport { first_def, second_def }; @@ -224,14 +223,13 @@ fn collect_impls( extend_errors(errors, unresolved.file_id, resolver.take_errors()); if let Some(type_module) = get_local_id_from_type(&typ) { - // Grab the scope defined by the struct type. 
Note that impls are a case - // where the scope the methods are added to is not the same as the scope + // Grab the module defined by the struct type. Note that impls are a case + // where the module the methods are added to is not the same as the module // they are resolved in. - let scope = &mut def_maps.get_mut(&crate_id).unwrap().modules[type_module.0].scope; + let module = &mut def_maps.get_mut(&crate_id).unwrap().modules[type_module.0]; - // .define_func_def(name, func_id); for (_, method_id, method) in &unresolved.functions { - let result = scope.define_func_def(method.name_ident().clone(), *method_id); + let result = module.declare_function(method.name_ident().clone(), *method_id); if let Err((first_def, second_def)) = result { let err = diff --git a/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs b/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs index 1a58decda99..bee7f1755c8 100644 --- a/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -75,9 +75,8 @@ impl<'a> ModCollector<'a> { let stmt_id = context.def_interner.push_empty_global(); // Add the statement to the scope so its path can be looked up later - let result = self.def_collector.def_map.modules[self.module_id.0] - .scope - .define_global(name, stmt_id); + let result = + self.def_collector.def_map.modules[self.module_id.0].declare_global(name, stmt_id); if let Err((first_def, second_def)) = result { let err = DefCollectorErrorKind::DuplicateGlobal { first_def, second_def }; @@ -137,8 +136,7 @@ impl<'a> ModCollector<'a> { // Add function to scope/ns of the module let result = self.def_collector.def_map.modules[self.module_id.0] - .scope - .define_func_def(name, func_id); + .declare_function(name, func_id); if let Err((first_def, second_def)) = result { let error = DefCollectorErrorKind::DuplicateFunction { first_def, second_def }; @@ -167,9 +165,8 @@ impl<'a> ModCollector<'a> { }; // Add the struct to scope so its path can 
be looked up later - let result = self.def_collector.def_map.modules[self.module_id.0] - .scope - .define_struct_def(name, id); + let result = + self.def_collector.def_map.modules[self.module_id.0].declare_struct(name, id); if let Err((first_def, second_def)) = result { let err = DefCollectorErrorKind::DuplicateFunction { first_def, second_def }; @@ -288,7 +285,7 @@ impl<'a> ModCollector<'a> { }; if let Err((first_def, second_def)) = - modules[self.module_id.0].scope.define_module_def(mod_name.to_owned(), mod_id) + modules[self.module_id.0].declare_child_module(mod_name.to_owned(), mod_id) { let err = DefCollectorErrorKind::DuplicateModuleDecl { first_def, second_def }; errors.push(err.into_file_diagnostic(self.file_id)); diff --git a/crates/noirc_frontend/src/hir/def_map/item_scope.rs b/crates/noirc_frontend/src/hir/def_map/item_scope.rs index 6a819550e72..52201f7ade3 100644 --- a/crates/noirc_frontend/src/hir/def_map/item_scope.rs +++ b/crates/noirc_frontend/src/hir/def_map/item_scope.rs @@ -1,8 +1,5 @@ use super::{namespace::PerNs, ModuleDefId, ModuleId}; -use crate::{ - node_interner::{FuncId, StmtId, StructId}, - Ident, -}; +use crate::{node_interner::FuncId, Ident}; use std::collections::{hash_map::Entry, HashMap}; #[derive(Debug, PartialEq, Eq, Copy, Clone)] @@ -55,30 +52,6 @@ impl ItemScope { } } - pub fn define_module_def( - &mut self, - name: Ident, - mod_id: ModuleId, - ) -> Result<(), (Ident, Ident)> { - self.add_definition(name, mod_id.into()) - } - - pub fn define_func_def(&mut self, name: Ident, local_id: FuncId) -> Result<(), (Ident, Ident)> { - self.add_definition(name, local_id.into()) - } - - pub fn define_struct_def( - &mut self, - name: Ident, - local_id: StructId, - ) -> Result<(), (Ident, Ident)> { - self.add_definition(name, ModuleDefId::TypeId(local_id)) - } - - pub fn define_global(&mut self, name: Ident, stmt_id: StmtId) -> Result<(), (Ident, Ident)> { - self.add_definition(name, ModuleDefId::GlobalId(stmt_id)) - } - pub fn 
find_module_with_name(&self, mod_name: &Ident) -> Option<&ModuleId> { let (module_def, _) = self.types.get(mod_name)?; match module_def { @@ -86,6 +59,7 @@ impl ItemScope { _ => None, } } + pub fn find_func_with_name(&self, func_name: &Ident) -> Option { let (module_def, _) = self.values.get(func_name)?; match module_def { @@ -93,6 +67,7 @@ impl ItemScope { _ => None, } } + pub fn find_name(&self, name: &Ident) -> PerNs { PerNs { types: self.types.get(name).cloned(), values: self.values.get(name).cloned() } } @@ -100,9 +75,11 @@ impl ItemScope { pub fn definitions(&self) -> Vec { self.defs.clone() } + pub fn types(&self) -> &HashMap { &self.types } + pub fn values(&self) -> &HashMap { &self.values } diff --git a/crates/noirc_frontend/src/hir/def_map/mod.rs b/crates/noirc_frontend/src/hir/def_map/mod.rs index abf07c8d2a8..25e0488a7b6 100644 --- a/crates/noirc_frontend/src/hir/def_map/mod.rs +++ b/crates/noirc_frontend/src/hir/def_map/mod.rs @@ -110,7 +110,7 @@ impl CrateDefMap { // This function accepts an Ident, so we attach a dummy span to // "main". Equality is implemented only on the contents. 
- root_module.scope.find_func_with_name(&MAIN_FUNCTION.into()) + root_module.find_func_with_name(&MAIN_FUNCTION.into()) } pub fn root_file_id(&self) -> FileId { @@ -129,8 +129,10 @@ impl CrateDefMap { interner: &'a NodeInterner, ) -> impl Iterator + 'a { self.modules.iter().flat_map(|(_, module)| { - let functions = module.scope.values().values().filter_map(|(id, _)| id.as_function()); - functions.filter(|id| interner.function_meta(id).attributes == Some(Attribute::Test)) + module + .value_definitions() + .filter_map(|id| id.as_function()) + .filter(|id| interner.function_meta(id).attributes == Some(Attribute::Test)) }) } @@ -141,13 +143,8 @@ impl CrateDefMap { .iter() .filter_map(|(id, module)| { if module.is_contract { - let functions = module - .scope - .values() - .values() - .filter_map(|(id, _)| id.as_function()) - .collect(); - + let functions = + module.value_definitions().filter_map(|id| id.as_function()).collect(); let name = self.get_module_path(id, module.parent); Some(Contract { name, functions }) } else { diff --git a/crates/noirc_frontend/src/hir/def_map/module_data.rs b/crates/noirc_frontend/src/hir/def_map/module_data.rs index d437a4d1b6c..20906885ad9 100644 --- a/crates/noirc_frontend/src/hir/def_map/module_data.rs +++ b/crates/noirc_frontend/src/hir/def_map/module_data.rs @@ -2,9 +2,12 @@ use std::collections::HashMap; use fm::FileId; -use crate::Ident; +use crate::{ + node_interner::{FuncId, StmtId, StructId}, + Ident, +}; -use super::{ItemScope, LocalModuleId}; +use super::{ItemScope, LocalModuleId, ModuleDefId, ModuleId, PerNs}; /// Contains the actual contents of a module: its parent (if one exists), /// children, and scope with all definitions defined within the scope. @@ -12,7 +15,13 @@ use super::{ItemScope, LocalModuleId}; pub struct ModuleData { pub parent: Option, pub children: HashMap, - pub scope: ItemScope, + + /// Contains all definitions visible to the current module. 
This includes + /// all definitions in self.definitions as well as all imported definitions. + scope: ItemScope, + + /// Contains only the definitions directly defined in the current module + definitions: ItemScope, pub origin: ModuleOrigin, @@ -30,10 +39,57 @@ impl ModuleData { parent, children: HashMap::new(), scope: ItemScope::default(), + definitions: ItemScope::default(), origin, is_contract, } } + + fn declare(&mut self, name: Ident, item_id: ModuleDefId) -> Result<(), (Ident, Ident)> { + self.scope.add_definition(name.clone(), item_id)?; + + // definitions is a subset of self.scope so it is expected if self.scope.define_func_def + // returns without error, so will self.definitions.define_func_def. + self.definitions.add_definition(name, item_id) + } + + pub fn declare_function(&mut self, name: Ident, id: FuncId) -> Result<(), (Ident, Ident)> { + self.declare(name, id.into()) + } + + pub fn declare_global(&mut self, name: Ident, id: StmtId) -> Result<(), (Ident, Ident)> { + self.declare(name, id.into()) + } + + pub fn declare_struct(&mut self, name: Ident, id: StructId) -> Result<(), (Ident, Ident)> { + self.declare(name, ModuleDefId::TypeId(id)) + } + + pub fn declare_child_module( + &mut self, + name: Ident, + child_id: ModuleId, + ) -> Result<(), (Ident, Ident)> { + self.declare(name, child_id.into()) + } + + pub fn find_func_with_name(&self, name: &Ident) -> Option { + self.scope.find_func_with_name(name) + } + + pub fn import(&mut self, name: Ident, id: ModuleDefId) -> Result<(), (Ident, Ident)> { + self.scope.add_item_to_namespace(name, id) + } + + pub fn find_name(&self, name: &Ident) -> PerNs { + self.scope.find_name(name) + } + + /// Return an iterator over all definitions defined within this module, + /// excluding any type definitions. 
+ pub fn value_definitions(&self) -> impl Iterator + '_ { + self.definitions.values().values().map(|(id, _)| *id) + } } #[derive(Debug, PartialEq, Eq, Copy, Clone)] diff --git a/crates/noirc_frontend/src/hir/resolution/import.rs b/crates/noirc_frontend/src/hir/resolution/import.rs index 836333ee5d9..bd8f3e5e634 100644 --- a/crates/noirc_frontend/src/hir/resolution/import.rs +++ b/crates/noirc_frontend/src/hir/resolution/import.rs @@ -135,7 +135,7 @@ fn resolve_name_in_module( let mut import_path = import_path.iter(); let first_segment = import_path.next().expect("ice: could not fetch first segment"); - let mut current_ns = current_mod.scope.find_name(first_segment); + let mut current_ns = current_mod.find_name(first_segment); if current_ns.is_none() { return Err(PathResolutionError::Unresolved(first_segment.clone())); } @@ -158,7 +158,7 @@ fn resolve_name_in_module( current_mod = &def_maps[&new_module_id.krate].modules[new_module_id.local_id.0]; // Check if namespace - let found_ns = current_mod.scope.find_name(segment); + let found_ns = current_mod.find_name(segment); if found_ns.is_none() { return Err(PathResolutionError::Unresolved(segment.clone())); } diff --git a/crates/noirc_frontend/src/main.rs b/crates/noirc_frontend/src/main.rs index f68c52e3129..b1a5c0f950e 100644 --- a/crates/noirc_frontend/src/main.rs +++ b/crates/noirc_frontend/src/main.rs @@ -41,18 +41,15 @@ fn main() { // Get root module let root = def_map.root(); let module = def_map.modules().get(root.0).unwrap(); - for (name, (def_id, vis)) in module.scope.values() { - println!("func name is {:?}", name); + for def_id in module.value_definitions() { let func_id = match def_id { ModuleDefId::FunctionId(func_id) => func_id, _ => unreachable!(), }; // Get the HirFunction for that Id - let hir = context.def_interner.function(func_id); - + let hir = context.def_interner.function(&func_id); println!("func hir is {:?}", hir); - println!("func vis is {:?}", vis); } // From 
daf5c9d76820cc53cba1104b4d5c688ea0121117 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 10 Apr 2023 23:10:09 +0100 Subject: [PATCH 04/63] chore: borrow instead of cloning witness vectors in IR gen (#1127) --- crates/noirc_evaluator/src/lib.rs | 8 ++++---- crates/noirc_evaluator/src/ssa/ssa_gen.rs | 20 +++++++------------- 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 3880a32fd99..9656c3cdb9f 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -175,7 +175,7 @@ impl Evaluator { AbiType::Array { length, typ } => { let witnesses = self.generate_array_witnesses(length, typ)?; - ir_gen.abi_array(name, Some(def), typ.as_ref(), *length, witnesses.clone()); + ir_gen.abi_array(name, Some(def), typ.as_ref(), *length, &witnesses); witnesses } AbiType::Integer { sign: _, width } => { @@ -203,13 +203,13 @@ impl Evaluator { let mut struct_witnesses: BTreeMap> = BTreeMap::new(); self.generate_struct_witnesses(&mut struct_witnesses, &new_fields)?; - ir_gen.abi_struct(name, Some(def), fields, struct_witnesses.clone()); - struct_witnesses.values().flatten().cloned().collect() + ir_gen.abi_struct(name, Some(def), fields, &struct_witnesses); + struct_witnesses.values().flatten().copied().collect() } AbiType::String { length } => { let typ = AbiType::Integer { sign: noirc_abi::Sign::Unsigned, width: 8 }; let witnesses = self.generate_array_witnesses(length, &typ)?; - ir_gen.abi_array(name, Some(def), &typ, *length, witnesses.clone()); + ir_gen.abi_array(name, Some(def), &typ, *length, &witnesses); witnesses } }; diff --git a/crates/noirc_evaluator/src/ssa/ssa_gen.rs b/crates/noirc_evaluator/src/ssa/ssa_gen.rs index e819d83f55d..8205dc8e10c 100644 --- a/crates/noirc_evaluator/src/ssa/ssa_gen.rs +++ b/crates/noirc_evaluator/src/ssa/ssa_gen.rs @@ -10,7 +10,7 @@ use crate::{ {block, builtin, node, ssa_form}, 
}, }; -use acvm::FieldElement; +use acvm::{acir::native_types::Witness, FieldElement}; use iter_extended::vecmap; use noirc_errors::Location; use noirc_frontend::{ @@ -98,7 +98,7 @@ impl IrGenerator { ident_def: Option, el_type: &noirc_abi::AbiType, len: u64, - witness: Vec, + witness: &[Witness], ) -> NodeId { let element_type = self.get_object_type_from_abi(el_type); let (v_id, array_idx) = self.new_array(name, element_type, len as u32, ident_def); @@ -125,30 +125,24 @@ impl IrGenerator { struct_name: &str, ident_def: Option, fields: &BTreeMap, - witnesses: BTreeMap>, + witnesses: &BTreeMap>, ) -> Value { let values = vecmap(fields, |(name, field_typ)| { let new_name = format!("{struct_name}.{name}"); match field_typ { noirc_abi::AbiType::Array { length, typ } => { - let v_id = - self.abi_array(&new_name, None, typ, *length, witnesses[&new_name].clone()); + let v_id = self.abi_array(&new_name, None, typ, *length, &witnesses[&new_name]); Value::Node(v_id) } noirc_abi::AbiType::Struct { fields, .. 
} => { let new_name = format!("{struct_name}.{name}"); - self.abi_struct(&new_name, None, fields, witnesses.clone()) + self.abi_struct(&new_name, None, fields, witnesses) } noirc_abi::AbiType::String { length } => { let typ = noirc_abi::AbiType::Integer { sign: noirc_abi::Sign::Unsigned, width: 8 }; - let v_id = self.abi_array( - &new_name, - None, - &typ, - *length, - witnesses[&new_name].clone(), - ); + let v_id = + self.abi_array(&new_name, None, &typ, *length, &witnesses[&new_name]); Value::Node(v_id) } _ => { From 24adcc09aebde9087a6d45d207d95f4c3f907f1e Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 12 Apr 2023 13:48:05 +0100 Subject: [PATCH 05/63] chore: enforce `clippy::semicolon_if_nothing_returned` linting rule (#1139) * chore: warn on `clippy::semicolon_if_nothing_returned` * chore: address warnings --- crates/arena/src/lib.rs | 1 + crates/fm/src/lib.rs | 3 ++- crates/iter-extended/src/lib.rs | 1 + crates/nargo/src/lib.rs | 1 + crates/nargo_cli/src/lib.rs | 1 + crates/noirc_abi/src/lib.rs | 7 ++++--- crates/noirc_driver/src/lib.rs | 1 + crates/noirc_errors/src/lib.rs | 1 + crates/noirc_evaluator/src/lib.rs | 3 ++- .../src/ssa/acir_gen/constraints.rs | 2 +- .../src/ssa/acir_gen/internal_var.rs | 2 +- .../src/ssa/acir_gen/operations/intrinsics.rs | 6 +++--- crates/noirc_evaluator/src/ssa/conditional.rs | 4 ++-- crates/noirc_evaluator/src/ssa/context.rs | 4 ++-- crates/noirc_evaluator/src/ssa/inline.rs | 2 +- crates/noirc_evaluator/src/ssa/integer.rs | 2 +- crates/noirc_evaluator/src/ssa/node.rs | 4 ++-- crates/noirc_evaluator/src/ssa/optimizations.rs | 2 +- crates/noirc_frontend/src/graph/mod.rs | 6 +++--- .../src/hir/def_collector/dc_crate.rs | 8 ++++---- .../src/hir/def_collector/dc_mod.rs | 2 +- .../noirc_frontend/src/hir/resolution/import.rs | 2 +- .../src/hir/resolution/resolver.rs | 16 ++++++++-------- crates/noirc_frontend/src/hir/scope/mod.rs | 8 ++++---- 
crates/noirc_frontend/src/hir/type_check/mod.rs | 6 +++--- crates/noirc_frontend/src/hir_def/types.rs | 10 +++++----- crates/noirc_frontend/src/lexer/lexer.rs | 2 +- crates/noirc_frontend/src/lexer/token.rs | 2 +- crates/noirc_frontend/src/lib.rs | 1 + crates/noirc_frontend/src/node_interner.rs | 2 +- crates/noirc_frontend/src/parser/mod.rs | 2 +- crates/noirc_frontend/src/parser/parser.rs | 6 +++--- crates/wasm/src/lib.rs | 2 ++ 33 files changed, 67 insertions(+), 55 deletions(-) diff --git a/crates/arena/src/lib.rs b/crates/arena/src/lib.rs index 51c73d7f6a8..fc19f44ab6e 100644 --- a/crates/arena/src/lib.rs +++ b/crates/arena/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] // For now we use a wrapper around generational-arena pub use generational_arena::{Arena, Index}; diff --git a/crates/fm/src/lib.rs b/crates/fm/src/lib.rs index 49fac9b0178..cc87129fc0d 100644 --- a/crates/fm/src/lib.rs +++ b/crates/fm/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] mod file_map; mod file_reader; @@ -136,7 +137,7 @@ mod tests { let file_id = fm.add_file(&file_path, FileType::Normal).unwrap(); - assert!(fm.path(file_id).ends_with("foo")) + assert!(fm.path(file_id).ends_with("foo")); } #[test] fn path_resolve_sub_module() { diff --git a/crates/iter-extended/src/lib.rs b/crates/iter-extended/src/lib.rs index a022ad00b9e..aef89b58b30 100644 --- a/crates/iter-extended/src/lib.rs +++ b/crates/iter-extended/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] use std::collections::BTreeMap; diff --git a/crates/nargo/src/lib.rs b/crates/nargo/src/lib.rs index 9e98f9e3581..850deee560f 100644 
--- a/crates/nargo/src/lib.rs +++ b/crates/nargo/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] //! Nargo is the package manager for Noir //! This name was used because it sounds like `cargo` and diff --git a/crates/nargo_cli/src/lib.rs b/crates/nargo_cli/src/lib.rs index 08e8513d45a..a943e580632 100644 --- a/crates/nargo_cli/src/lib.rs +++ b/crates/nargo_cli/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] //! Nargo is the package manager for Noir //! This name was used because it sounds like `cargo` and diff --git a/crates/noirc_abi/src/lib.rs b/crates/noirc_abi/src/lib.rs index 1a5293b160c..dbd935dcde0 100644 --- a/crates/noirc_abi/src/lib.rs +++ b/crates/noirc_abi/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] use std::{collections::BTreeMap, str}; @@ -283,11 +284,11 @@ impl Abi { InputValue::String(string) => { let str_as_fields = string.bytes().map(|byte| FieldElement::from_be_bytes_reduce(&[byte])); - encoded_value.extend(str_as_fields) + encoded_value.extend(str_as_fields); } InputValue::Struct(object) => { for value in object.into_values() { - encoded_value.extend(Self::encode_value(value)?) + encoded_value.extend(Self::encode_value(value)?); } } } @@ -442,6 +443,6 @@ mod test { } // We also decode the return value (we can do this immediately as we know it shares a witness with an input). 
- assert_eq!(return_value.unwrap(), reconstructed_inputs["thing2"]) + assert_eq!(return_value.unwrap(), reconstructed_inputs["thing2"]); } } diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index 04e4a82c6f6..4f1ab7fd482 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] use acvm::Language; use clap::Args; diff --git a/crates/noirc_errors/src/lib.rs b/crates/noirc_errors/src/lib.rs index 9bb1ebaef81..ab154639d13 100644 --- a/crates/noirc_errors/src/lib.rs +++ b/crates/noirc_errors/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] mod position; pub mod reporter; diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 9656c3cdb9f..4c1b05381f5 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -1,6 +1,7 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] mod errors; mod ssa; @@ -253,7 +254,7 @@ impl Evaluator { let new_name = format!("{name}.{inner_name}"); new_fields.insert(new_name, value.clone()); } - self.generate_struct_witnesses(struct_witnesses, &new_fields)? 
+ self.generate_struct_witnesses(struct_witnesses, &new_fields)?; } AbiType::String { length } => { let typ = AbiType::Integer { sign: noirc_abi::Sign::Unsigned, width: 8 }; diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs b/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs index 978bfe49008..8257e0c9f9a 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs @@ -361,7 +361,7 @@ pub(crate) fn bound_constraint_with_offset( 0 => evaluator.push_opcode(AcirOpcode::Arithmetic(aof)), 1 => { let expr = boolean_expr(&aof, evaluator); - evaluator.push_opcode(AcirOpcode::Arithmetic(expr)) + evaluator.push_opcode(AcirOpcode::Arithmetic(expr)); } 2 => { let y = expression_to_witness(boolean_expr(&aof, evaluator), evaluator); diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs index a640c9db601..8e6e16776a9 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs @@ -36,7 +36,7 @@ impl InternalVar { &self.expression } pub(crate) fn set_id(&mut self, id: NodeId) { - self.id = Some(id) + self.id = Some(id); } pub(crate) fn get_id(&self) -> Option { self.id diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs index 73e46c266dc..7d6f7e2c32c 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs @@ -149,7 +149,7 @@ fn prepare_inputs( let mut inputs: Vec = Vec::new(); for argument in arguments { - inputs.extend(resolve_node_id(argument, acir_gen, cfg, evaluator)) + inputs.extend(resolve_node_id(argument, acir_gen, cfg, evaluator)); } inputs } @@ -212,7 +212,7 @@ fn resolve_array( arr_element.set_witness(witness); acir_gen.memory.insert(array.id, i, 
arr_element); - inputs.push(func_input) + inputs.push(func_input); } inputs @@ -329,7 +329,7 @@ fn evaluate_println( fn format_field_string(field: FieldElement) -> String { let mut trimmed_field = field.to_hex().trim_start_matches('0').to_owned(); if trimmed_field.len() % 2 != 0 { - trimmed_field = "0".to_owned() + &trimmed_field + trimmed_field = "0".to_owned() + &trimmed_field; }; "0x".to_owned() + &trimmed_field } diff --git a/crates/noirc_evaluator/src/ssa/conditional.rs b/crates/noirc_evaluator/src/ssa/conditional.rs index 36eb2b41f09..c7a9adc7a02 100644 --- a/crates/noirc_evaluator/src/ssa/conditional.rs +++ b/crates/noirc_evaluator/src/ssa/conditional.rs @@ -855,14 +855,14 @@ impl DecisionTree { && left_arrays.is_empty() && right_arrays.is_empty() => { - candidates.push(Segment::new(left_node, right_node)) + candidates.push(Segment::new(left_node, right_node)); } ( Operation::Store { array_id: left_array, index: left_index, .. }, Operation::Store { array_id: right_array, index: right_index, .. 
}, ) if left_array == right_array && left_index == right_index => { - candidates.push(Segment::new(left_node, right_node)) + candidates.push(Segment::new(left_node, right_node)); } _ => (), } diff --git a/crates/noirc_evaluator/src/ssa/context.rs b/crates/noirc_evaluator/src/ssa/context.rs index 0154b36debc..c7d4dba9799 100644 --- a/crates/noirc_evaluator/src/ssa/context.rs +++ b/crates/noirc_evaluator/src/ssa/context.rs @@ -163,7 +163,7 @@ impl SsaContext { result = format!("{var}"); } if result.is_empty() { - result = format!("unknown {:?}", id.0.into_raw_parts().0) + result = format!("unknown {:?}", id.0.into_raw_parts().0); } result } @@ -250,7 +250,7 @@ impl SsaContext { pub(crate) fn print_instructions(&self, instructions: &[NodeId]) { for id in instructions { - self.print_node(*id) + self.print_node(*id); } } diff --git a/crates/noirc_evaluator/src/ssa/inline.rs b/crates/noirc_evaluator/src/ssa/inline.rs index 08aac4975ff..2cab6018f58 100644 --- a/crates/noirc_evaluator/src/ssa/inline.rs +++ b/crates/noirc_evaluator/src/ssa/inline.rs @@ -241,7 +241,7 @@ fn inline( decision, )?; if result && nested_call { - result = false + result = false; } } Ok(result) diff --git a/crates/noirc_evaluator/src/ssa/integer.rs b/crates/noirc_evaluator/src/ssa/integer.rs index 9a48286f42f..2bdbf80c9e8 100644 --- a/crates/noirc_evaluator/src/ssa/integer.rs +++ b/crates/noirc_evaluator/src/ssa/integer.rs @@ -319,7 +319,7 @@ fn block_overflow( if let Some(r_const) = ctx.get_as_constant(rhs) { let r_type = ctx[rhs].get_type(); if r_const.to_u128() > r_type.bits() as u128 { - ins.mark = Mark::ReplaceWith(ctx.zero_with_type(ins.res_type)) + ins.mark = Mark::ReplaceWith(ctx.zero_with_type(ins.res_type)); } else { let rhs = ctx .get_or_create_const(FieldElement::from(2_i128).pow(&r_const), r_type); diff --git a/crates/noirc_evaluator/src/ssa/node.rs b/crates/noirc_evaluator/src/ssa/node.rs index 215308162d4..8819a96e1c3 100644 --- a/crates/noirc_evaluator/src/ssa/node.rs +++ 
b/crates/noirc_evaluator/src/ssa/node.rs @@ -1225,7 +1225,7 @@ impl Operation { Cond { condition, val_true: lhs, val_false: rhs } => { *condition = f(*condition); *lhs = f(*lhs); - *rhs = f(*rhs) + *rhs = f(*rhs); } Load { index, .. } => *index = f(*index), Store { index, value, predicate, .. } => { @@ -1291,7 +1291,7 @@ impl Operation { Nop => (), Call { func, arguments, .. } => { f(*func); - arguments.iter().copied().for_each(f) + arguments.iter().copied().for_each(f); } Return(values) => values.iter().copied().for_each(f), Result { call_instruction, .. } => { diff --git a/crates/noirc_evaluator/src/ssa/optimizations.rs b/crates/noirc_evaluator/src/ssa/optimizations.rs index d92a04d1fd6..2e9370961fc 100644 --- a/crates/noirc_evaluator/src/ssa/optimizations.rs +++ b/crates/noirc_evaluator/src/ssa/optimizations.rs @@ -469,7 +469,7 @@ fn cse_block_with_anchor( let mut activate_cse = true; // We do not want to replace any print intrinsics as we want them to remain in order and unchanged if let builtin::Opcode::Println(_) = opcode { - activate_cse = false + activate_cse = false; } for arg in args { diff --git a/crates/noirc_frontend/src/graph/mod.rs b/crates/noirc_frontend/src/graph/mod.rs index 1e054ee6699..47426606da1 100644 --- a/crates/noirc_frontend/src/graph/mod.rs +++ b/crates/noirc_frontend/src/graph/mod.rs @@ -120,9 +120,9 @@ impl CrateGraph { return; } for dep in graph[source].dependencies.iter() { - go(graph, visited, res, dep.crate_id) + go(graph, visited, res, dep.crate_id); } - res.push(source) + res.push(source); } } @@ -163,7 +163,7 @@ impl CrateGraph { } impl CrateData { fn add_dep(&mut self, name: CrateName, crate_id: CrateId) { - self.dependencies.push(Dependency { crate_id, name }) + self.dependencies.push(Dependency { crate_id, name }); } } impl std::ops::Index for CrateGraph { diff --git a/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs b/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs index b61376f39ae..55f2464dc62 100644 --- 
a/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -30,7 +30,7 @@ pub struct UnresolvedFunctions { impl UnresolvedFunctions { pub fn push_fn(&mut self, mod_id: LocalModuleId, func_id: FuncId, func: NoirFunction) { - self.functions.push((mod_id, func_id, func)) + self.functions.push((mod_id, func_id, func)); } } @@ -243,7 +243,7 @@ fn collect_impls( } else if typ != Type::Error && crate_id == LOCAL_CRATE { let span = *span; let error = DefCollectorErrorKind::NonStructTypeInImpl { span }; - errors.push(error.into_file_diagnostic(unresolved.file_id)) + errors.push(error.into_file_diagnostic(unresolved.file_id)); } } } @@ -261,7 +261,7 @@ where Errs: IntoIterator, Err: Into, { - errors.extend(new_errors.into_iter().map(|err| err.into().in_file(file))) + errors.extend(new_errors.into_iter().map(|err| err.into().in_file(file))); } /// Separate the globals Vec into two. The first element in the tuple will be the @@ -476,6 +476,6 @@ fn type_check_functions( errors: &mut Vec, ) { for (file, func) in file_func_ids { - extend_errors(errors, file, type_check_func(interner, func)) + extend_errors(errors, file, type_check_func(interner, func)); } } diff --git a/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs b/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs index bee7f1755c8..989d87e0720 100644 --- a/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -37,7 +37,7 @@ pub fn collect_defs( // First resolve the module declarations for decl in ast.module_decls { - collector.parse_module_declaration(context, &decl, crate_id, errors) + collector.parse_module_declaration(context, &decl, crate_id, errors); } collector.collect_submodules(context, crate_id, ast.submodules, file_id, errors); diff --git a/crates/noirc_frontend/src/hir/resolution/import.rs b/crates/noirc_frontend/src/hir/resolution/import.rs index bd8f3e5e634..79176d74afc 100644 
--- a/crates/noirc_frontend/src/hir/resolution/import.rs +++ b/crates/noirc_frontend/src/hir/resolution/import.rs @@ -168,7 +168,7 @@ fn resolve_name_in_module( return Err(PathResolutionError::ExternalContractUsed(segment.clone())); } - current_ns = found_ns + current_ns = found_ns; } Ok(current_ns) diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index e0e282e0e02..cfb354498ab 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -121,7 +121,7 @@ impl<'a> Resolver<'a> { } fn push_err(&mut self, err: ResolverError) { - self.errors.push(err) + self.errors.push(err); } fn current_lambda_index(&self) -> usize { @@ -410,7 +410,7 @@ impl<'a> Resolver<'a> { }); // Fix the generic count so we can continue typechecking - args.resize_with(expected_generic_count, || Type::Error) + args.resize_with(expected_generic_count, || Type::Error); } Type::Struct(struct_type, args) @@ -555,7 +555,7 @@ impl<'a> Resolver<'a> { name: generic.0.contents.clone(), first_span: *first_span, second_span: span, - }) + }); } else { self.generics.push((name, typevar.clone(), span)); } @@ -616,7 +616,7 @@ impl<'a> Resolver<'a> { for (pattern, typ, visibility) in func.parameters().iter().cloned() { if visibility == noirc_abi::AbiVisibility::Public && !self.pub_allowed(func) { - self.push_err(ResolverError::UnnecessaryPub { ident: func.name_ident().clone() }) + self.push_err(ResolverError::UnnecessaryPub { ident: func.name_ident().clone() }); } let pattern = self.resolve_pattern(pattern, DefinitionKind::Local(None)); @@ -634,13 +634,13 @@ impl<'a> Resolver<'a> { && return_type.as_ref() != &Type::Unit && func.def.return_visibility != noirc_abi::AbiVisibility::Public { - self.push_err(ResolverError::NecessaryPub { ident: func.name_ident().clone() }) + self.push_err(ResolverError::NecessaryPub { ident: func.name_ident().clone() }); } if attributes == 
Some(Attribute::Test) && !parameters.is_empty() { self.push_err(ResolverError::TestFunctionHasParameters { span: func.name_ident().span(), - }) + }); } let mut typ = Type::Function(parameter_types, return_type); @@ -1017,7 +1017,7 @@ impl<'a> Resolver<'a> { } Pattern::Mutable(pattern, span) => { if let Some(first_mut) = mutable { - self.push_err(ResolverError::UnnecessaryMut { first_mut, second_mut: span }) + self.push_err(ResolverError::UnnecessaryMut { first_mut, second_mut: span }); } let pattern = self.resolve_pattern_mutable(*pattern, Some(span), definition); @@ -1491,7 +1491,7 @@ mod test { fn path_unresolved_error(err: ResolverError, expected_unresolved_path: &str) { match err { ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { - assert_eq!(name.to_string(), expected_unresolved_path) + assert_eq!(name.to_string(), expected_unresolved_path); } _ => unimplemented!("expected an unresolved path"), } diff --git a/crates/noirc_frontend/src/hir/scope/mod.rs b/crates/noirc_frontend/src/hir/scope/mod.rs index 85b7e2e62e9..1a9087a7408 100644 --- a/crates/noirc_frontend/src/hir/scope/mod.rs +++ b/crates/noirc_frontend/src/hir/scope/mod.rs @@ -90,7 +90,7 @@ impl ScopeTree { } pub fn push_scope(&mut self) { - self.0.push(Scope::default()) + self.0.push(Scope::default()); } pub fn pop_scope(&mut self) -> Scope { @@ -135,7 +135,7 @@ impl ScopeForest { } fn extend_current_scope_tree(&mut self) { - self.current_scope_tree().push_scope() + self.current_scope_tree().push_scope(); } fn remove_scope_tree_extension(&mut self) -> Scope { @@ -145,7 +145,7 @@ impl ScopeForest { /// Starting a function requires a new scope tree, as you do not want the functions scope to /// have access to the scope of the caller pub fn start_function(&mut self) { - self.0.push(ScopeTree::default()) + self.0.push(ScopeTree::default()); } /// Ending a function requires that we removes it's whole tree of scope @@ -157,7 +157,7 @@ impl ScopeForest { /// The beginning of a 
scope always correlates with the start of a block {}. /// This can be in if expressions, for loops, or functions. pub fn start_scope(&mut self) { - self.extend_current_scope_tree() + self.extend_current_scope_tree(); } /// Ends the current scope - this should correspond with the end of a BlockExpression. diff --git a/crates/noirc_frontend/src/hir/type_check/mod.rs b/crates/noirc_frontend/src/hir/type_check/mod.rs index 23907f6b3b4..97b1c71a0bc 100644 --- a/crates/noirc_frontend/src/hir/type_check/mod.rs +++ b/crates/noirc_frontend/src/hir/type_check/mod.rs @@ -101,7 +101,7 @@ impl<'interner> TypeChecker<'interner> { span: Span, make_error: impl FnOnce() -> TypeCheckError, ) { - actual.unify(expected, span, &mut self.errors, make_error) + actual.unify(expected, span, &mut self.errors, make_error); } /// Wrapper of Type::make_subtype_of using self.errors @@ -112,7 +112,7 @@ impl<'interner> TypeChecker<'interner> { span: Span, make_error: impl FnOnce() -> TypeCheckError, ) { - actual.make_subtype_of(expected, span, &mut self.errors, make_error) + actual.make_subtype_of(expected, span, &mut self.errors, make_error); } } @@ -362,7 +362,7 @@ mod test { for ((hir_func, meta), func_id) in func_meta.into_iter().zip(func_ids.clone()) { interner.update_fn(func_id, hir_func); - interner.push_fn_meta(meta, func_id) + interner.push_fn_meta(meta, func_id); } // Type check section diff --git a/crates/noirc_frontend/src/hir_def/types.rs b/crates/noirc_frontend/src/hir_def/types.rs index ff113f83c51..be7d90e089f 100644 --- a/crates/noirc_frontend/src/hir_def/types.rs +++ b/crates/noirc_frontend/src/hir_def/types.rs @@ -126,7 +126,7 @@ pub type Generics = Vec<(TypeVariableId, TypeVariable)>; impl std::hash::Hash for StructType { fn hash(&self, state: &mut H) { - self.id.hash(state) + self.id.hash(state); } } @@ -230,7 +230,7 @@ pub struct Shared(Rc>); impl std::hash::Hash for Shared { fn hash(&self, state: &mut H) { - self.0.borrow().hash(state) + self.0.borrow().hash(state); } } @@ 
-705,7 +705,7 @@ impl Type { pub fn set_comp_time_span(&mut self, new_span: Span) { match self { Type::FieldElement(comptime) | Type::Integer(comptime, _, _) => { - comptime.set_span(new_span) + comptime.set_span(new_span); } Type::PolymorphicInteger(span, binding) => { if let TypeBinding::Bound(binding) = &mut *binding.borrow_mut() { @@ -865,7 +865,7 @@ impl Type { make_error: impl FnOnce() -> TypeCheckError, ) { if let Err(err_span) = self.try_unify(expected, span) { - Self::issue_errors(expected, err_span, errors, make_error) + Self::issue_errors(expected, err_span, errors, make_error); } } @@ -1006,7 +1006,7 @@ impl Type { make_error: impl FnOnce() -> TypeCheckError, ) { if let Err(err_span) = self.is_subtype_of(expected, span) { - Self::issue_errors(expected, err_span, errors, make_error) + Self::issue_errors(expected, err_span, errors, make_error); } } diff --git a/crates/noirc_frontend/src/lexer/lexer.rs b/crates/noirc_frontend/src/lexer/lexer.rs index da4af04c001..c1ff328a3ed 100644 --- a/crates/noirc_frontend/src/lexer/lexer.rs +++ b/crates/noirc_frontend/src/lexer/lexer.rs @@ -216,7 +216,7 @@ impl<'a> Lexer<'a> { // Therefore, the current character which triggered this function will need to be appended let mut word = String::new(); if let Some(init_char) = initial_char { - word.push(init_char) + word.push(init_char); } // Keep checking that we are not at the EOF diff --git a/crates/noirc_frontend/src/lexer/token.rs b/crates/noirc_frontend/src/lexer/token.rs index 73ff6bfdedb..0df1fc39938 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -539,7 +539,7 @@ mod keywords { resolved_token, Token::Keyword(keyword), "Keyword::lookup_keyword returns unexpected Keyword" - ) + ); } } } diff --git a/crates/noirc_frontend/src/lib.rs b/crates/noirc_frontend/src/lib.rs index 2e375e637e0..e7d95d3dfc8 100644 --- a/crates/noirc_frontend/src/lib.rs +++ b/crates/noirc_frontend/src/lib.rs @@ -8,6 +8,7 @@ 
#![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] pub mod ast; pub mod graph; diff --git a/crates/noirc_frontend/src/node_interner.rs b/crates/noirc_frontend/src/node_interner.rs index 4479fe91135..d8ea11ae89c 100644 --- a/crates/noirc_frontend/src/node_interner.rs +++ b/crates/noirc_frontend/src/node_interner.rs @@ -320,7 +320,7 @@ impl NodeInterner { pub fn update_struct(&mut self, type_id: StructId, f: impl FnOnce(&mut StructType)) { let mut value = self.structs.get_mut(&type_id).unwrap().borrow_mut(); - f(&mut value) + f(&mut value); } /// Returns the interned statement corresponding to `stmt_id` diff --git a/crates/noirc_frontend/src/parser/mod.rs b/crates/noirc_frontend/src/parser/mod.rs index fa0903ee659..788c0eec895 100644 --- a/crates/noirc_frontend/src/parser/mod.rs +++ b/crates/noirc_frontend/src/parser/mod.rs @@ -263,7 +263,7 @@ impl ParsedModule { } fn push_global(&mut self, global: LetStatement) { - self.globals.push(global) + self.globals.push(global); } } diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 62824023daf..f4793d06368 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -856,7 +856,7 @@ where emit(ParserError::with_reason( "Arrays must have at least one element".to_owned(), span, - )) + )); } ExpressionKind::array(elements) }) @@ -1135,7 +1135,7 @@ mod test { match expr_to_array(expr) { ArrayLiteral::Standard(elements) => assert_eq!(elements.len(), 5), ArrayLiteral::Repeated { length, .. 
} => { - assert_eq!(length.kind, ExpressionKind::integer(5i128.into())) + assert_eq!(length.kind, ExpressionKind::integer(5i128.into())); } } } @@ -1367,7 +1367,7 @@ mod test { for (src, expected_path_kind) in cases { let path = parse_with(path(), src).unwrap(); - assert_eq!(path.kind, expected_path_kind) + assert_eq!(path.kind, expected_path_kind); } parse_all_failing( diff --git a/crates/wasm/src/lib.rs b/crates/wasm/src/lib.rs index 90a9bf0f173..56995c21df5 100644 --- a/crates/wasm/src/lib.rs +++ b/crates/wasm/src/lib.rs @@ -1,6 +1,8 @@ #![forbid(unsafe_code)] #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] + use gloo_utils::format::JsValueSerdeExt; use log::Level; use serde::{Deserialize, Serialize}; From 753a272cbdf32858e47d2fa4bd6c236521bbb2cf Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 12 Apr 2023 18:57:03 +0100 Subject: [PATCH 06/63] feat: import core logic in cli from `nargo` crate (#1142) --- Cargo.lock | 2 ++ crates/nargo/Cargo.toml | 2 ++ crates/nargo/src/errors.rs | 13 ++++++++ crates/nargo/src/lib.rs | 4 +++ crates/nargo/src/ops/codegen_verifier.rs | 10 ++++++ crates/nargo/src/ops/execute.rs | 20 ++++++++++++ crates/nargo/src/ops/mod.rs | 11 +++++++ crates/nargo/src/ops/preprocess.rs | 31 +++++++++++++++++++ crates/nargo/src/ops/prove.rs | 16 ++++++++++ crates/nargo/src/ops/verify.rs | 17 ++++++++++ .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 1 + crates/nargo_cli/src/cli/compile_cmd.rs | 9 ++++-- crates/nargo_cli/src/cli/execute_cmd.rs | 15 ++------- crates/nargo_cli/src/cli/fs/keys.rs | 9 +++--- crates/nargo_cli/src/cli/fs/program.rs | 4 --- crates/nargo_cli/src/cli/preprocess_cmd.rs | 31 +++---------------- crates/nargo_cli/src/cli/prove_cmd.rs | 8 +++-- crates/nargo_cli/src/cli/test_cmd.rs | 15 +++------ crates/nargo_cli/src/cli/verify_cmd.rs | 14 ++++++--- crates/nargo_cli/src/errors.rs | 6 ++-- 20 files 
changed, 167 insertions(+), 71 deletions(-) create mode 100644 crates/nargo/src/errors.rs create mode 100644 crates/nargo/src/ops/codegen_verifier.rs create mode 100644 crates/nargo/src/ops/execute.rs create mode 100644 crates/nargo/src/ops/mod.rs create mode 100644 crates/nargo/src/ops/preprocess.rs create mode 100644 crates/nargo/src/ops/prove.rs create mode 100644 crates/nargo/src/ops/verify.rs diff --git a/Cargo.lock b/Cargo.lock index b311bc608ca..c4d231f3778 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2244,6 +2244,8 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" name = "nargo" version = "0.3.2" dependencies = [ + "acvm 0.8.0", + "noirc_abi", "rustc_version 0.4.0", "serde", "thiserror", diff --git a/crates/nargo/Cargo.toml b/crates/nargo/Cargo.toml index d24d238ea36..17827c6b60f 100644 --- a/crates/nargo/Cargo.toml +++ b/crates/nargo/Cargo.toml @@ -11,6 +11,8 @@ edition.workspace = true rustc_version = "0.4.0" [dependencies] +acvm.workspace = true +noirc_abi.workspace = true toml.workspace = true serde.workspace = true thiserror.workspace = true diff --git a/crates/nargo/src/errors.rs b/crates/nargo/src/errors.rs new file mode 100644 index 00000000000..59cec4552ef --- /dev/null +++ b/crates/nargo/src/errors.rs @@ -0,0 +1,13 @@ +use acvm::OpcodeResolutionError; +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum NargoError { + /// Error while compiling Noir into ACIR. + #[error("Failed to compile circuit")] + CompilationError, + + /// ACIR circuit solving error + #[error(transparent)] + SolvingError(#[from] OpcodeResolutionError), +} diff --git a/crates/nargo/src/lib.rs b/crates/nargo/src/lib.rs index 850deee560f..94c24714a0a 100644 --- a/crates/nargo/src/lib.rs +++ b/crates/nargo/src/lib.rs @@ -7,4 +7,8 @@ //! This name was used because it sounds like `cargo` and //! Noir Package Manager abbreviated is npm, which is already taken. 
+mod errors; pub mod manifest; +pub mod ops; + +pub use self::errors::NargoError; diff --git a/crates/nargo/src/ops/codegen_verifier.rs b/crates/nargo/src/ops/codegen_verifier.rs new file mode 100644 index 00000000000..ead125699b4 --- /dev/null +++ b/crates/nargo/src/ops/codegen_verifier.rs @@ -0,0 +1,10 @@ +use acvm::SmartContract; + +use crate::NargoError; + +pub fn codegen_verifier( + backend: &impl SmartContract, + verification_key: &[u8], +) -> Result { + Ok(backend.eth_contract_from_vk(verification_key)) +} diff --git a/crates/nargo/src/ops/execute.rs b/crates/nargo/src/ops/execute.rs new file mode 100644 index 00000000000..eb82df60d41 --- /dev/null +++ b/crates/nargo/src/ops/execute.rs @@ -0,0 +1,20 @@ +use acvm::PartialWitnessGenerator; +use acvm::{acir::circuit::Circuit, pwg::block::Blocks}; +use noirc_abi::WitnessMap; + +use crate::NargoError; + +pub fn execute_circuit( + backend: &impl PartialWitnessGenerator, + circuit: Circuit, + mut initial_witness: WitnessMap, +) -> Result { + let mut blocks = Blocks::default(); + let (unresolved_opcodes, oracles) = + backend.solve(&mut initial_witness, &mut blocks, circuit.opcodes)?; + if !unresolved_opcodes.is_empty() || !oracles.is_empty() { + todo!("Add oracle support to nargo execute") + } + + Ok(initial_witness) +} diff --git a/crates/nargo/src/ops/mod.rs b/crates/nargo/src/ops/mod.rs new file mode 100644 index 00000000000..578fc1ebbef --- /dev/null +++ b/crates/nargo/src/ops/mod.rs @@ -0,0 +1,11 @@ +pub use self::codegen_verifier::codegen_verifier; +pub use self::execute::execute_circuit; +pub use self::preprocess::{checksum_acir, preprocess_circuit, PreprocessedData}; +pub use self::prove::prove; +pub use self::verify::verify_proof; + +mod codegen_verifier; +mod execute; +mod preprocess; +mod prove; +mod verify; diff --git a/crates/nargo/src/ops/preprocess.rs b/crates/nargo/src/ops/preprocess.rs new file mode 100644 index 00000000000..3046f1e7dd0 --- /dev/null +++ b/crates/nargo/src/ops/preprocess.rs @@ -0,0 
+1,31 @@ +use acvm::acir::circuit::Circuit; +use acvm::{checksum_constraint_system, ProofSystemCompiler}; + +use crate::NargoError; + +pub fn checksum_acir(circuit: &Circuit) -> [u8; 4] { + checksum_constraint_system(circuit).to_be_bytes() +} + +/// The result of preprocessing the ACIR bytecode. +/// The proving, verification key and circuit are backend specific. +/// +/// The circuit is backend specific because at the end of compilation +/// an optimization pass is applied which will transform the bytecode into +/// a format that the backend will accept; removing unsupported gates +/// is one example of this. +pub struct PreprocessedData { + pub proving_key: Vec, + pub verification_key: Vec, + pub program_checksum: [u8; 4], +} + +pub fn preprocess_circuit( + backend: &impl ProofSystemCompiler, + circuit: &Circuit, +) -> Result { + let (proving_key, verification_key) = backend.preprocess(circuit); + let program_checksum = checksum_acir(circuit); + + Ok(PreprocessedData { proving_key, verification_key, program_checksum }) +} diff --git a/crates/nargo/src/ops/prove.rs b/crates/nargo/src/ops/prove.rs new file mode 100644 index 00000000000..fc7ddcd4cb6 --- /dev/null +++ b/crates/nargo/src/ops/prove.rs @@ -0,0 +1,16 @@ +use acvm::acir::circuit::Circuit; +use acvm::ProofSystemCompiler; +use noirc_abi::WitnessMap; + +use crate::NargoError; + +pub fn prove( + backend: &impl ProofSystemCompiler, + circuit: &Circuit, + solved_witness: WitnessMap, + proving_key: &[u8], +) -> Result, NargoError> { + let proof = backend.prove_with_pk(circuit, solved_witness, proving_key); + + Ok(proof) +} diff --git a/crates/nargo/src/ops/verify.rs b/crates/nargo/src/ops/verify.rs new file mode 100644 index 00000000000..5109d2291db --- /dev/null +++ b/crates/nargo/src/ops/verify.rs @@ -0,0 +1,17 @@ +use acvm::acir::circuit::Circuit; +use acvm::ProofSystemCompiler; +use noirc_abi::WitnessMap; + +use crate::NargoError; + +pub fn verify_proof( + backend: &impl ProofSystemCompiler, + circuit: 
&Circuit, + proof: &[u8], + public_inputs: WitnessMap, + verification_key: &[u8], +) -> Result { + let valid_proof = backend.verify_with_vk(proof, public_inputs, circuit, verification_key); + + Ok(valid_proof) +} diff --git a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs index b05626e4398..a9e233d3c10 100644 --- a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -15,6 +15,7 @@ pub(crate) struct CodegenVerifierCommand { pub(crate) fn run(args: CodegenVerifierCommand, config: NargoConfig) -> Result<(), CliError> { let compiled_program = compile_circuit(&config.program_dir, &args.compile_options)?; + // TODO: replace with `nargo::ops::codegen_verifier` let backend = crate::backends::ConcreteBackend; #[allow(deprecated)] let smart_contract_string = backend.eth_contract_from_cs(compiled_program.circuit); diff --git a/crates/nargo_cli/src/cli/compile_cmd.rs b/crates/nargo_cli/src/cli/compile_cmd.rs index d259006c909..7691c3ebc62 100644 --- a/crates/nargo_cli/src/cli/compile_cmd.rs +++ b/crates/nargo_cli/src/cli/compile_cmd.rs @@ -1,4 +1,5 @@ use acvm::ProofSystemCompiler; +use nargo::ops::preprocess_circuit; use noirc_driver::{CompileOptions, CompiledContract, CompiledProgram, Driver}; use std::path::Path; @@ -8,7 +9,7 @@ use crate::resolver::DependencyResolutionError; use crate::{constants::TARGET_DIR, errors::CliError, resolver::Resolver}; use super::fs::program::{save_contract_to_file, save_program_to_file}; -use super::preprocess_cmd::{save_preprocess_data, PreprocessedData}; +use super::preprocess_cmd::save_preprocess_data; use super::NargoConfig; /// Compile the program and its secret execution trace into ACIR format @@ -54,7 +55,8 @@ fn save_and_preprocess_program( ) -> Result<(), CliError> { save_program_to_file(compiled_program, circuit_name, circuit_dir); - let preprocessed_data = PreprocessedData::from(&compiled_program.circuit); + let backend = 
crate::backends::ConcreteBackend; + let preprocessed_data = preprocess_circuit(&backend, &compiled_program.circuit)?; save_preprocess_data(&preprocessed_data, circuit_name, circuit_dir)?; Ok(()) } @@ -74,9 +76,10 @@ fn save_and_preprocess_contract( // Preprocess all contract data // We are patching the verification key in our contract functions // so when we save it to disk, the ABI will have the verification key. + let backend = crate::backends::ConcreteBackend; let mut contract_preprocess_data = Vec::new(); for contract_function in &mut compiled_contract.functions { - let preprocessed_data = PreprocessedData::from(&contract_function.bytecode); + let preprocessed_data = preprocess_circuit(&backend, &contract_function.bytecode)?; contract_function.verification_key = Some(preprocessed_data.verification_key.clone()); contract_preprocess_data.push(preprocessed_data); } diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs b/crates/nargo_cli/src/cli/execute_cmd.rs index 4a3d89585bf..e7ecdc543e3 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -1,7 +1,5 @@ use std::path::Path; -use acvm::pwg::block::Blocks; -use acvm::PartialWitnessGenerator; use clap::Args; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::{InputMap, WitnessMap}; @@ -65,18 +63,11 @@ pub(crate) fn execute_program( compiled_program: &CompiledProgram, inputs_map: &InputMap, ) -> Result { - let mut solved_witness = compiled_program.abi.encode(inputs_map, None)?; + let initial_witness = compiled_program.abi.encode(inputs_map, None)?; let backend = crate::backends::ConcreteBackend; - let mut blocks = Blocks::default(); - let (unresolved_opcodes, oracles) = backend.solve( - &mut solved_witness, - &mut blocks, - compiled_program.circuit.opcodes.clone(), - )?; - if !unresolved_opcodes.is_empty() || !oracles.is_empty() { - todo!("Add oracle support to nargo execute") - } + let solved_witness = + nargo::ops::execute_circuit(&backend, 
compiled_program.circuit.clone(), initial_witness)?; Ok(solved_witness) } diff --git a/crates/nargo_cli/src/cli/fs/keys.rs b/crates/nargo_cli/src/cli/fs/keys.rs index bbb84876913..32920e51406 100644 --- a/crates/nargo_cli/src/cli/fs/keys.rs +++ b/crates/nargo_cli/src/cli/fs/keys.rs @@ -1,9 +1,10 @@ -use super::{create_named_dir, load_hex_data, program::checksum_acir, write_to_file}; +use super::{create_named_dir, load_hex_data, write_to_file}; use crate::{ constants::{ACIR_CHECKSUM, PK_EXT, VK_EXT}, errors::CliError, }; use acvm::acir::circuit::Circuit; +use nargo::ops::checksum_acir; use std::path::{Path, PathBuf}; pub(crate) fn save_key_to_dir>( @@ -61,11 +62,9 @@ pub(crate) fn fetch_pk_and_vk>( #[cfg(test)] mod tests { use super::fetch_pk_and_vk; - use crate::cli::fs::{ - keys::save_key_to_dir, - program::{checksum_acir, save_acir_checksum_to_dir}, - }; + use crate::cli::fs::{keys::save_key_to_dir, program::save_acir_checksum_to_dir}; use acvm::acir::circuit::Circuit; + use nargo::ops::checksum_acir; use tempdir::TempDir; #[test] diff --git a/crates/nargo_cli/src/cli/fs/program.rs b/crates/nargo_cli/src/cli/fs/program.rs index f327c81f609..b01455e2833 100644 --- a/crates/nargo_cli/src/cli/fs/program.rs +++ b/crates/nargo_cli/src/cli/fs/program.rs @@ -1,6 +1,5 @@ use std::path::{Path, PathBuf}; -use acvm::{acir::circuit::Circuit, checksum_constraint_system}; use noirc_driver::{CompiledContract, CompiledProgram}; use crate::{constants::ACIR_CHECKSUM, errors::CliError}; @@ -34,9 +33,6 @@ fn save_build_artifact_to_file, T: ?Sized + serde::Serialize>( circuit_path } -pub(crate) fn checksum_acir(circuit: &Circuit) -> [u8; 4] { - checksum_constraint_system(circuit).to_be_bytes() -} pub(crate) fn save_acir_checksum_to_dir>( acir_checksum: [u8; 4], hash_name: &str, diff --git a/crates/nargo_cli/src/cli/preprocess_cmd.rs b/crates/nargo_cli/src/cli/preprocess_cmd.rs index 05f6edd60a4..95a2ab849e9 100644 --- a/crates/nargo_cli/src/cli/preprocess_cmd.rs +++ 
b/crates/nargo_cli/src/cli/preprocess_cmd.rs @@ -1,5 +1,4 @@ -use acvm::acir::circuit::Circuit; -use acvm::ProofSystemCompiler; +use nargo::ops::{preprocess_circuit, PreprocessedData}; use std::path::{Path, PathBuf}; use clap::Args; @@ -8,7 +7,7 @@ use crate::{constants::TARGET_DIR, errors::CliError}; use super::fs::{ keys::save_key_to_dir, - program::{checksum_acir, read_program_from_file, save_acir_checksum_to_dir}, + program::{read_program_from_file, save_acir_checksum_to_dir}, }; use super::NargoConfig; @@ -23,33 +22,13 @@ pub(crate) fn run(args: PreprocessCommand, config: NargoConfig) -> Result<(), Cl let circuit_dir = config.program_dir.join(TARGET_DIR); let program = read_program_from_file(circuit_dir.join(&args.artifact_name))?; - let preprocess_data = PreprocessedData::from(&program.circuit); + + let backend = crate::backends::ConcreteBackend; + let preprocess_data = preprocess_circuit(&backend, &program.circuit)?; save_preprocess_data(&preprocess_data, &args.artifact_name, circuit_dir)?; Ok(()) } -/// The result of preprocessing the ACIR bytecode. -/// The proving, verification key and circuit are backend specific. -/// -/// The circuit is backend specific because at the end of compilation -/// an optimization pass is applied which will transform the bytecode into -/// a format that the backend will accept; removing unsupported gates -/// is one example of this. 
-pub(crate) struct PreprocessedData { - pub(crate) proving_key: Vec, - pub(crate) verification_key: Vec, - pub(crate) program_checksum: [u8; 4], -} - -impl From<&Circuit> for PreprocessedData { - fn from(circuit: &Circuit) -> Self { - let backend = crate::backends::ConcreteBackend; - let (proving_key, verification_key) = backend.preprocess(circuit); - let program_checksum = checksum_acir(circuit); - - PreprocessedData { proving_key, verification_key, program_checksum } - } -} pub(crate) fn save_preprocess_data>( data: &PreprocessedData, diff --git a/crates/nargo_cli/src/cli/prove_cmd.rs b/crates/nargo_cli/src/cli/prove_cmd.rs index 5f0f9d831cf..720110a6758 100644 --- a/crates/nargo_cli/src/cli/prove_cmd.rs +++ b/crates/nargo_cli/src/cli/prove_cmd.rs @@ -1,7 +1,7 @@ use std::path::{Path, PathBuf}; -use acvm::ProofSystemCompiler; use clap::Args; +use nargo::ops::{preprocess_circuit, PreprocessedData}; use noirc_abi::input_parser::Format; use noirc_driver::CompileOptions; @@ -75,7 +75,8 @@ pub(crate) fn prove_with_path>( super::compile_cmd::compile_circuit(program_dir.as_ref(), compile_options)?; let backend = crate::backends::ConcreteBackend; - let (proving_key, verification_key) = backend.preprocess(&compiled_program.circuit); + let PreprocessedData { proving_key, verification_key, .. 
} = + preprocess_circuit(&backend, &compiled_program.circuit)?; (compiled_program, proving_key, verification_key) } }; @@ -103,7 +104,8 @@ pub(crate) fn prove_with_path>( )?; let backend = crate::backends::ConcreteBackend; - let proof = backend.prove_with_pk(&compiled_program.circuit, solved_witness, &proving_key); + let proof = + nargo::ops::prove(&backend, &compiled_program.circuit, solved_witness, &proving_key)?; if check_proof { let no_proof_name = "".into(); diff --git a/crates/nargo_cli/src/cli/test_cmd.rs b/crates/nargo_cli/src/cli/test_cmd.rs index 665591f188d..d168e6c39ca 100644 --- a/crates/nargo_cli/src/cli/test_cmd.rs +++ b/crates/nargo_cli/src/cli/test_cmd.rs @@ -1,7 +1,8 @@ use std::{collections::BTreeMap, io::Write, path::Path}; -use acvm::{pwg::block::Blocks, PartialWitnessGenerator, ProofSystemCompiler}; +use acvm::ProofSystemCompiler; use clap::Args; +use nargo::ops::execute_circuit; use noirc_driver::{CompileOptions, Driver}; use noirc_frontend::node_interner::FuncId; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; @@ -79,23 +80,15 @@ fn run_test( config: &CompileOptions, ) -> Result<(), CliError> { let backend = crate::backends::ConcreteBackend; - let mut blocks = Blocks::default(); let program = driver .compile_no_check(config, main) .map_err(|_| CliError::Generic(format!("Test '{test_name}' failed to compile")))?; - let mut solved_witness = BTreeMap::new(); - // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, // otherwise constraints involving these expressions will not error. 
- match backend.solve(&mut solved_witness, &mut blocks, program.circuit.opcodes) { - Ok((unresolved_opcodes, oracles)) => { - if !unresolved_opcodes.is_empty() || !oracles.is_empty() { - todo!("Add oracle support to nargo test") - } - Ok(()) - } + match execute_circuit(&backend, program.circuit, BTreeMap::new()) { + Ok(_) => Ok(()), Err(error) => { let writer = StandardStream::stderr(ColorChoice::Always); let mut writer = writer.lock(); diff --git a/crates/nargo_cli/src/cli/verify_cmd.rs b/crates/nargo_cli/src/cli/verify_cmd.rs index 021b10c9404..03de9a7e33e 100644 --- a/crates/nargo_cli/src/cli/verify_cmd.rs +++ b/crates/nargo_cli/src/cli/verify_cmd.rs @@ -7,8 +7,8 @@ use crate::{ constants::{PROOFS_DIR, PROOF_EXT, TARGET_DIR, VERIFIER_INPUT_FILE}, errors::CliError, }; -use acvm::ProofSystemCompiler; use clap::Args; +use nargo::ops::{preprocess_circuit, PreprocessedData}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_driver::{CompileOptions, CompiledProgram}; use std::path::{Path, PathBuf}; @@ -55,7 +55,8 @@ fn verify_with_path>( let compiled_program = compile_circuit(program_dir.as_ref(), &compile_options)?; let backend = crate::backends::ConcreteBackend; - let (_, verification_key) = backend.preprocess(&compiled_program.circuit); + let PreprocessedData { verification_key, .. 
} = + preprocess_circuit(&backend, &compiled_program.circuit)?; (compiled_program, verification_key) } }; @@ -87,8 +88,13 @@ pub(crate) fn verify_proof( let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; let backend = crate::backends::ConcreteBackend; - let valid_proof = - backend.verify_with_vk(proof, public_inputs, &compiled_program.circuit, verification_key); + let valid_proof = nargo::ops::verify_proof( + &backend, + &compiled_program.circuit, + proof, + public_inputs, + verification_key, + )?; if valid_proof { Ok(()) diff --git a/crates/nargo_cli/src/errors.rs b/crates/nargo_cli/src/errors.rs index 1fd86818852..1561a5033fa 100644 --- a/crates/nargo_cli/src/errors.rs +++ b/crates/nargo_cli/src/errors.rs @@ -1,5 +1,5 @@ -use acvm::OpcodeResolutionError; use hex::FromHexError; +use nargo::NargoError; use noirc_abi::errors::{AbiError, InputParserError}; use std::path::PathBuf; use thiserror::Error; @@ -40,7 +40,7 @@ pub(crate) enum CliError { #[error(transparent)] AbiError(#[from] AbiError), - /// ACIR circuit solving error + /// Error from Nargo #[error(transparent)] - SolvingError(#[from] OpcodeResolutionError), + NargoError(#[from] NargoError), } From 7528f59d10dba5a56b9fa7cf979fdc93cacacb9b Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 13 Apr 2023 10:47:56 +0100 Subject: [PATCH 07/63] feat(nargo)!: define preprocessed artifacts for programs/contracts (#1126) * feat(nargo): define generic preprocessed artifacts for programs/contracts * chore: wrap preprocessing functions in `Result<_, NargoError>` * fix: add missing handling of `NargoError` * chore: rename `prove` to `prove_execution` --- Cargo.lock | 2 + crates/nargo/Cargo.toml | 2 + crates/nargo/src/artifacts/contract.rs | 41 +++++++ crates/nargo/src/artifacts/mod.rs | 31 ++++++ crates/nargo/src/artifacts/program.rs | 23 ++++ crates/nargo/src/lib.rs | 1 + crates/nargo/src/ops/mod.rs | 4 +- crates/nargo/src/ops/preprocess.rs | 75 
+++++++++---- crates/nargo/src/ops/prove.rs | 2 +- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 12 +- crates/nargo_cli/src/cli/compile_cmd.rs | 105 ++++-------------- crates/nargo_cli/src/cli/execute_cmd.rs | 11 +- crates/nargo_cli/src/cli/fs/keys.rs | 88 --------------- crates/nargo_cli/src/cli/fs/mod.rs | 1 - crates/nargo_cli/src/cli/fs/program.rs | 21 +--- crates/nargo_cli/src/cli/gates_cmd.rs | 5 +- crates/nargo_cli/src/cli/mod.rs | 3 - crates/nargo_cli/src/cli/preprocess_cmd.rs | 46 -------- crates/nargo_cli/src/cli/print_acir_cmd.rs | 4 +- crates/nargo_cli/src/cli/prove_cmd.rs | 48 ++++---- crates/nargo_cli/src/cli/verify_cmd.rs | 39 +++---- crates/nargo_cli/src/constants.rs | 6 - crates/nargo_cli/src/errors.rs | 3 +- crates/nargo_cli/src/preprocess.rs | 57 ++++++++++ crates/noirc_driver/src/contract.rs | 17 +-- crates/noirc_driver/src/lib.rs | 24 ++-- 26 files changed, 313 insertions(+), 358 deletions(-) create mode 100644 crates/nargo/src/artifacts/contract.rs create mode 100644 crates/nargo/src/artifacts/mod.rs create mode 100644 crates/nargo/src/artifacts/program.rs delete mode 100644 crates/nargo_cli/src/cli/fs/keys.rs delete mode 100644 crates/nargo_cli/src/cli/preprocess_cmd.rs create mode 100644 crates/nargo_cli/src/preprocess.rs diff --git a/Cargo.lock b/Cargo.lock index c4d231f3778..274eac0bce2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2245,7 +2245,9 @@ name = "nargo" version = "0.3.2" dependencies = [ "acvm 0.8.0", + "iter-extended", "noirc_abi", + "noirc_driver", "rustc_version 0.4.0", "serde", "thiserror", diff --git a/crates/nargo/Cargo.toml b/crates/nargo/Cargo.toml index 17827c6b60f..430d926cc9e 100644 --- a/crates/nargo/Cargo.toml +++ b/crates/nargo/Cargo.toml @@ -13,6 +13,8 @@ rustc_version = "0.4.0" [dependencies] acvm.workspace = true noirc_abi.workspace = true +noirc_driver.workspace = true +iter-extended.workspace = true toml.workspace = true serde.workspace = true thiserror.workspace = true diff --git 
a/crates/nargo/src/artifacts/contract.rs b/crates/nargo/src/artifacts/contract.rs new file mode 100644 index 00000000000..95f1ce9576d --- /dev/null +++ b/crates/nargo/src/artifacts/contract.rs @@ -0,0 +1,41 @@ +use acvm::acir::circuit::Circuit; +use noirc_abi::Abi; +use noirc_driver::ContractFunctionType; +use serde::{Deserialize, Serialize}; + +/// `PreprocessedContract` represents a Noir contract which has been preprocessed by a particular backend proving system. +/// +/// This differs from a generic Noir contract artifact in that: +/// - The ACIR bytecode has had an optimization pass applied to tailor it for the backend. +/// - Proving and verification keys have been pregenerated based on this ACIR. +#[derive(Serialize, Deserialize)] +pub struct PreprocessedContract { + /// The name of the contract. + pub name: String, + /// The identifier of the proving backend which this contract has been compiled for. + pub backend: String, + /// Each of the contract's functions are compiled into a separate program stored in this `Vec`. + pub functions: Vec, +} + +/// Each function in the contract will be compiled as a separate noir program. +/// +/// A contract function unlike a regular Noir program however can have additional properties. +/// One of these being a function type. +#[derive(Debug, Serialize, Deserialize)] +pub struct PreprocessedContractFunction { + pub name: String, + + pub function_type: ContractFunctionType, + + pub abi: Abi, + + #[serde( + serialize_with = "super::serialize_circuit", + deserialize_with = "super::deserialize_circuit" + )] + pub bytecode: Circuit, + + pub proving_key: Vec, + pub verification_key: Vec, +} diff --git a/crates/nargo/src/artifacts/mod.rs b/crates/nargo/src/artifacts/mod.rs new file mode 100644 index 00000000000..400254bfb0d --- /dev/null +++ b/crates/nargo/src/artifacts/mod.rs @@ -0,0 +1,31 @@ +//! This module defines the structure of Nargo's different compilation artifacts. +//! +//! 
These artifacts are intended to remain independent of any applications being built on top of Noir. +//! Should any projects require/desire a different artifact format, it's expected that they will write a transformer +//! to generate them using these artifacts as a starting point. + +use acvm::acir::circuit::Circuit; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +pub mod contract; +pub mod program; + +// TODO: move these down into ACVM. +fn serialize_circuit(circuit: &Circuit, s: S) -> Result +where + S: Serializer, +{ + let mut circuit_bytes: Vec = Vec::new(); + circuit.write(&mut circuit_bytes).unwrap(); + + circuit_bytes.serialize(s) +} + +fn deserialize_circuit<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let circuit_bytes = Vec::::deserialize(deserializer)?; + let circuit = Circuit::read(&*circuit_bytes).unwrap(); + Ok(circuit) +} diff --git a/crates/nargo/src/artifacts/program.rs b/crates/nargo/src/artifacts/program.rs new file mode 100644 index 00000000000..288a5dba99b --- /dev/null +++ b/crates/nargo/src/artifacts/program.rs @@ -0,0 +1,23 @@ +use acvm::acir::circuit::Circuit; +use noirc_abi::Abi; +use serde::{Deserialize, Serialize}; + +/// `PreprocessedProgram` represents a Noir program which has been preprocessed by a particular backend proving system. +/// +/// This differs from a generic Noir program artifact in that: +/// - The ACIR bytecode has had an optimization pass applied to tailor it for the backend. +/// - Proving and verification keys have been pregenerated based on this ACIR. 
+#[derive(Serialize, Deserialize, Debug)] +pub struct PreprocessedProgram { + pub backend: String, + pub abi: Abi, + + #[serde( + serialize_with = "super::serialize_circuit", + deserialize_with = "super::deserialize_circuit" + )] + pub bytecode: Circuit, + + pub proving_key: Vec, + pub verification_key: Vec, +} diff --git a/crates/nargo/src/lib.rs b/crates/nargo/src/lib.rs index 94c24714a0a..24605de7849 100644 --- a/crates/nargo/src/lib.rs +++ b/crates/nargo/src/lib.rs @@ -7,6 +7,7 @@ //! This name was used because it sounds like `cargo` and //! Noir Package Manager abbreviated is npm, which is already taken. +pub mod artifacts; mod errors; pub mod manifest; pub mod ops; diff --git a/crates/nargo/src/ops/mod.rs b/crates/nargo/src/ops/mod.rs index 578fc1ebbef..5d1f096ecf4 100644 --- a/crates/nargo/src/ops/mod.rs +++ b/crates/nargo/src/ops/mod.rs @@ -1,7 +1,7 @@ pub use self::codegen_verifier::codegen_verifier; pub use self::execute::execute_circuit; -pub use self::preprocess::{checksum_acir, preprocess_circuit, PreprocessedData}; -pub use self::prove::prove; +pub use self::preprocess::{preprocess_contract, preprocess_program}; +pub use self::prove::prove_execution; pub use self::verify::verify_proof; mod codegen_verifier; diff --git a/crates/nargo/src/ops/preprocess.rs b/crates/nargo/src/ops/preprocess.rs index 3046f1e7dd0..f8d4eb5a825 100644 --- a/crates/nargo/src/ops/preprocess.rs +++ b/crates/nargo/src/ops/preprocess.rs @@ -1,31 +1,60 @@ -use acvm::acir::circuit::Circuit; -use acvm::{checksum_constraint_system, ProofSystemCompiler}; +use acvm::ProofSystemCompiler; +use iter_extended::vecmap; +use noirc_driver::{CompiledContract, CompiledProgram}; -use crate::NargoError; +use crate::{ + artifacts::{ + contract::{PreprocessedContract, PreprocessedContractFunction}, + program::PreprocessedProgram, + }, + NargoError, +}; -pub fn checksum_acir(circuit: &Circuit) -> [u8; 4] { - checksum_constraint_system(circuit).to_be_bytes() -} +// TODO: pull this from backend. 
+const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; + +pub fn preprocess_program( + backend: &impl ProofSystemCompiler, + compiled_program: CompiledProgram, +) -> Result { + // TODO: currently `compiled_program`'s bytecode is already optimized for the backend. + // In future we'll need to apply those optimizations here. + let optimized_bytecode = compiled_program.circuit; + let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode); -/// The result of preprocessing the ACIR bytecode. -/// The proving, verification key and circuit are backend specific. -/// -/// The circuit is backend specific because at the end of compilation -/// an optimization pass is applied which will transform the bytecode into -/// a format that the backend will accept; removing unsupported gates -/// is one example of this. -pub struct PreprocessedData { - pub proving_key: Vec, - pub verification_key: Vec, - pub program_checksum: [u8; 4], + Ok(PreprocessedProgram { + backend: String::from(BACKEND_IDENTIFIER), + abi: compiled_program.abi, + bytecode: optimized_bytecode, + proving_key, + verification_key, + }) } -pub fn preprocess_circuit( +pub fn preprocess_contract( backend: &impl ProofSystemCompiler, - circuit: &Circuit, -) -> Result { - let (proving_key, verification_key) = backend.preprocess(circuit); - let program_checksum = checksum_acir(circuit); + compiled_contract: CompiledContract, +) -> Result { + let preprocessed_contract_functions = vecmap(compiled_contract.functions, |func| { + // TODO: currently `func`'s bytecode is already optimized for the backend. + // In future we'll need to apply those optimizations here. 
+ let optimized_bytecode = func.bytecode; + let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode); + + PreprocessedContractFunction { + name: func.name, + function_type: func.function_type, + abi: func.abi, + + bytecode: optimized_bytecode, + proving_key, + verification_key, + } + }); - Ok(PreprocessedData { proving_key, verification_key, program_checksum }) + Ok(PreprocessedContract { + name: compiled_contract.name, + backend: String::from(BACKEND_IDENTIFIER), + functions: preprocessed_contract_functions, + }) } diff --git a/crates/nargo/src/ops/prove.rs b/crates/nargo/src/ops/prove.rs index fc7ddcd4cb6..376220a8a74 100644 --- a/crates/nargo/src/ops/prove.rs +++ b/crates/nargo/src/ops/prove.rs @@ -4,7 +4,7 @@ use noirc_abi::WitnessMap; use crate::NargoError; -pub fn prove( +pub fn prove_execution( backend: &impl ProofSystemCompiler, circuit: &Circuit, solved_witness: WitnessMap, diff --git a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs index a9e233d3c10..3707214102e 100644 --- a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -1,8 +1,8 @@ use super::fs::{create_named_dir, write_to_file}; use super::NargoConfig; use crate::{cli::compile_cmd::compile_circuit, constants::CONTRACT_DIR, errors::CliError}; -use acvm::SmartContract; use clap::Args; +use nargo::ops::{codegen_verifier, preprocess_program}; use noirc_driver::CompileOptions; /// Generates a Solidity verifier smart contract for the program @@ -13,12 +13,12 @@ pub(crate) struct CodegenVerifierCommand { } pub(crate) fn run(args: CodegenVerifierCommand, config: NargoConfig) -> Result<(), CliError> { - let compiled_program = compile_circuit(&config.program_dir, &args.compile_options)?; - - // TODO: replace with `nargo::ops::codegen_verifier` let backend = crate::backends::ConcreteBackend; - #[allow(deprecated)] - let smart_contract_string = 
backend.eth_contract_from_cs(compiled_program.circuit); + + let compiled_program = compile_circuit(&backend, &config.program_dir, &args.compile_options)?; + let preprocessed_program = preprocess_program(&backend, compiled_program)?; + + let smart_contract_string = codegen_verifier(&backend, &preprocessed_program.verification_key)?; let contract_dir = config.program_dir.join(CONTRACT_DIR); create_named_dir(&contract_dir, "contract"); diff --git a/crates/nargo_cli/src/cli/compile_cmd.rs b/crates/nargo_cli/src/cli/compile_cmd.rs index 7691c3ebc62..0c68de9d58e 100644 --- a/crates/nargo_cli/src/cli/compile_cmd.rs +++ b/crates/nargo_cli/src/cli/compile_cmd.rs @@ -1,15 +1,16 @@ use acvm::ProofSystemCompiler; -use nargo::ops::preprocess_circuit; -use noirc_driver::{CompileOptions, CompiledContract, CompiledProgram, Driver}; +use iter_extended::try_vecmap; +use noirc_driver::{CompileOptions, CompiledProgram, Driver}; use std::path::Path; use clap::Args; +use nargo::ops::{preprocess_contract, preprocess_program}; + use crate::resolver::DependencyResolutionError; use crate::{constants::TARGET_DIR, errors::CliError, resolver::Resolver}; use super::fs::program::{save_contract_to_file, save_program_to_file}; -use super::preprocess_cmd::save_preprocess_data; use super::NargoConfig; /// Compile the program and its secret execution trace into ACIR format @@ -29,99 +30,39 @@ pub(crate) struct CompileCommand { pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { let circuit_dir = config.program_dir.join(TARGET_DIR); + let backend = crate::backends::ConcreteBackend; + // If contracts is set we're compiling every function in a 'contract' rather than just 'main'. 
if args.contracts { - let mut driver = setup_driver(&config.program_dir)?; - let mut compiled_contracts = driver + let mut driver = setup_driver(&backend, &config.program_dir)?; + let compiled_contracts = driver .compile_contracts(&args.compile_options) .map_err(|_| CliError::CompilationError)?; - save_and_preprocess_contract(&mut compiled_contracts, &args.circuit_name, &circuit_dir) + let preprocessed_contracts = + try_vecmap(compiled_contracts, |contract| preprocess_contract(&backend, contract))?; + for contract in preprocessed_contracts { + save_contract_to_file(&contract, &args.circuit_name, &circuit_dir); + } } else { - let program = compile_circuit(&config.program_dir, &args.compile_options)?; - save_and_preprocess_program(&program, &args.circuit_name, &circuit_dir) + let program = compile_circuit(&backend, &config.program_dir, &args.compile_options)?; + let preprocessed_program = preprocess_program(&backend, program)?; + save_program_to_file(&preprocessed_program, &args.circuit_name, circuit_dir); } -} - -fn setup_driver(program_dir: &Path) -> Result { - let backend = crate::backends::ConcreteBackend; - Resolver::resolve_root_manifest(program_dir, backend.np_language()) -} - -/// Save a program to disk along with proving and verification keys. -fn save_and_preprocess_program( - compiled_program: &CompiledProgram, - circuit_name: &str, - circuit_dir: &Path, -) -> Result<(), CliError> { - save_program_to_file(compiled_program, circuit_name, circuit_dir); - - let backend = crate::backends::ConcreteBackend; - let preprocessed_data = preprocess_circuit(&backend, &compiled_program.circuit)?; - save_preprocess_data(&preprocessed_data, circuit_name, circuit_dir)?; Ok(()) } -/// Save a contract to disk along with proving and verification keys. -/// - The contract ABI is saved as one file, which contains all of the -/// functions defined in the contract. 
-/// - The proving and verification keys are namespaced since the file -/// could contain multiple contracts with the same name. The verification key is saved inside -/// of the ABI. -fn save_and_preprocess_contract( - compiled_contracts: &mut [CompiledContract], - circuit_name: &str, - circuit_dir: &Path, -) -> Result<(), CliError> { - for compiled_contract in compiled_contracts { - // Preprocess all contract data - // We are patching the verification key in our contract functions - // so when we save it to disk, the ABI will have the verification key. - let backend = crate::backends::ConcreteBackend; - let mut contract_preprocess_data = Vec::new(); - for contract_function in &mut compiled_contract.functions { - let preprocessed_data = preprocess_circuit(&backend, &contract_function.bytecode)?; - contract_function.verification_key = Some(preprocessed_data.verification_key.clone()); - contract_preprocess_data.push(preprocessed_data); - } - - // Unique identifier for a contract. - let contract_id = format!("{}-{}", circuit_name, &compiled_contract.name); - - // Save contract ABI to file using the contract ID. - // This includes the verification keys for each contract function. - save_contract_to_file(compiled_contract, &contract_id, circuit_dir); - - // Save preprocessed data to disk - // - // TODO: This also includes the verification key, for now we save it in twice - // TODO, once in ABI and once to disk as we did before. - // TODO: A possible fix is to use optional fields in PreprocessedData - // TODO struct. Then make VK None before saving so it is not saved to disk - for (contract_function, preprocessed_data) in - compiled_contract.functions.iter().zip(contract_preprocess_data) - { - // Create a name which uniquely identifies this contract function - // over multiple contracts. 
- let uniquely_identifying_program_name = - format!("{}-{}", contract_id, contract_function.name); - // Each program in a contract is preprocessed - // Note: This can potentially be quite a long running process - - save_preprocess_data( - &preprocessed_data, - &uniquely_identifying_program_name, - circuit_dir, - )?; - } - } - - Ok(()) +fn setup_driver( + backend: &impl ProofSystemCompiler, + program_dir: &Path, +) -> Result { + Resolver::resolve_root_manifest(program_dir, backend.np_language()) } pub(crate) fn compile_circuit( + backend: &impl ProofSystemCompiler, program_dir: &Path, compile_options: &CompileOptions, ) -> Result { - let mut driver = setup_driver(program_dir)?; + let mut driver = setup_driver(backend, program_dir)?; driver.compile_main(compile_options).map_err(|_| CliError::CompilationError) } diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs b/crates/nargo_cli/src/cli/execute_cmd.rs index e7ecdc543e3..9d1429bbda7 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -1,5 +1,6 @@ use std::path::Path; +use acvm::PartialWitnessGenerator; use clap::Args; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::{InputMap, WitnessMap}; @@ -45,13 +46,15 @@ fn execute_with_path( program_dir: &Path, compile_options: &CompileOptions, ) -> Result<(Option, WitnessMap), CliError> { - let compiled_program = compile_circuit(program_dir, compile_options)?; + let backend = crate::backends::ConcreteBackend; + + let compiled_program = compile_circuit(&backend, program_dir, compile_options)?; // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(program_dir, PROVER_INPUT_FILE, Format::Toml, &compiled_program.abi)?; - let solved_witness = execute_program(&compiled_program, &inputs_map)?; + let solved_witness = execute_program(&backend, &compiled_program, &inputs_map)?; let public_abi = compiled_program.abi.public_abi(); let (_, return_value) = 
public_abi.decode(&solved_witness)?; @@ -60,14 +63,14 @@ fn execute_with_path( } pub(crate) fn execute_program( + backend: &impl PartialWitnessGenerator, compiled_program: &CompiledProgram, inputs_map: &InputMap, ) -> Result { let initial_witness = compiled_program.abi.encode(inputs_map, None)?; - let backend = crate::backends::ConcreteBackend; let solved_witness = - nargo::ops::execute_circuit(&backend, compiled_program.circuit.clone(), initial_witness)?; + nargo::ops::execute_circuit(backend, compiled_program.circuit.clone(), initial_witness)?; Ok(solved_witness) } diff --git a/crates/nargo_cli/src/cli/fs/keys.rs b/crates/nargo_cli/src/cli/fs/keys.rs deleted file mode 100644 index 32920e51406..00000000000 --- a/crates/nargo_cli/src/cli/fs/keys.rs +++ /dev/null @@ -1,88 +0,0 @@ -use super::{create_named_dir, load_hex_data, write_to_file}; -use crate::{ - constants::{ACIR_CHECKSUM, PK_EXT, VK_EXT}, - errors::CliError, -}; -use acvm::acir::circuit::Circuit; -use nargo::ops::checksum_acir; -use std::path::{Path, PathBuf}; - -pub(crate) fn save_key_to_dir>( - key: &[u8], - key_name: &str, - key_dir: P, - is_proving_key: bool, -) -> Result { - create_named_dir(key_dir.as_ref(), key_name); - - let extension = if is_proving_key { PK_EXT } else { VK_EXT }; - let key_path = key_dir.as_ref().join(key_name).with_extension(extension); - - write_to_file(hex::encode(key).as_bytes(), &key_path); - - Ok(key_path) -} - -pub(crate) fn fetch_pk_and_vk>( - circuit: &Circuit, - circuit_build_path: P, - prove_circuit: bool, - check_proof: bool, -) -> Result<(Vec, Vec), CliError> { - let acir_hash_path = circuit_build_path.as_ref().with_extension(ACIR_CHECKSUM); - - let expected_acir_checksum = load_hex_data(acir_hash_path.clone())?; - let new_acir_checksum = checksum_acir(circuit); - - if new_acir_checksum[..] 
!= expected_acir_checksum { - return Err(CliError::MismatchedAcir(acir_hash_path)); - } - - // This flag exists to avoid an unnecessary read of the proving key during verification - // as this method is used by both `nargo prove` and `nargo verify` - let proving_key = if prove_circuit { - let proving_key_path = circuit_build_path.as_ref().with_extension(PK_EXT); - load_hex_data(proving_key_path)? - } else { - // We can return an empty Vec here as `prove_circuit` should only be false when running `nargo verify` - vec![] - }; - - let verification_key = if check_proof { - let verification_key_path = circuit_build_path.as_ref().with_extension(VK_EXT); - load_hex_data(verification_key_path)? - } else { - // We can return an empty Vec here as the verification key is used only is `check_proof` is true - vec![] - }; - - Ok((proving_key, verification_key)) -} - -#[cfg(test)] -mod tests { - use super::fetch_pk_and_vk; - use crate::cli::fs::{keys::save_key_to_dir, program::save_acir_checksum_to_dir}; - use acvm::acir::circuit::Circuit; - use nargo::ops::checksum_acir; - use tempdir::TempDir; - - #[test] - fn fetching_pk_and_vk_loads_expected_keys() { - let circuit = Circuit::default(); - let circuit_name = "my_circuit"; - let mut circuit_build_path = TempDir::new("temp_circuit_hash_dir").unwrap().into_path(); - - // These values are not meaningful, we just need distinct values. 
- let pk: Vec = vec![0]; - let vk: Vec = vec![1, 2]; - save_key_to_dir(&pk, circuit_name, &circuit_build_path, true).unwrap(); - save_key_to_dir(&vk, circuit_name, &circuit_build_path, false).unwrap(); - - save_acir_checksum_to_dir(checksum_acir(&circuit), circuit_name, &circuit_build_path); - circuit_build_path.push(circuit_name); - - let loaded_keys = fetch_pk_and_vk(&circuit, circuit_build_path, true, true).unwrap(); - assert_eq!(loaded_keys, (pk, vk)); - } -} diff --git a/crates/nargo_cli/src/cli/fs/mod.rs b/crates/nargo_cli/src/cli/fs/mod.rs index 0e7b643f2c7..d860f722fd1 100644 --- a/crates/nargo_cli/src/cli/fs/mod.rs +++ b/crates/nargo_cli/src/cli/fs/mod.rs @@ -7,7 +7,6 @@ use std::{ use crate::errors::CliError; pub(super) mod inputs; -pub(super) mod keys; pub(super) mod program; pub(super) mod proof; pub(super) mod witness; diff --git a/crates/nargo_cli/src/cli/fs/program.rs b/crates/nargo_cli/src/cli/fs/program.rs index b01455e2833..a3b5f4026bd 100644 --- a/crates/nargo_cli/src/cli/fs/program.rs +++ b/crates/nargo_cli/src/cli/fs/program.rs @@ -1,20 +1,20 @@ use std::path::{Path, PathBuf}; -use noirc_driver::{CompiledContract, CompiledProgram}; +use nargo::artifacts::{contract::PreprocessedContract, program::PreprocessedProgram}; -use crate::{constants::ACIR_CHECKSUM, errors::CliError}; +use crate::errors::CliError; use super::{create_named_dir, write_to_file}; pub(crate) fn save_program_to_file>( - compiled_program: &CompiledProgram, + compiled_program: &PreprocessedProgram, circuit_name: &str, circuit_dir: P, ) -> PathBuf { save_build_artifact_to_file(compiled_program, circuit_name, circuit_dir) } pub(crate) fn save_contract_to_file>( - compiled_contract: &CompiledContract, + compiled_contract: &PreprocessedContract, circuit_name: &str, circuit_dir: P, ) -> PathBuf { @@ -33,20 +33,9 @@ fn save_build_artifact_to_file, T: ?Sized + serde::Serialize>( circuit_path } -pub(crate) fn save_acir_checksum_to_dir>( - acir_checksum: [u8; 4], - hash_name: &str, - 
hash_dir: P, -) -> PathBuf { - let hash_path = hash_dir.as_ref().join(hash_name).with_extension(ACIR_CHECKSUM); - write_to_file(hex::encode(acir_checksum).as_bytes(), &hash_path); - - hash_path -} - pub(crate) fn read_program_from_file>( circuit_path: P, -) -> Result { +) -> Result { let file_path = circuit_path.as_ref().with_extension("json"); let input_string = std::fs::read(&file_path).map_err(|_| CliError::PathNotValid(file_path))?; diff --git a/crates/nargo_cli/src/cli/gates_cmd.rs b/crates/nargo_cli/src/cli/gates_cmd.rs index 71edd4101fe..a5093b4d775 100644 --- a/crates/nargo_cli/src/cli/gates_cmd.rs +++ b/crates/nargo_cli/src/cli/gates_cmd.rs @@ -23,10 +23,11 @@ fn count_gates_with_path>( program_dir: P, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let compiled_program = compile_circuit(program_dir.as_ref(), compile_options)?; - let num_opcodes = compiled_program.circuit.opcodes.len(); let backend = crate::backends::ConcreteBackend; + let compiled_program = compile_circuit(&backend, program_dir.as_ref(), compile_options)?; + let num_opcodes = compiled_program.circuit.opcodes.len(); + println!( "Total ACIR opcodes generated for language {:?}: {}", backend.np_language(), diff --git a/crates/nargo_cli/src/cli/mod.rs b/crates/nargo_cli/src/cli/mod.rs index 9e21bf472cd..e713bdd47fc 100644 --- a/crates/nargo_cli/src/cli/mod.rs +++ b/crates/nargo_cli/src/cli/mod.rs @@ -16,7 +16,6 @@ mod compile_cmd; mod execute_cmd; mod gates_cmd; mod new_cmd; -mod preprocess_cmd; mod print_acir_cmd; mod prove_cmd; mod test_cmd; @@ -56,7 +55,6 @@ enum NargoCommand { Execute(execute_cmd::ExecuteCommand), Prove(prove_cmd::ProveCommand), Verify(verify_cmd::VerifyCommand), - Preprocess(preprocess_cmd::PreprocessCommand), Test(test_cmd::TestCommand), Gates(gates_cmd::GatesCommand), PrintAcir(print_acir_cmd::PrintAcirCommand), @@ -77,7 +75,6 @@ pub fn start_cli() -> eyre::Result<()> { NargoCommand::Execute(args) => execute_cmd::run(args, config), 
NargoCommand::Prove(args) => prove_cmd::run(args, config), NargoCommand::Verify(args) => verify_cmd::run(args, config), - NargoCommand::Preprocess(args) => preprocess_cmd::run(args, config), NargoCommand::Test(args) => test_cmd::run(args, config), NargoCommand::Gates(args) => gates_cmd::run(args, config), NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(args, config), diff --git a/crates/nargo_cli/src/cli/preprocess_cmd.rs b/crates/nargo_cli/src/cli/preprocess_cmd.rs deleted file mode 100644 index 95a2ab849e9..00000000000 --- a/crates/nargo_cli/src/cli/preprocess_cmd.rs +++ /dev/null @@ -1,46 +0,0 @@ -use nargo::ops::{preprocess_circuit, PreprocessedData}; -use std::path::{Path, PathBuf}; - -use clap::Args; - -use crate::{constants::TARGET_DIR, errors::CliError}; - -use super::fs::{ - keys::save_key_to_dir, - program::{read_program_from_file, save_acir_checksum_to_dir}, -}; -use super::NargoConfig; - -/// Generate proving and verification keys for a circuit. -#[derive(Debug, Clone, Args)] -pub(crate) struct PreprocessCommand { - /// The name of the program build artifact. - artifact_name: String, -} - -pub(crate) fn run(args: PreprocessCommand, config: NargoConfig) -> Result<(), CliError> { - let circuit_dir = config.program_dir.join(TARGET_DIR); - - let program = read_program_from_file(circuit_dir.join(&args.artifact_name))?; - - let backend = crate::backends::ConcreteBackend; - let preprocess_data = preprocess_circuit(&backend, &program.circuit)?; - save_preprocess_data(&preprocess_data, &args.artifact_name, circuit_dir)?; - - Ok(()) -} - -pub(crate) fn save_preprocess_data>( - data: &PreprocessedData, - key_name: &str, - preprocess_dir: P, -) -> Result<(PathBuf, PathBuf), CliError> { - // Save a checksum of the circuit to compare against during proving and verification. - // If hash doesn't match then the circuit has been updated and keys are stale. 
- save_acir_checksum_to_dir(data.program_checksum, key_name, &preprocess_dir); - - let pk_path = save_key_to_dir(&data.proving_key, key_name, &preprocess_dir, true)?; - let vk_path = save_key_to_dir(&data.verification_key, key_name, preprocess_dir, false)?; - - Ok((pk_path, vk_path)) -} diff --git a/crates/nargo_cli/src/cli/print_acir_cmd.rs b/crates/nargo_cli/src/cli/print_acir_cmd.rs index dbc0fea86de..589cc490f40 100644 --- a/crates/nargo_cli/src/cli/print_acir_cmd.rs +++ b/crates/nargo_cli/src/cli/print_acir_cmd.rs @@ -22,7 +22,9 @@ fn print_acir_with_path>( program_dir: P, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let compiled_program = compile_circuit(program_dir.as_ref(), compile_options)?; + let backend = crate::backends::ConcreteBackend; + + let compiled_program = compile_circuit(&backend, program_dir.as_ref(), compile_options)?; println!("{}", compiled_program.circuit); Ok(()) diff --git a/crates/nargo_cli/src/cli/prove_cmd.rs b/crates/nargo_cli/src/cli/prove_cmd.rs index 720110a6758..fd60f004e2b 100644 --- a/crates/nargo_cli/src/cli/prove_cmd.rs +++ b/crates/nargo_cli/src/cli/prove_cmd.rs @@ -1,17 +1,20 @@ use std::path::{Path, PathBuf}; use clap::Args; -use nargo::ops::{preprocess_circuit, PreprocessedData}; +use nargo::artifacts::program::PreprocessedProgram; +use nargo::ops::{preprocess_program, prove_execution}; use noirc_abi::input_parser::Format; -use noirc_driver::CompileOptions; +use noirc_driver::{CompileOptions, CompiledProgram}; -use super::fs::{ - inputs::{read_inputs_from_file, write_inputs_to_file}, - keys::fetch_pk_and_vk, - program::read_program_from_file, - proof::save_proof_to_dir, -}; use super::NargoConfig; +use super::{ + compile_cmd::compile_circuit, + fs::{ + inputs::{read_inputs_from_file, write_inputs_to_file}, + program::read_program_from_file, + proof::save_proof_to_dir, + }, +}; use crate::{ cli::{execute_cmd::execute_program, verify_cmd::verify_proof}, constants::{PROOFS_DIR, PROVER_INPUT_FILE, 
TARGET_DIR, VERIFIER_INPUT_FILE}, @@ -62,25 +65,21 @@ pub(crate) fn prove_with_path>( check_proof: bool, compile_options: &CompileOptions, ) -> Result, CliError> { - let (compiled_program, proving_key, verification_key) = match circuit_build_path { - Some(circuit_build_path) => { - let compiled_program = read_program_from_file(&circuit_build_path)?; + let backend = crate::backends::ConcreteBackend; - let (proving_key, verification_key) = - fetch_pk_and_vk(&compiled_program.circuit, circuit_build_path, true, true)?; - (compiled_program, proving_key, verification_key) - } + let preprocessed_program = match circuit_build_path { + Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, None => { let compiled_program = - super::compile_cmd::compile_circuit(program_dir.as_ref(), compile_options)?; - - let backend = crate::backends::ConcreteBackend; - let PreprocessedData { proving_key, verification_key, .. } = - preprocess_circuit(&backend, &compiled_program.circuit)?; - (compiled_program, proving_key, verification_key) + compile_circuit(&backend, program_dir.as_ref(), compile_options)?; + preprocess_program(&backend, compiled_program)? } }; + let PreprocessedProgram { abi, bytecode, proving_key, verification_key, .. 
} = + preprocessed_program; + let compiled_program = CompiledProgram { abi, circuit: bytecode }; + // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file( &program_dir, @@ -89,7 +88,7 @@ pub(crate) fn prove_with_path>( &compiled_program.abi, )?; - let solved_witness = execute_program(&compiled_program, &inputs_map)?; + let solved_witness = execute_program(&backend, &compiled_program, &inputs_map)?; // Write public inputs into Verifier.toml let public_abi = compiled_program.abi.clone().public_abi(); @@ -103,13 +102,12 @@ pub(crate) fn prove_with_path>( Format::Toml, )?; - let backend = crate::backends::ConcreteBackend; - let proof = - nargo::ops::prove(&backend, &compiled_program.circuit, solved_witness, &proving_key)?; + let proof = prove_execution(&backend, &compiled_program.circuit, solved_witness, &proving_key)?; if check_proof { let no_proof_name = "".into(); verify_proof( + &backend, &compiled_program, public_inputs, return_value, diff --git a/crates/nargo_cli/src/cli/verify_cmd.rs b/crates/nargo_cli/src/cli/verify_cmd.rs index 03de9a7e33e..cf2e4859091 100644 --- a/crates/nargo_cli/src/cli/verify_cmd.rs +++ b/crates/nargo_cli/src/cli/verify_cmd.rs @@ -1,14 +1,14 @@ -use super::fs::{ - inputs::read_inputs_from_file, keys::fetch_pk_and_vk, load_hex_data, - program::read_program_from_file, -}; -use super::{compile_cmd::compile_circuit, InputMap, NargoConfig}; +use super::compile_cmd::compile_circuit; +use super::fs::{inputs::read_inputs_from_file, load_hex_data, program::read_program_from_file}; +use super::{InputMap, NargoConfig}; use crate::{ constants::{PROOFS_DIR, PROOF_EXT, TARGET_DIR, VERIFIER_INPUT_FILE}, errors::CliError, }; +use acvm::ProofSystemCompiler; use clap::Args; -use nargo::ops::{preprocess_circuit, PreprocessedData}; +use nargo::artifacts::program::PreprocessedProgram; +use nargo::ops::preprocess_program; use noirc_abi::input_parser::{Format, InputValue}; use noirc_driver::{CompileOptions, 
CompiledProgram}; use std::path::{Path, PathBuf}; @@ -43,30 +43,27 @@ fn verify_with_path>( circuit_build_path: Option

, compile_options: CompileOptions, ) -> Result<(), CliError> { - let (compiled_program, verification_key) = match circuit_build_path { - Some(circuit_build_path) => { - let compiled_program = read_program_from_file(&circuit_build_path)?; + let backend = crate::backends::ConcreteBackend; - let (_, verification_key) = - fetch_pk_and_vk(&compiled_program.circuit, circuit_build_path, false, true)?; - (compiled_program, verification_key) - } + let preprocessed_program = match circuit_build_path { + Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, None => { - let compiled_program = compile_circuit(program_dir.as_ref(), &compile_options)?; - - let backend = crate::backends::ConcreteBackend; - let PreprocessedData { verification_key, .. } = - preprocess_circuit(&backend, &compiled_program.circuit)?; - (compiled_program, verification_key) + let compiled_program = + compile_circuit(&backend, program_dir.as_ref(), &compile_options)?; + preprocess_program(&backend, compiled_program)? } }; + let PreprocessedProgram { abi, bytecode, verification_key, .. } = preprocessed_program; + let compiled_program = CompiledProgram { abi, circuit: bytecode }; + // Load public inputs (if any) from `VERIFIER_INPUT_FILE`. 
let public_abi = compiled_program.abi.clone().public_abi(); let (public_inputs_map, return_value) = read_inputs_from_file(program_dir, VERIFIER_INPUT_FILE, Format::Toml, &public_abi)?; verify_proof( + &backend, &compiled_program, public_inputs_map, return_value, @@ -77,6 +74,7 @@ fn verify_with_path>( } pub(crate) fn verify_proof( + backend: &impl ProofSystemCompiler, compiled_program: &CompiledProgram, public_inputs_map: InputMap, return_value: Option, @@ -87,9 +85,8 @@ pub(crate) fn verify_proof( let public_abi = compiled_program.abi.clone().public_abi(); let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; - let backend = crate::backends::ConcreteBackend; let valid_proof = nargo::ops::verify_proof( - &backend, + backend, &compiled_program.circuit, proof, public_inputs, diff --git a/crates/nargo_cli/src/constants.rs b/crates/nargo_cli/src/constants.rs index ba7ba3e7675..d3e6b7f28e1 100644 --- a/crates/nargo_cli/src/constants.rs +++ b/crates/nargo_cli/src/constants.rs @@ -21,9 +21,3 @@ pub(crate) const PKG_FILE: &str = "Nargo.toml"; pub(crate) const PROOF_EXT: &str = "proof"; /// The extension for files containing proof witnesses. pub(crate) const WITNESS_EXT: &str = "tr"; -/// The extension for proving keys. -pub(crate) const PK_EXT: &str = "pk"; -/// The extension for verification keys. -pub(crate) const VK_EXT: &str = "vk"; -/// The extension for ACIR hash files. -pub(crate) const ACIR_CHECKSUM: &str = "json.checksum"; diff --git a/crates/nargo_cli/src/errors.rs b/crates/nargo_cli/src/errors.rs index 1561a5033fa..f6537b550ea 100644 --- a/crates/nargo_cli/src/errors.rs +++ b/crates/nargo_cli/src/errors.rs @@ -20,8 +20,7 @@ pub(crate) enum CliError { " Error: cannot find {0}.toml file.\n Expected location: {1:?} \n Please generate this file at the expected location." 
)] MissingTomlFile(String, PathBuf), - #[error("Error: the circuit you are trying to prove differs from the build artifact at {}\nYou must call `nargo compile` to generate the correct proving and verification keys for this circuit", .0.display())] - MismatchedAcir(PathBuf), + #[error("Failed to verify proof {}", .0.display())] InvalidProof(PathBuf), diff --git a/crates/nargo_cli/src/preprocess.rs b/crates/nargo_cli/src/preprocess.rs new file mode 100644 index 00000000000..249b6647e30 --- /dev/null +++ b/crates/nargo_cli/src/preprocess.rs @@ -0,0 +1,57 @@ +use acvm::ProofSystemCompiler; +use iter_extended::vecmap; +use noirc_driver::{CompiledContract, CompiledProgram}; + +// TODO: migrate to `nargo_cli` + +use crate::artifacts::{ + contract::{PreprocessedContract, PreprocessedContractFunction}, + program::PreprocessedProgram, +}; + +// TODO: pull this from backend. +const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; + +pub(crate) fn preprocess_program(compiled_program: CompiledProgram) -> PreprocessedProgram { + let backend = crate::backends::ConcreteBackend; + + // TODO: currently `compiled_program`'s bytecode is already optimized for the backend. + // In future we'll need to apply those optimizations here. + let optimized_bytecode = compiled_program.circuit; + let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode); + + PreprocessedProgram { + backend: String::from(BACKEND_IDENTIFIER), + abi: compiled_program.abi, + bytecode: optimized_bytecode, + proving_key, + verification_key, + } +} + +pub(crate) fn preprocess_contract(compiled_contract: CompiledContract) -> PreprocessedContract { + let backend = crate::backends::ConcreteBackend; + + let preprocessed_contract_functions = vecmap(compiled_contract.functions, |func| { + // TODO: currently `func`'s bytecode is already optimized for the backend. + // In future we'll need to apply those optimizations here. 
+ let optimized_bytecode = func.bytecode; + let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode); + + PreprocessedContractFunction { + name: func.name, + function_type: func.function_type, + abi: func.abi, + + bytecode: optimized_bytecode, + proving_key, + verification_key, + } + }); + + PreprocessedContract { + name: compiled_contract.name, + backend: String::from(BACKEND_IDENTIFIER), + functions: preprocessed_contract_functions, + } +} diff --git a/crates/noirc_driver/src/contract.rs b/crates/noirc_driver/src/contract.rs index ed9bd8d4dcd..a5600c3d215 100644 --- a/crates/noirc_driver/src/contract.rs +++ b/crates/noirc_driver/src/contract.rs @@ -6,8 +6,7 @@ use serde::{Deserialize, Serialize}; /// Unlike the similar enum in noirc_frontend, 'open' and 'unconstrained' /// are mutually exclusive here. In the case a function is both, 'unconstrained' /// takes precedence. -#[derive(serde::Serialize, serde::Deserialize, Debug, Copy, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] +#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)] pub enum ContractFunctionType { /// This function will be executed in a private /// context. @@ -20,8 +19,6 @@ pub enum ContractFunctionType { Unconstrained, } -#[derive(serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] pub struct CompiledContract { /// The name of the contract. pub name: String, @@ -36,27 +33,19 @@ pub struct CompiledContract { /// A contract function unlike a regular Noir program /// however can have additional properties. /// One of these being a function type. 
-#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] +#[derive(Debug)] pub struct ContractFunction { pub name: String, pub function_type: ContractFunctionType, - #[serde(flatten)] pub abi: Abi, - #[serde( - serialize_with = "crate::program::serialize_circuit", - deserialize_with = "crate::program::deserialize_circuit" - )] pub bytecode: Circuit, - - pub verification_key: Option>, } impl ContractFunctionType { - pub fn new(kind: noirc_frontend::ContractFunctionType, is_unconstrained: bool) -> Self { + pub(super) fn new(kind: noirc_frontend::ContractFunctionType, is_unconstrained: bool) -> Self { match (kind, is_unconstrained) { (_, true) => Self::Unconstrained, (noirc_frontend::ContractFunctionType::Secret, false) => Self::Secret, diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index 4f1ab7fd482..c6d0a08e4d8 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -5,9 +5,9 @@ use acvm::Language; use clap::Args; -use contract::{ContractFunction, ContractFunctionType}; +use contract::ContractFunction; use fm::FileType; -use iter_extended::{try_vecmap, vecmap}; +use iter_extended::try_vecmap; use noirc_abi::FunctionSignature; use noirc_errors::{reporter, ReportedError}; use noirc_evaluator::create_circuit; @@ -22,7 +22,7 @@ use std::path::{Path, PathBuf}; mod contract; mod program; -pub use contract::CompiledContract; +pub use contract::{CompiledContract, ContractFunctionType}; pub use program::CompiledProgram; pub struct Driver { @@ -204,30 +204,24 @@ impl Driver { options: &CompileOptions, ) -> Result { let functions = try_vecmap(&contract.functions, |function_id| { - let function_name = self.function_name(*function_id).to_owned(); + let name = self.function_name(*function_id).to_owned(); let function = self.compile_no_check(options, *function_id)?; let func_meta = self.context.def_interner.function_meta(function_id); let func_type = func_meta .contract_function_type .expect("Expected 
contract function to have a contract visibility"); - let func_type = ContractFunctionType::new(func_type, func_meta.is_unconstrained); + let function_type = ContractFunctionType::new(func_type, func_meta.is_unconstrained); - Ok((function_name, func_type, function)) - })?; - - let converted_functions = - vecmap(functions, |(name, function_type, function)| ContractFunction { + Ok(ContractFunction { name, function_type, abi: function.abi, bytecode: function.circuit, - // Since we have not called the proving system yet - // we do not have a verification key - verification_key: None, - }); + }) + })?; - Ok(CompiledContract { name: contract.name, functions: converted_functions }) + Ok(CompiledContract { name: contract.name, functions }) } /// Returns the FuncId of the 'main' function. From 5d6e4d0b13404bd0681c3fe508e1abad21522411 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Thu, 13 Apr 2023 18:14:32 +0100 Subject: [PATCH 08/63] fix: Numeric generics with impls error (#1148) numeric generic Error reporting change for type rules --- .../tests/test_data/numeric_generics/src/main.nr | 13 ++++++++++++- crates/noirc_frontend/src/hir/type_check/expr.rs | 11 ++++++----- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr b/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr index 7d73eabe2be..ebe50c4d0d9 100644 --- a/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr +++ b/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr @@ -19,10 +19,21 @@ fn id(x: [Field; I]) -> [Field; I] { } struct MyStruct { - data: [Field; S] + data: [Field; S], +} + +impl MyStruct { + fn insert(mut self: Self, index: comptime Field, elem: Field) -> Self { + // Regression test for numeric generics on impls + constrain index as u64 < S as u64; + + self.data[index] = elem; + self + } } fn foo(mut s: MyStruct<2+1>) -> MyStruct<10/2-2> { s.data[0] = s.data[0] + 1; s } + diff --git 
a/crates/noirc_frontend/src/hir/type_check/expr.rs b/crates/noirc_frontend/src/hir/type_check/expr.rs index f03420704cc..e57dcdb0ffc 100644 --- a/crates/noirc_frontend/src/hir/type_check/expr.rs +++ b/crates/noirc_frontend/src/hir/type_check/expr.rs @@ -512,6 +512,9 @@ impl<'interner> TypeChecker<'interner> { use crate::BinaryOpKind::{Equal, NotEqual}; use Type::*; match (lhs_type, rhs_type) { + // Avoid reporting errors multiple times + (Error, _) | (_,Error) => Ok(Bool(CompTime::Yes(None))), + // Matches on PolymorphicInteger and TypeVariable must be first to follow any type // bindings. (PolymorphicInteger(comptime, int), other) @@ -572,9 +575,6 @@ impl<'interner> TypeChecker<'interner> { Ok(Bool(comptime)) } - // Avoid reporting errors multiple times - (Error, _) | (_,Error) => Ok(Bool(CompTime::Yes(None))), - // Special-case == and != for arrays (Array(x_size, x_type), Array(y_size, y_type)) if matches!(op.kind, Equal | NotEqual) => { x_type.unify(y_type, op.location.span, &mut self.errors, || { @@ -728,6 +728,9 @@ impl<'interner> TypeChecker<'interner> { use Type::*; match (lhs_type, rhs_type) { + // An error type on either side will always return an error + (Error, _) | (_,Error) => Ok(Error), + // Matches on PolymorphicInteger and TypeVariable must be first so that we follow any type // bindings. 
(PolymorphicInteger(comptime, int), other) @@ -793,8 +796,6 @@ impl<'interner> TypeChecker<'interner> { (Struct(..), _) | (_, Struct(..)) => Err(make_error("Structs cannot be used in an infix operation".to_string())), (Tuple(_), _) | (_, Tuple(_)) => Err(make_error("Tuples cannot be used in an infix operation".to_string())), - // An error type on either side will always return an error - (Error, _) | (_,Error) => Ok(Error), (Unit, _) | (_,Unit) => Ok(Unit), // The result of two Fields is always a witness From f9605f9121a6cd598446169d4fb86266d44a6a57 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 13 Apr 2023 18:17:52 +0100 Subject: [PATCH 09/63] chore: clean up orphaned `preprocess.rs` file (#1147) --- crates/nargo_cli/src/preprocess.rs | 57 ------------------------------ 1 file changed, 57 deletions(-) delete mode 100644 crates/nargo_cli/src/preprocess.rs diff --git a/crates/nargo_cli/src/preprocess.rs b/crates/nargo_cli/src/preprocess.rs deleted file mode 100644 index 249b6647e30..00000000000 --- a/crates/nargo_cli/src/preprocess.rs +++ /dev/null @@ -1,57 +0,0 @@ -use acvm::ProofSystemCompiler; -use iter_extended::vecmap; -use noirc_driver::{CompiledContract, CompiledProgram}; - -// TODO: migrate to `nargo_cli` - -use crate::artifacts::{ - contract::{PreprocessedContract, PreprocessedContractFunction}, - program::PreprocessedProgram, -}; - -// TODO: pull this from backend. -const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; - -pub(crate) fn preprocess_program(compiled_program: CompiledProgram) -> PreprocessedProgram { - let backend = crate::backends::ConcreteBackend; - - // TODO: currently `compiled_program`'s bytecode is already optimized for the backend. - // In future we'll need to apply those optimizations here. 
- let optimized_bytecode = compiled_program.circuit; - let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode); - - PreprocessedProgram { - backend: String::from(BACKEND_IDENTIFIER), - abi: compiled_program.abi, - bytecode: optimized_bytecode, - proving_key, - verification_key, - } -} - -pub(crate) fn preprocess_contract(compiled_contract: CompiledContract) -> PreprocessedContract { - let backend = crate::backends::ConcreteBackend; - - let preprocessed_contract_functions = vecmap(compiled_contract.functions, |func| { - // TODO: currently `func`'s bytecode is already optimized for the backend. - // In future we'll need to apply those optimizations here. - let optimized_bytecode = func.bytecode; - let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode); - - PreprocessedContractFunction { - name: func.name, - function_type: func.function_type, - abi: func.abi, - - bytecode: optimized_bytecode, - proving_key, - verification_key, - } - }); - - PreprocessedContract { - name: compiled_contract.name, - backend: String::from(BACKEND_IDENTIFIER), - functions: preprocessed_contract_functions, - } -} From 2bb78ccb73421f811991ea5e6b4b41a650485385 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Fri, 14 Apr 2023 00:45:19 +0100 Subject: [PATCH 10/63] chore: update documentation of `show_output` flag (#1150) --- crates/noirc_driver/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index c6d0a08e4d8..2fcef5bc578 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -40,7 +40,7 @@ pub struct CompileOptions { #[arg(short, long)] pub allow_warnings: bool, - /// Display output of `println` statements during tests + /// Display output of `println` statements #[arg(long)] pub show_output: bool, } From bc8ed9aa0c207bc93ac18a210c7a7828b354e860 Mon Sep 17 00:00:00 2001 From: Tom 
French <15848336+TomAFrench@users.noreply.github.com> Date: Fri, 14 Apr 2023 16:17:07 +0100 Subject: [PATCH 11/63] chore!: remove outdated arkworks backend (#1151) chore: remove outdated arkworks backend --- Cargo.lock | 363 +++---------------------------- crates/nargo_cli/Cargo.toml | 2 - crates/nargo_cli/src/backends.rs | 11 - 3 files changed, 34 insertions(+), 342 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 274eac0bce2..1b059e32a88 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,88 +2,40 @@ # It is not intended for manual editing. version = 3 -[[package]] -name = "acir" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad7977c11d19ae0dd983b50dc5fd9eb96c002072f75643e45daa6dc0c23fba5" -dependencies = [ - "acir_field 0.3.1", - "flate2", - "rmp-serde", - "serde", -] - [[package]] name = "acir" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f764b474e341efc3e8ee3d5054840b2fd2ac002f764fc2f4cd3569ce76badd1" dependencies = [ - "acir_field 0.8.0", + "acir_field", "flate2", "rmp-serde", "serde", ] -[[package]] -name = "acir_field" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "687506e635efa7ce15d6b93ceae14dec1519ed2e54c24298fc8c40e86edbce24" -dependencies = [ - "ark-bls12-381", - "ark-bn254 0.3.0", - "ark-ff 0.3.0", - "blake2", - "cfg-if 1.0.0", - "hex", - "num-bigint", - "num-traits", - "serde", -] - [[package]] name = "acir_field" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbca7df5192c7823d4108d2c34cadcfd30dca94506b9e9861f85f0ea747ddedc" dependencies = [ - "ark-bn254 0.4.0", - "ark-ff 0.4.1", + "ark-bn254", + "ark-ff", "cfg-if 1.0.0", "hex", "num-bigint", "serde", ] -[[package]] -name = "acvm" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"99007127e84602134226eefc2245c59b7fe55853bfeba572714b04c5b3fefdea" -dependencies = [ - "acir 0.3.1", - "acir_field 0.3.1", - "acvm_stdlib 0.3.1", - "blake2", - "hex", - "indexmap", - "k256", - "num-bigint", - "num-traits", - "sha2 0.9.9", - "thiserror", -] - [[package]] name = "acvm" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92d5df175b6923bf9bb05ba973b017b0fa1356066be8f0ebadd3d2dbbc48bd5b" dependencies = [ - "acir 0.8.0", - "acvm_stdlib 0.8.0", + "acir", + "acvm_stdlib", "blake2", "crc32fast", "indexmap", @@ -94,23 +46,13 @@ dependencies = [ "thiserror", ] -[[package]] -name = "acvm_stdlib" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4f5ef183f4a10b4a257d25c3a37fd090b9e8fbb7dff0902329fb6606b524114" -dependencies = [ - "acir 0.3.1", - "acir_field 0.3.1", -] - [[package]] name = "acvm_stdlib" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa2bbc18fe9732ca3d93a2bf8f1a1ad99a003b565e7bc1ad5c67f69867449e8f" dependencies = [ - "acir 0.8.0", + "acir", ] [[package]] @@ -181,51 +123,15 @@ dependencies = [ "generational-arena", ] -[[package]] -name = "ark-bls12-381" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65be532f9dd1e98ad0150b037276cde464c6f371059e6dd02c0222395761f6aa" -dependencies = [ - "ark-ec 0.3.0", - "ark-ff 0.3.0", - "ark-std 0.3.0", -] - -[[package]] -name = "ark-bn254" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea691771ebbb28aea556c044e2e5c5227398d840cee0c34d4d20fa8eb2689e8c" -dependencies = [ - "ark-ec 0.3.0", - "ark-ff 0.3.0", - "ark-std 0.3.0", -] - [[package]] name = "ark-bn254" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f" dependencies = 
[ - "ark-ec 0.4.1", - "ark-ff 0.4.1", - "ark-std 0.4.0", -] - -[[package]] -name = "ark-ec" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dea978406c4b1ca13c2db2373b05cc55429c3575b8b21f1b9ee859aa5b03dd42" -dependencies = [ - "ark-ff 0.3.0", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "num-traits", - "zeroize", + "ark-ec", + "ark-ff", + "ark-std", ] [[package]] @@ -234,10 +140,10 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c60370a92f8e1a5f053cad73a862e1b99bc642333cd676fa11c0c39f80f4ac2" dependencies = [ - "ark-ff 0.4.1", - "ark-poly 0.4.1", - "ark-serialize 0.4.1", - "ark-std 0.4.0", + "ark-ff", + "ark-poly", + "ark-serialize", + "ark-std", "derivative", "hashbrown 0.13.2", "itertools", @@ -245,54 +151,26 @@ dependencies = [ "zeroize", ] -[[package]] -name = "ark-ff" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" -dependencies = [ - "ark-ff-asm 0.3.0", - "ark-ff-macros 0.3.0", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.3.3", - "zeroize", -] - [[package]] name = "ark-ff" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c2d42532524bee1da5a4f6f733eb4907301baa480829557adcff5dfaeee1d9a" dependencies = [ - "ark-ff-asm 0.4.1", - "ark-ff-macros 0.4.1", - "ark-serialize 0.4.1", - "ark-std 0.4.0", + "ark-ff-asm", + "ark-ff-macros", + "ark-serialize", + "ark-std", "derivative", "digest 0.10.6", "itertools", "num-bigint", "num-traits", "paste", - "rustc_version 0.4.0", + "rustc_version", "zeroize", ] -[[package]] -name = "ark-ff-asm" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" -dependencies = [ - "quote", - "syn", -] - [[package]] name = "ark-ff-asm" version = "0.4.1" @@ -303,18 +181,6 @@ dependencies = [ "syn", ] -[[package]] -name = "ark-ff-macros" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" -dependencies = [ - "num-bigint", - "num-traits", - "quote", - "syn", -] - [[package]] name = "ark-ff-macros" version = "0.4.1" @@ -328,110 +194,31 @@ dependencies = [ "syn", ] -[[package]] -name = "ark-marlin" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caa8510faa8e64f0a6841ee4b58efe2d56f7a80d86fa0ce9891bbb3aa20166d9" -dependencies = [ - "ark-ff 0.3.0", - "ark-poly 0.3.0", - "ark-poly-commit", - "ark-relations", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "digest 0.9.0", - "rand_chacha", -] - -[[package]] -name = "ark-poly" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0f78f47537c2f15706db7e98fe64cc1711dbf9def81218194e17239e53e5aa" -dependencies = [ - "ark-ff 0.3.0", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "hashbrown 0.11.2", -] - [[package]] name = "ark-poly" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f6ec811462cabe265cfe1b102fcfe3df79d7d2929c2425673648ee9abfd0272" dependencies = [ - "ark-ff 0.4.1", - "ark-serialize 0.4.1", - "ark-std 0.4.0", + "ark-ff", + "ark-serialize", + "ark-std", "derivative", "hashbrown 0.13.2", ] -[[package]] -name = "ark-poly-commit" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a71ddfa72bad1446cab7bbecb6018dbbdc9abcbc3a0065483ae5186ad2a64dcd" -dependencies = [ - "ark-ec 0.3.0", - "ark-ff 0.3.0", - "ark-poly 0.3.0", - "ark-serialize 0.3.0", - 
"ark-std 0.3.0", - "derivative", - "digest 0.9.0", - "tracing", -] - -[[package]] -name = "ark-relations" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cba4c1c99792a6834bd97f7fd76578ec2cd58d2afc5139a17e1d1bec65b38f6" -dependencies = [ - "ark-ff 0.3.0", - "ark-std 0.3.0", - "tracing", -] - -[[package]] -name = "ark-serialize" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" -dependencies = [ - "ark-serialize-derive 0.3.0", - "ark-std 0.3.0", - "digest 0.9.0", -] - [[package]] name = "ark-serialize" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7e735959bc173ea4baf13327b19c22d452b8e9e8e8f7b7fc34e6bf0e316c33e" dependencies = [ - "ark-serialize-derive 0.4.1", - "ark-std 0.4.0", + "ark-serialize-derive", + "ark-std", "digest 0.10.6", "num-bigint", ] -[[package]] -name = "ark-serialize-derive" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dd4e5f0bf8285d5ed538d27fab7411f3e297908fd93c62195de8bee3f199e82" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "ark-serialize-derive" version = "0.4.1" @@ -443,16 +230,6 @@ dependencies = [ "syn", ] -[[package]] -name = "ark-std" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - [[package]] name = "ark-std" version = "0.4.0" @@ -463,25 +240,6 @@ dependencies = [ "rand 0.8.5", ] -[[package]] -name = "arkworks_backend" -version = "0.1.0" -source = "git+https://github.com/noir-lang/arkworks_backend?rev=2f3f0db182004d5c01008c741bf519fe6798e24d#2f3f0db182004d5c01008c741bf519fe6798e24d" -dependencies = [ - "acvm 0.3.1", - 
"ark-bls12-381", - "ark-bn254 0.3.0", - "ark-ff 0.3.0", - "ark-marlin", - "ark-poly 0.3.0", - "ark-poly-commit", - "ark-relations", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "blake2", - "cfg-if 1.0.0", -] - [[package]] name = "assert_cmd" version = "2.0.10" @@ -904,7 +662,7 @@ name = "common" version = "0.1.0" source = "git+https://github.com/noir-lang/aztec_backend?rev=26178359a2251e885f15f0a4d1a686afda04aec9#26178359a2251e885f15f0a4d1a686afda04aec9" dependencies = [ - "acvm 0.8.0", + "acvm", "blake2", "dirs 3.0.2", "downloader", @@ -2159,15 +1917,6 @@ dependencies = [ "libc", ] -[[package]] -name = "marlin_arkworks_backend" -version = "0.1.0" -source = "git+https://github.com/noir-lang/marlin_arkworks_backend?rev=144378edad821bfaa52bf2cacca8ecc87514a4fc#144378edad821bfaa52bf2cacca8ecc87514a4fc" -dependencies = [ - "acvm 0.3.1", - "arkworks_backend", -] - [[package]] name = "memchr" version = "2.5.0" @@ -2244,11 +1993,11 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" name = "nargo" version = "0.3.2" dependencies = [ - "acvm 0.8.0", + "acvm", "iter-extended", "noirc_abi", "noirc_driver", - "rustc_version 0.4.0", + "rustc_version", "serde", "thiserror", "toml", @@ -2258,7 +2007,7 @@ dependencies = [ name = "nargo_cli" version = "0.3.2" dependencies = [ - "acvm 0.8.0", + "acvm", "assert_cmd", "assert_fs", "barretenberg_static_lib", @@ -2271,13 +2020,12 @@ dependencies = [ "dirs 4.0.0", "hex", "iter-extended", - "marlin_arkworks_backend", "nargo", "noirc_abi", "noirc_driver", "noirc_frontend", "predicates 2.1.5", - "rustc_version 0.4.0", + "rustc_version", "serde", "serde_json", "tempdir", @@ -2291,7 +2039,7 @@ dependencies = [ name = "noir_wasm" version = "0.3.2" dependencies = [ - "acvm 0.8.0", + "acvm", "build-data", "console_error_panic_hook", "gloo-utils", @@ -2307,7 +2055,7 @@ dependencies = [ name = "noirc_abi" version = "0.3.2" dependencies = [ - "acvm 0.8.0", + "acvm", "iter-extended", "serde", 
"serde_json", @@ -2319,7 +2067,7 @@ dependencies = [ name = "noirc_driver" version = "0.3.2" dependencies = [ - "acvm 0.8.0", + "acvm", "clap 4.1.8", "fm", "iter-extended", @@ -2345,7 +2093,7 @@ dependencies = [ name = "noirc_evaluator" version = "0.3.2" dependencies = [ - "acvm 0.8.0", + "acvm", "arena", "iter-extended", "noirc_abi", @@ -2361,7 +2109,7 @@ dependencies = [ name = "noirc_frontend" version = "0.3.2" dependencies = [ - "acvm 0.8.0", + "acvm", "arena", "chumsky", "fm", @@ -2535,16 +2283,6 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" -[[package]] -name = "pest" -version = "2.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cbd939b234e95d72bc393d51788aec68aeeb5d51e748ca08ff3aad58cb722f7" -dependencies = [ - "thiserror", - "ucd-trie", -] - [[package]] name = "pin-project-lite" version = "0.2.9" @@ -3017,22 +2755,13 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver 0.11.0", -] - [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.17", + "semver", ] [[package]] @@ -3172,30 +2901,12 @@ version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" -[[package]] -name = "semver" -version = "0.11.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser", -] - [[package]] name = "semver" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" -[[package]] -name = "semver-parser" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" -dependencies = [ - "pest", -] - [[package]] name = "serde" version = "1.0.155" @@ -3696,12 +3407,6 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" -[[package]] -name = "ucd-trie" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" - [[package]] name = "unicode-bidi" version = "0.3.11" diff --git a/crates/nargo_cli/Cargo.toml b/crates/nargo_cli/Cargo.toml index 8d96eba8102..57de9aaa264 100644 --- a/crates/nargo_cli/Cargo.toml +++ b/crates/nargo_cli/Cargo.toml @@ -40,7 +40,6 @@ color-eyre = "0.6.2" # Backends aztec_backend = { optional = true, package = "barretenberg_static_lib", git = "https://github.com/noir-lang/aztec_backend", rev = "26178359a2251e885f15f0a4d1a686afda04aec9" } aztec_wasm_backend = { optional = true, package = "barretenberg_wasm", git = "https://github.com/noir-lang/aztec_backend", rev = "26178359a2251e885f15f0a4d1a686afda04aec9" } -marlin_arkworks_backend = { optional = true, git = "https://github.com/noir-lang/marlin_arkworks_backend", rev = "144378edad821bfaa52bf2cacca8ecc87514a4fc" } [dev-dependencies] assert_cmd = "2.0.8" @@ -51,6 +50,5 @@ predicates = "2.1.5" default = ["plonk_bn254"] # The 
plonk backend can only use bn254, so we do not specify the field plonk_bn254 = ["aztec_backend"] -marlin = ["marlin_arkworks_backend/bls12_381"] plonk_bn254_wasm = ["aztec_wasm_backend"] diff --git a/crates/nargo_cli/src/backends.rs b/crates/nargo_cli/src/backends.rs index 24b4c79e3cc..e1113279f80 100644 --- a/crates/nargo_cli/src/backends.rs +++ b/crates/nargo_cli/src/backends.rs @@ -3,9 +3,6 @@ cfg_if::cfg_if! { pub(crate) use aztec_backend::Plonk as ConcreteBackend; } else if #[cfg(feature = "plonk_bn254_wasm")] { pub(crate) use aztec_wasm_backend::Plonk as ConcreteBackend; - } else if #[cfg(feature = "marlin")] { - // R1CS_MARLIN_ARKWORKS - pub(crate) use marlin_arkworks_backend::Marlin as ConcreteBackend; } else { compile_error!("please specify a backend to compile with"); } @@ -16,11 +13,3 @@ cfg_if::cfg_if! { compile_error!( "feature \"plonk_bn254\" and feature \"plonk_bn254_wasm\" cannot be enabled at the same time" ); -#[cfg(all(feature = "plonk_bn254_wasm", feature = "marlin"))] -compile_error!( - "feature \"plonk_bn254_wasm\" and feature \"marlin\" cannot be enabled at the same time" -); -#[cfg(all(feature = "plonk_bn254", feature = "marlin"))] -compile_error!( - "feature \"plonk_bn254\" and feature \"marlin\" cannot be enabled at the same time" -); From 106a0bff24dc55ba674edb572ffdf0b56dc9e67e Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Fri, 14 Apr 2023 17:22:38 +0100 Subject: [PATCH 12/63] chore(ci): mark github releases as pre-release (#1152) --- .github/workflows/release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ee67f48605d..75278dcee5c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -22,6 +22,7 @@ jobs: package-name: noir bump-minor-pre-major: true bump-patch-for-minor-pre-major: true + prerelease: true # Marks GitHub Releases for 0.x.x versions as "Pre-release" pull-request-title-pattern: "chore(noir): Release ${version}" 
extra-files: | Cargo.toml From fba6418754500a3256d43f8ada791443db788aee Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 17 Apr 2023 13:23:05 +0100 Subject: [PATCH 13/63] chore: update author email to point to `team@noir-lang.org` (#1155) --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 29ac590f6ff..346469655c3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,7 +19,7 @@ default-members = ["crates/nargo_cli"] # x-release-please-start-version version = "0.3.2" # x-release-please-end -authors = ["The Noir Team "] +authors = ["The Noir Team "] edition = "2021" rust-version = "1.66" From 1227b2c913153bebfc416990f833687abb466ec7 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 17 Apr 2023 14:30:02 +0100 Subject: [PATCH 14/63] fix(nargo): give contract artifacts unique names to prevent overwrites (#1158) --- crates/nargo_cli/src/cli/compile_cmd.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/nargo_cli/src/cli/compile_cmd.rs b/crates/nargo_cli/src/cli/compile_cmd.rs index 0c68de9d58e..50c21486385 100644 --- a/crates/nargo_cli/src/cli/compile_cmd.rs +++ b/crates/nargo_cli/src/cli/compile_cmd.rs @@ -41,7 +41,11 @@ pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliEr let preprocessed_contracts = try_vecmap(compiled_contracts, |contract| preprocess_contract(&backend, contract))?; for contract in preprocessed_contracts { - save_contract_to_file(&contract, &args.circuit_name, &circuit_dir); + save_contract_to_file( + &contract, + &format!("{}-{}", &args.circuit_name, contract.name), + &circuit_dir, + ); } } else { let program = compile_circuit(&backend, &config.program_dir, &args.compile_options)?; From a4b196a248fdef9e80bf8c0551d83b1cf23c4a39 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 17 Apr 2023 17:02:20 +0100 Subject: [PATCH 15/63] chore(noir): 
Release 0.4.0 (#1005) * chore(noir): Release 0.4.0 * chore: Update lockfile --- CHANGELOG.md | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++++ Cargo.lock | 22 +++++++++--------- Cargo.toml | 2 +- 3 files changed, 77 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ebf7aca5a8e..5845d82a685 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,70 @@ # Changelog +## [0.4.0](https://github.com/noir-lang/noir/compare/v0.3.2...v0.4.0) (2023-04-17) + + +### âš  BREAKING CHANGES + +* remove outdated arkworks backend ([#1151](https://github.com/noir-lang/noir/issues/1151)) +* **nargo:** define preprocessed artifacts for programs/contracts ([#1126](https://github.com/noir-lang/noir/issues/1126)) +* **nargo:** use faster hash function for checking preprocessed keys ([#1094](https://github.com/noir-lang/noir/issues/1094)) +* Fix returning of structs in ACIR ([#1058](https://github.com/noir-lang/noir/issues/1058)) +* upgrade to acvm 0.8.0 ([#1047](https://github.com/noir-lang/noir/issues/1047)) + +### Features + +* Add new `Vec` type to frontend ([#1103](https://github.com/noir-lang/noir/issues/1103)) ([e125157](https://github.com/noir-lang/noir/commit/e12515778913164a0a9673c3f0eb98b3c5b73a7b)) +* Add storage slots to globals ([#1019](https://github.com/noir-lang/noir/issues/1019)) ([4190e11](https://github.com/noir-lang/noir/commit/4190e11732ae0757ac84d6dcdab78ade62a7cfe8)) +* Allow arbitrary noir functions to be unconstrained ([#1044](https://github.com/noir-lang/noir/issues/1044)) ([ebc8a36](https://github.com/noir-lang/noir/commit/ebc8a36ebdf8b723baf9b5941ec2fa136ad0d2a1)) +* Allow non-comptime field indices in unconstrained functions ([#1053](https://github.com/noir-lang/noir/issues/1053)) ([bc52612](https://github.com/noir-lang/noir/commit/bc5261230310fca5c84a27258935761d9836c912)) +* Allow numeric generics to be referenced 
and add `map` ([#997](https://github.com/noir-lang/noir/issues/997)) ([34eab32](https://github.com/noir-lang/noir/commit/34eab32465ea195d53de29560e363303a36c73f6)) +* Allow secret functions to use public parameters ([#1051](https://github.com/noir-lang/noir/issues/1051)) ([12c0668](https://github.com/noir-lang/noir/commit/12c0668421addb9c0718d60efdcbfe79311fb718)) +* Allow structs and arrays as globals ([#1054](https://github.com/noir-lang/noir/issues/1054)) ([dadbd3c](https://github.com/noir-lang/noir/commit/dadbd3c033bd5e279e84f99bb579f91aff8b8213)) +* Changes serialization for contract functions ([#1056](https://github.com/noir-lang/noir/issues/1056)) ([41e0020](https://github.com/noir-lang/noir/commit/41e00207b0eeae4d0285c617acac72c780cb0900)) +* **compiler:** Allows specify entry_point source ([#1026](https://github.com/noir-lang/noir/issues/1026)) ([9789f89](https://github.com/noir-lang/noir/commit/9789f890fe9bfc014ba7a6b044c268c5dd40a658)) +* dynamic array indexing ([#886](https://github.com/noir-lang/noir/issues/886)) ([aba1ed2](https://github.com/noir-lang/noir/commit/aba1ed229472f2cbb8677b08d54af629382514f3)) +* Implement 'open' and 'unconstrained' keywords ([#1037](https://github.com/noir-lang/noir/issues/1037)) ([5a66dec](https://github.com/noir-lang/noir/commit/5a66dece860044dd23e287dae47070086a51018b)) +* Implement `std::unsafe::zeroed` ([#1048](https://github.com/noir-lang/noir/issues/1048)) ([9a43f85](https://github.com/noir-lang/noir/commit/9a43f85a055f23e5746e6836fe11990f4c87bbdc)) +* Implement arrays of structs ([#1068](https://github.com/noir-lang/noir/issues/1068)) ([f607150](https://github.com/noir-lang/noir/commit/f607150f34d5570ff2d86dddba2074f2c8c29b7e)) +* import core logic in cli from `nargo` crate ([#1142](https://github.com/noir-lang/noir/issues/1142)) 
([753a272](https://github.com/noir-lang/noir/commit/753a272cbdf32858e47d2fa4bd6c236521bbb2cf)) +* make `noirc_driver` aware of contracts ([#999](https://github.com/noir-lang/noir/issues/999)) ([c21afca](https://github.com/noir-lang/noir/commit/c21afcaba738ad438cef6bd100a9eb25e7557bf3)) +* Merge all contracts into one ABI ([#1033](https://github.com/noir-lang/noir/issues/1033)) ([473428c](https://github.com/noir-lang/noir/commit/473428cfc3109f4c03e6cff7b76f995daa6ef4fa)) +* **nargo:** add `InvalidPackageError` and `DependencyResolutionError` error types. ([#1007](https://github.com/noir-lang/noir/issues/1007)) ([1e6761b](https://github.com/noir-lang/noir/commit/1e6761b490a38afe29a9eca085b1a806d8fdf59e)) +* **nargo:** add skeleton of composite types in template input tomls ([#1104](https://github.com/noir-lang/noir/issues/1104)) ([1fb2756](https://github.com/noir-lang/noir/commit/1fb27566ca85fb3c5912308b99edb7a379a8b792)) +* **nargo:** add test to example noir program ([#1039](https://github.com/noir-lang/noir/issues/1039)) ([f994c4f](https://github.com/noir-lang/noir/commit/f994c4f4813ba496f6a958a952691b650bf052e6)) +* **nargo:** allow running `nargo` from any directory in package ([#1010](https://github.com/noir-lang/noir/issues/1010)) ([761fdb5](https://github.com/noir-lang/noir/commit/761fdb5ab96a2259883eb5b42157df466b05175d)) +* **nargo:** define preprocessed artifacts for programs/contracts ([#1126](https://github.com/noir-lang/noir/issues/1126)) ([7528f59](https://github.com/noir-lang/noir/commit/7528f59d10dba5a56b9fa7cf979fdc93cacacb9b)) +* **nargo:** print-acir command ([#1031](https://github.com/noir-lang/noir/issues/1031)) ([408d9c0](https://github.com/noir-lang/noir/commit/408d9c04e3a2fb10a54faee97d3e788f75a07cda)) +* **nargo:** remove misleading quotes in generated `Prover.toml` 
([#1087](https://github.com/noir-lang/noir/issues/1087)) ([57c817f](https://github.com/noir-lang/noir/commit/57c817fafe494c3d6a9cd56c7e266dad754b5c5b)) +* **nargo:** split `nargo` into core and cli packages ([#1065](https://github.com/noir-lang/noir/issues/1065)) ([7c388f9](https://github.com/noir-lang/noir/commit/7c388f9103a96f4b2073def1bb1af7d18744f274)) +* read-only array ([#899](https://github.com/noir-lang/noir/issues/899)) ([2e38ab0](https://github.com/noir-lang/noir/commit/2e38ab08c12b732331bb4dde18815dbb5c9e1398)) +* **stdlib:** Implement Poseidon hash ([#768](https://github.com/noir-lang/noir/issues/768)) ([779ab66](https://github.com/noir-lang/noir/commit/779ab66413ad33a71ed9ca180ca1e5bd8ba3f285)) + + +### Bug Fixes + +* Avoid asserting in typechecker if struct field count is not correct ([#1036](https://github.com/noir-lang/noir/issues/1036)) ([b3d1d7f](https://github.com/noir-lang/noir/commit/b3d1d7fc6f30f30e6ec0effc547713a8de7a5486)), closes [#1028](https://github.com/noir-lang/noir/issues/1028) +* compiler identifying imported functions as being part of a contract ([#1112](https://github.com/noir-lang/noir/issues/1112)) ([61c38d2](https://github.com/noir-lang/noir/commit/61c38d2fd946697296905f267c49d18609835fcb)) +* correct name in CLI output from `nargo_cli` to `nargo` ([74d7369](https://github.com/noir-lang/noir/commit/74d73696bdd042878cdfb06c8a781d575efc97fb)) +* correct test for mutually exclusive feature flags ([#1085](https://github.com/noir-lang/noir/issues/1085)) ([eb5c917](https://github.com/noir-lang/noir/commit/eb5c917e4e5550229fd1fd174b9fd7e507058d25)) +* crash when typechecking fields that don't exist ([#1070](https://github.com/noir-lang/noir/issues/1070)) ([a67e8c5](https://github.com/noir-lang/noir/commit/a67e8c5f3867c3704c74e0b53e74e8ac18dced0a)) +* Fix returning of structs in ACIR 
([#1058](https://github.com/noir-lang/noir/issues/1058)) ([91bd471](https://github.com/noir-lang/noir/commit/91bd47190402f0fe567dbfb6fcfa17b97c129905)) +* **nargo:** correct logic for rejecting transitive local dependencies ([#1015](https://github.com/noir-lang/noir/issues/1015)) ([e2b8b65](https://github.com/noir-lang/noir/commit/e2b8b65834de1d6eeb87459f657257791cc9a289)) +* **nargo:** correct name in CLI output from `nargo_cli` to `nargo` ([#1095](https://github.com/noir-lang/noir/issues/1095)) ([74d7369](https://github.com/noir-lang/noir/commit/74d73696bdd042878cdfb06c8a781d575efc97fb)) +* **nargo:** give contract artifacts unique names to prevent overwrites ([#1158](https://github.com/noir-lang/noir/issues/1158)) ([1227b2c](https://github.com/noir-lang/noir/commit/1227b2c913153bebfc416990f833687abb466ec7)) +* **nargo:** only search for `Nargo.toml` in commands which act on a Nargo package ([#1029](https://github.com/noir-lang/noir/issues/1029)) ([6e642b9](https://github.com/noir-lang/noir/commit/6e642b9cf2f54d5e593fd5ded9246a6c4a61b5f8)) +* **nargo:** resolve local dependencies relative to root of depending package ([38bf571](https://github.com/noir-lang/noir/commit/38bf5719d1757d39c89ecee0a6653a5d9da29c21)) +* Numeric generics with impls error ([#1148](https://github.com/noir-lang/noir/issues/1148)) ([5d6e4d0](https://github.com/noir-lang/noir/commit/5d6e4d0b13404bd0681c3fe508e1abad21522411)) +* rationalise witness for constant values ([#984](https://github.com/noir-lang/noir/issues/984)) ([ab32365](https://github.com/noir-lang/noir/commit/ab32365793b640a0a1e7c359c36f739d981a2487)) +* Resolve globals in types ([#1043](https://github.com/noir-lang/noir/issues/1043)) ([2badf14](https://github.com/noir-lang/noir/commit/2badf1412e4322ced1db74c540708534d452d019)) + + +### Miscellaneous Chores + +* **nargo:** use faster hash 
function for checking preprocessed keys ([#1094](https://github.com/noir-lang/noir/issues/1094)) ([a69758c](https://github.com/noir-lang/noir/commit/a69758c0dff98bb23539df9c13366ef5b23e6b0f)) +* remove outdated arkworks backend ([#1151](https://github.com/noir-lang/noir/issues/1151)) ([bc8ed9a](https://github.com/noir-lang/noir/commit/bc8ed9aa0c207bc93ac18a210c7a7828b354e860)) +* upgrade to acvm 0.8.0 ([#1047](https://github.com/noir-lang/noir/issues/1047)) ([63f958b](https://github.com/noir-lang/noir/commit/63f958b0d4122a9974d450d4d6439434440a320c)) + ## [0.3.2](https://github.com/noir-lang/noir/compare/v0.3.1...v0.3.2) (2023-03-16) diff --git a/Cargo.lock b/Cargo.lock index 1b059e32a88..5d4fafa61cf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -118,7 +118,7 @@ checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" [[package]] name = "arena" -version = "0.3.2" +version = "0.4.0" dependencies = [ "generational-arena", ] @@ -1242,7 +1242,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.3.2" +version = "0.4.0" dependencies = [ "cfg-if 1.0.0", "codespan-reporting 0.9.5", @@ -1781,7 +1781,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.3.2" +version = "0.4.0" [[package]] name = "itertools" @@ -1991,7 +1991,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.3.2" +version = "0.4.0" dependencies = [ "acvm", "iter-extended", @@ -2005,7 +2005,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.3.2" +version = "0.4.0" dependencies = [ "acvm", "assert_cmd", @@ -2037,7 +2037,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.3.2" +version = "0.4.0" dependencies = [ "acvm", "build-data", @@ -2053,7 +2053,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.3.2" +version = "0.4.0" dependencies = [ "acvm", "iter-extended", @@ -2065,7 +2065,7 @@ 
dependencies = [ [[package]] name = "noirc_driver" -version = "0.3.2" +version = "0.4.0" dependencies = [ "acvm", "clap 4.1.8", @@ -2080,7 +2080,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.3.2" +version = "0.4.0" dependencies = [ "chumsky", "codespan", @@ -2091,7 +2091,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.3.2" +version = "0.4.0" dependencies = [ "acvm", "arena", @@ -2107,7 +2107,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.3.2" +version = "0.4.0" dependencies = [ "acvm", "arena", diff --git a/Cargo.toml b/Cargo.toml index 346469655c3..a2f825928ac 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ default-members = ["crates/nargo_cli"] [workspace.package] # x-release-please-start-version -version = "0.3.2" +version = "0.4.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" From 809b85f751bd0e27ce8c4b38354bc051471d8522 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Mon, 17 Apr 2023 18:58:02 +0200 Subject: [PATCH 16/63] fix: Add checks for nop (#1160) * Add checks for nop * Add test case --- .../tests/test_data/regression/Nargo.toml | 5 ++ .../tests/test_data/regression/Prover.toml | 2 + .../tests/test_data/regression/src/main.nr | 56 +++++++++++++++++++ .../src/ssa/acir_gen/internal_var_cache.rs | 7 ++- crates/noirc_evaluator/src/ssa/block.rs | 13 ++++- crates/noirc_evaluator/src/ssa/context.rs | 10 +++- .../noirc_evaluator/src/ssa/optimizations.rs | 5 ++ 7 files changed, 93 insertions(+), 5 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/regression/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/regression/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/regression/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/regression/Nargo.toml b/crates/nargo_cli/tests/test_data/regression/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- 
/dev/null +++ b/crates/nargo_cli/tests/test_data/regression/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/regression/Prover.toml b/crates/nargo_cli/tests/test_data/regression/Prover.toml new file mode 100644 index 00000000000..2875190982f --- /dev/null +++ b/crates/nargo_cli/tests/test_data/regression/Prover.toml @@ -0,0 +1,2 @@ +x = [0x3f, 0x1c, 0xb8, 0x99, 0xab] +z = 3 diff --git a/crates/nargo_cli/tests/test_data/regression/src/main.nr b/crates/nargo_cli/tests/test_data/regression/src/main.nr new file mode 100644 index 00000000000..7ecc57a4803 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/regression/src/main.nr @@ -0,0 +1,56 @@ +global NIBBLE_LENGTH: comptime Field = 16; + +fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Field) +{ + constrain 2*input.len() as u64 <= NIBBLE_LENGTH as u64; + constrain length as u64 <= input.len() as u64; + + let mut nibble = [0 as u4; NIBBLE_LENGTH]; + + let first_nibble = (input[0] >> 4) as u4; + let parity = first_nibble as u1; + + if parity == 1 + { + nibble[0] = (input[0] & 0x0f) as u4; + for i in 1..input.len() + { + if i as u64 < length as u64 + { + let x = input[i]; + nibble[2*i - 1] = (x >> 4) as u4; + nibble[2*i] = (x & 0x0f) as u4; + } + } + } + else + { + for i in 0..2 + { + if (i as u64) < length as u64 - 1 + { + let x = input[i + 1]; + nibble[2*i] = (x >> 4) as u4; + nibble[2*i + 1] = (x & 0x0f) as u4; + } + } + } + + let out = (nibble, 2*length + (parity as Field) - 2); + + out +} + +fn main(x: [u8; 5], z: Field) +{ + //Issue 1144 + let (nib, len) = compact_decode(x,z); + constrain len == 5; + constrain [nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]; +} + +#[test] +fn test_1144() +{ + main([0x3f, 0x1c, 0xb8, 0x99, 0xab], 3); +} \ No newline at end of file diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var_cache.rs 
b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var_cache.rs index 70e3fd41e56..fc9f9ae5af7 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var_cache.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var_cache.rs @@ -125,8 +125,11 @@ impl InternalVarCache { let w = evaluator.create_intermediate_variable(Expression::from(value)); for &id in ids { let mut cached_var = self.get_or_compute_internal_var_unwrap(id, evaluator, ctx); - assert!(cached_var.cached_witness().is_none()); - cached_var.set_witness(w); + if let Some(cached_witness) = cached_var.cached_witness() { + assert_eq!(*cached_witness, w); + } else { + cached_var.set_witness(w); + } self.update(cached_var); } w diff --git a/crates/noirc_evaluator/src/ssa/block.rs b/crates/noirc_evaluator/src/ssa/block.rs index d0f8b8f0859..26340a4a725 100644 --- a/crates/noirc_evaluator/src/ssa/block.rs +++ b/crates/noirc_evaluator/src/ssa/block.rs @@ -528,7 +528,18 @@ pub(super) fn merge_path( removed_blocks.push_back(next); if short_circuit.is_dummy() { - instructions.extend(&block.instructions); + if instructions.is_empty() { + instructions.extend(&block.instructions); + } else { + let nonop = block.instructions.iter().filter(|&i| { + if let Some(ins) = ctx.try_get_instruction(*i) { + ins.operation.opcode() != Opcode::Nop + } else { + true + } + }); + instructions.extend(nonop); + } } if short_circuit.is_dummy() && block.is_short_circuit(ctx, assumption) { diff --git a/crates/noirc_evaluator/src/ssa/context.rs b/crates/noirc_evaluator/src/ssa/context.rs index c7d4dba9799..2efdd8ff304 100644 --- a/crates/noirc_evaluator/src/ssa/context.rs +++ b/crates/noirc_evaluator/src/ssa/context.rs @@ -19,6 +19,8 @@ use noirc_frontend::monomorphization::ast::{Definition, Expression, FuncId, Lite use num_bigint::BigUint; use std::collections::{HashMap, HashSet}; +use super::node::Opcode; + // This is a 'master' class for generating the SSA IR from the AST // It contains all the data; the node objects 
representing the source code in the nodes arena // and The CFG in the blocks arena @@ -726,7 +728,6 @@ impl SsaContext { inline::inline_tree(self, self.first_block, &decision)?; block::merge_path(self, self.first_block, BlockId::dummy(), None)?; - //The CFG is now fully flattened, so we keep only the first block. let mut to_remove = Vec::new(); for b in &self.blocks { @@ -741,7 +742,6 @@ impl SsaContext { self[first_block].dominated.clear(); optimizations::cse(self, first_block, true)?; - //Truncation integer::overflow_strategy(self)?; self.log(enable_logging, "\noverflow:", ""); @@ -1072,6 +1072,12 @@ impl SsaContext { if a == NodeId::dummy() || b == NodeId::dummy() { return NodeId::dummy(); } + if let Some(ins) = self.try_get_instruction(a) { + if ins.operation.opcode() == Opcode::Nop { + assert_eq!(self.try_get_instruction(b).unwrap().operation.opcode(), Opcode::Nop); + return NodeId::dummy(); + } + } let exit_block = self.current_block; let block1 = self[exit_block].predecessor[0]; diff --git a/crates/noirc_evaluator/src/ssa/optimizations.rs b/crates/noirc_evaluator/src/ssa/optimizations.rs index 2e9370961fc..8dcc7cfecf0 100644 --- a/crates/noirc_evaluator/src/ssa/optimizations.rs +++ b/crates/noirc_evaluator/src/ssa/optimizations.rs @@ -499,6 +499,11 @@ fn cse_block_with_anchor( new_list.push(*ins_id); } } + Operation::Nop => { + if new_list.is_empty() { + new_list.push(*ins_id); + } + } _ => { //TODO: checks we do not need to propagate res arguments new_list.push(*ins_id); From 7628ed6aa0e430881bd5628c84342058fa0e2f78 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Tue, 18 Apr 2023 19:27:57 +0200 Subject: [PATCH 17/63] fix(ssa): set correct predecessors of IF join (#1171) fix predecessors of IF join --- .../tests/test_data/regression/src/main.nr | 45 +++++++++++++++++++ crates/noirc_evaluator/src/ssa/ssa_gen.rs | 1 + 2 files changed, 46 insertions(+) diff --git 
a/crates/nargo_cli/tests/test_data/regression/src/main.nr b/crates/nargo_cli/tests/test_data/regression/src/main.nr index 7ecc57a4803..2fcf41c8d7f 100644 --- a/crates/nargo_cli/tests/test_data/regression/src/main.nr +++ b/crates/nargo_cli/tests/test_data/regression/src/main.nr @@ -41,16 +41,61 @@ fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Fie out } +fn enc(value: [u8; N], value_length: Field) -> ([u8; 32], Field) +{ + constrain value.len() as u8 >= value_length as u8; + let mut out_value = [0; 32]; + if value_length == 0 + { + let out = (out_value, value_length); + out + } + else { if value_length as u8 < 31 + { + out_value[0] = 0x80 + value_length as u8; + + for i in 1..value.len() + { + out_value[i] = value[i-1]; + } + + let out = (out_value, value_length + 1); + + out + } + else + { + let out = (out_value, 32); + out + } + } +} + fn main(x: [u8; 5], z: Field) { //Issue 1144 let (nib, len) = compact_decode(x,z); constrain len == 5; constrain [nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]; + + } #[test] +// Issue 1144 fn test_1144() { main([0x3f, 0x1c, 0xb8, 0x99, 0xab], 3); +} + +// Issue 1169 +fn enc_test() +{ + let val1 = [0xb8,0x8f,0x61,0xe6,0xfb,0xda,0x83,0xfb,0xff,0xfa,0xbe,0x36,0x41,0x12,0x13,0x74,0x80,0x39,0x80,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]; + let val1_length = 20; + + let enc_val1 = enc(val1,val1_length); + + constrain enc_val1.0 == [0x94,0xb8,0x8f,0x61,0xe6,0xfb,0xda,0x83,0xfb,0xff,0xfa,0xbe,0x36,0x41,0x12,0x13,0x74,0x80,0x39,0x80,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]; + constrain enc_val1.1 == 21; } \ No newline at end of file diff --git a/crates/noirc_evaluator/src/ssa/ssa_gen.rs b/crates/noirc_evaluator/src/ssa/ssa_gen.rs index 8205dc8e10c..ae35855b5fa 100644 --- a/crates/noirc_evaluator/src/ssa/ssa_gen.rs +++ b/crates/noirc_evaluator/src/ssa/ssa_gen.rs @@ -790,6 +790,7 @@ impl IrGenerator { self.context.get_current_block_mut().left = 
Some(exit_block); //Exit block plumbing + let block2 = self.context.current_block; self.context.current_block = exit_block; self.context.get_current_block_mut().predecessor.push(block2); ssa_form::seal_block(&mut self.context, exit_block, entry_block); From 73df4653556a7d1c74d184e27ec5a8ca3be47af9 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 18 Apr 2023 15:42:20 -0500 Subject: [PATCH 18/63] fix: allow comptime or non comptime fields in unconstrained for loops (#1172) Fix unconstrained for loops to allow comptime or non comptime fields --- crates/noirc_frontend/src/hir/type_check/expr.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/noirc_frontend/src/hir/type_check/expr.rs b/crates/noirc_frontend/src/hir/type_check/expr.rs index e57dcdb0ffc..aba44e36d2c 100644 --- a/crates/noirc_frontend/src/hir/type_check/expr.rs +++ b/crates/noirc_frontend/src/hir/type_check/expr.rs @@ -155,7 +155,7 @@ impl<'interner> TypeChecker<'interner> { let mut unify_loop_range = |actual_type, span| { let expected_type = if self.is_unconstrained() { - Type::field(Some(span)) + Type::FieldElement(CompTime::new(self.interner)) } else { Type::comp_time(Some(span)) }; From b799c8aa4491f4f17e248a50a154386803b6d712 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 19 Apr 2023 18:04:33 +0100 Subject: [PATCH 19/63] fix: maintain ordering of return value witnesses when constructing ABI (#1177) fix: maintain ordering of return value witnesses --- crates/noirc_evaluator/src/lib.rs | 10 ++++++---- .../src/ssa/acir_gen/operations/return.rs | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 4c1b05381f5..722ecc12397 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -43,7 +43,10 @@ pub struct Evaluator { // Witnesses below `num_witnesses_abi_len` and not included in this set // correspond to 
private parameters and must not be made public. public_parameters: BTreeSet, - return_values: BTreeSet, + // The witness indices for return values are not guaranteed to be contiguous + // and increasing as for `public_parameters`. We then use a `Vec` rather + // than a `BTreeSet` to preserve this order for the ABI. + return_values: Vec, opcodes: Vec, } @@ -78,7 +81,7 @@ pub fn create_circuit( current_witness_index, opcodes, public_parameters: PublicInputs(public_parameters), - return_values: PublicInputs(return_values.clone()), + return_values: PublicInputs(return_values.iter().copied().collect()), }, np_language, is_opcode_supported, @@ -86,8 +89,7 @@ pub fn create_circuit( .map_err(|_| RuntimeErrorKind::Spanless(String::from("produced an acvm compile error")))?; let (parameters, return_type) = program.main_function_signature; - let return_witnesses: Vec = return_values.into_iter().collect(); - let abi = Abi { parameters, param_witnesses, return_type, return_witnesses }; + let abi = Abi { parameters, param_witnesses, return_type, return_witnesses: return_values }; Ok((optimized_circuit, abi)) } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs index 1e1183f13ab..3269af06d16 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs @@ -46,7 +46,7 @@ pub(crate) fn evaluate( "we do not allow private ABI inputs to be returned as public outputs", ))); } - evaluator.return_values.insert(witness); + evaluator.return_values.push(witness); } } From ef0773163fa0329a44ba6511d23b06199a30b268 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 19 Apr 2023 19:17:16 +0100 Subject: [PATCH 20/63] chore: Initial SSA refactor module (#1113) * add ssa refactor module * add more stub code * move value to its own file * add function * Rollback to simpler case * move types to types module * review * make types 
pub(crate) * allow dead code * add offline code * remove cfg * clean up * remove changes to old code * fix clippy * remove builder.rs * cargo fmt * clippy --- crates/noirc_evaluator/src/frontend.rs | 1 + .../noirc_evaluator/src/frontend/variable.rs | 23 ++ crates/noirc_evaluator/src/lib.rs | 7 + crates/noirc_evaluator/src/ssa_refactor.rs | 13 + .../src/ssa_refactor/basic_block.rs | 37 +++ .../noirc_evaluator/src/ssa_refactor/dfg.rs | 186 +++++++++++++ crates/noirc_evaluator/src/ssa_refactor/ir.rs | 5 + .../src/ssa_refactor/ir/extfunc.rs | 23 ++ .../src/ssa_refactor/ir/function.rs | 25 ++ .../src/ssa_refactor/ir/instruction.rs | 249 ++++++++++++++++++ .../src/ssa_refactor/ir/types.rs | 24 ++ .../src/ssa_refactor/ir/value.rs | 20 ++ 12 files changed, 613 insertions(+) create mode 100644 crates/noirc_evaluator/src/frontend.rs create mode 100644 crates/noirc_evaluator/src/frontend/variable.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/basic_block.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/dfg.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/function.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/types.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/value.rs diff --git a/crates/noirc_evaluator/src/frontend.rs b/crates/noirc_evaluator/src/frontend.rs new file mode 100644 index 00000000000..410f9f1a9b0 --- /dev/null +++ b/crates/noirc_evaluator/src/frontend.rs @@ -0,0 +1 @@ +pub mod variable; diff --git a/crates/noirc_evaluator/src/frontend/variable.rs b/crates/noirc_evaluator/src/frontend/variable.rs new file mode 100644 index 00000000000..449581cf93c --- /dev/null +++ 
b/crates/noirc_evaluator/src/frontend/variable.rs @@ -0,0 +1,23 @@ +/// A variable in the SSA IR. +/// By definition, a variable can only be defined once. +/// +/// As in Cranelift, we also allow variable use before definition. +/// This will produce side-effects which will need to be handled +/// before sealing a block. +pub struct Variable(u32); + +impl From for Variable { + fn from(value: u32) -> Self { + Variable(value) + } +} +impl From for Variable { + fn from(value: u16) -> Self { + Variable(value as u32) + } +} +impl From for Variable { + fn from(value: u8) -> Self { + Variable(value as u32) + } +} diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 722ecc12397..166c2d58239 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -6,6 +6,13 @@ mod errors; mod ssa; +// SSA code to create the SSA based IR +// for functions and execute different optimizations. +pub mod ssa_refactor; +// Frontend helper module to translate a different AST +// into the SSA IR. +pub mod frontend; + use acvm::{ acir::circuit::{opcodes::Opcode as AcirOpcode, Circuit, PublicInputs}, acir::native_types::{Expression, Witness}, diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa_refactor.rs new file mode 100644 index 00000000000..073b54cbf10 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor.rs @@ -0,0 +1,13 @@ +//! SSA stands for Single Static Assignment +//! The IR presented in this module will already +//! be in SSA form and will be used to apply +//! conventional optimizations like Common Subexpression +//! elimination and constant folding. +//! +//! 
This module heavily borrows from Cranelift +#[allow(dead_code)] +mod basic_block; +#[allow(dead_code)] +mod dfg; +#[allow(dead_code)] +mod ir; diff --git a/crates/noirc_evaluator/src/ssa_refactor/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/basic_block.rs new file mode 100644 index 00000000000..d6c2198b4a0 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/basic_block.rs @@ -0,0 +1,37 @@ +use super::ir::instruction::{Instruction, TerminatorInstruction}; + +/// A Basic block is a maximal collection of instructions +/// such that there are only jumps at the end of block +/// and one can only enter the block from the beginning. +/// +/// This means that if one instruction is executed in a basic +/// block, then all instructions are executed. ie single-entry single-exit. +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub(crate) struct BasicBlock { + /// Arguments to the basic block. + phi_nodes: Vec, + /// Instructions in the basic block. + instructions: Vec, + + /// A basic block is considered sealed + /// if no further predecessors will be added to it. + /// Since only filled blocks can have successors, + /// predecessors are always filled. + is_sealed: bool, + + /// The terminating instruction for the basic block. + /// + /// This will be a control flow instruction. + terminator: TerminatorInstruction, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +/// An identifier for a Basic Block. +pub(crate) struct BasicBlockId; + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +/// Arguments to the basic block. +/// We use the modern Crane-lift strategy +/// of representing phi nodes as basic block +/// arguments. 
+pub(crate) struct BlockArguments; diff --git a/crates/noirc_evaluator/src/ssa_refactor/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/dfg.rs new file mode 100644 index 00000000000..a0830b5ecc3 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/dfg.rs @@ -0,0 +1,186 @@ +use super::{ + basic_block::{BasicBlock, BasicBlockId}, + ir::{ + extfunc::{SigRef, Signature}, + instruction::{Instruction, InstructionId, Instructions}, + types::Typ, + value::{Value, ValueId}, + }, +}; +use std::collections::HashMap; + +#[derive(Debug, Default)] +/// A convenience wrapper to store `Value`s. +pub(crate) struct ValueList(Vec); + +impl ValueList { + /// Inserts an element to the back of the list and + /// returns the `position` + pub(crate) fn push(&mut self, value: ValueId) -> usize { + self.0.push(value); + self.len() - 1 + } + /// Returns the number of values in the list. + fn len(&self) -> usize { + self.0.len() + } + + /// Removes all items from the list. + fn clear(&mut self) { + self.0.clear(); + } + /// Returns the ValueId's as a slice. + pub(crate) fn as_slice(&self) -> &[ValueId] { + &self.0 + } +} +#[derive(Debug, Default)] +pub(crate) struct DataFlowGraph { + /// All of the instructions in a function + instructions: Instructions, + + /// Stores the results for a particular instruction. + /// + /// An instruction may return multiple values + /// and for this, we will also use the cranelift strategy + /// to fetch them via indices. + /// + /// Currently, we need to define them in a better way + /// Call instructions require the func signature, but + /// other instructions may need some more reading on my part + results: HashMap, + + /// Storage for all of the values defined in this + /// function. 
+ values: HashMap, + + /// Function signatures of external methods + signatures: HashMap, + + /// All blocks in a function + blocks: HashMap, +} + +impl DataFlowGraph { + /// Creates a new `empty` basic block + pub(crate) fn new_block(&mut self) -> BasicBlockId { + todo!() + } + + /// Inserts a new instruction into the DFG. + pub(crate) fn make_instruction(&mut self, instruction_data: Instruction) -> InstructionId { + let id = self.instructions.add_instruction(instruction_data); + + // Create a new vector to store the potential results + // for the instruction. + self.results.insert(id, Default::default()); + + id + } + + /// Attaches results to the instruction. + /// + /// Returns the number of results that this instruction + /// produces. + pub(crate) fn make_instruction_results( + &mut self, + instruction_id: InstructionId, + ctrl_typevar: Typ, + ) -> usize { + // Clear all of the results instructions associated with this + // instruction. + self.results.get_mut(&instruction_id).expect("all instructions should have a `result` allocation when instruction was added to the DFG").clear(); + + // Get all of the types that this instruction produces + // and append them as results. + let typs = self.instruction_result_types(instruction_id, ctrl_typevar); + let num_typs = typs.len(); + + for typ in typs { + self.append_result(instruction_id, typ); + } + + num_typs + } + + /// Return the result types of this instruction. + /// + /// For example, an addition instruction will return + /// one type which is the type of the operands involved. + /// This is the `ctrl_typevar` in this case. + fn instruction_result_types( + &self, + instruction_id: InstructionId, + ctrl_typevar: Typ, + ) -> Vec { + // Check if it is a call instruction. If so, we don't support that yet + let ins_data = self.instructions.get_instruction(instruction_id); + match ins_data { + Instruction::Call { .. 
} => todo!("function calls are not supported yet"), + ins => ins.return_types(ctrl_typevar), + } + } + + /// Appends a result type to the instruction. + pub(crate) fn append_result(&mut self, instruction_id: InstructionId, typ: Typ) -> ValueId { + let next_value_id = self.next_value(); + + // Add value to the list of results for this instruction + let res_position = self.results.get_mut(&instruction_id).unwrap().push(next_value_id); + + self.make_value(Value::Instruction { + typ, + position: res_position as u16, + instruction: instruction_id, + }) + } + + /// Stores a value and returns its `ValueId` reference. + fn make_value(&mut self, data: Value) -> ValueId { + let next_value = self.next_value(); + + self.values.insert(next_value, data); + + next_value + } + + /// Returns the next `ValueId` + fn next_value(&self) -> ValueId { + ValueId(self.values.len() as u32) + } + + /// Returns the number of instructions + /// inserted into functions. + pub(crate) fn num_instructions(&self) -> usize { + self.instructions.num_instructions() + } + + /// Returns all of result values which are attached to this instruction. 
+ pub(crate) fn instruction_results(&self, instruction_id: InstructionId) -> &[ValueId] { + self.results.get(&instruction_id).expect("expected a list of Values").as_slice() + } +} + +#[cfg(test)] +mod tests { + use super::DataFlowGraph; + use crate::ssa_refactor::ir::{ + instruction::Instruction, + types::{NumericType, Typ}, + }; + use acvm::FieldElement; + + #[test] + fn make_instruction() { + let mut dfg = DataFlowGraph::default(); + let ins = Instruction::Immediate { value: FieldElement::from(0u128) }; + let ins_id = dfg.make_instruction(ins); + + let num_results = + dfg.make_instruction_results(ins_id, Typ::Numeric(NumericType::NativeField)); + + let results = dfg.instruction_results(ins_id); + + assert_eq!(results.len(), num_results); + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs new file mode 100644 index 00000000000..aa07393203d --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -0,0 +1,5 @@ +pub(crate) mod extfunc; +mod function; +pub(crate) mod instruction; +pub(crate) mod types; +pub(crate) mod value; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs new file mode 100644 index 00000000000..b0e573822cf --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs @@ -0,0 +1,23 @@ +//! Like Crane-lift all functions outside of the current function is seen as +//! external. +//! To reference external functions, one uses + +use super::types::Typ; + +#[derive(Debug, Default, Clone)] +pub(crate) struct Signature { + pub(crate) params: Vec, + pub(crate) returns: Vec, +} +/// Reference to a `Signature` in a map inside of +/// a functions DFG. 
+#[derive(Debug, Default, Clone, Copy)] +pub(crate) struct SigRef(pub(crate) u32); + +#[test] +fn sign_smoke() { + let mut signature = Signature::default(); + + signature.params.push(Typ::Numeric(super::types::NumericType::NativeField)); + signature.returns.push(Typ::Numeric(super::types::NumericType::Unsigned { bit_size: 32 })); +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs new file mode 100644 index 00000000000..331c0d656d6 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -0,0 +1,25 @@ +use crate::ssa_refactor::basic_block::{BasicBlock, BasicBlockId}; + +use super::instruction::Instruction; + +use noirc_errors::Location; +use std::collections::HashMap; + +/// A function holds a list of instructions. +/// These instructions are further grouped into +/// Basic blocks +#[derive(Debug)] +pub(crate) struct Function { + /// Basic blocks associated to this particular function + basic_blocks: HashMap, + + /// Maps instructions to source locations + source_locations: HashMap, + + /// The first basic block in the function + entry_block: BasicBlockId, +} + +/// FunctionId is a reference for a function +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +pub(crate) struct FunctionId(pub(crate) u32); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs new file mode 100644 index 00000000000..04d933d8f9e --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -0,0 +1,249 @@ +use std::collections::HashMap; + +use acvm::FieldElement; + +use super::{function::FunctionId, types::Typ, value::ValueId}; +use crate::ssa_refactor::basic_block::{BasicBlockId, BlockArguments}; + +/// Map of instructions. +/// This is similar to Arena. 
+#[derive(Debug, Default)] +pub(crate) struct Instructions(HashMap); + +impl Instructions { + /// Adds an instruction to the map and returns a + /// reference to the instruction. + pub(crate) fn add_instruction(&mut self, ins: Instruction) -> InstructionId { + let id = InstructionId(self.0.len() as u32); + self.0.insert(id, ins); + id + } + + /// Fetch the instruction corresponding to this + /// instruction id. + /// + /// Panics if there is no such instruction, since instructions cannot be + /// deleted. + pub(crate) fn get_instruction(&self, ins_id: InstructionId) -> &Instruction { + self.0.get(&ins_id).expect("ICE: instructions cannot be deleted") + } + + /// Returns the number of instructions stored in the map. + pub(crate) fn num_instructions(&self) -> usize { + self.0.len() + } +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +/// Reference to an instruction +pub(crate) struct InstructionId(u32); + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +/// These are similar to built-ins in other languages. +/// These can be classified under two categories: +/// - Opcodes which the IR knows the target machine has +/// special support for. (LowLevel) +/// - Opcodes which have no function definition in the +/// source code and must be processed by the IR. An example +/// of this is println. +pub(crate) struct IntrinsicOpcodes; + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +/// Instructions are used to perform tasks. +/// The instructions that the IR is able to specify are listed below. +pub(crate) enum Instruction { + // Binary Operations + Binary(Binary), + + // Unary Operations + // + /// Converts `Value` into Typ + Cast(ValueId, Typ), + + /// Computes a bit wise not + Not(ValueId), + + /// Truncates `value` to `bit_size` + Truncate { + value: ValueId, + bit_size: u32, + max_bit_size: u32, + }, + + /// Constrains a value to be equal to true + Constrain(ValueId), + + /// Performs a function call with a list of its arguments. 
+ Call { + func: FunctionId, + arguments: Vec, + }, + /// Performs a call to an intrinsic function and stores the + /// results in `return_arguments`. + Intrinsic { + func: IntrinsicOpcodes, + arguments: Vec, + }, + + /// Loads a value from memory. + Load(ValueId), + + /// Writes a value to memory. + Store { + destination: ValueId, + value: ValueId, + }, + + /// Stores an Immediate value + Immediate { + value: FieldElement, + }, +} + +impl Instruction { + /// Returns the number of results that this instruction + /// produces. + pub(crate) fn num_fixed_results(&self) -> usize { + match self { + Instruction::Binary(_) => 1, + Instruction::Cast(_, _) => 0, + Instruction::Not(_) => 1, + Instruction::Truncate { .. } => 1, + Instruction::Constrain(_) => 0, + // This returns 0 as the result depends on the function being called + Instruction::Call { .. } => 0, + // This also returns 0, but we could get it a compile time, + // since we know the signatures for the intrinsics + Instruction::Intrinsic { .. } => 0, + Instruction::Load(_) => 1, + Instruction::Store { .. } => 0, + Instruction::Immediate { .. } => 1, + } + } + + /// Returns the number of arguments required for a call + pub(crate) fn num_fixed_arguments(&self) -> usize { + match self { + Instruction::Binary(_) => 2, + Instruction::Cast(_, _) => 1, + Instruction::Not(_) => 1, + Instruction::Truncate { .. } => 1, + Instruction::Constrain(_) => 1, + // This returns 0 as the arguments depend on the function being called + Instruction::Call { .. } => 0, + // This also returns 0, but we could get it a compile time, + // since we know the function definition for the intrinsics + Instruction::Intrinsic { .. } => 0, + Instruction::Load(_) => 1, + Instruction::Store { .. } => 2, + Instruction::Immediate { .. } => 0, + } + } + + /// Returns the types that this instruction will return. 
+ pub(crate) fn return_types(&self, ctrl_typevar: Typ) -> Vec { + match self { + Instruction::Binary(_) => vec![ctrl_typevar], + Instruction::Cast(_, typ) => vec![*typ], + Instruction::Not(_) => vec![ctrl_typevar], + Instruction::Truncate { .. } => vec![ctrl_typevar], + Instruction::Constrain(_) => vec![], + Instruction::Call { .. } => vec![], + Instruction::Intrinsic { .. } => vec![], + Instruction::Load(_) => vec![ctrl_typevar], + Instruction::Store { .. } => vec![], + Instruction::Immediate { .. } => vec![], + } + } +} + +/// These are operations which can exit a basic block +/// ie control flow type operations +/// +/// Since our IR needs to be in SSA form, it makes sense +/// to split up instructions like this, as we are sure that these instructions +/// will not be in the list of instructions for a basic block. +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub(crate) enum TerminatorInstruction { + /// Control flow + /// + /// Jump If + /// + /// Jumps to the specified `destination` with + /// arguments, if the condition + /// if the condition is true. + JmpIf { condition: ValueId, destination: BasicBlockId, arguments: BlockArguments }, + /// Unconditional Jump + /// + /// Jumps to specified `destination` with `arguments` + Jmp { destination: BasicBlockId, arguments: BlockArguments }, +} + +/// A binary instruction in the IR. +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub(crate) struct Binary { + /// Left hand side of the binary operation + pub(crate) lhs: ValueId, + /// Right hand side of the binary operation + pub(crate) rhs: ValueId, + /// The binary operation to apply + pub(crate) operator: BinaryOp, +} + +/// Binary Operations allowed in the IR. +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub(crate) enum BinaryOp { + /// Addition of two types. + /// The result will have the same type as + /// the operands. + Add, + /// Subtraction of two types. + /// The result will have the same type as + /// the operands. 
+ Sub, + /// Multiplication of two types. + /// The result will have the same type as + /// the operands. + Mul, + /// Division of two types. + /// The result will have the same type as + /// the operands. + Div, + /// Checks whether two types are equal. + /// Returns true if the types were equal and + /// false otherwise. + Eq, + /// Checks whether two types are equal. + /// Returns true if the types were not equal and + /// false otherwise. + Ne, +} + +#[test] +fn smoke_instructions_map_duplicate() { + let ins = Instruction::Cast(ValueId(0), Typ::Unit); + let same_ins = Instruction::Cast(ValueId(0), Typ::Unit); + + let mut ins_map = Instructions::default(); + + // Document what happens when we insert the same instruction twice + let id = ins_map.add_instruction(ins); + let id_same_ins = ins_map.add_instruction(same_ins); + + // The map is quite naive and does not check if the instruction has ben inserted + // before. We simply assign a different Id. + assert_ne!(id, id_same_ins) +} + +#[test] +fn num_instructions_smoke() { + let n = 100; + + let mut ins_map = Instructions::default(); + for i in 0..n { + let ins = Instruction::Cast(ValueId(i as u32), Typ::Unit); + ins_map.add_instruction(ins); + } + + assert_eq!(n, ins_map.num_instructions()) +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs new file mode 100644 index 00000000000..9cf75e5ae7f --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -0,0 +1,24 @@ +/// A numeric type in the Intermediate representation +/// Note: we class NativeField as a numeric type +/// though we also apply limitations to it, such as not +/// being able to compare two native fields, whereas this is +/// something that you can do with a signed/unsigned integer. +/// +/// Fields do not have a notion of ordering, so this distinction +/// is reasonable. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub(crate) enum NumericType { + Signed { bit_size: u32 }, + Unsigned { bit_size: u32 }, + NativeField, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +/// All types representable in the IR. +pub(crate) enum Typ { + /// Represents numeric types in the IR + /// including field elements + Numeric(NumericType), + /// Represents the absence of a Type. + Unit, +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs new file mode 100644 index 00000000000..c245cf95f24 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -0,0 +1,20 @@ +use super::{instruction::InstructionId, types::Typ}; + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +/// Value is the most basic type allowed in the IR. +/// Transition Note: This is similar to `NodeId` in our previous IR. +pub(crate) struct ValueId(pub(crate) u32); + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub(crate) enum Value { + /// This value was created due to an instruction + /// + /// instruction -- This is the instruction which defined it + /// typ -- This is the `Type` of the instruction + /// position -- Returns the position in the results + /// vector that this `Value` is located. + /// Example, if you add two numbers together, then the resulting + /// value would have position `0`, the typ would be the type + /// of the operands, and the instruction would map to an add instruction. 
+ Instruction { typ: Typ, position: u16, instruction: InstructionId }, +} From 33feb2bcd71b1040d70d1f51a7377594db557c19 Mon Sep 17 00:00:00 2001 From: Ahmad Afuni Date: Wed, 19 Apr 2023 21:06:18 +0200 Subject: [PATCH 21/63] feat: Add Poseidon-BN254 hash functions (#1176) * Move Poseidon tests * Add Poseidon hash functions and modify test * Move (failing) Poseidon tests back * Remove commented-out code --- .../Nargo.toml | 0 .../Prover.toml | 4 +- .../test_data/poseidon_bn254_hash/src/main.nr | 10 ++ .../test_data/poseidonperm_x5_254/src/main.nr | 10 -- .../poseidonsponge_x5_254/src/main.nr | 5 - noir_stdlib/src/hash/poseidon/bn254.nr | 146 ++++++++++++++++++ 6 files changed, 158 insertions(+), 17 deletions(-) rename crates/nargo/tests/test_data/{poseidonperm_x5_254 => poseidon_bn254_hash}/Nargo.toml (100%) rename crates/nargo/tests/test_data/{poseidonperm_x5_254 => poseidon_bn254_hash}/Prover.toml (83%) create mode 100644 crates/nargo/tests/test_data/poseidon_bn254_hash/src/main.nr delete mode 100644 crates/nargo/tests/test_data/poseidonperm_x5_254/src/main.nr diff --git a/crates/nargo/tests/test_data/poseidonperm_x5_254/Nargo.toml b/crates/nargo/tests/test_data/poseidon_bn254_hash/Nargo.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidonperm_x5_254/Nargo.toml rename to crates/nargo/tests/test_data/poseidon_bn254_hash/Nargo.toml diff --git a/crates/nargo/tests/test_data/poseidonperm_x5_254/Prover.toml b/crates/nargo/tests/test_data/poseidon_bn254_hash/Prover.toml similarity index 83% rename from crates/nargo/tests/test_data/poseidonperm_x5_254/Prover.toml rename to crates/nargo/tests/test_data/poseidon_bn254_hash/Prover.toml index 833b6c9961f..8eecf9a3db2 100644 --- a/crates/nargo/tests/test_data/poseidonperm_x5_254/Prover.toml +++ b/crates/nargo/tests/test_data/poseidon_bn254_hash/Prover.toml @@ -1,4 +1,4 @@ -x1 = [0,1,2] +x1 = [1,2] y1 = "0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a" -x2 = [0,1,2,3,4] +x2 = [1,2,3,4] 
y2 = "0x299c867db6c1fdd79dcefa40e4510b9837e60ebb1ce0663dbaa525df65250465" diff --git a/crates/nargo/tests/test_data/poseidon_bn254_hash/src/main.nr b/crates/nargo/tests/test_data/poseidon_bn254_hash/src/main.nr new file mode 100644 index 00000000000..f2f1af7ab7d --- /dev/null +++ b/crates/nargo/tests/test_data/poseidon_bn254_hash/src/main.nr @@ -0,0 +1,10 @@ +use dep::std::hash::poseidon; + +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) +{ + let hash1 = poseidon::bn254::hash_2(x1); + constrain hash1 == y1; + + let hash2 = poseidon::bn254::hash_4(x2); + constrain hash2 == y2; +} diff --git a/crates/nargo/tests/test_data/poseidonperm_x5_254/src/main.nr b/crates/nargo/tests/test_data/poseidonperm_x5_254/src/main.nr deleted file mode 100644 index 2f7f0ab3e56..00000000000 --- a/crates/nargo/tests/test_data/poseidonperm_x5_254/src/main.nr +++ /dev/null @@ -1,10 +0,0 @@ -use dep::std::hash::poseidon; - -fn main(x1: [Field; 3], y1: pub Field, x2: [Field; 5], y2: pub Field) -{ - let perm1 = poseidon::bn254::perm::x5_3(x1); - constrain perm1[0] == y1; - - let perm2 = poseidon::bn254::perm::x5_5(x2); - constrain perm2[0] == y2; -} diff --git a/crates/nargo/tests/test_data/poseidonsponge_x5_254/src/main.nr b/crates/nargo/tests/test_data/poseidonsponge_x5_254/src/main.nr index c6a4be98b7a..f5135897f19 100644 --- a/crates/nargo/tests/test_data/poseidonsponge_x5_254/src/main.nr +++ b/crates/nargo/tests/test_data/poseidonsponge_x5_254/src/main.nr @@ -6,9 +6,4 @@ fn main(x: [Field; 7]) let result = poseidon::bn254::sponge(x); constrain result == 0x080ae1669d62f0197190573d4a325bfb8d8fc201ce3127cbac0c47a7ac81ac48; - - // Test unoptimised sponge - let result2 = poseidon::absorb(poseidon::bn254::consts::x5_5_config(), [0;5], 4, 1, x)[1]; - - constrain result2 == result; } diff --git a/noir_stdlib/src/hash/poseidon/bn254.nr b/noir_stdlib/src/hash/poseidon/bn254.nr index 421916a564b..355e7d13a5f 100644 --- a/noir_stdlib/src/hash/poseidon/bn254.nr +++ 
b/noir_stdlib/src/hash/poseidon/bn254.nr @@ -101,3 +101,149 @@ fn absorb( fn sponge(msg: [Field; N]) -> Field { absorb(consts::x5_5_config(), [0;5], 4, 1, msg)[1] } + +// Various instances of the Poseidon hash function +// Consistent with Circom's implementation +fn hash_1(input: [Field; 1]) -> Field { + let mut state = [0; 2]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_2(state)[0] +} + +fn hash_2(input: [Field; 2]) -> Field { + let mut state = [0; 3]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_3(state)[0] +} + +fn hash_3(input: [Field; 3]) -> Field { + let mut state = [0; 4]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_4(state)[0] +} + +fn hash_4(input: [Field; 4]) -> Field { + let mut state = [0; 5]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_5(state)[0] +} + +fn hash_5(input: [Field; 5]) -> Field { + let mut state = [0; 6]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_6(state)[0] +} + +fn hash_6(input: [Field; 6]) -> Field { + let mut state = [0; 7]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_7(state)[0] +} + +fn hash_7(input: [Field; 7]) -> Field { + let mut state = [0; 8]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_8(state)[0] +} + +fn hash_8(input: [Field; 8]) -> Field { + let mut state = [0; 9]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_9(state)[0] +} + +fn hash_9(input: [Field; 9]) -> Field { + let mut state = [0; 10]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_10(state)[0] +} + +fn hash_10(input: [Field; 10]) -> Field { + let mut state = [0; 11]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_11(state)[0] +} + +fn hash_11(input: [Field; 11]) -> Field { + let mut state = [0; 12]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_12(state)[0] +} + +fn hash_12(input: [Field; 12]) 
-> Field { + let mut state = [0; 13]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_13(state)[0] +} + +fn hash_13(input: [Field; 13]) -> Field { + let mut state = [0; 14]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_14(state)[0] +} + +fn hash_14(input: [Field; 14]) -> Field { + let mut state = [0; 15]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_15(state)[0] +} + +fn hash_15(input: [Field; 15]) -> Field { + let mut state = [0; 16]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_16(state)[0] +} + +fn hash_16(input: [Field; 16]) -> Field { + let mut state = [0; 17]; + for i in 0..input.len() { + state[i+1] = input[i]; + } + + perm::x5_17(state)[0] +} From a675e07cd4fc72a697df80c1582461d249511264 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 19 Apr 2023 23:36:35 +0100 Subject: [PATCH 22/63] chore: simplify setup code in `noir_integration` test (#1180) * chore: refactor integration test code * chore: remove misleading comment --- crates/nargo_cli/Cargo.toml | 2 +- crates/nargo_cli/src/cli/mod.rs | 12 +-- crates/nargo_cli/tests/prove_and_verify.rs | 108 ++++++++++++++------- 3 files changed, 79 insertions(+), 43 deletions(-) diff --git a/crates/nargo_cli/Cargo.toml b/crates/nargo_cli/Cargo.toml index 57de9aaa264..49c9428f546 100644 --- a/crates/nargo_cli/Cargo.toml +++ b/crates/nargo_cli/Cargo.toml @@ -34,7 +34,6 @@ const_format = "0.2.30" hex = "0.4.2" serde_json = "1.0" termcolor = "1.1.2" -tempdir = "0.3.7" color-eyre = "0.6.2" # Backends @@ -42,6 +41,7 @@ aztec_backend = { optional = true, package = "barretenberg_static_lib", git = "h aztec_wasm_backend = { optional = true, package = "barretenberg_wasm", git = "https://github.com/noir-lang/aztec_backend", rev = "26178359a2251e885f15f0a4d1a686afda04aec9" } [dev-dependencies] +tempdir = "0.3.7" assert_cmd = "2.0.8" assert_fs = "1.0.10" predicates = "2.1.5" 
diff --git a/crates/nargo_cli/src/cli/mod.rs b/crates/nargo_cli/src/cli/mod.rs index e713bdd47fc..2bb92925e59 100644 --- a/crates/nargo_cli/src/cli/mod.rs +++ b/crates/nargo_cli/src/cli/mod.rs @@ -6,7 +6,7 @@ use std::path::{Path, PathBuf}; use color_eyre::eyre; -use crate::find_package_root; +use crate::{constants::PROOFS_DIR, find_package_root}; mod fs; @@ -85,16 +85,14 @@ pub fn start_cli() -> eyre::Result<()> { } // helper function which tests noir programs by trying to generate a proof and verify it -pub fn prove_and_verify(proof_name: &str, prg_dir: &Path, show_ssa: bool) -> bool { - use tempdir::TempDir; - - let tmp_dir = TempDir::new("p_and_v_tests").unwrap(); +pub fn prove_and_verify(proof_name: &str, program_dir: &Path, show_ssa: bool) -> bool { let compile_options = CompileOptions { show_ssa, allow_warnings: false, show_output: false }; + let proof_dir = program_dir.join(PROOFS_DIR); match prove_cmd::prove_with_path( Some(proof_name.to_owned()), - prg_dir, - &tmp_dir.into_path(), + program_dir, + &proof_dir, None, true, &compile_options, diff --git a/crates/nargo_cli/tests/prove_and_verify.rs b/crates/nargo_cli/tests/prove_and_verify.rs index 0006dd1b986..15e860bf059 100644 --- a/crates/nargo_cli/tests/prove_and_verify.rs +++ b/crates/nargo_cli/tests/prove_and_verify.rs @@ -1,3 +1,5 @@ +use tempdir::TempDir; + use std::collections::BTreeMap; use std::fs; @@ -6,11 +8,14 @@ const TEST_DATA_DIR: &str = "test_data"; const CONFIG_FILE: &str = "config.toml"; mod tests { + use std::path::Path; + use super::*; - fn load_conf(conf_path: &str) -> BTreeMap> { - // Parse config.toml into a BTreeMap, do not fail if config file does not exist. 
- let mut conf_data = match toml::from_str(conf_path) { + fn load_conf(conf_path: &Path) -> BTreeMap> { + let config_str = std::fs::read_to_string(conf_path).unwrap(); + + let mut conf_data = match toml::from_str(&config_str) { Ok(t) => t, Err(_) => BTreeMap::from([ ("exclude".to_string(), Vec::new()), @@ -26,42 +31,75 @@ mod tests { conf_data } + /// Copy files from source to destination recursively. + pub fn copy_recursively( + source: impl AsRef, + destination: impl AsRef, + ) -> std::io::Result<()> { + fs::create_dir_all(&destination)?; + for entry in fs::read_dir(source)? { + let entry = entry?; + let filetype = entry.file_type()?; + if filetype.is_dir() { + copy_recursively(entry.path(), destination.as_ref().join(entry.file_name()))?; + } else { + fs::copy(entry.path(), destination.as_ref().join(entry.file_name()))?; + } + } + Ok(()) + } + #[test] fn noir_integration() { - let mut current_dir = std::env::current_dir().unwrap(); - current_dir.push(TEST_DIR); - current_dir.push(TEST_DATA_DIR); - - //load config.tml file from test_data directory - current_dir.push(CONFIG_FILE); - let config_path = std::fs::read_to_string(current_dir).unwrap(); - let config_data: BTreeMap> = load_conf(&config_path); - let mut current_dir = std::env::current_dir().unwrap(); - current_dir.push(TEST_DIR); - current_dir.push(TEST_DATA_DIR); - - for c in fs::read_dir(current_dir.as_path()).unwrap().flatten() { - if let Ok(test_name) = c.file_name().into_string() { - println!("Running test {test_name:?}"); - if c.path().is_dir() && !config_data["exclude"].contains(&test_name) { - let verified = std::panic::catch_unwind(|| { - nargo_cli::cli::prove_and_verify("pp", &c.path(), false) - }); - - let r = match verified { - Ok(result) => result, - Err(_) => { - panic!("\n\n\nPanic occurred while running test {:?} (ignore the following panic)", c.file_name()); - } - }; - - if config_data["fail"].contains(&test_name) { - assert!(!r, "{:?} should not succeed", c.file_name()); - } else { - 
assert!(r, "verification fail for {:?}", c.file_name()); - } + let current_dir = std::env::current_dir().unwrap(); + + let test_data_dir = current_dir.join(TEST_DIR).join(TEST_DATA_DIR); + + // Load config.toml file from test_data directory + let config_file_path = test_data_dir.join(CONFIG_FILE); + let config_data: BTreeMap> = load_conf(&config_file_path); + + // Copy all the test cases into a temp dir so we don't leave artifacts around. + let tmp_dir = TempDir::new("p_and_v_tests").unwrap(); + copy_recursively(test_data_dir, &tmp_dir) + .expect("failed to copy test cases to temp directory"); + + let test_case_dirs = + fs::read_dir(&tmp_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + + for test_dir in test_case_dirs { + let test_name = + test_dir.file_name().into_string().expect("Directory can't be converted to string"); + let test_program_dir = &test_dir.path(); + + if config_data["exclude"].contains(&test_name) { + println!("Skipping test {test_name}"); + continue; + } + + println!("Running test {test_name}"); + + let verified = std::panic::catch_unwind(|| { + nargo_cli::cli::prove_and_verify("pp", test_program_dir, false) + }); + + let r = match verified { + Ok(result) => result, + Err(_) => { + panic!( + "\n\n\nPanic occurred while running test {test_name} (ignore the following panic)" + ); } + }; + + if config_data["fail"].contains(&test_name) { + assert!(!r, "{:?} should not succeed", test_name); + } else { + assert!(r, "verification fail for {:?}", test_name); } } + + // Ensure that temp dir remains alive until all tests have run. 
+ drop(tmp_dir); } } From 528a2a441cfe094885cc8f26ffba865f3a0b5c0c Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 20 Apr 2023 00:30:29 +0100 Subject: [PATCH 23/63] fix(nargo): restore `nargo codegen-verifier` functionality (#1185) --- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 6 ++-- crates/nargo_cli/tests/codegen-verifier.rs | 33 +++++++++++++++++++ 2 files changed, 37 insertions(+), 2 deletions(-) create mode 100644 crates/nargo_cli/tests/codegen-verifier.rs diff --git a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs index 3707214102e..4e713ff4a42 100644 --- a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -1,8 +1,9 @@ use super::fs::{create_named_dir, write_to_file}; use super::NargoConfig; use crate::{cli::compile_cmd::compile_circuit, constants::CONTRACT_DIR, errors::CliError}; +use acvm::SmartContract; use clap::Args; -use nargo::ops::{codegen_verifier, preprocess_program}; +use nargo::ops::preprocess_program; use noirc_driver::CompileOptions; /// Generates a Solidity verifier smart contract for the program @@ -18,7 +19,8 @@ pub(crate) fn run(args: CodegenVerifierCommand, config: NargoConfig) -> Result<( let compiled_program = compile_circuit(&backend, &config.program_dir, &args.compile_options)?; let preprocessed_program = preprocess_program(&backend, compiled_program)?; - let smart_contract_string = codegen_verifier(&backend, &preprocessed_program.verification_key)?; + #[allow(deprecated)] + let smart_contract_string = backend.eth_contract_from_cs(preprocessed_program.bytecode); let contract_dir = config.program_dir.join(CONTRACT_DIR); create_named_dir(&contract_dir, "contract"); diff --git a/crates/nargo_cli/tests/codegen-verifier.rs b/crates/nargo_cli/tests/codegen-verifier.rs new file mode 100644 index 00000000000..3e4dc1dc745 --- /dev/null +++ 
b/crates/nargo_cli/tests/codegen-verifier.rs @@ -0,0 +1,33 @@ +//! This integration test aims to check that the `nargo codegen-verifier` will successfully create a +//! file containing a verifier for a simple program. + +use assert_cmd::prelude::*; +use predicates::prelude::*; +use std::process::Command; + +use assert_fs::prelude::{PathAssert, PathChild}; + +#[test] +fn simple_verifier_codegen() { + let test_dir = assert_fs::TempDir::new().unwrap(); + std::env::set_current_dir(&test_dir).unwrap(); + + // Create trivial program + let project_name = "hello_world"; + let project_dir = test_dir.child(project_name); + + let mut cmd = Command::cargo_bin("nargo").unwrap(); + cmd.arg("new").arg(project_name); + cmd.assert().success(); + + std::env::set_current_dir(&project_dir).unwrap(); + + // Run `nargo codegen-verifier` + let mut cmd = Command::cargo_bin("nargo").unwrap(); + cmd.arg("codegen-verifier"); + cmd.assert() + .success() + .stdout(predicate::str::contains("Contract successfully created and located at")); + + project_dir.child("contract").child("plonk_vk.sol").assert(predicate::path::is_file()); +} From 42b8a3e12df51167912fadbcaee566def9184363 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Apr 2023 23:46:01 +0000 Subject: [PATCH 24/63] chore(deps): bump h2 from 0.3.16 to 0.3.18 (#1186) Bumps [h2](https://github.com/hyperium/h2) from 0.3.16 to 0.3.18. - [Release notes](https://github.com/hyperium/h2/releases) - [Changelog](https://github.com/hyperium/h2/blob/master/CHANGELOG.md) - [Commits](https://github.com/hyperium/h2/compare/v0.3.16...v0.3.18) --- updated-dependencies: - dependency-name: h2 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5d4fafa61cf..dfd02bbb138 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1489,9 +1489,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.16" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5be7b54589b581f624f566bf5d8eb2bab1db736c51528720b6bd36b96b55924d" +checksum = "17f8a914c2987b688368b5138aa05321db91f4090cf26118185672ad588bce21" dependencies = [ "bytes", "fnv", From 750ed7793f5a07bc361b56c66f041cb4097219e3 Mon Sep 17 00:00:00 2001 From: Jonathan Bursztyn Date: Thu, 20 Apr 2023 11:19:55 +0100 Subject: [PATCH 25/63] feat: bump noir-source-resolver version (#1182) Bump noir-souce-resolver version --- crates/wasm/build-wasm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/wasm/build-wasm b/crates/wasm/build-wasm index 0762d12fff3..6b574b71d03 100755 --- a/crates/wasm/build-wasm +++ b/crates/wasm/build-wasm @@ -18,7 +18,7 @@ else VERSION_APPENDIX="-NOGIT" fi -jq -s '.[0] * .[1]' pkg/nodejs/package.json pkg/web/package.json | jq '.files = ["nodejs", "web", "package.json"]' | jq ".version += \"$VERSION_APPENDIX\"" | jq '.main = "./nodejs/" + .main | .module = "./web/" + .module | .types = "./web/" + .types | .peerDependencies = { "@noir-lang/noir-source-resolver": "1.1.1" }' | tee ./pkg/package.json +jq -s '.[0] * .[1]' pkg/nodejs/package.json pkg/web/package.json | jq '.files = ["nodejs", "web", "package.json"]' | jq ".version += \"$VERSION_APPENDIX\"" | jq '.main = "./nodejs/" + .main | .module = "./web/" + .module | .types = "./web/" + .types | .peerDependencies = { "@noir-lang/noir-source-resolver": "1.1.2" }' | tee ./pkg/package.json rm pkg/nodejs/package.json pkg/nodejs/README.md pkg/nodejs/.gitignore From e1ba4f82c6ad90a8f11c433b66b42a20efcea8ed 
Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 20 Apr 2023 10:08:11 -0400 Subject: [PATCH 26/63] chore(ssa refactor): Add DenseMap and SparseMap types (#1184) * Add DenseMap and SparseMap * Update crates/noirc_evaluator/src/ssa_refactor/ir/map.rs Co-authored-by: kevaundray * Update crates/noirc_evaluator/src/ssa_refactor/ir/map.rs Co-authored-by: kevaundray * Apply suggestions from code review Co-authored-by: kevaundray * Apply cfg test suggestion * Revert removal of Cast * Fix typo --------- Co-authored-by: kevaundray --- .../noirc_evaluator/src/ssa_refactor/dfg.rs | 65 +++---- crates/noirc_evaluator/src/ssa_refactor/ir.rs | 1 + .../src/ssa_refactor/ir/extfunc.rs | 17 +- .../src/ssa_refactor/ir/instruction.rs | 95 +++------- .../src/ssa_refactor/ir/map.rs | 167 ++++++++++++++++++ .../src/ssa_refactor/ir/types.rs | 9 +- .../src/ssa_refactor/ir/value.rs | 11 +- 7 files changed, 239 insertions(+), 126 deletions(-) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/map.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/dfg.rs index a0830b5ecc3..6dcee5212e2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/dfg.rs @@ -1,9 +1,10 @@ use super::{ basic_block::{BasicBlock, BasicBlockId}, ir::{ - extfunc::{SigRef, Signature}, + extfunc::Signature, instruction::{Instruction, InstructionId, Instructions}, - types::Typ, + map::{Id, SparseMap}, + types::Type, value::{Value, ValueId}, }, }; @@ -11,7 +12,7 @@ use std::collections::HashMap; #[derive(Debug, Default)] /// A convenience wrapper to store `Value`s. 
-pub(crate) struct ValueList(Vec); +pub(crate) struct ValueList(Vec>); impl ValueList { /// Inserts an element to the back of the list and @@ -34,6 +35,7 @@ impl ValueList { &self.0 } } + #[derive(Debug, Default)] pub(crate) struct DataFlowGraph { /// All of the instructions in a function @@ -52,13 +54,13 @@ pub(crate) struct DataFlowGraph { /// Storage for all of the values defined in this /// function. - values: HashMap, + values: SparseMap, /// Function signatures of external methods - signatures: HashMap, + signatures: SparseMap, /// All blocks in a function - blocks: HashMap, + blocks: SparseMap, } impl DataFlowGraph { @@ -69,12 +71,10 @@ impl DataFlowGraph { /// Inserts a new instruction into the DFG. pub(crate) fn make_instruction(&mut self, instruction_data: Instruction) -> InstructionId { - let id = self.instructions.add_instruction(instruction_data); + let id = self.instructions.push(instruction_data); - // Create a new vector to store the potential results - // for the instruction. + // Create a new vector to store the potential results for the instruction. self.results.insert(id, Default::default()); - id } @@ -85,7 +85,7 @@ impl DataFlowGraph { pub(crate) fn make_instruction_results( &mut self, instruction_id: InstructionId, - ctrl_typevar: Typ, + ctrl_typevar: Type, ) -> usize { // Clear all of the results instructions associated with this // instruction. @@ -111,10 +111,10 @@ impl DataFlowGraph { fn instruction_result_types( &self, instruction_id: InstructionId, - ctrl_typevar: Typ, - ) -> Vec { + ctrl_typevar: Type, + ) -> Vec { // Check if it is a call instruction. If so, we don't support that yet - let ins_data = self.instructions.get_instruction(instruction_id); + let ins_data = &self.instructions[instruction_id]; match ins_data { Instruction::Call { .. } => todo!("function calls are not supported yet"), ins => ins.return_types(ctrl_typevar), @@ -122,37 +122,26 @@ impl DataFlowGraph { } /// Appends a result type to the instruction. 
- pub(crate) fn append_result(&mut self, instruction_id: InstructionId, typ: Typ) -> ValueId { - let next_value_id = self.next_value(); + pub(crate) fn append_result(&mut self, instruction_id: InstructionId, typ: Type) -> ValueId { + let results = self.results.get_mut(&instruction_id).unwrap(); + let expected_res_position = results.len(); - // Add value to the list of results for this instruction - let res_position = self.results.get_mut(&instruction_id).unwrap().push(next_value_id); - - self.make_value(Value::Instruction { + let value_id = self.values.push(Value::Instruction { typ, - position: res_position as u16, + position: expected_res_position as u16, instruction: instruction_id, - }) - } + }); - /// Stores a value and returns its `ValueId` reference. - fn make_value(&mut self, data: Value) -> ValueId { - let next_value = self.next_value(); - - self.values.insert(next_value, data); - - next_value - } - - /// Returns the next `ValueId` - fn next_value(&self) -> ValueId { - ValueId(self.values.len() as u32) + // Add value to the list of results for this instruction + let actual_res_position = results.push(value_id); + assert_eq!(actual_res_position, expected_res_position); + value_id } /// Returns the number of instructions /// inserted into functions. pub(crate) fn num_instructions(&self) -> usize { - self.instructions.num_instructions() + self.instructions.len() } /// Returns all of result values which are attached to this instruction. 
@@ -166,7 +155,7 @@ mod tests { use super::DataFlowGraph; use crate::ssa_refactor::ir::{ instruction::Instruction, - types::{NumericType, Typ}, + types::{NumericType, Type}, }; use acvm::FieldElement; @@ -177,7 +166,7 @@ mod tests { let ins_id = dfg.make_instruction(ins); let num_results = - dfg.make_instruction_results(ins_id, Typ::Numeric(NumericType::NativeField)); + dfg.make_instruction_results(ins_id, Type::Numeric(NumericType::NativeField)); let results = dfg.instruction_results(ins_id); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index aa07393203d..bdb722cd456 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -1,5 +1,6 @@ pub(crate) mod extfunc; mod function; pub(crate) mod instruction; +pub(crate) mod map; pub(crate) mod types; pub(crate) mod value; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs index b0e573822cf..0ec7d6f5fc0 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs @@ -1,23 +1,20 @@ //! Like Crane-lift all functions outside of the current function is seen as //! external. -//! To reference external functions, one uses +//! To reference external functions, one must first import the function signature +//! into the current function's context. -use super::types::Typ; +use super::types::Type; #[derive(Debug, Default, Clone)] pub(crate) struct Signature { - pub(crate) params: Vec, - pub(crate) returns: Vec, + pub(crate) params: Vec, + pub(crate) returns: Vec, } -/// Reference to a `Signature` in a map inside of -/// a functions DFG. 
-#[derive(Debug, Default, Clone, Copy)] -pub(crate) struct SigRef(pub(crate) u32); #[test] fn sign_smoke() { let mut signature = Signature::default(); - signature.params.push(Typ::Numeric(super::types::NumericType::NativeField)); - signature.returns.push(Typ::Numeric(super::types::NumericType::Unsigned { bit_size: 32 })); + signature.params.push(Type::Numeric(super::types::NumericType::NativeField)); + signature.returns.push(Type::Numeric(super::types::NumericType::Unsigned { bit_size: 32 })); } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 04d933d8f9e..33715b67293 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,42 +1,18 @@ -use std::collections::HashMap; - use acvm::FieldElement; -use super::{function::FunctionId, types::Typ, value::ValueId}; +use super::{ + function::FunctionId, + map::{Id, SparseMap}, + types::Type, + value::ValueId, +}; use crate::ssa_refactor::basic_block::{BasicBlockId, BlockArguments}; -/// Map of instructions. -/// This is similar to Arena. -#[derive(Debug, Default)] -pub(crate) struct Instructions(HashMap); - -impl Instructions { - /// Adds an instruction to the map and returns a - /// reference to the instruction. - pub(crate) fn add_instruction(&mut self, ins: Instruction) -> InstructionId { - let id = InstructionId(self.0.len() as u32); - self.0.insert(id, ins); - id - } - - /// Fetch the instruction corresponding to this - /// instruction id. - /// - /// Panics if there is no such instruction, since instructions cannot be - /// deleted. - pub(crate) fn get_instruction(&self, ins_id: InstructionId) -> &Instruction { - self.0.get(&ins_id).expect("ICE: instructions cannot be deleted") - } +// Container for all Instructions, per-function +pub(crate) type Instructions = SparseMap; - /// Returns the number of instructions stored in the map. 
- pub(crate) fn num_instructions(&self) -> usize { - self.0.len() - } -} - -#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] /// Reference to an instruction -pub(crate) struct InstructionId(u32); +pub(crate) type InstructionId = Id; #[derive(Debug, PartialEq, Eq, Hash, Clone)] /// These are similar to built-ins in other languages. @@ -52,52 +28,35 @@ pub(crate) struct IntrinsicOpcodes; /// Instructions are used to perform tasks. /// The instructions that the IR is able to specify are listed below. pub(crate) enum Instruction { - // Binary Operations + /// Binary Operations like +, -, *, /, ==, != Binary(Binary), - // Unary Operations - // /// Converts `Value` into Typ - Cast(ValueId, Typ), + Cast(ValueId, Type), /// Computes a bit wise not Not(ValueId), /// Truncates `value` to `bit_size` - Truncate { - value: ValueId, - bit_size: u32, - max_bit_size: u32, - }, + Truncate { value: ValueId, bit_size: u32, max_bit_size: u32 }, /// Constrains a value to be equal to true Constrain(ValueId), /// Performs a function call with a list of its arguments. - Call { - func: FunctionId, - arguments: Vec, - }, + Call { func: FunctionId, arguments: Vec }, /// Performs a call to an intrinsic function and stores the /// results in `return_arguments`. - Intrinsic { - func: IntrinsicOpcodes, - arguments: Vec, - }, + Intrinsic { func: IntrinsicOpcodes, arguments: Vec }, /// Loads a value from memory. Load(ValueId), /// Writes a value to memory. - Store { - destination: ValueId, - value: ValueId, - }, + Store { destination: ValueId, value: ValueId }, /// Stores an Immediate value - Immediate { - value: FieldElement, - }, + Immediate { value: FieldElement }, } impl Instruction { @@ -106,7 +65,7 @@ impl Instruction { pub(crate) fn num_fixed_results(&self) -> usize { match self { Instruction::Binary(_) => 1, - Instruction::Cast(_, _) => 0, + Instruction::Cast(..) => 0, Instruction::Not(_) => 1, Instruction::Truncate { .. 
} => 1, Instruction::Constrain(_) => 0, @@ -125,7 +84,7 @@ impl Instruction { pub(crate) fn num_fixed_arguments(&self) -> usize { match self { Instruction::Binary(_) => 2, - Instruction::Cast(_, _) => 1, + Instruction::Cast(..) => 1, Instruction::Not(_) => 1, Instruction::Truncate { .. } => 1, Instruction::Constrain(_) => 1, @@ -141,7 +100,7 @@ impl Instruction { } /// Returns the types that this instruction will return. - pub(crate) fn return_types(&self, ctrl_typevar: Typ) -> Vec { + pub(crate) fn return_types(&self, ctrl_typevar: Type) -> Vec { match self { Instruction::Binary(_) => vec![ctrl_typevar], Instruction::Cast(_, typ) => vec![*typ], @@ -221,14 +180,16 @@ pub(crate) enum BinaryOp { #[test] fn smoke_instructions_map_duplicate() { - let ins = Instruction::Cast(ValueId(0), Typ::Unit); - let same_ins = Instruction::Cast(ValueId(0), Typ::Unit); + let id = Id::test_new(0); + + let ins = Instruction::Not(id); + let same_ins = Instruction::Not(id); let mut ins_map = Instructions::default(); // Document what happens when we insert the same instruction twice - let id = ins_map.add_instruction(ins); - let id_same_ins = ins_map.add_instruction(same_ins); + let id = ins_map.push(ins); + let id_same_ins = ins_map.push(same_ins); // The map is quite naive and does not check if the instruction has ben inserted // before. We simply assign a different Id. 
@@ -241,9 +202,9 @@ fn num_instructions_smoke() { let mut ins_map = Instructions::default(); for i in 0..n { - let ins = Instruction::Cast(ValueId(i as u32), Typ::Unit); - ins_map.add_instruction(ins); + let ins = Instruction::Not(Id::test_new(i)); + ins_map.push(ins); } - assert_eq!(n, ins_map.num_instructions()) + assert_eq!(n, ins_map.len()) } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs new file mode 100644 index 00000000000..6c7511b5bdd --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -0,0 +1,167 @@ +use std::collections::HashMap; + +/// A unique ID corresponding to a value of type T. +/// This type can be used to retrieve a value of type T from +/// either a DenseMap or SparseMap. +/// +/// Note that there is nothing in an Id binding it to a particular +/// DenseMap or SparseMap. If an Id was created to correspond to one +/// particular map type, users need to take care not to use it with +/// another map where it will likely be invalid. +pub(crate) struct Id { + index: usize, + _marker: std::marker::PhantomData, +} + +impl Id { + /// Constructs a new Id for the given index. + /// This constructor is deliberately private to prevent + /// constructing invalid IDs. + fn new(index: usize) -> Self { + Self { index, _marker: std::marker::PhantomData } + } + + /// Creates a test Id with the given index. + /// The name of this function makes it apparent it should only + /// be used for testing. Obtaining Ids in this way should be avoided + /// as unlike DenseMap::push and SparseMap::push, the Ids created + /// here are likely invalid for any particularly map. + #[cfg(test)] + pub(crate) fn test_new(index: usize) -> Self { + Self::new(index) + } +} + +// Need to manually implement most impls on Id. +// Otherwise rust assumes that Id: Hash only if T: Hash, +// which isn't true since the T is not used internally. 
+impl std::hash::Hash for Id { + fn hash(&self, state: &mut H) { + self.index.hash(state); + } +} + +impl Eq for Id {} + +impl PartialEq for Id { + fn eq(&self, other: &Self) -> bool { + self.index == other.index + } +} + +impl Copy for Id {} + +impl Clone for Id { + fn clone(&self) -> Self { + Self { index: self.index, _marker: self._marker } + } +} + +impl std::fmt::Debug for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + // Deliberately formatting as a tuple with 1 element here and omitting + // the _marker: PhantomData field which would just clutter output + f.debug_tuple("Id").field(&self.index).finish() + } +} + +/// A DenseMap is a Vec wrapper where each element corresponds +/// to a unique ID that can be used to access the element. No direct +/// access to indices is provided. Since IDs must be stable and correspond +/// to indices in the internal Vec, operations that would change element +/// ordering like pop, remove, swap_remove, etc, are not possible. +#[derive(Debug)] +pub(crate) struct DenseMap { + storage: Vec, +} + +impl DenseMap { + /// Returns the number of elements in the map. + pub(crate) fn len(&self) -> usize { + self.storage.len() + } + /// Adds an element to the map. + /// Returns the identifier/reference to that element. + pub(crate) fn push(&mut self, element: T) -> Id { + let id = Id::new(self.storage.len()); + self.storage.push(element); + id + } +} + +impl Default for DenseMap { + fn default() -> Self { + Self { storage: Vec::new() } + } +} + +impl std::ops::Index> for DenseMap { + type Output = T; + + fn index(&self, id: Id) -> &Self::Output { + &self.storage[id.index] + } +} + +impl std::ops::IndexMut> for DenseMap { + fn index_mut(&mut self, id: Id) -> &mut Self::Output { + &mut self.storage[id.index] + } +} + +/// A SparseMap is a HashMap wrapper where each element corresponds +/// to a unique ID that can be used to access the element. No direct +/// access to indices is provided. 
+/// +/// Unlike DenseMap, SparseMap's IDs are stored within the structure +/// and are thus stable after element removal. +/// +/// Note that unlike DenseMap, it is possible to panic when retrieving +/// an element if the element's Id has been invalidated by a previous +/// call to .remove(). +#[derive(Debug)] +pub(crate) struct SparseMap { + storage: HashMap, T>, +} + +impl SparseMap { + /// Returns the number of elements in the map. + pub(crate) fn len(&self) -> usize { + self.storage.len() + } + + /// Adds an element to the map. + /// Returns the identifier/reference to that element. + pub(crate) fn push(&mut self, element: T) -> Id { + let id = Id::new(self.storage.len()); + self.storage.insert(id, element); + id + } + + /// Remove an element from the map and return it. + /// This may return None if the element was already + /// previously removed from the map. + pub(crate) fn remove(&mut self, id: Id) -> Option { + self.storage.remove(&id) + } +} + +impl Default for SparseMap { + fn default() -> Self { + Self { storage: HashMap::new() } + } +} + +impl std::ops::Index> for SparseMap { + type Output = T; + + fn index(&self, id: Id) -> &Self::Output { + &self.storage[&id] + } +} + +impl std::ops::IndexMut> for SparseMap { + fn index_mut(&mut self, id: Id) -> &mut Self::Output { + self.storage.get_mut(&id).expect("Invalid id used in SparseMap::index_mut") + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index 9cf75e5ae7f..f2797423e30 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -13,12 +13,11 @@ pub(crate) enum NumericType { NativeField, } -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] /// All types representable in the IR. 
-pub(crate) enum Typ { - /// Represents numeric types in the IR - /// including field elements +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub(crate) enum Type { + /// Represents numeric types in the IR, including field elements Numeric(NumericType), - /// Represents the absence of a Type. + /// The Unit type with a single value Unit, } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index c245cf95f24..ddd00efb38f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -1,10 +1,9 @@ -use super::{instruction::InstructionId, types::Typ}; +use super::{instruction::InstructionId, map::Id, types::Type}; -#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] -/// Value is the most basic type allowed in the IR. -/// Transition Note: This is similar to `NodeId` in our previous IR. -pub(crate) struct ValueId(pub(crate) u32); +pub(crate) type ValueId = Id; +/// Value is the most basic type allowed in the IR. +/// Transition Note: A Id is similar to `NodeId` in our previous IR. #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub(crate) enum Value { /// This value was created due to an instruction @@ -16,5 +15,5 @@ pub(crate) enum Value { /// Example, if you add two numbers together, then the resulting /// value would have position `0`, the typ would be the type /// of the operands, and the instruction would map to an add instruction. 
- Instruction { typ: Typ, position: u16, instruction: InstructionId }, + Instruction { typ: Type, position: u16, instruction: InstructionId }, } From 3d2233de810ab6d4a0f4f83007232133c88a49fe Mon Sep 17 00:00:00 2001 From: kevaundray Date: Thu, 20 Apr 2023 20:10:39 +0100 Subject: [PATCH 27/63] chore(noir): Release 0.4.1 (#1164) * chore(noir): Release 0.4.1 * chore: Update lockfile --- CHANGELOG.md | 17 +++++++++++++++++ Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- 3 files changed, 29 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5845d82a685..9e03152c03a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [0.4.1](https://github.com/noir-lang/noir/compare/v0.4.0...v0.4.1) (2023-04-20) + + +### Features + +* Add Poseidon-BN254 hash functions ([#1176](https://github.com/noir-lang/noir/issues/1176)) ([33feb2b](https://github.com/noir-lang/noir/commit/33feb2bcd71b1040d70d1f51a7377594db557c19)) +* bump noir-source-resolver version ([#1182](https://github.com/noir-lang/noir/issues/1182)) ([750ed77](https://github.com/noir-lang/noir/commit/750ed7793f5a07bc361b56c66f041cb4097219e3)) + + +### Bug Fixes + +* Add checks for nop ([#1160](https://github.com/noir-lang/noir/issues/1160)) ([809b85f](https://github.com/noir-lang/noir/commit/809b85f751bd0e27ce8c4b38354bc051471d8522)) +* allow comptime or non comptime fields in unconstrained for loops ([#1172](https://github.com/noir-lang/noir/issues/1172)) ([73df465](https://github.com/noir-lang/noir/commit/73df4653556a7d1c74d184e27ec5a8ca3be47af9)) +* maintain ordering of return value witnesses when constructing ABI ([#1177](https://github.com/noir-lang/noir/issues/1177)) ([b799c8a](https://github.com/noir-lang/noir/commit/b799c8aa4491f4f17e248a50a154386803b6d712)) +* **nargo:** restore `nargo codegen-verifier` functionality 
([#1185](https://github.com/noir-lang/noir/issues/1185)) ([528a2a4](https://github.com/noir-lang/noir/commit/528a2a441cfe094885cc8f26ffba865f3a0b5c0c)) +* **ssa:** set correct predecessors of IF join ([#1171](https://github.com/noir-lang/noir/issues/1171)) ([7628ed6](https://github.com/noir-lang/noir/commit/7628ed6aa0e430881bd5628c84342058fa0e2f78)) + ## [0.4.0](https://github.com/noir-lang/noir/compare/v0.3.2...v0.4.0) (2023-04-17) diff --git a/Cargo.lock b/Cargo.lock index dfd02bbb138..7416537e24e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -118,7 +118,7 @@ checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" [[package]] name = "arena" -version = "0.4.0" +version = "0.4.1" dependencies = [ "generational-arena", ] @@ -1242,7 +1242,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.4.0" +version = "0.4.1" dependencies = [ "cfg-if 1.0.0", "codespan-reporting 0.9.5", @@ -1781,7 +1781,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.4.0" +version = "0.4.1" [[package]] name = "itertools" @@ -1991,7 +1991,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.4.0" +version = "0.4.1" dependencies = [ "acvm", "iter-extended", @@ -2005,7 +2005,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.4.0" +version = "0.4.1" dependencies = [ "acvm", "assert_cmd", @@ -2037,7 +2037,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.4.0" +version = "0.4.1" dependencies = [ "acvm", "build-data", @@ -2053,7 +2053,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.4.0" +version = "0.4.1" dependencies = [ "acvm", "iter-extended", @@ -2065,7 +2065,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.4.0" +version = "0.4.1" dependencies = [ "acvm", "clap 4.1.8", @@ -2080,7 +2080,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.4.0" +version = 
"0.4.1" dependencies = [ "chumsky", "codespan", @@ -2091,7 +2091,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.4.0" +version = "0.4.1" dependencies = [ "acvm", "arena", @@ -2107,7 +2107,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.4.0" +version = "0.4.1" dependencies = [ "acvm", "arena", diff --git a/Cargo.toml b/Cargo.toml index a2f825928ac..746c3e6399a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ default-members = ["crates/nargo_cli"] [workspace.package] # x-release-please-start-version -version = "0.4.0" +version = "0.4.1" # x-release-please-end authors = ["The Noir Team "] edition = "2021" From 7f8d2c2628bd792514815773033be4963e1dfa77 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Fri, 21 Apr 2023 16:59:05 +0100 Subject: [PATCH 28/63] chore(ssa): Replace JmpIf with BrIf (#1193) * chore(ssa): Replace JmpIf with BrIf * chore(ssa): doc tweak Co-authored-by: kevaundray * chore(ssa): cargo fmt * chore(ssa): rename BrIf -> JmpIf --------- Co-authored-by: kevaundray --- .../src/ssa_refactor/ir/instruction.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 33715b67293..e3298532ce0 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -128,10 +128,14 @@ pub(crate) enum TerminatorInstruction { /// /// Jump If /// - /// Jumps to the specified `destination` with - /// arguments, if the condition - /// if the condition is true. - JmpIf { condition: ValueId, destination: BasicBlockId, arguments: BlockArguments }, + /// If the condition is true: jump to the specified `then_destination` with `arguments`. + /// Otherwise, jump to the specified `else_destination` with `arguments`. 
+ JmpIf { + condition: ValueId, + then_destination: BasicBlockId, + else_destination: BasicBlockId, + arguments: BlockArguments, + }, /// Unconditional Jump /// /// Jumps to specified `destination` with `arguments` From e4c7bb2fa7703aab3f0238e159431a3f0da5f13e Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 21 Apr 2023 14:14:31 -0400 Subject: [PATCH 29/63] chore(ssa refactor): Add Context structs and start ssa gen pass (#1196) * Add Context structs and start ssa gen pass * Fix block arguments * Fix clippy lint * Use the correct dfg * Rename contexts to highlight the inner contexts are shared rather than used directly * Update crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs Co-authored-by: kevaundray --------- Co-authored-by: kevaundray --- crates/noirc_evaluator/src/lib.rs | 2 +- crates/noirc_evaluator/src/ssa_refactor.rs | 9 +- crates/noirc_evaluator/src/ssa_refactor/ir.rs | 5 +- .../src/ssa_refactor/{ => ir}/basic_block.rs | 32 +++-- .../src/ssa_refactor/{ => ir}/dfg.rs | 33 ++--- .../src/ssa_refactor/ir/extfunc.rs | 20 --- .../src/ssa_refactor/ir/function.rs | 63 +++++++-- .../src/ssa_refactor/ir/instruction.rs | 44 +------ .../src/ssa_refactor/ir/map.rs | 64 +++++++++- .../src/ssa_refactor/ir/value.rs | 10 +- .../ssa_builder/function_builder.rs | 54 ++++++++ .../src/ssa_refactor/ssa_builder/mod.rs | 19 +++ .../src/ssa_refactor/ssa_gen/context.rs | 60 +++++++++ .../src/ssa_refactor/ssa_gen/mod.rs | 120 ++++++++++++++++++ .../src/ssa_refactor/ssa_gen/value.rs | 13 ++ .../src/monomorphization/ast.rs | 8 +- 16 files changed, 442 insertions(+), 114 deletions(-) rename crates/noirc_evaluator/src/ssa_refactor/{ => ir}/basic_block.rs (55%) rename crates/noirc_evaluator/src/ssa_refactor/{ => ir}/dfg.rs (88%) delete mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs create mode 
100644 crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 166c2d58239..8b3cbb009a9 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -304,7 +304,7 @@ impl Evaluator { // u8 and arrays are assumed to be private // This is not a short-coming of the ABI, but of the grammar // The new grammar has been conceived, and will be implemented. - let main = ir_gen.program.main(); + let main = ir_gen.program.main_mut(); let main_params = std::mem::take(&mut main.parameters); let abi_params = std::mem::take(&mut ir_gen.program.main_function_signature.0); diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa_refactor.rs index 073b54cbf10..37f1ead2b07 100644 --- a/crates/noirc_evaluator/src/ssa_refactor.rs +++ b/crates/noirc_evaluator/src/ssa_refactor.rs @@ -5,9 +5,8 @@ //! elimination and constant folding. //! //! 
This module heavily borrows from Cranelift -#[allow(dead_code)] -mod basic_block; -#[allow(dead_code)] -mod dfg; -#[allow(dead_code)] +#![allow(dead_code)] + mod ir; +mod ssa_builder; +mod ssa_gen; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index bdb722cd456..ce63bdc7238 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -1,5 +1,6 @@ -pub(crate) mod extfunc; -mod function; +pub(crate) mod basic_block; +pub(crate) mod dfg; +pub(crate) mod function; pub(crate) mod instruction; pub(crate) mod map; pub(crate) mod types; diff --git a/crates/noirc_evaluator/src/ssa_refactor/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs similarity index 55% rename from crates/noirc_evaluator/src/ssa_refactor/basic_block.rs rename to crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index d6c2198b4a0..b11c4dc3f1c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -1,4 +1,8 @@ -use super::ir::instruction::{Instruction, TerminatorInstruction}; +use super::{ + instruction::{InstructionId, TerminatorInstruction}, + map::Id, + value::ValueId, +}; /// A Basic block is a maximal collection of instructions /// such that there are only jumps at the end of block @@ -8,10 +12,11 @@ use super::ir::instruction::{Instruction, TerminatorInstruction}; /// block, then all instructions are executed. ie single-entry single-exit. #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub(crate) struct BasicBlock { - /// Arguments to the basic block. - phi_nodes: Vec, + /// Parameters to the basic block. + parameters: Vec, + /// Instructions in the basic block. - instructions: Vec, + instructions: Vec, /// A basic block is considered sealed /// if no further predecessors will be added to it. 
@@ -21,17 +26,16 @@ pub(crate) struct BasicBlock { /// The terminating instruction for the basic block. /// - /// This will be a control flow instruction. - terminator: TerminatorInstruction, + /// This will be a control flow instruction. This is only + /// None if the block is still being constructed. + terminator: Option, } -#[derive(Debug, PartialEq, Eq, Hash, Clone)] /// An identifier for a Basic Block. -pub(crate) struct BasicBlockId; +pub(crate) type BasicBlockId = Id; -#[derive(Debug, PartialEq, Eq, Hash, Clone)] -/// Arguments to the basic block. -/// We use the modern Crane-lift strategy -/// of representing phi nodes as basic block -/// arguments. -pub(crate) struct BlockArguments; +impl BasicBlock { + pub(super) fn new(parameters: Vec) -> Self { + Self { parameters, instructions: Vec::new(), is_sealed: false, terminator: None } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs similarity index 88% rename from crates/noirc_evaluator/src/ssa_refactor/dfg.rs rename to crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 6dcee5212e2..ad6d614fec0 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -1,14 +1,11 @@ use super::{ basic_block::{BasicBlock, BasicBlockId}, - ir::{ - extfunc::Signature, - instruction::{Instruction, InstructionId, Instructions}, - map::{Id, SparseMap}, - types::Type, - value::{Value, ValueId}, - }, + function::Signature, + instruction::{Instruction, InstructionId}, + map::{DenseMap, Id, SecondaryMap}, + types::Type, + value::{Value, ValueId}, }; -use std::collections::HashMap; #[derive(Debug, Default)] /// A convenience wrapper to store `Value`s. @@ -39,7 +36,7 @@ impl ValueList { #[derive(Debug, Default)] pub(crate) struct DataFlowGraph { /// All of the instructions in a function - instructions: Instructions, + instructions: DenseMap, /// Stores the results for a particular instruction. 
/// @@ -50,17 +47,17 @@ pub(crate) struct DataFlowGraph { /// Currently, we need to define them in a better way /// Call instructions require the func signature, but /// other instructions may need some more reading on my part - results: HashMap, + results: SecondaryMap, /// Storage for all of the values defined in this /// function. - values: SparseMap, + values: DenseMap, /// Function signatures of external methods - signatures: SparseMap, + signatures: DenseMap, /// All blocks in a function - blocks: SparseMap, + blocks: DenseMap, } impl DataFlowGraph { @@ -71,13 +68,17 @@ impl DataFlowGraph { /// Inserts a new instruction into the DFG. pub(crate) fn make_instruction(&mut self, instruction_data: Instruction) -> InstructionId { - let id = self.instructions.push(instruction_data); + let id = self.instructions.insert(instruction_data); // Create a new vector to store the potential results for the instruction. self.results.insert(id, Default::default()); id } + pub(crate) fn make_value(&mut self, value: Value) -> ValueId { + self.values.insert(value) + } + /// Attaches results to the instruction. /// /// Returns the number of results that this instruction @@ -126,9 +127,9 @@ impl DataFlowGraph { let results = self.results.get_mut(&instruction_id).unwrap(); let expected_res_position = results.len(); - let value_id = self.values.push(Value::Instruction { + let value_id = self.values.insert(Value::Instruction { typ, - position: expected_res_position as u16, + position: expected_res_position, instruction: instruction_id, }); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs deleted file mode 100644 index 0ec7d6f5fc0..00000000000 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/extfunc.rs +++ /dev/null @@ -1,20 +0,0 @@ -//! Like Crane-lift all functions outside of the current function is seen as -//! external. -//! To reference external functions, one must first import the function signature -//! 
into the current function's context. - -use super::types::Type; - -#[derive(Debug, Default, Clone)] -pub(crate) struct Signature { - pub(crate) params: Vec, - pub(crate) returns: Vec, -} - -#[test] -fn sign_smoke() { - let mut signature = Signature::default(); - - signature.params.push(Type::Numeric(super::types::NumericType::NativeField)); - signature.returns.push(Type::Numeric(super::types::NumericType::Unsigned { bit_size: 32 })); -} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 331c0d656d6..2509a85f435 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -1,25 +1,70 @@ -use crate::ssa_refactor::basic_block::{BasicBlock, BasicBlockId}; - +use super::basic_block::{BasicBlock, BasicBlockId}; +use super::dfg::DataFlowGraph; use super::instruction::Instruction; +use super::map::{DenseMap, Id, SecondaryMap}; +use super::types::Type; +use super::value::Value; +use iter_extended::vecmap; use noirc_errors::Location; -use std::collections::HashMap; /// A function holds a list of instructions. -/// These instructions are further grouped into -/// Basic blocks +/// These instructions are further grouped into Basic blocks +/// +/// Like Crane-lift all functions outside of the current function is seen as external. +/// To reference external functions, one must first import the function signature +/// into the current function's context. 
#[derive(Debug)] pub(crate) struct Function { /// Basic blocks associated to this particular function - basic_blocks: HashMap, + basic_blocks: DenseMap, /// Maps instructions to source locations - source_locations: HashMap, + source_locations: SecondaryMap, /// The first basic block in the function entry_block: BasicBlockId, + + dfg: DataFlowGraph, +} + +impl Function { + pub(crate) fn new(parameter_count: usize) -> Self { + let mut dfg = DataFlowGraph::default(); + let mut basic_blocks = DenseMap::default(); + + // The parameters for each function are stored as the block parameters + // of the function's entry block + let entry_block = basic_blocks.insert_with_id(|entry_block| { + // TODO: Give each parameter its correct type + let parameters = vecmap(0..parameter_count, |i| { + dfg.make_value(Value::Param { block: entry_block, position: i, typ: Type::Unit }) + }); + + BasicBlock::new(parameters) + }); + + Self { basic_blocks, source_locations: SecondaryMap::new(), entry_block, dfg } + } + + pub(crate) fn entry_block(&self) -> BasicBlockId { + self.entry_block + } } /// FunctionId is a reference for a function -#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] -pub(crate) struct FunctionId(pub(crate) u32); +pub(crate) type FunctionId = Id; + +#[derive(Debug, Default, Clone)] +pub(crate) struct Signature { + pub(crate) params: Vec, + pub(crate) returns: Vec, +} + +#[test] +fn sign_smoke() { + let mut signature = Signature::default(); + + signature.params.push(Type::Numeric(super::types::NumericType::NativeField)); + signature.returns.push(Type::Numeric(super::types::NumericType::Unsigned { bit_size: 32 })); +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index e3298532ce0..1d5089179d5 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,15 +1,8 @@ use acvm::FieldElement; use super::{ - 
function::FunctionId, - map::{Id, SparseMap}, - types::Type, - value::ValueId, + basic_block::BasicBlockId, function::FunctionId, map::Id, types::Type, value::ValueId, }; -use crate::ssa_refactor::basic_block::{BasicBlockId, BlockArguments}; - -// Container for all Instructions, per-function -pub(crate) type Instructions = SparseMap; /// Reference to an instruction pub(crate) type InstructionId = Id; @@ -134,12 +127,12 @@ pub(crate) enum TerminatorInstruction { condition: ValueId, then_destination: BasicBlockId, else_destination: BasicBlockId, - arguments: BlockArguments, + arguments: Vec, }, /// Unconditional Jump /// /// Jumps to specified `destination` with `arguments` - Jmp { destination: BasicBlockId, arguments: BlockArguments }, + Jmp { destination: BasicBlockId, arguments: Vec }, } /// A binary instruction in the IR. @@ -181,34 +174,3 @@ pub(crate) enum BinaryOp { /// false otherwise. Ne, } - -#[test] -fn smoke_instructions_map_duplicate() { - let id = Id::test_new(0); - - let ins = Instruction::Not(id); - let same_ins = Instruction::Not(id); - - let mut ins_map = Instructions::default(); - - // Document what happens when we insert the same instruction twice - let id = ins_map.push(ins); - let id_same_ins = ins_map.push(same_ins); - - // The map is quite naive and does not check if the instruction has ben inserted - // before. We simply assign a different Id. 
- assert_ne!(id, id_same_ins) -} - -#[test] -fn num_instructions_smoke() { - let n = 100; - - let mut ins_map = Instructions::default(); - for i in 0..n { - let ins = Instruction::Not(Id::test_new(i)); - ins_map.push(ins); - } - - assert_eq!(n, ins_map.len()) -} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index 6c7511b5bdd..53a7db3a5d5 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -1,4 +1,7 @@ -use std::collections::HashMap; +use std::{ + collections::HashMap, + sync::atomic::{AtomicUsize, Ordering}, +}; /// A unique ID corresponding to a value of type T. /// This type can be used to retrieve a value of type T from @@ -80,13 +83,22 @@ impl DenseMap { pub(crate) fn len(&self) -> usize { self.storage.len() } + /// Adds an element to the map. /// Returns the identifier/reference to that element. - pub(crate) fn push(&mut self, element: T) -> Id { + pub(crate) fn insert(&mut self, element: T) -> Id { let id = Id::new(self.storage.len()); self.storage.push(element); id } + + /// Given the Id of the element being created, adds the element + /// returned by the given function to the map + pub(crate) fn insert_with_id(&mut self, f: impl FnOnce(Id) -> T) -> Id { + let id = Id::new(self.storage.len()); + self.storage.push(f(id)); + id + } } impl Default for DenseMap { @@ -132,12 +144,20 @@ impl SparseMap { /// Adds an element to the map. /// Returns the identifier/reference to that element. 
- pub(crate) fn push(&mut self, element: T) -> Id { + pub(crate) fn insert(&mut self, element: T) -> Id { let id = Id::new(self.storage.len()); self.storage.insert(id, element); id } + /// Given the Id of the element being created, adds the element + /// returned by the given function to the map + pub(crate) fn insert_with_id(&mut self, f: impl FnOnce(Id) -> T) -> Id { + let id = Id::new(self.storage.len()); + self.storage.insert(id, f(id)); + id + } + /// Remove an element from the map and return it. /// This may return None if the element was already /// previously removed from the map. @@ -165,3 +185,41 @@ impl std::ops::IndexMut> for SparseMap { self.storage.get_mut(&id).expect("Invalid id used in SparseMap::index_mut") } } + +/// A SecondaryMap is for storing secondary data for a given key. Since this +/// map is for secondary data, it will not return fresh Ids for data, instead +/// it expects users to provide these ids in order to associate existing ids with +/// additional data. +/// +/// Unlike SecondaryMap in cranelift, this version is sparse and thus +/// does not require inserting default elements for each key in between +/// the desired key and the previous length of the map. +/// +/// There is no expectation that there is always secondary data for all relevant +/// Ids of a given type, so unlike the other Map types, it is possible for +/// a call to .get(id) to return None. +pub(crate) type SecondaryMap = HashMap, V>; + +/// A simple counter to create fresh Ids without any storage. +/// Useful for assigning ids before the storage is created or assigning ids +/// for types that have no single owner. +/// +/// This type wraps an AtomicUsize so it can safely be used across threads. 
+#[derive(Debug)] +pub(crate) struct AtomicCounter { + next: AtomicUsize, + _marker: std::marker::PhantomData, +} + +impl AtomicCounter { + /// Return the next fresh id + pub(crate) fn next(&self) -> Id { + Id::new(self.next.fetch_add(1, Ordering::Relaxed)) + } +} + +impl Default for AtomicCounter { + fn default() -> Self { + Self { next: Default::default(), _marker: Default::default() } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index ddd00efb38f..38ca8b12c40 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -1,3 +1,5 @@ +use crate::ssa_refactor::ir::basic_block::BasicBlockId; + use super::{instruction::InstructionId, map::Id, types::Type}; pub(crate) type ValueId = Id; @@ -15,5 +17,11 @@ pub(crate) enum Value { /// Example, if you add two numbers together, then the resulting /// value would have position `0`, the typ would be the type /// of the operands, and the instruction would map to an add instruction. - Instruction { typ: Type, position: u16, instruction: InstructionId }, + Instruction { instruction: InstructionId, position: usize, typ: Type }, + + /// This Value originates from a block parameter. Since function parameters + /// are also represented as block parameters, this includes function parameters as well. 
+ /// + /// position -- the index of this Value in the block parameters list + Param { block: BasicBlockId, position: usize, typ: Type }, } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs new file mode 100644 index 00000000000..8d90a95332e --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -0,0 +1,54 @@ +use crate::ssa_refactor::ir::{ + basic_block::BasicBlockId, + function::{Function, FunctionId}, +}; + +use super::SharedBuilderContext; + +/// The per-function context for each ssa function being generated. +/// +/// This is split from the global SsaBuilder context to allow each function +/// to be potentially built concurrently. +/// +/// Contrary to the name, this struct has the capacity to build as many +/// functions as needed, although it is limited to one function at a time. +pub(crate) struct FunctionBuilder<'ssa> { + global_context: &'ssa SharedBuilderContext, + + current_function: Function, + current_function_id: FunctionId, + + current_block: BasicBlockId, + + finished_functions: Vec<(FunctionId, Function)>, +} + +impl<'ssa> FunctionBuilder<'ssa> { + pub(crate) fn new(parameters: usize, context: &'ssa SharedBuilderContext) -> Self { + let new_function = Function::new(parameters); + let current_block = new_function.entry_block(); + + Self { + global_context: context, + current_function: new_function, + current_function_id: context.next_function(), + current_block, + finished_functions: Vec::new(), + } + } + + /// Finish the current function and create a new function + pub(crate) fn new_function(&mut self, parameters: usize) { + let new_function = Function::new(parameters); + let old_function = std::mem::replace(&mut self.current_function, new_function); + + self.finished_functions.push((self.current_function_id, old_function)); + + self.current_function_id = self.global_context.next_function(); + } + + 
pub(crate) fn finish(mut self) -> Vec<(FunctionId, Function)> { + self.finished_functions.push((self.current_function_id, self.current_function)); + self.finished_functions + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs new file mode 100644 index 00000000000..8f9ceed800e --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -0,0 +1,19 @@ +pub(crate) mod function_builder; + +use crate::ssa_refactor::ir::{ + function::{Function, FunctionId}, + map::AtomicCounter, +}; + +/// The global context while building the ssa representation. +/// Because this may be shared across threads, it is synchronized internally as necessary. +#[derive(Default)] +pub(crate) struct SharedBuilderContext { + function_count: AtomicCounter, +} + +impl SharedBuilderContext { + pub(super) fn next_function(&self) -> FunctionId { + self.function_count.next() + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs new file mode 100644 index 00000000000..94fedb7b4cf --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -0,0 +1,60 @@ +use std::collections::HashMap; +use std::sync::{Mutex, RwLock}; + +use noirc_frontend::monomorphization::ast::{self, LocalId}; +use noirc_frontend::monomorphization::ast::{FuncId, Program}; + +use crate::ssa_refactor::ssa_builder::SharedBuilderContext; +use crate::ssa_refactor::{ + ir::function::FunctionId as IrFunctionId, ssa_builder::function_builder::FunctionBuilder, +}; + +use super::value::Value; + +// TODO: Make this a threadsafe queue so we can compile functions in parallel +type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; + +pub(super) struct FunctionContext<'a> { + definitions: HashMap, + function_builder: FunctionBuilder<'a>, + shared_context: &'a SharedContext, +} + +/// Shared context for all functions during ssa codegen 
+pub(super) struct SharedContext { + functions: RwLock>, + function_queue: Mutex, + pub(super) program: Program, +} + +impl<'a> FunctionContext<'a> { + pub(super) fn new( + parameter_count: usize, + shared_context: &'a SharedContext, + shared_builder_context: &'a SharedBuilderContext, + ) -> Self { + Self { + definitions: HashMap::new(), + function_builder: FunctionBuilder::new(parameter_count, shared_builder_context), + shared_context, + } + } + + pub(super) fn new_function(&mut self, parameters: impl ExactSizeIterator) { + self.function_builder.new_function(parameters.len()); + + for (_i, _parameter) in parameters.enumerate() { + todo!("Add block param to definitions") + } + } +} + +impl SharedContext { + pub(super) fn new(program: Program) -> Self { + Self { functions: Default::default(), function_queue: Default::default(), program } + } + + pub(super) fn pop_next_function_in_queue(&self) -> Option<(ast::FuncId, IrFunctionId)> { + self.function_queue.lock().expect("Failed to lock function_queue").pop() + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs new file mode 100644 index 00000000000..1da65fafd48 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -0,0 +1,120 @@ +mod context; +mod value; + +use context::SharedContext; +use noirc_errors::Location; +use noirc_frontend::monomorphization::ast::{self, Expression, Program}; + +use self::{context::FunctionContext, value::Value}; + +use super::ssa_builder::SharedBuilderContext; + +pub(crate) fn generate_ssa(program: Program) { + let context = SharedContext::new(program); + let builder_context = SharedBuilderContext::default(); + + let main = context.program.main(); + // TODO struct parameter counting + let parameter_count = main.parameters.len(); + + let mut function_context = FunctionContext::new(parameter_count, &context, &builder_context); + function_context.codegen_expression(&main.body); + + while let 
Some((src_function_id, _new_id)) = context.pop_next_function_in_queue() { + let function = &context.program[src_function_id]; + // TODO: Need to ensure/assert the new function's id == new_id + function_context.new_function(function.parameters.iter().map(|(id, ..)| *id)); + function_context.codegen_expression(&function.body); + } +} + +impl<'a> FunctionContext<'a> { + fn codegen_expression(&mut self, expr: &Expression) -> Value { + match expr { + Expression::Ident(ident) => self.codegen_ident(ident), + Expression::Literal(literal) => self.codegen_literal(literal), + Expression::Block(block) => self.codegen_block(block), + Expression::Unary(unary) => self.codegen_unary(unary), + Expression::Binary(binary) => self.codegen_binary(binary), + Expression::Index(index) => self.codegen_index(index), + Expression::Cast(cast) => self.codegen_cast(cast), + Expression::For(for_expr) => self.codegen_for(for_expr), + Expression::If(if_expr) => self.codegen_if(if_expr), + Expression::Tuple(tuple) => self.codegen_tuple(tuple), + Expression::ExtractTupleField(tuple, index) => { + self.codegen_extract_tuple_field(tuple, *index) + } + Expression::Call(call) => self.codegen_call(call), + Expression::Let(let_expr) => self.codegen_let(let_expr), + Expression::Constrain(constrain, location) => { + self.codegen_constrain(constrain, *location) + } + Expression::Assign(assign) => self.codegen_assign(assign), + Expression::Semi(semi) => self.codegen_semi(semi), + } + } + + fn codegen_ident(&mut self, _ident: &ast::Ident) -> Value { + todo!() + } + + fn codegen_literal(&mut self, _literal: &ast::Literal) -> Value { + todo!() + } + + fn codegen_block(&mut self, _block: &[Expression]) -> Value { + todo!() + } + + fn codegen_unary(&mut self, _unary: &ast::Unary) -> Value { + todo!() + } + + fn codegen_binary(&mut self, _binary: &ast::Binary) -> Value { + todo!() + } + + fn codegen_index(&mut self, _index: &ast::Index) -> Value { + todo!() + } + + fn codegen_cast(&mut self, _cast: &ast::Cast) -> 
Value { + todo!() + } + + fn codegen_for(&mut self, _for_expr: &ast::For) -> Value { + todo!() + } + + fn codegen_if(&mut self, _if_expr: &ast::If) -> Value { + todo!() + } + + fn codegen_tuple(&mut self, _tuple: &[Expression]) -> Value { + todo!() + } + + fn codegen_extract_tuple_field(&mut self, _tuple: &Expression, _index: usize) -> Value { + todo!() + } + + fn codegen_call(&mut self, _call: &ast::Call) -> Value { + todo!() + } + + fn codegen_let(&mut self, _let_expr: &ast::Let) -> Value { + todo!() + } + + fn codegen_constrain(&mut self, _constrain: &Expression, _location: Location) -> Value { + todo!() + } + + fn codegen_assign(&mut self, _assign: &ast::Assign) -> Value { + todo!() + } + + fn codegen_semi(&mut self, _semi: &Expression) -> Value { + todo!() + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs new file mode 100644 index 00000000000..785ae3cd8f7 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -0,0 +1,13 @@ +use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; +use crate::ssa_refactor::ir::value::ValueId; + +#[derive(Debug, Clone)] +pub(super) enum Value { + Normal(ValueId), + Function(IrFunctionId), + Tuple(Vec), + + /// Lazily inserting unit values helps prevent cluttering the IR with too many + /// unit literals. 
+ Unit, +} diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index 938a937405c..6a2b97ae19d 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -226,8 +226,12 @@ impl Program { Program { functions, main_function_signature } } - pub fn main(&mut self) -> &mut Function { - &mut self.functions[0] + pub fn main(&self) -> &Function { + &self[Self::main_id()] + } + + pub fn main_mut(&mut self) -> &mut Function { + &mut self[Self::main_id()] } pub fn main_id() -> FuncId { From f14fe0b97e75eb5be39a48675149cf08d718abf6 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Fri, 21 Apr 2023 20:01:56 +0100 Subject: [PATCH 30/63] feat!: Switch to aztec_backend that uses upstream BB & UltraPlonk (#1114) * feat(build): Nix flake to build with * use debug ultra branch for backend, and temporary simple XOR/AND test added to nargo test_data * ignore result * Update deps * Avoid reading build data if we already have it * Update values for UP * Align flake with aztec_backend * update lockfile * skip simple_shield until it is updated * Update nix version with our workflow * Build with Nix in CI * Update backend deps * Simplify the nix file * chore(nargo)!: Update the codegen command to use vk, which requires circuit_name arg * chore!: Update Prover.toml for merkle_insert and simple_shield with UP (#1123) * re-enable simple_shield * use CARGO_MANIFEST_DIR to locate data for integration testing * clippy * patch for bb-sys * patch for bb * Add build workaround to wasm crate * clippy * Remove buggy clippy lint * cleanup flake * Rough guidelines for working on Noir with nix * chore: Cleanup UltraPlonk Debugging (#1130) cleanup debugging code from UP * update 8_integration main func * update to newer bb * update aztec_backend * update acvm to 0.9.0 * Reference temp commit on aztec_backend * fix some issues with the acvm 0.9 upgrade * temp lock to cody bb branch 
* chore: fix up merge issues * update for contract changes * update to appropriate refs * working on direnv * Update nix and direnv stuff * Update building documentation * Stop ignoring vscode dir and recommend extensions and settings * cspell * aztec_backend refs * nix lockfile * Simplify and rename workflow * docs cleanup * code review * Update crates/nargo_cli/tests/prove_and_verify.rs * Update crates/nargo_cli/tests/prove_and_verify.rs Co-authored-by: kevaundray * Update cspell.json * Update README.md * Update cspell.json * update bb-sys for linux env fixes * add issue numbers to TODOs * Update transcript comment --------- Co-authored-by: Koby Co-authored-by: Maxim Vezenov Co-authored-by: Tom French Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: kevaundray --- .envrc | 21 +- .github/workflows/release.yml | 1 + .github/workflows/rust.yml | 21 -- .github/workflows/test.yml | 35 +++ .gitignore | 8 +- .vscode/extensions.json | 13 + .vscode/settings.json | 16 ++ Cargo.lock | 255 ++++++------------ Cargo.toml | 2 +- README.md | 75 ++++++ crates/nargo/Cargo.toml | 1 - crates/nargo_cli/Cargo.toml | 4 +- crates/nargo_cli/build.rs | 12 +- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 31 ++- crates/nargo_cli/src/lib.rs | 2 +- crates/nargo_cli/tests/prove_and_verify.rs | 22 +- .../tests/test_data/merkle_insert/Prover.toml | 14 +- .../test_data/pedersen_check/Prover.toml | 4 +- .../tests/test_data/simple_shield/Prover.toml | 8 +- .../nargo_cli/tests/test_data/xor/Nargo.toml | 5 + .../nargo_cli/tests/test_data/xor/Prover.toml | 2 + .../nargo_cli/tests/test_data/xor/src/main.nr | 5 + .../src/ssa/acir_gen/operations/sort.rs | 1 + crates/wasm/build.rs | 12 +- crates/wasm/src/circuit.rs | 23 -- crates/wasm/src/lib.rs | 3 +- cspell.json | 4 + default.nix | 24 +- flake.lock | 156 +++++++++++ flake.nix | 191 +++++++++++++ shell.nix | 13 + 31 files changed, 712 insertions(+), 272 deletions(-) delete mode 100644 .github/workflows/rust.yml 
create mode 100644 .github/workflows/test.yml create mode 100644 .vscode/extensions.json create mode 100644 .vscode/settings.json create mode 100644 crates/nargo_cli/tests/test_data/xor/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/xor/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/xor/src/main.nr create mode 100644 flake.lock create mode 100644 flake.nix create mode 100644 shell.nix diff --git a/.envrc b/.envrc index 1d953f4bd73..b2f868b1898 100644 --- a/.envrc +++ b/.envrc @@ -1 +1,20 @@ -use nix +# Based on https://github.com/direnv/direnv-vscode/blob/158e8302c2594cc0eaa5f8b4f0cafedd4e1c0315/.envrc + +# You can define your system-specific logic (like Git settings or GH tokens) in .envrc.local +# If that logic is usable by other people and might improve development environment, consider +# contributing it to this file! + +source_env_if_exists .envrc.local + +if [[ -z "${SKIP_NIX:-}" ]] && has nix; then + + if nix flake metadata &>/dev/null && has use_flake; then + # use flakes if possible + use flake + + else + # Otherwise fall back to pure nix + use nix + fi + +fi diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 75278dcee5c..c7eb6df168e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -26,6 +26,7 @@ jobs: pull-request-title-pattern: "chore(noir): Release ${version}" extra-files: | Cargo.toml + flake.nix update-lockfile: name: Update lockfile diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml deleted file mode 100644 index 8363858fa87..00000000000 --- a/.github/workflows/rust.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Rust - -on: [push, pull_request] - -# This will cancel previous runs when a branch or PR is updated -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - -jobs: - test: - name: Cargo test - uses: noir-lang/.github/.github/workflows/rust-test.yml@main - - 
clippy: - name: Cargo clippy - uses: noir-lang/.github/.github/workflows/rust-clippy.yml@main - - format: - name: Cargo fmt - uses: noir-lang/.github/.github/workflows/rust-format.yml@main \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000000..220985d8003 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,35 @@ +name: Test + +on: [push, pull_request] + +# This will cancel previous runs when a branch or PR is updated +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + test: + name: Test on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + target: x86_64-linux + - os: macos-latest + target: x86_64-darwin + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - uses: cachix/install-nix-action@v20 + with: + nix_path: nixpkgs=channel:nixos-22.11 + github_access_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Run `nix flake check` + run: | + nix flake check diff --git a/.gitignore b/.gitignore index 10988465b29..351f5e16a7f 100644 --- a/.gitignore +++ b/.gitignore @@ -2,10 +2,14 @@ .DS_Store examples/**/target/ examples/9 -.vscode node_modules pkg/ +# Nix stuff +result +.envrc.local +.direnv/ + # Nargo output *.proof *.acir @@ -13,4 +17,4 @@ pkg/ *.tr *.pk *.vk -**/Verifier.toml \ No newline at end of file +**/Verifier.toml diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000000..64ae238015f --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,13 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations. + // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp + // List of extensions which should be recommended for users of this workspace. 
+ "recommendations": [ + "mkhl.direnv", + "jnoortheen.nix-ide", + "rust-lang.rust-analyzer", + "redhat.vscode-yaml" + ], + // List of extensions recommended by VS Code that should not be recommended for users of this workspace. + "unwantedRecommendations": [] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000000..78bfd88c35e --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,16 @@ +{ + "direnv.restart.automatic": true, + "redhat.telemetry.enabled": false, + "yaml.recommendations.show": false, + "nix.serverPath": "nil", + "nix.enableLanguageServer": true, + "nix.serverSettings": { + "nil": { + "formatting": { + "command": [ + "nixpkgs-fmt" + ] + } + } + }, +} diff --git a/Cargo.lock b/Cargo.lock index 7416537e24e..85b5c12aa46 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "acir" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f764b474e341efc3e8ee3d5054840b2fd2ac002f764fc2f4cd3569ce76badd1" +checksum = "018148d69bf14422b1c1d62909a241af2a7f51fec064feb2b01de88fb02b11b8" dependencies = [ "acir_field", "flate2", @@ -16,9 +16,9 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbca7df5192c7823d4108d2c34cadcfd30dca94506b9e9861f85f0ea747ddedc" +checksum = "4d40dac25cf6be6335dd86286caeac859afd0dc74a4a75c64eed041b0f00a278" dependencies = [ "ark-bn254", "ark-ff", @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "acvm" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d5df175b6923bf9bb05ba973b017b0fa1356066be8f0ebadd3d2dbbc48bd5b" +checksum = "e17b7bc8f2b2215075b8e080ba3a0b8b7d759f04bc44b27e5bb8d845f4c77f20" dependencies = [ "acir", "acvm_stdlib", @@ -48,9 +48,9 @@ dependencies = [ [[package]] name = 
"acvm_stdlib" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2bbc18fe9732ca3d93a2bf8f1a1ad99a003b565e7bc1ad5c67f69867449e8f" +checksum = "33ce2d19a9d1e7ff1bf415ed909b43031e33ef6df21be70e470bb1817b3e6989" dependencies = [ "acir", ] @@ -270,17 +270,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -303,32 +292,38 @@ dependencies = [ ] [[package]] -name = "barretenberg_static_lib" -version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=26178359a2251e885f15f0a4d1a686afda04aec9#26178359a2251e885f15f0a4d1a686afda04aec9" +name = "barretenberg-sys" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bc96e40cc45e7d5622cbc57a140aee926a7fb6e4f7d07dda758daa9256f3317" dependencies = [ - "barretenberg_wrapper", - "common", + "bindgen", + "cc", + "color-eyre", + "link-cplusplus", + "pkg-config", + "thiserror", ] [[package]] -name = "barretenberg_wasm" +name = "barretenberg_static_lib" version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=26178359a2251e885f15f0a4d1a686afda04aec9#26178359a2251e885f15f0a4d1a686afda04aec9" +source = "git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" dependencies = [ + "barretenberg-sys", "common", - "wasmer", ] [[package]] -name = "barretenberg_wrapper" +name = "barretenberg_wasm" version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec-connect?branch=kw/noir-dsl#dbd544318819cf710dede8ffb7b7eafa112f5aed" +source = 
"git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" dependencies = [ - "bindgen", - "cmake", - "hex", - "num_cpus 0.2.13", + "common", + "getrandom", + "pkg-config", + "rust-embed", + "wasmer", ] [[package]] @@ -339,15 +334,13 @@ checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" [[package]] name = "bindgen" -version = "0.60.1" +version = "0.64.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "062dddbc1ba4aca46de6338e2bf87771414c335f7b2f2036e8f3e9befebf88e6" +checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4" dependencies = [ "bitflags", "cexpr", "clang-sys", - "clap 3.2.23", - "env_logger", "lazy_static", "lazycell", "log", @@ -357,6 +350,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", + "syn", "which", ] @@ -531,21 +525,6 @@ dependencies = [ "libloading", ] -[[package]] -name = "clap" -version = "3.2.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" -dependencies = [ - "atty", - "bitflags", - "clap_lex 0.2.4", - "indexmap", - "strsim", - "termcolor", - "textwrap", -] - [[package]] name = "clap" version = "4.1.8" @@ -554,7 +533,7 @@ checksum = "c3d7ae14b20b94cb02149ed21a86c423859cbe18dc7ed69845cace50e52b40a5" dependencies = [ "bitflags", "clap_derive", - "clap_lex 0.3.2", + "clap_lex", "is-terminal", "once_cell", "strsim", @@ -574,15 +553,6 @@ dependencies = [ "syn", ] -[[package]] -name = "clap_lex" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" -dependencies = [ - "os_str_bytes", -] - [[package]] name = "clap_lex" version = "0.3.2" @@ -592,15 +562,6 @@ dependencies = [ "os_str_bytes", ] -[[package]] -name = "cmake" -version = "0.1.49" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "db34956e100b30725f2eb215f90d4871051239535632f84fea3bc92722c66b7c" -dependencies = [ - "cc", -] - [[package]] name = "codespan" version = "0.9.5" @@ -660,15 +621,16 @@ dependencies = [ [[package]] name = "common" version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=26178359a2251e885f15f0a4d1a686afda04aec9#26178359a2251e885f15f0a4d1a686afda04aec9" +source = "git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" dependencies = [ "acvm", "blake2", "dirs 3.0.2", - "downloader", + "futures-util", "indicatif", - "regex", + "reqwest", "sled", + "tokio", ] [[package]] @@ -1047,19 +1009,6 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" -[[package]] -name = "downloader" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05213e96f184578b5f70105d4d0a644a168e99e12d7bea0b200c15d67b5c182" -dependencies = [ - "futures", - "rand 0.8.5", - "reqwest", - "thiserror", - "tokio", -] - [[package]] name = "ecdsa" version = "0.10.2" @@ -1151,19 +1100,6 @@ dependencies = [ "syn", ] -[[package]] -name = "env_logger" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" -dependencies = [ - "atty", - "humantime", - "log", - "regex", - "termcolor", -] - [[package]] name = "errno" version = "0.2.8" @@ -1288,21 +1224,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" -[[package]] -name = "futures" -version = "0.3.27" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "531ac96c6ff5fd7c62263c5e3c67a603af4fcaee2e1a0ae5565ba3a11e69e549" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - [[package]] name = "futures-channel" version = "0.3.27" @@ -1310,7 +1231,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "164713a5a0dcc3e7b4b1ed7d3b433cabc18025386f9339346e8daf15963cf7ac" dependencies = [ "futures-core", - "futures-sink", ] [[package]] @@ -1319,17 +1239,6 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86d7a0c1aa76363dac491de0ee99faf6941128376f1cf96f07db7603b7de69dd" -[[package]] -name = "futures-executor" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1997dd9df74cdac935c76252744c1ed5794fac083242ea4fe77ef3ed60ba0f83" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - [[package]] name = "futures-io" version = "0.3.27" @@ -1365,7 +1274,6 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ef6b17e481503ec85211fed8f39d1970f128935ca1f814cd32ac4a6842e84ab" dependencies = [ - "futures-channel", "futures-core", "futures-io", "futures-macro", @@ -1539,15 +1447,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.2.6" @@ -1613,12 +1512,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - [[package]] name = "hyper" version = "0.14.25" @@ -1732,14 +1625,14 @@ dependencies = [ [[package]] name = "indicatif" -version = "0.15.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7baab56125e25686df467fe470785512329883aab42696d661247aca2a2896e4" +checksum = "cef509aa9bc73864d6756f0d34d35504af3cf0844373afe9b8669a5b8005a729" dependencies = [ "console", - "lazy_static", "number_prefix", - "regex", + "portable-atomic", + "unicode-width", ] [[package]] @@ -2014,7 +1907,7 @@ dependencies = [ "barretenberg_wasm", "build-data", "cfg-if 1.0.0", - "clap 4.1.8", + "clap", "color-eyre", "const_format", "dirs 4.0.0", @@ -2068,7 +1961,7 @@ name = "noirc_driver" version = "0.4.1" dependencies = [ "acvm", - "clap 4.1.8", + "clap", "fm", "iter-extended", "noirc_abi", @@ -2170,15 +2063,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "num_cpus" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3" -dependencies = [ - "libc", -] - [[package]] name = "num_cpus" version = "1.15.0" @@ -2191,9 +2075,9 @@ dependencies = [ [[package]] name = "number_prefix" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b02fc0ff9a9e4b35b3342880f48e896ebf69f2967921fe8646bf5b7125956a" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" @@ -2304,6 +2188,18 @@ dependencies = [ "der", ] +[[package]] +name = "pkg-config" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" + +[[package]] +name = "portable-atomic" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26f6a7b87c2e435a3241addceeeff740ff8b7e76b74c13bf9acb17fa454ea00b" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -2512,7 +2408,7 @@ dependencies = [ "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus 1.15.0", + "num_cpus", ] [[package]] @@ -2610,9 +2506,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.14" +version = "0.11.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21eed90ec8570952d53b772ecf8f206aa1ec9a3d76b2521c56c42973f2d91ee9" +checksum = "27b71749df584b7f4cac2c426c127a7c785a5106cc98f7a8feb044115f0fa254" dependencies = [ "base64", "bytes", @@ -2638,10 +2534,12 @@ dependencies = [ "serde_urlencoded", "tokio", "tokio-rustls", + "tokio-util", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", + "wasm-streams", "web-sys", "webpki-roots", "winreg", @@ -2729,6 +2627,7 @@ dependencies = [ "proc-macro2", "quote", "rust-embed-utils", + "shellexpand", "syn", "walkdir", ] @@ -2739,6 +2638,7 @@ version = "7.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512b0ab6853f7e14e3c8754acb43d6f748bb9ced66aa5915a6553ac8213f7731" dependencies = [ + "globset", "sha2 0.10.6", "walkdir", ] @@ -3001,6 +2901,15 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "shellexpand" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ccc8076840c4da029af4f87e4e8daeb0fca6b87bbb02e10cb60b791450e11e4" +dependencies = [ + "dirs 4.0.0", +] + [[package]] name = "shlex" version = "1.1.0" @@ -3196,12 +3105,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" 
-[[package]] -name = "textwrap" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" - [[package]] name = "thiserror" version = "1.0.39" @@ -3269,7 +3172,6 @@ dependencies = [ "libc", "memchr", "mio", - "num_cpus 1.15.0", "pin-project-lite", "socket2", "windows-sys 0.45.0", @@ -3599,6 +3501,19 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasm-streams" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wasmer" version = "2.3.0" diff --git a/Cargo.toml b/Cargo.toml index 746c3e6399a..26eec846ef9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,7 +24,7 @@ edition = "2021" rust-version = "1.66" [workspace.dependencies] -acvm = "0.8.0" +acvm = "0.9.0" arena = { path = "crates/arena" } fm = { path = "crates/fm" } iter-extended = { path = "crates/iter-extended" } diff --git a/README.md b/README.md index bef2e9a0a87..1830e43016f 100644 --- a/README.md +++ b/README.md @@ -65,6 +65,81 @@ Concretely the following items are on the road map: This crate's minimum supported rustc version is 1.66.0. +## Working on this project + +Due to the large number of native dependencies, this project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. + +### Setting up your environment + +For the best experience, please follow these instructions to setup your environment: +1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system +2. Create the file `~/.config/nix/nix.conf` with the contents: +```ini +experimental-features = nix-command +extra-experimental-features = flakes +``` +3. 
Install direnv into your Nix profile by running: +```sh +nix profile install nixpkgs#direnv +``` +4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html) +5. Restart your shell + +### Shell & editor experience + +Now that your environment is set up, you can get to work on the project. + +1. Clone the repository, such as: +```sh +git clone git@github.com:noir-lang/noir +``` +2. Navigate to the directory: +```sh +cd noir +``` +3. You should see a __direnv error__ because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: +```sh +direnv allow +``` +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. +5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): +```sh +code . +``` +6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. + +### Building and testing + +Assuming you are using `direnv` to populate your environment, building and testing the project can be done +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in [flake.nix](./flake.nix), which is 1.66.0 at the time of this writing. + +If you want to build the entire project in an isolated sandbox, you can use Nix commands: +1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox +2. 
`nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox + +### Building against a different local/remote version of Barretenberg + +If you are working on this project and want a different version of Barretenberg (instead of the version this project is pinned against), you'll want to replace the lockfile version with your version. This can be done by running: + +```sh +nix flake lock --override-input barretenberg /absolute/path/to/your/barretenberg +``` + +You can also point at a fork and/or branch on GitHub using: + +```sh +nix flake lock --override-input barretenberg github:username/barretenberg/branch_name +``` + +__Note:__ You don't want to commit the updated lockfile, as it will fail in CI! + +### Without direnv + +If you have hesitations with using `direnv`, you can launch a subshell with `nix develop` and then launch your editor +from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. + +__Advanced:__ If you aren't using `direnv` nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! + ## License Noir is free and open source. It is distributed under a dual license. 
(MIT/APACHE) diff --git a/crates/nargo/Cargo.toml b/crates/nargo/Cargo.toml index 430d926cc9e..8d3c9fbd3cd 100644 --- a/crates/nargo/Cargo.toml +++ b/crates/nargo/Cargo.toml @@ -18,4 +18,3 @@ iter-extended.workspace = true toml.workspace = true serde.workspace = true thiserror.workspace = true - diff --git a/crates/nargo_cli/Cargo.toml b/crates/nargo_cli/Cargo.toml index 49c9428f546..40ab4b92459 100644 --- a/crates/nargo_cli/Cargo.toml +++ b/crates/nargo_cli/Cargo.toml @@ -37,8 +37,8 @@ termcolor = "1.1.2" color-eyre = "0.6.2" # Backends -aztec_backend = { optional = true, package = "barretenberg_static_lib", git = "https://github.com/noir-lang/aztec_backend", rev = "26178359a2251e885f15f0a4d1a686afda04aec9" } -aztec_wasm_backend = { optional = true, package = "barretenberg_wasm", git = "https://github.com/noir-lang/aztec_backend", rev = "26178359a2251e885f15f0a4d1a686afda04aec9" } +aztec_backend = { optional = true, package = "barretenberg_static_lib", git = "https://github.com/noir-lang/aztec_backend", rev = "e3d4504f15e1295e637c4da80b1d08c87c267c45" } +aztec_wasm_backend = { optional = true, package = "barretenberg_wasm", git = "https://github.com/noir-lang/aztec_backend", rev = "e3d4504f15e1295e637c4da80b1d08c87c267c45" } [dev-dependencies] tempdir = "0.3.7" diff --git a/crates/nargo_cli/build.rs b/crates/nargo_cli/build.rs index 0745d31840d..8e8ff02b903 100644 --- a/crates/nargo_cli/build.rs +++ b/crates/nargo_cli/build.rs @@ -7,10 +7,16 @@ fn check_rustc_version() { ); } +const GIT_COMMIT: &&str = &"GIT_COMMIT"; + fn main() { check_rustc_version(); - build_data::set_GIT_COMMIT(); - build_data::set_GIT_DIRTY(); - build_data::no_debug_rebuilds(); + // Only use build_data if the environment variable isn't set + // The environment variable is always set when working via Nix + if std::env::var(GIT_COMMIT).is_err() { + build_data::set_GIT_COMMIT(); + build_data::set_GIT_DIRTY(); + build_data::no_debug_rebuilds(); + } } diff --git 
a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs index 4e713ff4a42..319a5722708 100644 --- a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -1,14 +1,19 @@ -use super::fs::{create_named_dir, write_to_file}; +use super::fs::{create_named_dir, program::read_program_from_file, write_to_file}; use super::NargoConfig; -use crate::{cli::compile_cmd::compile_circuit, constants::CONTRACT_DIR, errors::CliError}; -use acvm::SmartContract; +use crate::{ + cli::compile_cmd::compile_circuit, constants::CONTRACT_DIR, constants::TARGET_DIR, + errors::CliError, +}; use clap::Args; -use nargo::ops::preprocess_program; +use nargo::ops::{codegen_verifier, preprocess_program}; use noirc_driver::CompileOptions; /// Generates a Solidity verifier smart contract for the program #[derive(Debug, Clone, Args)] pub(crate) struct CodegenVerifierCommand { + /// The name of the circuit build files (ACIR, proving and verification keys) + circuit_name: Option, + #[clap(flatten)] compile_options: CompileOptions, } @@ -16,11 +21,21 @@ pub(crate) struct CodegenVerifierCommand { pub(crate) fn run(args: CodegenVerifierCommand, config: NargoConfig) -> Result<(), CliError> { let backend = crate::backends::ConcreteBackend; - let compiled_program = compile_circuit(&backend, &config.program_dir, &args.compile_options)?; - let preprocessed_program = preprocess_program(&backend, compiled_program)?; + // TODO(#1201): Should this be a utility function? + let circuit_build_path = args + .circuit_name + .map(|circuit_name| config.program_dir.join(TARGET_DIR).join(circuit_name)); + + let preprocessed_program = match circuit_build_path { + Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, + None => { + let compiled_program = + compile_circuit(&backend, config.program_dir.as_ref(), &args.compile_options)?; + preprocess_program(&backend, compiled_program)? 
+ } + }; - #[allow(deprecated)] - let smart_contract_string = backend.eth_contract_from_cs(preprocessed_program.bytecode); + let smart_contract_string = codegen_verifier(&backend, &preprocessed_program.verification_key)?; let contract_dir = config.program_dir.join(CONTRACT_DIR); create_named_dir(&contract_dir, "contract"); diff --git a/crates/nargo_cli/src/lib.rs b/crates/nargo_cli/src/lib.rs index a943e580632..da4dee6fd59 100644 --- a/crates/nargo_cli/src/lib.rs +++ b/crates/nargo_cli/src/lib.rs @@ -1,5 +1,5 @@ #![forbid(unsafe_code)] -#![warn(unused_crate_dependencies, unused_extern_crates)] +#![warn(unused_extern_crates)] #![warn(unreachable_pub)] #![warn(clippy::semicolon_if_nothing_returned)] diff --git a/crates/nargo_cli/tests/prove_and_verify.rs b/crates/nargo_cli/tests/prove_and_verify.rs index 15e860bf059..070db6d8ce8 100644 --- a/crates/nargo_cli/tests/prove_and_verify.rs +++ b/crates/nargo_cli/tests/prove_and_verify.rs @@ -3,12 +3,8 @@ use tempdir::TempDir; use std::collections::BTreeMap; use std::fs; -const TEST_DIR: &str = "tests"; -const TEST_DATA_DIR: &str = "test_data"; -const CONFIG_FILE: &str = "config.toml"; - mod tests { - use std::path::Path; + use std::path::{Path, PathBuf}; use super::*; @@ -51,13 +47,17 @@ mod tests { #[test] fn noir_integration() { - let current_dir = std::env::current_dir().unwrap(); - - let test_data_dir = current_dir.join(TEST_DIR).join(TEST_DATA_DIR); + // Try to find the directory that Cargo sets when it is running; otherwise fallback to assuming the CWD + // is the root of the repository and append the crate path + let manifest_dir = match std::env::var("CARGO_MANIFEST_DIR") { + Ok(dir) => PathBuf::from(dir), + Err(_) => std::env::current_dir().unwrap().join("crates").join("nargo_cli"), + }; + let test_data_dir = manifest_dir.join("tests").join("test_data"); + let config_path = test_data_dir.join("config.toml"); - // Load config.toml file from test_data directory - let config_file_path = 
test_data_dir.join(CONFIG_FILE); - let config_data: BTreeMap> = load_conf(&config_file_path); + // Load config.toml file from `test_data` directory + let config_data: BTreeMap> = load_conf(&config_path); // Copy all the test cases into a temp dir so we don't leave artifacts around. let tmp_dir = TempDir::new("p_and_v_tests").unwrap(); diff --git a/crates/nargo_cli/tests/test_data/merkle_insert/Prover.toml b/crates/nargo_cli/tests/test_data/merkle_insert/Prover.toml index 909fc1bbf6b..691ea27abdb 100644 --- a/crates/nargo_cli/tests/test_data/merkle_insert/Prover.toml +++ b/crates/nargo_cli/tests/test_data/merkle_insert/Prover.toml @@ -1,11 +1,11 @@ -old_root = "0x083b35b32ba24436c1614e4262cb4ad8f98f99cdb5fb0da8932ab2a290034867" -old_leaf = "0x0b81829b478114d28b964bd382ebff8be0216741aa72ff2896909110aef1704a" +old_root = "0x04ccfbbb859b8605546e03dcaf41393476642859ff7f99446c054b841f0e05c8" +old_leaf = "0x1cdcf02431ba623767fe389337d011df1048dcc24b98ed81cec97627bab454a0" old_hash_path = [ - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", - "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + "0x1cdcf02431ba623767fe389337d011df1048dcc24b98ed81cec97627bab454a0", + "0x0b5e9666e7323ce925c28201a97ddf4144ac9d148448ed6f49f9008719c1b85b", + "0x22ec636f8ad30ef78c42b7fe2be4a4cacf5a445cfb5948224539f59a11d70775", ] -new_root = "0x256c13d7694e2f900f55756246aa1104169efd9fb9e7c6be54c15794795d476f" -leaf = "0x2e5ba44f3c5329aeb915c703e39b33c5872f1542500cbb22f12b71640aba502f" +new_root = "0x293ee9de893904164a8f8b248b6941217d16a4c9042ec54b71896dddbbce7a3b" +leaf = "0x085ca53be9c9d95b57e6e5fc91c5d531ad9e63e85dd71af7e35562991774b435" index = "0" mimc_input = [12,45,78,41] diff --git a/crates/nargo_cli/tests/test_data/pedersen_check/Prover.toml b/crates/nargo_cli/tests/test_data/pedersen_check/Prover.toml index c5eaec281c9..4459711e6ed 100644 --- 
a/crates/nargo_cli/tests/test_data/pedersen_check/Prover.toml +++ b/crates/nargo_cli/tests/test_data/pedersen_check/Prover.toml @@ -2,5 +2,5 @@ x = "0" y = "1" salt = "42" -out_x = "0x229fb88be21cec523e9223a21324f2e305aea8bff9cdbcb3d0c6bba384666ea1" -out_y = "0x296b4b4605e586a91caa3202baad557628a8c56d0a1d6dff1a7ca35aed3029d5" +out_x = "0x11831f49876c313f2a9ec6d8d521c7ce0b6311c852117e340bfe27fd1ac096ef" +out_y = "0x0ecf9d98be4597a88c46a7e0fa8836b57a7dcb41ee30f8d8787b11cc259c83fa" diff --git a/crates/nargo_cli/tests/test_data/simple_shield/Prover.toml b/crates/nargo_cli/tests/test_data/simple_shield/Prover.toml index 67e825f6333..554ae9900a8 100644 --- a/crates/nargo_cli/tests/test_data/simple_shield/Prover.toml +++ b/crates/nargo_cli/tests/test_data/simple_shield/Prover.toml @@ -1,11 +1,11 @@ # Random test key priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" -note_root = "0x165cdc3cbcf4a6bf7b69514807409a3ed2e701556821f396ca04781a6434d96a" +note_root = "0x293ee9de893904164a8f8b248b6941217d16a4c9042ec54b71896dddbbce7a3b" index = "0" note_hash_path = [ - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", - "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40", + "0x1cdcf02431ba623767fe389337d011df1048dcc24b98ed81cec97627bab454a0", + "0x0b5e9666e7323ce925c28201a97ddf4144ac9d148448ed6f49f9008719c1b85b", + "0x22ec636f8ad30ef78c42b7fe2be4a4cacf5a445cfb5948224539f59a11d70775", ] to_pubkey_x = "0x0000000000000000000000000000000000000000000000000000000000000001" to_pubkey_y = "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c" diff --git a/crates/nargo_cli/tests/test_data/xor/Nargo.toml b/crates/nargo_cli/tests/test_data/xor/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/xor/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + 
+[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/xor/Prover.toml b/crates/nargo_cli/tests/test_data/xor/Prover.toml new file mode 100644 index 00000000000..f28f2f8cc48 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/xor/Prover.toml @@ -0,0 +1,2 @@ +x = "5" +y = "10" diff --git a/crates/nargo_cli/tests/test_data/xor/src/main.nr b/crates/nargo_cli/tests/test_data/xor/src/main.nr new file mode 100644 index 00000000000..cc7caf17fad --- /dev/null +++ b/crates/nargo_cli/tests/test_data/xor/src/main.nr @@ -0,0 +1,5 @@ +fn main(x : u32, y : pub u32) { + let m = x ^ y; + + constrain m != 10; +} \ No newline at end of file diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs index fe7c39de7ce..04524959fbe 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs @@ -130,6 +130,7 @@ mod test { struct MockBackend {} impl PartialWitnessGenerator for MockBackend { fn solve_black_box_function_call( + &self, _initial_witness: &mut BTreeMap, _func_call: &BlackBoxFuncCall, ) -> Result { diff --git a/crates/wasm/build.rs b/crates/wasm/build.rs index 9c91effe74f..3b96be74ef3 100644 --- a/crates/wasm/build.rs +++ b/crates/wasm/build.rs @@ -1,6 +1,14 @@ +const GIT_COMMIT: &&str = &"GIT_COMMIT"; + fn main() { - build_data::set_GIT_COMMIT(); - build_data::set_GIT_DIRTY(); + // Only use build_data if the environment variable isn't set + // The environment variable is always set when working via Nix + if std::env::var(GIT_COMMIT).is_err() { + build_data::set_GIT_COMMIT(); + build_data::set_GIT_DIRTY(); + build_data::no_debug_rebuilds(); + } + build_data::set_SOURCE_TIMESTAMP(); build_data::no_debug_rebuilds(); } diff --git a/crates/wasm/src/circuit.rs b/crates/wasm/src/circuit.rs index 6168fe77bbf..97f9ef9cf18 100644 --- a/crates/wasm/src/circuit.rs +++ b/crates/wasm/src/circuit.rs @@ 
-2,29 +2,6 @@ use acvm::acir::circuit::Circuit; use gloo_utils::format::JsValueSerdeExt; use wasm_bindgen::prelude::*; -// Deserializes bytes into ACIR structure -#[deprecated( - note = "we have moved away from this serialization strategy. Call `acir_read_bytes` instead" -)] -#[allow(deprecated)] -#[wasm_bindgen] -pub fn acir_from_bytes(bytes: Vec) -> JsValue { - console_error_panic_hook::set_once(); - let circuit = Circuit::from_bytes(&bytes); - ::from_serde(&circuit).unwrap() -} - -#[deprecated( - note = "we have moved away from this serialization strategy. Call `acir_write_bytes` instead" -)] -#[allow(deprecated)] -#[wasm_bindgen] -pub fn acir_to_bytes(acir: JsValue) -> Vec { - console_error_panic_hook::set_once(); - let circuit: Circuit = JsValueSerdeExt::into_serde(&acir).unwrap(); - circuit.to_bytes() -} - // Deserializes bytes into ACIR structure #[wasm_bindgen] pub fn acir_read_bytes(bytes: Vec) -> JsValue { diff --git a/crates/wasm/src/lib.rs b/crates/wasm/src/lib.rs index 56995c21df5..2a659b94965 100644 --- a/crates/wasm/src/lib.rs +++ b/crates/wasm/src/lib.rs @@ -12,8 +12,7 @@ use wasm_bindgen::prelude::*; mod circuit; mod compile; -#[allow(deprecated)] -pub use circuit::{acir_from_bytes, acir_read_bytes, acir_to_bytes, acir_write_bytes}; +pub use circuit::{acir_read_bytes, acir_write_bytes}; pub use compile::{compile, WASMCompileOptions}; #[derive(Serialize, Deserialize)] diff --git a/cspell.json b/cspell.json index 37667e96ba3..f0031d724b2 100644 --- a/cspell.json +++ b/cspell.json @@ -52,6 +52,10 @@ "uninstantiated", "urem", "vecmap", + "direnv", + "nixpkgs", + "envrc", + "subshell", // Dependencies // "acir", diff --git a/default.nix b/default.nix index 0bb5c39e068..9e230590a61 100644 --- a/default.nix +++ b/default.nix @@ -1,11 +1,13 @@ -{ pkgs ? 
import {} }: - -pkgs.mkShell { - buildInputs = [ - pkgs.openssl - pkgs.pkg-config - pkgs.cmake - pkgs.llvmPackages.openmp - pkgs.rustup - ]; -} \ No newline at end of file +let + lock = builtins.fromJSON (builtins.readFile ./flake.lock); + flakeCompatRev = lock.nodes.flake-compat.locked.rev; + flakeCompatHash = lock.nodes.flake-compat.locked.narHash; + flakeCompat = fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${flakeCompatRev}.tar.gz"; + sha256 = flakeCompatHash; + }; + compat = import flakeCompat { + src = ./.; + }; +in +compat.defaultNix diff --git a/flake.lock b/flake.lock new file mode 100644 index 00000000000..04dbc188a52 --- /dev/null +++ b/flake.lock @@ -0,0 +1,156 @@ +{ + "nodes": { + "barretenberg": { + "inputs": { + "flake-utils": [ + "flake-utils" + ], + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1682019675, + "narHash": "sha256-KZ/VL/u81z2sFTdwfxvUFR+ftqf3+2AA0gR9kkgKxe4=", + "owner": "AztecProtocol", + "repo": "barretenberg", + "rev": "a38e3611590e085e5f25c322757871fb048aa3d7", + "type": "github" + }, + "original": { + "owner": "AztecProtocol", + "repo": "barretenberg", + "type": "github" + } + }, + "crane": { + "inputs": { + "flake-compat": [ + "flake-compat" + ], + "flake-utils": [ + "flake-utils" + ], + "nixpkgs": [ + "nixpkgs" + ], + "rust-overlay": [ + "rust-overlay" + ] + }, + "locked": { + "lastModified": 1681680516, + "narHash": "sha256-EB8Adaeg4zgcYDJn9sR6UMjN/OHdIiMMK19+3LmmXQY=", + "owner": "ipetkov", + "repo": "crane", + "rev": "54b63c8eae4c50172cb50b612946ff1d2bc1c75c", + "type": "github" + }, + "original": { + "owner": "ipetkov", + "repo": "crane", + "type": "github" + } + }, + "flake-compat": { + "flake": false, + "locked": { + "lastModified": 1673956053, + "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", + "type": "github" + }, + "original": { + "owner": 
"edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1681202837, + "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "cfacdce06f30d2b68473a46042957675eebb3401", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1681932375, + "narHash": "sha256-tSXbYmpnKSSWpzOrs27ie8X3I0yqKA6AuCzCYNtwbCU=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "3d302c67ab8647327dba84fbdb443cdbf0e82744", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-22.11", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "barretenberg": "barretenberg", + "crane": "crane", + "flake-compat": "flake-compat", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "flake-utils": [ + "flake-utils" + ], + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1681957132, + "narHash": "sha256-52GaHyeLyyiT0u4OL3uGbo0vsUMKm33Z3zLkPyK/ZRY=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "4771640d46c214d702512a8ece591f582ae507fa", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 00000000000..ab5012160d3 --- /dev/null +++ b/flake.nix @@ -0,0 +1,191 @@ +{ + description = 
"Build the Noir programming language"; + + inputs = { + nixpkgs = { + url = "github:NixOS/nixpkgs/nixos-22.11"; + }; + + flake-utils = { + url = "github:numtide/flake-utils"; + }; + + flake-compat = { + url = "github:edolstra/flake-compat"; + flake = false; + }; + + rust-overlay = { + url = "github:oxalica/rust-overlay"; + # All of these inputs (a.k.a. dependencies) need to align with inputs we + # use so they use the `inputs.*.follows` syntax to reference our inputs + inputs = { + nixpkgs.follows = "nixpkgs"; + flake-utils.follows = "flake-utils"; + }; + }; + + crane = { + url = "github:ipetkov/crane"; + # All of these inputs (a.k.a. dependencies) need to align with inputs we + # use so they use the `inputs.*.follows` syntax to reference our inputs + inputs = { + nixpkgs.follows = "nixpkgs"; + flake-utils.follows = "flake-utils"; + flake-compat.follows = "flake-compat"; + rust-overlay.follows = "rust-overlay"; + }; + }; + + barretenberg = { + url = "github:AztecProtocol/barretenberg"; + # All of these inputs (a.k.a. dependencies) need to align with inputs we + # use so they use the `inputs.*.follows` syntax to reference our inputs + inputs = { + nixpkgs.follows = "nixpkgs"; + flake-utils.follows = "flake-utils"; + }; + }; + }; + + outputs = + { self, nixpkgs, crane, flake-utils, rust-overlay, barretenberg, ... }: + flake-utils.lib.eachDefaultSystem (system: + let + pkgs = import nixpkgs { + inherit system; + overlays = [ + rust-overlay.overlays.default + barretenberg.overlays.default + ]; + }; + + rustToolchain = pkgs.rust-bin.stable."1.66.0".default.override { + # We include rust-src to ensure rust-analyzer works. 
+ # See https://discourse.nixos.org/t/rust-src-not-found-and-other-misadventures-of-developing-rust-on-nixos/11570/4 + extensions = [ "rust-src" ]; + }; + + craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; + + environment = { + # rust-bindgen needs to know the location of libclang + LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib"; + + # Barretenberg fails if tests are run on multiple threads, so we set the test thread + # count to 1 throughout the entire project + # + # Note: Setting this allows for consistent behavior across build and shells, but is mostly + # hidden from the developer - i.e. when they see the command being run via `nix flake check` + RUST_TEST_THREADS = "1"; + + # We set the environment variable because barretenberg must be compiled in a special way for wasm + BARRETENBERG_BIN_DIR = "${pkgs.barretenberg-wasm}/bin"; + + # We provide `barretenberg-transcript00` from the overlay to the build. + # This is necessary because the Nix sandbox disables the $HOME so downloading during tests would fail + BARRETENBERG_TRANSCRIPT = pkgs.barretenberg-transcript00; + }; + + # The `self.rev` property is only available when the working tree is not dirty + GIT_COMMIT = if (self ? rev) then self.rev else "unknown"; + GIT_DIRTY = if (self ? 
rev) then "false" else "true"; + + # As per https://discourse.nixos.org/t/gcc11stdenv-and-clang/17734/7 since it seems that aarch64-linux uses + # gcc9 instead of gcc11 for the C++ stdlib, while all other targets we support provide the correct libstdc++ + stdenv = + if (pkgs.stdenv.targetPlatform.isGnu && pkgs.stdenv.targetPlatform.isAarch64) then + pkgs.overrideCC pkgs.llvmPackages.stdenv (pkgs.llvmPackages.clang.override { gccForLibs = pkgs.gcc11.cc; }) + else + pkgs.llvmPackages.stdenv; + + # Combine the environment and other configuration needed for crane to build our Rust packages + commonArgs = environment // { + pname = "noir"; + # x-release-please-start-version + version = "0.3.2"; + # x-release-please-end + + # Use our custom stdenv to build and test our Rust project + inherit stdenv; + + src = ./.; + + # Running checks don't do much more than compiling itself and increase + # the build time by a lot, so we disable them throughout all our flakes + doCheck = false; + + nativeBuildInputs = [ + # This provides the pkg-config tool to find barretenberg & other native libraries + pkgs.pkg-config + # This provides the `lld` linker to cargo + pkgs.llvmPackages.bintools + ]; + + buildInputs = [ + pkgs.llvmPackages.openmp + pkgs.barretenberg + ] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [ + # Need libiconv and apple Security on Darwin. See https://github.com/ipetkov/crane/issues/156 + pkgs.libiconv + pkgs.darwin.apple_sdk.frameworks.Security + ]; + + inherit GIT_COMMIT; + inherit GIT_DIRTY; + }; + + # Build *just* the cargo dependencies, so we can reuse all of that work between runs + cargoArtifacts = craneLib.buildDepsOnly commonArgs; + + noir = craneLib.buildPackage (commonArgs // { + inherit cargoArtifacts; + }); + in + rec { + checks = { + cargo-clippy = craneLib.cargoClippy (commonArgs // { + inherit cargoArtifacts; + + # TODO(#1198): It'd be nice to include these flags when running `cargo clippy` in a devShell. 
+ cargoClippyExtraArgs = "--all-targets -- -D warnings"; + + doCheck = true; + }); + + cargo-test = craneLib.cargoTest (commonArgs // { + inherit cargoArtifacts; + + # TODO(#1198): It'd be nice to include this flag when running `cargo test` in a devShell. + cargoTestExtraArgs = "--workspace"; + + doCheck = true; + }); + }; + + packages.default = noir; + + # TODO(#1197): Look into installable apps with Nix flakes + # apps.default = flake-utils.lib.mkApp { drv = nargo; }; + + # Setup the environment to match the stdenv from `nix build` & `nix flake check`, and + # combine it with the environment settings, the inputs from our checks derivations, + # and extra tooling via `nativeBuildInputs` + devShells.default = pkgs.mkShell.override { inherit stdenv; } (environment // { + inputsFrom = builtins.attrValues checks; + + nativeBuildInputs = with pkgs; [ + which + starship + git + nil + nixpkgs-fmt + llvmPackages.lldb # This ensures the right lldb is in the environment for running rust-lldb + ]; + + shellHook = '' + eval "$(starship init bash)" + ''; + }); + }); +} diff --git a/shell.nix b/shell.nix new file mode 100644 index 00000000000..b72d4a4697b --- /dev/null +++ b/shell.nix @@ -0,0 +1,13 @@ +let + lock = builtins.fromJSON (builtins.readFile ./flake.lock); + flakeCompatRev = lock.nodes.flake-compat.locked.rev; + flakeCompatHash = lock.nodes.flake-compat.locked.narHash; + flakeCompat = fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${flakeCompatRev}.tar.gz"; + sha256 = flakeCompatHash; + }; + compat = import flakeCompat { + src = ./.; + }; +in +compat.shellNix From b51f74a20831e0a2602167855cf405c11f5b70d3 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 22 Apr 2023 00:16:21 +0100 Subject: [PATCH 31/63] chore: update flake version to match current release (#1204) --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 
ab5012160d3..74f19f612f2 100644 --- a/flake.nix +++ b/flake.nix @@ -103,7 +103,7 @@ commonArgs = environment // { pname = "noir"; # x-release-please-start-version - version = "0.3.2"; + version = "0.4.1"; # x-release-please-end # Use our custom stdenv to build and test our Rust project From 48995b4571e7cbe2c36c516d7a7ae9e543105132 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Pedro=20Sousa?= Date: Mon, 24 Apr 2023 19:02:30 +0100 Subject: [PATCH 32/63] chore: adding workflow to add issues to docs on docs needed label (#1178) * chore: adding workflow to add issues to docs on docs needed label * chore: adding suggestions * chore: adding suggestions * Update .github/workflows/label_docs.yml Co-authored-by: Blaine Bublitz * chore: adding suggestions * fixing accidental reversion of the gh token * Update .github/workflows/label_docs.yml Co-authored-by: Blaine Bublitz * pushing for CI --------- Co-authored-by: kevaundray Co-authored-by: Blaine Bublitz --- .github/workflows/label_docs.yml | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 .github/workflows/label_docs.yml diff --git a/.github/workflows/label_docs.yml b/.github/workflows/label_docs.yml new file mode 100644 index 00000000000..c7c20c7b157 --- /dev/null +++ b/.github/workflows/label_docs.yml @@ -0,0 +1,32 @@ +name: Notify Doc Needed + +on: + pull_request_target: + types: + - labeled + - unlabeled + +jobs: + dispatch: + runs-on: ubuntu-latest + if: github.event.label.name == 'doc needed' + + steps: + - uses: actions/checkout@v3 + - name: Set workflowId environment variable + id: set_workflow_id + run: | + if [[ "${{ github.event.action }}" == "labeled" ]]; then + echo "workflowId=new-migrated-issue.yml" >> $GITHUB_ENV + else + echo "workflowId=delete-migrated-issue.yml" >> $GITHUB_ENV + fi + + - name: Dispatch + uses: benc-uk/workflow-dispatch@v1 + with: + workflow: ${{ env.workflowId }} + repo: noir-lang/docs + ref: master + token: ${{ secrets.DOCS_REPO_TOKEN }} + 
inputs: '{ "pr_number": "${{ github.event.pull_request.number }}" }' From cca45a4980aebc041742be57f80a3428b26284cc Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 24 Apr 2023 15:38:55 -0400 Subject: [PATCH 33/63] chore(ssa refactor): Handle function parameters (#1203) * Add Context structs and start ssa gen pass * Fix block arguments * Fix clippy lint * Use the correct dfg * Rename contexts to highlight the inner contexts are shared rather than used directly * Correctly handle function parameters * Rename Nested to Tree; add comment --- .../src/ssa_refactor/ir/basic_block.rs | 19 +++- .../src/ssa_refactor/ir/dfg.rs | 36 +++++++- .../src/ssa_refactor/ir/function.rs | 32 ++----- .../src/ssa_refactor/ir/instruction.rs | 10 ++ .../src/ssa_refactor/ir/types.rs | 21 +++++ .../ssa_builder/function_builder.rs | 16 +++- .../src/ssa_refactor/ssa_gen/context.rs | 91 +++++++++++++++++-- .../src/ssa_refactor/ssa_gen/mod.rs | 42 ++++----- .../src/ssa_refactor/ssa_gen/value.rs | 33 ++++++- .../src/monomorphization/ast.rs | 4 +- 10 files changed, 237 insertions(+), 67 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index b11c4dc3f1c..431f1647863 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -35,7 +35,24 @@ pub(crate) struct BasicBlock { pub(crate) type BasicBlockId = Id; impl BasicBlock { - pub(super) fn new(parameters: Vec) -> Self { + pub(crate) fn new(parameters: Vec) -> Self { Self { parameters, instructions: Vec::new(), is_sealed: false, terminator: None } } + + pub(crate) fn parameters(&self) -> &[ValueId] { + &self.parameters + } + + pub(crate) fn add_parameter(&mut self, parameter: ValueId) { + self.parameters.push(parameter); + } + + /// Insert an instruction at the end of this block + pub(crate) fn insert_instruction(&mut self, instruction: InstructionId) { + 
self.instructions.push(instruction); + } + + pub(crate) fn set_terminator(&mut self, terminator: TerminatorInstruction) { + self.terminator = Some(terminator); + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index ad6d614fec0..b456fd08ee4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -7,6 +7,8 @@ use super::{ value::{Value, ValueId}, }; +use iter_extended::vecmap; + #[derive(Debug, Default)] /// A convenience wrapper to store `Value`s. pub(crate) struct ValueList(Vec>); @@ -61,9 +63,31 @@ pub(crate) struct DataFlowGraph { } impl DataFlowGraph { - /// Creates a new `empty` basic block + /// Creates a new basic block with no parameters. + /// After being created, the block is unreachable in the current function + /// until another block is made to jump to it. pub(crate) fn new_block(&mut self) -> BasicBlockId { - todo!() + self.blocks.insert(BasicBlock::new(Vec::new())) + } + + /// Creates a new basic block with the given parameters. + /// After being created, the block is unreachable in the current function + /// until another block is made to jump to it. + pub(crate) fn new_block_with_parameters( + &mut self, + parameter_types: impl Iterator, + ) -> BasicBlockId { + self.blocks.insert_with_id(|entry_block| { + let parameters = vecmap(parameter_types.enumerate(), |(position, typ)| { + self.values.insert(Value::Param { block: entry_block, position, typ }) + }); + + BasicBlock::new(parameters) + }) + } + + pub(crate) fn block_parameters(&self, block: BasicBlockId) -> &[ValueId] { + self.blocks[block].parameters() } /// Inserts a new instruction into the DFG. 
@@ -149,6 +173,14 @@ impl DataFlowGraph { pub(crate) fn instruction_results(&self, instruction_id: InstructionId) -> &[ValueId] { self.results.get(&instruction_id).expect("expected a list of Values").as_slice() } + + pub(crate) fn add_block_parameter(&mut self, block_id: BasicBlockId, typ: Type) -> Id { + let block = &mut self.blocks[block_id]; + let position = block.parameters().len(); + let parameter = self.values.insert(Value::Param { block: block_id, position, typ }); + block.add_parameter(parameter); + parameter + } } #[cfg(test)] diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 2509a85f435..1abd6c85367 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -1,11 +1,9 @@ -use super::basic_block::{BasicBlock, BasicBlockId}; +use super::basic_block::BasicBlockId; use super::dfg::DataFlowGraph; use super::instruction::Instruction; -use super::map::{DenseMap, Id, SecondaryMap}; +use super::map::{Id, SecondaryMap}; use super::types::Type; -use super::value::Value; -use iter_extended::vecmap; use noirc_errors::Location; /// A function holds a list of instructions. @@ -16,35 +14,23 @@ use noirc_errors::Location; /// into the current function's context. #[derive(Debug)] pub(crate) struct Function { - /// Basic blocks associated to this particular function - basic_blocks: DenseMap, - /// Maps instructions to source locations source_locations: SecondaryMap, /// The first basic block in the function entry_block: BasicBlockId, - dfg: DataFlowGraph, + pub(crate) dfg: DataFlowGraph, } impl Function { - pub(crate) fn new(parameter_count: usize) -> Self { + /// Creates a new function with an automatically inserted entry block. + /// + /// Note that any parameters to the function must be manually added later. 
+ pub(crate) fn new() -> Self { let mut dfg = DataFlowGraph::default(); - let mut basic_blocks = DenseMap::default(); - - // The parameters for each function are stored as the block parameters - // of the function's entry block - let entry_block = basic_blocks.insert_with_id(|entry_block| { - // TODO: Give each parameter its correct type - let parameters = vecmap(0..parameter_count, |i| { - dfg.make_value(Value::Param { block: entry_block, position: i, typ: Type::Unit }) - }); - - BasicBlock::new(parameters) - }); - - Self { basic_blocks, source_locations: SecondaryMap::new(), entry_block, dfg } + let entry_block = dfg.new_block(); + Self { source_locations: SecondaryMap::new(), entry_block, dfg } } pub(crate) fn entry_block(&self) -> BasicBlockId { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 1d5089179d5..81a28b8407c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -129,10 +129,20 @@ pub(crate) enum TerminatorInstruction { else_destination: BasicBlockId, arguments: Vec, }, + /// Unconditional Jump /// /// Jumps to specified `destination` with `arguments` Jmp { destination: BasicBlockId, arguments: Vec }, + + /// Return from the current function with the given return values. + /// + /// All finished functions should have exactly 1 return instruction. + /// Functions with early returns should instead be structured to + /// unconditionally jump to a single exit block with the return values + /// as the block arguments. Then the exit block can terminate in a return + /// instruction returning these values. + Return { return_values: Vec }, } /// A binary instruction in the IR. 
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index f2797423e30..e1f8e8a74d2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -18,6 +18,27 @@ pub(crate) enum NumericType { pub(crate) enum Type { /// Represents numeric types in the IR, including field elements Numeric(NumericType), + + /// A reference to some value, such as an array + Reference, + + /// A function that may be called directly + Function, + /// The Unit type with a single value Unit, } + +impl Type { + pub(crate) fn signed(bit_size: u32) -> Type { + Type::Numeric(NumericType::Signed { bit_size }) + } + + pub(crate) fn unsigned(bit_size: u32) -> Type { + Type::Numeric(NumericType::Unsigned { bit_size }) + } + + pub(crate) fn field() -> Type { + Type::Numeric(NumericType::NativeField) + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index 8d90a95332e..5e82226d3be 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -1,6 +1,8 @@ use crate::ssa_refactor::ir::{ basic_block::BasicBlockId, function::{Function, FunctionId}, + types::Type, + value::ValueId, }; use super::SharedBuilderContext; @@ -24,8 +26,8 @@ pub(crate) struct FunctionBuilder<'ssa> { } impl<'ssa> FunctionBuilder<'ssa> { - pub(crate) fn new(parameters: usize, context: &'ssa SharedBuilderContext) -> Self { - let new_function = Function::new(parameters); + pub(crate) fn new(context: &'ssa SharedBuilderContext) -> Self { + let new_function = Function::new(); let current_block = new_function.entry_block(); Self { @@ -38,12 +40,11 @@ impl<'ssa> FunctionBuilder<'ssa> { } /// Finish the current function and create a new function - pub(crate) fn new_function(&mut self, 
parameters: usize) { - let new_function = Function::new(parameters); + pub(crate) fn new_function(&mut self) { + let new_function = Function::new(); let old_function = std::mem::replace(&mut self.current_function, new_function); self.finished_functions.push((self.current_function_id, old_function)); - self.current_function_id = self.global_context.next_function(); } @@ -51,4 +52,9 @@ impl<'ssa> FunctionBuilder<'ssa> { self.finished_functions.push((self.current_function_id, self.current_function)); self.finished_functions } + + pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { + let entry = self.current_function.entry_block(); + self.current_function.dfg.add_block_parameter(entry, typ) + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 94fedb7b4cf..02bfee8a87f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -1,21 +1,24 @@ use std::collections::HashMap; use std::sync::{Mutex, RwLock}; -use noirc_frontend::monomorphization::ast::{self, LocalId}; +use iter_extended::vecmap; +use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters}; use noirc_frontend::monomorphization::ast::{FuncId, Program}; +use noirc_frontend::Signedness; +use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ssa_builder::SharedBuilderContext; use crate::ssa_refactor::{ ir::function::FunctionId as IrFunctionId, ssa_builder::function_builder::FunctionBuilder, }; -use super::value::Value; +use super::value::{Tree, Values}; // TODO: Make this a threadsafe queue so we can compile functions in parallel type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; pub(super) struct FunctionContext<'a> { - definitions: HashMap, + definitions: HashMap, function_builder: FunctionBuilder<'a>, shared_context: &'a SharedContext, } @@ -29,22 +32,90 @@ pub(super) struct SharedContext { impl<'a> 
FunctionContext<'a> { pub(super) fn new( - parameter_count: usize, + parameters: &Parameters, shared_context: &'a SharedContext, shared_builder_context: &'a SharedBuilderContext, ) -> Self { - Self { + let mut this = Self { definitions: HashMap::new(), - function_builder: FunctionBuilder::new(parameter_count, shared_builder_context), + function_builder: FunctionBuilder::new(shared_builder_context), shared_context, + }; + this.add_parameters_to_scope(parameters); + this + } + + pub(super) fn new_function(&mut self, parameters: &Parameters) { + self.definitions.clear(); + self.function_builder.new_function(); + self.add_parameters_to_scope(parameters); + } + + /// Add each parameter to the current scope, and return the list of parameter types. + /// + /// The returned parameter type list will be flattened, so any struct parameters will + /// be returned as one entry for each field (recursively). + fn add_parameters_to_scope(&mut self, parameters: &Parameters) { + for (id, _, _, typ) in parameters { + self.add_parameter_to_scope(*id, typ); + } + } + + /// Adds a "single" parameter to scope. + /// + /// Single is in quotes here because in the case of tuple parameters, the tuple is flattened + /// into a new parameter for each field recursively. + fn add_parameter_to_scope(&mut self, parameter_id: LocalId, parameter_type: &ast::Type) { + // Add a separate parameter for each field type in 'parameter_type' + let parameter_value = self + .map_type(parameter_type, |this, typ| this.function_builder.add_parameter(typ).into()); + + self.definitions.insert(parameter_id, parameter_value); + } + + /// Maps the given type to a Tree of the result type. + /// + /// This can be used to (for example) flatten a tuple type, creating + /// and returning a new parameter for each field type. 
+ pub(super) fn map_type( + &mut self, + typ: &ast::Type, + mut f: impl FnMut(&mut Self, Type) -> T, + ) -> Tree { + self.map_type_helper(typ, &mut f) + } + + // This helper is needed because we need to take f by mutable reference, + // otherwise we cannot move it multiple times each loop of vecmap. + fn map_type_helper( + &mut self, + typ: &ast::Type, + f: &mut impl FnMut(&mut Self, Type) -> T, + ) -> Tree { + match typ { + ast::Type::Tuple(fields) => { + Tree::Branch(vecmap(fields, |field| self.map_type_helper(field, f))) + } + other => Tree::Leaf(f(self, Self::convert_non_tuple_type(other))), } } - pub(super) fn new_function(&mut self, parameters: impl ExactSizeIterator) { - self.function_builder.new_function(parameters.len()); + pub(super) fn convert_non_tuple_type(typ: &ast::Type) -> Type { + match typ { + ast::Type::Field => Type::field(), + ast::Type::Array(_, _) => Type::Reference, + ast::Type::Integer(Signedness::Signed, bits) => Type::signed(*bits), + ast::Type::Integer(Signedness::Unsigned, bits) => Type::unsigned(*bits), + ast::Type::Bool => Type::unsigned(1), + ast::Type::String(_) => Type::Reference, + ast::Type::Unit => Type::Unit, + ast::Type::Tuple(_) => panic!("convert_non_tuple_type called on a tuple: {typ}"), + ast::Type::Function(_, _) => Type::Function, - for (_i, _parameter) in parameters.enumerate() { - todo!("Add block param to definitions") + // How should we represent Vecs? + // Are they a struct of array + length + capacity? + // Or are they just references? 
+ ast::Type::Vec(_) => Type::Reference, } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 1da65fafd48..c340b45eb9b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -5,7 +5,7 @@ use context::SharedContext; use noirc_errors::Location; use noirc_frontend::monomorphization::ast::{self, Expression, Program}; -use self::{context::FunctionContext, value::Value}; +use self::{context::FunctionContext, value::Values}; use super::ssa_builder::SharedBuilderContext; @@ -14,22 +14,20 @@ pub(crate) fn generate_ssa(program: Program) { let builder_context = SharedBuilderContext::default(); let main = context.program.main(); - // TODO struct parameter counting - let parameter_count = main.parameters.len(); - let mut function_context = FunctionContext::new(parameter_count, &context, &builder_context); + let mut function_context = FunctionContext::new(&main.parameters, &context, &builder_context); function_context.codegen_expression(&main.body); while let Some((src_function_id, _new_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; // TODO: Need to ensure/assert the new function's id == new_id - function_context.new_function(function.parameters.iter().map(|(id, ..)| *id)); + function_context.new_function(&function.parameters); function_context.codegen_expression(&function.body); } } impl<'a> FunctionContext<'a> { - fn codegen_expression(&mut self, expr: &Expression) -> Value { + fn codegen_expression(&mut self, expr: &Expression) -> Values { match expr { Expression::Ident(ident) => self.codegen_ident(ident), Expression::Literal(literal) => self.codegen_literal(literal), @@ -54,67 +52,67 @@ impl<'a> FunctionContext<'a> { } } - fn codegen_ident(&mut self, _ident: &ast::Ident) -> Value { + fn codegen_ident(&mut self, _ident: &ast::Ident) -> Values { todo!() } - fn 
codegen_literal(&mut self, _literal: &ast::Literal) -> Value { + fn codegen_literal(&mut self, _literal: &ast::Literal) -> Values { todo!() } - fn codegen_block(&mut self, _block: &[Expression]) -> Value { + fn codegen_block(&mut self, _block: &[Expression]) -> Values { todo!() } - fn codegen_unary(&mut self, _unary: &ast::Unary) -> Value { + fn codegen_unary(&mut self, _unary: &ast::Unary) -> Values { todo!() } - fn codegen_binary(&mut self, _binary: &ast::Binary) -> Value { + fn codegen_binary(&mut self, _binary: &ast::Binary) -> Values { todo!() } - fn codegen_index(&mut self, _index: &ast::Index) -> Value { + fn codegen_index(&mut self, _index: &ast::Index) -> Values { todo!() } - fn codegen_cast(&mut self, _cast: &ast::Cast) -> Value { + fn codegen_cast(&mut self, _cast: &ast::Cast) -> Values { todo!() } - fn codegen_for(&mut self, _for_expr: &ast::For) -> Value { + fn codegen_for(&mut self, _for_expr: &ast::For) -> Values { todo!() } - fn codegen_if(&mut self, _if_expr: &ast::If) -> Value { + fn codegen_if(&mut self, _if_expr: &ast::If) -> Values { todo!() } - fn codegen_tuple(&mut self, _tuple: &[Expression]) -> Value { + fn codegen_tuple(&mut self, _tuple: &[Expression]) -> Values { todo!() } - fn codegen_extract_tuple_field(&mut self, _tuple: &Expression, _index: usize) -> Value { + fn codegen_extract_tuple_field(&mut self, _tuple: &Expression, _index: usize) -> Values { todo!() } - fn codegen_call(&mut self, _call: &ast::Call) -> Value { + fn codegen_call(&mut self, _call: &ast::Call) -> Values { todo!() } - fn codegen_let(&mut self, _let_expr: &ast::Let) -> Value { + fn codegen_let(&mut self, _let_expr: &ast::Let) -> Values { todo!() } - fn codegen_constrain(&mut self, _constrain: &Expression, _location: Location) -> Value { + fn codegen_constrain(&mut self, _constrain: &Expression, _location: Location) -> Values { todo!() } - fn codegen_assign(&mut self, _assign: &ast::Assign) -> Value { + fn codegen_assign(&mut self, _assign: &ast::Assign) -> Values { 
todo!() } - fn codegen_semi(&mut self, _semi: &Expression) -> Value { + fn codegen_semi(&mut self, _semi: &Expression) -> Values { todo!() } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 785ae3cd8f7..4b41c6ae102 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -1,13 +1,40 @@ use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; -use crate::ssa_refactor::ir::value::ValueId; +use crate::ssa_refactor::ir::value::ValueId as IrValueId; + +pub(super) enum Tree { + Branch(Vec>), + Leaf(T), +} #[derive(Debug, Clone)] pub(super) enum Value { - Normal(ValueId), + Normal(IrValueId), Function(IrFunctionId), - Tuple(Vec), /// Lazily inserting unit values helps prevent cluttering the IR with too many /// unit literals. Unit, } + +pub(super) type Values = Tree; + +impl Tree { + pub(super) fn flatten(self) -> Vec { + match self { + Tree::Branch(values) => values.into_iter().flat_map(Tree::flatten).collect(), + Tree::Leaf(value) => vec![value], + } + } +} + +impl From for Values { + fn from(id: IrValueId) -> Self { + Self::Leaf(Value::Normal(id)) + } +} + +impl From for Value { + fn from(id: IrValueId) -> Self { + Value::Normal(id) + } +} diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index 6a2b97ae19d..e4339c8e367 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -175,12 +175,14 @@ pub enum LValue { MemberAccess { object: Box, field_index: usize }, } +pub type Parameters = Vec<(LocalId, /*mutable:*/ bool, /*name:*/ String, Type)>; + #[derive(Debug, Clone)] pub struct Function { pub id: FuncId, pub name: String, - pub parameters: Vec<(LocalId, /*mutable:*/ bool, /*name:*/ String, Type)>, + pub parameters: Parameters, pub body: Expression, pub return_type: 
Type, From ac87a806a4aa70eddca22587f6cfcb68c8c992e7 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Mon, 24 Apr 2023 21:20:45 +0100 Subject: [PATCH 34/63] chore(ci): add cache for Nix workflow (#1210) * chore(ci): Add cache for Nix workflow * align lockfile with other projects * Include both flake.lock and cargo.lock in hash --- .github/workflows/test.yml | 28 +++++++++++++++++++++++++++- flake.lock | 24 ++++++++++++------------ flake.nix | 3 +++ 3 files changed, 42 insertions(+), 13 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 220985d8003..29f58ad519c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -30,6 +30,32 @@ jobs: nix_path: nixpkgs=channel:nixos-22.11 github_access_token: ${{ secrets.GITHUB_TOKEN }} + - uses: cachix/cachix-action@v12 + with: + name: barretenberg + + - name: Restore nix store cache + id: nix-store-cache + uses: actions/cache@v3 + with: + path: /tmp/nix-cache + key: ${{ runner.os }}-flake-${{ hashFiles('*.lock') }} + + # Based on https://github.com/marigold-dev/deku/blob/b5016f0cf4bf6ac48db9111b70dd7fb49b969dfd/.github/workflows/build.yml#L26 + - name: Copy cache into nix store + if: steps.nix-store-cache.outputs.cache-hit == 'true' + # We don't check the signature because we're the one that created the cache + run: | + for narinfo in /tmp/nix-cache/*.narinfo; do + path=$(head -n 1 "$narinfo" | awk '{print $2}') + nix copy --no-check-sigs --from "file:///tmp/nix-cache" "$path" + done + - name: Run `nix flake check` run: | - nix flake check + nix flake check -L + + - name: Export cache from nix store + if: steps.nix-store-cache.outputs.cache-hit != 'true' + run: | + nix copy --to "file:///tmp/nix-cache?compression=zstd¶llel-compression=true" .#cargo-artifacts diff --git a/flake.lock b/flake.lock index 04dbc188a52..7c01326f86d 100644 --- a/flake.lock +++ b/flake.lock @@ -10,11 +10,11 @@ ] }, "locked": { - "lastModified": 1682019675, - "narHash": 
"sha256-KZ/VL/u81z2sFTdwfxvUFR+ftqf3+2AA0gR9kkgKxe4=", + "lastModified": 1682345890, + "narHash": "sha256-ZsInK9Iy81MaCugouU3ifa5Vw2GKlJK9MxCU/LF8bIw=", "owner": "AztecProtocol", "repo": "barretenberg", - "rev": "a38e3611590e085e5f25c322757871fb048aa3d7", + "rev": "87aeb375d7b434e0faf47abb79f97753ab760987", "type": "github" }, "original": { @@ -39,11 +39,11 @@ ] }, "locked": { - "lastModified": 1681680516, - "narHash": "sha256-EB8Adaeg4zgcYDJn9sR6UMjN/OHdIiMMK19+3LmmXQY=", + "lastModified": 1681177078, + "narHash": "sha256-ZNIjBDou2GOabcpctiQykEQVkI8BDwk7TyvlWlI4myE=", "owner": "ipetkov", "repo": "crane", - "rev": "54b63c8eae4c50172cb50b612946ff1d2bc1c75c", + "rev": "0c9f468ff00576577d83f5019a66c557ede5acf6", "type": "github" }, "original": { @@ -88,11 +88,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1681932375, - "narHash": "sha256-tSXbYmpnKSSWpzOrs27ie8X3I0yqKA6AuCzCYNtwbCU=", + "lastModified": 1681269223, + "narHash": "sha256-i6OeI2f7qGvmLfD07l1Az5iBL+bFeP0RHixisWtpUGo=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "3d302c67ab8647327dba84fbdb443cdbf0e82744", + "rev": "87edbd74246ccdfa64503f334ed86fa04010bab9", "type": "github" }, "original": { @@ -122,11 +122,11 @@ ] }, "locked": { - "lastModified": 1681957132, - "narHash": "sha256-52GaHyeLyyiT0u4OL3uGbo0vsUMKm33Z3zLkPyK/ZRY=", + "lastModified": 1681352318, + "narHash": "sha256-+kwy7bTsuW8GYrRqWRQ8T5hg6duZb5IJiHlKo1J+v9g=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "4771640d46c214d702512a8ece591f582ae507fa", + "rev": "aeaa11c65a5c5cebaa51652353ab3c497b9a7bbf", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 74f19f612f2..fc8e5fa6868 100644 --- a/flake.nix +++ b/flake.nix @@ -165,6 +165,9 @@ packages.default = noir; + # We expose the `cargo-artifacts` derivation so we can cache our cargo dependencies in CI + packages.cargo-artifacts = cargoArtifacts; + # TODO(#1197): Look into installable apps with Nix flakes # apps.default = flake-utils.lib.mkApp { drv = nargo; }; 
From dc3fb4806564acb6fd6ec46fcdc68dc336da96ba Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 24 Apr 2023 18:19:28 -0400 Subject: [PATCH 35/63] chore(ssa refactor): Handle codegen for literals (#1209) * Add Context structs and start ssa gen pass * Fix block arguments * Fix clippy lint * Use the correct dfg * Rename contexts to highlight the inner contexts are shared rather than used directly * Correctly handle function parameters * Rename Nested to Tree; add comment * Add codegen for literals * PR feedback * chore(ssa refactor): Add debug printing for the new ssa ir (#1211) Implement debug printing for the new ssa ir --- crates/noirc_evaluator/src/ssa_refactor/ir.rs | 2 + .../src/ssa_refactor/ir/basic_block.rs | 22 ++++ .../src/ssa_refactor/ir/constant.rs | 56 +++++++++ .../src/ssa_refactor/ir/dfg.rs | 76 ++++++++++-- .../src/ssa_refactor/ir/function.rs | 15 ++- .../src/ssa_refactor/ir/instruction.rs | 60 ++++++--- .../src/ssa_refactor/ir/map.rs | 54 ++++++++ .../src/ssa_refactor/ir/printer.rs | 115 ++++++++++++++++++ .../src/ssa_refactor/ir/types.rs | 21 ++++ .../src/ssa_refactor/ir/value.rs | 5 +- .../ssa_builder/function_builder.rs | 59 ++++++++- .../src/ssa_refactor/ssa_gen/context.rs | 33 ++--- .../src/ssa_refactor/ssa_gen/mod.rs | 65 +++++++++- .../src/ssa_refactor/ssa_gen/value.rs | 43 ++++++- 14 files changed, 566 insertions(+), 60 deletions(-) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index ce63bdc7238..851b86e511f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -1,7 +1,9 @@ pub(crate) mod basic_block; +pub(crate) mod constant; pub(crate) mod dfg; pub(crate) mod function; pub(crate) mod instruction; pub(crate) mod map; +pub(crate) mod printer; pub(crate) mod types; pub(crate) mod 
value; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index 431f1647863..13d1b3ca6f8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -52,7 +52,29 @@ impl BasicBlock { self.instructions.push(instruction); } + pub(crate) fn instructions(&self) -> &[InstructionId] { + &self.instructions + } + pub(crate) fn set_terminator(&mut self, terminator: TerminatorInstruction) { self.terminator = Some(terminator); } + + pub(crate) fn terminator(&self) -> Option<&TerminatorInstruction> { + self.terminator.as_ref() + } + + /// Iterate over all the successors of the currently block, as determined by + /// the blocks jumped to in the terminator instruction. If there is no terminator + /// instruction yet, this will iterate 0 times. + pub(crate) fn successors(&self) -> impl ExactSizeIterator { + match &self.terminator { + Some(TerminatorInstruction::Jmp { destination, .. }) => vec![*destination].into_iter(), + Some(TerminatorInstruction::JmpIf { then_destination, else_destination, .. }) => { + vec![*then_destination, *else_destination].into_iter() + } + Some(TerminatorInstruction::Return { .. }) => vec![].into_iter(), + None => vec![].into_iter(), + } + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs new file mode 100644 index 00000000000..6d5538d3410 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs @@ -0,0 +1,56 @@ +use acvm::FieldElement; + +use super::map::Id; + +/// Represents a numeric constant in Ssa. Constants themselves are +/// uniqued in the DataFlowGraph and immutable. +/// +/// This is just a thin wrapper around FieldElement so that +/// we can use Id without it getting confused +/// with a possible future use of Id. 
+#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub(crate) struct NumericConstant(FieldElement); + +impl NumericConstant { + pub(crate) fn new(value: FieldElement) -> Self { + Self(value) + } + + pub(crate) fn value(&self) -> &FieldElement { + &self.0 + } +} + +pub(crate) type NumericConstantId = Id; + +impl std::ops::Add for NumericConstant { + type Output = NumericConstant; + + fn add(self, rhs: Self) -> Self::Output { + Self::new(self.0 + rhs.0) + } +} + +impl std::ops::Sub for NumericConstant { + type Output = NumericConstant; + + fn sub(self, rhs: Self) -> Self::Output { + Self::new(self.0 - rhs.0) + } +} + +impl std::ops::Mul for NumericConstant { + type Output = NumericConstant; + + fn mul(self, rhs: Self) -> Self::Output { + Self::new(self.0 * rhs.0) + } +} + +impl std::ops::Div for NumericConstant { + type Output = NumericConstant; + + fn div(self, rhs: Self) -> Self::Output { + Self::new(self.0 / rhs.0) + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index b456fd08ee4..f92cae79b75 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -1,12 +1,14 @@ use super::{ basic_block::{BasicBlock, BasicBlockId}, + constant::{NumericConstant, NumericConstantId}, function::Signature, instruction::{Instruction, InstructionId}, - map::{DenseMap, Id, SecondaryMap}, + map::{DenseMap, Id, SecondaryMap, TwoWayMap}, types::Type, value::{Value, ValueId}, }; +use acvm::FieldElement; use iter_extended::vecmap; #[derive(Debug, Default)] @@ -20,6 +22,7 @@ impl ValueList { self.0.push(value); self.len() - 1 } + /// Returns the number of values in the list. fn len(&self) -> usize { self.0.len() @@ -29,6 +32,7 @@ impl ValueList { fn clear(&mut self) { self.0.clear(); } + /// Returns the ValueId's as a slice. pub(crate) fn as_slice(&self) -> &[ValueId] { &self.0 @@ -55,6 +59,11 @@ pub(crate) struct DataFlowGraph { /// function. 
values: DenseMap, + /// Storage for all constants used within a function. + /// Each constant is unique, attempting to insert the same constant + /// twice will return the same ConstantId. + constants: TwoWayMap, + /// Function signatures of external methods signatures: DenseMap, @@ -91,27 +100,35 @@ impl DataFlowGraph { } /// Inserts a new instruction into the DFG. + /// This does not add the instruction to the block or populate the instruction's result list pub(crate) fn make_instruction(&mut self, instruction_data: Instruction) -> InstructionId { let id = self.instructions.insert(instruction_data); - // Create a new vector to store the potential results for the instruction. self.results.insert(id, Default::default()); id } + /// Insert a value into the dfg's storage and return an id to reference it. + /// Until the value is used in an instruction it is unreachable. pub(crate) fn make_value(&mut self, value: Value) -> ValueId { self.values.insert(value) } - /// Attaches results to the instruction. + /// Creates a new constant value, or returns the Id to an existing one if + /// one already exists. + pub(crate) fn make_constant(&mut self, value: FieldElement, typ: Type) -> ValueId { + let constant = self.constants.insert(NumericConstant::new(value)); + self.values.insert(Value::NumericConstant { constant, typ }) + } + + /// Attaches results to the instruction, clearing any previous results. /// - /// Returns the number of results that this instruction - /// produces. + /// Returns the results of the instruction pub(crate) fn make_instruction_results( &mut self, instruction_id: InstructionId, ctrl_typevar: Type, - ) -> usize { + ) -> &[ValueId] { // Clear all of the results instructions associated with this // instruction. 
self.results.get_mut(&instruction_id).expect("all instructions should have a `result` allocation when instruction was added to the DFG").clear(); @@ -119,13 +136,14 @@ impl DataFlowGraph { // Get all of the types that this instruction produces // and append them as results. let typs = self.instruction_result_types(instruction_id, ctrl_typevar); - let num_typs = typs.len(); for typ in typs { self.append_result(instruction_id, typ); } - num_typs + self.results.get_mut(&instruction_id) + .expect("all instructions should have a `result` allocation when instruction was added to the DFG") + .as_slice() } /// Return the result types of this instruction. @@ -181,6 +199,42 @@ impl DataFlowGraph { block.add_parameter(parameter); parameter } + + pub(crate) fn insert_instruction_in_block( + &mut self, + block: BasicBlockId, + instruction: InstructionId, + ) { + self.blocks[block].insert_instruction(instruction); + } +} + +impl std::ops::Index for DataFlowGraph { + type Output = Instruction; + fn index(&self, id: InstructionId) -> &Self::Output { + &self.instructions[id] + } +} + +impl std::ops::Index for DataFlowGraph { + type Output = Value; + fn index(&self, id: ValueId) -> &Self::Output { + &self.values[id] + } +} + +impl std::ops::Index for DataFlowGraph { + type Output = NumericConstant; + fn index(&self, id: NumericConstantId) -> &Self::Output { + &self.constants[id] + } +} + +impl std::ops::Index for DataFlowGraph { + type Output = BasicBlock; + fn index(&self, id: BasicBlockId) -> &Self::Output { + &self.blocks[id] + } } #[cfg(test)] @@ -190,19 +244,17 @@ mod tests { instruction::Instruction, types::{NumericType, Type}, }; - use acvm::FieldElement; #[test] fn make_instruction() { let mut dfg = DataFlowGraph::default(); - let ins = Instruction::Immediate { value: FieldElement::from(0u128) }; + let ins = Instruction::Allocate { size: 20 }; let ins_id = dfg.make_instruction(ins); let num_results = - dfg.make_instruction_results(ins_id, 
Type::Numeric(NumericType::NativeField)); + dfg.make_instruction_results(ins_id, Type::Numeric(NumericType::NativeField)).len(); let results = dfg.instruction_results(ins_id); - assert_eq!(results.len(), num_results); } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 1abd6c85367..63cd31142c4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -18,7 +18,10 @@ pub(crate) struct Function { source_locations: SecondaryMap, /// The first basic block in the function - entry_block: BasicBlockId, + pub(super) entry_block: BasicBlockId, + + /// Name of the function for debugging only + pub(super) name: String, pub(crate) dfg: DataFlowGraph, } @@ -27,10 +30,10 @@ impl Function { /// Creates a new function with an automatically inserted entry block. /// /// Note that any parameters to the function must be manually added later. - pub(crate) fn new() -> Self { + pub(crate) fn new(name: String) -> Self { let mut dfg = DataFlowGraph::default(); let entry_block = dfg.new_block(); - Self { source_locations: SecondaryMap::new(), entry_block, dfg } + Self { name, source_locations: SecondaryMap::new(), entry_block, dfg } } pub(crate) fn entry_block(&self) -> BasicBlockId { @@ -47,6 +50,12 @@ pub(crate) struct Signature { pub(crate) returns: Vec, } +impl std::fmt::Display for Function { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + super::printer::display_function(self, f) + } +} + #[test] fn sign_smoke() { let mut signature = Signature::default(); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 81a28b8407c..442f1dbd47e 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,5 +1,3 @@ -use acvm::FieldElement; - use super::{ 
basic_block::BasicBlockId, function::FunctionId, map::Id, types::Type, value::ValueId, }; @@ -17,6 +15,12 @@ pub(crate) type InstructionId = Id; /// of this is println. pub(crate) struct IntrinsicOpcodes; +impl std::fmt::Display for IntrinsicOpcodes { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!("intrinsics have no opcodes yet") + } +} + #[derive(Debug, PartialEq, Eq, Hash, Clone)] /// Instructions are used to perform tasks. /// The instructions that the IR is able to specify are listed below. @@ -38,18 +42,24 @@ pub(crate) enum Instruction { /// Performs a function call with a list of its arguments. Call { func: FunctionId, arguments: Vec }, + /// Performs a call to an intrinsic function and stores the /// results in `return_arguments`. Intrinsic { func: IntrinsicOpcodes, arguments: Vec }, + /// Allocates a region of memory. Note that this is not concerned with + /// the type of memory, the type of element is determined when loading this memory. + /// + /// `size` is the size of the region to be allocated by the number of FieldElements it + /// contains. Note that non-numeric types like Functions and References are counted as 1 field + /// each. + Allocate { size: u32 }, + /// Loads a value from memory. - Load(ValueId), + Load { address: ValueId }, /// Writes a value to memory. - Store { destination: ValueId, value: ValueId }, - - /// Stores an Immediate value - Immediate { value: FieldElement }, + Store { address: ValueId, value: ValueId }, } impl Instruction { @@ -67,28 +77,31 @@ impl Instruction { // This also returns 0, but we could get it a compile time, // since we know the signatures for the intrinsics Instruction::Intrinsic { .. } => 0, - Instruction::Load(_) => 1, + Instruction::Allocate { .. } => 1, + Instruction::Load { .. } => 1, Instruction::Store { .. } => 0, - Instruction::Immediate { .. 
} => 1, } } /// Returns the number of arguments required for a call pub(crate) fn num_fixed_arguments(&self) -> usize { + // Match-all fields syntax (..) is avoided on most cases of this match to ensure that + // if an extra argument is ever added to any of these variants, an error + // is issued pointing to this spot to update it here as well. match self { Instruction::Binary(_) => 2, - Instruction::Cast(..) => 1, + Instruction::Cast(_, _) => 1, Instruction::Not(_) => 1, - Instruction::Truncate { .. } => 1, + Instruction::Truncate { value: _, bit_size: _, max_bit_size: _ } => 1, Instruction::Constrain(_) => 1, // This returns 0 as the arguments depend on the function being called Instruction::Call { .. } => 0, // This also returns 0, but we could get it a compile time, // since we know the function definition for the intrinsics Instruction::Intrinsic { .. } => 0, - Instruction::Load(_) => 1, - Instruction::Store { .. } => 2, - Instruction::Immediate { .. } => 0, + Instruction::Allocate { size: _ } => 1, + Instruction::Load { address: _ } => 1, + Instruction::Store { address: _, value: _ } => 2, } } @@ -102,9 +115,9 @@ impl Instruction { Instruction::Constrain(_) => vec![], Instruction::Call { .. } => vec![], Instruction::Intrinsic { .. } => vec![], - Instruction::Load(_) => vec![ctrl_typevar], + Instruction::Allocate { .. } => vec![Type::Reference], + Instruction::Load { .. } => vec![ctrl_typevar], Instruction::Store { .. } => vec![], - Instruction::Immediate { .. } => vec![], } } } @@ -182,5 +195,18 @@ pub(crate) enum BinaryOp { /// Checks whether two types are equal. /// Returns true if the types were not equal and /// false otherwise. 
- Ne, + Neq, +} + +impl std::fmt::Display for BinaryOp { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + BinaryOp::Add => write!(f, "add"), + BinaryOp::Sub => write!(f, "sub"), + BinaryOp::Mul => write!(f, "mul"), + BinaryOp::Div => write!(f, "div"), + BinaryOp::Eq => write!(f, "eq"), + BinaryOp::Neq => write!(f, "neq"), + } + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index 53a7db3a5d5..bb526076e3b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -1,5 +1,6 @@ use std::{ collections::HashMap, + hash::Hash, sync::atomic::{AtomicUsize, Ordering}, }; @@ -68,6 +69,12 @@ impl std::fmt::Debug for Id { } } +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "${}", self.index) + } +} + /// A DenseMap is a Vec wrapper where each element corresponds /// to a unique ID that can be used to access the element. No direct /// access to indices is provided. Since IDs must be stable and correspond @@ -186,6 +193,53 @@ impl std::ops::IndexMut> for SparseMap { } } +/// A TwoWayMap is a map from both key to value and value to key. +/// This is accomplished by keeping the map bijective - for every +/// value there is exactly one key and vice-versa. Any duplicate values +/// are prevented in the call to insert. +#[derive(Debug)] +pub(crate) struct TwoWayMap { + key_to_value: HashMap, T>, + value_to_key: HashMap>, +} + +impl TwoWayMap { + /// Returns the number of elements in the map. + pub(crate) fn len(&self) -> usize { + self.key_to_value.len() + } + + /// Adds an element to the map. + /// Returns the identifier/reference to that element. 
+ pub(crate) fn insert(&mut self, element: T) -> Id { + if let Some(existing) = self.value_to_key.get(&element) { + return *existing; + } + + let id = Id::new(self.key_to_value.len()); + self.key_to_value.insert(id, element.clone()); + self.value_to_key.insert(element, id); + id + } +} + +impl Default for TwoWayMap { + fn default() -> Self { + Self { key_to_value: HashMap::new(), value_to_key: HashMap::new() } + } +} + +// Note that there is no impl for IndexMut>, +// if we allowed mutable access to map elements they may be +// mutated such that elements are no longer unique +impl std::ops::Index> for TwoWayMap { + type Output = T; + + fn index(&self, id: Id) -> &Self::Output { + &self.key_to_value[&id] + } +} + /// A SecondaryMap is for storing secondary data for a given key. Since this /// map is for secondary data, it will not return fresh Ids for data, instead /// it expects users to provide these ids in order to associate existing ids with diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs new file mode 100644 index 00000000000..1a7737e97b0 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -0,0 +1,115 @@ +//! This file is for pretty-printing the SSA IR in a human-readable form for debugging. 
+use std::fmt::{Formatter, Result}; + +use iter_extended::vecmap; + +use super::{ + basic_block::BasicBlockId, + function::Function, + instruction::{Instruction, InstructionId, TerminatorInstruction}, + value::ValueId, +}; + +pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { + writeln!(f, "fn {} {{", function.name)?; + display_block_with_successors(function, function.entry_block, f)?; + write!(f, "}}") +} + +pub(crate) fn display_block_with_successors( + function: &Function, + block_id: BasicBlockId, + f: &mut Formatter, +) -> Result { + display_block(function, block_id, f)?; + + for successor in function.dfg[block_id].successors() { + display_block(function, successor, f)?; + } + Ok(()) +} + +pub(crate) fn display_block( + function: &Function, + block_id: BasicBlockId, + f: &mut Formatter, +) -> Result { + let block = &function.dfg[block_id]; + + writeln!(f, "{}({}):", block_id, value_list(block.parameters()))?; + + for instruction in block.instructions() { + display_instruction(function, *instruction, f)?; + } + + display_terminator(block.terminator(), f) +} + +fn value_list(values: &[ValueId]) -> String { + vecmap(values, ToString::to_string).join(", ") +} + +pub(crate) fn display_terminator( + terminator: Option<&TerminatorInstruction>, + f: &mut Formatter, +) -> Result { + match terminator { + Some(TerminatorInstruction::Jmp { destination, arguments }) => { + writeln!(f, " jmp {}({})", destination, value_list(arguments)) + } + Some(TerminatorInstruction::JmpIf { + condition, + arguments, + then_destination, + else_destination, + }) => { + let args = value_list(arguments); + writeln!( + f, + " jmpif {}({}) then: {}, else: {}", + condition, args, then_destination, else_destination + ) + } + Some(TerminatorInstruction::Return { return_values }) => { + writeln!(f, " return {}", value_list(return_values)) + } + None => writeln!(f, " (no terminator instruction)"), + } +} + +pub(crate) fn display_instruction( + function: &Function, + 
instruction: InstructionId, + f: &mut Formatter, +) -> Result { + // instructions are always indented within a function + write!(f, " ")?; + + let results = function.dfg.instruction_results(instruction); + if !results.is_empty() { + write!(f, "{} = ", value_list(results))?; + } + + match &function.dfg[instruction] { + Instruction::Binary(binary) => { + writeln!(f, "{} {}, {}", binary.operator, binary.lhs, binary.rhs) + } + Instruction::Cast(value, typ) => writeln!(f, "cast {value} as {typ}"), + Instruction::Not(value) => writeln!(f, "not {value}"), + Instruction::Truncate { value, bit_size, max_bit_size } => { + writeln!(f, "truncate {value} to {bit_size} bits, max_bit_size: {max_bit_size}") + } + Instruction::Constrain(value) => { + writeln!(f, "constrain {value}") + } + Instruction::Call { func, arguments } => { + writeln!(f, "call {func}({})", value_list(arguments)) + } + Instruction::Intrinsic { func, arguments } => { + writeln!(f, "intrinsic {func}({})", value_list(arguments)) + } + Instruction::Allocate { size } => writeln!(f, "alloc {size} fields"), + Instruction::Load { address } => writeln!(f, "load {address}"), + Instruction::Store { address, value } => writeln!(f, "store {value} at {address}"), + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index e1f8e8a74d2..888d7d128d1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -42,3 +42,24 @@ impl Type { Type::Numeric(NumericType::NativeField) } } + +impl std::fmt::Display for Type { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Type::Numeric(numeric) => numeric.fmt(f), + Type::Reference => write!(f, "reference"), + Type::Function => write!(f, "function"), + Type::Unit => write!(f, "unit"), + } + } +} + +impl std::fmt::Display for NumericType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + 
match self { + NumericType::Signed { bit_size } => write!(f, "i{bit_size}"), + NumericType::Unsigned { bit_size } => write!(f, "u{bit_size}"), + NumericType::NativeField => write!(f, "Field"), + } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index 38ca8b12c40..537eabb0cab 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -1,6 +1,6 @@ use crate::ssa_refactor::ir::basic_block::BasicBlockId; -use super::{instruction::InstructionId, map::Id, types::Type}; +use super::{constant::NumericConstantId, instruction::InstructionId, map::Id, types::Type}; pub(crate) type ValueId = Id; @@ -24,4 +24,7 @@ pub(crate) enum Value { /// /// position -- the index of this Value in the block parameters list Param { block: BasicBlockId, position: usize, typ: Type }, + + /// This Value originates from a numeric constant + NumericConstant { constant: NumericConstantId, typ: Type }, } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index 5e82226d3be..c76d2943abe 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -1,6 +1,9 @@ +use acvm::FieldElement; + use crate::ssa_refactor::ir::{ basic_block::BasicBlockId, function::{Function, FunctionId}, + instruction::{Binary, BinaryOp, Instruction, InstructionId}, types::Type, value::ValueId, }; @@ -26,8 +29,8 @@ pub(crate) struct FunctionBuilder<'ssa> { } impl<'ssa> FunctionBuilder<'ssa> { - pub(crate) fn new(context: &'ssa SharedBuilderContext) -> Self { - let new_function = Function::new(); + pub(crate) fn new(function_name: String, context: &'ssa SharedBuilderContext) -> Self { + let new_function = Function::new(function_name); let current_block = 
new_function.entry_block(); Self { @@ -40,8 +43,8 @@ impl<'ssa> FunctionBuilder<'ssa> { } /// Finish the current function and create a new function - pub(crate) fn new_function(&mut self) { - let new_function = Function::new(); + pub(crate) fn new_function(&mut self, name: String) { + let new_function = Function::new(name); let old_function = std::mem::replace(&mut self.current_function, new_function); self.finished_functions.push((self.current_function_id, old_function)); @@ -57,4 +60,52 @@ impl<'ssa> FunctionBuilder<'ssa> { let entry = self.current_function.entry_block(); self.current_function.dfg.add_block_parameter(entry, typ) } + + /// Insert a numeric constant into the current function + pub(crate) fn numeric_constant(&mut self, value: FieldElement, typ: Type) -> ValueId { + self.current_function.dfg.make_constant(value, typ) + } + + /// Insert a numeric constant into the current function of type Field + pub(crate) fn field_constant(&mut self, value: impl Into) -> ValueId { + self.numeric_constant(value.into(), Type::field()) + } + + fn insert_instruction(&mut self, instruction: Instruction) -> InstructionId { + let id = self.current_function.dfg.make_instruction(instruction); + self.current_function.dfg.insert_instruction_in_block(self.current_block, id); + id + } + + /// Insert an allocate instruction at the end of the current block, allocating the + /// given amount of field elements. Returns the result of the allocate instruction, + /// which is always a Reference to the allocated data. + pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { + let id = self.insert_instruction(Instruction::Allocate { size: size_to_allocate }); + self.current_function.dfg.make_instruction_results(id, Type::Reference)[0] + } + + /// Insert a Load instruction at the end of the current block, loading from the given address + /// which should point to a previous Allocate instruction. Note that this is limited to loading + /// a single value. 
Loading multiple values (such as a tuple) will require multiple loads. + /// Returns the element that was loaded. + pub(crate) fn insert_load(&mut self, address: ValueId, type_to_load: Type) -> ValueId { + let id = self.insert_instruction(Instruction::Load { address }); + self.current_function.dfg.make_instruction_results(id, type_to_load)[0] + } + + /// Insert a Store instruction at the end of the current block, storing the given element + /// at the given address. Expects that the address points to a previous Allocate instruction. + pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { + self.insert_instruction(Instruction::Store { address, value }); + } + + /// Insert a Store instruction at the end of the current block, storing the given element + /// at the given address. Expects that the address points to a previous Allocate instruction. + /// Returns the result of the add instruction. + pub(crate) fn insert_add(&mut self, lhs: ValueId, rhs: ValueId, typ: Type) -> ValueId { + let operator = BinaryOp::Add; + let id = self.insert_instruction(Instruction::Binary(Binary { lhs, rhs, operator })); + self.current_function.dfg.make_instruction_results(id, typ)[0] + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 02bfee8a87f..32133feea13 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -19,7 +19,7 @@ type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; pub(super) struct FunctionContext<'a> { definitions: HashMap, - function_builder: FunctionBuilder<'a>, + pub(super) builder: FunctionBuilder<'a>, shared_context: &'a SharedContext, } @@ -32,22 +32,23 @@ pub(super) struct SharedContext { impl<'a> FunctionContext<'a> { pub(super) fn new( + function_name: String, parameters: &Parameters, shared_context: &'a SharedContext, shared_builder_context: &'a 
SharedBuilderContext, ) -> Self { let mut this = Self { definitions: HashMap::new(), - function_builder: FunctionBuilder::new(shared_builder_context), + builder: FunctionBuilder::new(function_name, shared_builder_context), shared_context, }; this.add_parameters_to_scope(parameters); this } - pub(super) fn new_function(&mut self, parameters: &Parameters) { + pub(super) fn new_function(&mut self, name: String, parameters: &Parameters) { self.definitions.clear(); - self.function_builder.new_function(); + self.builder.new_function(name); self.add_parameters_to_scope(parameters); } @@ -67,8 +68,8 @@ impl<'a> FunctionContext<'a> { /// into a new parameter for each field recursively. fn add_parameter_to_scope(&mut self, parameter_id: LocalId, parameter_type: &ast::Type) { // Add a separate parameter for each field type in 'parameter_type' - let parameter_value = self - .map_type(parameter_type, |this, typ| this.function_builder.add_parameter(typ).into()); + let parameter_value = + self.map_type(parameter_type, |this, typ| this.builder.add_parameter(typ).into()); self.definitions.insert(parameter_id, parameter_value); } @@ -82,24 +83,28 @@ impl<'a> FunctionContext<'a> { typ: &ast::Type, mut f: impl FnMut(&mut Self, Type) -> T, ) -> Tree { - self.map_type_helper(typ, &mut f) + Self::map_type_helper(typ, &mut |typ| f(self, typ)) } // This helper is needed because we need to take f by mutable reference, // otherwise we cannot move it multiple times each loop of vecmap. 
- fn map_type_helper( - &mut self, - typ: &ast::Type, - f: &mut impl FnMut(&mut Self, Type) -> T, - ) -> Tree { + fn map_type_helper(typ: &ast::Type, f: &mut impl FnMut(Type) -> T) -> Tree { match typ { ast::Type::Tuple(fields) => { - Tree::Branch(vecmap(fields, |field| self.map_type_helper(field, f))) + Tree::Branch(vecmap(fields, |field| Self::map_type_helper(field, f))) } - other => Tree::Leaf(f(self, Self::convert_non_tuple_type(other))), + other => Tree::Leaf(f(Self::convert_non_tuple_type(other))), } } + /// Convert a monomorphized type to an SSA type, preserving the structure + /// of any tuples within. + pub(super) fn convert_type(typ: &ast::Type) -> Tree { + // Do nothing in the closure here - map_type_helper already calls + // convert_non_tuple_type internally. + Self::map_type_helper(typ, &mut |x| x) + } + pub(super) fn convert_non_tuple_type(typ: &ast::Type) -> Type { match typ { ast::Type::Field => Type::field(), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index c340b45eb9b..2f9c6646282 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -1,27 +1,33 @@ mod context; mod value; +use acvm::FieldElement; use context::SharedContext; +use iter_extended::vecmap; use noirc_errors::Location; use noirc_frontend::monomorphization::ast::{self, Expression, Program}; -use self::{context::FunctionContext, value::Values}; +use self::{ + context::FunctionContext, + value::{Tree, Values}, +}; -use super::ssa_builder::SharedBuilderContext; +use super::{ir::types::Type, ssa_builder::SharedBuilderContext}; pub(crate) fn generate_ssa(program: Program) { let context = SharedContext::new(program); let builder_context = SharedBuilderContext::default(); let main = context.program.main(); + let mut function_context = + FunctionContext::new(main.name.clone(), &main.parameters, &context, &builder_context); - let mut 
function_context = FunctionContext::new(&main.parameters, &context, &builder_context); function_context.codegen_expression(&main.body); while let Some((src_function_id, _new_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; // TODO: Need to ensure/assert the new function's id == new_id - function_context.new_function(&function.parameters); + function_context.new_function(function.name.clone(), &function.parameters); function_context.codegen_expression(&function.body); } } @@ -56,8 +62,55 @@ impl<'a> FunctionContext<'a> { todo!() } - fn codegen_literal(&mut self, _literal: &ast::Literal) -> Values { - todo!() + fn codegen_literal(&mut self, literal: &ast::Literal) -> Values { + match literal { + ast::Literal::Array(array) => { + let elements = vecmap(&array.contents, |element| self.codegen_expression(element)); + let element_type = Self::convert_type(&array.element_type); + self.codegen_array(elements, element_type) + } + ast::Literal::Integer(value, typ) => { + let typ = Self::convert_non_tuple_type(typ); + self.builder.numeric_constant(*value, typ).into() + } + ast::Literal::Bool(value) => { + // Booleans are represented as u1s with 0 = false, 1 = true + let typ = Type::unsigned(1); + let value = FieldElement::from(*value as u128); + self.builder.numeric_constant(value, typ).into() + } + ast::Literal::Str(string) => { + let elements = vecmap(string.as_bytes(), |byte| { + let value = FieldElement::from(*byte as u128); + self.builder.numeric_constant(value, Type::field()).into() + }); + self.codegen_array(elements, Tree::Leaf(Type::field())) + } + } + } + + fn codegen_array(&mut self, elements: Vec, element_type: Tree) -> Values { + let size = element_type.size_of_type() * elements.len(); + let array = self.builder.insert_allocate(size.try_into().unwrap_or_else(|_| { + panic!("Cannot allocate {size} bytes for array, it does not fit into a u32") + })); + + // Now we must manually store all the elements into the array + 
let mut i = 0; + for element in elements { + element.for_each(|value| { + let address = if i == 0 { + array + } else { + let offset = self.builder.numeric_constant((i as u128).into(), Type::field()); + self.builder.insert_add(array, offset, Type::field()) + }; + self.builder.insert_store(address, value.eval()); + i += 1; + }); + } + + array.into() } fn codegen_block(&mut self, _block: &[Expression]) -> Values { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 4b41c6ae102..c3911d367c1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -1,4 +1,5 @@ use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; +use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; pub(super) enum Tree { @@ -10,10 +11,18 @@ pub(super) enum Tree { pub(super) enum Value { Normal(IrValueId), Function(IrFunctionId), +} - /// Lazily inserting unit values helps prevent cluttering the IR with too many - /// unit literals. - Unit, +impl Value { + /// Evaluate a value, returning an IrValue from it. + /// This has no effect on Value::Normal, but any variables will be updated with their latest + /// use. + pub(super) fn eval(self) -> IrValueId { + match self { + Value::Normal(value) => value, + Value::Function(_) => panic!("Tried to evaluate a function value"), + } + } } pub(super) type Values = Tree; @@ -25,6 +34,25 @@ impl Tree { Tree::Leaf(value) => vec![value], } } + + pub(super) fn count_leaves(&self) -> usize { + match self { + Tree::Branch(trees) => trees.iter().map(|tree| tree.count_leaves()).sum(), + Tree::Leaf(_) => 1, + } + } + + /// Iterates over each Leaf node, calling f on each value within. 
+    pub(super) fn for_each(self, mut f: impl FnMut(T)) {
+        self.for_each_helper(&mut f);
+    }
+
+    fn for_each_helper(self, f: &mut impl FnMut(T)) {
+        match self {
+            Tree::Branch(trees) => trees.into_iter().for_each(|tree| tree.for_each_helper(f)),
+            Tree::Leaf(value) => f(value),
+        }
+    }
 }
 
 impl From for Values {
@@ -38,3 +66,12 @@ impl From for Value {
         Value::Normal(id)
     }
 }
+
+// Specialize this impl just to give a better name for this function
+impl Tree {
+    /// Returns the size of the type in terms of the number of FieldElements it contains.
+    /// Non-field types like functions and references are also counted as 1 FieldElement.
+    pub(super) fn size_of_type(&self) -> usize {
+        self.count_leaves()
+    }
+}

From f3fe1218bd4d41c0d459ea7af0105ad45f14e9e3 Mon Sep 17 00:00:00 2001
From: joss-aztec <94053499+joss-aztec@users.noreply.github.com>
Date: Tue, 25 Apr 2023 14:49:49 +0100
Subject: [PATCH 36/63] chore(ssa): Add initial control flow graph (#1200)

* Add Context structs and start ssa gen pass
* Fix block arguments
* Fix clippy lint
* chore(ssa): cfg
* Use the correct dfg
* Rename contexts to highlight the inner contexts are shared rather than used directly
* Correctly handle function parameters
* Rename Nested to Tree; add comment
* chore(ssa refactor): fix up merge regressions
* chore(ssa refactor): tidy up
* chore(ssa refactor): rm iterator type aliases
* chore(ssa refactor): handle return inst gets blocks via dfg
* chore(ssa refactor): cfg tests
* chore(ssa refactor): add cfg test comments
* chore(ssa refactor): cfg - merge related fixes
* chore(ssa refactor): fix cfg tests
---------
Co-authored-by: Jake Fecher
---
 crates/noirc_evaluator/src/ssa_refactor/ir.rs |   2 +
 .../ssa_refactor/ir/basic_block_visitors.rs   |  23 ++
 .../src/ssa_refactor/ir/cfg.rs                | 251 ++++++++++++++++++
 .../src/ssa_refactor/ir/dfg.rs                |  17 ++
 .../src/ssa_refactor/ir/map.rs                |   8 +
 5 files changed, 301 insertions(+)
 create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs
create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index 851b86e511f..1a1ca9eab89 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -1,4 +1,6 @@ pub(crate) mod basic_block; +pub(crate) mod basic_block_visitors; +pub(crate) mod cfg; pub(crate) mod constant; pub(crate) mod dfg; pub(crate) mod function; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs new file mode 100644 index 00000000000..e0d5dc1b3df --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs @@ -0,0 +1,23 @@ +use super::{ + basic_block::{BasicBlock, BasicBlockId}, + instruction::TerminatorInstruction, +}; + +/// Visit all successors of a block with a given visitor closure. The closure +/// arguments are the branch instruction that is used to reach the successor, +/// and the id of the successor block itself. +pub(crate) fn visit_block_succs(basic_block: &BasicBlock, mut visit: F) { + match basic_block + .terminator() + .expect("ICE: No terminator indicates block is still under construction.") + { + TerminatorInstruction::Jmp { destination, .. } => visit(*destination), + TerminatorInstruction::JmpIf { then_destination, else_destination, .. } => { + visit(*then_destination); + visit(*else_destination); + } + TerminatorInstruction::Return { .. 
} => {
+            // The last block of the control flow - no successors
+        }
+    }
+}
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs
new file mode 100644
index 00000000000..05b64e30ed8
--- /dev/null
+++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs
@@ -0,0 +1,251 @@
+use std::collections::{HashMap, HashSet};
+
+use super::{
+    basic_block::{BasicBlock, BasicBlockId},
+    basic_block_visitors,
+    function::Function,
+};
+
+/// A container for the successors and predecessors of some Block.
+#[derive(Clone, Default)]
+struct CfgNode {
+    /// Set of blocks containing jumps that target this block.
+    /// The predecessor set has no meaningful order.
+    pub(crate) predecessors: HashSet,
+
+    /// Set of blocks that are the targets of jumps in this block.
+    /// The successors set has no meaningful order.
+    pub(crate) successors: HashSet,
+}
+
+/// The Control Flow Graph maintains, for every basic block in a function,
+/// the set of its predecessor blocks and the set of its successor blocks,
+/// so the block graph can be traversed in either direction.
+pub(crate) struct ControlFlowGraph {
+    data: HashMap,
+}
+
+impl ControlFlowGraph {
+    /// Allocate and compute the control flow graph for `func`.
+ pub(crate) fn with_function(func: &Function) -> Self { + let mut cfg = ControlFlowGraph { data: HashMap::new() }; + cfg.compute(func); + cfg + } + + fn compute(&mut self, func: &Function) { + for (basic_block_id, basic_block) in func.dfg.basic_blocks_iter() { + self.compute_block(basic_block_id, basic_block); + } + } + + fn compute_block(&mut self, basic_block_id: BasicBlockId, basic_block: &BasicBlock) { + basic_block_visitors::visit_block_succs(basic_block, |dest| { + self.add_edge(basic_block_id, dest); + }); + } + + fn invalidate_block_successors(&mut self, basic_block_id: BasicBlockId) { + let node = self + .data + .get_mut(&basic_block_id) + .expect("ICE: Attempted to invalidate cfg node successors for non-existent node."); + let old_successors = node.successors.clone(); + node.successors.clear(); + for successor_id in old_successors { + self.data + .get_mut(&successor_id) + .expect("ICE: Cfg node successor doesn't exist.") + .predecessors + .remove(&basic_block_id); + } + } + + /// Recompute the control flow graph of `block`. + /// + /// This is for use after modifying instructions within a specific block. It recomputes all edges + /// from `basic_block_id` while leaving edges to `basic_block_id` intact. 
+ pub(crate) fn recompute_block(&mut self, func: &Function, basic_block_id: BasicBlockId) { + self.invalidate_block_successors(basic_block_id); + let basic_block = &func.dfg[basic_block_id]; + self.compute_block(basic_block_id, basic_block); + } + + fn add_edge(&mut self, from: BasicBlockId, to: BasicBlockId) { + let predecessor_node = self.data.entry(from).or_default(); + assert!( + predecessor_node.successors.len() < 2, + "ICE: A cfg node cannot have more than two successors" + ); + predecessor_node.successors.insert(to); + let successor_node = self.data.entry(to).or_default(); + assert!( + successor_node.predecessors.len() < 2, + "ICE: A cfg node cannot have more than two predecessors" + ); + successor_node.predecessors.insert(from); + } + + /// Get an iterator over the CFG predecessors to `basic_block_id`. + pub(crate) fn pred_iter( + &self, + basic_block_id: BasicBlockId, + ) -> impl ExactSizeIterator + '_ { + self.data + .get(&basic_block_id) + .expect("ICE: Attempted to iterate predecessors of block not found within cfg.") + .predecessors + .iter() + .copied() + } + + /// Get an iterator over the CFG successors to `basic_block_id`. 
+ pub(crate) fn succ_iter( + &self, + basic_block_id: BasicBlockId, + ) -> impl ExactSizeIterator + '_ { + self.data + .get(&basic_block_id) + .expect("ICE: Attempted to iterate successors of block not found within cfg.") + .successors + .iter() + .copied() + } +} + +#[cfg(test)] +mod tests { + use crate::ssa_refactor::ir::{instruction::TerminatorInstruction, types::Type}; + + use super::{super::function::Function, ControlFlowGraph}; + + #[test] + fn empty() { + let mut func = Function::new("func".into()); + let block_id = func.entry_block(); + func.dfg[block_id].set_terminator(TerminatorInstruction::Return { return_values: vec![] }); + + ControlFlowGraph::with_function(&func); + } + + #[test] + fn jumps() { + // Build function of form + // fn func { + // block0(cond: u1): + // jmpif cond(), then: block2, else: block1 + // block1(): + // jmpif cond(), then: block1, else: block2 + // block2(): + // return + // } + let mut func = Function::new("func".into()); + let block0_id = func.entry_block(); + let cond = func.dfg.add_block_parameter(block0_id, Type::unsigned(1)); + let block1_id = func.dfg.new_block(); + let block2_id = func.dfg.new_block(); + + func.dfg[block0_id].set_terminator(TerminatorInstruction::JmpIf { + condition: cond, + then_destination: block2_id, + else_destination: block1_id, + arguments: vec![], + }); + func.dfg[block1_id].set_terminator(TerminatorInstruction::JmpIf { + condition: cond, + then_destination: block1_id, + else_destination: block2_id, + arguments: vec![], + }); + func.dfg[block2_id].set_terminator(TerminatorInstruction::Return { return_values: vec![] }); + + let mut cfg = ControlFlowGraph::with_function(&func); + + { + let block0_predecessors = cfg.pred_iter(block0_id).collect::>(); + let block1_predecessors = cfg.pred_iter(block1_id).collect::>(); + let block2_predecessors = cfg.pred_iter(block2_id).collect::>(); + + let block0_successors = cfg.succ_iter(block0_id).collect::>(); + let block1_successors = 
cfg.succ_iter(block1_id).collect::>(); + let block2_successors = cfg.succ_iter(block2_id).collect::>(); + + assert_eq!(block0_predecessors.len(), 0); + assert_eq!(block1_predecessors.len(), 2); + assert_eq!(block2_predecessors.len(), 2); + + assert_eq!(block1_predecessors.contains(&block0_id), true); + assert_eq!(block1_predecessors.contains(&block1_id), true); + assert_eq!(block2_predecessors.contains(&block0_id), true); + assert_eq!(block2_predecessors.contains(&block1_id), true); + + assert_eq!(block0_successors.len(), 2); + assert_eq!(block1_successors.len(), 2); + assert_eq!(block2_successors.len(), 0); + + assert_eq!(block0_successors.contains(&block1_id), true); + assert_eq!(block0_successors.contains(&block2_id), true); + assert_eq!(block1_successors.contains(&block1_id), true); + assert_eq!(block1_successors.contains(&block2_id), true); + } + + // Modify function to form: + // fn func { + // block0(cond: u1): + // jmpif cond(), then: block1, else: ret_block + // block1(): + // jmpif cond(), then: block1, else: block2 + // block2(): + // jmp ret_block + // ret_block(): + // return + // } + let ret_block_id = func.dfg.new_block(); + func.dfg[ret_block_id] + .set_terminator(TerminatorInstruction::Return { return_values: vec![] }); + func.dfg[block2_id].set_terminator(TerminatorInstruction::Jmp { + destination: ret_block_id, + arguments: vec![], + }); + func.dfg[block0_id].set_terminator(TerminatorInstruction::JmpIf { + condition: cond, + then_destination: block1_id, + else_destination: ret_block_id, + arguments: vec![], + }); + + // Recompute new and changed blocks + cfg.recompute_block(&mut func, block0_id); + cfg.recompute_block(&mut func, block2_id); + cfg.recompute_block(&mut func, ret_block_id); + + { + let block0_predecessors = cfg.pred_iter(block0_id).collect::>(); + let block1_predecessors = cfg.pred_iter(block1_id).collect::>(); + let block2_predecessors = cfg.pred_iter(block2_id).collect::>(); + + let block0_successors = 
cfg.succ_iter(block0_id).collect::>(); + let block1_successors = cfg.succ_iter(block1_id).collect::>(); + let block2_successors = cfg.succ_iter(block2_id).collect::>(); + + assert_eq!(block0_predecessors.len(), 0); + assert_eq!(block1_predecessors.len(), 2); + assert_eq!(block2_predecessors.len(), 1); + + assert_eq!(block1_predecessors.contains(&block0_id), true); + assert_eq!(block1_predecessors.contains(&block1_id), true); + assert_eq!(block2_predecessors.contains(&block0_id), false); + assert_eq!(block2_predecessors.contains(&block1_id), true); + + assert_eq!(block0_successors.len(), 2); + assert_eq!(block1_successors.len(), 2); + assert_eq!(block2_successors.len(), 1); + + assert_eq!(block0_successors.contains(&block1_id), true); + assert_eq!(block0_successors.contains(&ret_block_id), true); + assert_eq!(block1_successors.contains(&block1_id), true); + assert_eq!(block1_successors.contains(&block2_id), true); + assert_eq!(block2_successors.contains(&ret_block_id), true); + } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index f92cae79b75..c21fc2c3f35 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -95,6 +95,16 @@ impl DataFlowGraph { }) } + /// Get an iterator over references to each basic block within the dfg, paired with the basic + /// block's id. + /// + /// The pairs are order by id, which is not guaranteed to be meaningful. + pub(crate) fn basic_blocks_iter( + &self, + ) -> impl ExactSizeIterator { + self.blocks.iter() + } + pub(crate) fn block_parameters(&self, block: BasicBlockId) -> &[ValueId] { self.blocks[block].parameters() } @@ -237,6 +247,13 @@ impl std::ops::Index for DataFlowGraph { } } +impl std::ops::IndexMut for DataFlowGraph { + /// Get a mutable reference to a function's basic block for the given id. 
+ fn index_mut(&mut self, id: BasicBlockId) -> &mut BasicBlock { + &mut self.blocks[id] + } +} + #[cfg(test)] mod tests { use super::DataFlowGraph; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index bb526076e3b..5937b374726 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -106,6 +106,14 @@ impl DenseMap { self.storage.push(f(id)); id } + + /// Gets an iterator to a reference to each element in the dense map paired with its id. + /// + /// The id-element pairs are ordered by the numeric values of the ids. + pub(crate) fn iter(&self) -> impl ExactSizeIterator, &T)> { + let ids_iter = (0..self.storage.len()).into_iter().map(|idx| Id::new(idx)); + ids_iter.zip(self.storage.iter()) + } } impl Default for DenseMap { From 573966db3e71d2cc69e744e87052bd5c9448a2e2 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Tue, 25 Apr 2023 16:40:54 +0100 Subject: [PATCH 37/63] chore: add RUST_BACKTRACE environment variable to nix config (#1216) --- flake.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/flake.nix b/flake.nix index fc8e5fa6868..28859ebb2b9 100644 --- a/flake.nix +++ b/flake.nix @@ -79,6 +79,9 @@ # hidden from the developer - i.e. 
when they see the command being run via `nix flake check` RUST_TEST_THREADS = "1"; + # We enable backtraces on any failure for help with debugging + RUST_BACKTRACE = "1"; + # We set the environment variable because barretenberg must be compiled in a special way for wasm BARRETENBERG_BIN_DIR = "${pkgs.barretenberg-wasm}/bin"; From 27b8bf8ca6a60ba5882525f645ace2fdde4b2a41 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 25 Apr 2023 14:09:33 -0400 Subject: [PATCH 38/63] chore(ssa refactor): Implement ssa-gen for binary, block, tuple, extract-tuple-field, and semi expressions (#1217) * Implement binary instructions * Cleanup PR --- .../src/ssa_refactor/ir/instruction.rs | 51 ++++++++----- .../ssa_builder/function_builder.rs | 21 +++-- .../src/ssa_refactor/ssa_gen/context.rs | 76 +++++++++++++++++++ .../src/ssa_refactor/ssa_gen/mod.rs | 50 +++++++++--- .../src/ssa_refactor/ssa_gen/value.rs | 1 + 5 files changed, 165 insertions(+), 34 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 442f1dbd47e..9b5aeb9388c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -170,32 +170,43 @@ pub(crate) struct Binary { } /// Binary Operations allowed in the IR. +/// Aside from the comparison operators (Eq and Lt), all operators +/// will return the same type as their operands. +/// The operand types must match for all binary operators. +/// All binary operators are also only for numeric types. To implement +/// e.g. equality for a compound type like a struct, one must add a +/// separate Eq operation for each field and combine them later with And. #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub(crate) enum BinaryOp { - /// Addition of two types. - /// The result will have the same type as - /// the operands. + /// Addition of lhs + rhs. Add, - /// Subtraction of two types. 
- /// The result will have the same type as - /// the operands. + /// Subtraction of lhs - rhs. Sub, - /// Multiplication of two types. - /// The result will have the same type as - /// the operands. + /// Multiplication of lhs * rhs. Mul, - /// Division of two types. - /// The result will have the same type as - /// the operands. + /// Division of lhs / rhs. Div, + /// Modulus of lhs % rhs. + Mod, /// Checks whether two types are equal. /// Returns true if the types were equal and /// false otherwise. Eq, - /// Checks whether two types are equal. - /// Returns true if the types were not equal and - /// false otherwise. - Neq, + /// Checks whether the lhs is less than the rhs. + /// All other comparison operators should be translated + /// to less than. For example (a > b) = (b < a) = !(a >= b) = !(b <= a). + /// The result will always be a u1. + Lt, + /// Bitwise and (&) + And, + /// Bitwise or (|) + Or, + /// Bitwise xor (^) + Xor, + /// Shift lhs left by rhs bits (<<) + Shl, + /// Shift lhs right by rhs bits (>>) + Shr, } impl std::fmt::Display for BinaryOp { @@ -206,7 +217,13 @@ impl std::fmt::Display for BinaryOp { BinaryOp::Mul => write!(f, "mul"), BinaryOp::Div => write!(f, "div"), BinaryOp::Eq => write!(f, "eq"), - BinaryOp::Neq => write!(f, "neq"), + BinaryOp::Mod => write!(f, "mod"), + BinaryOp::Lt => write!(f, "lt"), + BinaryOp::And => write!(f, "and"), + BinaryOp::Or => write!(f, "or"), + BinaryOp::Xor => write!(f, "xor"), + BinaryOp::Shl => write!(f, "shl"), + BinaryOp::Shr => write!(f, "shr"), } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index c76d2943abe..7911aa2988a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -100,12 +100,23 @@ impl<'ssa> FunctionBuilder<'ssa> { self.insert_instruction(Instruction::Store { 
address, value }); } - /// Insert a Store instruction at the end of the current block, storing the given element - /// at the given address. Expects that the address points to a previous Allocate instruction. - /// Returns the result of the add instruction. - pub(crate) fn insert_add(&mut self, lhs: ValueId, rhs: ValueId, typ: Type) -> ValueId { - let operator = BinaryOp::Add; + /// Insert a binary instruction at the end of the current block. + /// Returns the result of the binary instruction. + pub(crate) fn insert_binary( + &mut self, + lhs: ValueId, + operator: BinaryOp, + rhs: ValueId, + typ: Type, + ) -> ValueId { let id = self.insert_instruction(Instruction::Binary(Binary { lhs, rhs, operator })); self.current_function.dfg.make_instruction_results(id, typ)[0] } + + /// Insert a not instruction at the end of the current block. + /// Returns the result of the instruction. + pub(crate) fn insert_not(&mut self, rhs: ValueId, typ: Type) -> ValueId { + let id = self.insert_instruction(Instruction::Not(rhs)); + self.current_function.dfg.make_instruction_results(id, typ)[0] + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 32133feea13..8f7b4e3de9a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -6,7 +6,9 @@ use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters}; use noirc_frontend::monomorphization::ast::{FuncId, Program}; use noirc_frontend::Signedness; +use crate::ssa_refactor::ir::instruction::BinaryOp; use crate::ssa_refactor::ir::types::Type; +use crate::ssa_refactor::ir::value::ValueId; use crate::ssa_refactor::ssa_builder::SharedBuilderContext; use crate::ssa_refactor::{ ir::function::FunctionId as IrFunctionId, ssa_builder::function_builder::FunctionBuilder, @@ -123,6 +125,80 @@ impl<'a> FunctionContext<'a> { ast::Type::Vec(_) => Type::Reference, } } + + /// 
Insert a unit constant into the current function if not already + /// present, and return its value + pub(super) fn unit_value(&mut self) -> Values { + self.builder.numeric_constant(0u128.into(), Type::Unit).into() + } + + /// Insert a binary instruction at the end of the current block. + /// Converts the form of the binary instruction as necessary + /// (e.g. swapping arguments, inserting a not) to represent it in the IR. + /// For example, (a <= b) is represented as !(b < a) + pub(super) fn insert_binary( + &mut self, + mut lhs: ValueId, + operator: noirc_frontend::BinaryOpKind, + mut rhs: ValueId, + ) -> Values { + let op = convert_operator(operator); + + if operator_requires_swapped_operands(operator) { + std::mem::swap(&mut lhs, &mut rhs); + } + + // TODO: Rework how types are stored. + // They should be on values rather than on instruction results + let typ = Type::field(); + let mut result = self.builder.insert_binary(lhs, op, rhs, typ); + + if operator_requires_not(operator) { + result = self.builder.insert_not(result, typ); + } + result.into() + } +} + +/// True if the given operator cannot be encoded directly and needs +/// to be represented as !(some other operator) +fn operator_requires_not(op: noirc_frontend::BinaryOpKind) -> bool { + use noirc_frontend::BinaryOpKind::*; + matches!(op, NotEqual | LessEqual | GreaterEqual) +} + +/// True if the given operator cannot be encoded directly and needs +/// to have its lhs and rhs swapped to be represented with another operator. +/// Example: (a > b) needs to be represented as (b < a) +fn operator_requires_swapped_operands(op: noirc_frontend::BinaryOpKind) -> bool { + use noirc_frontend::BinaryOpKind::*; + matches!(op, Greater | LessEqual) +} + +/// Converts the given operator to the appropriate BinaryOp. +/// Take care when using this to insert a binary instruction: this requires +/// checking operator_requires_not and operator_requires_swapped_operands +/// to represent the full operation correctly. 
+fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { + use noirc_frontend::BinaryOpKind; + match op { + BinaryOpKind::Add => BinaryOp::Add, + BinaryOpKind::Subtract => BinaryOp::Sub, + BinaryOpKind::Multiply => BinaryOp::Mul, + BinaryOpKind::Divide => BinaryOp::Div, + BinaryOpKind::Modulo => BinaryOp::Mod, + BinaryOpKind::Equal => BinaryOp::Eq, + BinaryOpKind::NotEqual => BinaryOp::Eq, // Requires not + BinaryOpKind::Less => BinaryOp::Lt, + BinaryOpKind::Greater => BinaryOp::Lt, // Requires operand swap + BinaryOpKind::LessEqual => BinaryOp::Lt, // Requires operand swap and not + BinaryOpKind::GreaterEqual => BinaryOp::Lt, // Requires not + BinaryOpKind::And => BinaryOp::And, + BinaryOpKind::Or => BinaryOp::Or, + BinaryOpKind::Xor => BinaryOp::Xor, + BinaryOpKind::ShiftRight => BinaryOp::Shr, + BinaryOpKind::ShiftLeft => BinaryOp::Shl, + } } impl SharedContext { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 2f9c6646282..3b469ad9664 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -12,7 +12,10 @@ use self::{ value::{Tree, Values}, }; -use super::{ir::types::Type, ssa_builder::SharedBuilderContext}; +use super::{ + ir::{instruction::BinaryOp, types::Type, value::ValueId}, + ssa_builder::SharedBuilderContext, +}; pub(crate) fn generate_ssa(program: Program) { let context = SharedContext::new(program); @@ -58,6 +61,17 @@ impl<'a> FunctionContext<'a> { } } + /// Codegen any non-tuple expression so that we can unwrap the Values + /// tree to return a single value for use with most SSA instructions. 
+ fn codegen_non_tuple_expression(&mut self, expr: &Expression) -> ValueId { + match self.codegen_expression(expr) { + Tree::Branch(branches) => { + panic!("codegen_non_tuple_expression called on tuple {branches:?}") + } + Tree::Leaf(value) => value.eval(), + } + } + fn codegen_ident(&mut self, _ident: &ast::Ident) -> Values { todo!() } @@ -103,7 +117,7 @@ impl<'a> FunctionContext<'a> { array } else { let offset = self.builder.numeric_constant((i as u128).into(), Type::field()); - self.builder.insert_add(array, offset, Type::field()) + self.builder.insert_binary(array, BinaryOp::Add, offset, Type::field()) }; self.builder.insert_store(address, value.eval()); i += 1; @@ -113,16 +127,22 @@ impl<'a> FunctionContext<'a> { array.into() } - fn codegen_block(&mut self, _block: &[Expression]) -> Values { - todo!() + fn codegen_block(&mut self, block: &[Expression]) -> Values { + let mut result = self.unit_value(); + for expr in block { + result = self.codegen_expression(expr); + } + result } fn codegen_unary(&mut self, _unary: &ast::Unary) -> Values { todo!() } - fn codegen_binary(&mut self, _binary: &ast::Binary) -> Values { - todo!() + fn codegen_binary(&mut self, binary: &ast::Binary) -> Values { + let lhs = self.codegen_non_tuple_expression(&binary.lhs); + let rhs = self.codegen_non_tuple_expression(&binary.rhs); + self.insert_binary(lhs, binary.operator, rhs) } fn codegen_index(&mut self, _index: &ast::Index) -> Values { @@ -141,12 +161,17 @@ impl<'a> FunctionContext<'a> { todo!() } - fn codegen_tuple(&mut self, _tuple: &[Expression]) -> Values { - todo!() + fn codegen_tuple(&mut self, tuple: &[Expression]) -> Values { + Tree::Branch(vecmap(tuple, |expr| self.codegen_expression(expr))) } - fn codegen_extract_tuple_field(&mut self, _tuple: &Expression, _index: usize) -> Values { - todo!() + fn codegen_extract_tuple_field(&mut self, tuple: &Expression, index: usize) -> Values { + match self.codegen_expression(tuple) { + Tree::Branch(mut trees) => trees.remove(index), + 
Tree::Leaf(value) => { + unreachable!("Tried to extract tuple index {index} from non-tuple {value:?}") + } + } } fn codegen_call(&mut self, _call: &ast::Call) -> Values { @@ -165,7 +190,8 @@ impl<'a> FunctionContext<'a> { todo!() } - fn codegen_semi(&mut self, _semi: &Expression) -> Values { - todo!() + fn codegen_semi(&mut self, expr: &Expression) -> Values { + self.codegen_expression(expr); + self.unit_value() } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index c3911d367c1..83a5d15c904 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -2,6 +2,7 @@ use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; +#[derive(Debug)] pub(super) enum Tree { Branch(Vec>), Leaf(T), From c5457f4c201246fda0d8e0a1f0f08245cf26c3a7 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 25 Apr 2023 15:13:47 -0400 Subject: [PATCH 39/63] chore(ssa refactor): Update how instruction result types are retrieved (#1222) * Implement binary instructions * Cleanup PR * Change how instruction result types are handled * Reorganize make_instruction flow a bit --- .../src/ssa_refactor/ir/dfg.rs | 75 ++++++++++--------- .../src/ssa_refactor/ir/instruction.rs | 49 +++++++++--- .../src/ssa_refactor/ir/types.rs | 4 + .../src/ssa_refactor/ir/value.rs | 10 +++ .../ssa_builder/function_builder.rs | 30 ++++---- .../src/ssa_refactor/ssa_gen/context.rs | 7 +- .../src/ssa_refactor/ssa_gen/mod.rs | 4 +- 7 files changed, 111 insertions(+), 68 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index c21fc2c3f35..54ffd5a05f6 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -2,7 +2,7 @@ use 
super::{ basic_block::{BasicBlock, BasicBlockId}, constant::{NumericConstant, NumericConstantId}, function::Signature, - instruction::{Instruction, InstructionId}, + instruction::{Instruction, InstructionId, InstructionResultType}, map::{DenseMap, Id, SecondaryMap, TwoWayMap}, types::Type, value::{Value, ValueId}, @@ -110,11 +110,19 @@ impl DataFlowGraph { } /// Inserts a new instruction into the DFG. - /// This does not add the instruction to the block or populate the instruction's result list - pub(crate) fn make_instruction(&mut self, instruction_data: Instruction) -> InstructionId { + /// This does not add the instruction to the block. + /// Returns the id of the new instruction and its results. + /// + /// Populates the instruction's results with the given ctrl_typevars if the instruction + /// is a Load, Call, or Intrinsic. Otherwise the instruction's results will be known + /// by the instruction itself and None can safely be passed for this parameter. + pub(crate) fn make_instruction( + &mut self, + instruction_data: Instruction, + ctrl_typevars: Option>, + ) -> InstructionId { let id = self.instructions.insert(instruction_data); - // Create a new vector to store the potential results for the instruction. - self.results.insert(id, Default::default()); + self.make_instruction_results(id, ctrl_typevars); id } @@ -134,46 +142,51 @@ impl DataFlowGraph { /// Attaches results to the instruction, clearing any previous results. /// /// Returns the results of the instruction - pub(crate) fn make_instruction_results( + fn make_instruction_results( &mut self, instruction_id: InstructionId, - ctrl_typevar: Type, - ) -> &[ValueId] { - // Clear all of the results instructions associated with this - // instruction. 
- self.results.get_mut(&instruction_id).expect("all instructions should have a `result` allocation when instruction was added to the DFG").clear(); + ctrl_typevars: Option>, + ) { + self.results.insert(instruction_id, Default::default()); // Get all of the types that this instruction produces // and append them as results. - let typs = self.instruction_result_types(instruction_id, ctrl_typevar); + let typs = self.instruction_result_types(instruction_id, ctrl_typevars); for typ in typs { self.append_result(instruction_id, typ); } - - self.results.get_mut(&instruction_id) - .expect("all instructions should have a `result` allocation when instruction was added to the DFG") - .as_slice() } /// Return the result types of this instruction. /// - /// For example, an addition instruction will return - /// one type which is the type of the operands involved. - /// This is the `ctrl_typevar` in this case. + /// In the case of Load, Call, and Intrinsic, the function's result + /// type may be unknown. In this case, the given ctrl_typevars are returned instead. + /// ctrl_typevars is taken in as an Option since it is common to omit them when getting + /// the type of an instruction that does not require them. Compared to passing an empty Vec, + /// Option has the benefit of panicking if it is accidentally used for a Call instruction, + /// rather than silently returning the empty Vec and continuing. fn instruction_result_types( &self, instruction_id: InstructionId, - ctrl_typevar: Type, + ctrl_typevars: Option>, ) -> Vec { - // Check if it is a call instruction. If so, we don't support that yet - let ins_data = &self.instructions[instruction_id]; - match ins_data { - Instruction::Call { .. 
} => todo!("function calls are not supported yet"), - ins => ins.return_types(ctrl_typevar), + let instruction = &self.instructions[instruction_id]; + match instruction.result_type() { + InstructionResultType::Known(typ) => vec![typ], + InstructionResultType::Operand(value) => vec![self.type_of_value(value)], + InstructionResultType::None => vec![], + InstructionResultType::Unknown => { + ctrl_typevars.expect("Control typevars required but not given") + } } } + /// Returns the type of a given value + pub(crate) fn type_of_value(&self, value: ValueId) -> Type { + self.values[value].get_type() + } + /// Appends a result type to the instruction. pub(crate) fn append_result(&mut self, instruction_id: InstructionId, typ: Type) -> ValueId { let results = self.results.get_mut(&instruction_id).unwrap(); @@ -257,21 +270,15 @@ impl std::ops::IndexMut for DataFlowGraph { #[cfg(test)] mod tests { use super::DataFlowGraph; - use crate::ssa_refactor::ir::{ - instruction::Instruction, - types::{NumericType, Type}, - }; + use crate::ssa_refactor::ir::instruction::Instruction; #[test] fn make_instruction() { let mut dfg = DataFlowGraph::default(); let ins = Instruction::Allocate { size: 20 }; - let ins_id = dfg.make_instruction(ins); - - let num_results = - dfg.make_instruction_results(ins_id, Type::Numeric(NumericType::NativeField)).len(); + let ins_id = dfg.make_instruction(ins, None); let results = dfg.instruction_results(ins_id); - assert_eq!(results.len(), num_results); + assert_eq!(results.len(), 1); } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 9b5aeb9388c..dcab6e04006 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -105,23 +105,39 @@ impl Instruction { } } - /// Returns the types that this instruction will return. 
- pub(crate) fn return_types(&self, ctrl_typevar: Type) -> Vec { + /// Returns the type that this instruction will return. + pub(crate) fn result_type(&self) -> InstructionResultType { match self { - Instruction::Binary(_) => vec![ctrl_typevar], - Instruction::Cast(_, typ) => vec![*typ], - Instruction::Not(_) => vec![ctrl_typevar], - Instruction::Truncate { .. } => vec![ctrl_typevar], - Instruction::Constrain(_) => vec![], - Instruction::Call { .. } => vec![], - Instruction::Intrinsic { .. } => vec![], - Instruction::Allocate { .. } => vec![Type::Reference], - Instruction::Load { .. } => vec![ctrl_typevar], - Instruction::Store { .. } => vec![], + Instruction::Binary(binary) => binary.result_type(), + Instruction::Cast(_, typ) => InstructionResultType::Known(*typ), + Instruction::Allocate { .. } => InstructionResultType::Known(Type::Reference), + Instruction::Not(value) | Instruction::Truncate { value, .. } => { + InstructionResultType::Operand(*value) + } + Instruction::Constrain(_) | Instruction::Store { .. } => InstructionResultType::None, + Instruction::Load { .. } | Instruction::Call { .. } | Instruction::Intrinsic { .. } => { + InstructionResultType::Unknown + } } } } +/// The possible return values for Instruction::return_types +pub(crate) enum InstructionResultType { + /// The result type of this instruction matches that of this operand + Operand(ValueId), + + /// The result type of this instruction is known to be this type - independent of its operands. + Known(Type), + + /// The result type of this function is unknown and separate from its operand types. + /// This occurs for function and intrinsic calls. + Unknown, + + /// This instruction does not return any results. 
+ None, +} + /// These are operations which can exit a basic block /// ie control flow type operations /// @@ -169,6 +185,15 @@ pub(crate) struct Binary { pub(crate) operator: BinaryOp, } +impl Binary { + pub(crate) fn result_type(&self) -> InstructionResultType { + match self.operator { + BinaryOp::Eq | BinaryOp::Lt => InstructionResultType::Known(Type::bool()), + _ => InstructionResultType::Operand(self.lhs), + } + } +} + /// Binary Operations allowed in the IR. /// Aside from the comparison operators (Eq and Lt), all operators /// will return the same type as their operands. diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index 888d7d128d1..8a0f825a117 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -38,6 +38,10 @@ impl Type { Type::Numeric(NumericType::Unsigned { bit_size }) } + pub(crate) fn bool() -> Type { + Type::unsigned(1) + } + pub(crate) fn field() -> Type { Type::Numeric(NumericType::NativeField) } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index 537eabb0cab..a559522fadd 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -28,3 +28,13 @@ pub(crate) enum Value { /// This Value originates from a numeric constant NumericConstant { constant: NumericConstantId, typ: Type }, } + +impl Value { + pub(crate) fn get_type(&self) -> Type { + match self { + Value::Instruction { typ, .. } => *typ, + Value::Param { typ, .. } => *typ, + Value::NumericConstant { typ, .. 
} => *typ, + } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index 7911aa2988a..b30ff11c2e1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -3,7 +3,7 @@ use acvm::FieldElement; use crate::ssa_refactor::ir::{ basic_block::BasicBlockId, function::{Function, FunctionId}, - instruction::{Binary, BinaryOp, Instruction, InstructionId}, + instruction::{Binary, BinaryOp, Instruction}, types::Type, value::ValueId, }; @@ -71,18 +71,21 @@ impl<'ssa> FunctionBuilder<'ssa> { self.numeric_constant(value.into(), Type::field()) } - fn insert_instruction(&mut self, instruction: Instruction) -> InstructionId { - let id = self.current_function.dfg.make_instruction(instruction); + fn insert_instruction( + &mut self, + instruction: Instruction, + ctrl_typevars: Option>, + ) -> &[ValueId] { + let id = self.current_function.dfg.make_instruction(instruction, ctrl_typevars); self.current_function.dfg.insert_instruction_in_block(self.current_block, id); - id + self.current_function.dfg.instruction_results(id) } /// Insert an allocate instruction at the end of the current block, allocating the /// given amount of field elements. Returns the result of the allocate instruction, /// which is always a Reference to the allocated data. pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { - let id = self.insert_instruction(Instruction::Allocate { size: size_to_allocate }); - self.current_function.dfg.make_instruction_results(id, Type::Reference)[0] + self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] } /// Insert a Load instruction at the end of the current block, loading from the given address @@ -90,14 +93,13 @@ impl<'ssa> FunctionBuilder<'ssa> { /// a single value. 
Loading multiple values (such as a tuple) will require multiple loads. /// Returns the element that was loaded. pub(crate) fn insert_load(&mut self, address: ValueId, type_to_load: Type) -> ValueId { - let id = self.insert_instruction(Instruction::Load { address }); - self.current_function.dfg.make_instruction_results(id, type_to_load)[0] + self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] } /// Insert a Store instruction at the end of the current block, storing the given element /// at the given address. Expects that the address points to a previous Allocate instruction. pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { - self.insert_instruction(Instruction::Store { address, value }); + self.insert_instruction(Instruction::Store { address, value }, None); } /// Insert a binary instruction at the end of the current block. @@ -107,16 +109,14 @@ impl<'ssa> FunctionBuilder<'ssa> { lhs: ValueId, operator: BinaryOp, rhs: ValueId, - typ: Type, ) -> ValueId { - let id = self.insert_instruction(Instruction::Binary(Binary { lhs, rhs, operator })); - self.current_function.dfg.make_instruction_results(id, typ)[0] + let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); + self.insert_instruction(instruction, None)[0] } /// Insert a not instruction at the end of the current block. /// Returns the result of the instruction. 
- pub(crate) fn insert_not(&mut self, rhs: ValueId, typ: Type) -> ValueId { - let id = self.insert_instruction(Instruction::Not(rhs)); - self.current_function.dfg.make_instruction_results(id, typ)[0] + pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { + self.insert_instruction(Instruction::Not(rhs), None)[0] } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 8f7b4e3de9a..f76a6675077 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -148,13 +148,10 @@ impl<'a> FunctionContext<'a> { std::mem::swap(&mut lhs, &mut rhs); } - // TODO: Rework how types are stored. - // They should be on values rather than on instruction results - let typ = Type::field(); - let mut result = self.builder.insert_binary(lhs, op, rhs, typ); + let mut result = self.builder.insert_binary(lhs, op, rhs); if operator_requires_not(operator) { - result = self.builder.insert_not(result, typ); + result = self.builder.insert_not(result); } result.into() } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 3b469ad9664..553b5eb2218 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -116,8 +116,8 @@ impl<'a> FunctionContext<'a> { let address = if i == 0 { array } else { - let offset = self.builder.numeric_constant((i as u128).into(), Type::field()); - self.builder.insert_binary(array, BinaryOp::Add, offset, Type::field()) + let offset = self.builder.field_constant(i as u128); + self.builder.insert_binary(array, BinaryOp::Add, offset) }; self.builder.insert_store(address, value.eval()); i += 1; From a10182e46c1bb2bd37237ff86f214fd11295624c Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 25 Apr 2023 21:56:57 
+0100 Subject: [PATCH 40/63] chore(nargo): update panic message to suggest searching for similar issues (#1224) chore: update panic message to suggest searching for similar issues --- crates/nargo_cli/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/nargo_cli/src/main.rs b/crates/nargo_cli/src/main.rs index bd3f1352bda..a73785c64c6 100644 --- a/crates/nargo_cli/src/main.rs +++ b/crates/nargo_cli/src/main.rs @@ -7,7 +7,7 @@ fn main() -> eyre::Result<()> { // Register a panic hook to display more readable panic messages to end-users let (panic_hook, _) = HookBuilder::default() .display_env_section(false) - .panic_section("This is a bug. Consider opening an issue at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml") + .panic_section("This is a bug. We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml") .into_hooks(); panic_hook.install(); From 3a65f304c25e8239f9735ce1e6dee29d7eecc244 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Wed, 26 Apr 2023 18:19:45 +0100 Subject: [PATCH 41/63] feat(noir): added `distinct` keyword (#1219) * feat(noir): added `distinct` keyword for preventing witness overlap in program input and output * chore(noir): dedup within output also. 
Futhermore: - reorder keywords (distinct first) - add test case - fix up comments & typos - tidy up * chore(hir): hoist "main" constant for reuse * chore(noir): comment typo Co-authored-by: jfecher --------- Co-authored-by: jfecher --- crates/noirc_abi/src/lib.rs | 24 ++++++++++++++++++ crates/noirc_evaluator/src/lib.rs | 20 +++++++++++++-- .../src/ssa/acir_gen/operations/return.rs | 12 ++++++++- crates/noirc_frontend/src/ast/expression.rs | 1 + crates/noirc_frontend/src/hir/def_map/mod.rs | 5 ++-- .../src/hir/resolution/errors.rs | 14 +++++++++++ .../src/hir/resolution/resolver.rs | 22 ++++++++++++++-- .../noirc_frontend/src/hir/type_check/mod.rs | 1 + crates/noirc_frontend/src/hir_def/function.rs | 4 ++- crates/noirc_frontend/src/lexer/token.rs | 3 +++ .../src/monomorphization/ast.rs | 13 ++++++++-- .../src/monomorphization/mod.rs | 5 ++-- crates/noirc_frontend/src/parser/parser.rs | 25 +++++++++++++++---- 13 files changed, 132 insertions(+), 17 deletions(-) diff --git a/crates/noirc_abi/src/lib.rs b/crates/noirc_abi/src/lib.rs index dbd935dcde0..191128b9407 100644 --- a/crates/noirc_abi/src/lib.rs +++ b/crates/noirc_abi/src/lib.rs @@ -85,6 +85,30 @@ impl std::fmt::Display for AbiVisibility { } } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +/// Represents whether the return value should compromise of unique witness indices such that no +/// index occurs within the program's abi more than once. +/// +/// This is useful for application stacks that require an uniform abi across across multiple +/// circuits. When index duplication is allowed, the compiler may identify that a public input +/// reaches the output unaltered and is thus referenced directly, causing the input and output +/// witness indices to overlap. Similarly, repetitions of copied values in the output may be +/// optimized away. 
+pub enum AbiDistinctness { + Distinct, + DuplicationAllowed, +} + +impl std::fmt::Display for AbiDistinctness { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + AbiDistinctness::Distinct => write!(f, "distinct"), + AbiDistinctness::DuplicationAllowed => write!(f, "duplication-allowed"), + } + } +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum Sign { diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 8b3cbb009a9..64a02061b0f 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -54,6 +54,9 @@ pub struct Evaluator { // and increasing as for `public_parameters`. We then use a `Vec` rather // than a `BTreeSet` to preserve this order for the ABI. return_values: Vec, + // If true, indicates that the resulting ACIR should enforce that all inputs and outputs are + // comprised of unique witness indices by having extra constraints if necessary. + return_is_distinct: bool, opcodes: Vec, } @@ -102,6 +105,11 @@ pub fn create_circuit( } impl Evaluator { + // Returns true if the `witness_index` appears in the program's input parameters. + fn is_abi_input(&self, witness_index: Witness) -> bool { + witness_index.as_usize() <= self.num_witnesses_abi_len + } + // Returns true if the `witness_index` // was created in the ABI as a private input. // @@ -111,11 +119,17 @@ impl Evaluator { // If the `witness_index` is more than the `num_witnesses_abi_len` // then it was created after the ABI was processed and is therefore // an intermediate variable. 
- let is_intermediate_variable = witness_index.as_usize() > self.num_witnesses_abi_len; let is_public_input = self.public_parameters.contains(&witness_index); - !is_intermediate_variable && !is_public_input + self.is_abi_input(witness_index) && !is_public_input + } + + // True if the main function return has the `distinct` keyword and this particular witness + // index has already occurred elsewhere in the abi's inputs and outputs. + fn should_proxy_witness_for_abi_output(&self, witness_index: Witness) -> bool { + self.return_is_distinct + && (self.is_abi_input(witness_index) || self.return_values.contains(&witness_index)) } // Creates a new Witness index @@ -139,6 +153,8 @@ impl Evaluator { enable_logging: bool, show_output: bool, ) -> Result<(), RuntimeError> { + self.return_is_distinct = + program.return_distinctness == noirc_abi::AbiDistinctness::Distinct; let mut ir_gen = IrGenerator::new(program); self.parse_abi_alt(&mut ir_gen); diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs index 3269af06d16..6aaa3b2fbbd 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs @@ -1,3 +1,5 @@ +use acvm::acir::native_types::Expression; + use crate::{ errors::RuntimeErrorKind, ssa::{ @@ -46,7 +48,15 @@ pub(crate) fn evaluate( "we do not allow private ABI inputs to be returned as public outputs", ))); } - evaluator.return_values.push(witness); + // Check if the outputted witness needs separating from an existing occurrence in the + // abi. This behavior stems from usage of the `distinct` keyword. 
+ let return_witness = if evaluator.should_proxy_witness_for_abi_output(witness) { + let proxy_constraint = Expression::from(witness); + evaluator.create_intermediate_variable(proxy_constraint) + } else { + witness + }; + evaluator.return_values.push(return_witness); } } diff --git a/crates/noirc_frontend/src/ast/expression.rs b/crates/noirc_frontend/src/ast/expression.rs index ac6161ddac1..9be6f715a14 100644 --- a/crates/noirc_frontend/src/ast/expression.rs +++ b/crates/noirc_frontend/src/ast/expression.rs @@ -325,6 +325,7 @@ pub struct FunctionDefinition { pub span: Span, pub return_type: UnresolvedType, pub return_visibility: noirc_abi::AbiVisibility, + pub return_distinctness: noirc_abi::AbiDistinctness, } /// Describes the types of smart contract functions that are allowed. diff --git a/crates/noirc_frontend/src/hir/def_map/mod.rs b/crates/noirc_frontend/src/hir/def_map/mod.rs index 25e0488a7b6..fdaf2dd3acc 100644 --- a/crates/noirc_frontend/src/hir/def_map/mod.rs +++ b/crates/noirc_frontend/src/hir/def_map/mod.rs @@ -18,6 +18,9 @@ pub use module_data::*; mod namespace; pub use namespace::*; +/// The name that is used for a non-contract program's entry-point function. 
+pub const MAIN_FUNCTION: &str = "main"; + // XXX: Ultimately, we want to constrain an index to be of a certain type just like in RA /// Lets first check if this is offered by any external crate /// XXX: RA has made this a crate on crates.io @@ -104,8 +107,6 @@ impl CrateDefMap { /// Find the main function for this crate pub fn main_function(&self) -> Option { - const MAIN_FUNCTION: &str = "main"; - let root_module = &self.modules()[self.root.0]; // This function accepts an Ident, so we attach a dummy span to diff --git a/crates/noirc_frontend/src/hir/resolution/errors.rs b/crates/noirc_frontend/src/hir/resolution/errors.rs index 9406474a226..c57e4c890d2 100644 --- a/crates/noirc_frontend/src/hir/resolution/errors.rs +++ b/crates/noirc_frontend/src/hir/resolution/errors.rs @@ -32,6 +32,8 @@ pub enum ResolverError { UnnecessaryPub { ident: Ident }, #[error("Required 'pub', main function must return public value")] NecessaryPub { ident: Ident }, + #[error("'distinct' keyword can only be used with main method")] + DistinctNotAllowed { ident: Ident }, #[error("Expected const value where non-constant value was used")] ExpectedComptimeVariable { name: String, span: Span }, #[error("Missing expression for declared constant")] @@ -176,6 +178,18 @@ impl From for Diagnostic { diag.add_note("The `pub` keyword is mandatory for the entry-point function return type because the verifier cannot retrieve private witness and thus the function will not be able to return a 'priv' value".to_owned()); diag } + ResolverError::DistinctNotAllowed { ident } => { + let name = &ident.0.contents; + + let mut diag = Diagnostic::simple_error( + format!("Invalid `distinct` keyword on return type of function {name}"), + "Invalid distinct on return type".to_string(), + ident.0.span(), + ); + + diag.add_note("The `distinct` keyword is only valid when used on the main function of a program, as its only purpose is to ensure that all witness indices that occur in the abi are unique".to_owned()); + diag 
+ } ResolverError::ExpectedComptimeVariable { name, span } => Diagnostic::simple_error( format!("expected constant variable where non-constant variable {name} was used"), "expected const variable".to_string(), diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index cfb354498ab..98cf5993edf 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -22,7 +22,7 @@ use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::rc::Rc; use crate::graph::CrateId; -use crate::hir::def_map::{ModuleDefId, TryFromModuleDefId}; +use crate::hir::def_map::{ModuleDefId, TryFromModuleDefId, MAIN_FUNCTION}; use crate::hir_def::stmt::{HirAssignStatement, HirLValue, HirPattern}; use crate::node_interner::{ DefinitionId, DefinitionKind, ExprId, FuncId, NodeInterner, StmtId, StructId, @@ -637,6 +637,12 @@ impl<'a> Resolver<'a> { self.push_err(ResolverError::NecessaryPub { ident: func.name_ident().clone() }); } + if !self.distinct_allowed(func) + && func.def.return_distinctness != noirc_abi::AbiDistinctness::DuplicationAllowed + { + self.push_err(ResolverError::DistinctNotAllowed { ident: func.name_ident().clone() }); + } + if attributes == Some(Attribute::Test) && !parameters.is_empty() { self.push_err(ResolverError::TestFunctionHasParameters { span: func.name_ident().span(), @@ -661,6 +667,7 @@ impl<'a> Resolver<'a> { typ, parameters: parameters.into(), return_visibility: func.def.return_visibility, + return_distinctness: func.def.return_distinctness, has_body: !func.def.body.is_empty(), } } @@ -670,7 +677,18 @@ impl<'a> Resolver<'a> { if self.in_contract() { !func.def.is_unconstrained && !func.def.is_open } else { - func.name() == "main" + func.name() == MAIN_FUNCTION + } + } + + /// True if the `distinct` keyword is allowed on a function's return type + fn distinct_allowed(&self, func: &NoirFunction) -> bool { + if self.in_contract() { 
+ // "open" and "unconstrained" functions are compiled to brillig and thus duplication of + // witness indices in their abis is not a concern. + !func.def.is_unconstrained && !func.def.is_open + } else { + func.name() == MAIN_FUNCTION } } diff --git a/crates/noirc_frontend/src/hir/type_check/mod.rs b/crates/noirc_frontend/src/hir/type_check/mod.rs index 97b1c71a0bc..5ebac6de9a3 100644 --- a/crates/noirc_frontend/src/hir/type_check/mod.rs +++ b/crates/noirc_frontend/src/hir/type_check/mod.rs @@ -219,6 +219,7 @@ mod test { ] .into(), return_visibility: noirc_abi::AbiVisibility::Private, + return_distinctness: noirc_abi::AbiDistinctness::DuplicationAllowed, has_body: true, }; interner.push_fn_meta(func_meta, func_id); diff --git a/crates/noirc_frontend/src/hir_def/function.rs b/crates/noirc_frontend/src/hir_def/function.rs index a9fafffe159..1f7399e5547 100644 --- a/crates/noirc_frontend/src/hir_def/function.rs +++ b/crates/noirc_frontend/src/hir_def/function.rs @@ -1,5 +1,5 @@ use iter_extended::vecmap; -use noirc_abi::{AbiParameter, AbiType, AbiVisibility}; +use noirc_abi::{AbiDistinctness, AbiParameter, AbiType, AbiVisibility}; use noirc_errors::{Location, Span}; use super::expr::{HirBlockExpression, HirExpression, HirIdent}; @@ -131,6 +131,8 @@ pub struct FuncMeta { pub return_visibility: AbiVisibility, + pub return_distinctness: AbiDistinctness, + /// The type of this function. Either a Type::Function /// or a Type::Forall for generic functions. 
pub typ: Type, diff --git a/crates/noirc_frontend/src/lexer/token.rs b/crates/noirc_frontend/src/lexer/token.rs index 0df1fc39938..6b021a3dcbb 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -421,6 +421,7 @@ pub enum Keyword { Contract, Crate, Dep, + Distinct, Else, Field, Fn, @@ -454,6 +455,7 @@ impl fmt::Display for Keyword { Keyword::Contract => write!(f, "contract"), Keyword::Crate => write!(f, "crate"), Keyword::Dep => write!(f, "dep"), + Keyword::Distinct => write!(f, "distinct"), Keyword::Else => write!(f, "else"), Keyword::Field => write!(f, "Field"), Keyword::Fn => write!(f, "fn"), @@ -490,6 +492,7 @@ impl Keyword { "contract" => Keyword::Contract, "crate" => Keyword::Crate, "dep" => Keyword::Dep, + "distinct" => Keyword::Distinct, "else" => Keyword::Else, "Field" => Keyword::Field, "fn" => Keyword::Fn, diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index e4339c8e367..04aec9a6726 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -221,11 +221,20 @@ impl Type { pub struct Program { pub functions: Vec, pub main_function_signature: FunctionSignature, + /// Indicates whether witness indices are allowed to reoccur in the ABI of the resulting ACIR. + /// + /// Note: this has no impact on monomorphization, and is simply attached here for ease of + /// forwarding to the next phase. 
+ pub return_distinctness: noirc_abi::AbiDistinctness, } impl Program { - pub fn new(functions: Vec, main_function_signature: FunctionSignature) -> Program { - Program { functions, main_function_signature } + pub fn new( + functions: Vec, + main_function_signature: FunctionSignature, + return_distinctness: noirc_abi::AbiDistinctness, + ) -> Program { + Program { functions, main_function_signature, return_distinctness } } pub fn main(&self) -> &Function { diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index bfce292d2eb..79c9bab7d8a 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -17,7 +17,7 @@ use std::collections::{BTreeMap, HashMap, VecDeque}; use crate::{ hir_def::{ expr::*, - function::{Param, Parameters}, + function::{FuncMeta, Param, Parameters}, stmt::{HirAssignStatement, HirLValue, HirLetStatement, HirPattern, HirStatement}, }, node_interner::{self, DefinitionKind, NodeInterner, StmtId}, @@ -88,7 +88,8 @@ pub fn monomorphize(main: node_interner::FuncId, interner: &NodeInterner) -> Pro } let functions = vecmap(monomorphizer.finished_functions, |(_, f)| f); - Program::new(functions, function_sig) + let FuncMeta { return_distinctness, .. } = interner.function_meta(&main); + Program::new(functions, function_sig, return_distinctness) } impl<'interner> Monomorphizer<'interner> { diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index f4793d06368..065b6362fb4 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -40,7 +40,7 @@ use crate::{ use chumsky::prelude::*; use iter_extended::vecmap; -use noirc_abi::AbiVisibility; +use noirc_abi::{AbiDistinctness, AbiVisibility}; use noirc_errors::{CustomDiagnostic, Span, Spanned}; /// Entry function for the parser - also handles lexing internally. 
@@ -162,7 +162,7 @@ fn function_definition(allow_self: bool) -> impl NoirParser { |( ( ((((attribute, (is_unconstrained, is_open)), name), generics), parameters), - (return_visibility, return_type), + ((return_distinctness, return_visibility), return_type), ), body, )| { @@ -177,6 +177,7 @@ fn function_definition(allow_self: bool) -> impl NoirParser { body, return_type, return_visibility, + return_distinctness, } .into() }, @@ -235,12 +236,18 @@ fn lambda_return_type() -> impl NoirParser { .map(|ret| ret.unwrap_or(UnresolvedType::Unspecified)) } -fn function_return_type() -> impl NoirParser<(AbiVisibility, UnresolvedType)> { +fn function_return_type() -> impl NoirParser<((AbiDistinctness, AbiVisibility), UnresolvedType)> { just(Token::Arrow) - .ignore_then(optional_visibility()) + .ignore_then(optional_distinctness()) + .then(optional_visibility()) .then(parse_type()) .or_not() - .map(|ret| ret.unwrap_or((AbiVisibility::Private, UnresolvedType::Unit))) + .map(|ret| { + ret.unwrap_or(( + (AbiDistinctness::DuplicationAllowed, AbiVisibility::Private), + UnresolvedType::Unit, + )) + }) } fn attribute() -> impl NoirParser { @@ -554,6 +561,13 @@ fn optional_visibility() -> impl NoirParser { }) } +fn optional_distinctness() -> impl NoirParser { + keyword(Keyword::Distinct).or_not().map(|opt| match opt { + Some(_) => AbiDistinctness::Distinct, + None => AbiDistinctness::DuplicationAllowed, + }) +} + fn maybe_comp_time() -> impl NoirParser { keyword(Keyword::CompTime).or_not().map(|opt| match opt { Some(_) => CompTime::Yes(None), @@ -1257,6 +1271,7 @@ mod test { "fn f(f: pub Field, y : Field, z : comptime Field) -> u8 { x + a }", "fn func_name(f: Field, y : pub Field, z : pub [u8;5],) {}", "fn func_name(x: [Field], y : [Field;2],y : pub [Field;2], z : pub [u8;5]) {}", + "fn main(x: pub u8, y: pub u8) -> distinct pub [u8; 2] { [x, y] }" ], ); From e551e55e4ed16c5dfb6e05f66389674d9a737fc5 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 26 Apr 2023 13:34:55 -0400 Subject: 
[PATCH 42/63] chore(ssa refactor): Implement ssa-gen for indexing, cast, constrain, if, unary (#1225) * Implement ssa-gen for if * Satisfy the clippy gods --- .../src/ssa_refactor/ir/cfg.rs | 23 ++-- .../src/ssa_refactor/ir/constant.rs | 4 +- .../src/ssa_refactor/ir/dfg.rs | 24 ++++- .../src/ssa_refactor/ir/function.rs | 2 +- .../src/ssa_refactor/ir/instruction.rs | 11 +- .../src/ssa_refactor/ir/printer.rs | 12 +-- .../ssa_builder/function_builder.rs | 101 ++++++++++++++++-- .../src/ssa_refactor/ssa_gen/context.rs | 11 +- .../src/ssa_refactor/ssa_gen/mod.rs | 99 ++++++++++++----- .../src/ssa_refactor/ssa_gen/value.rs | 10 ++ .../src/monomorphization/ast.rs | 1 + .../src/monomorphization/mod.rs | 2 +- 12 files changed, 230 insertions(+), 70 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs index 05b64e30ed8..3e469361c37 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -133,29 +133,27 @@ mod tests { // Build function of form // fn func { // block0(cond: u1): - // jmpif cond(), then: block2, else: block1 + // jmpif cond, then: block2, else: block1 // block1(): - // jmpif cond(), then: block1, else: block2 + // jmpif cond, then: block1, else: block2 // block2(): - // return + // return () // } let mut func = Function::new("func".into()); let block0_id = func.entry_block(); let cond = func.dfg.add_block_parameter(block0_id, Type::unsigned(1)); - let block1_id = func.dfg.new_block(); - let block2_id = func.dfg.new_block(); + let block1_id = func.dfg.make_block(); + let block2_id = func.dfg.make_block(); func.dfg[block0_id].set_terminator(TerminatorInstruction::JmpIf { condition: cond, then_destination: block2_id, else_destination: block1_id, - arguments: vec![], }); func.dfg[block1_id].set_terminator(TerminatorInstruction::JmpIf { condition: cond, then_destination: block1_id, else_destination: block2_id, - arguments: 
vec![], }); func.dfg[block2_id].set_terminator(TerminatorInstruction::Return { return_values: vec![] }); @@ -192,15 +190,15 @@ mod tests { // Modify function to form: // fn func { // block0(cond: u1): - // jmpif cond(), then: block1, else: ret_block + // jmpif cond, then: block1, else: ret_block // block1(): - // jmpif cond(), then: block1, else: block2 + // jmpif cond, then: block1, else: block2 // block2(): - // jmp ret_block + // jmp ret_block() // ret_block(): - // return + // return () // } - let ret_block_id = func.dfg.new_block(); + let ret_block_id = func.dfg.make_block(); func.dfg[ret_block_id] .set_terminator(TerminatorInstruction::Return { return_values: vec![] }); func.dfg[block2_id].set_terminator(TerminatorInstruction::Jmp { @@ -211,7 +209,6 @@ mod tests { condition: cond, then_destination: block1_id, else_destination: ret_block_id, - arguments: vec![], }); // Recompute new and changed blocks diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs index 6d5538d3410..4c793a144da 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs @@ -16,8 +16,8 @@ impl NumericConstant { Self(value) } - pub(crate) fn value(&self) -> &FieldElement { - &self.0 + pub(crate) fn value(&self) -> FieldElement { + self.0 } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 54ffd5a05f6..8acce876d90 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -2,7 +2,7 @@ use super::{ basic_block::{BasicBlock, BasicBlockId}, constant::{NumericConstant, NumericConstantId}, function::Signature, - instruction::{Instruction, InstructionId, InstructionResultType}, + instruction::{Instruction, InstructionId, InstructionResultType, TerminatorInstruction}, map::{DenseMap, Id, SecondaryMap, TwoWayMap}, types::Type, 
value::{Value, ValueId}, @@ -75,14 +75,14 @@ impl DataFlowGraph { /// Creates a new basic block with no parameters. /// After being created, the block is unreachable in the current function /// until another block is made to jump to it. - pub(crate) fn new_block(&mut self) -> BasicBlockId { + pub(crate) fn make_block(&mut self) -> BasicBlockId { self.blocks.insert(BasicBlock::new(Vec::new())) } /// Creates a new basic block with the given parameters. /// After being created, the block is unreachable in the current function /// until another block is made to jump to it. - pub(crate) fn new_block_with_parameters( + pub(crate) fn make_block_with_parameters( &mut self, parameter_types: impl Iterator, ) -> BasicBlockId { @@ -230,6 +230,24 @@ impl DataFlowGraph { ) { self.blocks[block].insert_instruction(instruction); } + + /// Returns the field element represented by this value if it is a numeric constant. + /// Returns None if the given value is not a numeric constant. + pub(crate) fn get_numeric_constant(&self, value: Id) -> Option { + match self.values[value] { + Value::NumericConstant { constant, .. } => Some(self[constant].value()), + _ => None, + } + } + + /// Sets the terminator instruction for the given basic block + pub(crate) fn set_block_terminator( + &mut self, + block: BasicBlockId, + terminator: TerminatorInstruction, + ) { + self.blocks[block].set_terminator(terminator); + } } impl std::ops::Index for DataFlowGraph { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 63cd31142c4..1a735726029 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -32,7 +32,7 @@ impl Function { /// Note that any parameters to the function must be manually added later. 
pub(crate) fn new(name: String) -> Self { let mut dfg = DataFlowGraph::default(); - let entry_block = dfg.new_block(); + let entry_block = dfg.make_block(); Self { name, source_locations: SecondaryMap::new(), entry_block, dfg } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index dcab6e04006..11c6b8dc05f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -150,14 +150,9 @@ pub(crate) enum TerminatorInstruction { /// /// Jump If /// - /// If the condition is true: jump to the specified `then_destination` with `arguments`. - /// Otherwise, jump to the specified `else_destination` with `arguments`. - JmpIf { - condition: ValueId, - then_destination: BasicBlockId, - else_destination: BasicBlockId, - arguments: Vec, - }, + /// If the condition is true: jump to the specified `then_destination`. + /// Otherwise, jump to the specified `else_destination`. 
+ JmpIf { condition: ValueId, then_destination: BasicBlockId, else_destination: BasicBlockId }, /// Unconditional Jump /// diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 1a7737e97b0..a711482e08c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -57,17 +57,11 @@ pub(crate) fn display_terminator( Some(TerminatorInstruction::Jmp { destination, arguments }) => { writeln!(f, " jmp {}({})", destination, value_list(arguments)) } - Some(TerminatorInstruction::JmpIf { - condition, - arguments, - then_destination, - else_destination, - }) => { - let args = value_list(arguments); + Some(TerminatorInstruction::JmpIf { condition, then_destination, else_destination }) => { writeln!( f, - " jmpif {}({}) then: {}, else: {}", - condition, args, then_destination, else_destination + " jmpif {} then: {}, else: {}", + condition, then_destination, else_destination ) } Some(TerminatorInstruction::Return { return_values }) => { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index b30ff11c2e1..c0a94be6f80 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -3,7 +3,7 @@ use acvm::FieldElement; use crate::ssa_refactor::ir::{ basic_block::BasicBlockId, function::{Function, FunctionId}, - instruction::{Binary, BinaryOp, Instruction}, + instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, types::Type, value::ValueId, }; @@ -62,8 +62,12 @@ impl<'ssa> FunctionBuilder<'ssa> { } /// Insert a numeric constant into the current function - pub(crate) fn numeric_constant(&mut self, value: FieldElement, typ: Type) -> ValueId { - self.current_function.dfg.make_constant(value, typ) + pub(crate) fn 
numeric_constant( + &mut self, + value: impl Into, + typ: Type, + ) -> ValueId { + self.current_function.dfg.make_constant(value.into(), typ) } /// Insert a numeric constant into the current function of type Field @@ -71,6 +75,19 @@ impl<'ssa> FunctionBuilder<'ssa> { self.numeric_constant(value.into(), Type::field()) } + pub(crate) fn type_of_value(&self, value: ValueId) -> Type { + self.current_function.dfg.type_of_value(value) + } + + pub(crate) fn insert_block(&mut self) -> BasicBlockId { + self.current_function.dfg.make_block() + } + + pub(crate) fn add_block_parameter(&mut self, block: BasicBlockId, typ: Type) -> ValueId { + self.current_function.dfg.add_block_parameter(block, typ) + } + + /// Inserts a new instruction at the end of the current block and returns its results fn insert_instruction( &mut self, instruction: Instruction, @@ -81,6 +98,13 @@ impl<'ssa> FunctionBuilder<'ssa> { self.current_function.dfg.instruction_results(id) } + /// Switch to inserting instructions in the given block. + /// Expects the given block to be within the same function. If you want to insert + /// instructions into a new function, call new_function instead. + pub(crate) fn switch_to_block(&mut self, block: BasicBlockId) { + self.current_block = block; + } + /// Insert an allocate instruction at the end of the current block, allocating the /// given amount of field elements. Returns the result of the allocate instruction, /// which is always a Reference to the allocated data. @@ -88,16 +112,31 @@ impl<'ssa> FunctionBuilder<'ssa> { self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] } - /// Insert a Load instruction at the end of the current block, loading from the given address - /// which should point to a previous Allocate instruction. Note that this is limited to loading - /// a single value. Loading multiple values (such as a tuple) will require multiple loads. 
+ /// Insert a Load instruction at the end of the current block, loading from the given offset + /// of the given address which should point to a previous Allocate instruction. Note that + /// this is limited to loading a single value. Loading multiple values (such as a tuple) + /// will require multiple loads. + /// 'offset' is in units of FieldElements here. So loading the fourth FieldElement stored in + /// an array will have an offset of 3. /// Returns the element that was loaded. - pub(crate) fn insert_load(&mut self, address: ValueId, type_to_load: Type) -> ValueId { + pub(crate) fn insert_load( + &mut self, + mut address: ValueId, + offset: ValueId, + type_to_load: Type, + ) -> ValueId { + if let Some(offset) = self.current_function.dfg.get_numeric_constant(offset) { + if !offset.is_zero() { + let offset = self.field_constant(offset); + address = self.insert_binary(address, BinaryOp::Add, offset); + } + }; self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] } /// Insert a Store instruction at the end of the current block, storing the given element - /// at the given address. Expects that the address points to a previous Allocate instruction. + /// at the given address. Expects that the address points somewhere + /// within a previous Allocate instruction. pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { self.insert_instruction(Instruction::Store { address, value }, None); } @@ -119,4 +158,50 @@ impl<'ssa> FunctionBuilder<'ssa> { pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { self.insert_instruction(Instruction::Not(rhs), None)[0] } + + /// Insert a cast instruction at the end of the current block. + /// Returns the result of the cast instruction. + pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { + self.insert_instruction(Instruction::Cast(value, typ), None)[0] + } + + /// Insert a constrain instruction at the end of the current block. 
+ pub(crate) fn insert_constrain(&mut self, boolean: ValueId) { + self.insert_instruction(Instruction::Constrain(boolean), None); + } + + /// Terminates the current block with the given terminator instruction + fn terminate_block_with(&mut self, terminator: TerminatorInstruction) { + self.current_function.dfg.set_block_terminator(self.current_block, terminator); + } + + /// Terminate the current block with a jmp instruction to jmp to the given + /// block with the given arguments. + pub(crate) fn terminate_with_jmp( + &mut self, + destination: BasicBlockId, + arguments: Vec, + ) { + self.terminate_block_with(TerminatorInstruction::Jmp { destination, arguments }); + } + + /// Terminate the current block with a jmpif instruction to jmp with the given arguments + /// block with the given arguments. + pub(crate) fn terminate_with_jmpif( + &mut self, + condition: ValueId, + then_destination: BasicBlockId, + else_destination: BasicBlockId, + ) { + self.terminate_block_with(TerminatorInstruction::JmpIf { + condition, + then_destination, + else_destination, + }); + } + + /// Terminate the current block with a return instruction + pub(crate) fn terminate_with_return(&mut self, return_values: Vec) { + self.terminate_block_with(TerminatorInstruction::Return { return_values }); + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index f76a6675077..30855b8fdc8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -129,7 +129,7 @@ impl<'a> FunctionContext<'a> { /// Insert a unit constant into the current function if not already /// present, and return its value pub(super) fn unit_value(&mut self) -> Values { - self.builder.numeric_constant(0u128.into(), Type::Unit).into() + self.builder.numeric_constant(0u128, Type::Unit).into() } /// Insert a binary instruction at the end of the current block. 
@@ -155,6 +155,15 @@ impl<'a> FunctionContext<'a> { } result.into() } + + /// Create a const offset of an address for an array load or store + pub(super) fn make_offset(&mut self, mut address: ValueId, offset: u128) -> ValueId { + if offset != 0 { + let offset = self.builder.field_constant(offset); + address = self.builder.insert_binary(address, BinaryOp::Add, offset); + } + address + } } /// True if the given operator cannot be encoded directly and needs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 553b5eb2218..04fb88d76d0 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -1,7 +1,6 @@ mod context; mod value; -use acvm::FieldElement; use context::SharedContext; use iter_extended::vecmap; use noirc_errors::Location; @@ -88,15 +87,11 @@ impl<'a> FunctionContext<'a> { self.builder.numeric_constant(*value, typ).into() } ast::Literal::Bool(value) => { - // Booleans are represented as u1s with 0 = false, 1 = true - let typ = Type::unsigned(1); - let value = FieldElement::from(*value as u128); - self.builder.numeric_constant(value, typ).into() + self.builder.numeric_constant(*value as u128, Type::bool()).into() } ast::Literal::Str(string) => { let elements = vecmap(string.as_bytes(), |byte| { - let value = FieldElement::from(*byte as u128); - self.builder.numeric_constant(value, Type::field()).into() + self.builder.numeric_constant(*byte as u128, Type::field()).into() }); self.codegen_array(elements, Tree::Leaf(Type::field())) } @@ -110,15 +105,10 @@ impl<'a> FunctionContext<'a> { })); // Now we must manually store all the elements into the array - let mut i = 0; + let mut i = 0u128; for element in elements { element.for_each(|value| { - let address = if i == 0 { - array - } else { - let offset = self.builder.field_constant(i as u128); - self.builder.insert_binary(array, BinaryOp::Add, offset) - }; + let 
address = self.make_offset(array, i); self.builder.insert_store(address, value.eval()); i += 1; }); @@ -135,8 +125,16 @@ impl<'a> FunctionContext<'a> { result } - fn codegen_unary(&mut self, _unary: &ast::Unary) -> Values { - todo!() + fn codegen_unary(&mut self, unary: &ast::Unary) -> Values { + let rhs = self.codegen_non_tuple_expression(&unary.rhs); + match unary.operator { + noirc_frontend::UnaryOp::Not => self.builder.insert_not(rhs).into(), + noirc_frontend::UnaryOp::Minus => { + let typ = self.builder.type_of_value(rhs); + let zero = self.builder.numeric_constant(0u128, typ); + self.builder.insert_binary(zero, BinaryOp::Sub, rhs).into() + } + } } fn codegen_binary(&mut self, binary: &ast::Binary) -> Values { @@ -145,20 +143,71 @@ impl<'a> FunctionContext<'a> { self.insert_binary(lhs, binary.operator, rhs) } - fn codegen_index(&mut self, _index: &ast::Index) -> Values { - todo!() + fn codegen_index(&mut self, index: &ast::Index) -> Values { + let array = self.codegen_non_tuple_expression(&index.collection); + let base_offset = self.codegen_non_tuple_expression(&index.index); + + // base_index = base_offset * type_size + let type_size = Self::convert_type(&index.element_type).size_of_type(); + let type_size = self.builder.field_constant(type_size as u128); + let base_index = self.builder.insert_binary(base_offset, BinaryOp::Mul, type_size); + + let mut field_index = 0u128; + self.map_type(&index.element_type, |ctx, typ| { + let offset = ctx.make_offset(base_index, field_index); + field_index += 1; + ctx.builder.insert_load(array, offset, typ).into() + }) } - fn codegen_cast(&mut self, _cast: &ast::Cast) -> Values { - todo!() + fn codegen_cast(&mut self, cast: &ast::Cast) -> Values { + let lhs = self.codegen_non_tuple_expression(&cast.lhs); + let typ = Self::convert_non_tuple_type(&cast.r#type); + self.builder.insert_cast(lhs, typ).into() } fn codegen_for(&mut self, _for_expr: &ast::For) -> Values { todo!() } - fn codegen_if(&mut self, _if_expr: &ast::If) -> 
Values { - todo!() + fn codegen_if(&mut self, if_expr: &ast::If) -> Values { + let condition = self.codegen_non_tuple_expression(&if_expr.condition); + + let then_block = self.builder.insert_block(); + let else_block = self.builder.insert_block(); + + self.builder.terminate_with_jmpif(condition, then_block, else_block); + + self.builder.switch_to_block(then_block); + let then_value = self.codegen_expression(&if_expr.consequence); + + let mut result = self.unit_value(); + + if let Some(alternative) = &if_expr.alternative { + self.builder.switch_to_block(else_block); + let else_value = self.codegen_expression(alternative); + + let end_block = self.builder.insert_block(); + + // Create block arguments for the end block as needed to branch to + // with our then and else value. + result = self.map_type(&if_expr.typ, |ctx, typ| { + ctx.builder.add_block_parameter(end_block, typ).into() + }); + + self.builder.terminate_with_jmp(end_block, else_value.into_value_list()); + + // Must also set the then block to jmp to the end now + self.builder.switch_to_block(then_block); + self.builder.terminate_with_jmp(end_block, then_value.into_value_list()); + self.builder.switch_to_block(end_block); + } else { + // In the case we have no 'else', the 'else' block is actually the end block. 
+ self.builder.terminate_with_jmp(else_block, vec![]); + self.builder.switch_to_block(else_block); + } + + result } fn codegen_tuple(&mut self, tuple: &[Expression]) -> Values { @@ -182,8 +231,10 @@ impl<'a> FunctionContext<'a> { todo!() } - fn codegen_constrain(&mut self, _constrain: &Expression, _location: Location) -> Values { - todo!() + fn codegen_constrain(&mut self, expr: &Expression, _location: Location) -> Values { + let boolean = self.codegen_non_tuple_expression(expr); + self.builder.insert_constrain(boolean); + self.unit_value() } fn codegen_assign(&mut self, _assign: &ast::Assign) -> Values { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 83a5d15c904..31a93374940 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -1,3 +1,5 @@ +use iter_extended::vecmap; + use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; @@ -76,3 +78,11 @@ impl Tree { self.count_leaves() } } + +impl Tree { + /// Flattens and evaluates this Tree into a list of ir values + /// for return statements, branching instructions, or function parameters. 
+ pub(super) fn into_value_list(self) -> Vec { + vecmap(self.flatten(), Value::eval) + } +} diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index 04aec9a6726..bad88885749 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -131,6 +131,7 @@ pub struct Call { pub struct Index { pub collection: Box, pub index: Box, + pub element_type: Type, pub location: Location, } diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index 79c9bab7d8a..3c3c602d132 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -412,7 +412,7 @@ impl<'interner> Monomorphizer<'interner> { | ast::Type::Bool | ast::Type::Unit | ast::Type::Function(_, _) => { - ast::Expression::Index(ast::Index { collection, index, location }) + ast::Expression::Index(ast::Index { collection, index, element_type, location }) } ast::Type::Tuple(elements) => { From 0dc2cac5bc26d277a0e6377fd774e0ec9c8d3531 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Wed, 26 Apr 2023 19:52:25 +0200 Subject: [PATCH 43/63] feat(noir): added assert keyword (#1227) * feat(keyword): added assert keyword * test(keyword): added integration test * fix(parser): separate parser for assertion * test(parser): fix test * style(parser): fix whitespaces * refactor(keyword): assert use constrain parser * feat(parser): give assertions function form * fix(lexer): update the basic test * style: label the whole assertion statement --- .../tests/test_data/assert/Nargo.toml | 5 ++ .../tests/test_data/assert/Prover.toml | 1 + .../tests/test_data/assert/src/main.nr | 3 ++ crates/noirc_frontend/src/lexer/lexer.rs | 10 ++++ crates/noirc_frontend/src/lexer/token.rs | 3 ++ crates/noirc_frontend/src/parser/parser.rs | 53 +++++++++++++++++++ 6 files changed, 75 
insertions(+) create mode 100644 crates/nargo_cli/tests/test_data/assert/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/assert/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/assert/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/assert/Nargo.toml b/crates/nargo_cli/tests/test_data/assert/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/assert/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/assert/Prover.toml b/crates/nargo_cli/tests/test_data/assert/Prover.toml new file mode 100644 index 00000000000..4dd6b405159 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/assert/Prover.toml @@ -0,0 +1 @@ +x = "1" diff --git a/crates/nargo_cli/tests/test_data/assert/src/main.nr b/crates/nargo_cli/tests/test_data/assert/src/main.nr new file mode 100644 index 00000000000..00e94414c0b --- /dev/null +++ b/crates/nargo_cli/tests/test_data/assert/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field) { + assert(x == 1); +} diff --git a/crates/noirc_frontend/src/lexer/lexer.rs b/crates/noirc_frontend/src/lexer/lexer.rs index c1ff328a3ed..5e0d99cfed9 100644 --- a/crates/noirc_frontend/src/lexer/lexer.rs +++ b/crates/noirc_frontend/src/lexer/lexer.rs @@ -560,6 +560,7 @@ fn test_basic_language_syntax() { x * y; }; constrain mul(five, ten) == 50; + assert(ten + five == 15); "; let expected = vec![ @@ -601,6 +602,15 @@ fn test_basic_language_syntax() { Token::Equal, Token::Int(50_i128.into()), Token::Semicolon, + Token::Keyword(Keyword::Assert), + Token::LeftParen, + Token::Ident("ten".to_string()), + Token::Plus, + Token::Ident("five".to_string()), + Token::Equal, + Token::Int(15_i128.into()), + Token::RightParen, + Token::Semicolon, Token::EOF, ]; let mut lexer = Lexer::new(input); diff --git a/crates/noirc_frontend/src/lexer/token.rs 
b/crates/noirc_frontend/src/lexer/token.rs index 6b021a3dcbb..bfcd0f4be51 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -414,6 +414,7 @@ impl AsRef for Attribute { #[cfg_attr(test, derive(strum_macros::EnumIter))] pub enum Keyword { As, + Assert, Bool, Char, CompTime, @@ -448,6 +449,7 @@ impl fmt::Display for Keyword { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Keyword::As => write!(f, "as"), + Keyword::Assert => write!(f, "assert"), Keyword::Bool => write!(f, "bool"), Keyword::Char => write!(f, "char"), Keyword::CompTime => write!(f, "comptime"), @@ -485,6 +487,7 @@ impl Keyword { pub(crate) fn lookup_keyword(word: &str) -> Option { let keyword = match word { "as" => Keyword::As, + "assert" => Keyword::Assert, "bool" => Keyword::Bool, "char" => Keyword::Char, "comptime" => Keyword::CompTime, diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 065b6362fb4..15ed0d74222 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -435,6 +435,7 @@ where { choice(( constrain(expr_parser.clone()), + assertion(expr_parser.clone()), declaration(expr_parser.clone()), assignment(expr_parser.clone()), expr_parser.map(Statement::Expression), @@ -449,6 +450,15 @@ where .map(|expr| Statement::Constrain(ConstrainStatement(expr))) } +fn assertion<'a, P>(expr_parser: P) -> impl NoirParser + 'a +where + P: ExprParser + 'a, +{ + ignore_then_commit(keyword(Keyword::Assert), parenthesized(expr_parser)) + .labelled("statement") + .map(|expr| Statement::Constrain(ConstrainStatement(expr))) +} + fn declaration<'a, P>(expr_parser: P) -> impl NoirParser + 'a where P: ExprParser + 'a, @@ -1228,6 +1238,47 @@ mod test { ); } + #[test] + fn parse_assert() { + parse_with(assertion(expression()), "assert(x == y)").unwrap(); + + // Currently we disallow constrain statements where the outer infix operator + // 
produces a value. This would require an implicit `==` which + // may not be intuitive to the user. + // + // If this is deemed useful, one would either apply a transformation + // or interpret it with an `==` in the evaluator + let disallowed_operators = vec![ + BinaryOpKind::And, + BinaryOpKind::Subtract, + BinaryOpKind::Divide, + BinaryOpKind::Multiply, + BinaryOpKind::Or, + ]; + + for operator in disallowed_operators { + let src = format!("assert(x {} y);", operator.as_string()); + parse_with(assertion(expression()), &src).unwrap_err(); + } + + // These are general cases which should always work. + // + // The first case is the most noteworthy. It contains two `==` + // The first (inner) `==` is a predicate which returns 0/1 + // The outer layer is an infix `==` which is + // associated with the Constrain statement + parse_all( + assertion(expression()), + vec![ + "assert(((x + y) == k) + z == y)", + "assert((x + !y) == y)", + "assert((x ^ y) == y)", + "assert((x ^ y) == (y + m))", + "assert(x + x ^ x == y | m)", + ], + ); + } + #[test] fn parse_let() { // Why is it valid to specify a let declaration as having type u8? 
@@ -1483,7 +1534,9 @@ mod test { ("let", 3, "let $error: unspecified = Error"), ("foo = one two three", 1, "foo = plain::one"), ("constrain", 1, "constrain Error"), + ("assert", 1, "constrain Error"), ("constrain x ==", 1, "constrain (plain::x == Error)"), + ("assert(x ==)", 1, "constrain (plain::x == Error)"), ]; let show_errors = |v| vecmap(v, ToString::to_string).join("\n"); From 407cecbcd410b59bae20907c2c62b512d41a5cb9 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 26 Apr 2023 16:27:11 -0400 Subject: [PATCH 44/63] chore(ssa refactor): Fix recursive printing of blocks (#1230) * Implement ssa-gen for if * Satisfy the clippy gods * Fix printing bug * Print constants directly * chore(ssa refactor): Implement for loops (#1233) Impl for loops --- .../src/ssa_refactor/ir/dfg.rs | 11 ++- .../src/ssa_refactor/ir/map.rs | 16 +++- .../src/ssa_refactor/ir/printer.rs | 63 +++++++++----- .../src/ssa_refactor/ssa_gen/context.rs | 5 ++ .../src/ssa_refactor/ssa_gen/mod.rs | 30 ++++++- crates/noirc_frontend/src/parser/parser.rs | 82 +++++++++---------- 6 files changed, 142 insertions(+), 65 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 8acce876d90..9b713eee06e 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -234,8 +234,17 @@ impl DataFlowGraph { /// Returns the field element represented by this value if it is a numeric constant. /// Returns None if the given value is not a numeric constant. pub(crate) fn get_numeric_constant(&self, value: Id) -> Option { + self.get_numeric_constant_with_type(value).map(|(value, _typ)| value) + } + + /// Returns the field element and type represented by this value if it is a numeric constant. + /// Returns None if the given value is not a numeric constant. 
+ pub(crate) fn get_numeric_constant_with_type( + &self, + value: Id, + ) -> Option<(FieldElement, Type)> { match self.values[value] { - Value::NumericConstant { constant, .. } => Some(self[constant].value()), + Value::NumericConstant { constant, typ } => Some((self[constant].value(), typ)), _ => None, } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index 5937b374726..24b30241293 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -69,9 +69,21 @@ impl std::fmt::Debug for Id { } } -impl std::fmt::Display for Id { +impl std::fmt::Display for Id { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "${}", self.index) + write!(f, "b{}", self.index) + } +} + +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "v{}", self.index) + } +} + +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "f{}", self.index) } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index a711482e08c..57c573c7bd4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -1,5 +1,8 @@ //! This file is for pretty-printing the SSA IR in a human-readable form for debugging. 
-use std::fmt::{Formatter, Result}; +use std::{ + collections::HashSet, + fmt::{Formatter, Result}, +}; use iter_extended::vecmap; @@ -12,19 +15,26 @@ use super::{ pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { writeln!(f, "fn {} {{", function.name)?; - display_block_with_successors(function, function.entry_block, f)?; + display_block_with_successors(function, function.entry_block, &mut HashSet::new(), f)?; write!(f, "}}") } +/// Displays a block followed by all of its successors recursively. +/// This uses a HashSet to keep track of the visited blocks. Otherwise, +/// there would be infinite recursion for any loops in the IR. pub(crate) fn display_block_with_successors( function: &Function, block_id: BasicBlockId, + visited: &mut HashSet, f: &mut Formatter, ) -> Result { display_block(function, block_id, f)?; + visited.insert(block_id); for successor in function.dfg[block_id].successors() { - display_block(function, successor, f)?; + if !visited.contains(&successor) { + display_block_with_successors(function, successor, visited, f)?; + } } Ok(()) } @@ -36,26 +46,36 @@ pub(crate) fn display_block( ) -> Result { let block = &function.dfg[block_id]; - writeln!(f, "{}({}):", block_id, value_list(block.parameters()))?; + writeln!(f, " {}({}):", block_id, value_list(function, block.parameters()))?; for instruction in block.instructions() { display_instruction(function, *instruction, f)?; } - display_terminator(block.terminator(), f) + display_terminator(function, block.terminator(), f) +} + +/// Specialize displaying value ids so that if they refer to constants we +/// print the constant directly +fn value(function: &Function, id: ValueId) -> String { + match function.dfg.get_numeric_constant_with_type(id) { + Some((value, typ)) => format!("{} {}", value, typ), + None => id.to_string(), + } } -fn value_list(values: &[ValueId]) -> String { - vecmap(values, ToString::to_string).join(", ") +fn value_list(function: &Function, values: 
&[ValueId]) -> String { + vecmap(values, |id| value(function, *id)).join(", ") } pub(crate) fn display_terminator( + function: &Function, terminator: Option<&TerminatorInstruction>, f: &mut Formatter, ) -> Result { match terminator { Some(TerminatorInstruction::Jmp { destination, arguments }) => { - writeln!(f, " jmp {}({})", destination, value_list(arguments)) + writeln!(f, " jmp {}({})", destination, value_list(function, arguments)) } Some(TerminatorInstruction::JmpIf { condition, then_destination, else_destination }) => { writeln!( @@ -65,7 +85,7 @@ pub(crate) fn display_terminator( ) } Some(TerminatorInstruction::Return { return_values }) => { - writeln!(f, " return {}", value_list(return_values)) + writeln!(f, " return {}", value_list(function, return_values)) } None => writeln!(f, " (no terminator instruction)"), } @@ -81,29 +101,34 @@ pub(crate) fn display_instruction( let results = function.dfg.instruction_results(instruction); if !results.is_empty() { - write!(f, "{} = ", value_list(results))?; + write!(f, "{} = ", value_list(function, results))?; } + let show = |id| value(function, id); + match &function.dfg[instruction] { Instruction::Binary(binary) => { - writeln!(f, "{} {}, {}", binary.operator, binary.lhs, binary.rhs) + writeln!(f, "{} {}, {}", binary.operator, show(binary.lhs), show(binary.rhs)) } - Instruction::Cast(value, typ) => writeln!(f, "cast {value} as {typ}"), - Instruction::Not(value) => writeln!(f, "not {value}"), + Instruction::Cast(lhs, typ) => writeln!(f, "cast {} as {typ}", show(*lhs)), + Instruction::Not(rhs) => writeln!(f, "not {}", show(*rhs)), Instruction::Truncate { value, bit_size, max_bit_size } => { - writeln!(f, "truncate {value} to {bit_size} bits, max_bit_size: {max_bit_size}") + let value = show(*value); + writeln!(f, "truncate {value} to {bit_size} bits, max_bit_size: {max_bit_size}",) } Instruction::Constrain(value) => { - writeln!(f, "constrain {value}") + writeln!(f, "constrain {}", show(*value)) } Instruction::Call { 
func, arguments } => { - writeln!(f, "call {func}({})", value_list(arguments)) + writeln!(f, "call {func}({})", value_list(function, arguments)) } Instruction::Intrinsic { func, arguments } => { - writeln!(f, "intrinsic {func}({})", value_list(arguments)) + writeln!(f, "intrinsic {func}({})", value_list(function, arguments)) } Instruction::Allocate { size } => writeln!(f, "alloc {size} fields"), - Instruction::Load { address } => writeln!(f, "load {address}"), - Instruction::Store { address, value } => writeln!(f, "store {value} at {address}"), + Instruction::Load { address } => writeln!(f, "load {}", show(*address)), + Instruction::Store { address, value } => { + writeln!(f, "store {} at {}", show(*address), show(*value)) + } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 30855b8fdc8..48175ebb52b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -164,6 +164,11 @@ impl<'a> FunctionContext<'a> { } address } + + pub(super) fn define(&mut self, id: LocalId, value: Values) { + let existing = self.definitions.insert(id, value); + assert!(existing.is_none(), "Variable {id:?} was defined twice in ssa-gen pass"); + } } /// True if the given operator cannot be encoded directly and needs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 04fb88d76d0..f8faf8eeeb4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -166,8 +166,34 @@ impl<'a> FunctionContext<'a> { self.builder.insert_cast(lhs, typ).into() } - fn codegen_for(&mut self, _for_expr: &ast::For) -> Values { - todo!() + fn codegen_for(&mut self, for_expr: &ast::For) -> Values { + let loop_entry = self.builder.insert_block(); + let loop_body = self.builder.insert_block(); + let 
loop_end = self.builder.insert_block(); + + // this is the 'i' in `for i in start .. end { block }` + let loop_index = self.builder.add_block_parameter(loop_entry, Type::field()); + + let start_index = self.codegen_non_tuple_expression(&for_expr.start_range); + let end_index = self.codegen_non_tuple_expression(&for_expr.end_range); + + self.builder.terminate_with_jmp(loop_entry, vec![start_index]); + + // Compile the loop entry block + self.builder.switch_to_block(loop_entry); + let jump_condition = self.builder.insert_binary(loop_index, BinaryOp::Lt, end_index); + self.builder.terminate_with_jmpif(jump_condition, loop_body, loop_end); + + // Compile the loop body + self.builder.switch_to_block(loop_body); + self.define(for_expr.index_variable, loop_index.into()); + self.codegen_expression(&for_expr.block); + let new_loop_index = self.make_offset(loop_index, 1); + self.builder.terminate_with_jmp(loop_entry, vec![new_loop_index]); + + // Finish by switching back to the end of the loop + self.builder.switch_to_block(loop_end); + self.unit_value() } fn codegen_if(&mut self, if_expr: &ast::If) -> Values { diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 15ed0d74222..575a9403ea8 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -1238,46 +1238,46 @@ mod test { ); } - #[test] - fn parse_assert() { - parse_with(assertion(expression()), "assert(x == y)").unwrap(); - - // Currently we disallow constrain statements where the outer infix operator - // produces a value. This would require an implicit `==` which - // may not be intuitive to the user. 
- // - // If this is deemed useful, one would either apply a transformation - // or interpret it with an `==` in the evaluator - let disallowed_operators = vec![ - BinaryOpKind::And, - BinaryOpKind::Subtract, - BinaryOpKind::Divide, - BinaryOpKind::Multiply, - BinaryOpKind::Or, - ]; - - for operator in disallowed_operators { - let src = format!("assert(x {} y);", operator.as_string()); - parse_with(assertion(expression()), &src).unwrap_err(); - } - - // These are general cases which should always work. - // - // The first case is the most noteworthy. It contains two `==` - // The first (inner) `==` is a predicate which returns 0/1 - // The outer layer is an infix `==` which is - // associated with the Constrain statement - parse_all( - assertion(expression()), - vec![ - "assert(((x + y) == k) + z == y)", - "assert((x + !y) == y)", - "assert((x ^ y) == y)", - "assert((x ^ y) == (y + m))", - "assert(x + x ^ x == y | m)", - ], - ); - } + #[test] + fn parse_assert() { + parse_with(assertion(expression()), "assert(x == y)").unwrap(); + + // Currently we disallow constrain statements where the outer infix operator + // produces a value. This would require an implicit `==` which + // may not be intuitive to the user. + // + // If this is deemed useful, one would either apply a transformation + // or interpret it with an `==` in the evaluator + let disallowed_operators = vec![ + BinaryOpKind::And, + BinaryOpKind::Subtract, + BinaryOpKind::Divide, + BinaryOpKind::Multiply, + BinaryOpKind::Or, + ]; + + for operator in disallowed_operators { + let src = format!("assert(x {} y);", operator.as_string()); + parse_with(assertion(expression()), &src).unwrap_err(); + } + + // These are general cases which should always work. + // + // The first case is the most noteworthy. 
It contains two `==` + // The first (inner) `==` is a predicate which returns 0/1 + // The outer layer is an infix `==` which is + // associated with the Constrain statement + parse_all( + assertion(expression()), + vec![ + "assert(((x + y) == k) + z == y)", + "assert((x + !y) == y)", + "assert((x ^ y) == y)", + "assert((x ^ y) == (y + m))", + "assert(x + x ^ x == y | m)", + ], + ); + } #[test] fn parse_let() { @@ -1322,7 +1322,7 @@ mod test { "fn f(f: pub Field, y : Field, z : comptime Field) -> u8 { x + a }", "fn func_name(f: Field, y : pub Field, z : pub [u8;5],) {}", "fn func_name(x: [Field], y : [Field;2],y : pub [Field;2], z : pub [u8;5]) {}", - "fn main(x: pub u8, y: pub u8) -> distinct pub [u8; 2] { [x, y] }" + "fn main(x: pub u8, y: pub u8) -> distinct pub [u8; 2] { [x, y] }", ], ); From 62dcc5c287ab386caba6e74314f49aedbefc318c Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 26 Apr 2023 18:33:16 -0400 Subject: [PATCH 45/63] chore(ssa refactor): Implement mutable and immutable variables (#1234) * Implement ssa-gen for if * Satisfy the clippy gods * Fix printing bug * Print constants directly * Impl for loops * Implement immutable and mutable variables * chore(ssa refactor): Implement for loops (#1233) Impl for loops --- .../src/ssa_refactor/ir/dfg.rs | 16 +++- .../ssa_builder/function_builder.rs | 28 +++++- .../src/ssa_refactor/ssa_gen/context.rs | 46 ++++++++++ .../src/ssa_refactor/ssa_gen/mod.rs | 92 +++++++++++++------ .../src/ssa_refactor/ssa_gen/value.rs | 54 +++++++++-- 5 files changed, 201 insertions(+), 35 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 9b713eee06e..ab2018b1df8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -126,6 +126,17 @@ impl DataFlowGraph { id } + /// Replace an instruction id with another. 
+ /// + /// This function should generally be avoided if possible in favor of inserting new + /// instructions since it does not check whether the instruction results of the removed + /// instruction are still in use. Users of this function thus need to ensure the old + /// instruction's results are no longer in use or are otherwise compatible with the + /// new instruction's result count and types. + pub(crate) fn replace_instruction(&mut self, id: Id, instruction: Instruction) { + self.instructions[id] = instruction; + } + /// Insert a value into the dfg's storage and return an id to reference it. /// Until the value is used in an instruction it is unreachable. pub(crate) fn make_value(&mut self, value: Value) -> ValueId { @@ -141,8 +152,11 @@ impl DataFlowGraph { /// Attaches results to the instruction, clearing any previous results. /// + /// This does not normally need to be called manually as it is called within + /// make_instruction automatically. + /// /// Returns the results of the instruction - fn make_instruction_results( + pub(crate) fn make_instruction_results( &mut self, instruction_id: InstructionId, ctrl_typevars: Option>, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index c0a94be6f80..d11e9a763cd 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -5,7 +5,7 @@ use crate::ssa_refactor::ir::{ function::{Function, FunctionId}, instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, types::Type, - value::ValueId, + value::{Value, ValueId}, }; use super::SharedBuilderContext; @@ -204,4 +204,30 @@ impl<'ssa> FunctionBuilder<'ssa> { pub(crate) fn terminate_with_return(&mut self, return_values: Vec) { self.terminate_block_with(TerminatorInstruction::Return { return_values }); } + + /// Mutates a load instruction into a 
store instruction. + /// + /// This function is used while generating ssa-form for assignments currently. + /// To re-use most of the expression infrastructure, the lvalue of an assignment + /// is compiled as an expression and to assign to it we replace the final load + /// (which should always be present to load a mutable value) with a store of the + /// assigned value. + pub(crate) fn mutate_load_into_store(&mut self, load_result: ValueId, value_to_store: ValueId) { + let (instruction, address) = match &self.current_function.dfg[load_result] { + Value::Instruction { instruction, .. } => { + match &self.current_function.dfg[*instruction] { + Instruction::Load { address } => (*instruction, *address), + other => { + panic!("mutate_load_into_store: Expected Load instruction, found {other:?}") + } + } + } + other => panic!("mutate_load_into_store: Expected Load instruction, found {other:?}"), + }; + + let store = Instruction::Store { address, value: value_to_store }; + self.current_function.dfg.replace_instruction(instruction, store); + // Clear the results of the previous load for safety + self.current_function.dfg.make_instruction_results(instruction, None); + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 48175ebb52b..10206e28c2d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -21,6 +21,7 @@ type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; pub(super) struct FunctionContext<'a> { definitions: HashMap, + pub(super) builder: FunctionBuilder<'a>, shared_context: &'a SharedContext, } @@ -165,10 +166,55 @@ impl<'a> FunctionContext<'a> { address } + /// Define a local variable to be some Values that can later be retrieved + /// by calling self.lookup(id) pub(super) fn define(&mut self, id: LocalId, value: Values) { let existing = self.definitions.insert(id, value); 
assert!(existing.is_none(), "Variable {id:?} was defined twice in ssa-gen pass"); } + + /// Looks up the value of a given local variable. Expects the variable to have + /// been previously defined or panics otherwise. + pub(super) fn lookup(&self, id: LocalId) -> Values { + self.definitions.get(&id).expect("lookup: variable not defined").clone() + } + + /// Extract the given field of the tuple. Panics if the given Values is not + /// a Tree::Branch or does not have enough fields. + pub(super) fn get_field(tuple: Values, field_index: usize) -> Values { + match tuple { + Tree::Branch(mut trees) => trees.remove(field_index), + Tree::Leaf(value) => { + unreachable!("Tried to extract tuple index {field_index} from non-tuple {value:?}") + } + } + } + + /// Mutate lhs to equal rhs + pub(crate) fn assign(&mut self, lhs: Values, rhs: Values) { + match (lhs, rhs) { + (Tree::Branch(lhs_branches), Tree::Branch(rhs_branches)) => { + assert_eq!(lhs_branches.len(), rhs_branches.len()); + + for (lhs, rhs) in lhs_branches.into_iter().zip(rhs_branches) { + self.assign(lhs, rhs); + } + } + (Tree::Leaf(lhs), Tree::Leaf(rhs)) => { + // Re-evaluating these should have no effect + let (lhs, rhs) = (lhs.eval(self), rhs.eval(self)); + + // Expect lhs to be previously evaluated. If it is a load we need to undo + // the load to get the address to store to. 
+ self.builder.mutate_load_into_store(lhs, rhs); + } + (lhs, rhs) => { + unreachable!( + "assign: Expected lhs and rhs values to match but found {lhs:?} and {rhs:?}" + ) + } + } + } } /// True if the given operator cannot be encoded directly and needs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index f8faf8eeeb4..a7880032d42 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -63,16 +63,16 @@ impl<'a> FunctionContext<'a> { /// Codegen any non-tuple expression so that we can unwrap the Values /// tree to return a single value for use with most SSA instructions. fn codegen_non_tuple_expression(&mut self, expr: &Expression) -> ValueId { - match self.codegen_expression(expr) { - Tree::Branch(branches) => { - panic!("codegen_non_tuple_expression called on tuple {branches:?}") - } - Tree::Leaf(value) => value.eval(), - } + self.codegen_expression(expr).into_leaf().eval(self) } - fn codegen_ident(&mut self, _ident: &ast::Ident) -> Values { - todo!() + fn codegen_ident(&mut self, ident: &ast::Ident) -> Values { + match &ident.definition { + ast::Definition::Local(id) => self.lookup(*id).map(|value| value.eval(self).into()), + ast::Definition::Function(_) => todo!(), + ast::Definition::Builtin(_) => todo!(), + ast::Definition::LowLevel(_) => todo!(), + } } fn codegen_literal(&mut self, literal: &ast::Literal) -> Values { @@ -107,9 +107,10 @@ impl<'a> FunctionContext<'a> { // Now we must manually store all the elements into the array let mut i = 0u128; for element in elements { - element.for_each(|value| { + element.for_each(|element| { let address = self.make_offset(array, i); - self.builder.insert_store(address, value.eval()); + let element = element.eval(self); + self.builder.insert_store(address, element); i += 1; }); } @@ -145,15 +146,26 @@ impl<'a> FunctionContext<'a> { fn codegen_index(&mut self, index: 
&ast::Index) -> Values { let array = self.codegen_non_tuple_expression(&index.collection); - let base_offset = self.codegen_non_tuple_expression(&index.index); + self.codegen_array_index(array, &index.index, &index.element_type) + } + + /// This is broken off from codegen_index so that it can also be + /// used to codegen a LValue::Index + fn codegen_array_index( + &mut self, + array: super::ir::value::ValueId, + index: &ast::Expression, + element_type: &ast::Type, + ) -> Values { + let base_offset = self.codegen_non_tuple_expression(index); // base_index = base_offset * type_size - let type_size = Self::convert_type(&index.element_type).size_of_type(); + let type_size = Self::convert_type(element_type).size_of_type(); let type_size = self.builder.field_constant(type_size as u128); let base_index = self.builder.insert_binary(base_offset, BinaryOp::Mul, type_size); let mut field_index = 0u128; - self.map_type(&index.element_type, |ctx, typ| { + self.map_type(element_type, |ctx, typ| { let offset = ctx.make_offset(base_index, field_index); field_index += 1; ctx.builder.insert_load(array, offset, typ).into() @@ -221,11 +233,13 @@ impl<'a> FunctionContext<'a> { ctx.builder.add_block_parameter(end_block, typ).into() }); - self.builder.terminate_with_jmp(end_block, else_value.into_value_list()); + let else_values = else_value.into_value_list(self); + self.builder.terminate_with_jmp(end_block, else_values); // Must also set the then block to jmp to the end now self.builder.switch_to_block(then_block); - self.builder.terminate_with_jmp(end_block, then_value.into_value_list()); + let then_values = then_value.into_value_list(self); + self.builder.terminate_with_jmp(end_block, then_values); self.builder.switch_to_block(end_block); } else { // In the case we have no 'else', the 'else' block is actually the end block. 
@@ -240,21 +254,30 @@ impl<'a> FunctionContext<'a> { Tree::Branch(vecmap(tuple, |expr| self.codegen_expression(expr))) } - fn codegen_extract_tuple_field(&mut self, tuple: &Expression, index: usize) -> Values { - match self.codegen_expression(tuple) { - Tree::Branch(mut trees) => trees.remove(index), - Tree::Leaf(value) => { - unreachable!("Tried to extract tuple index {index} from non-tuple {value:?}") - } - } + fn codegen_extract_tuple_field(&mut self, tuple: &Expression, field_index: usize) -> Values { + let tuple = self.codegen_expression(tuple); + Self::get_field(tuple, field_index) } fn codegen_call(&mut self, _call: &ast::Call) -> Values { todo!() } - fn codegen_let(&mut self, _let_expr: &ast::Let) -> Values { - todo!() + fn codegen_let(&mut self, let_expr: &ast::Let) -> Values { + let mut values = self.codegen_expression(&let_expr.expression); + + if let_expr.mutable { + values.map_mut(|value| { + let value = value.eval(self); + // Size is always 1 here since we're recursively unpacking tuples + let alloc = self.builder.insert_allocate(1); + self.builder.insert_store(alloc, value); + alloc.into() + }); + } + + self.define(let_expr.id, values); + self.unit_value() } fn codegen_constrain(&mut self, expr: &Expression, _location: Location) -> Values { @@ -263,8 +286,25 @@ impl<'a> FunctionContext<'a> { self.unit_value() } - fn codegen_assign(&mut self, _assign: &ast::Assign) -> Values { - todo!() + fn codegen_assign(&mut self, assign: &ast::Assign) -> Values { + let lhs = self.codegen_lvalue(&assign.lvalue); + let rhs = self.codegen_expression(&assign.expression); + self.assign(lhs, rhs); + self.unit_value() + } + + fn codegen_lvalue(&mut self, lvalue: &ast::LValue) -> Values { + match lvalue { + ast::LValue::Ident(ident) => self.codegen_ident(ident), + ast::LValue::Index { array, index, element_type, location: _ } => { + let array = self.codegen_lvalue(array).into_leaf().eval(self); + self.codegen_array_index(array, index, element_type) + } + 
ast::LValue::MemberAccess { object, field_index } => { + let object = self.codegen_lvalue(object); + Self::get_field(object, *field_index) + } + } } fn codegen_semi(&mut self, expr: &Expression) -> Values { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 31a93374940..52ff52d75f2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -4,25 +4,34 @@ use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; -#[derive(Debug)] +use super::context::FunctionContext; + +#[derive(Debug, Clone)] pub(super) enum Tree { Branch(Vec>), Leaf(T), } -#[derive(Debug, Clone)] +#[derive(Debug, Copy, Clone)] pub(super) enum Value { Normal(IrValueId), Function(IrFunctionId), + + /// A mutable variable that must be loaded as the given type before being used + Mutable(IrValueId, Type), } impl Value { /// Evaluate a value, returning an IrValue from it. - /// This has no effect on Value::Normal, but any variables will be updated with their latest - /// use. 
- pub(super) fn eval(self) -> IrValueId { + /// This has no effect on Value::Normal, but any variables will + /// need to be loaded from memory + pub(super) fn eval(self, ctx: &mut FunctionContext) -> IrValueId { match self { Value::Normal(value) => value, + Value::Mutable(address, typ) => { + let offset = ctx.builder.field_constant(0u128); + ctx.builder.insert_load(address, offset, typ) + } Value::Function(_) => panic!("Tried to evaluate a function value"), } } @@ -56,6 +65,37 @@ impl Tree { Tree::Leaf(value) => f(value), } } + + pub(super) fn map_mut(&mut self, mut f: impl FnMut(&T) -> Tree) { + self.map_mut_helper(&mut f); + } + + fn map_mut_helper(&mut self, f: &mut impl FnMut(&T) -> Tree) { + match self { + Tree::Branch(trees) => trees.iter_mut().for_each(|tree| tree.map_mut_helper(f)), + Tree::Leaf(value) => *self = f(value), + } + } + + pub(super) fn map(self, mut f: impl FnMut(T) -> Tree) -> Tree { + self.map_helper(&mut f) + } + + fn map_helper(self, f: &mut impl FnMut(T) -> Tree) -> Tree { + match self { + Tree::Branch(trees) => Tree::Branch(vecmap(trees, |tree| tree.map_helper(f))), + Tree::Leaf(value) => f(value), + } + } + + /// Unwraps this Tree into the value of the leaf node. Panics if + /// this Tree is a Branch + pub(super) fn into_leaf(self) -> T { + match self { + Tree::Branch(_) => panic!("into_leaf called on a Tree::Branch"), + Tree::Leaf(value) => value, + } + } } impl From for Values { @@ -82,7 +122,7 @@ impl Tree { impl Tree { /// Flattens and evaluates this Tree into a list of ir values /// for return statements, branching instructions, or function parameters. 
- pub(super) fn into_value_list(self) -> Vec { - vecmap(self.flatten(), Value::eval) + pub(super) fn into_value_list(self, ctx: &mut FunctionContext) -> Vec { + vecmap(self.flatten(), |value| value.eval(ctx)) } } From 64cf49d22b8c5859396f0a72ff84452b4ab0b1b7 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 27 Apr 2023 04:22:05 -0400 Subject: [PATCH 46/63] chore(ssa refactor): Implement function calls (#1235) * Implement ssa-gen for if * Satisfy the clippy gods * Fix printing bug * Print constants directly * Impl for loops * Implement immutable and mutable variables * Implement function calls --- .../src/ssa_refactor/ir/cfg.rs | 8 +- .../src/ssa_refactor/ir/function.rs | 18 +- .../src/ssa_refactor/ir/printer.rs | 4 +- .../ssa_builder/function_builder.rs | 233 ------------------ .../src/ssa_refactor/ssa_builder/mod.rs | 231 ++++++++++++++++- .../src/ssa_refactor/ssa_gen/context.rs | 93 +++++-- .../src/ssa_refactor/ssa_gen/mod.rs | 50 ++-- 7 files changed, 350 insertions(+), 287 deletions(-) delete mode 100644 crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs index 3e469361c37..42a2cd573a1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -115,13 +115,14 @@ impl ControlFlowGraph { #[cfg(test)] mod tests { - use crate::ssa_refactor::ir::{instruction::TerminatorInstruction, types::Type}; + use crate::ssa_refactor::ir::{instruction::TerminatorInstruction, map::Id, types::Type}; use super::{super::function::Function, ControlFlowGraph}; #[test] fn empty() { - let mut func = Function::new("func".into()); + let func_id = Id::test_new(0); + let mut func = Function::new("func".into(), func_id); let block_id = func.entry_block(); func.dfg[block_id].set_terminator(TerminatorInstruction::Return { return_values: vec![] }); @@ -139,7 +140,8 @@ mod tests { // block2(): // 
return () // } - let mut func = Function::new("func".into()); + let func_id = Id::test_new(0); + let mut func = Function::new("func".into(), func_id); let block0_id = func.entry_block(); let cond = func.dfg.add_block_parameter(block0_id, Type::unsigned(1)); let block1_id = func.dfg.make_block(); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 1a735726029..ca486d0258a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -18,10 +18,12 @@ pub(crate) struct Function { source_locations: SecondaryMap, /// The first basic block in the function - pub(super) entry_block: BasicBlockId, + entry_block: BasicBlockId, /// Name of the function for debugging only - pub(super) name: String, + name: String, + + id: FunctionId, pub(crate) dfg: DataFlowGraph, } @@ -30,10 +32,18 @@ impl Function { /// Creates a new function with an automatically inserted entry block. /// /// Note that any parameters to the function must be manually added later. 
- pub(crate) fn new(name: String) -> Self { + pub(crate) fn new(name: String, id: FunctionId) -> Self { let mut dfg = DataFlowGraph::default(); let entry_block = dfg.make_block(); - Self { name, source_locations: SecondaryMap::new(), entry_block, dfg } + Self { name, source_locations: SecondaryMap::new(), id, entry_block, dfg } + } + + pub(crate) fn name(&self) -> &str { + &self.name + } + + pub(crate) fn id(&self) -> FunctionId { + self.id } pub(crate) fn entry_block(&self) -> BasicBlockId { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 57c573c7bd4..ff46b49b9b4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -14,8 +14,8 @@ use super::{ }; pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { - writeln!(f, "fn {} {{", function.name)?; - display_block_with_successors(function, function.entry_block, &mut HashSet::new(), f)?; + writeln!(f, "fn {} {} {{", function.name(), function.id())?; + display_block_with_successors(function, function.entry_block(), &mut HashSet::new(), f)?; write!(f, "}}") } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs deleted file mode 100644 index d11e9a763cd..00000000000 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ /dev/null @@ -1,233 +0,0 @@ -use acvm::FieldElement; - -use crate::ssa_refactor::ir::{ - basic_block::BasicBlockId, - function::{Function, FunctionId}, - instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, - types::Type, - value::{Value, ValueId}, -}; - -use super::SharedBuilderContext; - -/// The per-function context for each ssa function being generated. -/// -/// This is split from the global SsaBuilder context to allow each function -/// to be potentially built concurrently. 
-/// -/// Contrary to the name, this struct has the capacity to build as many -/// functions as needed, although it is limited to one function at a time. -pub(crate) struct FunctionBuilder<'ssa> { - global_context: &'ssa SharedBuilderContext, - - current_function: Function, - current_function_id: FunctionId, - - current_block: BasicBlockId, - - finished_functions: Vec<(FunctionId, Function)>, -} - -impl<'ssa> FunctionBuilder<'ssa> { - pub(crate) fn new(function_name: String, context: &'ssa SharedBuilderContext) -> Self { - let new_function = Function::new(function_name); - let current_block = new_function.entry_block(); - - Self { - global_context: context, - current_function: new_function, - current_function_id: context.next_function(), - current_block, - finished_functions: Vec::new(), - } - } - - /// Finish the current function and create a new function - pub(crate) fn new_function(&mut self, name: String) { - let new_function = Function::new(name); - let old_function = std::mem::replace(&mut self.current_function, new_function); - - self.finished_functions.push((self.current_function_id, old_function)); - self.current_function_id = self.global_context.next_function(); - } - - pub(crate) fn finish(mut self) -> Vec<(FunctionId, Function)> { - self.finished_functions.push((self.current_function_id, self.current_function)); - self.finished_functions - } - - pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { - let entry = self.current_function.entry_block(); - self.current_function.dfg.add_block_parameter(entry, typ) - } - - /// Insert a numeric constant into the current function - pub(crate) fn numeric_constant( - &mut self, - value: impl Into, - typ: Type, - ) -> ValueId { - self.current_function.dfg.make_constant(value.into(), typ) - } - - /// Insert a numeric constant into the current function of type Field - pub(crate) fn field_constant(&mut self, value: impl Into) -> ValueId { - self.numeric_constant(value.into(), Type::field()) - } - - pub(crate) 
fn type_of_value(&self, value: ValueId) -> Type { - self.current_function.dfg.type_of_value(value) - } - - pub(crate) fn insert_block(&mut self) -> BasicBlockId { - self.current_function.dfg.make_block() - } - - pub(crate) fn add_block_parameter(&mut self, block: BasicBlockId, typ: Type) -> ValueId { - self.current_function.dfg.add_block_parameter(block, typ) - } - - /// Inserts a new instruction at the end of the current block and returns its results - fn insert_instruction( - &mut self, - instruction: Instruction, - ctrl_typevars: Option>, - ) -> &[ValueId] { - let id = self.current_function.dfg.make_instruction(instruction, ctrl_typevars); - self.current_function.dfg.insert_instruction_in_block(self.current_block, id); - self.current_function.dfg.instruction_results(id) - } - - /// Switch to inserting instructions in the given block. - /// Expects the given block to be within the same function. If you want to insert - /// instructions into a new function, call new_function instead. - pub(crate) fn switch_to_block(&mut self, block: BasicBlockId) { - self.current_block = block; - } - - /// Insert an allocate instruction at the end of the current block, allocating the - /// given amount of field elements. Returns the result of the allocate instruction, - /// which is always a Reference to the allocated data. - pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { - self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] - } - - /// Insert a Load instruction at the end of the current block, loading from the given offset - /// of the given address which should point to a previous Allocate instruction. Note that - /// this is limited to loading a single value. Loading multiple values (such as a tuple) - /// will require multiple loads. - /// 'offset' is in units of FieldElements here. So loading the fourth FieldElement stored in - /// an array will have an offset of 3. - /// Returns the element that was loaded. 
- pub(crate) fn insert_load( - &mut self, - mut address: ValueId, - offset: ValueId, - type_to_load: Type, - ) -> ValueId { - if let Some(offset) = self.current_function.dfg.get_numeric_constant(offset) { - if !offset.is_zero() { - let offset = self.field_constant(offset); - address = self.insert_binary(address, BinaryOp::Add, offset); - } - }; - self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] - } - - /// Insert a Store instruction at the end of the current block, storing the given element - /// at the given address. Expects that the address points somewhere - /// within a previous Allocate instruction. - pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { - self.insert_instruction(Instruction::Store { address, value }, None); - } - - /// Insert a binary instruction at the end of the current block. - /// Returns the result of the binary instruction. - pub(crate) fn insert_binary( - &mut self, - lhs: ValueId, - operator: BinaryOp, - rhs: ValueId, - ) -> ValueId { - let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); - self.insert_instruction(instruction, None)[0] - } - - /// Insert a not instruction at the end of the current block. - /// Returns the result of the instruction. - pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { - self.insert_instruction(Instruction::Not(rhs), None)[0] - } - - /// Insert a cast instruction at the end of the current block. - /// Returns the result of the cast instruction. - pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { - self.insert_instruction(Instruction::Cast(value, typ), None)[0] - } - - /// Insert a constrain instruction at the end of the current block. 
- pub(crate) fn insert_constrain(&mut self, boolean: ValueId) { - self.insert_instruction(Instruction::Constrain(boolean), None); - } - - /// Terminates the current block with the given terminator instruction - fn terminate_block_with(&mut self, terminator: TerminatorInstruction) { - self.current_function.dfg.set_block_terminator(self.current_block, terminator); - } - - /// Terminate the current block with a jmp instruction to jmp to the given - /// block with the given arguments. - pub(crate) fn terminate_with_jmp( - &mut self, - destination: BasicBlockId, - arguments: Vec, - ) { - self.terminate_block_with(TerminatorInstruction::Jmp { destination, arguments }); - } - - /// Terminate the current block with a jmpif instruction to jmp with the given arguments - /// block with the given arguments. - pub(crate) fn terminate_with_jmpif( - &mut self, - condition: ValueId, - then_destination: BasicBlockId, - else_destination: BasicBlockId, - ) { - self.terminate_block_with(TerminatorInstruction::JmpIf { - condition, - then_destination, - else_destination, - }); - } - - /// Terminate the current block with a return instruction - pub(crate) fn terminate_with_return(&mut self, return_values: Vec) { - self.terminate_block_with(TerminatorInstruction::Return { return_values }); - } - - /// Mutates a load instruction into a store instruction. - /// - /// This function is used while generating ssa-form for assignments currently. - /// To re-use most of the expression infrastructure, the lvalue of an assignment - /// is compiled as an expression and to assign to it we replace the final load - /// (which should always be present to load a mutable value) with a store of the - /// assigned value. - pub(crate) fn mutate_load_into_store(&mut self, load_result: ValueId, value_to_store: ValueId) { - let (instruction, address) = match &self.current_function.dfg[load_result] { - Value::Instruction { instruction, .. 
} => { - match &self.current_function.dfg[*instruction] { - Instruction::Load { address } => (*instruction, *address), - other => { - panic!("mutate_load_into_store: Expected Load instruction, found {other:?}") - } - } - } - other => panic!("mutate_load_into_store: Expected Load instruction, found {other:?}"), - }; - - let store = Instruction::Store { address, value: value_to_store }; - self.current_function.dfg.replace_instruction(instruction, store); - // Clear the results of the previous load for safety - self.current_function.dfg.make_instruction_results(instruction, None); - } -} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index 8f9ceed800e..fdbaa36308b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -1,19 +1,230 @@ -pub(crate) mod function_builder; +use acvm::FieldElement; use crate::ssa_refactor::ir::{ + basic_block::BasicBlockId, function::{Function, FunctionId}, - map::AtomicCounter, + instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, + types::Type, + value::{Value, ValueId}, }; -/// The global context while building the ssa representation. -/// Because this may be shared across threads, it is synchronized internally as necessary. -#[derive(Default)] -pub(crate) struct SharedBuilderContext { - function_count: AtomicCounter, +/// The per-function context for each ssa function being generated. +/// +/// This is split from the global SsaBuilder context to allow each function +/// to be potentially built concurrently. +/// +/// Contrary to the name, this struct has the capacity to build as many +/// functions as needed, although it is limited to one function at a time. 
+pub(crate) struct FunctionBuilder { + current_function: Function, + current_block: BasicBlockId, + finished_functions: Vec<(FunctionId, Function)>, } -impl SharedBuilderContext { - pub(super) fn next_function(&self) -> FunctionId { - self.function_count.next() +impl FunctionBuilder { + pub(crate) fn new(function_name: String, function_id: FunctionId) -> Self { + let new_function = Function::new(function_name, function_id); + let current_block = new_function.entry_block(); + + Self { current_function: new_function, current_block, finished_functions: Vec::new() } + } + + /// Finish the current function and create a new function + pub(crate) fn new_function(&mut self, name: String, function_id: FunctionId) { + let new_function = Function::new(name, function_id); + let old_function = std::mem::replace(&mut self.current_function, new_function); + + self.finished_functions.push((self.current_function.id(), old_function)); + } + + pub(crate) fn finish(mut self) -> Vec<(FunctionId, Function)> { + self.finished_functions.push((self.current_function.id(), self.current_function)); + self.finished_functions + } + + pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { + let entry = self.current_function.entry_block(); + self.current_function.dfg.add_block_parameter(entry, typ) + } + + /// Insert a numeric constant into the current function + pub(crate) fn numeric_constant( + &mut self, + value: impl Into, + typ: Type, + ) -> ValueId { + self.current_function.dfg.make_constant(value.into(), typ) + } + + /// Insert a numeric constant into the current function of type Field + pub(crate) fn field_constant(&mut self, value: impl Into) -> ValueId { + self.numeric_constant(value.into(), Type::field()) + } + + pub(crate) fn type_of_value(&self, value: ValueId) -> Type { + self.current_function.dfg.type_of_value(value) + } + + pub(crate) fn insert_block(&mut self) -> BasicBlockId { + self.current_function.dfg.make_block() + } + + pub(crate) fn add_block_parameter(&mut self, 
block: BasicBlockId, typ: Type) -> ValueId { + self.current_function.dfg.add_block_parameter(block, typ) + } + + /// Inserts a new instruction at the end of the current block and returns its results + fn insert_instruction( + &mut self, + instruction: Instruction, + ctrl_typevars: Option>, + ) -> &[ValueId] { + let id = self.current_function.dfg.make_instruction(instruction, ctrl_typevars); + self.current_function.dfg.insert_instruction_in_block(self.current_block, id); + self.current_function.dfg.instruction_results(id) + } + + /// Switch to inserting instructions in the given block. + /// Expects the given block to be within the same function. If you want to insert + /// instructions into a new function, call new_function instead. + pub(crate) fn switch_to_block(&mut self, block: BasicBlockId) { + self.current_block = block; + } + + /// Insert an allocate instruction at the end of the current block, allocating the + /// given amount of field elements. Returns the result of the allocate instruction, + /// which is always a Reference to the allocated data. + pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { + self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] + } + + /// Insert a Load instruction at the end of the current block, loading from the given offset + /// of the given address which should point to a previous Allocate instruction. Note that + /// this is limited to loading a single value. Loading multiple values (such as a tuple) + /// will require multiple loads. + /// 'offset' is in units of FieldElements here. So loading the fourth FieldElement stored in + /// an array will have an offset of 3. + /// Returns the element that was loaded. 
+ pub(crate) fn insert_load( + &mut self, + mut address: ValueId, + offset: ValueId, + type_to_load: Type, + ) -> ValueId { + if let Some(offset) = self.current_function.dfg.get_numeric_constant(offset) { + if !offset.is_zero() { + let offset = self.field_constant(offset); + address = self.insert_binary(address, BinaryOp::Add, offset); + } + }; + self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] + } + + /// Insert a Store instruction at the end of the current block, storing the given element + /// at the given address. Expects that the address points somewhere + /// within a previous Allocate instruction. + pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { + self.insert_instruction(Instruction::Store { address, value }, None); + } + + /// Insert a binary instruction at the end of the current block. + /// Returns the result of the binary instruction. + pub(crate) fn insert_binary( + &mut self, + lhs: ValueId, + operator: BinaryOp, + rhs: ValueId, + ) -> ValueId { + let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); + self.insert_instruction(instruction, None)[0] + } + + /// Insert a not instruction at the end of the current block. + /// Returns the result of the instruction. + pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { + self.insert_instruction(Instruction::Not(rhs), None)[0] + } + + /// Insert a cast instruction at the end of the current block. + /// Returns the result of the cast instruction. + pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { + self.insert_instruction(Instruction::Cast(value, typ), None)[0] + } + + /// Insert a constrain instruction at the end of the current block. + pub(crate) fn insert_constrain(&mut self, boolean: ValueId) { + self.insert_instruction(Instruction::Constrain(boolean), None); + } + + /// Insert a call instruction a the end of the current block and return + /// the results of the call. 
+ pub(crate) fn insert_call( + &mut self, + func: FunctionId, + arguments: Vec, + result_types: Vec, + ) -> &[ValueId] { + self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)) + } + + /// Terminates the current block with the given terminator instruction + fn terminate_block_with(&mut self, terminator: TerminatorInstruction) { + self.current_function.dfg.set_block_terminator(self.current_block, terminator); + } + + /// Terminate the current block with a jmp instruction to jmp to the given + /// block with the given arguments. + pub(crate) fn terminate_with_jmp( + &mut self, + destination: BasicBlockId, + arguments: Vec, + ) { + self.terminate_block_with(TerminatorInstruction::Jmp { destination, arguments }); + } + + /// Terminate the current block with a jmpif instruction to jmp with the given arguments + /// block with the given arguments. + pub(crate) fn terminate_with_jmpif( + &mut self, + condition: ValueId, + then_destination: BasicBlockId, + else_destination: BasicBlockId, + ) { + self.terminate_block_with(TerminatorInstruction::JmpIf { + condition, + then_destination, + else_destination, + }); + } + + /// Terminate the current block with a return instruction + pub(crate) fn terminate_with_return(&mut self, return_values: Vec) { + self.terminate_block_with(TerminatorInstruction::Return { return_values }); + } + + /// Mutates a load instruction into a store instruction. + /// + /// This function is used while generating ssa-form for assignments currently. + /// To re-use most of the expression infrastructure, the lvalue of an assignment + /// is compiled as an expression and to assign to it we replace the final load + /// (which should always be present to load a mutable value) with a store of the + /// assigned value. 
+ pub(crate) fn mutate_load_into_store(&mut self, load_result: ValueId, value_to_store: ValueId) { + let (instruction, address) = match &self.current_function.dfg[load_result] { + Value::Instruction { instruction, .. } => { + match &self.current_function.dfg[*instruction] { + Instruction::Load { address } => (*instruction, *address), + other => { + panic!("mutate_load_into_store: Expected Load instruction, found {other:?}") + } + } + } + other => panic!("mutate_load_into_store: Expected Load instruction, found {other:?}"), + }; + + let store = Instruction::Store { address, value: value_to_store }; + self.current_function.dfg.replace_instruction(instruction, store); + // Clear the results of the previous load for safety + self.current_function.dfg.make_instruction_results(instruction, None); } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 10206e28c2d..df5329fed92 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -6,13 +6,13 @@ use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters}; use noirc_frontend::monomorphization::ast::{FuncId, Program}; use noirc_frontend::Signedness; +use crate::ssa_refactor::ir::function::Function; +use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::instruction::BinaryOp; +use crate::ssa_refactor::ir::map::AtomicCounter; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId; -use crate::ssa_refactor::ssa_builder::SharedBuilderContext; -use crate::ssa_refactor::{ - ir::function::FunctionId as IrFunctionId, ssa_builder::function_builder::FunctionBuilder, -}; +use crate::ssa_refactor::ssa_builder::FunctionBuilder; use super::value::{Tree, Values}; @@ -22,7 +22,7 @@ type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; pub(super) struct FunctionContext<'a> { definitions: 
HashMap, - pub(super) builder: FunctionBuilder<'a>, + pub(super) builder: FunctionBuilder, shared_context: &'a SharedContext, } @@ -30,28 +30,32 @@ pub(super) struct FunctionContext<'a> { pub(super) struct SharedContext { functions: RwLock>, function_queue: Mutex, + function_counter: AtomicCounter, + pub(super) program: Program, } impl<'a> FunctionContext<'a> { pub(super) fn new( + function_id: FuncId, function_name: String, parameters: &Parameters, shared_context: &'a SharedContext, - shared_builder_context: &'a SharedBuilderContext, ) -> Self { + let new_id = shared_context.get_or_queue_function(function_id); + let mut this = Self { definitions: HashMap::new(), - builder: FunctionBuilder::new(function_name, shared_builder_context), + builder: FunctionBuilder::new(function_name, new_id), shared_context, }; this.add_parameters_to_scope(parameters); this } - pub(super) fn new_function(&mut self, name: String, parameters: &Parameters) { + pub(super) fn new_function(&mut self, id: IrFunctionId, name: String, parameters: &Parameters) { self.definitions.clear(); - self.builder.new_function(name); + self.builder.new_function(name, id); self.add_parameters_to_scope(parameters); } @@ -72,7 +76,7 @@ impl<'a> FunctionContext<'a> { fn add_parameter_to_scope(&mut self, parameter_id: LocalId, parameter_type: &ast::Type) { // Add a separate parameter for each field type in 'parameter_type' let parameter_value = - self.map_type(parameter_type, |this, typ| this.builder.add_parameter(typ).into()); + Self::map_type(parameter_type, |typ| self.builder.add_parameter(typ).into()); self.definitions.insert(parameter_id, parameter_value); } @@ -81,12 +85,8 @@ impl<'a> FunctionContext<'a> { /// /// This can be used to (for example) flatten a tuple type, creating /// and returning a new parameter for each field type. 
- pub(super) fn map_type( - &mut self, - typ: &ast::Type, - mut f: impl FnMut(&mut Self, Type) -> T, - ) -> Tree { - Self::map_type_helper(typ, &mut |typ| f(self, typ)) + pub(super) fn map_type(typ: &ast::Type, mut f: impl FnMut(Type) -> T) -> Tree { + Self::map_type_helper(typ, &mut f) } // This helper is needed because we need to take f by mutable reference, @@ -157,6 +157,30 @@ impl<'a> FunctionContext<'a> { result.into() } + /// Inserts a call instruction at the end of the current block and returns the results + /// of the call. + /// + /// Compared to self.builder.insert_call, this version will reshape the returned Vec + /// back into a Values tree of the proper shape. + pub(super) fn insert_call( + &mut self, + function: IrFunctionId, + arguments: Vec, + result_type: &ast::Type, + ) -> Values { + let result_types = Self::convert_type(result_type).flatten(); + let results = self.builder.insert_call(function, arguments, result_types); + + let mut i = 0; + let reshaped_return_values = Self::map_type(result_type, |_| { + let result = results[i].into(); + i += 1; + result + }); + assert_eq!(i, results.len()); + reshaped_return_values + } + /// Create a const offset of an address for an array load or store pub(super) fn make_offset(&mut self, mut address: ValueId, offset: u128) -> ValueId { if offset != 0 { @@ -215,6 +239,13 @@ impl<'a> FunctionContext<'a> { } } } + + /// Retrieves the given function, adding it to the function queue + /// if it is not yet compiled. 
+ pub(super) fn get_or_queue_function(&self, id: FuncId) -> Values { + let function = self.shared_context.get_or_queue_function(id); + Values::Leaf(super::value::Value::Function(function)) + } } /// True if the given operator cannot be encoded directly and needs @@ -260,10 +291,38 @@ fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { impl SharedContext { pub(super) fn new(program: Program) -> Self { - Self { functions: Default::default(), function_queue: Default::default(), program } + Self { + functions: Default::default(), + function_queue: Default::default(), + function_counter: Default::default(), + program, + } } pub(super) fn pop_next_function_in_queue(&self) -> Option<(ast::FuncId, IrFunctionId)> { self.function_queue.lock().expect("Failed to lock function_queue").pop() } + + /// Return the matching id for the given function if known. If it is not known this + /// will add the function to the queue of functions to compile, assign it a new id, + /// and return this new id. 
+ pub(super) fn get_or_queue_function(&self, id: ast::FuncId) -> IrFunctionId { + // Start a new block to guarantee the destructor for the map lock is released + // before map needs to be aquired again in self.functions.write() below + { + let map = self.functions.read().expect("Failed to read self.functions"); + if let Some(existing_id) = map.get(&id) { + return *existing_id; + } + } + + let next_id = self.function_counter.next(); + + let mut queue = self.function_queue.lock().expect("Failed to lock function queue"); + queue.push((id, next_id)); + + self.functions.write().expect("Failed to write to self.functions").insert(id, next_id); + + next_id + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index a7880032d42..4aad2aafec1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -11,25 +11,21 @@ use self::{ value::{Tree, Values}, }; -use super::{ - ir::{instruction::BinaryOp, types::Type, value::ValueId}, - ssa_builder::SharedBuilderContext, -}; +use super::ir::{function::FunctionId, instruction::BinaryOp, types::Type, value::ValueId}; pub(crate) fn generate_ssa(program: Program) { let context = SharedContext::new(program); - let builder_context = SharedBuilderContext::default(); let main = context.program.main(); - let mut function_context = - FunctionContext::new(main.name.clone(), &main.parameters, &context, &builder_context); + let main_id = Program::main_id(); + let main_name = main.name.clone(); + let mut function_context = FunctionContext::new(main_id, main_name, &main.parameters, &context); function_context.codegen_expression(&main.body); - while let Some((src_function_id, _new_id)) = context.pop_next_function_in_queue() { + while let Some((src_function_id, dest_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; - // TODO: Need to ensure/assert the new 
function's id == new_id - function_context.new_function(function.name.clone(), &function.parameters); + function_context.new_function(dest_id, function.name.clone(), &function.parameters); function_context.codegen_expression(&function.body); } } @@ -69,7 +65,7 @@ impl<'a> FunctionContext<'a> { fn codegen_ident(&mut self, ident: &ast::Ident) -> Values { match &ident.definition { ast::Definition::Local(id) => self.lookup(*id).map(|value| value.eval(self).into()), - ast::Definition::Function(_) => todo!(), + ast::Definition::Function(id) => self.get_or_queue_function(*id), ast::Definition::Builtin(_) => todo!(), ast::Definition::LowLevel(_) => todo!(), } @@ -165,10 +161,10 @@ impl<'a> FunctionContext<'a> { let base_index = self.builder.insert_binary(base_offset, BinaryOp::Mul, type_size); let mut field_index = 0u128; - self.map_type(element_type, |ctx, typ| { - let offset = ctx.make_offset(base_index, field_index); + Self::map_type(element_type, |typ| { + let offset = self.make_offset(base_index, field_index); field_index += 1; - ctx.builder.insert_load(array, offset, typ).into() + self.builder.insert_load(array, offset, typ).into() }) } @@ -229,8 +225,8 @@ impl<'a> FunctionContext<'a> { // Create block arguments for the end block as needed to branch to // with our then and else value. 
- result = self.map_type(&if_expr.typ, |ctx, typ| { - ctx.builder.add_block_parameter(end_block, typ).into() + result = Self::map_type(&if_expr.typ, |typ| { + self.builder.add_block_parameter(end_block, typ).into() }); let else_values = else_value.into_value_list(self); @@ -259,8 +255,26 @@ impl<'a> FunctionContext<'a> { Self::get_field(tuple, field_index) } - fn codegen_call(&mut self, _call: &ast::Call) -> Values { - todo!() + fn codegen_function(&mut self, function: &Expression) -> FunctionId { + use crate::ssa_refactor::ssa_gen::value::Value; + match self.codegen_expression(function) { + Tree::Leaf(Value::Function(id)) => id, + other => { + panic!("codegen_function: expected function value, found {other:?}") + } + } + } + + fn codegen_call(&mut self, call: &ast::Call) -> Values { + let function = self.codegen_function(&call.func); + + let arguments = call + .arguments + .iter() + .flat_map(|argument| self.codegen_expression(argument).into_value_list(self)) + .collect(); + + self.insert_call(function, arguments, &call.return_type) } fn codegen_let(&mut self, let_expr: &ast::Let) -> Values { From 4e198c0b8190d5bfb2167b929a5d5a8a9106a4b0 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 27 Apr 2023 17:29:48 +0100 Subject: [PATCH 47/63] chore: address clippy warnings (#1239) --- .../src/ssa_refactor/ir/cfg.rs | 66 ++++++++++--------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs index 42a2cd573a1..d443d574ca8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -161,32 +161,33 @@ mod tests { let mut cfg = ControlFlowGraph::with_function(&func); + #[allow(clippy::needless_collect)] { - let block0_predecessors = cfg.pred_iter(block0_id).collect::>(); - let block1_predecessors = cfg.pred_iter(block1_id).collect::>(); - let 
block2_predecessors = cfg.pred_iter(block2_id).collect::>(); + let block0_predecessors: Vec<_> = cfg.pred_iter(block0_id).collect(); + let block1_predecessors: Vec<_> = cfg.pred_iter(block1_id).collect(); + let block2_predecessors: Vec<_> = cfg.pred_iter(block2_id).collect(); - let block0_successors = cfg.succ_iter(block0_id).collect::>(); - let block1_successors = cfg.succ_iter(block1_id).collect::>(); - let block2_successors = cfg.succ_iter(block2_id).collect::>(); + let block0_successors: Vec<_> = cfg.succ_iter(block0_id).collect(); + let block1_successors: Vec<_> = cfg.succ_iter(block1_id).collect(); + let block2_successors: Vec<_> = cfg.succ_iter(block2_id).collect(); assert_eq!(block0_predecessors.len(), 0); assert_eq!(block1_predecessors.len(), 2); assert_eq!(block2_predecessors.len(), 2); - assert_eq!(block1_predecessors.contains(&block0_id), true); - assert_eq!(block1_predecessors.contains(&block1_id), true); - assert_eq!(block2_predecessors.contains(&block0_id), true); - assert_eq!(block2_predecessors.contains(&block1_id), true); + assert!(block1_predecessors.contains(&block0_id)); + assert!(block1_predecessors.contains(&block1_id)); + assert!(block2_predecessors.contains(&block0_id)); + assert!(block2_predecessors.contains(&block1_id)); assert_eq!(block0_successors.len(), 2); assert_eq!(block1_successors.len(), 2); assert_eq!(block2_successors.len(), 0); - assert_eq!(block0_successors.contains(&block1_id), true); - assert_eq!(block0_successors.contains(&block2_id), true); - assert_eq!(block1_successors.contains(&block1_id), true); - assert_eq!(block1_successors.contains(&block2_id), true); + assert!(block0_successors.contains(&block1_id)); + assert!(block0_successors.contains(&block2_id)); + assert!(block1_successors.contains(&block1_id)); + assert!(block1_successors.contains(&block2_id)); } // Modify function to form: @@ -214,37 +215,38 @@ mod tests { }); // Recompute new and changed blocks - cfg.recompute_block(&mut func, block0_id); - 
cfg.recompute_block(&mut func, block2_id); - cfg.recompute_block(&mut func, ret_block_id); + cfg.recompute_block(&func, block0_id); + cfg.recompute_block(&func, block2_id); + cfg.recompute_block(&func, ret_block_id); + #[allow(clippy::needless_collect)] { - let block0_predecessors = cfg.pred_iter(block0_id).collect::>(); - let block1_predecessors = cfg.pred_iter(block1_id).collect::>(); - let block2_predecessors = cfg.pred_iter(block2_id).collect::>(); + let block0_predecessors: Vec<_> = cfg.pred_iter(block0_id).collect(); + let block1_predecessors: Vec<_> = cfg.pred_iter(block1_id).collect(); + let block2_predecessors: Vec<_> = cfg.pred_iter(block2_id).collect(); - let block0_successors = cfg.succ_iter(block0_id).collect::>(); - let block1_successors = cfg.succ_iter(block1_id).collect::>(); - let block2_successors = cfg.succ_iter(block2_id).collect::>(); + let block0_successors: Vec<_> = cfg.succ_iter(block0_id).collect(); + let block1_successors: Vec<_> = cfg.succ_iter(block1_id).collect(); + let block2_successors: Vec<_> = cfg.succ_iter(block2_id).collect(); assert_eq!(block0_predecessors.len(), 0); assert_eq!(block1_predecessors.len(), 2); assert_eq!(block2_predecessors.len(), 1); - assert_eq!(block1_predecessors.contains(&block0_id), true); - assert_eq!(block1_predecessors.contains(&block1_id), true); - assert_eq!(block2_predecessors.contains(&block0_id), false); - assert_eq!(block2_predecessors.contains(&block1_id), true); + assert!(block1_predecessors.contains(&block0_id)); + assert!(block1_predecessors.contains(&block1_id)); + assert!(!block2_predecessors.contains(&block0_id)); + assert!(block2_predecessors.contains(&block1_id)); assert_eq!(block0_successors.len(), 2); assert_eq!(block1_successors.len(), 2); assert_eq!(block2_successors.len(), 1); - assert_eq!(block0_successors.contains(&block1_id), true); - assert_eq!(block0_successors.contains(&ret_block_id), true); - assert_eq!(block1_successors.contains(&block1_id), true); - 
assert_eq!(block1_successors.contains(&block2_id), true); - assert_eq!(block2_successors.contains(&ret_block_id), true); + assert!(block0_successors.contains(&block1_id)); + assert!(block0_successors.contains(&ret_block_id)); + assert!(block1_successors.contains(&block1_id)); + assert!(block1_successors.contains(&block2_id)); + assert!(block2_successors.contains(&ret_block_id)); } } } From 606b7d0db77fccde1bb14c4c3f91f424436e4ea6 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 27 Apr 2023 14:36:24 -0400 Subject: [PATCH 48/63] chore(ssa refactor): Implement first-class functions (#1238) * Implement first-class functions * Update crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs Co-authored-by: kevaundray --------- Co-authored-by: kevaundray --- .../src/ssa_refactor/ir/dfg.rs | 21 ++++++++++++++++--- .../src/ssa_refactor/ir/function.rs | 10 +++++---- .../src/ssa_refactor/ir/instruction.rs | 6 ++---- .../src/ssa_refactor/ir/map.rs | 14 ------------- .../src/ssa_refactor/ir/printer.rs | 17 +++++++++------ .../src/ssa_refactor/ir/value.rs | 13 +++++++++++- .../src/ssa_refactor/ssa_builder/mod.rs | 2 +- .../src/ssa_refactor/ssa_gen/context.rs | 2 +- .../src/ssa_refactor/ssa_gen/mod.rs | 14 ++----------- 9 files changed, 53 insertions(+), 46 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index ab2018b1df8..60591da311c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -1,9 +1,11 @@ +use std::collections::HashMap; + use super::{ basic_block::{BasicBlock, BasicBlockId}, constant::{NumericConstant, NumericConstantId}, - function::Signature, + function::{FunctionId, Signature}, instruction::{Instruction, InstructionId, InstructionResultType, TerminatorInstruction}, - map::{DenseMap, Id, SecondaryMap, TwoWayMap}, + map::{DenseMap, Id, TwoWayMap}, types::Type, value::{Value, ValueId}, }; @@ -53,7 +55,7 @@ pub(crate) struct 
DataFlowGraph { /// Currently, we need to define them in a better way /// Call instructions require the func signature, but /// other instructions may need some more reading on my part - results: SecondaryMap, + results: HashMap, /// Storage for all of the values defined in this /// function. @@ -64,6 +66,11 @@ pub(crate) struct DataFlowGraph { /// twice will return the same ConstantId. constants: TwoWayMap, + /// Contains each function that has been imported into the current function. + /// Each function's Value::Function is uniqued here so any given FunctionId + /// will always have the same ValueId within this function. + functions: HashMap, + /// Function signatures of external methods signatures: DenseMap, @@ -150,6 +157,14 @@ impl DataFlowGraph { self.values.insert(Value::NumericConstant { constant, typ }) } + /// Gets or creates a ValueId for the given FunctionId. + pub(crate) fn import_function(&mut self, function: FunctionId) -> ValueId { + if let Some(existing) = self.functions.get(&function) { + return *existing; + } + self.values.insert(Value::Function { id: function }) + } + /// Attaches results to the instruction, clearing any previous results. 
/// /// This does not normally need to be called manually as it is called within diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index ca486d0258a..e40c086c0e6 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -1,7 +1,9 @@ +use std::collections::HashMap; + use super::basic_block::BasicBlockId; use super::dfg::DataFlowGraph; -use super::instruction::Instruction; -use super::map::{Id, SecondaryMap}; +use super::instruction::InstructionId; +use super::map::Id; use super::types::Type; use noirc_errors::Location; @@ -15,7 +17,7 @@ use noirc_errors::Location; #[derive(Debug)] pub(crate) struct Function { /// Maps instructions to source locations - source_locations: SecondaryMap, + source_locations: HashMap, /// The first basic block in the function entry_block: BasicBlockId, @@ -35,7 +37,7 @@ impl Function { pub(crate) fn new(name: String, id: FunctionId) -> Self { let mut dfg = DataFlowGraph::default(); let entry_block = dfg.make_block(); - Self { name, source_locations: SecondaryMap::new(), id, entry_block, dfg } + Self { name, source_locations: HashMap::new(), id, entry_block, dfg } } pub(crate) fn name(&self) -> &str { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 11c6b8dc05f..5e9e7229e3a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,6 +1,4 @@ -use super::{ - basic_block::BasicBlockId, function::FunctionId, map::Id, types::Type, value::ValueId, -}; +use super::{basic_block::BasicBlockId, map::Id, types::Type, value::ValueId}; /// Reference to an instruction pub(crate) type InstructionId = Id; @@ -41,7 +39,7 @@ pub(crate) enum Instruction { Constrain(ValueId), /// Performs a function call with a list of its arguments. 
- Call { func: FunctionId, arguments: Vec }, + Call { func: ValueId, arguments: Vec }, /// Performs a call to an intrinsic function and stores the /// results in `return_arguments`. diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index 24b30241293..a99ff06c5fb 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -260,20 +260,6 @@ impl std::ops::Index> for TwoWayMap { } } -/// A SecondaryMap is for storing secondary data for a given key. Since this -/// map is for secondary data, it will not return fresh Ids for data, instead -/// it expects users to provide these ids in order to associate existing ids with -/// additional data. -/// -/// Unlike SecondaryMap in cranelift, this version is sparse and thus -/// does not require inserting default elements for each key in between -/// the desired key and the previous length of the map. -/// -/// There is no expectation that there is always secondary data for all relevant -/// Ids of a given type, so unlike the other Map types, it is possible for -/// a call to .get(id) to return None. -pub(crate) type SecondaryMap = HashMap, V>; - /// A simple counter to create fresh Ids without any storage. /// Useful for assigning ids before the storage is created or assigning ids /// for types that have no single owner. 
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index ff46b49b9b4..4873f436dca 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -55,12 +55,17 @@ pub(crate) fn display_block( display_terminator(function, block.terminator(), f) } -/// Specialize displaying value ids so that if they refer to constants we -/// print the constant directly +/// Specialize displaying value ids so that if they refer to a numeric +/// constant or a function we print those directly. fn value(function: &Function, id: ValueId) -> String { - match function.dfg.get_numeric_constant_with_type(id) { - Some((value, typ)) => format!("{} {}", value, typ), - None => id.to_string(), + use super::value::Value; + match &function.dfg[id] { + Value::NumericConstant { constant, typ } => { + let value = function.dfg[*constant].value(); + format!("{} {}", typ, value) + } + Value::Function { id } => id.to_string(), + _ => id.to_string(), } } @@ -120,7 +125,7 @@ pub(crate) fn display_instruction( writeln!(f, "constrain {}", show(*value)) } Instruction::Call { func, arguments } => { - writeln!(f, "call {func}({})", value_list(function, arguments)) + writeln!(f, "call {}({})", show(*func), value_list(function, arguments)) } Instruction::Intrinsic { func, arguments } => { writeln!(f, "intrinsic {func}({})", value_list(function, arguments)) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index a559522fadd..39228ae655b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -1,6 +1,9 @@ use crate::ssa_refactor::ir::basic_block::BasicBlockId; -use super::{constant::NumericConstantId, instruction::InstructionId, map::Id, types::Type}; +use super::{ + constant::NumericConstantId, function::FunctionId, 
instruction::InstructionId, map::Id, + types::Type, +}; pub(crate) type ValueId = Id; @@ -27,6 +30,13 @@ pub(crate) enum Value { /// This Value originates from a numeric constant NumericConstant { constant: NumericConstantId, typ: Type }, + + /// This Value refers to a function in the IR. + /// Functions always have the type Type::Function. + /// If the argument or return types are needed, users should retrieve + /// their types via the Call instruction's arguments or the Call instruction's + /// result types respectively. + Function { id: FunctionId }, } impl Value { @@ -35,6 +45,7 @@ impl Value { Value::Instruction { typ, .. } => *typ, Value::Param { typ, .. } => *typ, Value::NumericConstant { typ, .. } => *typ, + Value::Function { .. } => Type::Function, } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index fdbaa36308b..7da88e47157 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -160,7 +160,7 @@ impl FunctionBuilder { /// the results of the call. pub(crate) fn insert_call( &mut self, - func: FunctionId, + func: ValueId, arguments: Vec, result_types: Vec, ) -> &[ValueId] { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index df5329fed92..bd04f90d063 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -164,7 +164,7 @@ impl<'a> FunctionContext<'a> { /// back into a Values tree of the proper shape. 
pub(super) fn insert_call( &mut self, - function: IrFunctionId, + function: ValueId, arguments: Vec, result_type: &ast::Type, ) -> Values { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 4aad2aafec1..8475b3c84c7 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -11,7 +11,7 @@ use self::{ value::{Tree, Values}, }; -use super::ir::{function::FunctionId, instruction::BinaryOp, types::Type, value::ValueId}; +use super::ir::{instruction::BinaryOp, types::Type, value::ValueId}; pub(crate) fn generate_ssa(program: Program) { let context = SharedContext::new(program); @@ -255,18 +255,8 @@ impl<'a> FunctionContext<'a> { Self::get_field(tuple, field_index) } - fn codegen_function(&mut self, function: &Expression) -> FunctionId { - use crate::ssa_refactor::ssa_gen::value::Value; - match self.codegen_expression(function) { - Tree::Leaf(Value::Function(id)) => id, - other => { - panic!("codegen_function: expected function value, found {other:?}") - } - } - } - fn codegen_call(&mut self, call: &ast::Call) -> Values { - let function = self.codegen_function(&call.func); + let function = self.codegen_non_tuple_expression(&call.func); let arguments = call .arguments From 06427e5bccaa40b0eb0e3f30388a98dc25cf558f Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 27 Apr 2023 15:52:04 -0400 Subject: [PATCH 49/63] chore(ssa refactor): Implement intrinsics (#1241) * Implement first-class functions * Update crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs Co-authored-by: kevaundray * Implement intrinsics --------- Co-authored-by: kevaundray --- .../src/ssa_refactor/ir/dfg.rs | 19 +++++- .../src/ssa_refactor/ir/instruction.rs | 63 +++++++++++++------ .../src/ssa_refactor/ir/printer.rs | 6 +- .../src/ssa_refactor/ir/value.rs | 12 +++- .../src/ssa_refactor/ssa_builder/mod.rs | 15 +++++ .../src/ssa_refactor/ssa_gen/context.rs | 4 
+- .../src/ssa_refactor/ssa_gen/mod.rs | 8 ++- .../src/ssa_refactor/ssa_gen/value.rs | 3 - 8 files changed, 96 insertions(+), 34 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 60591da311c..4d2ebe31efb 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -4,7 +4,9 @@ use super::{ basic_block::{BasicBlock, BasicBlockId}, constant::{NumericConstant, NumericConstantId}, function::{FunctionId, Signature}, - instruction::{Instruction, InstructionId, InstructionResultType, TerminatorInstruction}, + instruction::{ + Instruction, InstructionId, InstructionResultType, Intrinsic, TerminatorInstruction, + }, map::{DenseMap, Id, TwoWayMap}, types::Type, value::{Value, ValueId}, @@ -71,6 +73,11 @@ pub(crate) struct DataFlowGraph { /// will always have the same ValueId within this function. functions: HashMap, + /// Contains each intrinsic that has been imported into the current function. + /// This map is used to ensure that the ValueId for any given intrinsic is always + /// represented by only 1 ValueId within this function. + intrinsics: HashMap, + /// Function signatures of external methods signatures: DenseMap, @@ -162,7 +169,15 @@ impl DataFlowGraph { if let Some(existing) = self.functions.get(&function) { return *existing; } - self.values.insert(Value::Function { id: function }) + self.values.insert(Value::Function(function)) + } + + /// Gets or creates a ValueId for the given Intrinsic. + pub(crate) fn import_intrinsic(&mut self, intrinsic: Intrinsic) -> ValueId { + if let Some(existing) = self.intrinsics.get(&intrinsic) { + return *existing; + } + self.values.insert(Value::Intrinsic(intrinsic)) } /// Attaches results to the instruction, clearing any previous results. 
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 5e9e7229e3a..756c7ae5a13 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,9 +1,10 @@ +use acvm::acir::BlackBoxFunc; + use super::{basic_block::BasicBlockId, map::Id, types::Type, value::ValueId}; /// Reference to an instruction pub(crate) type InstructionId = Id; -#[derive(Debug, PartialEq, Eq, Hash, Clone)] /// These are similar to built-ins in other languages. /// These can be classified under two categories: /// - Opcodes which the IR knows the target machine has @@ -11,14 +12,50 @@ pub(crate) type InstructionId = Id; /// - Opcodes which have no function definition in the /// source code and must be processed by the IR. An example /// of this is println. -pub(crate) struct IntrinsicOpcodes; +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub(crate) enum Intrinsic { + Sort, + Println, + ToBits(Endian), + ToRadix(Endian), + BlackBox(BlackBoxFunc), +} + +impl std::fmt::Display for Intrinsic { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Intrinsic::Println => write!(f, "println"), + Intrinsic::Sort => write!(f, "sort"), + Intrinsic::ToBits(Endian::Big) => write!(f, "to_be_bits"), + Intrinsic::ToBits(Endian::Little) => write!(f, "to_le_bits"), + Intrinsic::ToRadix(Endian::Big) => write!(f, "to_be_radix"), + Intrinsic::ToRadix(Endian::Little) => write!(f, "to_le_radix"), + Intrinsic::BlackBox(function) => write!(f, "{function}"), + } + } +} -impl std::fmt::Display for IntrinsicOpcodes { - fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - todo!("intrinsics have no opcodes yet") +impl Intrinsic { + pub(crate) fn lookup(name: &str) -> Option { + match name { + "println" => Some(Intrinsic::Println), + "array_sort" => Some(Intrinsic::Sort), + "to_le_radix" => 
Some(Intrinsic::ToRadix(Endian::Little)), + "to_be_radix" => Some(Intrinsic::ToRadix(Endian::Big)), + "to_le_bits" => Some(Intrinsic::ToBits(Endian::Little)), + "to_be_bits" => Some(Intrinsic::ToBits(Endian::Big)), + other => BlackBoxFunc::lookup(other).map(Intrinsic::BlackBox), + } } } +/// The endian-ness of bits when encoding values as bits in e.g. ToBits or ToRadix +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] +pub(crate) enum Endian { + Big, + Little, +} + #[derive(Debug, PartialEq, Eq, Hash, Clone)] /// Instructions are used to perform tasks. /// The instructions that the IR is able to specify are listed below. @@ -41,10 +78,6 @@ pub(crate) enum Instruction { /// Performs a function call with a list of its arguments. Call { func: ValueId, arguments: Vec }, - /// Performs a call to an intrinsic function and stores the - /// results in `return_arguments`. - Intrinsic { func: IntrinsicOpcodes, arguments: Vec }, - /// Allocates a region of memory. Note that this is not concerned with /// the type of memory, the type of element is determined when loading this memory. /// @@ -72,9 +105,6 @@ impl Instruction { Instruction::Constrain(_) => 0, // This returns 0 as the result depends on the function being called Instruction::Call { .. } => 0, - // This also returns 0, but we could get it a compile time, - // since we know the signatures for the intrinsics - Instruction::Intrinsic { .. } => 0, Instruction::Allocate { .. } => 1, Instruction::Load { .. } => 1, Instruction::Store { .. } => 0, @@ -94,9 +124,6 @@ impl Instruction { Instruction::Constrain(_) => 1, // This returns 0 as the arguments depend on the function being called Instruction::Call { .. } => 0, - // This also returns 0, but we could get it a compile time, - // since we know the function definition for the intrinsics - Instruction::Intrinsic { .. 
} => 0, Instruction::Allocate { size: _ } => 1, Instruction::Load { address: _ } => 1, Instruction::Store { address: _, value: _ } => 2, @@ -113,9 +140,7 @@ impl Instruction { InstructionResultType::Operand(*value) } Instruction::Constrain(_) | Instruction::Store { .. } => InstructionResultType::None, - Instruction::Load { .. } | Instruction::Call { .. } | Instruction::Intrinsic { .. } => { - InstructionResultType::Unknown - } + Instruction::Load { .. } | Instruction::Call { .. } => InstructionResultType::Unknown, } } } @@ -129,7 +154,7 @@ pub(crate) enum InstructionResultType { Known(Type), /// The result type of this function is unknown and separate from its operand types. - /// This occurs for function and intrinsic calls. + /// This occurs for function calls and load operations. Unknown, /// This instruction does not return any results. diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 4873f436dca..1471bd46e35 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -64,7 +64,8 @@ fn value(function: &Function, id: ValueId) -> String { let value = function.dfg[*constant].value(); format!("{} {}", typ, value) } - Value::Function { id } => id.to_string(), + Value::Function(id) => id.to_string(), + Value::Intrinsic(intrinsic) => intrinsic.to_string(), _ => id.to_string(), } } @@ -127,9 +128,6 @@ pub(crate) fn display_instruction( Instruction::Call { func, arguments } => { writeln!(f, "call {}({})", show(*func), value_list(function, arguments)) } - Instruction::Intrinsic { func, arguments } => { - writeln!(f, "intrinsic {func}({})", value_list(function, arguments)) - } Instruction::Allocate { size } => writeln!(f, "alloc {size} fields"), Instruction::Load { address } => writeln!(f, "load {}", show(*address)), Instruction::Store { address, value } => { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs 
b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index 39228ae655b..d7d8d8a41ab 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -1,7 +1,10 @@ use crate::ssa_refactor::ir::basic_block::BasicBlockId; use super::{ - constant::NumericConstantId, function::FunctionId, instruction::InstructionId, map::Id, + constant::NumericConstantId, + function::FunctionId, + instruction::{InstructionId, Intrinsic}, + map::Id, types::Type, }; @@ -36,7 +39,11 @@ pub(crate) enum Value { /// If the argument or return types are needed, users should retrieve /// their types via the Call instruction's arguments or the Call instruction's /// result types respectively. - Function { id: FunctionId }, + Function(FunctionId), + + /// An Intrinsic is a special kind of builtin function that may be handled internally + /// or optimized into a special form. + Intrinsic(Intrinsic), } impl Value { @@ -46,6 +53,7 @@ impl Value { Value::Param { typ, .. } => *typ, Value::NumericConstant { typ, .. } => *typ, Value::Function { .. } => Type::Function, + Value::Intrinsic { .. } => Type::Function, } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index 7da88e47157..6c407dfcd42 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -8,6 +8,8 @@ use crate::ssa_refactor::ir::{ value::{Value, ValueId}, }; +use super::ir::instruction::Intrinsic; + /// The per-function context for each ssa function being generated. 
/// /// This is split from the global SsaBuilder context to allow each function @@ -227,4 +229,17 @@ impl FunctionBuilder { // Clear the results of the previous load for safety self.current_function.dfg.make_instruction_results(instruction, None); } + + /// Returns a ValueId pointing to the given function or imports the function + /// into the current function if it was not already, and returns that ID. + pub(crate) fn import_function(&mut self, function: FunctionId) -> ValueId { + self.current_function.dfg.import_function(function) + } + + /// Retrieve a value reference to the given intrinsic operation. + /// Returns None if there is no intrinsic matching the given name. + pub(crate) fn import_intrinsic(&mut self, name: &str) -> Option { + Intrinsic::lookup(name) + .map(|intrinsic| self.current_function.dfg.import_intrinsic(intrinsic)) + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index bd04f90d063..909ed4ff84d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -242,9 +242,9 @@ impl<'a> FunctionContext<'a> { /// Retrieves the given function, adding it to the function queue /// if it is not yet compiled. 
- pub(super) fn get_or_queue_function(&self, id: FuncId) -> Values { + pub(super) fn get_or_queue_function(&mut self, id: FuncId) -> Values { let function = self.shared_context.get_or_queue_function(id); - Values::Leaf(super::value::Value::Function(function)) + self.builder.import_function(function).into() } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 8475b3c84c7..715f835ab7f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -66,8 +66,12 @@ impl<'a> FunctionContext<'a> { match &ident.definition { ast::Definition::Local(id) => self.lookup(*id).map(|value| value.eval(self).into()), ast::Definition::Function(id) => self.get_or_queue_function(*id), - ast::Definition::Builtin(_) => todo!(), - ast::Definition::LowLevel(_) => todo!(), + ast::Definition::Builtin(name) | ast::Definition::LowLevel(name) => { + match self.builder.import_intrinsic(name) { + Some(builtin) => builtin.into(), + None => panic!("No builtin function named '{name}' found"), + } + } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 52ff52d75f2..410e375fcd6 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -1,6 +1,5 @@ use iter_extended::vecmap; -use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; @@ -15,7 +14,6 @@ pub(super) enum Tree { #[derive(Debug, Copy, Clone)] pub(super) enum Value { Normal(IrValueId), - Function(IrFunctionId), /// A mutable variable that must be loaded as the given type before being used Mutable(IrValueId, Type), @@ -32,7 +30,6 @@ impl Value { let offset = ctx.builder.field_constant(0u128); ctx.builder.insert_load(address, 
offset, typ) } - Value::Function(_) => panic!("Tried to evaluate a function value"), } } } From ed4691bcdf9cefbeaa57f0b3b7bce0acadb11424 Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 28 Apr 2023 11:35:42 -0400 Subject: [PATCH 50/63] chore(ssa refactor): Fix no returns & duplicate main (#1243) * Implement first-class functions * Update crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs Co-authored-by: kevaundray * Implement intrinsics * Fix no return & duplicate main * bad git. remove duplicated functions * Remove Option in builder * Undo debug printing in driver --------- Co-authored-by: kevaundray --- crates/noirc_evaluator/src/ssa_refactor.rs | 2 +- .../src/ssa_refactor/ir/function.rs | 2 +- .../src/ssa_refactor/ir/printer.rs | 16 +++++++++-- .../src/ssa_refactor/ssa_builder/mod.rs | 15 ++++++----- .../src/ssa_refactor/ssa_gen/context.rs | 13 +++++---- .../src/ssa_refactor/ssa_gen/mod.rs | 27 ++++++++++++++----- .../src/ssa_refactor/ssa_gen/program.rs | 23 ++++++++++++++++ 7 files changed, 74 insertions(+), 24 deletions(-) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa_refactor.rs index 37f1ead2b07..fc45071e579 100644 --- a/crates/noirc_evaluator/src/ssa_refactor.rs +++ b/crates/noirc_evaluator/src/ssa_refactor.rs @@ -9,4 +9,4 @@ mod ir; mod ssa_builder; -mod ssa_gen; +pub mod ssa_gen; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index e40c086c0e6..6789e5364fe 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -15,7 +15,7 @@ use noirc_errors::Location; /// To reference external functions, one must first import the function signature /// into the current function's context. 
#[derive(Debug)] -pub(crate) struct Function { +pub struct Function { /// Maps instructions to source locations source_locations: HashMap, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 1471bd46e35..b0e6d787a6a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -46,7 +46,7 @@ pub(crate) fn display_block( ) -> Result { let block = &function.dfg[block_id]; - writeln!(f, " {}({}):", block_id, value_list(function, block.parameters()))?; + writeln!(f, " {}({}):", block_id, value_list_with_types(function, block.parameters()))?; for instruction in block.instructions() { display_instruction(function, *instruction, f)?; @@ -70,6 +70,16 @@ fn value(function: &Function, id: ValueId) -> String { } } +/// Display each value along with its type. E.g. `v0: Field, v1: u64, v2: u1` +fn value_list_with_types(function: &Function, values: &[ValueId]) -> String { + vecmap(values, |id| { + let value = value(function, *id); + let typ = function.dfg.type_of_value(*id); + format!("{value}: {typ}") + }) + .join(", ") +} + fn value_list(function: &Function, values: &[ValueId]) -> String { vecmap(values, |id| value(function, *id)).join(", ") } @@ -87,7 +97,9 @@ pub(crate) fn display_terminator( writeln!( f, " jmpif {} then: {}, else: {}", - condition, then_destination, else_destination + value(function, *condition), + then_destination, + else_destination ) } Some(TerminatorInstruction::Return { return_values }) => { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index 6c407dfcd42..35c918d645d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -8,7 +8,7 @@ use crate::ssa_refactor::ir::{ value::{Value, ValueId}, }; -use super::ir::instruction::Intrinsic; +use 
super::{ir::instruction::Intrinsic, ssa_gen::Ssa}; /// The per-function context for each ssa function being generated. /// @@ -20,7 +20,7 @@ use super::ir::instruction::Intrinsic; pub(crate) struct FunctionBuilder { current_function: Function, current_block: BasicBlockId, - finished_functions: Vec<(FunctionId, Function)>, + finished_functions: Vec, } impl FunctionBuilder { @@ -34,14 +34,15 @@ impl FunctionBuilder { /// Finish the current function and create a new function pub(crate) fn new_function(&mut self, name: String, function_id: FunctionId) { let new_function = Function::new(name, function_id); - let old_function = std::mem::replace(&mut self.current_function, new_function); + self.current_block = new_function.entry_block(); - self.finished_functions.push((self.current_function.id(), old_function)); + let old_function = std::mem::replace(&mut self.current_function, new_function); + self.finished_functions.push(old_function); } - pub(crate) fn finish(mut self) -> Vec<(FunctionId, Function)> { - self.finished_functions.push((self.current_function.id(), self.current_function)); - self.finished_functions + pub(crate) fn finish(mut self) -> Ssa { + self.finished_functions.push(self.current_function); + Ssa::new(self.finished_functions) } pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 909ed4ff84d..3a730cca827 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -37,18 +37,17 @@ pub(super) struct SharedContext { impl<'a> FunctionContext<'a> { pub(super) fn new( - function_id: FuncId, function_name: String, parameters: &Parameters, shared_context: &'a SharedContext, ) -> Self { - let new_id = shared_context.get_or_queue_function(function_id); + let function_id = shared_context + .pop_next_function_in_queue() + .expect("No 
function in queue for the FunctionContext to compile") + .1; - let mut this = Self { - definitions: HashMap::new(), - builder: FunctionBuilder::new(function_name, new_id), - shared_context, - }; + let builder = FunctionBuilder::new(function_name, function_id); + let mut this = Self { definitions: HashMap::new(), builder, shared_context }; this.add_parameters_to_scope(parameters); this } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 715f835ab7f..8b168b08836 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -1,6 +1,9 @@ mod context; +mod program; mod value; +pub use program::Ssa; + use context::SharedContext; use iter_extended::vecmap; use noirc_errors::Location; @@ -13,24 +16,36 @@ use self::{ use super::ir::{instruction::BinaryOp, types::Type, value::ValueId}; -pub(crate) fn generate_ssa(program: Program) { +pub fn generate_ssa(program: Program) -> Ssa { let context = SharedContext::new(program); - let main = context.program.main(); let main_id = Program::main_id(); - let main_name = main.name.clone(); + let main = context.program.main(); + + // Queue the main function for compilation + context.get_or_queue_function(main_id); - let mut function_context = FunctionContext::new(main_id, main_name, &main.parameters, &context); - function_context.codegen_expression(&main.body); + let mut function_context = FunctionContext::new(main.name.clone(), &main.parameters, &context); + function_context.codegen_function_body(&main.body); while let Some((src_function_id, dest_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; function_context.new_function(dest_id, function.name.clone(), &function.parameters); - function_context.codegen_expression(&function.body); + function_context.codegen_function_body(&function.body); } + + function_context.builder.finish() } impl<'a> 
FunctionContext<'a> { + /// Codegen a function's body and set its return value to that of its last parameter. + /// For functions returning nothing, this will be an empty list. + fn codegen_function_body(&mut self, body: &Expression) { + let return_value = self.codegen_expression(body); + let results = return_value.into_value_list(self); + self.builder.terminate_with_return(results); + } + fn codegen_expression(&mut self, expr: &Expression) -> Values { match expr { Expression::Ident(ident) => self.codegen_ident(ident), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs new file mode 100644 index 00000000000..03eb76dec50 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs @@ -0,0 +1,23 @@ +use std::fmt::Display; + +use crate::ssa_refactor::ir::function::Function; + +/// Contains the entire Ssa representation of the program +pub struct Ssa { + functions: Vec, +} + +impl Ssa { + pub fn new(functions: Vec) -> Self { + Self { functions } + } +} + +impl Display for Ssa { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for function in &self.functions { + writeln!(f, "{function}")?; + } + Ok(()) + } +} From 55ef8a2d3246a5edbf11a605c092b09151b120e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Fri, 28 Apr 2023 17:42:04 +0200 Subject: [PATCH 51/63] fix(wasm): add std after dependencies (#1245) --- crates/wasm/src/compile.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/wasm/src/compile.rs b/crates/wasm/src/compile.rs index cd34e685fa2..18bd30029bd 100644 --- a/crates/wasm/src/compile.rs +++ b/crates/wasm/src/compile.rs @@ -81,13 +81,13 @@ pub fn compile(args: JsValue) -> JsValue { let path = PathBuf::from(&options.entry_point); driver.create_local_crate(path, CrateType::Binary); - // We are always adding std lib implicitly. It comes bundled with binary. 
- add_noir_lib(&mut driver, "std"); - for dependency in options.optional_dependencies_set { add_noir_lib(&mut driver, dependency.as_str()); } + // We are always adding std lib implicitly. It comes bundled with binary. + add_noir_lib(&mut driver, "std"); + driver.check_crate(&options.compile_options).unwrap_or_else(|_| panic!("Crate check failed")); if options.contracts { From a0c6bfe824d25b0ad9a7452ee1c0da2723b94669 Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 28 Apr 2023 15:19:25 -0400 Subject: [PATCH 52/63] chore(ssa refactor): Fix loading from mutable parameters (#1248) * Implement first-class functions * Update crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs Co-authored-by: kevaundray * Implement intrinsics * Fix no return & duplicate main * bad git. remove duplicated functions * Remove Option in builder * Undo debug printing in driver * Fix loading from mutable parameters * Grammar * Fix storing to mutable arrays * Fix unused variable * Fix array loading * Change terminology --------- Co-authored-by: kevaundray --- .../src/ssa_refactor/ir/basic_block.rs | 9 +++ .../src/ssa_refactor/ir/instruction.rs | 4 +- .../src/ssa_refactor/ir/printer.rs | 2 +- .../src/ssa_refactor/ir/value.rs | 2 +- .../src/ssa_refactor/ssa_builder/mod.rs | 61 ++++++++----------- .../src/ssa_refactor/ssa_gen/context.rs | 40 ++++++++---- .../src/ssa_refactor/ssa_gen/mod.rs | 37 ++++++++--- .../src/ssa_refactor/ssa_gen/value.rs | 9 +++ 8 files changed, 105 insertions(+), 59 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index 13d1b3ca6f8..f6ca293f0fd 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -77,4 +77,13 @@ impl BasicBlock { None => vec![].into_iter(), } } + + /// Removes the given instruction from this block if present or panics otherwise. 
+ pub(crate) fn remove_instruction(&mut self, instruction: InstructionId) { + let index = + self.instructions.iter().position(|id| *id == instruction).unwrap_or_else(|| { + panic!("remove_instruction: No such instruction {instruction:?} in block") + }); + self.instructions.remove(index); + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 756c7ae5a13..545519e316f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -25,7 +25,7 @@ impl std::fmt::Display for Intrinsic { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Intrinsic::Println => write!(f, "println"), - Intrinsic::Sort => write!(f, "sort"), + Intrinsic::Sort => write!(f, "arraysort"), Intrinsic::ToBits(Endian::Big) => write!(f, "to_be_bits"), Intrinsic::ToBits(Endian::Little) => write!(f, "to_le_bits"), Intrinsic::ToRadix(Endian::Big) => write!(f, "to_be_radix"), @@ -39,7 +39,7 @@ impl Intrinsic { pub(crate) fn lookup(name: &str) -> Option { match name { "println" => Some(Intrinsic::Println), - "array_sort" => Some(Intrinsic::Sort), + "arraysort" => Some(Intrinsic::Sort), "to_le_radix" => Some(Intrinsic::ToRadix(Endian::Little)), "to_be_radix" => Some(Intrinsic::ToRadix(Endian::Big)), "to_le_bits" => Some(Intrinsic::ToBits(Endian::Little)), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index b0e6d787a6a..a0ab65bf639 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -143,7 +143,7 @@ pub(crate) fn display_instruction( Instruction::Allocate { size } => writeln!(f, "alloc {size} fields"), Instruction::Load { address } => writeln!(f, "load {}", show(*address)), Instruction::Store { address, value } => { - writeln!(f, "store {} at {}", show(*address), 
show(*value)) + writeln!(f, "store {} at {}", show(*value), show(*address)) } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index d7d8d8a41ab..868aee2199e 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -12,7 +12,7 @@ pub(crate) type ValueId = Id; /// Value is the most basic type allowed in the IR. /// Transition Note: A Id is similar to `NodeId` in our previous IR. -#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] pub(crate) enum Value { /// This value was created due to an instruction /// diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index 35c918d645d..df80799c28a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -8,7 +8,10 @@ use crate::ssa_refactor::ir::{ value::{Value, ValueId}, }; -use super::{ir::instruction::Intrinsic, ssa_gen::Ssa}; +use super::{ + ir::instruction::{InstructionId, Intrinsic}, + ssa_gen::Ssa, +}; /// The per-function context for each ssa function being generated. /// @@ -18,7 +21,7 @@ use super::{ir::instruction::Intrinsic, ssa_gen::Ssa}; /// Contrary to the name, this struct has the capacity to build as many /// functions as needed, although it is limited to one function at a time. 
pub(crate) struct FunctionBuilder { - current_function: Function, + pub(super) current_function: Function, current_block: BasicBlockId, finished_functions: Vec, } @@ -114,12 +117,7 @@ impl FunctionBuilder { offset: ValueId, type_to_load: Type, ) -> ValueId { - if let Some(offset) = self.current_function.dfg.get_numeric_constant(offset) { - if !offset.is_zero() { - let offset = self.field_constant(offset); - address = self.insert_binary(address, BinaryOp::Add, offset); - } - }; + address = self.insert_binary(address, BinaryOp::Add, offset); self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] } @@ -205,32 +203,6 @@ impl FunctionBuilder { self.terminate_block_with(TerminatorInstruction::Return { return_values }); } - /// Mutates a load instruction into a store instruction. - /// - /// This function is used while generating ssa-form for assignments currently. - /// To re-use most of the expression infrastructure, the lvalue of an assignment - /// is compiled as an expression and to assign to it we replace the final load - /// (which should always be present to load a mutable value) with a store of the - /// assigned value. - pub(crate) fn mutate_load_into_store(&mut self, load_result: ValueId, value_to_store: ValueId) { - let (instruction, address) = match &self.current_function.dfg[load_result] { - Value::Instruction { instruction, .. 
} => { - match &self.current_function.dfg[*instruction] { - Instruction::Load { address } => (*instruction, *address), - other => { - panic!("mutate_load_into_store: Expected Load instruction, found {other:?}") - } - } - } - other => panic!("mutate_load_into_store: Expected Load instruction, found {other:?}"), - }; - - let store = Instruction::Store { address, value: value_to_store }; - self.current_function.dfg.replace_instruction(instruction, store); - // Clear the results of the previous load for safety - self.current_function.dfg.make_instruction_results(instruction, None); - } - /// Returns a ValueId pointing to the given function or imports the function /// into the current function if it was not already, and returns that ID. pub(crate) fn import_function(&mut self, function: FunctionId) -> ValueId { @@ -243,4 +215,25 @@ impl FunctionBuilder { Intrinsic::lookup(name) .map(|intrinsic| self.current_function.dfg.import_intrinsic(intrinsic)) } + + /// Removes the given instruction from the current block or panics otherwise. 
+ pub(crate) fn remove_instruction_from_current_block(&mut self, instruction: InstructionId) { + self.current_function.dfg[self.current_block].remove_instruction(instruction); + } +} + +impl std::ops::Index for FunctionBuilder { + type Output = Value; + + fn index(&self, id: ValueId) -> &Self::Output { + &self.current_function.dfg[id] + } +} + +impl std::ops::Index for FunctionBuilder { + type Output = Instruction; + + fn index(&self, id: InstructionId) -> &Self::Output { + &self.current_function.dfg[id] + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 3a730cca827..df54d5bd079 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -14,7 +14,7 @@ use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId; use crate::ssa_refactor::ssa_builder::FunctionBuilder; -use super::value::{Tree, Values}; +use super::value::{Tree, Value, Values}; // TODO: Make this a threadsafe queue so we can compile functions in parallel type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; @@ -63,8 +63,8 @@ impl<'a> FunctionContext<'a> { /// The returned parameter type list will be flattened, so any struct parameters will /// be returned as one entry for each field (recursively). fn add_parameters_to_scope(&mut self, parameters: &Parameters) { - for (id, _, _, typ) in parameters { - self.add_parameter_to_scope(*id, typ); + for (id, mutable, _, typ) in parameters { + self.add_parameter_to_scope(*id, typ, *mutable); } } @@ -72,14 +72,34 @@ impl<'a> FunctionContext<'a> { /// /// Single is in quotes here because in the case of tuple parameters, the tuple is flattened /// into a new parameter for each field recursively. 
- fn add_parameter_to_scope(&mut self, parameter_id: LocalId, parameter_type: &ast::Type) { + fn add_parameter_to_scope( + &mut self, + parameter_id: LocalId, + parameter_type: &ast::Type, + mutable: bool, + ) { // Add a separate parameter for each field type in 'parameter_type' - let parameter_value = - Self::map_type(parameter_type, |typ| self.builder.add_parameter(typ).into()); + let parameter_value = Self::map_type(parameter_type, |typ| { + let value = self.builder.add_parameter(typ); + if mutable { + self.new_mutable_variable(value) + } else { + value.into() + } + }); self.definitions.insert(parameter_id, parameter_value); } + /// Allocate a single slot of memory and store into it the given initial value of the variable. + /// Always returns a Value::Mutable wrapping the allocate instruction. + pub(super) fn new_mutable_variable(&mut self, value_to_store: ValueId) -> Value { + let alloc = self.builder.insert_allocate(1); + self.builder.insert_store(alloc, value_to_store); + let typ = self.builder.type_of_value(value_to_store); + Value::Mutable(alloc, typ) + } + /// Maps the given type to a Tree of the result type. /// /// This can be used to (for example) flatten a tuple type, creating @@ -224,12 +244,8 @@ impl<'a> FunctionContext<'a> { } } (Tree::Leaf(lhs), Tree::Leaf(rhs)) => { - // Re-evaluating these should have no effect - let (lhs, rhs) = (lhs.eval(self), rhs.eval(self)); - - // Expect lhs to be previously evaluated. If it is a load we need to undo - // the load to get the address to store to. 
- self.builder.mutate_load_into_store(lhs, rhs); + let (lhs, rhs) = (lhs.eval_reference(), rhs.eval(self)); + self.builder.insert_store(lhs, rhs); } (lhs, rhs) => { unreachable!( diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 8b168b08836..4b93a7e1185 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -161,16 +161,21 @@ impl<'a> FunctionContext<'a> { fn codegen_index(&mut self, index: &ast::Index) -> Values { let array = self.codegen_non_tuple_expression(&index.collection); - self.codegen_array_index(array, &index.index, &index.element_type) + self.codegen_array_index(array, &index.index, &index.element_type, true) } /// This is broken off from codegen_index so that it can also be - /// used to codegen a LValue::Index + /// used to codegen a LValue::Index. + /// + /// Set load_result to true to load from each relevant index of the array + /// (it may be multiple in the case of tuples). Set it to false to instead + /// return a reference to each element, for use with the store instruction. 
fn codegen_array_index( &mut self, array: super::ir::value::ValueId, index: &ast::Expression, element_type: &ast::Type, + load_result: bool, ) -> Values { let base_offset = self.codegen_non_tuple_expression(index); @@ -183,7 +188,12 @@ impl<'a> FunctionContext<'a> { Self::map_type(element_type, |typ| { let offset = self.make_offset(base_index, field_index); field_index += 1; - self.builder.insert_load(array, offset, typ).into() + if load_result { + self.builder.insert_load(array, offset, typ) + } else { + self.builder.insert_binary(array, BinaryOp::Add, offset) + } + .into() }) } @@ -292,10 +302,7 @@ impl<'a> FunctionContext<'a> { if let_expr.mutable { values.map_mut(|value| { let value = value.eval(self); - // Size is always 1 here since we're recursively unpacking tuples - let alloc = self.builder.insert_allocate(1); - self.builder.insert_store(alloc, value); - alloc.into() + Tree::Leaf(self.new_mutable_variable(value)) }); } @@ -312,16 +319,28 @@ impl<'a> FunctionContext<'a> { fn codegen_assign(&mut self, assign: &ast::Assign) -> Values { let lhs = self.codegen_lvalue(&assign.lvalue); let rhs = self.codegen_expression(&assign.expression); + self.assign(lhs, rhs); self.unit_value() } fn codegen_lvalue(&mut self, lvalue: &ast::LValue) -> Values { match lvalue { - ast::LValue::Ident(ident) => self.codegen_ident(ident), + ast::LValue::Ident(ident) => { + // Do not .eval the Values here! We do not want to load from any references within + // since we want to return the references instead + match &ident.definition { + ast::Definition::Local(id) => self.lookup(*id), + other => panic!("Unexpected definition found for mutable value: {other}"), + } + } ast::LValue::Index { array, index, element_type, location: _ } => { + // Note that unlike the Ident case, we're .eval'ing the array here. 
+ // This is because arrays are already references and thus a mutable reference + // to an array would be a Value::Mutable( Value::Mutable ( address ) ), and we + // only need the inner mutable value. let array = self.codegen_lvalue(array).into_leaf().eval(self); - self.codegen_array_index(array, index, element_type) + self.codegen_array_index(array, index, element_type, false) } ast::LValue::MemberAccess { object, field_index } => { let object = self.codegen_lvalue(object); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 410e375fcd6..fa27e70ad9b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -32,6 +32,15 @@ impl Value { } } } + + /// Evaluates the value, returning a reference to the mutable variable found within + /// if possible. Compared to .eval, this method will not load from self if it is Value::Mutable. + pub(super) fn eval_reference(self) -> IrValueId { + match self { + Value::Normal(value) => value, + Value::Mutable(address, _) => address, + } + } } pub(super) type Values = Tree; From 66b7105d0c66cc679580f5b751076cf3da2cd20a Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Fri, 28 Apr 2023 21:19:09 +0100 Subject: [PATCH 53/63] chore(ci): Utilize new workflow to build binaries (#1250) * chore(ci): Utilize new workflow to build binaries * Update release.yml --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- .github/workflows/release.yml | 38 ++++------------------------------- 1 file changed, 4 insertions(+), 34 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c7eb6df168e..f242f10d971 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -58,8 +58,8 @@ jobs: git commit -m 'chore: Update lockfile' git push - build-linux-binaries: - name: Build linux binaries + build-binaries: 
+ name: Build binaries needs: [release-please] if: ${{ needs.release-please.outputs.tag-name }} runs-on: ubuntu-latest @@ -67,41 +67,11 @@ jobs: - name: Dispatch to build-nargo uses: benc-uk/workflow-dispatch@v1 with: - workflow: publish-linux.yml + workflow: publish.yml repo: noir-lang/build-nargo ref: master token: ${{ secrets.NOIR_REPO_TOKEN }} - inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' - - build-windows-binaries: - name: Build windows binaries - needs: [release-please] - if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest - steps: - - name: Dispatch to build-nargo - uses: benc-uk/workflow-dispatch@v1 - with: - workflow: publish-x86_64-pc-windows-wasm.yml - repo: noir-lang/build-nargo - ref: master - token: ${{ secrets.NOIR_REPO_TOKEN }} - inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' - - build-mac-binaries: - name: Build mac binaries - needs: [release-please] - if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest - steps: - - name: Dispatch to build-nargo - uses: benc-uk/workflow-dispatch@v1 - with: - workflow: publish-apple-darwin-wasm.yml - repo: noir-lang/build-nargo - ref: master - token: ${{ secrets.NOIR_REPO_TOKEN }} - inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' + inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}", "publish": true }' publish-wasm: name: Publish noir_wasm package From 049773bd4d08afaf70c3cb1e4c658df0e6f50ac6 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Fri, 28 Apr 2023 21:56:02 +0100 Subject: [PATCH 54/63] chore(noir): Release 0.5.0 (#1202) * chore(noir): Release 0.5.0 * chore: Update lockfile --- CHANGELOG.md | 18 ++++++++++++++++++ Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- flake.nix | 2 +- 4 files changed, 31 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9e03152c03a..292c08fb8c2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # 
Changelog +## [0.5.0](https://github.com/noir-lang/noir/compare/v0.4.1...v0.5.0) (2023-04-28) + + +### âš  BREAKING CHANGES + +* Switch to aztec_backend that uses upstream BB & UltraPlonk ([#1114](https://github.com/noir-lang/noir/issues/1114)) + +### Features + +* **noir:** added `distinct` keyword ([#1219](https://github.com/noir-lang/noir/issues/1219)) ([3a65f30](https://github.com/noir-lang/noir/commit/3a65f304c25e8239f9735ce1e6dee29d7eecc244)) +* **noir:** added assert keyword ([#1227](https://github.com/noir-lang/noir/issues/1227)) ([0dc2cac](https://github.com/noir-lang/noir/commit/0dc2cac5bc26d277a0e6377fd774e0ec9c8d3531)) +* Switch to aztec_backend that uses upstream BB & UltraPlonk ([#1114](https://github.com/noir-lang/noir/issues/1114)) ([f14fe0b](https://github.com/noir-lang/noir/commit/f14fe0b97e75eb5be39a48675149cf08d718abf6)) + + +### Bug Fixes + +* **wasm:** add std after dependencies ([#1245](https://github.com/noir-lang/noir/issues/1245)) ([55ef8a2](https://github.com/noir-lang/noir/commit/55ef8a2d3246a5edbf11a605c092b09151b120e6)) + ## [0.4.1](https://github.com/noir-lang/noir/compare/v0.4.0...v0.4.1) (2023-04-20) diff --git a/Cargo.lock b/Cargo.lock index 85b5c12aa46..a62d34f67a7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -118,7 +118,7 @@ checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" [[package]] name = "arena" -version = "0.4.1" +version = "0.5.0" dependencies = [ "generational-arena", ] @@ -1178,7 +1178,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.4.1" +version = "0.5.0" dependencies = [ "cfg-if 1.0.0", "codespan-reporting 0.9.5", @@ -1674,7 +1674,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.4.1" +version = "0.5.0" [[package]] name = "itertools" @@ -1884,7 +1884,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = 
"0.4.1" +version = "0.5.0" dependencies = [ "acvm", "iter-extended", @@ -1898,7 +1898,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "assert_cmd", @@ -1930,7 +1930,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "build-data", @@ -1946,7 +1946,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "iter-extended", @@ -1958,7 +1958,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "clap", @@ -1973,7 +1973,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.4.1" +version = "0.5.0" dependencies = [ "chumsky", "codespan", @@ -1984,7 +1984,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "arena", @@ -2000,7 +2000,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "arena", diff --git a/Cargo.toml b/Cargo.toml index 26eec846ef9..2ae36eee6f1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ default-members = ["crates/nargo_cli"] [workspace.package] # x-release-please-start-version -version = "0.4.1" +version = "0.5.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" diff --git a/flake.nix b/flake.nix index 28859ebb2b9..03d8f6be43f 100644 --- a/flake.nix +++ b/flake.nix @@ -106,7 +106,7 @@ commonArgs = environment // { pname = "noir"; # x-release-please-start-version - version = "0.4.1"; + version = "0.5.0"; # x-release-please-end # Use our custom stdenv to build and test our Rust project From 41d96ae9bbb9ce7010451cae5dc1f66d5e57d45b Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 1 May 2023 15:49:19 +0100 Subject: [PATCH 55/63] chore(nargo): replace `aztec_backend` with 
`acvm-backend-barretenberg` (#1226) * chore: replace `aztec_backend` with `acvm-backend-barretenberg` * feat: update to ACVM 0.10.0 * chore: move `ComputeMerkleRoot` to same match arm as `HashToField128Security` * chore: bump backend commit * feat: update stdlib to use new merkle black box function * fix: bump commit of barretenberg to match acvm-backend-barretenberg * feat: update `merkle_insert` to use new `compute_merkle_root` function * chore: update to use ACVM 0.10.3 * chore: bump backend commit --- Cargo.lock | 144 ++++-------------- Cargo.toml | 2 +- crates/nargo/src/ops/execute.rs | 7 +- crates/nargo_cli/Cargo.toml | 7 +- crates/nargo_cli/src/backends.rs | 14 +- crates/nargo_cli/src/cli/check_cmd.rs | 2 +- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 2 +- crates/nargo_cli/src/cli/compile_cmd.rs | 2 +- crates/nargo_cli/src/cli/execute_cmd.rs | 2 +- crates/nargo_cli/src/cli/gates_cmd.rs | 2 +- crates/nargo_cli/src/cli/print_acir_cmd.rs | 2 +- crates/nargo_cli/src/cli/prove_cmd.rs | 2 +- crates/nargo_cli/src/cli/test_cmd.rs | 4 +- crates/nargo_cli/src/cli/verify_cmd.rs | 2 +- .../tests/test_data/merkle_insert/src/main.nr | 4 +- crates/noirc_evaluator/src/lib.rs | 2 +- .../src/ssa/acir_gen/constraints.rs | 20 +-- .../src/ssa/acir_gen/internal_var.rs | 2 +- .../src/ssa/acir_gen/operations/binary.rs | 4 +- .../src/ssa/acir_gen/operations/bitwise.rs | 4 +- .../src/ssa/acir_gen/operations/cmp.rs | 4 +- .../src/ssa/acir_gen/operations/intrinsics.rs | 2 +- .../src/ssa/acir_gen/operations/not.rs | 2 +- .../src/ssa/acir_gen/operations/sort.rs | 5 +- crates/noirc_evaluator/src/ssa/builtin.rs | 18 ++- flake.lock | 6 +- noir_stdlib/src/merkle.nr | 13 +- 27 files changed, 93 insertions(+), 187 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a62d34f67a7..b281ed5a9d0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "acir" -version = "0.9.0" +version = "0.10.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "018148d69bf14422b1c1d62909a241af2a7f51fec064feb2b01de88fb02b11b8" +checksum = "510b65efd4d20bf266185ce0a5dc7d29bcdd196a6a1835c20908fd88040de76c" dependencies = [ "acir_field", "flate2", @@ -16,9 +16,9 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d40dac25cf6be6335dd86286caeac859afd0dc74a4a75c64eed041b0f00a278" +checksum = "f4f032e710c67fd146caedc8fe1dea6e95f01ab59453e42d59b604a51fef3dfe" dependencies = [ "ark-bn254", "ark-ff", @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "acvm" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17b7bc8f2b2215075b8e080ba3a0b8b7d759f04bc44b27e5bb8d845f4c77f20" +checksum = "2611266039740ffd1978f23258bd6ce3166c22cf15b8227685c2f3bb20ae2ee0" dependencies = [ "acir", "acvm_stdlib", @@ -46,11 +46,30 @@ dependencies = [ "thiserror", ] +[[package]] +name = "acvm-backend-barretenberg" +version = "0.0.0" +source = "git+https://github.com/noir-lang/aztec_backend?rev=c9fb9e806f1400a2ff7594a0669bec56025220bb#c9fb9e806f1400a2ff7594a0669bec56025220bb" +dependencies = [ + "acvm", + "barretenberg-sys", + "blake2", + "dirs 3.0.2", + "futures-util", + "getrandom", + "indicatif", + "pkg-config", + "reqwest", + "rust-embed", + "tokio", + "wasmer", +] + [[package]] name = "acvm_stdlib" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ce2d19a9d1e7ff1bf415ed909b43031e33ef6df21be70e470bb1817b3e6989" +checksum = "a5ec51160c66eba75dc15a028a2391675386fd395b3897478d89a386c64a48dd" dependencies = [ "acir", ] @@ -305,27 +324,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "barretenberg_static_lib" -version = "0.1.0" -source = 
"git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" -dependencies = [ - "barretenberg-sys", - "common", -] - -[[package]] -name = "barretenberg_wasm" -version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" -dependencies = [ - "common", - "getrandom", - "pkg-config", - "rust-embed", - "wasmer", -] - [[package]] name = "base64" version = "0.21.0" @@ -618,21 +616,6 @@ dependencies = [ "tracing-error", ] -[[package]] -name = "common" -version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" -dependencies = [ - "acvm", - "blake2", - "dirs 3.0.2", - "futures-util", - "indicatif", - "reqwest", - "sled", - "tokio", -] - [[package]] name = "console" version = "0.15.5" @@ -1202,16 +1185,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "fuchsia-cprng" version = "0.1.1" @@ -1285,15 +1258,6 @@ dependencies = [ "slab", ] -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - [[package]] name = "generational-arena" version = "0.2.8" @@ -1761,16 +1725,6 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" -[[package]] -name = "lock_api" -version = "0.4.9" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" -dependencies = [ - "autocfg", - "scopeguard", -] - [[package]] name = "log" version = "0.4.17" @@ -1901,10 +1855,9 @@ name = "nargo_cli" version = "0.5.0" dependencies = [ "acvm", + "acvm-backend-barretenberg", "assert_cmd", "assert_fs", - "barretenberg_static_lib", - "barretenberg_wasm", "build-data", "cfg-if 1.0.0", "clap", @@ -2124,31 +2077,6 @@ version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if 1.0.0", - "instant", - "libc", - "redox_syscall", - "smallvec", - "winapi", -] - [[package]] name = "paste" version = "1.0.12" @@ -2941,22 +2869,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "sled" -version = "0.34.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f96b4737c2ce5987354855aed3797279def4ebf734436c6aa4552cf8e169935" -dependencies = [ - "crc32fast", - "crossbeam-epoch", - "crossbeam-utils", - "fs2", - "fxhash", - "libc", - "log", - "parking_lot", -] - [[package]] name = "smallvec" version = "1.10.0" diff --git a/Cargo.toml b/Cargo.toml index 2ae36eee6f1..34dfe88e2ba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,7 +24,7 @@ edition = "2021" rust-version = "1.66" [workspace.dependencies] -acvm = "0.9.0" +acvm = "0.10.3" arena = { path = 
"crates/arena" } fm = { path = "crates/fm" } iter-extended = { path = "crates/iter-extended" } diff --git a/crates/nargo/src/ops/execute.rs b/crates/nargo/src/ops/execute.rs index eb82df60d41..e4c8a5afbb5 100644 --- a/crates/nargo/src/ops/execute.rs +++ b/crates/nargo/src/ops/execute.rs @@ -1,5 +1,5 @@ -use acvm::PartialWitnessGenerator; use acvm::{acir::circuit::Circuit, pwg::block::Blocks}; +use acvm::{PartialWitnessGenerator, PartialWitnessGeneratorStatus}; use noirc_abi::WitnessMap; use crate::NargoError; @@ -10,9 +10,8 @@ pub fn execute_circuit( mut initial_witness: WitnessMap, ) -> Result { let mut blocks = Blocks::default(); - let (unresolved_opcodes, oracles) = - backend.solve(&mut initial_witness, &mut blocks, circuit.opcodes)?; - if !unresolved_opcodes.is_empty() || !oracles.is_empty() { + let solver_status = backend.solve(&mut initial_witness, &mut blocks, circuit.opcodes)?; + if matches!(solver_status, PartialWitnessGeneratorStatus::RequiresOracleData { .. }) { todo!("Add oracle support to nargo execute") } diff --git a/crates/nargo_cli/Cargo.toml b/crates/nargo_cli/Cargo.toml index 40ab4b92459..2568f2a86f5 100644 --- a/crates/nargo_cli/Cargo.toml +++ b/crates/nargo_cli/Cargo.toml @@ -37,8 +37,7 @@ termcolor = "1.1.2" color-eyre = "0.6.2" # Backends -aztec_backend = { optional = true, package = "barretenberg_static_lib", git = "https://github.com/noir-lang/aztec_backend", rev = "e3d4504f15e1295e637c4da80b1d08c87c267c45" } -aztec_wasm_backend = { optional = true, package = "barretenberg_wasm", git = "https://github.com/noir-lang/aztec_backend", rev = "e3d4504f15e1295e637c4da80b1d08c87c267c45" } +acvm-backend-barretenberg = { git = "https://github.com/noir-lang/aztec_backend", rev = "c9fb9e806f1400a2ff7594a0669bec56025220bb", default-features=false } [dev-dependencies] tempdir = "0.3.7" @@ -49,6 +48,6 @@ predicates = "2.1.5" [features] default = ["plonk_bn254"] # The plonk backend can only use bn254, so we do not specify the field 
-plonk_bn254 = ["aztec_backend"] -plonk_bn254_wasm = ["aztec_wasm_backend"] +plonk_bn254 = ["acvm-backend-barretenberg/native"] +plonk_bn254_wasm = ["acvm-backend-barretenberg/wasm"] diff --git a/crates/nargo_cli/src/backends.rs b/crates/nargo_cli/src/backends.rs index e1113279f80..bbec5c99006 100644 --- a/crates/nargo_cli/src/backends.rs +++ b/crates/nargo_cli/src/backends.rs @@ -1,14 +1,8 @@ -cfg_if::cfg_if! { - if #[cfg(feature = "plonk_bn254")] { - pub(crate) use aztec_backend::Plonk as ConcreteBackend; - } else if #[cfg(feature = "plonk_bn254_wasm")] { - pub(crate) use aztec_wasm_backend::Plonk as ConcreteBackend; - } else { - compile_error!("please specify a backend to compile with"); - } -} +pub(crate) use acvm_backend_barretenberg::Barretenberg as ConcreteBackend; + +#[cfg(not(any(feature = "plonk_bn254", feature = "plonk_bn254_wasm")))] +compile_error!("please specify a backend to compile with"); -// As we have 3 feature flags we must test all 3 potential pairings to ensure they're mutually exclusive. 
#[cfg(all(feature = "plonk_bn254", feature = "plonk_bn254_wasm"))] compile_error!( "feature \"plonk_bn254\" and feature \"plonk_bn254_wasm\" cannot be enabled at the same time" diff --git a/crates/nargo_cli/src/cli/check_cmd.rs b/crates/nargo_cli/src/cli/check_cmd.rs index 557093444a1..3049c830def 100644 --- a/crates/nargo_cli/src/cli/check_cmd.rs +++ b/crates/nargo_cli/src/cli/check_cmd.rs @@ -24,7 +24,7 @@ pub(crate) fn run(args: CheckCommand, config: NargoConfig) -> Result<(), CliErro } fn check_from_path>(p: P, compile_options: &CompileOptions) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let mut driver = Resolver::resolve_root_manifest(p.as_ref(), backend.np_language())?; diff --git a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs index 319a5722708..f23502a15b5 100644 --- a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -19,7 +19,7 @@ pub(crate) struct CodegenVerifierCommand { } pub(crate) fn run(args: CodegenVerifierCommand, config: NargoConfig) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); // TODO(#1201): Should this be a utility function? 
let circuit_build_path = args diff --git a/crates/nargo_cli/src/cli/compile_cmd.rs b/crates/nargo_cli/src/cli/compile_cmd.rs index 50c21486385..78b52003166 100644 --- a/crates/nargo_cli/src/cli/compile_cmd.rs +++ b/crates/nargo_cli/src/cli/compile_cmd.rs @@ -30,7 +30,7 @@ pub(crate) struct CompileCommand { pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { let circuit_dir = config.program_dir.join(TARGET_DIR); - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); // If contracts is set we're compiling every function in a 'contract' rather than just 'main'. if args.contracts { diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs b/crates/nargo_cli/src/cli/execute_cmd.rs index 9d1429bbda7..adeefc860a5 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -46,7 +46,7 @@ fn execute_with_path( program_dir: &Path, compile_options: &CompileOptions, ) -> Result<(Option, WitnessMap), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let compiled_program = compile_circuit(&backend, program_dir, compile_options)?; diff --git a/crates/nargo_cli/src/cli/gates_cmd.rs b/crates/nargo_cli/src/cli/gates_cmd.rs index a5093b4d775..9fe9f5c7a53 100644 --- a/crates/nargo_cli/src/cli/gates_cmd.rs +++ b/crates/nargo_cli/src/cli/gates_cmd.rs @@ -23,7 +23,7 @@ fn count_gates_with_path>( program_dir: P, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let compiled_program = compile_circuit(&backend, program_dir.as_ref(), compile_options)?; let num_opcodes = compiled_program.circuit.opcodes.len(); diff --git a/crates/nargo_cli/src/cli/print_acir_cmd.rs b/crates/nargo_cli/src/cli/print_acir_cmd.rs index 589cc490f40..38b841121bc 100644 --- 
a/crates/nargo_cli/src/cli/print_acir_cmd.rs +++ b/crates/nargo_cli/src/cli/print_acir_cmd.rs @@ -22,7 +22,7 @@ fn print_acir_with_path>( program_dir: P, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let compiled_program = compile_circuit(&backend, program_dir.as_ref(), compile_options)?; println!("{}", compiled_program.circuit); diff --git a/crates/nargo_cli/src/cli/prove_cmd.rs b/crates/nargo_cli/src/cli/prove_cmd.rs index fd60f004e2b..cecdee23fee 100644 --- a/crates/nargo_cli/src/cli/prove_cmd.rs +++ b/crates/nargo_cli/src/cli/prove_cmd.rs @@ -65,7 +65,7 @@ pub(crate) fn prove_with_path>( check_proof: bool, compile_options: &CompileOptions, ) -> Result, CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let preprocessed_program = match circuit_build_path { Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, diff --git a/crates/nargo_cli/src/cli/test_cmd.rs b/crates/nargo_cli/src/cli/test_cmd.rs index d168e6c39ca..65f8265a862 100644 --- a/crates/nargo_cli/src/cli/test_cmd.rs +++ b/crates/nargo_cli/src/cli/test_cmd.rs @@ -32,7 +32,7 @@ fn run_tests( test_name: &str, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let mut driver = Resolver::resolve_root_manifest(program_dir, backend.np_language())?; @@ -79,7 +79,7 @@ fn run_test( driver: &Driver, config: &CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let program = driver .compile_no_check(config, main) diff --git a/crates/nargo_cli/src/cli/verify_cmd.rs b/crates/nargo_cli/src/cli/verify_cmd.rs index cf2e4859091..07b7e351ee9 100644 --- 
a/crates/nargo_cli/src/cli/verify_cmd.rs +++ b/crates/nargo_cli/src/cli/verify_cmd.rs @@ -43,7 +43,7 @@ fn verify_with_path>( circuit_build_path: Option

, compile_options: CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let preprocessed_program = match circuit_build_path { Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, diff --git a/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr b/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr index 9d612977fa8..3ab4efb64c0 100644 --- a/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr +++ b/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr @@ -12,8 +12,8 @@ fn main( let old_leaf_exists = std::merkle::check_membership(old_root, old_leaf, index, old_hash_path); constrain old_leaf_exists == 1; constrain old_root == std::merkle::compute_root_from_leaf(old_leaf, index, old_hash_path); - let new_leaf_exists = std::merkle::check_membership(new_root, leaf, index, old_hash_path); - constrain new_leaf_exists == 1; + let calculated_root = std::merkle::compute_merkle_root(leaf, index, old_hash_path); + constrain new_root == calculated_root; let h = std::hash::mimc_bn254(mimc_input); // Regression test for PR #891 diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 64a02061b0f..438ada0167c 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -174,7 +174,7 @@ impl Evaluator { let inter_var_witness = self.add_witness_to_cs(); // Link that witness to the arithmetic gate - let constraint = &arithmetic_gate - &inter_var_witness; + let constraint = &arithmetic_gate - inter_var_witness; self.opcodes.push(AcirOpcode::Arithmetic(constraint)); inter_var_witness } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs b/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs index 8257e0c9f9a..11371dc54a6 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs @@ -31,7 +31,7 @@ 
pub(crate) fn mul_with_witness( let a_arith; let a_arith = if !a.mul_terms.is_empty() && !b.is_const() { let a_witness = evaluator.create_intermediate_variable(a.clone()); - a_arith = Expression::from(&a_witness); + a_arith = Expression::from(a_witness); &a_arith } else { a @@ -42,7 +42,7 @@ pub(crate) fn mul_with_witness( a_arith } else { let b_witness = evaluator.create_intermediate_variable(b.clone()); - b_arith = Expression::from(&b_witness); + b_arith = Expression::from(b_witness); &b_arith } } else { @@ -54,9 +54,9 @@ pub(crate) fn mul_with_witness( //a*b pub(crate) fn mul(a: &Expression, b: &Expression) -> Expression { if a.is_const() { - return b * &a.q_c; + return b * a.q_c; } else if b.is_const() { - return a * &b.q_c; + return a * b.q_c; } else if !(a.is_linear() && b.is_linear()) { unreachable!("Can only multiply linear terms"); } @@ -125,9 +125,9 @@ pub(crate) fn subtract(a: &Expression, k: FieldElement, b: &Expression) -> Expre // TODO in either case, we can put this in ACIR, if its useful pub(crate) fn add(a: &Expression, k: FieldElement, b: &Expression) -> Expression { if a.is_const() { - return (b * &k) + &a.q_c; + return (b * k) + a.q_c; } else if b.is_const() { - return a.clone() + &(k * b.q_c); + return a.clone() + (k * b.q_c); } let mut output = Expression::from_field(a.q_c + k * b.q_c); @@ -497,7 +497,7 @@ pub(crate) fn evaluate_truncate( if let Some(a_c) = lhs.to_const() { let mut a_big = BigUint::from_bytes_be(&a_c.to_be_bytes()); a_big %= exp_big; - return Expression::from(&FieldElement::from_be_bytes_reduce(&a_big.to_bytes_be())); + return Expression::from(FieldElement::from_be_bytes_reduce(&a_big.to_bytes_be())); } let exp = FieldElement::from_be_bytes_reduce(&exp_big.to_bytes_be()); @@ -524,7 +524,7 @@ pub(crate) fn evaluate_truncate( let my_constraint = add(&res, -FieldElement::one(), lhs); evaluator.push_opcode(AcirOpcode::Arithmetic(my_constraint)); - Expression::from(&b_witness) + Expression::from(b_witness) } pub(crate) fn 
evaluate_udiv( @@ -552,8 +552,8 @@ pub(crate) fn evaluate_udiv( //range check q<=a try_range_constraint(q_witness, bit_size, evaluator); // a-b*q-r = 0 - let mut d = mul_with_witness(evaluator, rhs, &Expression::from(&q_witness)); - d = add(&d, FieldElement::one(), &Expression::from(&r_witness)); + let mut d = mul_with_witness(evaluator, rhs, &Expression::from(q_witness)); + d = add(&d, FieldElement::one(), &Expression::from(r_witness)); d = mul_with_witness(evaluator, &d, predicate); let div_euclidean = subtract(&pa, FieldElement::one(), &d); diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs index 8e6e16776a9..27d6b0ec25b 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs @@ -98,7 +98,7 @@ impl InternalVar { /// Expression, this method is infallible. pub(crate) fn from_witness(witness: Witness) -> InternalVar { InternalVar { - expression: Expression::from(&witness), + expression: Expression::from(witness), cached_witness: Some(witness), id: None, } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs index bf1f59391f9..87280eb1fde 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs @@ -146,12 +146,12 @@ pub(crate) fn evaluate( if r_value.is_zero() { panic!("Panic - division by zero"); } else { - (l_c.expression() * &r_value.inverse()).into() + (l_c.expression() * r_value.inverse()).into() } } else { //TODO avoid creating witnesses here. 
let x_witness = acir_gen.var_cache.get_or_compute_witness(r_c, evaluator).expect("unexpected constant expression"); - let inverse = Expression::from(&constraints::evaluate_inverse( + let inverse = Expression::from(constraints::evaluate_inverse( x_witness, &predicate, evaluator, )); InternalVar::from(constraints::mul_with_witness( diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs index 947cf93edd9..f8ca271835e 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs @@ -155,9 +155,9 @@ pub(super) fn evaluate_bitwise( constraints::subtract( &Expression::from_field(max), FieldElement::one(), - &Expression::from(&result), + &Expression::from(result), ) } else { - Expression::from(&result) + Expression::from(result) } } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/cmp.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/cmp.rs index 4abd34f6e01..0f8091e2f6f 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/cmp.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/cmp.rs @@ -69,7 +69,7 @@ pub(super) fn evaluate_neq( .get_or_compute_witness(x, evaluator) .expect("unexpected constant expression"); - return Expression::from(&constraints::evaluate_zero_equality(x_witness, evaluator)); + return Expression::from(constraints::evaluate_zero_equality(x_witness, evaluator)); } // Arriving here means that `lhs` and `rhs` are not Arrays @@ -95,7 +95,7 @@ pub(super) fn evaluate_neq( .var_cache .get_or_compute_witness(x, evaluator) .expect("unexpected constant expression"); - Expression::from(&constraints::evaluate_zero_equality(x_witness, evaluator)) + Expression::from(constraints::evaluate_zero_equality(x_witness, evaluator)) } } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs 
b/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs index 7d6f7e2c32c..ea7d3d9c6c0 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs @@ -107,7 +107,7 @@ pub(crate) fn evaluate( } outputs = prepare_outputs(&mut acir_gen.memory, instruction_id, array.len, ctx, evaluator); - let out_expr: Vec = outputs.iter().map(|w| w.into()).collect(); + let out_expr: Vec = outputs.iter().map(|w| (*w).into()).collect(); for i in 0..(out_expr.len() - 1) { bound_constraint_with_offset( &out_expr[i], diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/not.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/not.rs index ff8bb26f788..76ad7c93a88 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/not.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/not.rs @@ -19,7 +19,7 @@ pub(crate) fn evaluate( let l_c = var_cache.get_or_compute_internal_var_unwrap(*value, evaluator, ctx); Some( constraints::subtract( - &Expression::from(&FieldElement::from(a)), + &Expression::from(FieldElement::from(a)), FieldElement::one(), l_c.expression(), ) diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs index 04524959fbe..ffcbf1ea7c0 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs @@ -119,6 +119,7 @@ mod test { acir::{circuit::opcodes::BlackBoxFuncCall, native_types::Witness}, pwg::block::Blocks, FieldElement, OpcodeResolution, OpcodeResolutionError, PartialWitnessGenerator, + PartialWitnessGeneratorStatus, }; use crate::{ @@ -180,10 +181,10 @@ mod test { // compute the network output by solving the constraints let backend = MockBackend {}; let mut blocks = Blocks::default(); - let (unresolved_opcodes, oracles) = backend + let solver_status = backend .solve(&mut solved_witness, &mut blocks, 
eval.opcodes.clone()) .expect("Could not solve permutation constraints"); - assert!(unresolved_opcodes.is_empty() && oracles.is_empty(), "Incomplete solution"); + assert_eq!(solver_status, PartialWitnessGeneratorStatus::Solved, "Incomplete solution"); let mut b_val = Vec::new(); for i in 0..output.len() { b_val.push(solved_witness[&b_wit[i]]); diff --git a/crates/noirc_evaluator/src/ssa/builtin.rs b/crates/noirc_evaluator/src/ssa/builtin.rs index 8248322c488..8e402343bb5 100644 --- a/crates/noirc_evaluator/src/ssa/builtin.rs +++ b/crates/noirc_evaluator/src/ssa/builtin.rs @@ -77,10 +77,10 @@ impl Opcode { | BlackBoxFunc::Pedersen | BlackBoxFunc::FixedBaseScalarMul => BigUint::zero(), // Verify returns zero or one - BlackBoxFunc::SchnorrVerify - | BlackBoxFunc::EcdsaSecp256k1 - | BlackBoxFunc::MerkleMembership => BigUint::one(), - BlackBoxFunc::HashToField128Security => ObjectType::native_field().max_size(), + BlackBoxFunc::SchnorrVerify | BlackBoxFunc::EcdsaSecp256k1 => BigUint::one(), + BlackBoxFunc::ComputeMerkleRoot | BlackBoxFunc::HashToField128Security => { + ObjectType::native_field().max_size() + } BlackBoxFunc::AES => { todo!("ICE: AES is unimplemented") } @@ -111,11 +111,13 @@ impl Opcode { BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s => { (32, ObjectType::unsigned_integer(8)) } - BlackBoxFunc::HashToField128Security => (1, ObjectType::native_field()), + BlackBoxFunc::ComputeMerkleRoot | BlackBoxFunc::HashToField128Security => { + (1, ObjectType::native_field()) + } // See issue #775 on changing this to return a boolean - BlackBoxFunc::MerkleMembership - | BlackBoxFunc::SchnorrVerify - | BlackBoxFunc::EcdsaSecp256k1 => (1, ObjectType::native_field()), + BlackBoxFunc::SchnorrVerify | BlackBoxFunc::EcdsaSecp256k1 => { + (1, ObjectType::native_field()) + } BlackBoxFunc::Pedersen => (2, ObjectType::native_field()), BlackBoxFunc::FixedBaseScalarMul => (2, ObjectType::native_field()), BlackBoxFunc::RANGE | BlackBoxFunc::AND | BlackBoxFunc::XOR => { diff --git 
a/flake.lock b/flake.lock index 7c01326f86d..6a6aff10f73 100644 --- a/flake.lock +++ b/flake.lock @@ -10,11 +10,11 @@ ] }, "locked": { - "lastModified": 1682345890, - "narHash": "sha256-ZsInK9Iy81MaCugouU3ifa5Vw2GKlJK9MxCU/LF8bIw=", + "lastModified": 1682626614, + "narHash": "sha256-TC535frlYLUTDZ2iHAtUtKpMJWngL1MFxzEXhOfeCo8=", "owner": "AztecProtocol", "repo": "barretenberg", - "rev": "87aeb375d7b434e0faf47abb79f97753ab760987", + "rev": "7b5598890c1fa4ee485a4a0015fcb23b5389392e", "type": "github" }, "original": { diff --git a/noir_stdlib/src/merkle.nr b/noir_stdlib/src/merkle.nr index 9f7c5d5b130..a47ce86c94b 100644 --- a/noir_stdlib/src/merkle.nr +++ b/noir_stdlib/src/merkle.nr @@ -5,16 +5,15 @@ // and the hashpath proves this // Currently we assume that it is a binary tree, so depth k implies a width of 2^k // XXX: In the future we can add an arity parameter -#[foreign(merkle_membership)] -fn check_membership(_root : Field, _leaf : Field, _index : Field, _hash_path: [Field]) -> Field {} - - -#[alternative(merkle_membership)] -fn check_membership_in_noir(root : Field, leaf : Field, index : Field, hash_path: [Field]) -> Field { - (compute_root_from_leaf(leaf, index, hash_path) == root) as Field +fn check_membership(_root : Field, _leaf : Field, _index : Field, _hash_path: [Field]) -> Field { + (compute_merkle_root(_leaf, _index, _hash_path) == _root) as Field } +#[foreign(compute_merkle_root)] +fn compute_merkle_root(_leaf : Field, _index : Field, _hash_path: [Field]) -> Field {} + // Returns the root of the tree from the provided leaf and its hashpath, using pedersen hash +#[alternative(compute_merkle_root)] fn compute_root_from_leaf(leaf : Field, index : Field, hash_path: [Field]) -> Field { let n = hash_path.len(); let index_bits = index.to_le_bits(n as u32); From 2a5aa52435294ddeda5b4506c3117cbd164ca2ff Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 1 May 2023 17:09:23 +0100 Subject: [PATCH 56/63] fix: Add Poseidon examples into integration tests 
(#1257) add poseidon examples into nargo_cli --- .../tests/test_data/poseidon_bn254_hash/Nargo.toml | 0 .../tests/test_data/poseidon_bn254_hash/Prover.toml | 0 .../tests/test_data/poseidon_bn254_hash/src/main.nr | 0 .../tests/test_data/poseidonsponge_x5_254/Nargo.toml | 0 .../tests/test_data/poseidonsponge_x5_254/Prover.toml | 0 .../tests/test_data/poseidonsponge_x5_254/src/main.nr | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename crates/{nargo => nargo_cli}/tests/test_data/poseidon_bn254_hash/Nargo.toml (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidon_bn254_hash/Prover.toml (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidon_bn254_hash/src/main.nr (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidonsponge_x5_254/Nargo.toml (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidonsponge_x5_254/Prover.toml (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidonsponge_x5_254/src/main.nr (100%) diff --git a/crates/nargo/tests/test_data/poseidon_bn254_hash/Nargo.toml b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/Nargo.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidon_bn254_hash/Nargo.toml rename to crates/nargo_cli/tests/test_data/poseidon_bn254_hash/Nargo.toml diff --git a/crates/nargo/tests/test_data/poseidon_bn254_hash/Prover.toml b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/Prover.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidon_bn254_hash/Prover.toml rename to crates/nargo_cli/tests/test_data/poseidon_bn254_hash/Prover.toml diff --git a/crates/nargo/tests/test_data/poseidon_bn254_hash/src/main.nr b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr similarity index 100% rename from crates/nargo/tests/test_data/poseidon_bn254_hash/src/main.nr rename to crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr diff --git a/crates/nargo/tests/test_data/poseidonsponge_x5_254/Nargo.toml 
b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/Nargo.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidonsponge_x5_254/Nargo.toml rename to crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/Nargo.toml diff --git a/crates/nargo/tests/test_data/poseidonsponge_x5_254/Prover.toml b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/Prover.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidonsponge_x5_254/Prover.toml rename to crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/Prover.toml diff --git a/crates/nargo/tests/test_data/poseidonsponge_x5_254/src/main.nr b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr similarity index 100% rename from crates/nargo/tests/test_data/poseidonsponge_x5_254/src/main.nr rename to crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr From 7f6dede414c46790545b1994713d1976c5623711 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 1 May 2023 19:53:28 +0100 Subject: [PATCH 57/63] chore(noir): Release 0.5.1 (#1264) * chore(noir): Release 0.5.1 * chore: Update lockfile --- CHANGELOG.md | 8 ++++++++ Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- flake.nix | 2 +- 4 files changed, 21 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 292c08fb8c2..c554330a470 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.5.1](https://github.com/noir-lang/noir/compare/v0.5.0...v0.5.1) (2023-05-01) + + +### Bug Fixes + +* Add Poseidon examples into integration tests ([#1257](https://github.com/noir-lang/noir/issues/1257)) ([2a5aa52](https://github.com/noir-lang/noir/commit/2a5aa52435294ddeda5b4506c3117cbd164ca2ff)) +* fix `linear_eval is no 0` serialisation issue ([#1226](https://github.com/noir-lang/noir/issues/1226)) ([41d96ae](https://github.com/noir-lang/noir/commit/41d96ae9bbb9ce7010451cae5dc1f66d5e57d45b)) + ## 
[0.5.0](https://github.com/noir-lang/noir/compare/v0.4.1...v0.5.0) (2023-04-28) diff --git a/Cargo.lock b/Cargo.lock index b281ed5a9d0..b8647a9622d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -137,7 +137,7 @@ checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" [[package]] name = "arena" -version = "0.5.0" +version = "0.5.1" dependencies = [ "generational-arena", ] @@ -1161,7 +1161,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.5.0" +version = "0.5.1" dependencies = [ "cfg-if 1.0.0", "codespan-reporting 0.9.5", @@ -1638,7 +1638,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.5.0" +version = "0.5.1" [[package]] name = "itertools" @@ -1838,7 +1838,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "iter-extended", @@ -1852,7 +1852,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "acvm-backend-barretenberg", @@ -1883,7 +1883,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "build-data", @@ -1899,7 +1899,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "iter-extended", @@ -1911,7 +1911,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "clap", @@ -1926,7 +1926,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.5.0" +version = "0.5.1" dependencies = [ "chumsky", "codespan", @@ -1937,7 +1937,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "arena", @@ -1953,7 +1953,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "arena", diff --git a/Cargo.toml b/Cargo.toml 
index 34dfe88e2ba..badaab032ce 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ default-members = ["crates/nargo_cli"] [workspace.package] # x-release-please-start-version -version = "0.5.0" +version = "0.5.1" # x-release-please-end authors = ["The Noir Team "] edition = "2021" diff --git a/flake.nix b/flake.nix index 03d8f6be43f..7109e266e0f 100644 --- a/flake.nix +++ b/flake.nix @@ -106,7 +106,7 @@ commonArgs = environment // { pname = "noir"; # x-release-please-start-version - version = "0.5.0"; + version = "0.5.1"; # x-release-please-end # Use our custom stdenv to build and test our Rust project From 52ce1fd3234bb81ef203feeff3c3a240860df1df Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 1 May 2023 15:42:55 -0400 Subject: [PATCH 58/63] chore(ssa refactor): Add all remaining doc comments to ssa generation pass (#1256) * Add remaining doc comments * Update crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs Co-authored-by: kevaundray * Update crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs Co-authored-by: kevaundray * Address PR feedback --------- Co-authored-by: kevaundray --- crates/noirc_evaluator/src/ssa_refactor/ir.rs | 1 - .../src/ssa_refactor/ir/basic_block.rs | 23 ++++--- .../ssa_refactor/ir/basic_block_visitors.rs | 23 ------- .../src/ssa_refactor/ir/cfg.rs | 57 +++++++++-------- .../src/ssa_refactor/ir/constant.rs | 6 +- .../src/ssa_refactor/ir/dfg.rs | 40 +++--------- .../src/ssa_refactor/ir/function.rs | 30 +++++---- .../src/ssa_refactor/ir/instruction.rs | 44 +++---------- .../src/ssa_refactor/ir/printer.rs | 7 ++- .../src/ssa_refactor/ir/types.rs | 4 ++ .../src/ssa_refactor/ir/value.rs | 1 + .../src/ssa_refactor/ssa_builder/mod.rs | 18 +++++- .../src/ssa_refactor/ssa_gen/context.rs | 61 +++++++++++++++++-- .../src/ssa_refactor/ssa_gen/mod.rs | 3 + .../src/ssa_refactor/ssa_gen/program.rs | 3 +- .../src/ssa_refactor/ssa_gen/value.rs | 27 ++++++++ 16 files changed, 204 insertions(+), 144 deletions(-) delete mode 100644 
crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index 1a1ca9eab89..1f6cca9157d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -1,5 +1,4 @@ pub(crate) mod basic_block; -pub(crate) mod basic_block_visitors; pub(crate) mod cfg; pub(crate) mod constant; pub(crate) mod dfg; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index f6ca293f0fd..8a3f74c4a64 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -18,12 +18,6 @@ pub(crate) struct BasicBlock { /// Instructions in the basic block. instructions: Vec, - /// A basic block is considered sealed - /// if no further predecessors will be added to it. - /// Since only filled blocks can have successors, - /// predecessors are always filled. - is_sealed: bool, - /// The terminating instruction for the basic block. /// /// This will be a control flow instruction. This is only @@ -35,14 +29,20 @@ pub(crate) struct BasicBlock { pub(crate) type BasicBlockId = Id; impl BasicBlock { + /// Create a new BasicBlock with the given parameters. + /// Parameters can also be added later via BasicBlock::add_parameter pub(crate) fn new(parameters: Vec) -> Self { - Self { parameters, instructions: Vec::new(), is_sealed: false, terminator: None } + Self { parameters, instructions: Vec::new(), terminator: None } } + /// Returns the parameters of this block pub(crate) fn parameters(&self) -> &[ValueId] { &self.parameters } + /// Adds a parameter to this BasicBlock. + /// Expects that the ValueId given should refer to a Value::Param + /// instance with its position equal to self.parameters.len(). 
pub(crate) fn add_parameter(&mut self, parameter: ValueId) { self.parameters.push(parameter); } @@ -52,14 +52,23 @@ impl BasicBlock { self.instructions.push(instruction); } + /// Retrieve a reference to all instructions in this block. pub(crate) fn instructions(&self) -> &[InstructionId] { &self.instructions } + /// Sets the terminator instruction of this block. + /// + /// A properly-constructed block will always terminate with a TerminatorInstruction - + /// which either jumps to another block or returns from the current function. A block + /// will only have no terminator if it is still under construction. pub(crate) fn set_terminator(&mut self, terminator: TerminatorInstruction) { self.terminator = Some(terminator); } + /// Returns the terminator of this block. + /// + /// Once this block has finished construction, this is expected to always be Some. pub(crate) fn terminator(&self) -> Option<&TerminatorInstruction> { self.terminator.as_ref() } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs deleted file mode 100644 index e0d5dc1b3df..00000000000 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs +++ /dev/null @@ -1,23 +0,0 @@ -use super::{ - basic_block::{BasicBlock, BasicBlockId}, - instruction::TerminatorInstruction, -}; - -/// Visit all successors of a block with a given visitor closure. The closure -/// arguments are the branch instruction that is used to reach the successor, -/// and the id of the successor block itself. -pub(crate) fn visit_block_succs(basic_block: &BasicBlock, mut visit: F) { - match basic_block - .terminator() - .expect("ICE: No terminator indicates block is still under construction.") - { - TerminatorInstruction::Jmp { destination, .. } => visit(*destination), - TerminatorInstruction::JmpIf { then_destination, else_destination, .. 
} => { - visit(*then_destination); - visit(*else_destination); - } - TerminatorInstruction::Return { .. } => { - // The last block of the control flow - no successors - } - } -} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs index d443d574ca8..b2d16b29bfd 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -2,7 +2,6 @@ use std::collections::{HashMap, HashSet}; use super::{ basic_block::{BasicBlock, BasicBlockId}, - basic_block_visitors, function::Function, }; @@ -33,25 +32,30 @@ impl ControlFlowGraph { cfg } + /// Compute all of the edges between each block in the function fn compute(&mut self, func: &Function) { for (basic_block_id, basic_block) in func.dfg.basic_blocks_iter() { self.compute_block(basic_block_id, basic_block); } } + /// Compute all of the edges for the current block given fn compute_block(&mut self, basic_block_id: BasicBlockId, basic_block: &BasicBlock) { - basic_block_visitors::visit_block_succs(basic_block, |dest| { + for dest in basic_block.successors() { self.add_edge(basic_block_id, dest); - }); + } } + /// Clears out a given block's successors. This also removes the given block from + /// being a predecessor of any of its previous successors. fn invalidate_block_successors(&mut self, basic_block_id: BasicBlockId) { let node = self .data .get_mut(&basic_block_id) .expect("ICE: Attempted to invalidate cfg node successors for non-existent node."); - let old_successors = node.successors.clone(); - node.successors.clear(); + + let old_successors = std::mem::take(&mut node.successors); + for successor_id in old_successors { self.data .get_mut(&successor_id) @@ -71,6 +75,7 @@ impl ControlFlowGraph { self.compute_block(basic_block_id, basic_block); } + /// Add a directed edge making `from` a predecessor of `to`. 
fn add_edge(&mut self, from: BasicBlockId, to: BasicBlockId) { let predecessor_node = self.data.entry(from).or_default(); assert!( @@ -87,7 +92,7 @@ impl ControlFlowGraph { } /// Get an iterator over the CFG predecessors to `basic_block_id`. - pub(crate) fn pred_iter( + pub(crate) fn predecessors( &self, basic_block_id: BasicBlockId, ) -> impl ExactSizeIterator + '_ { @@ -100,7 +105,7 @@ impl ControlFlowGraph { } /// Get an iterator over the CFG successors to `basic_block_id`. - pub(crate) fn succ_iter( + pub(crate) fn successors( &self, basic_block_id: BasicBlockId, ) -> impl ExactSizeIterator + '_ { @@ -133,11 +138,11 @@ mod tests { fn jumps() { // Build function of form // fn func { - // block0(cond: u1): + // block0(cond: u1): // jmpif cond, then: block2, else: block1 - // block1(): + // block1(): // jmpif cond, then: block1, else: block2 - // block2(): + // block2(): // return () // } let func_id = Id::test_new(0); @@ -163,13 +168,13 @@ mod tests { #[allow(clippy::needless_collect)] { - let block0_predecessors: Vec<_> = cfg.pred_iter(block0_id).collect(); - let block1_predecessors: Vec<_> = cfg.pred_iter(block1_id).collect(); - let block2_predecessors: Vec<_> = cfg.pred_iter(block2_id).collect(); + let block0_predecessors: Vec<_> = cfg.predecessors(block0_id).collect(); + let block1_predecessors: Vec<_> = cfg.predecessors(block1_id).collect(); + let block2_predecessors: Vec<_> = cfg.predecessors(block2_id).collect(); - let block0_successors: Vec<_> = cfg.succ_iter(block0_id).collect(); - let block1_successors: Vec<_> = cfg.succ_iter(block1_id).collect(); - let block2_successors: Vec<_> = cfg.succ_iter(block2_id).collect(); + let block0_successors: Vec<_> = cfg.successors(block0_id).collect(); + let block1_successors: Vec<_> = cfg.successors(block1_id).collect(); + let block2_successors: Vec<_> = cfg.successors(block2_id).collect(); assert_eq!(block0_predecessors.len(), 0); assert_eq!(block1_predecessors.len(), 2); @@ -192,13 +197,13 @@ mod tests { // Modify 
function to form: // fn func { - // block0(cond: u1): + // block0(cond: u1): // jmpif cond, then: block1, else: ret_block - // block1(): + // block1(): // jmpif cond, then: block1, else: block2 - // block2(): + // block2(): // jmp ret_block() - // ret_block(): + // ret_block(): // return () // } let ret_block_id = func.dfg.make_block(); @@ -221,13 +226,13 @@ mod tests { #[allow(clippy::needless_collect)] { - let block0_predecessors: Vec<_> = cfg.pred_iter(block0_id).collect(); - let block1_predecessors: Vec<_> = cfg.pred_iter(block1_id).collect(); - let block2_predecessors: Vec<_> = cfg.pred_iter(block2_id).collect(); + let block0_predecessors: Vec<_> = cfg.predecessors(block0_id).collect(); + let block1_predecessors: Vec<_> = cfg.predecessors(block1_id).collect(); + let block2_predecessors: Vec<_> = cfg.predecessors(block2_id).collect(); - let block0_successors: Vec<_> = cfg.succ_iter(block0_id).collect(); - let block1_successors: Vec<_> = cfg.succ_iter(block1_id).collect(); - let block2_successors: Vec<_> = cfg.succ_iter(block2_id).collect(); + let block0_successors: Vec<_> = cfg.successors(block0_id).collect(); + let block1_successors: Vec<_> = cfg.successors(block1_id).collect(); + let block2_successors: Vec<_> = cfg.successors(block2_id).collect(); assert_eq!(block0_predecessors.len(), 0); assert_eq!(block1_predecessors.len(), 2); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs index 4c793a144da..63c1e528471 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs @@ -2,7 +2,7 @@ use acvm::FieldElement; use super::map::Id; -/// Represents a numeric constant in Ssa. Constants themselves are +/// Represents a numeric constant in SSA. Constants themselves are /// uniqued in the DataFlowGraph and immutable. 
/// /// This is just a thin wrapper around FieldElement so that @@ -12,10 +12,12 @@ use super::map::Id; pub(crate) struct NumericConstant(FieldElement); impl NumericConstant { + /// Create a new NumericConstant with the given Field value pub(crate) fn new(value: FieldElement) -> Self { Self(value) } + /// Retrieves the Field value for this constant pub(crate) fn value(&self) -> FieldElement { self.0 } @@ -23,6 +25,8 @@ impl NumericConstant { pub(crate) type NumericConstantId = Id; +// Implement some common numeric operations for NumericConstants +// for convenience so developers do not always have to unwrap them to use them. impl std::ops::Add for NumericConstant { type Output = NumericConstant; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 4d2ebe31efb..67569c6a4c2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -15,34 +15,10 @@ use super::{ use acvm::FieldElement; use iter_extended::vecmap; -#[derive(Debug, Default)] -/// A convenience wrapper to store `Value`s. -pub(crate) struct ValueList(Vec>); - -impl ValueList { - /// Inserts an element to the back of the list and - /// returns the `position` - pub(crate) fn push(&mut self, value: ValueId) -> usize { - self.0.push(value); - self.len() - 1 - } - - /// Returns the number of values in the list. - fn len(&self) -> usize { - self.0.len() - } - - /// Removes all items from the list. - fn clear(&mut self) { - self.0.clear(); - } - - /// Returns the ValueId's as a slice. - pub(crate) fn as_slice(&self) -> &[ValueId] { - &self.0 - } -} - +/// The DataFlowGraph contains most of the actual data in a function including +/// its blocks, instructions, and values. This struct is largely responsible for +/// owning most data in a function and handing out Ids to this data that can be +/// shared without worrying about ownership. 
#[derive(Debug, Default)] pub(crate) struct DataFlowGraph { /// All of the instructions in a function @@ -57,7 +33,7 @@ pub(crate) struct DataFlowGraph { /// Currently, we need to define them in a better way /// Call instructions require the func signature, but /// other instructions may need some more reading on my part - results: HashMap, + results: HashMap>, /// Storage for all of the values defined in this /// function. @@ -243,8 +219,7 @@ impl DataFlowGraph { }); // Add value to the list of results for this instruction - let actual_res_position = results.push(value_id); - assert_eq!(actual_res_position, expected_res_position); + results.push(value_id); value_id } @@ -259,6 +234,7 @@ impl DataFlowGraph { self.results.get(&instruction_id).expect("expected a list of Values").as_slice() } + /// Add a parameter to the given block pub(crate) fn add_block_parameter(&mut self, block_id: BasicBlockId, typ: Type) -> Id { let block = &mut self.blocks[block_id]; let position = block.parameters().len(); @@ -267,6 +243,8 @@ impl DataFlowGraph { parameter } + /// Insert an instruction at the end of a given block. + /// If the block already has a terminator, the instruction is inserted before the terminator. pub(crate) fn insert_instruction_in_block( &mut self, block: BasicBlockId, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 6789e5364fe..8d90a139118 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -1,24 +1,16 @@ -use std::collections::HashMap; - use super::basic_block::BasicBlockId; use super::dfg::DataFlowGraph; -use super::instruction::InstructionId; use super::map::Id; use super::types::Type; -use noirc_errors::Location; - /// A function holds a list of instructions. 
/// These instructions are further grouped into Basic blocks /// -/// Like Crane-lift all functions outside of the current function is seen as external. -/// To reference external functions, one must first import the function signature -/// into the current function's context. +/// All functions outside of the current function are seen as external. +/// To reference external functions its FunctionId can be used but this +/// cannot be checked for correctness until inlining is performed. #[derive(Debug)] pub struct Function { - /// Maps instructions to source locations - source_locations: HashMap, - /// The first basic block in the function entry_block: BasicBlockId, @@ -27,6 +19,8 @@ pub struct Function { id: FunctionId, + /// The DataFlowGraph holds the majority of data pertaining to the function + /// including its blocks, instructions, and values. pub(crate) dfg: DataFlowGraph, } @@ -37,23 +31,35 @@ impl Function { pub(crate) fn new(name: String, id: FunctionId) -> Self { let mut dfg = DataFlowGraph::default(); let entry_block = dfg.make_block(); - Self { name, source_locations: HashMap::new(), id, entry_block, dfg } + Self { name, id, entry_block, dfg } } + /// The name of the function. + /// Used exclusively for debugging purposes. pub(crate) fn name(&self) -> &str { &self.name } + /// The id of the function. pub(crate) fn id(&self) -> FunctionId { self.id } + /// Retrieves the entry block of a function. + /// + /// A function's entry block contains the instructions + /// to be executed first when the function is called. + /// The function's parameters are also stored as the + /// entry block's parameters. pub(crate) fn entry_block(&self) -> BasicBlockId { self.entry_block } } /// FunctionId is a reference for a function +/// +/// This Id is how each function refers to other functions +/// within Call instructions. 
pub(crate) type FunctionId = Id; #[derive(Debug, Default, Clone)] diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 545519e316f..66f8b1e3b17 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -3,6 +3,11 @@ use acvm::acir::BlackBoxFunc; use super::{basic_block::BasicBlockId, map::Id, types::Type, value::ValueId}; /// Reference to an instruction +/// +/// Note that InstructionIds are not unique. That is, two InstructionIds +/// may refer to the same Instruction data. This is because, although +/// identical, instructions may have different results based on their +/// placement within a block. pub(crate) type InstructionId = Id; /// These are similar to built-ins in other languages. @@ -36,6 +41,8 @@ impl std::fmt::Display for Intrinsic { } impl Intrinsic { + /// Lookup an Intrinsic by name and return it if found. + /// If there is no such intrinsic by that name, None is returned. pub(crate) fn lookup(name: &str) -> Option { match name { "println" => Some(Intrinsic::Println), @@ -94,42 +101,6 @@ pub(crate) enum Instruction { } impl Instruction { - /// Returns the number of results that this instruction - /// produces. - pub(crate) fn num_fixed_results(&self) -> usize { - match self { - Instruction::Binary(_) => 1, - Instruction::Cast(..) => 0, - Instruction::Not(_) => 1, - Instruction::Truncate { .. } => 1, - Instruction::Constrain(_) => 0, - // This returns 0 as the result depends on the function being called - Instruction::Call { .. } => 0, - Instruction::Allocate { .. } => 1, - Instruction::Load { .. } => 1, - Instruction::Store { .. } => 0, - } - } - - /// Returns the number of arguments required for a call - pub(crate) fn num_fixed_arguments(&self) -> usize { - // Match-all fields syntax (..) 
is avoided on most cases of this match to ensure that - // if an extra argument is ever added to any of these variants, an error - // is issued pointing to this spot to update it here as well. - match self { - Instruction::Binary(_) => 2, - Instruction::Cast(_, _) => 1, - Instruction::Not(_) => 1, - Instruction::Truncate { value: _, bit_size: _, max_bit_size: _ } => 1, - Instruction::Constrain(_) => 1, - // This returns 0 as the arguments depend on the function being called - Instruction::Call { .. } => 0, - Instruction::Allocate { size: _ } => 1, - Instruction::Load { address: _ } => 1, - Instruction::Store { address: _, value: _ } => 2, - } - } - /// Returns the type that this instruction will return. pub(crate) fn result_type(&self) -> InstructionResultType { match self { @@ -204,6 +175,7 @@ pub(crate) struct Binary { } impl Binary { + /// The type of this Binary instruction's result pub(crate) fn result_type(&self) -> InstructionResultType { match self.operator { BinaryOp::Eq | BinaryOp::Lt => InstructionResultType::Known(Type::bool()), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index a0ab65bf639..2e467017885 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -13,6 +13,7 @@ use super::{ value::ValueId, }; +/// Helper function for Function's Display impl to pretty-print the function with the given formatter. pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { writeln!(f, "fn {} {} {{", function.name(), function.id())?; display_block_with_successors(function, function.entry_block(), &mut HashSet::new(), f)?; @@ -20,7 +21,7 @@ pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result } /// Displays a block followed by all of its successors recursively. -/// This uses a HashSet to keep track of the visited blocks. 
Otherwise, +/// This uses a HashSet to keep track of the visited blocks. Otherwise /// there would be infinite recursion for any loops in the IR. pub(crate) fn display_block_with_successors( function: &Function, @@ -39,6 +40,7 @@ pub(crate) fn display_block_with_successors( Ok(()) } +/// Display a single block. This will not display the block's successors. pub(crate) fn display_block( function: &Function, block_id: BasicBlockId, @@ -80,10 +82,12 @@ fn value_list_with_types(function: &Function, values: &[ValueId]) -> String { .join(", ") } +/// Display each value separated by a comma fn value_list(function: &Function, values: &[ValueId]) -> String { vecmap(values, |id| value(function, *id)).join(", ") } +/// Display a terminator instruction pub(crate) fn display_terminator( function: &Function, terminator: Option<&TerminatorInstruction>, @@ -109,6 +113,7 @@ pub(crate) fn display_terminator( } } +/// Display an arbitrary instruction pub(crate) fn display_instruction( function: &Function, instruction: InstructionId, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index 8a0f825a117..e00c25a257c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -30,18 +30,22 @@ pub(crate) enum Type { } impl Type { + /// Create a new signed integer type with the given amount of bits. pub(crate) fn signed(bit_size: u32) -> Type { Type::Numeric(NumericType::Signed { bit_size }) } + /// Create a new unsigned integer type with the given amount of bits. pub(crate) fn unsigned(bit_size: u32) -> Type { Type::Numeric(NumericType::Unsigned { bit_size }) } + /// Creates the boolean type, represented as u1. pub(crate) fn bool() -> Type { Type::unsigned(1) } + /// Creates the native field type. 
pub(crate) fn field() -> Type { Type::Numeric(NumericType::NativeField) } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index 868aee2199e..f8197b06c8a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -47,6 +47,7 @@ pub(crate) enum Value { } impl Value { + /// Retrieves the type of this Value pub(crate) fn get_type(&self) -> Type { match self { Value::Instruction { typ, .. } => *typ, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index df80799c28a..aa67cbed583 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -27,6 +27,10 @@ pub(crate) struct FunctionBuilder { } impl FunctionBuilder { + /// Creates a new FunctionBuilder to build the function with the given FunctionId. + /// + /// This creates the new function internally so there is no need to call .new_function() + /// right after constructing a new FunctionBuilder. pub(crate) fn new(function_name: String, function_id: FunctionId) -> Self { let new_function = Function::new(function_name, function_id); let current_block = new_function.entry_block(); @@ -34,7 +38,11 @@ impl FunctionBuilder { Self { current_function: new_function, current_block, finished_functions: Vec::new() } } - /// Finish the current function and create a new function + /// Finish the current function and create a new function. + /// + /// A FunctionBuilder can always only work on one function at a time, so care + /// should be taken not to finish a function that is still in progress by calling + /// new_function before the current function is finished. 
pub(crate) fn new_function(&mut self, name: String, function_id: FunctionId) { let new_function = Function::new(name, function_id); self.current_block = new_function.entry_block(); @@ -43,11 +51,14 @@ impl FunctionBuilder { self.finished_functions.push(old_function); } + /// Consume the FunctionBuilder returning all the functions it has generated. pub(crate) fn finish(mut self) -> Ssa { self.finished_functions.push(self.current_function); Ssa::new(self.finished_functions) } + /// Add a parameter to the current function with the given parameter type. + /// Returns the newly-added parameter. pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { let entry = self.current_function.entry_block(); self.current_function.dfg.add_block_parameter(entry, typ) @@ -67,14 +78,19 @@ impl FunctionBuilder { self.numeric_constant(value.into(), Type::field()) } + /// Returns the type of the given value. pub(crate) fn type_of_value(&self, value: ValueId) -> Type { self.current_function.dfg.type_of_value(value) } + /// Insert a new block into the current function and return it. + /// Note that this block is unreachable until another block is set to jump to it. pub(crate) fn insert_block(&mut self) -> BasicBlockId { self.current_function.dfg.make_block() } + /// Adds a parameter with the given type to the given block. + /// Returns the newly-added parameter. 
pub(crate) fn add_block_parameter(&mut self, block: BasicBlockId, typ: Type) -> ValueId { self.current_function.dfg.add_block_parameter(block, typ) } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index df54d5bd079..78c64f9fad8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -16,9 +16,17 @@ use crate::ssa_refactor::ssa_builder::FunctionBuilder; use super::value::{Tree, Value, Values}; -// TODO: Make this a threadsafe queue so we can compile functions in parallel -type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; - +/// The FunctionContext is the main context object for translating a +/// function into SSA form during the SSA-gen pass. +/// +/// This context can be used to build any amount of functions, +/// so long as it is cleared out in between each function via +/// calling self.new_function(). +/// +/// If compiling many functions across multiple threads, there should +/// be a separate FunctionContext for each thread. Each FunctionContext +/// can communicate via the SharedContext field which as its name suggests +/// is the only part of the context that needs to be shared between threads. pub(super) struct FunctionContext<'a> { definitions: HashMap, @@ -26,16 +34,50 @@ pub(super) struct FunctionContext<'a> { shared_context: &'a SharedContext, } -/// Shared context for all functions during ssa codegen +/// Shared context for all functions during ssa codegen. This is the only +/// object that is shared across all threads when generating ssa in multiple threads. +/// +/// The main job of the SharedContext is to remember which functions are already +/// compiled, what their IDs are, and keep a queue of which functions still need to +/// be compiled. 
+/// +/// SSA can be generated by continuously popping from this function_queue and using +/// FunctionContext to generate from the popped function id. Once the queue is empty, +/// no other functions are reachable and the SSA generation is finished. pub(super) struct SharedContext { + /// All currently known functions which have already been assigned function ids. + /// These functions are all either currently having their SSA generated or are + /// already finished. functions: RwLock>, + + /// Queue of which functions still need to be compiled. + /// + /// The queue is currently Last-in First-out (LIFO) but this is an + /// implementation detail that can be trivially changed and should + /// not impact the resulting SSA besides changing which IDs are assigned + /// to which functions. function_queue: Mutex, + + /// Shared counter used to assign the ID of the next function function_counter: AtomicCounter, + /// The entire monomorphized source program pub(super) program: Program, } +/// The queue of functions remaining to compile +type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; + impl<'a> FunctionContext<'a> { + /// Create a new FunctionContext to compile the first function in the shared_context's + /// function queue. + /// + /// This will pop from the function queue, so it is expected the shared_context's function + /// queue is non-empty at the time of calling this function. This can be ensured by calling + /// `shared_context.get_or_queue_function(function_to_queue)` before calling this constructor. + /// + /// `function_name` and `parameters` are expected to be the name and parameters of the function + /// this constructor will pop from the function queue. pub(super) fn new( function_name: String, parameters: &Parameters, @@ -52,6 +94,11 @@ impl<'a> FunctionContext<'a> { this } + /// Finish building the current function and switch to building a new function with the + /// given name, id, and parameters. 
+ /// + /// Note that the previous function cannot be resumed after calling this. Developers should + /// avoid calling new_function until the previous function is completely finished with ssa-gen. pub(super) fn new_function(&mut self, id: IrFunctionId, name: String, parameters: &Parameters) { self.definitions.clear(); self.builder.new_function(name, id); @@ -127,6 +174,10 @@ impl<'a> FunctionContext<'a> { Self::map_type_helper(typ, &mut |x| x) } + /// Converts a non-tuple type into an SSA type. Panics if a tuple type is passed. + /// + /// This function is needed since this SSA IR has no concept of tuples and thus no type for + /// them. Use `convert_type` if tuple types need to be handled correctly. pub(super) fn convert_non_tuple_type(typ: &ast::Type) -> Type { match typ { ast::Type::Field => Type::field(), @@ -305,6 +356,7 @@ fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { } impl SharedContext { + /// Create a new SharedContext for the given monomorphized program. pub(super) fn new(program: Program) -> Self { Self { functions: Default::default(), @@ -314,6 +366,7 @@ impl SharedContext { } } + /// Pops the next function from the shared function queue, returning None if the queue is empty. pub(super) fn pop_next_function_in_queue(&self) -> Option<(ast::FuncId, IrFunctionId)> { self.function_queue.lock().expect("Failed to lock function_queue").pop() } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 4b93a7e1185..d6c5731e147 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -16,6 +16,9 @@ use self::{ use super::ir::{instruction::BinaryOp, types::Type, value::ValueId}; +/// Generates SSA for the given monomorphized program. +/// +/// This function will generate the SSA but does not perform any optimizations on it. 
pub fn generate_ssa(program: Program) -> Ssa { let context = SharedContext::new(program); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs index 03eb76dec50..99d49456210 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs @@ -2,12 +2,13 @@ use std::fmt::Display; use crate::ssa_refactor::ir::function::Function; -/// Contains the entire Ssa representation of the program +/// Contains the entire SSA representation of the program. pub struct Ssa { functions: Vec, } impl Ssa { + /// Create a new Ssa object from the given SSA functions pub fn new(functions: Vec) -> Self { Self { functions } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index fa27e70ad9b..02011adbaa8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -5,12 +5,27 @@ use crate::ssa_refactor::ir::value::ValueId as IrValueId; use super::context::FunctionContext; +/// A general Tree structure which is used in the SSA generation pass +/// to represent both values and types which may be tuples. +/// +/// Since the underlying SSA intermediate representation (IR) does not +/// support tuples directly, they're instead represented as Tree::Branch +/// nodes. For example, a single ssa value may be a Tree::Leaf(Value), +/// while a tuple would be a Tree::Branch(values). #[derive(Debug, Clone)] pub(super) enum Tree { Branch(Vec>), Leaf(T), } +/// A single value in ssa form. This wrapper enum is needed mostly to enable +/// us to automatically create a Instruction::Load whenever a mutable variable +/// is referenced. +/// +/// Note that these values wrap the ValueIds +/// used internally by functions in the ssa ir and should thus be isolated +/// to a given function. 
If used outisde their function of origin, the IDs +/// would be invalid. #[derive(Debug, Copy, Clone)] pub(super) enum Value { Normal(IrValueId), @@ -43,9 +58,15 @@ impl Value { } } +/// A tree of values. +/// +/// Compared to Value alone, the addition of being able to represent structs/tuples as +/// a Tree::Branch means this type can hold any kind of value an frontend expression may return. +/// This is why it is used as the return type for every codegen_* function in ssa_gen/mod.rs. pub(super) type Values = Tree; impl Tree { + /// Flattens the tree into a vector of each leaf value pub(super) fn flatten(self) -> Vec { match self { Tree::Branch(values) => values.into_iter().flat_map(Tree::flatten).collect(), @@ -53,6 +74,7 @@ impl Tree { } } + /// Returns the total amount of leaves in this tree pub(super) fn count_leaves(&self) -> usize { match self { Tree::Branch(trees) => trees.iter().map(|tree| tree.count_leaves()).sum(), @@ -72,6 +94,7 @@ impl Tree { } } + /// Map mutably over this tree, mutating each leaf value within using the given function pub(super) fn map_mut(&mut self, mut f: impl FnMut(&T) -> Tree) { self.map_mut_helper(&mut f); } @@ -83,6 +106,10 @@ impl Tree { } } + /// Calls the given function on each leaf node, mapping this tree into a new one. + /// + /// Because the given function returns a Tree rather than a U, it is possible + /// to use this function to turn Leaf nodes into either other Leaf nodes or even Branch nodes. 
pub(super) fn map(self, mut f: impl FnMut(T) -> Tree) -> Tree { self.map_helper(&mut f) } From 752d2f97a80ec40fb31a136df4e662dfab61e8be Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 2 May 2023 13:26:17 +0100 Subject: [PATCH 59/63] chore: fix clippy warning (#1270) --- crates/noirc_evaluator/src/ssa_refactor/ir/map.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index a99ff06c5fb..14ea521359d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -123,7 +123,7 @@ impl DenseMap { /// /// The id-element pairs are ordered by the numeric values of the ids. pub(crate) fn iter(&self) -> impl ExactSizeIterator, &T)> { - let ids_iter = (0..self.storage.len()).into_iter().map(|idx| Id::new(idx)); + let ids_iter = (0..self.storage.len()).map(|idx| Id::new(idx)); ids_iter.zip(self.storage.iter()) } } From 562c185af5a97b755f5a554a32da518562f88be1 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Tue, 2 May 2023 15:55:15 +0100 Subject: [PATCH 60/63] chore(noir): constrain expr; -> assert(expr); (#1276) * chore(noir): constrain expr; -> assert(expr); * chore(noir): replace remaining `constrain` with `assert(expr)` --------- Co-authored-by: Tom French --- crates/nargo_cli/src/cli/new_cmd.rs | 2 +- .../tests/compile_tests_data/pass/basic.nr | 2 +- .../compile_tests_data/pass/basic_import.nr | 2 +- .../target_tests_data/pass/basic/src/main.nr | 2 +- .../target_tests_data/pass/import/src/main.nr | 2 +- .../tests/test_data/1_mul/src/main.nr | 2 +- .../tests/test_data/2_div/src/main.nr | 4 +- .../tests/test_data/3_add/src/main.nr | 4 +- .../tests/test_data/4_sub/src/main.nr | 2 +- .../tests/test_data/5_over/src/main.nr | 4 +- .../nargo_cli/tests/test_data/6/src/main.nr | 4 +- 
.../tests/test_data/6_array/src/main.nr | 24 +-- .../nargo_cli/tests/test_data/7/src/main.nr | 2 +- .../tests/test_data/7_function/src/main.nr | 32 ++-- .../tests/test_data/8_integration/src/main.nr | 24 +-- .../tests/test_data/9_conditional/src/main.nr | 62 +++--- .../tests/test_data/array_len/src/main.nr | 14 +- .../tests/test_data/array_neq/src/main.nr | 2 +- .../tests/test_data/assign_ex/src/main.nr | 4 +- .../tests/test_data/bit_and/src/main.nr | 6 +- .../tests/test_data/bool_not/src/main.nr | 2 +- .../tests/test_data/bool_or/src/main.nr | 4 +- .../tests/test_data/cast_bool/src/main.nr | 2 +- .../comptime_array_access/src/main.nr | 6 +- .../tests/test_data/comptime_fail/src/main.nr | 6 +- .../comptime_recursion_regression/src/main.nr | 2 +- .../tests/test_data/contracts/src/main.nr | 2 +- .../test_data/ec_baby_jubjub/src/main.nr | 179 ++++++++++-------- .../tests/test_data/generics/src/main.nr | 12 +- .../tests/test_data/global_consts/src/baz.nr | 2 +- .../tests/test_data/global_consts/src/foo.nr | 2 +- .../tests/test_data/global_consts/src/main.nr | 30 +-- .../higher-order-functions/src/main.nr | 22 +-- .../tests/test_data/if_else_chain/src/main.nr | 10 +- .../tests/test_data/main_bool_arg/src/main.nr | 6 +- .../tests/test_data/merkle_insert/src/main.nr | 8 +- .../tests/test_data/modules/src/main.nr | 2 +- .../tests/test_data/modules_more/src/main.nr | 2 +- .../tests/test_data/modulus/src/main.nr | 10 +- .../test_data/numeric_generics/src/main.nr | 10 +- .../test_data/pedersen_check/src/main.nr | 6 +- .../test_data/poseidon_bn254_hash/src/main.nr | 4 +- .../poseidonsponge_x5_254/src/main.nr | 2 +- .../tests/test_data/pred_eq/src/main.nr | 2 +- .../tests/test_data/regression/src/main.nr | 14 +- .../tests/test_data/scalar_mul/src/main.nr | 4 +- .../tests/test_data/schnorr/src/main.nr | 2 +- .../tests/test_data/sha256/src/main.nr | 2 +- .../tests/test_data/sha2_blocks/src/main.nr | 8 +- .../tests/test_data/sha2_byte/src/main.nr | 4 +- 
.../tests/test_data/simple_shield/src/main.nr | 2 +- .../tests/test_data/strings/src/main.nr | 12 +- .../tests/test_data/struct/src/main.nr | 16 +- .../tests/test_data/struct_inputs/src/main.nr | 16 +- .../tests/test_data/submodules/src/main.nr | 2 +- .../tests/test_data/to_be_bytes/src/main.nr | 6 +- .../to_bytes_integration/src/main.nr | 8 +- .../tests/test_data/tuples/src/main.nr | 12 +- .../nargo_cli/tests/test_data/xor/src/main.nr | 2 +- .../src/hir/resolution/resolver.rs | 8 +- examples_failing/ecdsa_secp256k1/src/main.nr | 2 +- examples_failing/pow_const/src/main.nr | 2 +- noir_stdlib/src/ec/montcurve.nr | 18 +- noir_stdlib/src/ec/swcurve.nr | 10 +- noir_stdlib/src/ec/tecurve.nr | 8 +- noir_stdlib/src/hash/poseidon.nr | 10 +- noir_stdlib/src/hash/poseidon/bn254.nr | 8 +- 67 files changed, 362 insertions(+), 347 deletions(-) diff --git a/crates/nargo_cli/src/cli/new_cmd.rs b/crates/nargo_cli/src/cli/new_cmd.rs index 9d39f8d1d83..36146028454 100644 --- a/crates/nargo_cli/src/cli/new_cmd.rs +++ b/crates/nargo_cli/src/cli/new_cmd.rs @@ -27,7 +27,7 @@ compiler_version = "{CARGO_PKG_VERSION}" ); const EXAMPLE: &str = r#"fn main(x : Field, y : pub Field) { - constrain x != y; + assert(x != y); } #[test] diff --git a/crates/nargo_cli/tests/compile_tests_data/pass/basic.nr b/crates/nargo_cli/tests/compile_tests_data/pass/basic.nr index 6a678f93fe7..90c0d7ffd3e 100644 --- a/crates/nargo_cli/tests/compile_tests_data/pass/basic.nr +++ b/crates/nargo_cli/tests/compile_tests_data/pass/basic.nr @@ -1,4 +1,4 @@ fn main(x : Field, y : Field) { - constrain x != y; + assert(x != y); } \ No newline at end of file diff --git a/crates/nargo_cli/tests/compile_tests_data/pass/basic_import.nr b/crates/nargo_cli/tests/compile_tests_data/pass/basic_import.nr index 212d0f13590..bb61c0f1edc 100644 --- a/crates/nargo_cli/tests/compile_tests_data/pass/basic_import.nr +++ b/crates/nargo_cli/tests/compile_tests_data/pass/basic_import.nr @@ -7,5 +7,5 @@ fn main(x : Field, y : Field) { let _k 
= std::hash::pedersen([x]); let _l = hello(x); - constrain x != import::hello(y); + assert(x != import::hello(y)); } diff --git a/crates/nargo_cli/tests/target_tests_data/pass/basic/src/main.nr b/crates/nargo_cli/tests/target_tests_data/pass/basic/src/main.nr index 6a678f93fe7..90c0d7ffd3e 100644 --- a/crates/nargo_cli/tests/target_tests_data/pass/basic/src/main.nr +++ b/crates/nargo_cli/tests/target_tests_data/pass/basic/src/main.nr @@ -1,4 +1,4 @@ fn main(x : Field, y : Field) { - constrain x != y; + assert(x != y); } \ No newline at end of file diff --git a/crates/nargo_cli/tests/target_tests_data/pass/import/src/main.nr b/crates/nargo_cli/tests/target_tests_data/pass/import/src/main.nr index 58fb0c3f3f2..cb6476480d8 100644 --- a/crates/nargo_cli/tests/target_tests_data/pass/import/src/main.nr +++ b/crates/nargo_cli/tests/target_tests_data/pass/import/src/main.nr @@ -5,5 +5,5 @@ fn main(x : Field, y : Field) { let _k = dep::std::hash::pedersen([x]); let _l = hello(x); - constrain x != import::hello(y); + assert(x != import::hello(y)); } diff --git a/crates/nargo_cli/tests/test_data/1_mul/src/main.nr b/crates/nargo_cli/tests/test_data/1_mul/src/main.nr index e423eb65667..4587b4b5947 100644 --- a/crates/nargo_cli/tests/test_data/1_mul/src/main.nr +++ b/crates/nargo_cli/tests/test_data/1_mul/src/main.nr @@ -5,5 +5,5 @@ fn main(mut x: u32, y: u32, z: u32) { x *= x; //144 x *= x; //20736 x *= x; //429 981 696 - constrain x == z; + assert(x == z); } diff --git a/crates/nargo_cli/tests/test_data/2_div/src/main.nr b/crates/nargo_cli/tests/test_data/2_div/src/main.nr index 6df75492553..00608cb697d 100644 --- a/crates/nargo_cli/tests/test_data/2_div/src/main.nr +++ b/crates/nargo_cli/tests/test_data/2_div/src/main.nr @@ -1,6 +1,6 @@ // Testing integer division: 7/3 = 2 fn main(mut x: u32, y: u32, z: u32) { let a = x % y; - constrain x / y == z; - constrain a == x - z*y; + assert(x / y == z); + assert(a == x - z*y); } diff --git 
a/crates/nargo_cli/tests/test_data/3_add/src/main.nr b/crates/nargo_cli/tests/test_data/3_add/src/main.nr index 73ed46a8e5d..2884415b81a 100644 --- a/crates/nargo_cli/tests/test_data/3_add/src/main.nr +++ b/crates/nargo_cli/tests/test_data/3_add/src/main.nr @@ -1,8 +1,8 @@ // Test integer addition: 3 + 4 = 7 fn main(mut x: u32, y: u32, z: u32) { x += y; - constrain x == z; + assert(x == z); x *= 8; - constrain x>9; + assert(x>9); } diff --git a/crates/nargo_cli/tests/test_data/4_sub/src/main.nr b/crates/nargo_cli/tests/test_data/4_sub/src/main.nr index 242be90970f..80fc0177e41 100644 --- a/crates/nargo_cli/tests/test_data/4_sub/src/main.nr +++ b/crates/nargo_cli/tests/test_data/4_sub/src/main.nr @@ -1,5 +1,5 @@ // Test unsafe integer subtraction with underflow: 12 - 2418266113 = 1876701195 modulo 2^32 fn main(mut x: u32, y: u32, z: u32) { x -= y; - constrain x == z; + assert(x == z); } diff --git a/crates/nargo_cli/tests/test_data/5_over/src/main.nr b/crates/nargo_cli/tests/test_data/5_over/src/main.nr index 8701c1e6320..4fdff16c5c0 100644 --- a/crates/nargo_cli/tests/test_data/5_over/src/main.nr +++ b/crates/nargo_cli/tests/test_data/5_over/src/main.nr @@ -2,8 +2,8 @@ // Test odd bits integer fn main(mut x: u32, y: u32) { x = x * x; - constrain y == x; + assert(y == x); let c:u3 = 2; - constrain c > x as u3; + assert(c > x as u3); } diff --git a/crates/nargo_cli/tests/test_data/6/src/main.nr b/crates/nargo_cli/tests/test_data/6/src/main.nr index 61be34e6d88..8b350de16c1 100644 --- a/crates/nargo_cli/tests/test_data/6/src/main.nr +++ b/crates/nargo_cli/tests/test_data/6/src/main.nr @@ -12,9 +12,9 @@ fn main(x: [u8; 5], result: pub [u8; 32]) { let mut digest = std::hash::sha256(x); digest[0] = 5 as u8; digest = std::hash::sha256(x); - constrain digest == result; + assert(digest == result); let y = [12,45,78,41]; let h = std::hash::mimc_bn254(y); - constrain h == 18226366069841799622585958305961373004333097209608110160936134895615261821931; + assert(h == 
18226366069841799622585958305961373004333097209608110160936134895615261821931); } diff --git a/crates/nargo_cli/tests/test_data/6_array/src/main.nr b/crates/nargo_cli/tests/test_data/6_array/src/main.nr index 3537740f1e5..30d3ab5a22f 100644 --- a/crates/nargo_cli/tests/test_data/6_array/src/main.nr +++ b/crates/nargo_cli/tests/test_data/6_array/src/main.nr @@ -8,7 +8,7 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { c = z*z*y[i]; z -= c; } - constrain (z==0); //y[4]=0, so c and z are always 0 + assert(z==0); //y[4]=0, so c and z are always 0 //Test 2: c = 2301 as u32; @@ -17,7 +17,7 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { c = z*z*x[i]; z += x[i]*y[i] - c; } - constrain (z==3814912846); + assert(z==3814912846); //Test 3: c = 2300001 as u32; @@ -29,7 +29,7 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { z *= c; } } - constrain (z==41472); + assert(z==41472); //Test 4: z = y[4]; @@ -39,16 +39,16 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { z += x[i+j] - y[i+j]; } } - constrain (z ==11539); + assert(z ==11539); //Test 5: let cc = if z < 1 { x } else { y }; - constrain cc[0] == y[0]; + assert(cc[0] == y[0]); // Test 6: for-each loops for y_elem in y { for x_elem in x { - constrain x_elem != y_elem; + assert(x_elem != y_elem); } } @@ -57,15 +57,15 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { } // fn dyn_array(mut x: [u32; 5], y: Field, z: Field) { -// constrain x[y] == 111; -// constrain x[z] == 101; +// assert(x[y] == 111); +// assert(x[z] == 101); // x[z] = 0; -// constrain x[y] == 111; -// constrain x[1] == 0; +// assert(x[y] == 111); +// assert(x[1] == 0); // if y as u32 < 10 { // x[y] = x[y] - 2; // } else { // x[y] = 0; // } -// constrain x[4] == 109; -// } \ No newline at end of file +// assert(x[4] == 109); +// } diff --git a/crates/nargo_cli/tests/test_data/7/src/main.nr b/crates/nargo_cli/tests/test_data/7/src/main.nr index ec01ea7c4be..a6bba978644 100644 --- 
a/crates/nargo_cli/tests/test_data/7/src/main.nr +++ b/crates/nargo_cli/tests/test_data/7/src/main.nr @@ -6,5 +6,5 @@ use dep::std; fn main(x: [u8; 5], result: [u8; 32]) { let digest = std::hash::blake2s(x); - constrain digest == result; + assert(digest == result); } diff --git a/crates/nargo_cli/tests/test_data/7_function/src/main.nr b/crates/nargo_cli/tests/test_data/7_function/src/main.nr index 96ca9759a8f..5a23b493871 100644 --- a/crates/nargo_cli/tests/test_data/7_function/src/main.nr +++ b/crates/nargo_cli/tests/test_data/7_function/src/main.nr @@ -15,20 +15,20 @@ fn f2(mut x: Field) -> Field{ // Simple example fn test0(mut a: Field) { a = f2(a); - constrain a == 3; + assert(a == 3); } // Nested call fn test1(mut a: Field) { a = f1(a); - constrain a == 4; + assert(a == 4); } fn test2(z: Field, t: u32 ) { let a = z + t as Field; - constrain a == 64; + assert(a == 64); let e = pow(z, t as Field); - constrain e == 714924299; + assert(e == 714924299); } fn pow(base: Field, exponent: Field) -> Field { @@ -46,7 +46,7 @@ fn test3(x: [u8; 3]) -> [u8; 3] { for i in 0..3 { buffer[i] = x[i]; } - constrain buffer == x; + assert(buffer == x); buffer } @@ -59,7 +59,7 @@ fn test_multiple2() -> my_struct { } fn test_multiple3(x: u32, y: u32) { - constrain x == y; + assert(x == y); } struct my_struct { @@ -73,18 +73,18 @@ struct my2 { } fn test_multiple4(s: my_struct) { - constrain s.a == s.b+2; + assert(s.a == s.b+2); } fn test_multiple5(a: (u32, u32)) { - constrain a.0 == a.1+2; + assert(a.0 == a.1+2); } fn test_multiple6(a: my2, b: my_struct, c: (my2, my_struct)) { test_multiple4(a.aa); test_multiple5((b.a, b.b)); - constrain c.0.aa.a == c.1.a; + assert(c.0.aa.a == c.1.a); } @@ -110,28 +110,28 @@ fn main(x: u32 , y: u32 , a: Field, arr1: [u32; 9], arr2: [u32; 9]) { ab = ab + a; (x,ab) }; - constrain my_block.1 == 4; + assert(my_block.1 == 4); test0(a); test1(a); test2(x as Field, y); - constrain bar()[0] == 0; + assert(bar()[0] == 0); let mut b = [0 as u8, 5 as u8, 2 as 
u8]; let c = test3(b); - constrain b == c; + assert(b == c); b[0] = 1 as u8; let cc = test3(b); - constrain c != cc; + assert(c != cc); let e = test_multiple(x, y); - constrain e.1 == e.0 + 54 as u32; + assert(e.1 == e.0 + 54 as u32); let d = test_multiple2(); - constrain d.b == d.a + 2 as u32; + assert(d.b == d.a + 2 as u32); test_multiple3(y, y); //Regression test for issue #628: let result = first(arr_to_field(arr1), arr_to_field(arr2)); - constrain result[0] == arr1[0] as Field; + assert(result[0] == arr1[0] as Field); } diff --git a/crates/nargo_cli/tests/test_data/8_integration/src/main.nr b/crates/nargo_cli/tests/test_data/8_integration/src/main.nr index 57dca4e2ac0..56b02650c27 100644 --- a/crates/nargo_cli/tests/test_data/8_integration/src/main.nr +++ b/crates/nargo_cli/tests/test_data/8_integration/src/main.nr @@ -57,7 +57,7 @@ fn iterate1(mut a0: u32) -> u32{ } fn array_noteq(a: [u32; 4], b: [u32; 4]) { - constrain a != b; + assert(a != b); } fn test3(mut b: [Field; 4]) -> [Field; 4] { @@ -105,7 +105,7 @@ fn iterate3( mut hash: [u32; 8]) -> [u32; 8] { g = f; a = t1+t2; } - constrain a == 2470696267; + assert(a == 2470696267); hash[0] = hash[0] + a; hash[1] = hash[1] + b; hash[2] = hash[2] + c; @@ -126,7 +126,7 @@ fn test5() { sha_hash = iterate2(sha_hash); - constrain sha_hash[0] == 9; + assert(sha_hash[0] == 9); } @@ -244,31 +244,31 @@ fn sig1(x: u32) -> u32 { fn main(a: [u32; 100], b: [u32; 100], c: [u32; 4], mut d: [u32; 4], m: [u8; 32]) { let e = matrix_mul_10(a,b); - constrain e[6] == 1866842232; + assert(e[6] == 1866842232); let f = matrix_mul_2(c,d); - constrain f[3] == 2082554100; + assert(f[3] == 2082554100); let mut a = [1 as u32, 2, 3, 4]; a = test4(a); - constrain a[3] == 20; + assert(a[3] == 20); a = test4(c); - constrain a[3] == c[1] * 10; + assert(a[3] == c[1] * 10); d[0] += c[0]; d[0] += c[1]; - constrain d[0] == 2739986880; + assert(d[0] == 2739986880); let h = iterate1(1); - constrain h == 4; + assert(h == 4); let x = d; array_noteq(x, 
[d[0], d[1], d[2], 0]); let mut h5 = [d[0] as Field, d[1] as Field, d[2] as Field, d[3] as Field]; let t5 = test3(h5); - constrain t5[3] == 3; + assert(t5[3] == 3); h5 = test3(h5); - constrain h5[3] == 3; + assert(h5[3] == 3); test5(); @@ -279,5 +279,5 @@ fn main(a: [u32; 100], b: [u32; 100], c: [u32; 4], mut d: [u32; 4], m: [u8; 32]) sha_hash = iterate3(sha_hash); let h6 = test6(m); - constrain h6[0]== 523008072; //31.. 3800709683; + assert(h6[0]== 523008072); //31.. 3800709683 } diff --git a/crates/nargo_cli/tests/test_data/9_conditional/src/main.nr b/crates/nargo_cli/tests/test_data/9_conditional/src/main.nr index 0f37f3e92f4..48ac639ecf0 100644 --- a/crates/nargo_cli/tests/test_data/9_conditional/src/main.nr +++ b/crates/nargo_cli/tests/test_data/9_conditional/src/main.nr @@ -17,11 +17,11 @@ fn call_intrinsic(x: [u8; 5], result: [u8; 32]) { let mut digest = std::hash::sha256(x); digest[0] = 5 as u8; digest = std::hash::sha256(x); - constrain digest == result; + assert(digest == result); } fn must_be_zero(x: u8) { - constrain x == 0; + assert(x == 0); } fn test3 (x: u8) { @@ -41,19 +41,19 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ let arr: [u8; 2] = [1, 2]; if arr[0] != arr[1] { for i in 0..1 { - constrain i != 2; + assert(i != 2); } } //Issue reported in #421 if a == c[0] { - constrain c[0] == 0; + assert(c[0] == 0); } else { if a == c[1] { - constrain c[1] == 0; + assert(c[1] == 0); } else { if a == c[2] { - constrain c[2] == 0; + assert(c[2] == 0); } } } @@ -67,25 +67,25 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ c1 = c1 + as_bits[0] as Field; if i == 0 { - constrain arr[i] == 1;// 1 + assert(arr[i] == 1);// 1 for k in 0..2 { - constrain as_bits_hardcode_1[k] == as_bits[k]; + assert(as_bits_hardcode_1[k] == as_bits[k]); } } if i == 1 { - constrain arr[i] == 2;//2 + assert(arr[i] == 2);//2 for k in 0..2 { - constrain as_bits_hardcode_1[k] != as_bits[k]; + assert(as_bits_hardcode_1[k] != as_bits[k]); } } } 
- constrain c1==1; + assert(c1==1); //Regression for Issue #579 let result1_true = test(true); - constrain result1_true.array_param[0] == 1; + assert(result1_true.array_param[0] == 1); let result1_false = test(false); - constrain result1_false.array_param[0] == 0; + assert(result1_false.array_param[0] == 0); //Test case for short-circuit let mut data = [0 as u32; 32]; @@ -104,23 +104,23 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ } } } - constrain data[31] == 0; - constrain ba != 13; + assert(data[31] == 0); + assert(ba != 13); //regression for short-circuit2 if 35 == a { - constrain false; + assert(false); } bar(a as Field); if a == 3 { c = test4(); } - constrain c[1] != 2; + assert(c[1] != 2); call_intrinsic(x, result); //Test case for conditional with arrays from function parameters let b = sort([1,2,3,4]); - constrain b[0] == 1; + assert(b[0] == 1); if a == 0 { must_be_zero(0); @@ -130,9 +130,9 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ c[0] = 1; c[1] = c[2] / a + 11 % a; let f1 = a as Field; - constrain 10/f1 != 0; + assert(10/f1 != 0); } - constrain c[0] == 3; + assert(c[0] == 3); let mut y = 0; if a == 0 { @@ -141,9 +141,9 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ } else { y = 5; } - constrain y == result[0]; + assert(y == result[0]); c = sort(c); - constrain c[0]==0; + assert(c[0]==0); //test 1 let mut x: u32 = 0; @@ -153,16 +153,16 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ x = 6; } else { x = 2; - constrain x == 2; + assert(x == 2); } } else { x = 5; - constrain x == 5; + assert(x == 5); } if c[0] == 0 { x = 3; } - constrain x == 2; + assert(x == 2); //test2: loops! 
x = 0; @@ -172,19 +172,19 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ x = i as u32 +2; } } - constrain x == 0; + assert(x == 0); test3(1); if a == 0 { c = test4(); } else { - constrain c[1] != 2; + assert(c[1] != 2); } if false { c[1] = 5; } - constrain c[1] == 2; + assert(c[1] == 2); test5(4); @@ -195,7 +195,7 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ } else { c_661 = issue_661_foo(issue_661_bar(c), x); } - constrain c_661[0] < 20000; + assert(c_661[0] < 20000); // Test case for function synchronisation let mut c_sync = 0; @@ -204,7 +204,7 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ } else { c_sync = foo2() + foo2(); } - constrain c_sync == 6; + assert(c_sync == 6); // Regression for predicate simplification safe_inverse(0); @@ -213,7 +213,7 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ fn test5(a : u32) { if a > 1 { let q = a / 2; - constrain q == 2; + assert(q == 2); } } diff --git a/crates/nargo_cli/tests/test_data/array_len/src/main.nr b/crates/nargo_cli/tests/test_data/array_len/src/main.nr index 7ed9ebfd1c9..29ee44ce928 100644 --- a/crates/nargo_cli/tests/test_data/array_len/src/main.nr +++ b/crates/nargo_cli/tests/test_data/array_len/src/main.nr @@ -13,19 +13,19 @@ fn nested_call(b: [Field]) -> Field { } fn main(len3: [u8; 3], len4: [Field; 4]) { - constrain len_plus_1(len3) == 4; - constrain len_plus_1(len4) == 5; - constrain add_lens(len3, len4) == 7; - constrain nested_call(len4) == 5; + assert(len_plus_1(len3) == 4); + assert(len_plus_1(len4) == 5); + assert(add_lens(len3, len4) == 7); + assert(nested_call(len4) == 5); // std::array::len returns a comptime value - constrain len4[len3.len()] == 4; + assert(len4[len3.len()] == 4); // test for std::array::sort let mut unsorted = len3; unsorted[0] = len3[1]; unsorted[1] = len3[0]; - constrain unsorted[0] > unsorted[1]; + assert(unsorted[0] > unsorted[1]); let sorted = unsorted.sort(); - constrain 
sorted[0] < sorted[1]; + assert(sorted[0] < sorted[1]); } diff --git a/crates/nargo_cli/tests/test_data/array_neq/src/main.nr b/crates/nargo_cli/tests/test_data/array_neq/src/main.nr index 1fc5d9579c7..be734dea368 100644 --- a/crates/nargo_cli/tests/test_data/array_neq/src/main.nr +++ b/crates/nargo_cli/tests/test_data/array_neq/src/main.nr @@ -1,4 +1,4 @@ // Simple example of checking where two arrays are different fn main(a: [Field; 32], b: [Field; 32]) { - constrain a != b; + assert(a != b); } diff --git a/crates/nargo_cli/tests/test_data/assign_ex/src/main.nr b/crates/nargo_cli/tests/test_data/assign_ex/src/main.nr index 158da959352..b0626d63c8e 100644 --- a/crates/nargo_cli/tests/test_data/assign_ex/src/main.nr +++ b/crates/nargo_cli/tests/test_data/assign_ex/src/main.nr @@ -1,6 +1,6 @@ fn main(x: Field, y: Field) { let mut z = x + y; - constrain z == 3; + assert(z == 3); z = x * y; - constrain z == 2; + assert(z == 2); } diff --git a/crates/nargo_cli/tests/test_data/bit_and/src/main.nr b/crates/nargo_cli/tests/test_data/bit_and/src/main.nr index 14b865d1a38..f4805960a33 100644 --- a/crates/nargo_cli/tests/test_data/bit_and/src/main.nr +++ b/crates/nargo_cli/tests/test_data/bit_and/src/main.nr @@ -4,15 +4,15 @@ fn main(x : Field, y : Field) { let x_as_u8 = x as u8; let y_as_u8 = y as u8; - constrain (x_as_u8 & y_as_u8) == x_as_u8; + assert((x_as_u8 & y_as_u8) == x_as_u8); //bitwise and with 1 bit: let flag = (x == 0) & (y == 16); - constrain flag; + assert(flag); //bitwise and with odd bits: let x_as_u11 = x as u11; let y_as_u11 = y as u11; - constrain (x_as_u11 & y_as_u11) == x_as_u11; + assert((x_as_u11 & y_as_u11) == x_as_u11); } diff --git a/crates/nargo_cli/tests/test_data/bool_not/src/main.nr b/crates/nargo_cli/tests/test_data/bool_not/src/main.nr index 035c0630874..d6b4d7a9fad 100644 --- a/crates/nargo_cli/tests/test_data/bool_not/src/main.nr +++ b/crates/nargo_cli/tests/test_data/bool_not/src/main.nr @@ -1,5 +1,5 @@ use dep::std; fn main(x: u1) { - 
constrain !x == 0; + assert(!x == 0); } diff --git a/crates/nargo_cli/tests/test_data/bool_or/src/main.nr b/crates/nargo_cli/tests/test_data/bool_or/src/main.nr index 147cc23a922..4a74027e4aa 100644 --- a/crates/nargo_cli/tests/test_data/bool_or/src/main.nr +++ b/crates/nargo_cli/tests/test_data/bool_or/src/main.nr @@ -1,7 +1,7 @@ use dep::std; fn main(x: u1, y: u1) { - constrain x | y == 1; + assert(x | y == 1); - constrain x | y | x == 1; + assert(x | y | x == 1); } diff --git a/crates/nargo_cli/tests/test_data/cast_bool/src/main.nr b/crates/nargo_cli/tests/test_data/cast_bool/src/main.nr index e62f4b80ddd..57af8120b33 100644 --- a/crates/nargo_cli/tests/test_data/cast_bool/src/main.nr +++ b/crates/nargo_cli/tests/test_data/cast_bool/src/main.nr @@ -1,6 +1,6 @@ fn main(x: Field, y: Field) { let z = x == y; let t = z as u8; - constrain t == 1; + assert(t == 1); } diff --git a/crates/nargo_cli/tests/test_data/comptime_array_access/src/main.nr b/crates/nargo_cli/tests/test_data/comptime_array_access/src/main.nr index 0c8242bca4b..04f08bb70c5 100644 --- a/crates/nargo_cli/tests/test_data/comptime_array_access/src/main.nr +++ b/crates/nargo_cli/tests/test_data/comptime_array_access/src/main.nr @@ -7,11 +7,11 @@ fn main(a: [Field; 3]) { // Nor should using it in an expression with a non-comptime variable. 
let two = i + ii; - constrain i == ii; + assert(i == ii); let elem2 = a[i]; - constrain elem1 == elem2; - constrain two == 2; + assert(elem1 == elem2); + assert(two == 2); } fn foo(x: Field) -> Field { x } diff --git a/crates/nargo_cli/tests/test_data/comptime_fail/src/main.nr b/crates/nargo_cli/tests/test_data/comptime_fail/src/main.nr index 9e861b5dc57..ad9ecc2f689 100644 --- a/crates/nargo_cli/tests/test_data/comptime_fail/src/main.nr +++ b/crates/nargo_cli/tests/test_data/comptime_fail/src/main.nr @@ -4,12 +4,12 @@ fn main(x: Field) { // Error here: let foo = my_const + x; - constrain array[foo] == x; + assert(array[foo] == x); let my_const2 = 3; - constrain array[my_const2] == 3; + assert(array[my_const2] == 3); // Using a comptime variable where a non-comptime variable is expected should be fine main(my_const2); - constrain x != 0; + assert(x != 0); } diff --git a/crates/nargo_cli/tests/test_data/comptime_recursion_regression/src/main.nr b/crates/nargo_cli/tests/test_data/comptime_recursion_regression/src/main.nr index 31d7d10975c..0461fd9c4cb 100644 --- a/crates/nargo_cli/tests/test_data/comptime_recursion_regression/src/main.nr +++ b/crates/nargo_cli/tests/test_data/comptime_recursion_regression/src/main.nr @@ -1,4 +1,4 @@ fn main(x: Field, y: Field) { let flag = (x == 1) | (y == 2); - constrain flag | false == flag; + assert(flag | false == flag); } diff --git a/crates/nargo_cli/tests/test_data/contracts/src/main.nr b/crates/nargo_cli/tests/test_data/contracts/src/main.nr index f236186d426..53e094eb4cc 100644 --- a/crates/nargo_cli/tests/test_data/contracts/src/main.nr +++ b/crates/nargo_cli/tests/test_data/contracts/src/main.nr @@ -1,5 +1,5 @@ fn main(x : Field, y : pub Field) { - constrain x * 2 == y * 3; + assert(x * 2 == y * 3); } contract Foo { diff --git a/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr b/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr index ee9e2e2eeee..3372e969d4b 100644 --- 
a/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr +++ b/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr @@ -23,26 +23,36 @@ fn main() { let p2_affine = Gaffine::new(16540640123574156134436876038791482806971768689494387082833631921987005038935, 20819045374670962167435360035096875258406992893633759881276124905556507972311); let p3_affine = bjj_affine.add(p1_affine, p2_affine); - constrain p3_affine.eq(Gaffine::new(7916061937171219682591368294088513039687205273691143098332585753343424131937, - 14035240266687799601661095864649209771790948434046947201833777492504781204499)); + assert( + p3_affine.eq(Gaffine::new( + 7916061937171219682591368294088513039687205273691143098332585753343424131937, + 14035240266687799601661095864649209771790948434046947201833777492504781204499 + )) + ); // Test scalar multiplication let p4_affine = bjj_affine.mul(2, p1_affine); - constrain p4_affine.eq(Gaffine::new(6890855772600357754907169075114257697580319025794532037257385534741338397365, - 4338620300185947561074059802482547481416142213883829469920100239455078257889)); - constrain p4_affine.eq(bjj_affine.bit_mul([0,1], p1_affine)); + assert( + p4_affine.eq(Gaffine::new( + 6890855772600357754907169075114257697580319025794532037257385534741338397365, + 4338620300185947561074059802482547481416142213883829469920100239455078257889 + )) + ); + assert(p4_affine.eq(bjj_affine.bit_mul([0,1], p1_affine))); // Test subtraction let p5_affine = bjj_affine.subtract(p3_affine, p3_affine); - constrain p5_affine.eq(Gaffine::zero()); + assert(p5_affine.eq(Gaffine::zero())); // Check that these points are on the curve - constrain bjj_affine.contains(bjj_affine.gen) - & bjj_affine.contains(p1_affine) - & bjj_affine.contains(p2_affine) - & bjj_affine.contains(p3_affine) - & bjj_affine.contains(p4_affine) - & bjj_affine.contains(p5_affine); + assert( + bjj_affine.contains(bjj_affine.gen) & + bjj_affine.contains(p1_affine) & + bjj_affine.contains(p2_affine) & + 
bjj_affine.contains(p3_affine) & + bjj_affine.contains(p4_affine) & + bjj_affine.contains(p5_affine) + ); // Test CurveGroup equivalents let bjj = bjj_affine.into_group(); // Baby Jubjub @@ -54,23 +64,25 @@ fn main() { let p5 = p5_affine.into_group(); // Test addition - constrain p3.eq(bjj.add(p1, p2)); + assert(p3.eq(bjj.add(p1, p2))); // Test scalar multiplication - constrain p4.eq(bjj.mul(2, p1)); - constrain p4.eq(bjj.bit_mul([0,1], p1)); + assert(p4.eq(bjj.mul(2, p1))); + assert(p4.eq(bjj.bit_mul([0,1], p1))); // Test subtraction - constrain G::zero().eq(bjj.subtract(p3, p3)); - constrain p5.eq(G::zero()); + assert(G::zero().eq(bjj.subtract(p3, p3))); + assert(p5.eq(G::zero())); // Check that these points are on the curve - constrain bjj.contains(bjj.gen) - & bjj.contains(p1) - & bjj.contains(p2) - & bjj.contains(p3) - & bjj.contains(p4) - & bjj.contains(p5); + assert( + bjj.contains(bjj.gen) & + bjj.contains(p1) & + bjj.contains(p2) & + bjj.contains(p3) & + bjj.contains(p4) & + bjj.contains(p5) + ); // Test SWCurve equivalents of the above // First the affine representation @@ -83,26 +95,32 @@ fn main() { let p5_swcurve_affine = bjj_affine.map_into_swcurve(p5_affine); // Addition - constrain p3_swcurve_affine.eq( - bjj_swcurve_affine.add( - p1_swcurve_affine, - p2_swcurve_affine)); + assert( + p3_swcurve_affine.eq( + bjj_swcurve_affine.add( + p1_swcurve_affine, + p2_swcurve_affine + ) + ) + ); // Doubling - constrain p4_swcurve_affine.eq(bjj_swcurve_affine.mul(2, p1_swcurve_affine)); - constrain p4_swcurve_affine.eq(bjj_swcurve_affine.bit_mul([0,1], p1_swcurve_affine)); + assert(p4_swcurve_affine.eq(bjj_swcurve_affine.mul(2, p1_swcurve_affine))); + assert(p4_swcurve_affine.eq(bjj_swcurve_affine.bit_mul([0,1], p1_swcurve_affine))); // Subtraction - constrain SWGaffine::zero().eq(bjj_swcurve_affine.subtract(p3_swcurve_affine, p3_swcurve_affine)); - constrain p5_swcurve_affine.eq(SWGaffine::zero()); + 
assert(SWGaffine::zero().eq(bjj_swcurve_affine.subtract(p3_swcurve_affine, p3_swcurve_affine))); + assert(p5_swcurve_affine.eq(SWGaffine::zero())); // Check that these points are on the curve - constrain bjj_swcurve_affine.contains(bjj_swcurve_affine.gen) - & bjj_swcurve_affine.contains(p1_swcurve_affine) - & bjj_swcurve_affine.contains(p2_swcurve_affine) - & bjj_swcurve_affine.contains(p3_swcurve_affine) - & bjj_swcurve_affine.contains(p4_swcurve_affine) - & bjj_swcurve_affine.contains(p5_swcurve_affine); + assert( + bjj_swcurve_affine.contains(bjj_swcurve_affine.gen) & + bjj_swcurve_affine.contains(p1_swcurve_affine) & + bjj_swcurve_affine.contains(p2_swcurve_affine) & + bjj_swcurve_affine.contains(p3_swcurve_affine) & + bjj_swcurve_affine.contains(p4_swcurve_affine) & + bjj_swcurve_affine.contains(p5_swcurve_affine) + ); // Then the CurveGroup representation let bjj_swcurve = bjj.into_swcurve(); @@ -114,26 +132,25 @@ fn main() { let p5_swcurve = bjj.map_into_swcurve(p5); // Addition - constrain p3_swcurve.eq( - bjj_swcurve.add( - p1_swcurve, - p2_swcurve)); + assert(p3_swcurve.eq(bjj_swcurve.add(p1_swcurve,p2_swcurve))); // Doubling - constrain p4_swcurve.eq(bjj_swcurve.mul(2, p1_swcurve)); - constrain p4_swcurve.eq(bjj_swcurve.bit_mul([0,1], p1_swcurve)); + assert(p4_swcurve.eq(bjj_swcurve.mul(2, p1_swcurve))); + assert(p4_swcurve.eq(bjj_swcurve.bit_mul([0,1], p1_swcurve))); // Subtraction - constrain SWG::zero().eq(bjj_swcurve.subtract(p3_swcurve, p3_swcurve)); - constrain p5_swcurve.eq(SWG::zero()); + assert(SWG::zero().eq(bjj_swcurve.subtract(p3_swcurve, p3_swcurve))); + assert(p5_swcurve.eq(SWG::zero())); // Check that these points are on the curve - constrain bjj_swcurve.contains(bjj_swcurve.gen) - & bjj_swcurve.contains(p1_swcurve) - & bjj_swcurve.contains(p2_swcurve) - & bjj_swcurve.contains(p3_swcurve) - & bjj_swcurve.contains(p4_swcurve) - & bjj_swcurve.contains(p5_swcurve); + assert( + bjj_swcurve.contains(bjj_swcurve.gen) & + 
bjj_swcurve.contains(p1_swcurve) & + bjj_swcurve.contains(p2_swcurve) & + bjj_swcurve.contains(p3_swcurve) & + bjj_swcurve.contains(p4_swcurve) & + bjj_swcurve.contains(p5_swcurve) + ); // Test MontCurve conversions // First the affine representation @@ -146,26 +163,25 @@ fn main() { let p5_montcurve_affine = p5_affine.into_montcurve(); // Addition - constrain p3_montcurve_affine.eq( - bjj_montcurve_affine.add( - p1_montcurve_affine, - p2_montcurve_affine)); + assert(p3_montcurve_affine.eq(bjj_montcurve_affine.add(p1_montcurve_affine, p2_montcurve_affine))); // Doubling - constrain p4_montcurve_affine.eq(bjj_montcurve_affine.mul(2, p1_montcurve_affine)); - constrain p4_montcurve_affine.eq(bjj_montcurve_affine.bit_mul([0,1], p1_montcurve_affine)); + assert(p4_montcurve_affine.eq(bjj_montcurve_affine.mul(2, p1_montcurve_affine))); + assert(p4_montcurve_affine.eq(bjj_montcurve_affine.bit_mul([0,1], p1_montcurve_affine))); // Subtraction - constrain MGaffine::zero().eq(bjj_montcurve_affine.subtract(p3_montcurve_affine, p3_montcurve_affine)); - constrain p5_montcurve_affine.eq(MGaffine::zero()); + assert(MGaffine::zero().eq(bjj_montcurve_affine.subtract(p3_montcurve_affine, p3_montcurve_affine))); + assert(p5_montcurve_affine.eq(MGaffine::zero())); // Check that these points are on the curve - constrain bjj_montcurve_affine.contains(bjj_montcurve_affine.gen) - & bjj_montcurve_affine.contains(p1_montcurve_affine) - & bjj_montcurve_affine.contains(p2_montcurve_affine) - & bjj_montcurve_affine.contains(p3_montcurve_affine) - & bjj_montcurve_affine.contains(p4_montcurve_affine) - & bjj_montcurve_affine.contains(p5_montcurve_affine); + assert( + bjj_montcurve_affine.contains(bjj_montcurve_affine.gen) & + bjj_montcurve_affine.contains(p1_montcurve_affine) & + bjj_montcurve_affine.contains(p2_montcurve_affine) & + bjj_montcurve_affine.contains(p3_montcurve_affine) & + bjj_montcurve_affine.contains(p4_montcurve_affine) & + bjj_montcurve_affine.contains(p5_montcurve_affine) + ); 
// Then the CurveGroup representation let bjj_montcurve = bjj.into_montcurve(); @@ -177,35 +193,34 @@ fn main() { let p5_montcurve = p5_montcurve_affine.into_group(); // Addition - constrain p3_montcurve.eq( - bjj_montcurve.add( - p1_montcurve, - p2_montcurve)); - + assert(p3_montcurve.eq(bjj_montcurve.add(p1_montcurve, p2_montcurve))); + // Doubling - constrain p4_montcurve.eq(bjj_montcurve.mul(2, p1_montcurve)); - constrain p4_montcurve.eq(bjj_montcurve.bit_mul([0,1], p1_montcurve)); + assert(p4_montcurve.eq(bjj_montcurve.mul(2, p1_montcurve))); + assert(p4_montcurve.eq(bjj_montcurve.bit_mul([0,1], p1_montcurve))); // Subtraction - constrain MG::zero().eq(bjj_montcurve.subtract(p3_montcurve, p3_montcurve)); - constrain p5_montcurve.eq(MG::zero()); + assert(MG::zero().eq(bjj_montcurve.subtract(p3_montcurve, p3_montcurve))); + assert(p5_montcurve.eq(MG::zero())); // Check that these points are on the curve - constrain bjj_montcurve.contains(bjj_montcurve.gen) - & bjj_montcurve.contains(p1_montcurve) - & bjj_montcurve.contains(p2_montcurve) - & bjj_montcurve.contains(p3_montcurve) - & bjj_montcurve.contains(p4_montcurve) - & bjj_montcurve.contains(p5_montcurve); + assert( + bjj_montcurve.contains(bjj_montcurve.gen) & + bjj_montcurve.contains(p1_montcurve) & + bjj_montcurve.contains(p2_montcurve) & + bjj_montcurve.contains(p3_montcurve) & + bjj_montcurve.contains(p4_montcurve) & + bjj_montcurve.contains(p5_montcurve) + ); // Elligator 2 map-to-curve let ell2_pt_map = bjj_affine.elligator2_map(27); - constrain ell2_pt_map.eq(MGaffine::new(7972459279704486422145701269802978968072470631857513331988813812334797879121, 8142420778878030219043334189293412482212146646099536952861607542822144507872).into_tecurve()); + assert(ell2_pt_map.eq(MGaffine::new(7972459279704486422145701269802978968072470631857513331988813812334797879121, 8142420778878030219043334189293412482212146646099536952861607542822144507872).into_tecurve())); // SWU map-to-curve let swu_pt_map = 
bjj_affine.swu_map(5,27); - constrain swu_pt_map.eq(bjj_affine.map_from_swcurve(SWGaffine::new(2162719247815120009132293839392097468339661471129795280520343931405114293888, 5341392251743377373758788728206293080122949448990104760111875914082289313973))); + assert(swu_pt_map.eq(bjj_affine.map_from_swcurve(SWGaffine::new(2162719247815120009132293839392097468339661471129795280520343931405114293888, 5341392251743377373758788728206293080122949448990104760111875914082289313973)))); } } diff --git a/crates/nargo_cli/tests/test_data/generics/src/main.nr b/crates/nargo_cli/tests/test_data/generics/src/main.nr index 56078a304e0..c506995adc3 100644 --- a/crates/nargo_cli/tests/test_data/generics/src/main.nr +++ b/crates/nargo_cli/tests/test_data/generics/src/main.nr @@ -5,7 +5,7 @@ struct Bar { } fn foo(bar: Bar) { - constrain bar.one == bar.two; + assert(bar.one == bar.two); } struct BigInt { @@ -15,12 +15,12 @@ struct BigInt { impl BigInt { // `N` is in scope of all methods in the impl fn first(first: BigInt, second: BigInt) -> Self { - constrain first.limbs != second.limbs; + assert(first.limbs != second.limbs); first } fn second(first: BigInt, second: Self) -> Self { - constrain first.limbs != second.limbs; + assert(first.limbs != second.limbs); second } } @@ -42,11 +42,11 @@ fn main(x: Field, y: Field) { let int1 = BigInt { limbs: [1] }; let int2 = BigInt { limbs: [2] }; let BigInt { limbs } = int1.second(int2).first(int1); - constrain limbs == int2.limbs; + assert(limbs == int2.limbs); // Test impl exclusively for Bar - constrain bar1.get_other() == bar1.other; + assert(bar1.get_other() == bar1.other); // Expected type error - // constrain bar2.get_other() == bar2.other; + // assert(bar2.get_other() == bar2.other); } diff --git a/crates/nargo_cli/tests/test_data/global_consts/src/baz.nr b/crates/nargo_cli/tests/test_data/global_consts/src/baz.nr index 3471da43105..e52efc52eae 100644 --- a/crates/nargo_cli/tests/test_data/global_consts/src/baz.nr +++ 
b/crates/nargo_cli/tests/test_data/global_consts/src/baz.nr @@ -1,5 +1,5 @@ fn from_baz(x : [Field; crate::foo::MAGIC_NUMBER]) { for i in 0..crate::foo::MAGIC_NUMBER { - constrain x[i] == crate::foo::MAGIC_NUMBER; + assert(x[i] == crate::foo::MAGIC_NUMBER); }; } \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/global_consts/src/foo.nr b/crates/nargo_cli/tests/test_data/global_consts/src/foo.nr index c54a85ae120..2db74fb1ff7 100644 --- a/crates/nargo_cli/tests/test_data/global_consts/src/foo.nr +++ b/crates/nargo_cli/tests/test_data/global_consts/src/foo.nr @@ -6,6 +6,6 @@ global TYPE_INFERRED = 42; fn from_foo(x : [Field; bar::N]) { for i in 0..bar::N { - constrain x[i] == bar::N; + assert(x[i] == bar::N); }; } \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/global_consts/src/main.nr b/crates/nargo_cli/tests/test_data/global_consts/src/main.nr index fb48eb2b798..9bcca2b8071 100644 --- a/crates/nargo_cli/tests/test_data/global_consts/src/main.nr +++ b/crates/nargo_cli/tests/test_data/global_consts/src/main.nr @@ -16,14 +16,14 @@ fn main(a: [Field; M + N - N], b: [Field; 30 + N / 2], c : pub [Field; foo::MAGI let test_struct = Dummy { x: d, y: c }; for i in 0..foo::MAGIC_NUMBER { - constrain c[i] == foo::MAGIC_NUMBER; - constrain test_struct.y[i] == foo::MAGIC_NUMBER; + assert(c[i] == foo::MAGIC_NUMBER); + assert(test_struct.y[i] == foo::MAGIC_NUMBER); } - constrain N != M; + assert(N != M); let expected: u32 = 42; - constrain foo::TYPE_INFERRED == expected; + assert(foo::TYPE_INFERRED == expected); let mut y = 5; let mut x = M; @@ -33,30 +33,30 @@ fn main(a: [Field; M + N - N], b: [Field; 30 + N / 2], c : pub [Field; foo::MAGI y = i; } - constrain y == 24; - constrain x == 10; + assert(y == 24); + assert(x == 10); let q = multiplyByM(3); - constrain q == 96; + assert(q == 96); arrays_neq(a, b); let t: [Field; T_LEN] = [N, M]; - constrain t[1] == 32; + assert(t[1] == 32); - constrain 15 == mysubmodule::my_helper(); + 
assert(15 == mysubmodule::my_helper()); let add_submodules_N = mysubmodule::N + foo::bar::N; - constrain 15 == add_submodules_N; + assert(15 == add_submodules_N); let add_from_bar_N = mysubmodule::N + foo::bar::from_bar(1); - constrain 15 == add_from_bar_N; + assert(15 == add_from_bar_N); // Example showing an array filled with (mysubmodule::N + 2) 0's let sugared = [0; mysubmodule::N + 2]; - constrain sugared[mysubmodule::N + 1] == 0; + assert(sugared[mysubmodule::N + 1] == 0); let arr: [Field; mysubmodule::N] = [N; 10]; - constrain (arr[0] == 5) & (arr[9] == 5); + assert((arr[0] == 5) & (arr[9] == 5)); foo::from_foo(d); baz::from_baz(c); @@ -67,7 +67,7 @@ fn multiplyByM(x: Field) -> Field { } fn arrays_neq(a: [Field; M], b: [Field; M]) { - constrain a != b; + assert(a != b); } mod mysubmodule { @@ -77,7 +77,7 @@ mod mysubmodule { global L: Field = 50; fn my_bool_or(x: u1, y: u1) { - constrain x | y == 1; + assert(x | y == 1); } fn my_helper() -> comptime Field { diff --git a/crates/nargo_cli/tests/test_data/higher-order-functions/src/main.nr b/crates/nargo_cli/tests/test_data/higher-order-functions/src/main.nr index 70b281951a8..572e6603cc5 100644 --- a/crates/nargo_cli/tests/test_data/higher-order-functions/src/main.nr +++ b/crates/nargo_cli/tests/test_data/higher-order-functions/src/main.nr @@ -2,16 +2,16 @@ use dep::std; fn main() -> pub Field { let f = if 3 * 7 > 200 { foo } else { bar }; - constrain f()[1] == 2; + assert(f()[1] == 2); // Lambdas: - constrain twice(|x| x * 2, 5) == 20; - constrain (|x, y| x + y + 1)(2, 3) == 6; + assert(twice(|x| x * 2, 5) == 20); + assert((|x, y| x + y + 1)(2, 3) == 6); // Closures: let a = 42; let g = || a; - constrain g() == 42; + assert(g() == 42); // Mutable variables cannot be captured, but you can // copy them into immutable variables and capture those: @@ -22,7 +22,7 @@ fn main() -> pub Field { // Add extra mutations to ensure we can mutate x without the // captured z changing. 
x = x + 1; - constrain (|y| y + z)(1) == 4; + assert((|y| y + z)(1) == 4); x = x + 1; let ret = twice(add1, 3); @@ -34,18 +34,18 @@ fn main() -> pub Field { /// Test the array functions in std::array fn test_array_functions() { let myarray: [i32; 3] = [1, 2, 3]; - constrain myarray.any(|n| n > 2); + assert(myarray.any(|n| n > 2)); let evens: [i32; 3] = [2, 4, 6]; - constrain evens.all(|n| n > 1); + assert(evens.all(|n| n > 1)); - constrain evens.fold(0, |a, b| a + b) == 12; - constrain evens.reduce(|a, b| a + b) == 12; + assert(evens.fold(0, |a, b| a + b) == 12); + assert(evens.reduce(|a, b| a + b) == 12); let descending = myarray.sort_via(|a, b| a > b); - constrain descending == [3, 2, 1]; + assert(descending == [3, 2, 1]); - constrain evens.map(|n| n / 2) == myarray; + assert(evens.map(|n| n / 2) == myarray); } fn foo() -> [u32; 2] { diff --git a/crates/nargo_cli/tests/test_data/if_else_chain/src/main.nr b/crates/nargo_cli/tests/test_data/if_else_chain/src/main.nr index af04fc7bdf8..5105c18c7de 100644 --- a/crates/nargo_cli/tests/test_data/if_else_chain/src/main.nr +++ b/crates/nargo_cli/tests/test_data/if_else_chain/src/main.nr @@ -1,16 +1,16 @@ fn main(a: u32, mut c: [u32; 4]){ if a == c[0] { - constrain c[0] == 0; + assert(c[0] == 0); } else if a == c[1] { - constrain c[1] == 0; + assert(c[1] == 0); } else if a == c[2] { - constrain c[2] == 0; + assert(c[2] == 0); } else if a == c[3] { // expect to match this case - constrain c[3] == 0; + assert(c[3] == 0); } else { - constrain c[0] == 10; + assert(c[0] == 10); } } diff --git a/crates/nargo_cli/tests/test_data/main_bool_arg/src/main.nr b/crates/nargo_cli/tests/test_data/main_bool_arg/src/main.nr index 91a8db03ff3..0615a7dbca4 100644 --- a/crates/nargo_cli/tests/test_data/main_bool_arg/src/main.nr +++ b/crates/nargo_cli/tests/test_data/main_bool_arg/src/main.nr @@ -1,8 +1,8 @@ fn main(x : bool, y: [bool;2]) { if x { - constrain 1 != 2; + assert(1 != 2); } - constrain x; - constrain y[0] != y[1]; + assert(x); + 
assert(y[0] != y[1]); } diff --git a/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr b/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr index 3ab4efb64c0..53d876272ac 100644 --- a/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr +++ b/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr @@ -10,13 +10,13 @@ fn main( mimc_input: [Field; 4], ) { let old_leaf_exists = std::merkle::check_membership(old_root, old_leaf, index, old_hash_path); - constrain old_leaf_exists == 1; - constrain old_root == std::merkle::compute_root_from_leaf(old_leaf, index, old_hash_path); + assert(old_leaf_exists == 1); + assert(old_root == std::merkle::compute_root_from_leaf(old_leaf, index, old_hash_path)); let calculated_root = std::merkle::compute_merkle_root(leaf, index, old_hash_path); - constrain new_root == calculated_root; + assert(new_root == calculated_root); let h = std::hash::mimc_bn254(mimc_input); // Regression test for PR #891 std::println(h); - constrain h == 18226366069841799622585958305961373004333097209608110160936134895615261821931; + assert(h == 18226366069841799622585958305961373004333097209608110160936134895615261821931); } diff --git a/crates/nargo_cli/tests/test_data/modules/src/main.nr b/crates/nargo_cli/tests/test_data/modules/src/main.nr index 4a773c9ed6b..167f7e671a0 100644 --- a/crates/nargo_cli/tests/test_data/modules/src/main.nr +++ b/crates/nargo_cli/tests/test_data/modules/src/main.nr @@ -10,5 +10,5 @@ mod foo; // // To verify that proof, type `cargo run verify {proof_name}` fn main(x: Field, y: pub Field) { - constrain x != foo::hello(y); + assert(x != foo::hello(y)); } diff --git a/crates/nargo_cli/tests/test_data/modules_more/src/main.nr b/crates/nargo_cli/tests/test_data/modules_more/src/main.nr index 73f3a0a6d8b..8862e5a8650 100644 --- a/crates/nargo_cli/tests/test_data/modules_more/src/main.nr +++ b/crates/nargo_cli/tests/test_data/modules_more/src/main.nr @@ -2,5 +2,5 @@ mod foo; // An example of the module system 
fn main(x: Field, y: Field) { - constrain x != foo::bar::from_bar(y); + assert(x != foo::bar::from_bar(y)); } diff --git a/crates/nargo_cli/tests/test_data/modulus/src/main.nr b/crates/nargo_cli/tests/test_data/modulus/src/main.nr index 070d934976d..4a13a6e06ba 100644 --- a/crates/nargo_cli/tests/test_data/modulus/src/main.nr +++ b/crates/nargo_cli/tests/test_data/modulus/src/main.nr @@ -3,24 +3,24 @@ use dep::std; fn main(bn254_modulus_be_bytes : [u8; 32], bn254_modulus_be_bits : [u1; 254]) -> pub Field { let modulus_size = std::field::modulus_num_bits(); // NOTE: The constraints used in this circuit will only work when testing nargo with the plonk bn254 backend - constrain modulus_size == 254; + assert(modulus_size == 254); let modulus_be_byte_array = std::field::modulus_be_bytes(); for i in 0..32 { - constrain modulus_be_byte_array[i] == bn254_modulus_be_bytes[i]; + assert(modulus_be_byte_array[i] == bn254_modulus_be_bytes[i]); } let modulus_le_byte_array = std::field::modulus_le_bytes(); for i in 0..32 { - constrain modulus_le_byte_array[i] == bn254_modulus_be_bytes[31-i]; + assert(modulus_le_byte_array[i] == bn254_modulus_be_bytes[31-i]); } let modulus_be_bits = std::field::modulus_be_bits(); for i in 0..254 { - constrain modulus_be_bits[i] == bn254_modulus_be_bits[i]; + assert(modulus_be_bits[i] == bn254_modulus_be_bits[i]); } let modulus_le_bits = std::field::modulus_le_bits(); for i in 0..254 { - constrain modulus_le_bits[i] == bn254_modulus_be_bits[253-i]; + assert(modulus_le_bits[i] == bn254_modulus_be_bits[253-i]); } modulus_size diff --git a/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr b/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr index ebe50c4d0d9..f1efafc19fd 100644 --- a/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr +++ b/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr @@ -3,15 +3,15 @@ fn main() { let b = id([1, 2, 3]); let itWorks1 = MyStruct { data: a }; - constrain itWorks1.data[1] 
== 2; + assert(itWorks1.data[1] == 2); let itWorks2 = MyStruct { data: b }; - constrain itWorks2.data[1] == 2; + assert(itWorks2.data[1] == 2); let c = [1, 2]; let itAlsoWorks = MyStruct { data: c }; - constrain itAlsoWorks.data[1] == 2; + assert(itAlsoWorks.data[1] == 2); - constrain foo(itWorks2).data[0] == itWorks2.data[0] + 1; + assert(foo(itWorks2).data[0] == itWorks2.data[0] + 1); } fn id(x: [Field; I]) -> [Field; I] { @@ -25,7 +25,7 @@ struct MyStruct { impl MyStruct { fn insert(mut self: Self, index: comptime Field, elem: Field) -> Self { // Regression test for numeric generics on impls - constrain index as u64 < S as u64; + assert(index as u64 < S as u64); self.data[index] = elem; self diff --git a/crates/nargo_cli/tests/test_data/pedersen_check/src/main.nr b/crates/nargo_cli/tests/test_data/pedersen_check/src/main.nr index b727112ce55..37fc3f61188 100644 --- a/crates/nargo_cli/tests/test_data/pedersen_check/src/main.nr +++ b/crates/nargo_cli/tests/test_data/pedersen_check/src/main.nr @@ -2,8 +2,8 @@ use dep::std; fn main(x: Field, y: Field, salt: Field, out_x: Field, out_y: Field ) { let res = std::hash::pedersen([x, y]); - constrain res[0] == out_x; - constrain res[1] == out_y; + assert(res[0] == out_x); + assert(res[1] == out_y); let raw_data = [x,y]; let mut state = 0; @@ -12,6 +12,6 @@ fn main(x: Field, y: Field, salt: Field, out_x: Field, out_y: Field ) { } state += salt; let hash = std::hash::pedersen([state]); - constrain std::hash::pedersen([43])[0] == hash[0]; + assert(std::hash::pedersen([43])[0] == hash[0]); } diff --git a/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr index f2f1af7ab7d..37621c732a8 100644 --- a/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr +++ b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr @@ -3,8 +3,8 @@ use dep::std::hash::poseidon; fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { 
let hash1 = poseidon::bn254::hash_2(x1); - constrain hash1 == y1; + assert(hash1 == y1); let hash2 = poseidon::bn254::hash_4(x2); - constrain hash2 == y2; + assert(hash2 == y2); } diff --git a/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr index f5135897f19..3addc1cec97 100644 --- a/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr +++ b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr @@ -5,5 +5,5 @@ fn main(x: [Field; 7]) // Test optimised sponge let result = poseidon::bn254::sponge(x); - constrain result == 0x080ae1669d62f0197190573d4a325bfb8d8fc201ce3127cbac0c47a7ac81ac48; + assert(result == 0x080ae1669d62f0197190573d4a325bfb8d8fc201ce3127cbac0c47a7ac81ac48); } diff --git a/crates/nargo_cli/tests/test_data/pred_eq/src/main.nr b/crates/nargo_cli/tests/test_data/pred_eq/src/main.nr index c9c43b56c07..c7986cb7af3 100644 --- a/crates/nargo_cli/tests/test_data/pred_eq/src/main.nr +++ b/crates/nargo_cli/tests/test_data/pred_eq/src/main.nr @@ -2,5 +2,5 @@ use dep::std; fn main(x: Field, y: Field) { let p = x == y; - constrain p == true; + assert(p == true); } diff --git a/crates/nargo_cli/tests/test_data/regression/src/main.nr b/crates/nargo_cli/tests/test_data/regression/src/main.nr index 2fcf41c8d7f..06e35827d1e 100644 --- a/crates/nargo_cli/tests/test_data/regression/src/main.nr +++ b/crates/nargo_cli/tests/test_data/regression/src/main.nr @@ -2,8 +2,8 @@ global NIBBLE_LENGTH: comptime Field = 16; fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Field) { - constrain 2*input.len() as u64 <= NIBBLE_LENGTH as u64; - constrain length as u64 <= input.len() as u64; + assert(2*input.len() as u64 <= NIBBLE_LENGTH as u64); + assert(length as u64 <= input.len() as u64); let mut nibble = [0 as u4; NIBBLE_LENGTH]; @@ -43,7 +43,7 @@ fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Fie fn enc(value: [u8; N], 
value_length: Field) -> ([u8; 32], Field) { - constrain value.len() as u8 >= value_length as u8; + assert(value.len() as u8 >= value_length as u8); let mut out_value = [0; 32]; if value_length == 0 { @@ -75,8 +75,8 @@ fn main(x: [u8; 5], z: Field) { //Issue 1144 let (nib, len) = compact_decode(x,z); - constrain len == 5; - constrain [nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]; + assert(len == 5); + assert([nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]); } @@ -96,6 +96,6 @@ fn enc_test() let enc_val1 = enc(val1,val1_length); - constrain enc_val1.0 == [0x94,0xb8,0x8f,0x61,0xe6,0xfb,0xda,0x83,0xfb,0xff,0xfa,0xbe,0x36,0x41,0x12,0x13,0x74,0x80,0x39,0x80,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]; - constrain enc_val1.1 == 21; + assert(enc_val1.0 == [0x94,0xb8,0x8f,0x61,0xe6,0xfb,0xda,0x83,0xfb,0xff,0xfa,0xbe,0x36,0x41,0x12,0x13,0x74,0x80,0x39,0x80,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]); + assert(enc_val1.1 == 21); } \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/scalar_mul/src/main.nr b/crates/nargo_cli/tests/test_data/scalar_mul/src/main.nr index 72e022edc8e..d9d267f1dcd 100644 --- a/crates/nargo_cli/tests/test_data/scalar_mul/src/main.nr +++ b/crates/nargo_cli/tests/test_data/scalar_mul/src/main.nr @@ -17,6 +17,6 @@ fn main( pub_y = b_pub_y; } let res = std::scalar_mul::fixed_base(priv_key); - constrain res[0] == pub_x; - constrain res[1] == pub_y; + assert(res[0] == pub_x); + assert(res[1] == pub_y); } diff --git a/crates/nargo_cli/tests/test_data/schnorr/src/main.nr b/crates/nargo_cli/tests/test_data/schnorr/src/main.nr index 39676ca7389..ec4f819858a 100644 --- a/crates/nargo_cli/tests/test_data/schnorr/src/main.nr +++ b/crates/nargo_cli/tests/test_data/schnorr/src/main.nr @@ -6,5 +6,5 @@ fn main(message: [u8; 10], pub_key_x: Field, pub_key_y: Field, signature: [u8; 6 // Is there ever a situation where someone would want // to ensure that a signature was invalid? 
let x = std::schnorr::verify_signature(pub_key_x,pub_key_y,signature, message); - constrain x == 1; + assert(x == 1); } diff --git a/crates/nargo_cli/tests/test_data/sha256/src/main.nr b/crates/nargo_cli/tests/test_data/sha256/src/main.nr index bf2249c4faf..fd5340e2384 100644 --- a/crates/nargo_cli/tests/test_data/sha256/src/main.nr +++ b/crates/nargo_cli/tests/test_data/sha256/src/main.nr @@ -15,5 +15,5 @@ fn main(x: Field, result: [u8; 32]) { // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field // The padding is taken care of by the program let digest = std::hash::sha256([x as u8]); - constrain digest == result; + assert(digest == result); } diff --git a/crates/nargo_cli/tests/test_data/sha2_blocks/src/main.nr b/crates/nargo_cli/tests/test_data/sha2_blocks/src/main.nr index 7e687cd179b..fcdcdb8684f 100644 --- a/crates/nargo_cli/tests/test_data/sha2_blocks/src/main.nr +++ b/crates/nargo_cli/tests/test_data/sha2_blocks/src/main.nr @@ -5,18 +5,18 @@ fn main(x: [u8; 3], result256: [u8; 32], result512: [u8; 64]) { // One-block tests. let mut digest256 = std::sha256::digest(x); - constrain digest256 == result256; + assert(digest256 == result256); let mut digest512 = std::sha512::digest(x); - constrain digest512 == result512; + assert(digest512 == result512); // Two-block SHA256 test. 
Taken from https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/SHA256.pdf let y: [u8; 56] = [97,98,99,100,98,99,100,101,99,100,101,102,100,101,102,103,101,102,103,104,102,103,104,105,103,104,105,106,104,105,106,107,105,106,107,108,106,107,108,109,107,108,109,110,108,109,110,111,109,110,111,112,110,111,112,113]; // "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" digest256 = std::sha256::digest(y); - constrain digest256 == [36,141,106,97,210,6,56,184,229,192,38,147,12,62,96,57,163,60,228,89,100,255,33,103,246,236,237,212,25,219,6,193]; + assert(digest256 == [36,141,106,97,210,6,56,184,229,192,38,147,12,62,96,57,163,60,228,89,100,255,33,103,246,236,237,212,25,219,6,193]); // Two-block SHA256 test. Taken from https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/SHA512.pdf let z: [u8; 112] = [97,98,99,100,101,102,103,104,98,99,100,101,102,103,104,105,99,100,101,102,103,104,105,106,100,101,102,103,104,105,106,107,101,102,103,104,105,106,107,108,102,103,104,105,106,107,108,109,103,104,105,106,107,108,109,110,104,105,106,107,108,109,110,111,105,106,107,108,109,110,111,112,106,107,108,109,110,111,112,113,107,108,109,110,111,112,113,114,108,109,110,111,112,113,114,115,109,110,111,112,113,114,115,116,110,111,112,113,114,115,116,117]; // "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu" digest512 = std::sha512::digest(z); - constrain digest512 == [142,149,155,117,218,227,19,218,140,244,247,40,20,252,20,63,143,119,121,198,235,159,127,161,114,153,174,173,182,136,144,24,80,29,40,158,73,0,247,228,51,27,153,222,196,181,67,58,199,211,41,238,182,221,38,84,94,150,229,91,135,75,233,9]; + assert(digest512 == 
[142,149,155,117,218,227,19,218,140,244,247,40,20,252,20,63,143,119,121,198,235,159,127,161,114,153,174,173,182,136,144,24,80,29,40,158,73,0,247,228,51,27,153,222,196,181,67,58,199,211,41,238,182,221,38,84,94,150,229,91,135,75,233,9]); } diff --git a/crates/nargo_cli/tests/test_data/sha2_byte/src/main.nr b/crates/nargo_cli/tests/test_data/sha2_byte/src/main.nr index 3458862b646..a7cc9daebb9 100644 --- a/crates/nargo_cli/tests/test_data/sha2_byte/src/main.nr +++ b/crates/nargo_cli/tests/test_data/sha2_byte/src/main.nr @@ -4,8 +4,8 @@ use dep::std; fn main(x: Field, result256: [u8; 32], result512: [u8; 64]) { let digest256 = std::sha256::digest([x as u8]); - constrain digest256 == result256; + assert(digest256 == result256); let digest512 = std::sha512::digest([x as u8]); - constrain digest512 == result512; + assert(digest512 == result512); } diff --git a/crates/nargo_cli/tests/test_data/simple_shield/src/main.nr b/crates/nargo_cli/tests/test_data/simple_shield/src/main.nr index 20d41481c7e..65d0ec954ac 100644 --- a/crates/nargo_cli/tests/test_data/simple_shield/src/main.nr +++ b/crates/nargo_cli/tests/test_data/simple_shield/src/main.nr @@ -30,7 +30,7 @@ fn main( // Check that the input note nullifier is in the root let is_member = std::merkle::check_membership(note_root, note_commitment[0], index, note_hash_path); - constrain is_member == 1; + assert(is_member == 1); [nullifier[0], receiver_note_commitment[0]] } diff --git a/crates/nargo_cli/tests/test_data/strings/src/main.nr b/crates/nargo_cli/tests/test_data/strings/src/main.nr index ca0d1691f86..bee2370201c 100644 --- a/crates/nargo_cli/tests/test_data/strings/src/main.nr +++ b/crates/nargo_cli/tests/test_data/strings/src/main.nr @@ -3,7 +3,7 @@ use dep::std; fn main(message : pub str<11>, y : Field, hex_as_string : str<4>, hex_as_field : Field) { let mut bad_message = "hello world"; - constrain message == "hello world"; + assert(message == "hello world"); bad_message = "helld world"; let x = 10; let z = x * 5; 
@@ -13,18 +13,18 @@ fn main(message : pub str<11>, y : Field, hex_as_string : str<4>, hex_as_field : std::println(x); let array = [1, 2, 3, 5, 8]; - constrain y == 5; // Change to y != 5 to see how the later print statements are not called + assert(y == 5); // Change to y != 5 to see how the later print statements are not called std::println(array); std::println(bad_message); - constrain message != bad_message; + assert(message != bad_message); let hash = std::hash::pedersen([x]); std::println(hash); - constrain hex_as_string == "0x41"; - // constrain hex_as_string != 0x41; This will fail with a type mismatch between str[4] and Field - constrain hex_as_field == 0x41; + assert(hex_as_string == "0x41"); + // assert(hex_as_string != 0x41); This will fail with a type mismatch between str[4] and Field + assert(hex_as_field == 0x41); } #[test] diff --git a/crates/nargo_cli/tests/test_data/struct/src/main.nr b/crates/nargo_cli/tests/test_data/struct/src/main.nr index b426339c1e4..6d61393920d 100644 --- a/crates/nargo_cli/tests/test_data/struct/src/main.nr +++ b/crates/nargo_cli/tests/test_data/struct/src/main.nr @@ -58,20 +58,20 @@ fn main(x: Field, y: Field) { let first = Foo::default(x,y); let p = Pair { first, second: 1 }; - constrain p.bar() == x; - constrain p.second == y; - constrain p.first.array[0] != p.first.array[1]; + assert(p.bar() == x); + assert(p.second == y); + assert(p.first.array[0] != p.first.array[1]); // Nested structs let (struct_from_tuple, a_bool) = test_struct_in_tuple(true,x,y); - constrain struct_from_tuple.my_bool == true; - constrain a_bool == true; - constrain struct_from_tuple.my_int == 5; - constrain struct_from_tuple.my_nest.a == 0; + assert(struct_from_tuple.my_bool == true); + assert(a_bool == true); + assert(struct_from_tuple.my_int == 5); + assert(struct_from_tuple.my_nest.a == 0); // Regression test for issue #670 let Animal { legs, eyes } = get_dog(); let six = legs + eyes as Field; - constrain six == 6; + assert(six == 6); } diff 
--git a/crates/nargo_cli/tests/test_data/struct_inputs/src/main.nr b/crates/nargo_cli/tests/test_data/struct_inputs/src/main.nr index e022f26947c..fe77ed6eee6 100644 --- a/crates/nargo_cli/tests/test_data/struct_inputs/src/main.nr +++ b/crates/nargo_cli/tests/test_data/struct_inputs/src/main.nr @@ -14,23 +14,23 @@ fn main(x : Field, y : pub myStruct, z: pub foo::bar::barStruct, a: pub foo::foo check_inner_struct(a, z); for i in 0 .. struct_from_bar.array.len() { - constrain struct_from_bar.array[i] == z.array[i]; + assert(struct_from_bar.array[i] == z.array[i]); } - constrain z.val == struct_from_bar.val; + assert(z.val == struct_from_bar.val); - constrain (struct_from_bar.val * x) == x; + assert((struct_from_bar.val * x) == x); - constrain x != y.bar; + assert(x != y.bar); - constrain y.message == "hello"; - constrain a.bar_struct.message == struct_from_bar.message; + assert(y.message == "hello"); + assert(a.bar_struct.message == struct_from_bar.message); a.bar_struct.array[1] } fn check_inner_struct(a: foo::fooStruct, z: foo::bar::barStruct) { - constrain a.bar_struct.val == z.val; + assert(a.bar_struct.val == z.val); for i in 0.. 
a.bar_struct.array.len() { - constrain a.bar_struct.array[i] == z.array[i]; + assert(a.bar_struct.array[i] == z.array[i]); } } diff --git a/crates/nargo_cli/tests/test_data/submodules/src/main.nr b/crates/nargo_cli/tests/test_data/submodules/src/main.nr index 90c778db111..9bfe382663f 100644 --- a/crates/nargo_cli/tests/test_data/submodules/src/main.nr +++ b/crates/nargo_cli/tests/test_data/submodules/src/main.nr @@ -9,7 +9,7 @@ mod mysubmodule { use dep::std; fn my_bool_or(x: u1, y: u1) { - constrain x | y == 1; + assert(x | y == 1); } fn my_helper() {} diff --git a/crates/nargo_cli/tests/test_data/to_be_bytes/src/main.nr b/crates/nargo_cli/tests/test_data/to_be_bytes/src/main.nr index 1253656217d..f5831e8c524 100644 --- a/crates/nargo_cli/tests/test_data/to_be_bytes/src/main.nr +++ b/crates/nargo_cli/tests/test_data/to_be_bytes/src/main.nr @@ -7,8 +7,8 @@ fn main(x : Field) -> pub [u8; 31] { for i in 0..31 { bytes[i] = byte_array[i]; } - constrain bytes[30] == 60; - constrain bytes[29] == 33; - constrain bytes[28] == 31; + assert(bytes[30] == 60); + assert(bytes[29] == 33); + assert(bytes[28] == 31); bytes } diff --git a/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr b/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr index 6f57b407da7..1932b7556a8 100644 --- a/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr +++ b/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr @@ -7,8 +7,8 @@ fn main(x : Field) { // The result of this byte array will be little-endian let le_byte_array = x.to_le_bytes(31); - constrain le_byte_array[0] == 60; - constrain le_byte_array[0] == be_byte_array[30]; - constrain le_byte_array[1] == be_byte_array[29]; - constrain le_byte_array[2] == be_byte_array[28]; + assert(le_byte_array[0] == 60); + assert(le_byte_array[0] == be_byte_array[30]); + assert(le_byte_array[1] == be_byte_array[29]); + assert(le_byte_array[2] == be_byte_array[28]); } \ No newline at end of file diff --git 
a/crates/nargo_cli/tests/test_data/tuples/src/main.nr b/crates/nargo_cli/tests/test_data/tuples/src/main.nr index ce25b9171cd..4a003dc5a42 100644 --- a/crates/nargo_cli/tests/test_data/tuples/src/main.nr +++ b/crates/nargo_cli/tests/test_data/tuples/src/main.nr @@ -2,18 +2,18 @@ use dep::std; fn main(x: Field, y: Field) { let pair = (x, y); - constrain pair.0 == 1; - constrain pair.1 == 0; + assert(pair.0 == 1); + assert(pair.1 == 0); let (a, b) = if true { (0, 1) } else { (2, 3) }; - constrain a == 0; - constrain b == 1; + assert(a == 0); + assert(b == 1); let (u,v) = if x as u32 <1 { (x,x+1) } else { (x+1,x) }; - constrain u==x+1; - constrain v==x; + assert(u==x+1); + assert(v==x); } diff --git a/crates/nargo_cli/tests/test_data/xor/src/main.nr b/crates/nargo_cli/tests/test_data/xor/src/main.nr index cc7caf17fad..e893c938fc3 100644 --- a/crates/nargo_cli/tests/test_data/xor/src/main.nr +++ b/crates/nargo_cli/tests/test_data/xor/src/main.nr @@ -1,5 +1,5 @@ fn main(x : u32, y : pub u32) { let m = x ^ y; - constrain m != 10; + assert(m != 10); } \ No newline at end of file diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index 98cf5993edf..f03bcefeb2d 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -1357,7 +1357,7 @@ mod test { let src = r#" fn main(x : Field) { let y = x + x; - constrain y == x; + assert(y == x); } "#; @@ -1369,7 +1369,7 @@ mod test { let src = r#" fn main(x : Field) { let y = x + x; - constrain x == x; + assert(x == x); } "#; @@ -1392,7 +1392,7 @@ mod test { let src = r#" fn main(x : Field) { let y = x + x; - constrain y == z; + assert(y == z); } "#; @@ -1428,7 +1428,7 @@ mod test { let src = r#" fn main(x : Field) { let y = 5; - constrain y == x; + assert(y == x); } "#; diff --git a/examples_failing/ecdsa_secp256k1/src/main.nr b/examples_failing/ecdsa_secp256k1/src/main.nr index 
6ee7a98a89d..43a4f78e634 100644 --- a/examples_failing/ecdsa_secp256k1/src/main.nr +++ b/examples_failing/ecdsa_secp256k1/src/main.nr @@ -13,5 +13,5 @@ fn main(hashed_message : [32]u8, pub_key_x : [32]u8, pub_key_y : [32]u8, signatu // Is there ever a situation where someone would want // to ensure that a signature was invalid? let x = std::ecdsa_secp256k1::verify_signature(pub_key_x,pub_key_y,signature, hashed_message); - constrain x == 1; + assert(x == 1); } diff --git a/examples_failing/pow_const/src/main.nr b/examples_failing/pow_const/src/main.nr index 4355935d2ad..9b742417e15 100644 --- a/examples_failing/pow_const/src/main.nr +++ b/examples_failing/pow_const/src/main.nr @@ -1,6 +1,6 @@ use dep::std; fn main(_x : Field) { - constrain std::pow_const(2,3) == _x; + assert(std::pow_const(2,3) == _x); } diff --git a/noir_stdlib/src/ec/montcurve.nr b/noir_stdlib/src/ec/montcurve.nr index fad5e5e0a97..e917661f0f1 100644 --- a/noir_stdlib/src/ec/montcurve.nr +++ b/noir_stdlib/src/ec/montcurve.nr @@ -82,13 +82,13 @@ mod affine { // Curve constructor fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients - constrain k != 0; - constrain j*j != 4; + assert(k != 0); + assert(j*j != 4); let curve = Self {j, k, gen}; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } @@ -180,10 +180,10 @@ mod affine { let z = ZETA; // Non-square Field element required for map // Check whether curve is admissible - constrain j != 0; + assert(j != 0); let l = (j*j - 4)/(k*k); - constrain l != 0; - constrain is_square(l) == false; + assert(l != 0); + assert(is_square(l) == false); let x1 = safe_inverse(1+z*u*u)*(0 - (j/k)); @@ -284,13 +284,13 @@ mod curvegroup { // Curve constructor fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients - constrain k != 0; - constrain j*j != 4; + assert(k != 0); + assert(j*j != 4); let curve = Self {j, k, gen}; // gen should be on the curve - constrain 
curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } diff --git a/noir_stdlib/src/ec/swcurve.nr b/noir_stdlib/src/ec/swcurve.nr index 8e2a996e927..eae4f375e43 100644 --- a/noir_stdlib/src/ec/swcurve.nr +++ b/noir_stdlib/src/ec/swcurve.nr @@ -71,12 +71,12 @@ mod affine { // Curve constructor fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - constrain 4*a*a*a + 27*b*b != 0; + assert(4*a*a*a + 27*b*b != 0); let curve = Curve { a, b, gen }; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } @@ -164,7 +164,7 @@ mod affine { // where g(x) = x^3 + a*x + b. swu_map(c,z,.) then maps a Field element to a point on curve c. fn swu_map(self, z: Field, u: Field) -> Point { // Check whether curve is admissible - constrain self.a*self.b != 0; + assert(self.a*self.b != 0); let Curve {a, b, gen: _gen} = self; @@ -248,12 +248,12 @@ mod curvegroup { // Curve constructor fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - constrain 4*a*a*a + 27*b*b != 0; + assert(4*a*a*a + 27*b*b != 0); let curve = Curve { a, b, gen }; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } diff --git a/noir_stdlib/src/ec/tecurve.nr b/noir_stdlib/src/ec/tecurve.nr index 43c9f5d2017..8611e4270c3 100644 --- a/noir_stdlib/src/ec/tecurve.nr +++ b/noir_stdlib/src/ec/tecurve.nr @@ -81,12 +81,12 @@ mod affine { // Curve constructor fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - constrain a*d*(a-d) != 0; + assert(a*d*(a-d) != 0); let curve = Curve {a, d, gen}; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } @@ -286,12 +286,12 @@ mod curvegroup { // Curve constructor fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - constrain a*d*(a-d) != 0; + assert(a*d*(a-d) != 0); let curve = Curve { a, 
d, gen }; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } diff --git a/noir_stdlib/src/hash/poseidon.nr b/noir_stdlib/src/hash/poseidon.nr index 7ac365c4995..416f740bbdf 100644 --- a/noir_stdlib/src/hash/poseidon.nr +++ b/noir_stdlib/src/hash/poseidon.nr @@ -20,9 +20,9 @@ fn config( mds: [Field; N]) -> PoseidonConfig { // Input checks - constrain t as u8 * (rf + rp) == ark.len() as u8; - constrain t * t == mds.len(); - constrain alpha != 0; + assert(t as u8 * (rf + rp) == ark.len() as u8); + assert(t * t == mds.len()); + assert(alpha != 0); PoseidonConfig {t, rf, rp, alpha, ark, mds} } @@ -34,7 +34,7 @@ fn permute( -> [Field; O] { let PoseidonConfig {t, rf, rp, alpha, ark, mds} = pos_conf; - constrain t == state.len(); + assert(t == state.len()); let mut count = 0; @@ -68,7 +68,7 @@ fn absorb( capacity: comptime Field, // Capacity; usually 1 msg: [Field; P]) // Arbitrary length message -> [Field; O] { - constrain pos_conf.t == rate + capacity; + assert(pos_conf.t == rate + capacity); let mut i = 0; diff --git a/noir_stdlib/src/hash/poseidon/bn254.nr b/noir_stdlib/src/hash/poseidon/bn254.nr index 355e7d13a5f..9ba26dbd878 100644 --- a/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir_stdlib/src/hash/poseidon/bn254.nr @@ -15,9 +15,9 @@ fn permute( let rf = 8; let rp = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68][state.len() - 2]; - constrain t == state.len(); - constrain rf == config_rf as Field; - constrain rp == config_rp as Field; + assert(t == state.len()); + assert(rf == config_rf as Field); + assert(rp == config_rp as Field); let mut count = 0; @@ -73,7 +73,7 @@ fn absorb( msg: [Field; P] // Arbitrary length message ) -> [Field; O] { - constrain pos_conf.t == rate + capacity; + assert(pos_conf.t == rate + capacity); let mut i = 0; From 62b7496c450fbf105e405aa463c3e796de92a428 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 2 
May 2023 16:29:46 +0100 Subject: [PATCH 61/63] chore: Replace explicit if-elses with `FieldElement::from()` for boolean fields (#1266) * chore: replace if-elses with `FieldElement::from()` * chore: replace explicit equality with usage of `is_zero()` * chore: replace explicit usage of `from` with `.into()` --- crates/noirc_abi/src/input_parser/toml.rs | 14 +------ .../src/ssa/acir_gen/operations/bitwise.rs | 4 +- crates/noirc_evaluator/src/ssa/node.rs | 42 +++++++++++-------- 3 files changed, 28 insertions(+), 32 deletions(-) diff --git a/crates/noirc_abi/src/input_parser/toml.rs b/crates/noirc_abi/src/input_parser/toml.rs index 180cde4bf78..a737f784031 100644 --- a/crates/noirc_abi/src/input_parser/toml.rs +++ b/crates/noirc_abi/src/input_parser/toml.rs @@ -115,11 +115,7 @@ impl InputValue { InputValue::Field(new_value) } - TomlTypes::Bool(boolean) => { - let new_value = if boolean { FieldElement::one() } else { FieldElement::zero() }; - - InputValue::Field(new_value) - } + TomlTypes::Bool(boolean) => InputValue::Field(boolean.into()), TomlTypes::ArrayNum(arr_num) => { let array_elements = vecmap(arr_num, |elem_num| FieldElement::from(i128::from(elem_num))); @@ -132,13 +128,7 @@ impl InputValue { InputValue::Vec(array_elements) } TomlTypes::ArrayBool(arr_bool) => { - let array_elements = vecmap(arr_bool, |elem_bool| { - if elem_bool { - FieldElement::one() - } else { - FieldElement::zero() - } - }); + let array_elements = vecmap(arr_bool, FieldElement::from); InputValue::Vec(array_elements) } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs index f8ca271835e..00396f4d4b6 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs @@ -40,8 +40,8 @@ pub(super) fn simplify_bitwise( let max = FieldElement::from((1_u128 << bit_size) - 1); let (field, var) = match (lhs.to_const(), rhs.to_const()) { - 
(Some(l_c), None) => (l_c == FieldElement::zero() || l_c == max).then_some((l_c, rhs))?, - (None, Some(r_c)) => (r_c == FieldElement::zero() || r_c == max).then_some((r_c, lhs))?, + (Some(l_c), None) => (l_c.is_zero() || l_c == max).then_some((l_c, rhs))?, + (None, Some(r_c)) => (r_c.is_zero() || r_c == max).then_some((r_c, lhs))?, _ => return None, }; diff --git a/crates/noirc_evaluator/src/ssa/node.rs b/crates/noirc_evaluator/src/ssa/node.rs index 8819a96e1c3..bec3c923a6d 100644 --- a/crates/noirc_evaluator/src/ssa/node.rs +++ b/crates/noirc_evaluator/src/ssa/node.rs @@ -918,8 +918,10 @@ impl Binary { !res_type.is_native_field(), "ICE: comparisons are not implemented for field elements" ); - let res = if lhs < rhs { FieldElement::one() } else { FieldElement::zero() }; - return Ok(NodeEval::Const(res, ObjectType::boolean())); + return Ok(NodeEval::Const( + FieldElement::from(lhs < rhs), + ObjectType::boolean(), + )); } } BinaryOp::Ule => { @@ -931,8 +933,10 @@ impl Binary { !res_type.is_native_field(), "ICE: comparisons are not implemented for field elements" ); - let res = if lhs <= rhs { FieldElement::one() } else { FieldElement::zero() }; - return Ok(NodeEval::Const(res, ObjectType::boolean())); + return Ok(NodeEval::Const( + FieldElement::from(lhs <= rhs), + ObjectType::boolean(), + )); } } BinaryOp::Slt => (), @@ -942,8 +946,10 @@ impl Binary { return Ok(NodeEval::Const(FieldElement::zero(), ObjectType::boolean())); //n.b we assume the type of lhs and rhs is unsigned because of the opcode, we could also verify this } else if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - let res = if lhs < rhs { FieldElement::one() } else { FieldElement::zero() }; - return Ok(NodeEval::Const(res, ObjectType::boolean())); + return Ok(NodeEval::Const( + FieldElement::from(lhs < rhs), + ObjectType::boolean(), + )); } } BinaryOp::Lte => { @@ -951,30 +957,30 @@ impl Binary { return Ok(NodeEval::Const(FieldElement::one(), ObjectType::boolean())); //n.b we assume the type of lhs and 
rhs is unsigned because of the opcode, we could also verify this } else if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - let res = if lhs <= rhs { FieldElement::one() } else { FieldElement::zero() }; - return Ok(NodeEval::Const(res, ObjectType::boolean())); + return Ok(NodeEval::Const( + FieldElement::from(lhs <= rhs), + ObjectType::boolean(), + )); } } BinaryOp::Eq => { if self.lhs == self.rhs { return Ok(NodeEval::Const(FieldElement::one(), ObjectType::boolean())); } else if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - if lhs == rhs { - return Ok(NodeEval::Const(FieldElement::one(), ObjectType::boolean())); - } else { - return Ok(NodeEval::Const(FieldElement::zero(), ObjectType::boolean())); - } + return Ok(NodeEval::Const( + FieldElement::from(lhs == rhs), + ObjectType::boolean(), + )); } } BinaryOp::Ne => { if self.lhs == self.rhs { return Ok(NodeEval::Const(FieldElement::zero(), ObjectType::boolean())); } else if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - if lhs != rhs { - return Ok(NodeEval::Const(FieldElement::one(), ObjectType::boolean())); - } else { - return Ok(NodeEval::Const(FieldElement::zero(), ObjectType::boolean())); - } + return Ok(NodeEval::Const( + FieldElement::from(lhs != rhs), + ObjectType::boolean(), + )); } } BinaryOp::And => { From 4422bed143508e37cda6fb218f1cf62921fc0f6f Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 2 May 2023 16:37:39 +0100 Subject: [PATCH 62/63] feat: add integration tests for bitshift operators (#1272) * chore: add test for bitshift operators * chore: update error message to flag up that runtime shifts are not implemented yet * chore: split runtime and comptime bitshift tests * chore: replace `constrain` with `assert()` --- .../tests/test_data/bit_shifts_comptime/Nargo.toml | 5 +++++ .../tests/test_data/bit_shifts_comptime/Prover.toml | 1 + .../tests/test_data/bit_shifts_comptime/src/main.nr | 13 +++++++++++++ .../tests/test_data/bit_shifts_runtime/Nargo.toml | 5 +++++ 
.../tests/test_data/bit_shifts_runtime/Prover.toml | 2 ++ .../tests/test_data/bit_shifts_runtime/src/main.nr | 12 ++++++++++++ crates/nargo_cli/tests/test_data/config.toml | 2 +- .../src/ssa/acir_gen/operations/binary.rs | 2 +- 8 files changed, 40 insertions(+), 2 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_comptime/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_comptime/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_comptime/src/main.nr create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Nargo.toml b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Prover.toml b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Prover.toml new file mode 100644 index 00000000000..cfd62c406cb --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Prover.toml @@ -0,0 +1 @@ +x = 64 diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_comptime/src/main.nr b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/src/main.nr new file mode 100644 index 00000000000..c1c6890febb --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/src/main.nr @@ -0,0 +1,13 @@ +fn main(x: u64) { + let two: u64 = 2; + let three: u64 = 3; + + // comptime shifts on comptime values + assert(two << 2 == 8); + assert((two << 3) / 8 == two); + assert((three >> 1) == 1); + + // comptime shifts on runtime 
values + assert(x << 1 == 128); + assert(x >> 2 == 16); +} diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml new file mode 100644 index 00000000000..67bf6a6a234 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml @@ -0,0 +1,2 @@ +x = 64 +y = 1 diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr new file mode 100644 index 00000000000..903a5f35463 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr @@ -0,0 +1,12 @@ +fn main(x: u64, y: u64) { + // These are currently unimplemented and panic with "ShiftLeft and ShiftRight operations with shifts which are only known at runtime are not yet implemented." 
+ // See: https://github.com/noir-lang/noir/issues/1265 + + // runtime shifts on comptime values + assert(64 << y == 128); + assert(64 >> y == 32); + + // runtime shifts on runtime values + assert(x << y == 128); + assert(x >> y == 32); +} diff --git a/crates/nargo_cli/tests/test_data/config.toml b/crates/nargo_cli/tests/test_data/config.toml index 1c7536af5a2..80822d22375 100644 --- a/crates/nargo_cli/tests/test_data/config.toml +++ b/crates/nargo_cli/tests/test_data/config.toml @@ -2,7 +2,7 @@ # "1_mul", "2_div","3_add","4_sub","5_over", "6","6_array", "7_function","7","8_integration", "9_conditional", "10_slices", "assign_ex", "bool_not", "bool_or", "pedersen_check", "poseidonperm_x5_254", "poseidonsponge_x5_254", "pred_eq", "schnorr", "sha256", "tuples", # "array_len", "array_neq", "bit_and", "cast_bool", "comptime_array_access", "generics", "global_comptime", "main_bool_arg", "main_return", "merkle_insert", "modules", "modules_more", "scalar_mul", "simple_shield", "struct", "submodules", # Exclude "poseidonsponge_x5_254" and "sha2_byte" due to relatively long computation time and "sha2_blocks" due to very long computation time. -exclude = ["comptime_fail", "poseidonsponge_x5_254", "sha2_blocks", "sha2_byte"] +exclude = ["bit_shifts_runtime", "comptime_fail", "poseidonsponge_x5_254", "sha2_blocks", "sha2_byte"] # List of tests (as their directory name in test_data) expecting to fail: if the test pass, we report an error. 
diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs index 87280eb1fde..166a55b0d52 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs @@ -238,7 +238,7 @@ pub(crate) fn evaluate( }; InternalVar::from(bitwise_result) } - BinaryOp::Shl | BinaryOp::Shr(_) => unreachable!("ICE: ShiftLeft and ShiftRight are replaced by multiplications and divisions in optimization pass."), + BinaryOp::Shl | BinaryOp::Shr(_) => todo!("ShiftLeft and ShiftRight operations with shifts which are only known at runtime are not yet implemented."), i @ BinaryOp::Assign => unreachable!("Invalid Instruction: {:?}", i), }; Some(binary_output) From f144391b4295b127f3f422e862a087a90dac1dbf Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 2 May 2023 17:36:22 +0100 Subject: [PATCH 63/63] feat(nargo): Remove usage of `CompiledProgram` in CLI code and use separate ABI/bytecode (#1269) * feat: pass bytecode and abi separately in CLI code * chore: move throwing of error on invalid proof into `verify_with_path` --- crates/nargo_cli/src/cli/execute_cmd.rs | 19 ++++----- crates/nargo_cli/src/cli/mod.rs | 1 - crates/nargo_cli/src/cli/prove_cmd.rs | 38 ++++++++---------- crates/nargo_cli/src/cli/verify_cmd.rs | 51 ++++++++----------------- 4 files changed, 40 insertions(+), 69 deletions(-) diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs b/crates/nargo_cli/src/cli/execute_cmd.rs index adeefc860a5..b9b2be3febf 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -1,9 +1,10 @@ use std::path::Path; +use acvm::acir::circuit::Circuit; use acvm::PartialWitnessGenerator; use clap::Args; use noirc_abi::input_parser::{Format, InputValue}; -use noirc_abi::{InputMap, WitnessMap}; +use noirc_abi::{Abi, InputMap, WitnessMap}; use 
noirc_driver::{CompileOptions, CompiledProgram}; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; @@ -48,15 +49,15 @@ fn execute_with_path( ) -> Result<(Option, WitnessMap), CliError> { let backend = crate::backends::ConcreteBackend::default(); - let compiled_program = compile_circuit(&backend, program_dir, compile_options)?; + let CompiledProgram { abi, circuit } = compile_circuit(&backend, program_dir, compile_options)?; // Parse the initial witness values from Prover.toml let (inputs_map, _) = - read_inputs_from_file(program_dir, PROVER_INPUT_FILE, Format::Toml, &compiled_program.abi)?; + read_inputs_from_file(program_dir, PROVER_INPUT_FILE, Format::Toml, &abi)?; - let solved_witness = execute_program(&backend, &compiled_program, &inputs_map)?; + let solved_witness = execute_program(&backend, circuit, &abi, &inputs_map)?; - let public_abi = compiled_program.abi.public_abi(); + let public_abi = abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; Ok((return_value, solved_witness)) @@ -64,13 +65,13 @@ fn execute_with_path( pub(crate) fn execute_program( backend: &impl PartialWitnessGenerator, - compiled_program: &CompiledProgram, + circuit: Circuit, + abi: &Abi, inputs_map: &InputMap, ) -> Result { - let initial_witness = compiled_program.abi.encode(inputs_map, None)?; + let initial_witness = abi.encode(inputs_map, None)?; - let solved_witness = - nargo::ops::execute_circuit(backend, compiled_program.circuit.clone(), initial_witness)?; + let solved_witness = nargo::ops::execute_circuit(backend, circuit, initial_witness)?; Ok(solved_witness) } diff --git a/crates/nargo_cli/src/cli/mod.rs b/crates/nargo_cli/src/cli/mod.rs index 2bb92925e59..ef54682ab50 100644 --- a/crates/nargo_cli/src/cli/mod.rs +++ b/crates/nargo_cli/src/cli/mod.rs @@ -1,6 +1,5 @@ use clap::{Args, Parser, Subcommand}; use const_format::formatcp; -use noirc_abi::InputMap; use noirc_driver::CompileOptions; use std::path::{Path, PathBuf}; diff 
--git a/crates/nargo_cli/src/cli/prove_cmd.rs b/crates/nargo_cli/src/cli/prove_cmd.rs index cecdee23fee..2b9ed4b067a 100644 --- a/crates/nargo_cli/src/cli/prove_cmd.rs +++ b/crates/nargo_cli/src/cli/prove_cmd.rs @@ -2,9 +2,9 @@ use std::path::{Path, PathBuf}; use clap::Args; use nargo::artifacts::program::PreprocessedProgram; -use nargo::ops::{preprocess_program, prove_execution}; +use nargo::ops::{preprocess_program, prove_execution, verify_proof}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram}; +use noirc_driver::CompileOptions; use super::NargoConfig; use super::{ @@ -16,7 +16,7 @@ use super::{ }, }; use crate::{ - cli::{execute_cmd::execute_program, verify_cmd::verify_proof}, + cli::execute_cmd::execute_program, constants::{PROOFS_DIR, PROVER_INPUT_FILE, TARGET_DIR, VERIFIER_INPUT_FILE}, errors::CliError, }; @@ -78,20 +78,15 @@ pub(crate) fn prove_with_path>( let PreprocessedProgram { abi, bytecode, proving_key, verification_key, .. } = preprocessed_program; - let compiled_program = CompiledProgram { abi, circuit: bytecode }; // Parse the initial witness values from Prover.toml - let (inputs_map, _) = read_inputs_from_file( - &program_dir, - PROVER_INPUT_FILE, - Format::Toml, - &compiled_program.abi, - )?; + let (inputs_map, _) = + read_inputs_from_file(&program_dir, PROVER_INPUT_FILE, Format::Toml, &abi)?; - let solved_witness = execute_program(&backend, &compiled_program, &inputs_map)?; + let solved_witness = execute_program(&backend, bytecode.clone(), &abi, &inputs_map)?; // Write public inputs into Verifier.toml - let public_abi = compiled_program.abi.clone().public_abi(); + let public_abi = abi.public_abi(); let (public_inputs, return_value) = public_abi.decode(&solved_witness)?; write_inputs_to_file( @@ -102,19 +97,16 @@ pub(crate) fn prove_with_path>( Format::Toml, )?; - let proof = prove_execution(&backend, &compiled_program.circuit, solved_witness, &proving_key)?; + let proof = prove_execution(&backend, 
&bytecode, solved_witness, &proving_key)?; if check_proof { - let no_proof_name = "".into(); - verify_proof( - &backend, - &compiled_program, - public_inputs, - return_value, - &proof, - &verification_key, - no_proof_name, - )?; + let public_inputs = public_abi.encode(&public_inputs, return_value)?; + let valid_proof = + verify_proof(&backend, &bytecode, &proof, public_inputs, &verification_key)?; + + if !valid_proof { + return Err(CliError::InvalidProof("".into())); + } } let proof_path = if let Some(proof_name) = proof_name { diff --git a/crates/nargo_cli/src/cli/verify_cmd.rs b/crates/nargo_cli/src/cli/verify_cmd.rs index 07b7e351ee9..84a6416d44e 100644 --- a/crates/nargo_cli/src/cli/verify_cmd.rs +++ b/crates/nargo_cli/src/cli/verify_cmd.rs @@ -1,16 +1,16 @@ use super::compile_cmd::compile_circuit; use super::fs::{inputs::read_inputs_from_file, load_hex_data, program::read_program_from_file}; -use super::{InputMap, NargoConfig}; +use super::NargoConfig; use crate::{ constants::{PROOFS_DIR, PROOF_EXT, TARGET_DIR, VERIFIER_INPUT_FILE}, errors::CliError, }; -use acvm::ProofSystemCompiler; + use clap::Args; use nargo::artifacts::program::PreprocessedProgram; use nargo::ops::preprocess_program; -use noirc_abi::input_parser::{Format, InputValue}; -use noirc_driver::{CompileOptions, CompiledProgram}; +use noirc_abi::input_parser::Format; +use noirc_driver::CompileOptions; use std::path::{Path, PathBuf}; /// Given a proof and a program, verify whether the proof is valid @@ -34,7 +34,12 @@ pub(crate) fn run(args: VerifyCommand, config: NargoConfig) -> Result<(), CliErr .circuit_name .map(|circuit_name| config.program_dir.join(TARGET_DIR).join(circuit_name)); - verify_with_path(config.program_dir, proof_path, circuit_build_path, args.compile_options) + verify_with_path( + &config.program_dir, + proof_path, + circuit_build_path.as_ref(), + args.compile_options, + ) } fn verify_with_path>( @@ -55,47 +60,21 @@ fn verify_with_path>( }; let PreprocessedProgram { abi, 
bytecode, verification_key, .. } = preprocessed_program; - let compiled_program = CompiledProgram { abi, circuit: bytecode }; // Load public inputs (if any) from `VERIFIER_INPUT_FILE`. - let public_abi = compiled_program.abi.clone().public_abi(); + let public_abi = abi.public_abi(); let (public_inputs_map, return_value) = read_inputs_from_file(program_dir, VERIFIER_INPUT_FILE, Format::Toml, &public_abi)?; - verify_proof( - &backend, - &compiled_program, - public_inputs_map, - return_value, - &load_hex_data(&proof_path)?, - &verification_key, - proof_path, - ) -} - -pub(crate) fn verify_proof( - backend: &impl ProofSystemCompiler, - compiled_program: &CompiledProgram, - public_inputs_map: InputMap, - return_value: Option, - proof: &[u8], - verification_key: &[u8], - proof_name: PathBuf, -) -> Result<(), CliError> { - let public_abi = compiled_program.abi.clone().public_abi(); let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; + let proof = load_hex_data(&proof_path)?; - let valid_proof = nargo::ops::verify_proof( - backend, - &compiled_program.circuit, - proof, - public_inputs, - verification_key, - )?; + let valid_proof = + nargo::ops::verify_proof(&backend, &bytecode, &proof, public_inputs, &verification_key)?; if valid_proof { Ok(()) } else { - Err(CliError::InvalidProof(proof_name)) + Err(CliError::InvalidProof(proof_path)) } }