diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock
index 2a7ac18c9c2..43287fcb588 100644
--- a/avm-transpiler/Cargo.lock
+++ b/avm-transpiler/Cargo.lock
@@ -160,9 +160,6 @@ dependencies = [
 [[package]]
 name = "arena"
 version = "0.24.0"
-dependencies = [
- "generational-arena",
-]
 
 [[package]]
 name = "ark-bn254"
@@ -800,15 +797,6 @@ dependencies = [
  "byteorder",
 ]
 
-[[package]]
-name = "generational-arena"
-version = "0.2.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "877e94aff08e743b651baaea359664321055749b398adff8740a7399af7796e7"
-dependencies = [
- "cfg-if",
-]
-
 [[package]]
 name = "generic-array"
 version = "0.14.7"
@@ -1096,6 +1084,7 @@ dependencies = [
  "noirc_macros",
  "rust-embed",
  "serde",
+ "thiserror",
  "tracing",
 ]
diff --git a/avm-transpiler/rust-toolchain.toml b/avm-transpiler/rust-toolchain.toml
index eb89935c774..c9f40a6577e 100644
--- a/avm-transpiler/rust-toolchain.toml
+++ b/avm-transpiler/rust-toolchain.toml
@@ -1,5 +1,5 @@
 [toolchain]
-channel = "1.72"
-components = [ "rust-src" ]
+channel = "1.73"
+components = ["rust-src"]
 targets = []
 profile = "default"
diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp
index 1bb6b32f93e..e18d365249c 100644
--- a/barretenberg/cpp/src/barretenberg/bb/main.cpp
+++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp
@@ -150,7 +150,7 @@ bool accumulateAndVerifyGoblin(const std::string& bytecodePath, const std::strin
     // TODO(https://github.com/AztecProtocol/barretenberg/issues/811): Don't hardcode dyadic circuit size. Currently set
     // to max circuit size present in acir tests suite.
-    size_t hardcoded_bn254_dyadic_size_hack = 1 << 18;
+    size_t hardcoded_bn254_dyadic_size_hack = 1 << 19;
     init_bn254_crs(hardcoded_bn254_dyadic_size_hack);
     size_t hardcoded_grumpkin_dyadic_size_hack = 1 << 10; // For eccvm only
     init_grumpkin_crs(hardcoded_grumpkin_dyadic_size_hack);
@@ -189,7 +189,7 @@ bool proveAndVerifyGoblin(const std::string& bytecodePath, const std::string& wi
     // TODO(https://github.com/AztecProtocol/barretenberg/issues/811): Don't hardcode dyadic circuit size. Currently set
     // to max circuit size present in acir tests suite.
-    size_t hardcoded_bn254_dyadic_size_hack = 1 << 18;
+    size_t hardcoded_bn254_dyadic_size_hack = 1 << 19;
     init_bn254_crs(hardcoded_bn254_dyadic_size_hack);
     size_t hardcoded_grumpkin_dyadic_size_hack = 1 << 10; // For eccvm only
     init_grumpkin_crs(hardcoded_grumpkin_dyadic_size_hack);
diff --git a/noir-projects/aztec-nr/authwit/src/entrypoint/app.nr b/noir-projects/aztec-nr/authwit/src/entrypoint/app.nr
index a921a5d5542..dc04a740238 100644
--- a/noir-projects/aztec-nr/authwit/src/entrypoint/app.nr
+++ b/noir-projects/aztec-nr/authwit/src/entrypoint/app.nr
@@ -4,11 +4,11 @@ use dep::aztec::protocol_types::{constants::GENERATOR_INDEX__SIGNATURE_PAYLOAD,
 use crate::entrypoint::function_call::{FunctionCall, FUNCTION_CALL_SIZE_IN_BYTES};
 
 // FUNCTION_CALL_SIZE * ACCOUNT_MAX_CALLS + 1
-global APP_PAYLOAD_SIZE: Field = 17;
+global APP_PAYLOAD_SIZE: u64 = 17;
 // FUNCTION_CALL_SIZE_IN_BYTES * ACCOUNT_MAX_CALLS + 32
-global APP_PAYLOAD_SIZE_IN_BYTES: Field = 420;
+global APP_PAYLOAD_SIZE_IN_BYTES: u64 = 420;
 
-global ACCOUNT_MAX_CALLS: Field = 4;
+global ACCOUNT_MAX_CALLS: u64 = 4;
 
 // Note: If you change the following struct you have to update default_entrypoint.ts
 // docs:start:app-payload-struct
@@ -21,7 +21,7 @@ struct AppPayload {
 impl Serialize<APP_PAYLOAD_SIZE> for AppPayload {
     // Serializes the entrypoint struct
     fn serialize(self) -> [Field; APP_PAYLOAD_SIZE] {
-        let mut fields: BoundedVec<Field, APP_PAYLOAD_SIZE> = BoundedVec::new(0);
+        let mut fields: BoundedVec<Field, APP_PAYLOAD_SIZE> = BoundedVec::new();
         for call in self.function_calls {
             fields.extend_from_array(call.serialize());
         }
@@ -42,7 +42,7 @@ impl Hash for AppPayload {
 impl AppPayload {
     // Serializes the payload as an array of bytes. Useful for hashing with sha256.
     fn to_be_bytes(self) -> [u8; APP_PAYLOAD_SIZE_IN_BYTES] {
-        let mut bytes: BoundedVec<u8, APP_PAYLOAD_SIZE_IN_BYTES> = BoundedVec::new(0);
+        let mut bytes: BoundedVec<u8, APP_PAYLOAD_SIZE_IN_BYTES> = BoundedVec::new();
 
         for i in 0..ACCOUNT_MAX_CALLS {
             bytes.extend_from_array(self.function_calls[i].to_be_bytes());
diff --git a/noir-projects/aztec-nr/authwit/src/entrypoint/fee.nr b/noir-projects/aztec-nr/authwit/src/entrypoint/fee.nr
index 1d46e802686..657d21dff30 100644
--- a/noir-projects/aztec-nr/authwit/src/entrypoint/fee.nr
+++ b/noir-projects/aztec-nr/authwit/src/entrypoint/fee.nr
@@ -20,7 +20,7 @@ struct FeePayload {
 impl Serialize<FEE_PAYLOAD_SIZE> for FeePayload {
     // Serializes the entrypoint struct
     fn serialize(self) -> [Field; FEE_PAYLOAD_SIZE] {
-        let mut fields: BoundedVec<Field, FEE_PAYLOAD_SIZE> = BoundedVec::new(0);
+        let mut fields: BoundedVec<Field, FEE_PAYLOAD_SIZE> = BoundedVec::new();
         for i in 0..MAX_FEE_FUNCTION_CALLS {
             fields.extend_from_array(self.function_calls[i].serialize());
         }
@@ -40,7 +40,7 @@ impl Hash for FeePayload {
 impl FeePayload {
     fn to_be_bytes(self) -> [u8; FEE_PAYLOAD_SIZE_IN_BYTES] {
-        let mut bytes: BoundedVec<u8, FEE_PAYLOAD_SIZE_IN_BYTES> = BoundedVec::new(0);
+        let mut bytes: BoundedVec<u8, FEE_PAYLOAD_SIZE_IN_BYTES> = BoundedVec::new();
 
         for i in 0..MAX_FEE_FUNCTION_CALLS {
             bytes.extend_from_array(self.function_calls[i].to_be_bytes());
diff --git a/noir-projects/aztec-nr/aztec/src/context/globals/private_global_variables.nr b/noir-projects/aztec-nr/aztec/src/context/globals/private_global_variables.nr
index 038273862fb..99126a10dbf 100644
--- a/noir-projects/aztec-nr/aztec/src/context/globals/private_global_variables.nr
+++ b/noir-projects/aztec-nr/aztec/src/context/globals/private_global_variables.nr
@@ -1,6 +1,6 @@
 use dep::protocol_types::traits::Serialize;
 
-global PRIVATE_GLOBAL_VARIABLES_LENGTH: Field = 2;
+global PRIVATE_GLOBAL_VARIABLES_LENGTH: u64 = 2;
 
 // docs:start:private-global-variables
 struct PrivateGlobalVariables {
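Note on the recurring BoundedVec change above and below: every `BoundedVec::new(0)` / `BoundedVec::new(T::empty())` -> `BoundedVec::new()` replacement in this diff comes from one upstream Noir stdlib change, in which `BoundedVec::new` no longer takes a caller-supplied default element (unused storage slots are now zeroed internally). A minimal sketch of the new usage; the function and sizes here are illustrative, not from this PR:

    fn serialize_pair(values: [Field; 2]) -> [Field; 3] {
        // Before the sync: BoundedVec::new(0) seeded unused slots with an explicit default.
        // After the sync: no argument; unused slots are zero-initialized internally.
        let mut fields: BoundedVec<Field, 3> = BoundedVec::new();
        fields.extend_from_array(values);
        fields.push(values[0] + values[1]);
        fields.storage
    }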
diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr
index 3f4185d047c..d480fff9a36 100644
--- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr
+++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr
@@ -79,15 +79,15 @@ impl PrivateContext {
             side_effect_counter,
             min_revertible_side_effect_counter,
             args_hash,
-            return_values: BoundedVec::new(0),
-            read_requests: BoundedVec::new(SideEffect::empty()),
-            nullifier_key_validation_requests: BoundedVec::new(NullifierKeyValidationRequest::empty()),
-            new_note_hashes: BoundedVec::new(SideEffect::empty()),
-            new_nullifiers: BoundedVec::new(SideEffectLinkedToNoteHash::empty()),
+            return_values: BoundedVec::new(),
+            read_requests: BoundedVec::new(),
+            nullifier_key_validation_requests: BoundedVec::new(),
+            new_note_hashes: BoundedVec::new(),
+            new_nullifiers: BoundedVec::new(),
             historical_header: inputs.historical_header,
-            private_call_stack_hashes: BoundedVec::new(0),
-            public_call_stack_hashes: BoundedVec::new(0),
-            new_l2_to_l1_msgs: BoundedVec::new(L2ToL1Message::empty()),
+            private_call_stack_hashes: BoundedVec::new(),
+            public_call_stack_hashes: BoundedVec::new(),
+            new_l2_to_l1_msgs: BoundedVec::new(),
             // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
             // encrypted_logs_preimages: Vec::new(),
             // unencrypted_logs_preimages: Vec::new(),
diff --git a/noir-projects/aztec-nr/aztec/src/context/public_context.nr b/noir-projects/aztec-nr/aztec/src/context/public_context.nr
index c75f79d902d..6c74475d1b5 100644
--- a/noir-projects/aztec-nr/aztec/src/context/public_context.nr
+++ b/noir-projects/aztec-nr/aztec/src/context/public_context.nr
@@ -54,14 +54,14 @@ impl PublicContext {
             inputs,
             side_effect_counter: inputs.call_context.start_side_effect_counter,
             args_hash,
-            return_values: BoundedVec::new(0),
-            contract_storage_update_requests: BoundedVec::new(empty_storage_update),
-            contract_storage_reads: BoundedVec::new(empty_storage_read),
-            public_call_stack_hashes: BoundedVec::new(0),
-            new_note_hashes: BoundedVec::new(SideEffect::empty()),
-            new_nullifiers: BoundedVec::new(SideEffectLinkedToNoteHash::empty()),
-            new_l2_to_l1_msgs: BoundedVec::new(L2ToL1Message::empty()),
-            unencrypted_logs_hash: BoundedVec::new(0),
+            return_values: BoundedVec::new(),
+            contract_storage_update_requests: BoundedVec::new(),
+            contract_storage_reads: BoundedVec::new(),
+            public_call_stack_hashes: BoundedVec::new(),
+            new_note_hashes: BoundedVec::new(),
+            new_nullifiers: BoundedVec::new(),
+            new_l2_to_l1_msgs: BoundedVec::new(),
+            unencrypted_logs_hash: BoundedVec::new(),
             unencrypted_logs_preimages_length: 0,
             historical_header: inputs.historical_header,
             prover_address: AztecAddress::zero() // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
diff --git a/noir-projects/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr b/noir-projects/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr
index ae710e30e1b..25da5bfec56 100644
--- a/noir-projects/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr
+++ b/noir-projects/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr
@@ -7,13 +7,13 @@ struct L1ToL2MessageGetterData {
     leaf_index: Field
 }
 
-pub fn l1_to_l2_message_getter_len() -> Field {
+pub fn l1_to_l2_message_getter_len() -> u64 {
     L1_TO_L2_MESSAGE_LENGTH + 1 + L1_TO_L2_MSG_TREE_HEIGHT
 }
 
 pub fn make_l1_to_l2_message_getter_data<N>(
     fields: [Field; N],
-    start: Field,
+    start: u64,
     secret: Field
 ) -> L1ToL2MessageGetterData {
     L1ToL2MessageGetterData {
diff --git a/noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr b/noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr
index bb013ef946e..85b0f0d37ed 100644
--- a/noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr
+++ b/noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr
@@ -91,8 +91,8 @@ impl NoteGetterOptions {
     // This function initializes a NoteGetterOptions that simply returns the maximum number of notes allowed in a call.
     pub fn new() -> NoteGetterOptions where Note: NoteInterface {
         NoteGetterOptions {
-            selects: BoundedVec::new(Option::none()),
-            sorts: BoundedVec::new(Option::none()),
+            selects: BoundedVec::new(),
+            sorts: BoundedVec::new(),
             limit: MAX_READ_REQUESTS_PER_CALL as u32,
             offset: 0,
             filter: return_all_notes,
@@ -108,8 +108,8 @@ impl NoteGetterOptions {
         filter_args: FILTER_ARGS
     ) -> Self where Note: NoteInterface {
         NoteGetterOptions {
-            selects: BoundedVec::new(Option::none()),
-            sorts: BoundedVec::new(Option::none()),
+            selects: BoundedVec::new(),
+            sorts: BoundedVec::new(),
             limit: MAX_READ_REQUESTS_PER_CALL as u32,
             offset: 0,
             filter,
diff --git a/noir-projects/aztec-nr/aztec/src/note/note_viewer_options.nr b/noir-projects/aztec-nr/aztec/src/note/note_viewer_options.nr
index a291b8f8c5d..e7fe43a66bf 100644
--- a/noir-projects/aztec-nr/aztec/src/note/note_viewer_options.nr
+++ b/noir-projects/aztec-nr/aztec/src/note/note_viewer_options.nr
@@ -16,8 +16,8 @@ struct NoteViewerOptions {
 impl NoteViewerOptions {
     pub fn new() -> NoteViewerOptions where Note: NoteInterface {
         NoteViewerOptions {
-            selects: BoundedVec::new(Option::none()),
-            sorts: BoundedVec::new(Option::none()),
+            selects: BoundedVec::new(),
+            sorts: BoundedVec::new(),
             limit: MAX_NOTES_PER_PAGE as u32,
             offset: 0,
             status: NoteStatus.ACTIVE
diff --git a/noir-projects/aztec-nr/aztec/src/oracle/debug_log.nr b/noir-projects/aztec-nr/aztec/src/oracle/debug_log.nr
index 8259bf2cba0..a701336d926 100644
--- a/noir-projects/aztec-nr/aztec/src/oracle/debug_log.nr
+++ b/noir-projects/aztec-nr/aztec/src/oracle/debug_log.nr
@@ -26,7 +26,7 @@ unconstrained pub fn debug_log(msg: T) {
 /// Example:
 /// debug_log_format("get_2(slot:{0}) =>\n\t0:{1}\n\t1:{2}", [storage_slot, note0_hash, note1_hash]);
 unconstrained pub fn debug_log_format(msg: T, args: [Field; N]) {
-    assert(debug_log_format_oracle(msg, args, args.len()) == 0);
+    assert(debug_log_format_oracle(msg, args, args.len() as Field) == 0);
 }
 
 /// Example:
diff --git a/noir-projects/aztec-nr/aztec/src/oracle/get_public_data_witness.nr b/noir-projects/aztec-nr/aztec/src/oracle/get_public_data_witness.nr
index c7a8f537e97..f59ae02a838 100644
--- a/noir-projects/aztec-nr/aztec/src/oracle/get_public_data_witness.nr
+++ b/noir-projects/aztec-nr/aztec/src/oracle/get_public_data_witness.nr
@@ -4,7 +4,7 @@ use dep::protocol_types::{
     utils::arr_copy_slice
 };
 
-global LEAF_PREIMAGE_LENGTH: Field = 4;
+global LEAF_PREIMAGE_LENGTH: u64 = 4;
 global PUBLIC_DATA_WITNESS: Field = 45;
 
 struct PublicDataWitness {
@@ -23,7 +23,7 @@ unconstrained pub fn get_public_data_witness(block_number: u32, leaf_slot: Field
     let fields = get_public_data_witness_oracle(block_number, leaf_slot);
     PublicDataWitness {
         index: fields[0],
-        leaf_preimage: PublicDataTreeLeafPreimage { slot: fields[1], value: fields[2], next_index: fields[3] as u32, next_slot: fields[4] },
+        leaf_preimage: PublicDataTreeLeafPreimage { slot: fields[1], value: fields[2], next_index: fields[3] as u64, next_slot: fields[4] },
        path: arr_copy_slice(fields, [0; PUBLIC_DATA_TREE_HEIGHT], 1 + LEAF_PREIMAGE_LENGTH)
     }
 }
diff --git a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr
index 6b91b35392e..02194699c8f 100644
--- a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr
+++ b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr
@@ -101,14 +101,14 @@ unconstrained pub fn get_notes(
         status,
         placeholder_fields
     );
-    let num_notes = fields[0] as u32;
+    let num_notes = fields[0] as u64;
     let contract_address = AztecAddress::from_field(fields[1]);
     for i in 0..placeholder_opt_notes.len() {
-        if i as u32 < num_notes {
+        if i < num_notes {
             // lengths named as per typescript.
-            let return_header_length: Field = 2; // num_notes & contract_address.
-            let extra_preimage_length: Field = 2; // nonce & is_transient.
-            let read_offset: Field = return_header_length + i * (N + extra_preimage_length);
+            let return_header_length: u64 = 2; // num_notes & contract_address.
+            let extra_preimage_length: u64 = 2; // nonce & is_transient.
+            let read_offset: u64 = return_header_length + i * (N + extra_preimage_length);
             let nonce = fields[read_offset];
             let is_transient = fields[read_offset + 1] as bool;
             let header = NoteHeader { contract_address, nonce, storage_slot, is_transient };
diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr
index 8720a8f58c5..16b15e6ab10 100644
--- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr
+++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr
@@ -63,7 +63,7 @@ impl SharedImmutable {
             // - a membership proof of the value in the public tree of the header
             prove_public_value_inclusion(
                 fields[i],
-                self.storage_slot + i,
+                self.storage_slot + i as Field,
                 (*private_context).this_address(),
                 (*private_context)
             )
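A pattern worth calling out in the notes.nr and shared_immutable.nr hunks above: loop indices are now u64, so they index arrays and compare against other u64 values directly, but mixing an index into Field arithmetic requires an explicit widening cast. A small illustrative sketch (the function and sizes are hypothetical):

    fn slots_from(base_slot: Field) -> [Field; 4] {
        let mut slots = [0; 4];
        for i in 0..4 {
            // `i` is a u64: usable as an array index as-is, but it must be
            // cast explicitly before being added to a Field value.
            slots[i] = base_slot + i as Field;
        }
        slots
    }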
diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/dapp_payload.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/dapp_payload.nr
index fc55799b218..941f86472f0 100644
--- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/dapp_payload.nr
+++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/dapp_payload.nr
@@ -7,11 +7,11 @@ use dep::aztec::protocol_types::{
 use dep::authwit::entrypoint::function_call::{FunctionCall, FUNCTION_CALL_SIZE_IN_BYTES};
 
 // FUNCTION_CALL_SIZE * DAPP_MAX_CALLS + 1
-global DAPP_PAYLOAD_SIZE: Field = 5;
+global DAPP_PAYLOAD_SIZE: u64 = 5;
 // FUNCTION_CALL_SIZE_IN_BYTES * DAPP_MAX_CALLS + 32
-global DAPP_PAYLOAD_SIZE_IN_BYTES: Field = 129;
+global DAPP_PAYLOAD_SIZE_IN_BYTES: u64 = 129;
 
-global DAPP_MAX_CALLS: Field = 1;
+global DAPP_MAX_CALLS: u64 = 1;
 
 // Note: If you change the following struct you have to update default_entrypoint.ts
 // docs:start:app-payload-struct
@@ -24,7 +24,7 @@ struct DAppPayload {
 impl Serialize<DAPP_PAYLOAD_SIZE> for DAppPayload {
     // Serializes the entrypoint struct
     fn serialize(self) -> [Field; DAPP_PAYLOAD_SIZE] {
-        let mut fields: BoundedVec<Field, DAPP_PAYLOAD_SIZE> = BoundedVec::new(0);
+        let mut fields: BoundedVec<Field, DAPP_PAYLOAD_SIZE> = BoundedVec::new();
         for call in self.function_calls {
             fields.extend_from_array(call.serialize());
         }
@@ -45,7 +45,7 @@ impl Hash for DAppPayload {
 impl DAppPayload {
     // Serializes the payload as an array of bytes. Useful for hashing with sha256.
     fn to_be_bytes(self) -> [u8; DAPP_PAYLOAD_SIZE_IN_BYTES] {
-        let mut bytes: BoundedVec<u8, DAPP_PAYLOAD_SIZE_IN_BYTES> = BoundedVec::new(0);
+        let mut bytes: BoundedVec<u8, DAPP_PAYLOAD_SIZE_IN_BYTES> = BoundedVec::new();
 
         for i in 0..DAPP_MAX_CALLS {
             bytes.extend_from_array(self.function_calls[i].to_be_bytes());
diff --git a/noir-projects/noir-contracts/contracts/slow_tree_contract/src/main.nr b/noir-projects/noir-contracts/contracts/slow_tree_contract/src/main.nr
index e930be53d75..5161f7b04db 100644
--- a/noir-projects/noir-contracts/contracts/slow_tree_contract/src/main.nr
+++ b/noir-projects/noir-contracts/contracts/slow_tree_contract/src/main.nr
@@ -24,7 +24,7 @@ contract SlowTree {
     use crate::types::{MembershipProof, deserialize_membership_proof};
 
     // docs:start:constants_and_storage
-    global TREE_HEIGHT: Field = 254;
+    global TREE_HEIGHT: u64 = 254;
     global MEMBERSHIP_SIZE: Field = 256; // TREE_HEIGHT + 2
     global UPDATE_SIZE: Field = 512; // TREE_HEIGHT * 2 + 4
diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr
index 90228a29b91..1c161de5935 100644
--- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr
+++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr
@@ -171,7 +171,7 @@ contract Test {
         a_struct: DummyNote,
         a_deep_struct: DeepStruct
     ) -> distinct pub PrivateCircuitPublicInputs {
-        let mut args: BoundedVec = BoundedVec::new(0);
+        let mut args: BoundedVec = BoundedVec::new();
         args.push(a_field);
         args.push(a_bool as Field);
        args.push(a_number as Field);
diff --git a/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr
index 3a9adffd96b..ea65c769314 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr
@@ -187,7 +187,7 @@ pub fn update_end_values(
     // Transient read requests and witnesses are accumulated in public_inputs.end
     // We silo the read requests (domain separation per contract address)
-    let mut siloed_read_requests: BoundedVec<SideEffect, MAX_READ_REQUESTS_PER_CALL> = BoundedVec::new(SideEffect::empty());
+    let mut siloed_read_requests: BoundedVec<SideEffect, MAX_READ_REQUESTS_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_READ_REQUESTS_PER_CALL {
         let read_request = read_requests[i].value;
         let witness = read_request_membership_witnesses[i];
@@ -210,7 +210,7 @@ pub fn update_end_values(
     // Enhance commitments and nullifiers with domain separation whereby domain is the contract.
     //
     // nullifiers
-    let mut siloed_new_nullifiers: BoundedVec<SideEffectLinkedToNoteHash, MAX_NEW_NULLIFIERS_PER_CALL> = BoundedVec::new(SideEffectLinkedToNoteHash::empty());
+    let mut siloed_new_nullifiers: BoundedVec<SideEffectLinkedToNoteHash, MAX_NEW_NULLIFIERS_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_NEW_NULLIFIERS_PER_CALL {
         let new_nullifier = new_nullifiers[i];
         if new_nullifier.value != 0 {
@@ -231,7 +231,7 @@ pub fn update_end_values(
     public_inputs.end.new_nullifiers.extend_from_bounded_vec(siloed_new_nullifiers);
 
     // note hashes
-    let mut siloed_new_note_hashes: BoundedVec<SideEffect, MAX_NEW_NOTE_HASHES_PER_CALL> = BoundedVec::new(SideEffect::empty());
+    let mut siloed_new_note_hashes: BoundedVec<SideEffect, MAX_NEW_NOTE_HASHES_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_NEW_NOTE_HASHES_PER_CALL {
         let new_note_hash = new_note_hashes[i].value;
         if new_note_hash != 0 {
@@ -262,7 +262,7 @@ pub fn update_end_values(
     // new l2 to l1 messages
     let new_l2_to_l1_msgs = private_call_public_inputs.new_l2_to_l1_msgs;
-    let mut new_l2_to_l1_msgs_to_insert : BoundedVec<Field, MAX_NEW_L2_TO_L1_MSGS_PER_CALL> = BoundedVec::new(0);
+    let mut new_l2_to_l1_msgs_to_insert : BoundedVec<Field, MAX_NEW_L2_TO_L1_MSGS_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_NEW_L2_TO_L1_MSGS_PER_CALL {
         let msg = new_l2_to_l1_msgs[i];
         if !is_empty(msg) {
diff --git a/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr b/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr
index d4234a022fd..797690f882c 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr
@@ -522,7 +522,7 @@ mod tests {
         let mut full_new_note_hashes = [SideEffect::empty(); MAX_NEW_NOTE_HASHES_PER_TX];
         for i in 0..MAX_NEW_NOTE_HASHES_PER_TX {
             full_new_note_hashes[i] = SideEffect {
-                value: i + 1,
+                value: i as Field + 1,
                 counter: i as u32,
             };
         }
@@ -750,7 +750,7 @@ mod tests {
     fn zero_0th_nullifier_fails() {
         let mut builder = PrivateKernelInnerInputsBuilder::new();
 
-        builder.previous_kernel.end.new_nullifiers = BoundedVec::new(SideEffectLinkedToNoteHash::empty());
+        builder.previous_kernel.end.new_nullifiers = BoundedVec::new();
 
         builder.failed();
     }
diff --git a/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_tail.nr
index eaa0fc06353..55e8b64a232 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_tail.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_tail.nr
@@ -20,11 +20,11 @@ use dep::types::{
 struct PrivateKernelTailCircuitPrivateInputs {
     previous_kernel: PrivateKernelInnerData,
     sorted_new_note_hashes: [SideEffect; MAX_NEW_NOTE_HASHES_PER_TX],
-    sorted_new_note_hashes_indexes: [u32; MAX_NEW_NOTE_HASHES_PER_TX],
-    read_commitment_hints: [Field; MAX_READ_REQUESTS_PER_TX],
+    sorted_new_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX],
+    read_commitment_hints: [u64; MAX_READ_REQUESTS_PER_TX],
     sorted_new_nullifiers: [SideEffectLinkedToNoteHash; MAX_NEW_NULLIFIERS_PER_TX],
-    sorted_new_nullifiers_indexes: [u32; MAX_NEW_NULLIFIERS_PER_TX],
-    nullifier_commitment_hints: [Field; MAX_NEW_NULLIFIERS_PER_TX],
+    sorted_new_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX],
+    nullifier_commitment_hints: [u64; MAX_NEW_NULLIFIERS_PER_TX],
     master_nullifier_secret_keys: [GrumpkinPrivateKey; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX],
 }
 
@@ -54,7 +54,7 @@ impl PrivateKernelTailCircuitPrivateInputs {
         }
 
         // Empty out nullifier key validation requests after verifying them.
-        public_inputs.end.nullifier_key_validation_requests = BoundedVec::new(NullifierKeyValidationRequestContext::empty());
+        public_inputs.end.nullifier_key_validation_requests = BoundedVec::new();
     }
 
     fn match_reads_to_commitments(self, public_inputs: &mut PrivateKernelCircuitPublicInputsBuilder) {
@@ -67,7 +67,7 @@ impl PrivateKernelTailCircuitPrivateInputs {
             let read_commitment_hint = self.read_commitment_hints[rr_idx] as u64;
 
             if (read_request.value != 0) {
-                let hash = new_note_hashes.get_unchecked(read_commitment_hint as Field);
+                let hash = new_note_hashes.get_unchecked(read_commitment_hint);
                 assert_eq(read_request.value, hash.value, "Hinted hash does not match read request");
                 assert(
                     read_request.counter > hash.counter, "Read request counter must be greater than hash counter"
@@ -76,10 +76,10 @@ impl PrivateKernelTailCircuitPrivateInputs {
         }
 
         // Empty out read requests after matching them to commitments
-        public_inputs.end.read_requests = BoundedVec::new(SideEffect::empty());
+        public_inputs.end.read_requests = BoundedVec::new();
     }
 
-    fn assert_sorted_counters<T, N>(original: [T; N], sorted: [T; N], indexes: [u32; N]) where T: Eq + Ordered + Empty {
+    fn assert_sorted_counters<T, N>(original: [T; N], sorted: [T; N], indexes: [u64; N]) where T: Eq + Ordered + Empty {
         let mut prev_was_empty = false;
 
         for i in 0..N {
@@ -153,7 +153,7 @@ impl PrivateKernelTailCircuitPrivateInputs {
         // Move all zero-ed (removed) entries of these arrays to the end and preserve ordering of other entries
-        let mut new_note_hashes_vec = BoundedVec::new(SideEffect::empty());
+        let mut new_note_hashes_vec = BoundedVec::new();
 
         for c_idx in 0..MAX_NEW_NOTE_HASHES_PER_TX {
             if new_note_hashes[c_idx].value != 0 {
@@ -163,7 +163,7 @@ impl PrivateKernelTailCircuitPrivateInputs {
 
         public_inputs.end.new_note_hashes = new_note_hashes_vec;
 
-        let mut new_nullifiers_vec = BoundedVec::new(SideEffectLinkedToNoteHash::empty());
+        let mut new_nullifiers_vec = BoundedVec::new();
 
         for n_idx in 0..MAX_NEW_NULLIFIERS_PER_TX {
             if new_nullifiers[n_idx].value != 0 {
@@ -240,8 +240,8 @@ mod tests {
 
     struct PrivateKernelOrderingInputsBuilder {
         previous_kernel: PreviousKernelDataBuilder,
-        read_commitment_hints: [Field; MAX_READ_REQUESTS_PER_TX],
-        nullifier_commitment_hints: [Field; MAX_NEW_NULLIFIERS_PER_TX],
+        read_commitment_hints: [u64; MAX_READ_REQUESTS_PER_TX],
+        nullifier_commitment_hints: [u64; MAX_NEW_NULLIFIERS_PER_TX],
     }
 
     impl PrivateKernelOrderingInputsBuilder {
@@ -272,32 +272,32 @@ mod tests {
             compute_unique_siloed_note_hashes(first_nullifier.value, commitments)
         }
 
-        pub fn append_transient_commitments(&mut self, num_commitments: Field) {
+        pub fn append_transient_commitments(&mut self, num_commitments: u64) {
             // All new note hashes aggregated in the previous kernel are transient commitments.
            self.previous_kernel.append_new_note_hashes(num_commitments);
         }
 
-        pub fn add_transient_read(&mut self, commitment_index: Field) {
+        pub fn add_transient_read(&mut self, commitment_index: u64) {
             let read_request_index = self.previous_kernel.add_read_request_for_transient_commitment(commitment_index);
             self.read_commitment_hints[read_request_index] = commitment_index;
         }
 
-        pub fn append_nullifiers(&mut self, num_nullifiers: Field) {
+        pub fn append_nullifiers(&mut self, num_nullifiers: u64) {
             self.previous_kernel.append_new_nullifiers_from_private(num_nullifiers);
         }
 
-        pub fn nullify_transient_commitment(&mut self, nullifier_index: Field, commitment_index: Field) {
+        pub fn nullify_transient_commitment(&mut self, nullifier_index: Field, commitment_index: u64) {
             self.previous_kernel.end.new_nullifiers.storage[nullifier_index].note_hash = self.previous_kernel.end.new_note_hashes.get(commitment_index).value;
             self.nullifier_commitment_hints[nullifier_index] = commitment_index;
         }
 
-        fn sort_sideffects<T, N>(original: [T; N]) -> ([T; N], [u32; N]) where T: Ordered + Eq + Empty {
+        fn sort_sideffects<T, N>(original: [T; N]) -> ([T; N], [u64; N]) where T: Ordered + Eq + Empty {
             let mut indexes = [0; N];
             for i in 0..N {
-                indexes[i] = i as u32;
+                indexes[i] = i;
             }
             let sorted_indexes = indexes.sort_via(
-                |a_index: u32, b_index: u32| {
+                |a_index: u64, b_index: u64| {
                     let a = original[a_index];
                     let b = original[b_index];
                     if is_empty(b) {
@@ -309,7 +309,7 @@ mod tests {
                 }
             }
             );
-            let sorted_sideffects = sorted_indexes.map(|i: u32| original[i]);
+            let sorted_sideffects = sorted_indexes.map(|i: u64| original[i]);
             let mut reverse_map = [0; N];
             for i in 0..N {
                 reverse_map[sorted_indexes[i]] = i;
@@ -322,12 +322,12 @@ mod tests {
             let (sorted_new_note_hashes, sorted_new_note_hashes_indexes) = PrivateKernelOrderingInputsBuilder::sort_sideffects(self.get_new_note_hashes());
             let mut sorted_read_commitment_hints = [0; MAX_READ_REQUESTS_PER_TX];
             for i in 0..sorted_read_commitment_hints.len() {
-                sorted_read_commitment_hints[i] = sorted_new_note_hashes_indexes[self.read_commitment_hints[i]] as Field;
+                sorted_read_commitment_hints[i] = sorted_new_note_hashes_indexes[self.read_commitment_hints[i]];
             }
             let (sorted_new_nullifiers, sorted_new_nullifiers_indexes) = PrivateKernelOrderingInputsBuilder::sort_sideffects(self.get_new_nullifiers());
             let mut sorted_nullifier_commitment_hints = [0; MAX_NEW_NULLIFIERS_PER_TX];
             for i in 0..sorted_nullifier_commitment_hints.len() {
-                sorted_nullifier_commitment_hints[i] = sorted_new_nullifiers_indexes[self.nullifier_commitment_hints[i]] as Field;
+                sorted_nullifier_commitment_hints[i] = sorted_new_nullifiers_indexes[self.nullifier_commitment_hints[i]];
             }
 
             let kernel = PrivateKernelTailCircuitPrivateInputs {
@@ -569,7 +569,7 @@ mod tests {
     #[test(should_fail_with="The 0th nullifier in the accumulated nullifier array is zero")]
     unconstrained fn zero_0th_nullifier_fails() {
         let mut builder = PrivateKernelOrderingInputsBuilder::new();
 
-        builder.previous_kernel.end.new_nullifiers = BoundedVec::new(SideEffectLinkedToNoteHash::empty());
+        builder.previous_kernel.end.new_nullifiers = BoundedVec::new();
 
         builder.failed();
     }
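The hint plumbing in private_kernel_tail.nr above follows one idea: sort an index array by the counters of the side effects it points at, then remap previously recorded hints through the resulting permutation (the reverse_map above). A reduced sketch of that index-sorting step over plain integers, as a hypothetical helper rather than the kernel's own code:

    fn sort_indexes<N>(original: [u32; N]) -> ([u32; N], [u64; N]) {
        let mut indexes = [0; N];
        for i in 0..N {
            indexes[i] = i;
        }
        // Sort the indexes by comparing the values they point at, then
        // materialize the sorted values by mapping the indexes back.
        let sorted_indexes = indexes.sort_via(|a: u64, b: u64| original[a] <= original[b]);
        let sorted_values = sorted_indexes.map(|i: u64| original[i]);
        (sorted_values, sorted_indexes)
    }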
diff --git a/noir-projects/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr
index a6842068219..bd27c381283 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr
@@ -185,7 +185,7 @@ fn propagate_valid_public_data_update_requests(
     let update_requests = public_call.call_stack_item.public_inputs.contract_storage_update_requests;
 
     // TODO(fees) should we have a MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL
-    let mut public_data_update_requests : BoundedVec<PublicDataUpdateRequest, MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX> = BoundedVec::new(PublicDataUpdateRequest::empty());
+    let mut public_data_update_requests : BoundedVec<PublicDataUpdateRequest, MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX> = BoundedVec::new();
 
     for i in 0..MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX {
         let update_request = update_requests[i];
@@ -209,7 +209,7 @@ fn propagate_valid_non_revertible_public_data_update_requests(
     let update_requests = public_call.call_stack_item.public_inputs.contract_storage_update_requests;
 
     // TODO(fees) should we have a MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL ?
-    let mut public_data_update_requests : BoundedVec<PublicDataUpdateRequest, MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX> = BoundedVec::new(PublicDataUpdateRequest::empty());
+    let mut public_data_update_requests : BoundedVec<PublicDataUpdateRequest, MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX> = BoundedVec::new();
 
     for i in 0..MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX {
         let update_request = update_requests[i];
@@ -230,7 +230,7 @@ fn propagate_valid_public_data_reads(public_call: PublicCallData, circuit_output
     let read_requests = public_call.call_stack_item.public_inputs.contract_storage_reads;
 
     // TODO(fees) should we have a MAX_REVERTIBLE_PUBLIC_DATA_READS_PER_CALL
-    let mut public_data_reads : BoundedVec<PublicDataRead, MAX_REVERTIBLE_PUBLIC_DATA_READS_PER_TX> = BoundedVec::new(PublicDataRead::empty());
+    let mut public_data_reads : BoundedVec<PublicDataRead, MAX_REVERTIBLE_PUBLIC_DATA_READS_PER_TX> = BoundedVec::new();
 
     for i in 0..MAX_REVERTIBLE_PUBLIC_DATA_READS_PER_TX {
         let read_request: StorageRead = read_requests[i];
@@ -253,7 +253,7 @@ fn propagate_valid_non_revertible_public_data_reads(
     let read_requests = public_call.call_stack_item.public_inputs.contract_storage_reads;
 
     // TODO(fees) should we have a MAX_REVERTIBLE_PUBLIC_DATA_READS_PER_CALL
-    let mut public_data_reads : BoundedVec<PublicDataRead, MAX_NON_REVERTIBLE_PUBLIC_DATA_READS_PER_TX> = BoundedVec::new(PublicDataRead::empty());
+    let mut public_data_reads : BoundedVec<PublicDataRead, MAX_NON_REVERTIBLE_PUBLIC_DATA_READS_PER_TX> = BoundedVec::new();
 
     for i in 0..MAX_NON_REVERTIBLE_PUBLIC_DATA_READS_PER_TX {
         let read_request: StorageRead = read_requests[i];
@@ -277,7 +277,7 @@ fn propagate_new_note_hashes_non_revertible(
     let new_note_hashes = public_call.call_stack_item.public_inputs.new_note_hashes;
     let storage_contract_address = public_call_public_inputs.call_context.storage_contract_address;
 
-    let mut siloed_new_note_hashes : BoundedVec<SideEffect, MAX_NEW_NOTE_HASHES_PER_CALL> = BoundedVec::new(SideEffect::empty());
+    let mut siloed_new_note_hashes : BoundedVec<SideEffect, MAX_NEW_NOTE_HASHES_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_NEW_NOTE_HASHES_PER_CALL {
         let new_note_hash = new_note_hashes[i].value;
         if new_note_hash != 0 {
@@ -297,7 +297,7 @@ fn propagate_new_note_hashes(
     let new_note_hashes = public_call.call_stack_item.public_inputs.new_note_hashes;
     let storage_contract_address = public_call_public_inputs.call_context.storage_contract_address;
 
-    let mut siloed_new_note_hashes : BoundedVec<SideEffect, MAX_NEW_NOTE_HASHES_PER_CALL> = BoundedVec::new(SideEffect::empty());
+    let mut siloed_new_note_hashes : BoundedVec<SideEffect, MAX_NEW_NOTE_HASHES_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_NEW_NOTE_HASHES_PER_CALL {
         let new_note_hash = new_note_hashes[i].value;
         if new_note_hash != 0 {
@@ -316,7 +316,7 @@ fn propagate_new_nullifiers_non_revertible(
     let storage_contract_address = public_call.call_stack_item.public_inputs.call_context.storage_contract_address;
 
     // Enhance commitments and nullifiers with domain separation whereby domain is the contract.
-    let mut siloed_new_nullifiers : BoundedVec<SideEffectLinkedToNoteHash, MAX_NEW_NULLIFIERS_PER_CALL> = BoundedVec::new(SideEffectLinkedToNoteHash::empty());
+    let mut siloed_new_nullifiers : BoundedVec<SideEffectLinkedToNoteHash, MAX_NEW_NULLIFIERS_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_NEW_NULLIFIERS_PER_CALL {
         let new_nullifier = new_nullifiers[i].value;
         if new_nullifier != 0 {
@@ -342,7 +342,7 @@ fn propagate_new_nullifiers(
     let storage_contract_address = public_call.call_stack_item.public_inputs.call_context.storage_contract_address;
 
     // Enhance commitments and nullifiers with domain separation whereby domain is the contract.
-    let mut siloed_new_nullifiers : BoundedVec<SideEffectLinkedToNoteHash, MAX_NEW_NULLIFIERS_PER_CALL> = BoundedVec::new(SideEffectLinkedToNoteHash::empty());
+    let mut siloed_new_nullifiers : BoundedVec<SideEffectLinkedToNoteHash, MAX_NEW_NULLIFIERS_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_NEW_NULLIFIERS_PER_CALL {
         let new_nullifier = new_nullifiers[i].value;
         if new_nullifier != 0 {
@@ -367,7 +367,7 @@ fn propagate_new_l2_to_l1_messages(public_call: PublicCallData, public_inputs: &
     let storage_contract_address = public_call_public_inputs.call_context.storage_contract_address;
     let new_l2_to_l1_msgs = public_call_public_inputs.new_l2_to_l1_msgs;
 
-    let mut new_l2_to_l1_msgs_to_insert : BoundedVec<Field, MAX_NEW_L2_TO_L1_MSGS_PER_CALL> = BoundedVec::new(0);
+    let mut new_l2_to_l1_msgs_to_insert : BoundedVec<Field, MAX_NEW_L2_TO_L1_MSGS_PER_CALL> = BoundedVec::new();
     for i in 0..MAX_NEW_L2_TO_L1_MSGS_PER_CALL {
         let msg = new_l2_to_l1_msgs[i];
         if !is_empty(msg) {
diff --git a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr
index 9d9433400ce..67777b378f9 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr
@@ -45,7 +45,7 @@ struct BaseRollupInputs {
     // TODO: The following 6 values are eventually going to be nuked from here. See discussion:
     // https://aztecprotocol.slack.com/archives/C060PU5R327/p1701965354071269
     sorted_public_data_writes: [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
-    sorted_public_data_writes_indexes: [u32; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
+    sorted_public_data_writes_indexes: [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
     low_public_data_writes_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
     low_public_data_writes_witnesses: [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
     public_data_reads_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_READS_PER_TX],
@@ -195,7 +195,7 @@ impl BaseRollupInputs {
                 ((low_leaf.next_index == 0) & (low_leaf.next_nullifier == 0))
             )
         },
-        |low_leaf: NullifierLeafPreimage, nullifier: Field, nullifier_index: u32| { // Update low leaf
+        |low_leaf: NullifierLeafPreimage, nullifier: Field, nullifier_index: u64| { // Update low leaf
             NullifierLeafPreimage{
                 nullifier : low_leaf.nullifier,
                 next_nullifier : nullifier,
@@ -382,7 +382,7 @@ fn insert_public_data_update_requests(
     prev_snapshot: AppendOnlyTreeSnapshot,
     public_data_writes: [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
     sorted_public_data_writes: [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
-    sorted_public_data_writes_indexes: [u32; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
+    sorted_public_data_writes_indexes: [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
     low_public_data_writes_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
     low_public_data_writes_witnesses: [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
     public_data_writes_subtree_sibling_path: [Field; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH]
@@ -417,7 +417,7 @@ fn insert_public_data_update_requests(
         (!is_low_empty) & (is_update | is_in_range)
     },
-    |low_preimage: PublicDataTreeLeafPreimage, write: PublicDataTreeLeaf, write_index: u32| { // Update low leaf
+    |low_preimage: PublicDataTreeLeafPreimage, write: PublicDataTreeLeaf, write_index: u64| { // Update low leaf
         let is_update = low_preimage.slot == write.slot;
         if is_update {
             PublicDataTreeLeafPreimage{
@@ -493,7 +493,7 @@ fn validate_public_data_reads(
 #[test]
 fn consistent_not_hash_subtree_width() {
     assert_eq(
-        MAX_NEW_NOTE_HASHES_PER_TX, 2.pow_32(NOTE_HASH_SUBTREE_HEIGHT), "note hash subtree width is incorrect"
+        MAX_NEW_NOTE_HASHES_PER_TX, 2.pow_32(NOTE_HASH_SUBTREE_HEIGHT as Field) as u64, "note hash subtree width is incorrect"
     );
 }
@@ -595,7 +595,7 @@ mod tests {
     struct SortedTuple<T> {
         value: T,
-        original_index: u32,
+        original_index: u64,
     }
 
     global MAX_NEW_NULLIFIERS_PER_TEST = 4;
@@ -608,7 +608,7 @@ mod tests {
         for i in 0..N {
             sorted_tuples[i] = SortedTuple {
                 value: values[i],
-                original_index: i as u32,
+                original_index: i,
             };
         }
@@ -621,10 +621,10 @@ mod tests {
         snapshot: AppendOnlyTreeSnapshot,
         public_data_writes: BoundedVec<(u64, PublicDataTreeLeaf), 2>,
         mut pre_existing_public_data: [PublicDataTreeLeafPreimage; EXISTING_LEAVES]
-    ) -> ([Field; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH], [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], [u32; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_READS_PER_TX], [PublicDataMembershipWitness; MAX_PUBLIC_DATA_READS_PER_TX], [PublicDataTreeLeafPreimage; EXISTING_LEAVES]) {
+    ) -> ([Field; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH], [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_READS_PER_TX], [PublicDataMembershipWitness; MAX_PUBLIC_DATA_READS_PER_TX], [PublicDataTreeLeafPreimage; EXISTING_LEAVES]) {
         let mut subtree_path: [Field; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH] = dep::std::unsafe::zeroed();
         let mut sorted_public_data_writes: [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX] = dep::std::unsafe::zeroed();
-        let mut sorted_public_data_writes_indexes: [u32; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX] = dep::std::unsafe::zeroed();
+        let mut sorted_public_data_writes_indexes: [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX] = dep::std::unsafe::zeroed();
         let mut low_public_data_writes_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX] = dep::std::unsafe::zeroed();
         let mut low_public_data_writes_witnesses: [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX] = dep::std::unsafe::zeroed();
         let mut public_data_reads_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_READS_PER_TX] = dep::std::unsafe::zeroed();
@@ -632,8 +632,8 @@ mod tests {
         let mut new_subtree: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX] = dep::std::unsafe::zeroed();
 
         for i in 0..MAX_PUBLIC_DATA_WRITES_PER_TEST {
-            if i < (public_data_writes.len() as u64) {
-                let leaf = public_data_writes.get_unchecked(i as Field).1;
+            if i < (public_data_writes.len()) {
+                let leaf = public_data_writes.get_unchecked(i).1;
                 kernel_data.public_inputs.end.public_data_update_requests[i] = PublicDataUpdateRequest {
                     leaf_slot : leaf.slot,
@@ -668,13 +668,13 @@ mod tests {
                         slot: low_leaf.slot,
                         value: low_leaf.value,
                         next_slot: leaf.slot,
-                        next_index: (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX as u32)+(i as u32)
+                        next_index: MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX + i
                     };
                 }
                 low_public_data_writes_preimages[i] = low_leaf;
                 low_public_data_writes_witnesses[i] = PublicDataMembershipWitness {
                     leaf_index: low_leaf_index as Field,
-                    sibling_path: public_data_tree.get_sibling_path(low_leaf_index as Field)
+                    sibling_path: public_data_tree.get_sibling_path(low_leaf_index)
                 };
 
                 public_data_tree.update_leaf(
@@ -684,11 +684,11 @@ mod tests {
                 }
             } else {
                 sorted_public_data_writes[i] = PublicDataTreeLeaf::empty();
-                sorted_public_data_writes_indexes[i] = i as u32;
+                sorted_public_data_writes_indexes[i] = i;
             }
         }
 
-        subtree_path = BaseRollupInputsBuilder::extract_subtree_sibling_path(public_data_tree.get_sibling_path(snapshot.next_available_leaf_index as Field), [0; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH]);
+        subtree_path = BaseRollupInputsBuilder::extract_subtree_sibling_path(public_data_tree.get_sibling_path(snapshot.next_available_leaf_index as u64), [0; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH]);
         (
             subtree_path, sorted_public_data_writes, sorted_public_data_writes_indexes, low_public_data_writes_preimages, low_public_data_writes_witnesses, public_data_reads_preimages, public_data_reads_witnesses, pre_existing_public_data
@@ -743,7 +743,7 @@ mod tests {
         nullifier_tree: &mut NonEmptyMerkleTree,
         kernel_data: &mut PublicKernelData,
         start_nullifier_tree_snapshot: AppendOnlyTreeSnapshot
-    ) -> ([NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX], [NullifierMembershipWitness; MAX_NEW_NULLIFIERS_PER_TX], [Field; MAX_NEW_NULLIFIERS_PER_TX], [u32; MAX_NEW_NULLIFIERS_PER_TX]) {
+    ) -> ([NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX], [NullifierMembershipWitness; MAX_NEW_NULLIFIERS_PER_TX], [Field; MAX_NEW_NULLIFIERS_PER_TX], [u64; MAX_NEW_NULLIFIERS_PER_TX]) {
         let mut nullifier_predecessor_preimages: [NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX] = dep::std::unsafe::zeroed();
         let mut low_nullifier_membership_witness: [NullifierMembershipWitness; MAX_NEW_NULLIFIERS_PER_TX] = dep::std::unsafe::zeroed();
@@ -761,19 +761,19 @@ mod tests {
                 sorted_nullifiers_indexes[i] = sorted_new_nullifier_tuples[i].original_index;
             } else {
                 sorted_nullifiers[i] = 0;
-                sorted_nullifiers_indexes[i] = i as u32;
+                sorted_nullifiers_indexes[i] = i;
             }
         }
 
         let mut pre_existing_nullifiers = self.pre_existing_nullifiers;
 
         for i in 0..MAX_NEW_NULLIFIERS_PER_TEST {
-            if (i as u64) < (self.new_nullifiers.len() as u64) {
+            if (i as u64) < (self.new_nullifiers.len()) {
                 let sorted_tuple = sorted_new_nullifier_tuples[i];
                 let new_nullifier = sorted_tuple.value;
                 let original_index = sorted_tuple.original_index;
 
-                let low_index = self.new_nullifiers.get_unchecked(original_index as Field).existing_index;
+                let low_index = self.new_nullifiers.get_unchecked(original_index).existing_index;
 
                 kernel_data.public_inputs.end.new_nullifiers[original_index].value = new_nullifier;
 
@@ -781,11 +781,11 @@ mod tests {
                 nullifier_predecessor_preimages[i] = low_preimage;
                 low_nullifier_membership_witness[i] = NullifierMembershipWitness {
                     leaf_index: low_index as Field,
-                    sibling_path: nullifier_tree.get_sibling_path(low_index as Field)
+                    sibling_path: nullifier_tree.get_sibling_path(low_index)
                 };
 
                 low_preimage.next_nullifier = new_nullifier;
-                low_preimage.next_index = start_nullifier_tree_snapshot.next_available_leaf_index + original_index;
+                low_preimage.next_index = start_nullifier_tree_snapshot.next_available_leaf_index as u64 + original_index;
 
                 pre_existing_nullifiers[low_index] = low_preimage;
                 nullifier_tree.update_leaf(low_index, low_preimage.hash());
@@ -1101,7 +1101,7 @@ mod tests {
         tree_nullifiers[0] = NullifierLeafPreimage {
             nullifier : 0,
             next_nullifier : 1,
-            next_index : MAX_NEW_NULLIFIERS_PER_TX as u32,
+            next_index : MAX_NEW_NULLIFIERS_PER_TX,
         };
         tree_nullifiers[1] = builder.pre_existing_nullifiers[1];
         tree_nullifiers[MAX_NEW_NULLIFIERS_PER_TX] = NullifierLeafPreimage {
@@ -1153,7 +1153,7 @@ mod tests {
         tree_nullifiers[1] = NullifierLeafPreimage {
             nullifier : 7,
             next_nullifier : 8,
-            next_index : MAX_NEW_NULLIFIERS_PER_TX as u32,
+            next_index : MAX_NEW_NULLIFIERS_PER_TX,
         };
 
         let last_index = builder.new_nullifiers.max_len() - 1;
@@ -1161,7 +1161,7 @@ mod tests {
             tree_nullifiers[MAX_NEW_NULLIFIERS_PER_TX + i] = NullifierLeafPreimage {
                 nullifier : (8 + i) as Field,
                 next_nullifier : (8 + i + 1) as Field,
-                next_index : (MAX_NEW_NULLIFIERS_PER_TX + i) as u32 + 1,
+                next_index : MAX_NEW_NULLIFIERS_PER_TX + i + 1,
             };
         }
         tree_nullifiers[MAX_NEW_NULLIFIERS_PER_TX+last_index] = NullifierLeafPreimage {
@@ -1354,7 +1354,7 @@ mod tests {
             slot: 20,
             value: 90,
             next_slot: 25,
-            next_index: MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX as u32,
+            next_index: MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
         }.hash();
         public_data_leaves[1] = PublicDataTreeLeafPreimage {
             slot: 28,
diff --git a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/base/state_diff_hints.nr b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/base/state_diff_hints.nr
index bfdae2ecf5d..a15bb81021f 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/base/state_diff_hints.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/base/state_diff_hints.nr
@@ -12,7 +12,7 @@ struct StateDiffHints {
     nullifier_predecessor_membership_witnesses: [NullifierMembershipWitness; MAX_NEW_NULLIFIERS_PER_TX],
     sorted_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_TX],
-    sorted_nullifier_indexes: [u32; MAX_NEW_NULLIFIERS_PER_TX],
+    sorted_nullifier_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX],
 
     // For inserting the new subtrees into their respective trees:
     // Note: the insertion leaf index can be derived from the snapshots' `next_available_leaf_index` values (tree
diff --git a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/indexed_tree.nr b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/indexed_tree.nr
index 47713a81496..fc9ba43d74f 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/indexed_tree.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/indexed_tree.nr
@@ -5,7 +5,7 @@ use dep::types::abis::{append_only_tree_snapshot::AppendOnlyTreeSnapshot, member
 
 fn check_permutation<T, N>(
     original_array: [T; N],
     sorted_array: [T; N],
-    indexes: [u32; N],
+    indexes: [u64; N],
     is_equal: fn(T, T) -> bool
 ) {
     let mut seen_value = [false; N];
@@ -50,7 +50,7 @@ pub fn batch_insert(
-    sorted_values_indexes: [u32; SubtreeWidth],
+    sorted_values_indexes: [u64; SubtreeWidth],
@@ -58,7 +58,7 @@ pub fn batch_insert(
     is_empty_value: fn(Value) -> bool,
     hash_leaf: fn(Leaf) -> Field,
     is_valid_low_leaf: fn(Leaf, Value) -> bool,
-    update_low_leaf: fn(Leaf, Value, u32) -> Leaf,
+    update_low_leaf: fn(Leaf, Value, u64) -> Leaf,
     build_insertion_leaf: fn(Value, Leaf) -> Leaf,
     _subtree_height: [Field; SubtreeHeight],
     _tree_height: [Field; TreeHeight]
@@ -95,7 +95,11 @@ pub fn compute_subtree(leaves: [Field; N]) -> Field {
 //
 // Would be good if we could use width since the compute_subtree
 // algorithm uses depth.
-pub fn calculate_empty_tree_root(depth: Field) -> Field {
+pub fn calculate_empty_tree_root(depth: u64) -> Field {
     if depth == 0 {
         0
     } else if depth == 1 {
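calculate_empty_tree_root above now takes its depth as u64; it returns precomputed roots, where the empty root at depth d is the hash of two copies of the empty root at depth d-1. A sketch of the recurrence those constants satisfy, using pedersen_hash as a stand-in for the tree's hasher (the real function hardcodes the results rather than looping, since a data-dependent loop bound is only allowed in unconstrained code):

    use dep::std::hash::pedersen_hash;

    // Illustrative only: computes the empty-subtree root by folding the
    // zero leaf upward `depth` times.
    unconstrained fn empty_tree_root(depth: u64) -> Field {
        let mut root = 0;
        for _i in 0..depth {
            root = pedersen_hash([root, root]);
        }
        root
    }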
diff --git a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr
index c4b8f860732..34312802a4e 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr
@@ -79,7 +79,7 @@ impl RootRollupInputs {
 // See `test_message_input_flattened_length` on keeping this in sync,
 // why its here and how this constant was computed.
-global NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP_NUM_BYTES = 512;
+global NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP_NUM_BYTES: u64 = 512;
 
 // Computes the messages hash from the leaves array
 //
diff --git a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/tests/merkle_tree_utils.nr b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/tests/merkle_tree_utils.nr
index 31491802803..5857980ea7b 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/tests/merkle_tree_utils.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/rollup-lib/src/tests/merkle_tree_utils.nr
@@ -103,7 +103,7 @@ impl NonEmptyMerkleTree {
         NonEmptyMerkleTree { subtree, zero_hashes, left_supertree_branch, _phantom_subtree_height: _subtree_height }
     }
 
-    pub fn get_sibling_path(self, leaf_index: Field) -> [Field; TREE_HEIGHT] {
+    pub fn get_sibling_path(self, leaf_index: u64) -> [Field; TREE_HEIGHT] {
         let mut path = [0; TREE_HEIGHT];
         let mut current_index = leaf_index as u64;
         let mut subtree_width = SUBTREE_ITEMS as u64;
diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/append_only_tree_snapshot.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/append_only_tree_snapshot.nr
index 138760030c7..83925d95897 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/append_only_tree_snapshot.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/append_only_tree_snapshot.nr
@@ -2,10 +2,11 @@ use dep::std::cmp::Eq;
 
 struct AppendOnlyTreeSnapshot {
     root : Field,
+    // TODO(Alvaro) change this to a u64
     next_available_leaf_index : u32
 }
 
-global APPEND_ONLY_TREE_SNAPSHOT_LENGTH: Field = 2;
+global APPEND_ONLY_TREE_SNAPSHOT_LENGTH: u64 = 2;
 
 impl AppendOnlyTreeSnapshot {
     pub fn serialize(self) -> [Field; APPEND_ONLY_TREE_SNAPSHOT_LENGTH] {
diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/nullifier_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/nullifier_leaf_preimage.nr
index 605740777fe..ee5b89b2bc8 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/nullifier_leaf_preimage.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/nullifier_leaf_preimage.nr
@@ -1,11 +1,11 @@
-global NULLIFIER_LEAF_PREIMAGE_LENGTH: Field = 3;
+global NULLIFIER_LEAF_PREIMAGE_LENGTH: u64 = 3;
 
 use crate::traits::{Empty, Hash};
 
 struct NullifierLeafPreimage {
     nullifier : Field,
     next_nullifier :Field,
-    next_index : u32,
+    next_index : u64,
 }
 
 impl Empty for NullifierLeafPreimage {
@@ -38,6 +38,6 @@ impl NullifierLeafPreimage {
     }
 
     pub fn deserialize(fields: [Field; NULLIFIER_LEAF_PREIMAGE_LENGTH]) -> Self {
-        Self { nullifier: fields[0], next_nullifier: fields[1], next_index: fields[2] as u32 }
+        Self { nullifier: fields[0], next_nullifier: fields[1], next_index: fields[2] as u64 }
     }
 }
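NullifierLeafPreimage above keeps serializing to [Field; NULLIFIER_LEAF_PREIMAGE_LENGTH] even though next_index is now u64: the integer is widened to a Field on the way out and narrowed with `as u64` on the way back in (the deserialize hunk shows the narrowing). The same round trip on a stripped-down, hypothetical struct:

    struct LeafStub {
        value: Field,
        next_index: u64,
    }

    impl LeafStub {
        fn serialize(self) -> [Field; 2] {
            // Widen the u64 into a Field for the serialized form.
            [self.value, self.next_index as Field]
        }

        fn deserialize(fields: [Field; 2]) -> Self {
            // Narrow back; lossless as long as the value fits in 64 bits.
            Self { value: fields[0], next_index: fields[1] as u64 }
        }
    }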
diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/private_call_stack_item.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/private_call_stack_item.nr
index 4afd285dd64..01ffcff4520 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/private_call_stack_item.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/private_call_stack_item.nr
@@ -26,7 +26,7 @@ impl Eq for PrivateCallStackItem {
 impl Serialize<PRIVATE_CALL_STACK_ITEM_LENGTH> for PrivateCallStackItem {
     fn serialize(self) -> [Field; PRIVATE_CALL_STACK_ITEM_LENGTH] {
-        let mut fields: BoundedVec<Field, PRIVATE_CALL_STACK_ITEM_LENGTH> = BoundedVec::new(0);
+        let mut fields: BoundedVec<Field, PRIVATE_CALL_STACK_ITEM_LENGTH> = BoundedVec::new();
 
         fields.push(self.contract_address.to_field());
         fields.extend_from_array(self.function_data.serialize());
diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr
index afff569ec71..0566283bfe2 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr
@@ -79,7 +79,7 @@ impl Eq for PrivateCircuitPublicInputs {
 impl Serialize<PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH> for PrivateCircuitPublicInputs {
     fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {
-        let mut fields: BoundedVec<Field, PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH> = BoundedVec::new(0);
+        let mut fields: BoundedVec<Field, PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH> = BoundedVec::new();
         fields.extend_from_array(self.call_context.serialize());
         fields.push(self.args_hash);
         fields.extend_from_array(self.return_values);
diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr
index afbfe068e8b..92968c80264 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr
@@ -48,7 +48,7 @@ impl Eq for PublicCircuitPublicInputs {
 impl Serialize<PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH> for PublicCircuitPublicInputs {
     fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {
-        let mut fields: BoundedVec<Field, PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH> = BoundedVec::new(0);
+        let mut fields: BoundedVec<Field, PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH> = BoundedVec::new();
         fields.extend_from_array(self.call_context.serialize());
         fields.push(self.args_hash);
         fields.extend_from_array(self.return_values);
diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/constants.nr
index ac6e623d309..4c69e3ae439 100644
--- a/noir-projects/noir-protocol-circuits/src/crates/types/src/constants.nr
+++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/constants.nr
@@ -1,5 +1,5 @@
-global ARGS_LENGTH: Field = 16;
-global RETURN_VALUES_LENGTH: Field = 4;
+global ARGS_LENGTH: u64 = 16;
+global RETURN_VALUES_LENGTH: u64 = 4;
 
 /**
  * Convention for constant array lengths are mainly divided in 2 classes:
@@ -23,80 +23,80 @@ global RETURN_VALUES_LENGTH: Field = 4;
 // docs:start:constants
 // "PER CALL" CONSTANTS
-global MAX_NEW_NOTE_HASHES_PER_CALL: Field = 16;
-global MAX_NEW_NULLIFIERS_PER_CALL: Field = 16;
-global MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL: Field = 4;
-global MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL: Field = 4;
-global MAX_NEW_L2_TO_L1_MSGS_PER_CALL: Field = 2;
-global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL: Field = 16;
-global MAX_PUBLIC_DATA_READS_PER_CALL: Field = 16;
-global MAX_READ_REQUESTS_PER_CALL: Field = 32;
-global MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL: Field = 1;
+global MAX_NEW_NOTE_HASHES_PER_CALL: u64 = 16;
+global MAX_NEW_NULLIFIERS_PER_CALL: u64 = 16;
+global MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL: u64 = 4;
+global MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL: u64 = 4;
+global MAX_NEW_L2_TO_L1_MSGS_PER_CALL: u64 = 2;
+global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL: u64 = 16;
+global MAX_PUBLIC_DATA_READS_PER_CALL: u64 = 16;
+global MAX_READ_REQUESTS_PER_CALL: u64 = 32;
+global MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_CALL: u64 = 1;
 
 // "PER TRANSACTION" CONSTANTS
-global MAX_NEW_NOTE_HASHES_PER_TX: Field = 64;
-global MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX: Field = 8;
-global MAX_REVERTIBLE_NOTE_HASHES_PER_TX: Field = 56;
-
-global MAX_NEW_NULLIFIERS_PER_TX: Field = 64;
-global MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX: Field = 8;
-global MAX_REVERTIBLE_NULLIFIERS_PER_TX: Field = 56;
-
-global MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX: Field = 8;
-
-global MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX: Field = 8;
-global MAX_NON_REVERTIBLE_PUBLIC_CALL_STACK_LENGTH_PER_TX: Field = 3;
-global MAX_REVERTIBLE_PUBLIC_CALL_STACK_LENGTH_PER_TX: Field = 5;
-
-global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: Field = 32;
-global MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: Field = 16;
-global MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: Field = 16;
-
-global MAX_PUBLIC_DATA_READS_PER_TX: Field = 32;
-global MAX_NON_REVERTIBLE_PUBLIC_DATA_READS_PER_TX: Field = 16;
-global MAX_REVERTIBLE_PUBLIC_DATA_READS_PER_TX: Field = 16;
-
-global MAX_NEW_L2_TO_L1_MSGS_PER_TX: Field = 2;
-global MAX_NEW_CONTRACTS_PER_TX: Field = 1;
-global MAX_READ_REQUESTS_PER_TX: Field = 128;
-global MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX: Field = 4;
-global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: Field = 1;
-global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: Field = 1;
+global MAX_NEW_NOTE_HASHES_PER_TX: u64 = 64;
+global MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX: u64 = 8;
+global MAX_REVERTIBLE_NOTE_HASHES_PER_TX: u64 = 56;
+
+global MAX_NEW_NULLIFIERS_PER_TX: u64 = 64;
+global MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX: u64 = 8;
+global MAX_REVERTIBLE_NULLIFIERS_PER_TX: u64 = 56;
+
+global MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX: u64 = 8;
+
+global MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX: u64 = 8;
+global MAX_NON_REVERTIBLE_PUBLIC_CALL_STACK_LENGTH_PER_TX: u64 = 3;
+global MAX_REVERTIBLE_PUBLIC_CALL_STACK_LENGTH_PER_TX: u64 = 5;
+
+global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: u64 = 32;
+global MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: u64 = 16;
+global MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: u64 = 16;
+
+global MAX_PUBLIC_DATA_READS_PER_TX: u64 = 32;
+global MAX_NON_REVERTIBLE_PUBLIC_DATA_READS_PER_TX: u64 = 16;
+global MAX_REVERTIBLE_PUBLIC_DATA_READS_PER_TX: u64 = 16;
+
+global MAX_NEW_L2_TO_L1_MSGS_PER_TX: u64 = 2;
+global MAX_NEW_CONTRACTS_PER_TX: u64 = 1;
+global MAX_READ_REQUESTS_PER_TX: u64 = 128;
+global MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX: u64 = 4;
+global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1;
+global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1;
 // docs:end:constants
 
 // ROLLUP CONTRACT CONSTANTS - constants used only in l1-contracts
-global NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP: Field = 16;
+global NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP: u64 = 16;
 
 // TREES RELATED CONSTANTS
-global VK_TREE_HEIGHT: Field = 3;
-global FUNCTION_TREE_HEIGHT: Field = 5;
-global CONTRACT_TREE_HEIGHT: Field = 16;
-global NOTE_HASH_TREE_HEIGHT: Field = 32;
-global PUBLIC_DATA_TREE_HEIGHT: Field = 40;
-global NULLIFIER_TREE_HEIGHT: Field = 20;
-global L1_TO_L2_MSG_TREE_HEIGHT: Field = 16;
-global ROLLUP_VK_TREE_HEIGHT: Field = 8;
+global VK_TREE_HEIGHT: u64 = 3;
+global FUNCTION_TREE_HEIGHT: u64 = 5;
+global CONTRACT_TREE_HEIGHT: u64 = 16;
+global NOTE_HASH_TREE_HEIGHT: u64 = 32;
+global PUBLIC_DATA_TREE_HEIGHT: u64 = 40;
+global NULLIFIER_TREE_HEIGHT: u64 = 20;
+global L1_TO_L2_MSG_TREE_HEIGHT: u64 = 16;
+global ROLLUP_VK_TREE_HEIGHT: u64 = 8;
 global ARTIFACT_FUNCTION_TREE_MAX_HEIGHT = 5;
 
 // SUB-TREES RELATED CONSTANTS
-global CONTRACT_SUBTREE_HEIGHT: Field = 0;
-global CONTRACT_SUBTREE_SIBLING_PATH_LENGTH: Field = 16;
NOTE_HASH_SUBTREE_HEIGHT: Field = 6; -global NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH: Field = 26; -global NULLIFIER_SUBTREE_HEIGHT: Field = 6; -global PUBLIC_DATA_SUBTREE_HEIGHT: Field = 5; -global ARCHIVE_HEIGHT: Field = 16; -global NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH: Field = 14; -global PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH: Field = 35; -global L1_TO_L2_MSG_SUBTREE_HEIGHT: Field = 4; -global L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH: Field = 12; +global CONTRACT_SUBTREE_HEIGHT: u64 = 0; +global CONTRACT_SUBTREE_SIBLING_PATH_LENGTH: u64 = 16; +global NOTE_HASH_SUBTREE_HEIGHT: u64 = 6; +global NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH: u64 = 26; +global NULLIFIER_SUBTREE_HEIGHT: u64 = 6; +global PUBLIC_DATA_SUBTREE_HEIGHT: u64 = 5; +global ARCHIVE_HEIGHT: u64 = 16; +global NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH: u64 = 14; +global PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH: u64 = 35; +global L1_TO_L2_MSG_SUBTREE_HEIGHT: u64 = 4; +global L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH: u64 = 12; // MISC CONSTANTS global FUNCTION_SELECTOR_NUM_BYTES: Field = 4; // sha256 hash is stored in two fields to accommodate all 256-bits of the hash -global NUM_FIELDS_PER_SHA256: Field = 2; -global ARGS_HASH_CHUNK_LENGTH: u32 = 32; -global ARGS_HASH_CHUNK_COUNT: u32 = 32; +global NUM_FIELDS_PER_SHA256: u64 = 2; +global ARGS_HASH_CHUNK_LENGTH: u64 = 32; +global ARGS_HASH_CHUNK_COUNT: u64 = 32; // The following is used in immutable state variables to compute an initialization slot whose value is used to // determine whether a given variable has been initialized (by asserting that the value in the slot is 0). // The initialization slot is computed by adding the constant bellow to the variable's storage slot. This constant has @@ -104,12 +104,12 @@ global ARGS_HASH_CHUNK_COUNT: u32 = 32; global INITIALIZATION_SLOT_SEPARATOR: Field = 1000_000_000; // CONTRACT CLASS CONSTANTS -global MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS: Field = 8000; +global MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS: u64 = 8000; // Bytecode size for private functions is per function, not for the entire contract. // Note that private functions bytecode includes a mix of acir and brillig. -global MAX_PACKED_BYTECODE_SIZE_PER_PRIVATE_FUNCTION_IN_FIELDS: Field = 500; +global MAX_PACKED_BYTECODE_SIZE_PER_PRIVATE_FUNCTION_IN_FIELDS: u64 = 500; // Same for unconstrained functions: the size is per function. -global MAX_PACKED_BYTECODE_SIZE_PER_UNCONSTRAINED_FUNCTION_IN_FIELDS: Field = 500; +global MAX_PACKED_BYTECODE_SIZE_PER_UNCONSTRAINED_FUNCTION_IN_FIELDS: u64 = 500; // Since we are not yet emitting selectors we'll use this magic value to identify events emitted by the ClassRegisterer. // This is just a stopgap until we implement proper selectors. // sha224sum 'struct ContractClassRegistered {contract_class_id: ContractClassId, version: Field, artifact_hash: Field, private_functions_root: Field, packed_public_bytecode: [Field; MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS] }' @@ -127,46 +127,46 @@ global DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bd // Some are defined here because Noir doesn't yet support globals referencing other globals yet. 
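The Field-to-u64 sweep in this constants file covers every global that sizes an array or bounds a loop. As a minimal sketch of the pattern these globals support (hypothetical names, not code from this diff), an integer-typed global can size an array, drive a range loop, and take part in comparisons — roles that the stricter typing adopted here no longer lets a Field value play:

global N_ITEMS: u64 = 4;

fn sum(items: [Field; N_ITEMS]) -> Field {
    let mut total = 0;
    // A u64 bound works in the range and as an array index; the loop body
    // itself still accumulates Field values.
    for i in 0..N_ITEMS {
        total += items[i];
    }
    total
}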
// Move these constants to a noir file once the issue below is resolved: // https://github.com/noir-lang/noir/issues/1734 -global L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH: Field = 25; -global MAX_NOTE_FIELDS_LENGTH: Field = 20; +global L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH: u64 = 25; +global MAX_NOTE_FIELDS_LENGTH: u64 = 20; // GET_NOTE_ORACLE_RETURN_LENGT = MAX_NOTE_FIELDS_LENGTH + 1 + 2 // The plus 1 is 1 extra field for nonce. // + 2 for EXTRA_DATA: [number_of_return_notes, contract_address] -global GET_NOTE_ORACLE_RETURN_LENGTH: Field = 23; -global MAX_NOTES_PER_PAGE: Field = 10; +global GET_NOTE_ORACLE_RETURN_LENGTH: u64 = 23; +global MAX_NOTES_PER_PAGE: u64 = 10; // VIEW_NOTE_ORACLE_RETURN_LENGTH = MAX_NOTES_PER_PAGE * (MAX_NOTE_FIELDS_LENGTH + 1) + 2; -global VIEW_NOTE_ORACLE_RETURN_LENGTH: Field = 212; +global VIEW_NOTE_ORACLE_RETURN_LENGTH: u64 = 212; // LENGTH OF STRUCTS SERIALIZED TO FIELDS global AZTEC_ADDRESS_LENGTH = 1; -global CALL_CONTEXT_LENGTH: Field = 8; -global CONTENT_COMMITMENT_LENGTH: Field = 7; -global CONTRACT_DEPLOYMENT_DATA_LENGTH: Field = 6; -global CONTRACT_STORAGE_READ_LENGTH: Field = 2; -global CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH: Field = 2; +global CALL_CONTEXT_LENGTH: u64 = 8; +global CONTENT_COMMITMENT_LENGTH: u64 = 7; +global CONTRACT_DEPLOYMENT_DATA_LENGTH: u64 = 6; +global CONTRACT_STORAGE_READ_LENGTH: u64 = 2; +global CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH: u64 = 2; global ETH_ADDRESS_LENGTH = 1; -global FUNCTION_DATA_LENGTH: Field = 4; -global FUNCTION_LEAF_PREIMAGE_LENGTH: Field = 5; -global GLOBAL_VARIABLES_LENGTH: Field = 6; -global HEADER_LENGTH: Field = 25; // 2 for last_archive, 7 for content commitment, 10 for state reference, 6 for global vars -global L1_TO_L2_MESSAGE_LENGTH: Field = 8; -global L2_TO_L1_MESSAGE_LENGTH: Field = 2; -global NEW_CONTRACT_DATA_LENGTH: Field = 3; +global FUNCTION_DATA_LENGTH: u64 = 4; +global FUNCTION_LEAF_PREIMAGE_LENGTH: u64 = 5; +global GLOBAL_VARIABLES_LENGTH: u64 = 6; +global HEADER_LENGTH: u64 = 25; // 2 for last_archive, 7 for content commitment, 10 for state reference, 6 for global vars +global L1_TO_L2_MESSAGE_LENGTH: u64 = 8; +global L2_TO_L1_MESSAGE_LENGTH: u64 = 2; +global NEW_CONTRACT_DATA_LENGTH: u64 = 3; global NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 4; global NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH = 5; -global PARTIAL_STATE_REFERENCE_LENGTH: Field = 8; -global PRIVATE_CALL_STACK_ITEM_LENGTH: Field = 219; +global PARTIAL_STATE_REFERENCE_LENGTH: u64 = 8; +global PRIVATE_CALL_STACK_ITEM_LENGTH: u64 = 219; // Change this ONLY if you have changed the PrivateCircuitPublicInputs structure. // In other words, if the structure/size of the public inputs of a function call changes then we should change this // constant as well PRIVATE_CALL_STACK_ITEM_LENGTH -global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: Field = 214; +global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = 214; // Change this ONLY if you have changed the PublicCircuitPublicInputs structure. 
-global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: Field = 194; -global STATE_REFERENCE_LENGTH: Field = 10; // 2 for snap + 8 for partial -global TX_CONTEXT_DATA_LENGTH: Field = 11; -global TX_REQUEST_LENGTH: Field = 17; +global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = 194; +global STATE_REFERENCE_LENGTH: u64 = 10; // 2 for snap + 8 for partial +global TX_CONTEXT_DATA_LENGTH: u64 = 11; +global TX_REQUEST_LENGTH: u64 = 17; -global GET_NOTES_ORACLE_RETURN_LENGTH: Field = 674; +global GET_NOTES_ORACLE_RETURN_LENGTH: u64 = 674; global NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP: Field = 2048; global NULLIFIERS_NUM_BYTES_PER_BASE_ROLLUP: Field = 2048; global PUBLIC_DATA_WRITES_NUM_BYTES_PER_BASE_ROLLUP: Field = 2048; diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/content_commitment.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/content_commitment.nr index 17ee068c466..44e0833c81a 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/content_commitment.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/content_commitment.nr @@ -1,7 +1,6 @@ use crate::{ - constants::{NUM_FIELDS_PER_SHA256, CONTENT_COMMITMENT_LENGTH}, - traits::{Deserialize, Empty, Hash, Serialize}, - utils::{arr_copy_slice}, + constants::{NUM_FIELDS_PER_SHA256, CONTENT_COMMITMENT_LENGTH}, + traits::{Deserialize, Empty, Hash, Serialize}, utils::{arr_copy_slice} }; struct ContentCommitment { @@ -13,7 +12,7 @@ struct ContentCommitment { impl Serialize for ContentCommitment { fn serialize(self) -> [Field; CONTENT_COMMITMENT_LENGTH] { - let mut fields: BoundedVec = BoundedVec::new(0); + let mut fields: BoundedVec = BoundedVec::new(); fields.extend_from_array([self.tx_tree_height]); fields.extend_from_array(self.txs_hash); diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/hash.nr index 1ba04777e9a..55c416efcf4 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/hash.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/hash.nr @@ -47,11 +47,11 @@ pub fn hash_args(args: [Field; N]) -> Field { for i in 0..ARGS_HASH_CHUNK_COUNT { let mut chunk_hash = 0; let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH; - if start_chunk_index < (args.len() as u32) { + if start_chunk_index < args.len() { let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH]; for j in 0..ARGS_HASH_CHUNK_LENGTH { let item_index = i * ARGS_HASH_CHUNK_LENGTH + j; - if item_index < (args.len() as u32) { + if item_index < args.len() { chunk_args[j] = args[item_index]; } } @@ -200,7 +200,7 @@ pub fn compute_l2_to_l1_hash( chain_id: Field, message: L2ToL1Message ) -> Field { - let mut bytes: BoundedVec = BoundedVec::new(0); + let mut bytes: BoundedVec = BoundedVec::new(); let inputs = [ contract_address.to_field(), rollup_version_id, message.recipient.to_field(), chain_id, message.content @@ -282,11 +282,11 @@ pub fn compute_logs_hash( ) } -pub fn compute_note_hash_nonce(first_nullifier: Field, commitment_index: Field) -> Field { +pub fn compute_note_hash_nonce(first_nullifier: Field, commitment_index: u64) -> Field { pedersen_hash( [ first_nullifier, - commitment_index + commitment_index as Field ], GENERATOR_INDEX__NOTE_HASH_NONCE ) diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/header.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/header.nr index ec0d66df868..ece1d741a96 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/header.nr +++ 
b/noir-projects/noir-protocol-circuits/src/crates/types/src/header.nr @@ -31,7 +31,7 @@ impl Eq for Header { impl Serialize for Header { fn serialize(self) -> [Field; HEADER_LENGTH] { - let mut fields: BoundedVec = BoundedVec::new(0); + let mut fields: BoundedVec = BoundedVec::new(); fields.extend_from_array(self.last_archive.serialize()); fields.extend_from_array(self.content_commitment.serialize()); diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/public_data_tree_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/public_data_tree_leaf_preimage.nr index dc84f6ee903..dcc84fe7026 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/public_data_tree_leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/public_data_tree_leaf_preimage.nr @@ -4,7 +4,7 @@ struct PublicDataTreeLeafPreimage { slot : Field, value: Field, next_slot :Field, - next_index : u32, + next_index : u64, } impl Empty for PublicDataTreeLeafPreimage { diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/state_reference.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/state_reference.nr index 1fdaba370f7..e64c2654dd6 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/state_reference.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/state_reference.nr @@ -19,7 +19,7 @@ impl Eq for StateReference { impl Serialize for StateReference { fn serialize(self) -> [Field; STATE_REFERENCE_LENGTH] { - let mut fields: BoundedVec = BoundedVec::new(0); + let mut fields: BoundedVec = BoundedVec::new(); fields.extend_from_array(self.l1_to_l2_message_tree.serialize()); fields.extend_from_array(self.partial.serialize()); diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr index 5c1eacf6658..f23ef8080ef 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr @@ -2,26 +2,26 @@ use crate::abis::{membership_witness::ReadRequestMembershipWitness, side_effect: use crate::tests::fixtures; use crate::constants::{MAX_READ_REQUESTS_PER_CALL}; -pub fn generate_read_requests(how_many: Field) -> (BoundedVec, BoundedVec) { +pub fn generate_read_requests(how_many: u64) -> (BoundedVec, BoundedVec) { generate_read_requests_with_config(how_many, false, [0; MAX_READ_REQUESTS_PER_CALL]) } -pub fn generate_transient_read_requests(how_many: Field) -> (BoundedVec, BoundedVec) { +pub fn generate_transient_read_requests(how_many: u64) -> (BoundedVec, BoundedVec) { generate_read_requests_with_config(how_many, true, [0; MAX_READ_REQUESTS_PER_CALL]) } pub fn generate_read_requests_with_config( - how_many: Field, + how_many: u64, is_transient: bool, hints_to_commitment: [Field; MAX_READ_REQUESTS_PER_CALL] ) -> (BoundedVec, BoundedVec) { - let mut read_requests = BoundedVec::new(SideEffect::empty()); - let mut read_request_membership_witnesses = BoundedVec::new(dep::std::unsafe::zeroed()); + let mut read_requests = BoundedVec::new(); + let mut read_request_membership_witnesses = BoundedVec::new(); for i in 0..how_many { - read_requests.push(SideEffect { value: i + 1, counter: 0 }); + read_requests.push(SideEffect { value: (i as Field) + 1, counter: 0 }); let witness = ReadRequestMembershipWitness { - leaf_index: i, + leaf_index: i as 
Field, sibling_path: fixtures::note_hash_tree::SIBLING_PATHS[i], is_transient, hint_to_note_hash: hints_to_commitment[i] diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/kernel_data_builder.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/kernel_data_builder.nr index ccf24bb261f..4a22a03cb5a 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/kernel_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/kernel_data_builder.nr @@ -82,9 +82,9 @@ impl PreviousKernelDataBuilder { if i as u64 < num_updates as u64 { let update_request = PublicDataUpdateRequest { // The default leaf index is its index + 23. - leaf_slot: value_offset + i + 23, + leaf_slot: (value_offset + i + 23) as Field, // The default value is its index + 678. - new_value: value_offset + i + 678 + new_value: (value_offset + i + 678) as Field }; self.end.public_data_update_requests.push(update_request); } @@ -97,9 +97,9 @@ impl PreviousKernelDataBuilder { if i as u64 < num_reads as u64 { let read_request = PublicDataRead { // The default leaf index is its index + 34. - leaf_slot: value_offset + i + 34, + leaf_slot: (value_offset + i + 34) as Field, // The default value is its index + 5566. - value: value_offset + i + 5566 + value: (value_offset + i + 5566) as Field }; self.end.public_data_reads.push(read_request); } @@ -112,7 +112,7 @@ impl PreviousKernelDataBuilder { counter } - pub fn add_read_request_for_transient_commitment(&mut self, commitment_index: Field) -> Field { + pub fn add_read_request_for_transient_commitment(&mut self, commitment_index: u64) -> u64 { let new_read_request_index = self.end.read_requests.len(); let commitment = self.end.new_note_hashes.get(commitment_index); let read_request = SideEffect { value: commitment.value, counter: self.next_sideffect_counter() }; @@ -120,30 +120,30 @@ impl PreviousKernelDataBuilder { new_read_request_index } - pub fn append_new_note_hashes(&mut self, num_new_note_hashes: Field) { + pub fn append_new_note_hashes(&mut self, num_new_note_hashes: u64) { let mocked_value_offset = self.end.new_note_hashes.len() + 1; for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { - if i as u64 < num_new_note_hashes as u64 { + if i < num_new_note_hashes { // The default value is its index + 1. self.end.new_note_hashes.push( - SideEffect { value: i + mocked_value_offset, counter: self.next_sideffect_counter() } + SideEffect { value: (i + mocked_value_offset) as Field, counter: self.next_sideffect_counter() } ); } } } - pub fn append_new_nullifiers_from_private(&mut self, num_extra_nullifier: Field) { + pub fn append_new_nullifiers_from_private(&mut self, num_extra_nullifier: u64) { // in private kernel, the nullifiers have not yet been partitioned // (that is part of the job of the private kernel tail) // so the tx nullifier is in `end` let first_nullifier = self.end.new_nullifiers.get(0); - let mocked_value_offset = first_nullifier.value + self.end.new_nullifiers.len(); + let mocked_value_offset = first_nullifier.value + self.end.new_nullifiers.len() as Field; for i in 1..MAX_NEW_NULLIFIERS_PER_TX { - if i as u64 <= num_extra_nullifier as u64 { + if i <= num_extra_nullifier { // The default value is its index + the value of the first nullifier. 
self.end.new_nullifiers.push( SideEffectLinkedToNoteHash { - value: i + mocked_value_offset, + value: i as Field + mocked_value_offset, note_hash: first_nullifier.note_hash, counter: self.next_sideffect_counter() } @@ -152,15 +152,15 @@ impl PreviousKernelDataBuilder { } } - pub fn append_new_nullifiers_from_public(&mut self, num_extra_nullifier: Field) { + pub fn append_new_nullifiers_from_public(&mut self, num_extra_nullifier: u64) { let first_nullifier = self.end_non_revertible.new_nullifiers.get(0); - let mocked_value_offset = first_nullifier.value + self.end.new_nullifiers.len(); + let mocked_value_offset = first_nullifier.value + self.end.new_nullifiers.len() as Field; for i in 1..MAX_NEW_NULLIFIERS_PER_TX { - if i as u64 <= num_extra_nullifier as u64 { + if i <= num_extra_nullifier { // The default value is its index + the value of the first nullifier. self.end.new_nullifiers.push( SideEffectLinkedToNoteHash { - value: i + mocked_value_offset, + value: i as Field + mocked_value_offset, note_hash: first_nullifier.note_hash, counter: self.next_sideffect_counter() } @@ -169,15 +169,15 @@ impl PreviousKernelDataBuilder { } } - pub fn append_new_nullifiers_non_revertible(&mut self, num_extra_nullifier: Field) { + pub fn append_new_nullifiers_non_revertible(&mut self, num_extra_nullifier: u64) { let first_nullifier = self.end_non_revertible.new_nullifiers.get(0); - let mocked_value_offset = first_nullifier.value + self.end_non_revertible.new_nullifiers.len(); + let mocked_value_offset = first_nullifier.value + self.end_non_revertible.new_nullifiers.len() as Field; for i in 1..MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX { - if i as u64 <= num_extra_nullifier as u64 { + if i <= num_extra_nullifier { // The default value is its index + the value of the first nullifier. 
self.end_non_revertible.new_nullifiers.push( SideEffectLinkedToNoteHash { - value: i + mocked_value_offset, + value: i as Field + mocked_value_offset, note_hash: first_nullifier.note_hash, counter: self.next_sideffect_counter() } diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr index 31bf4bab027..85c4f08c7be 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr @@ -108,13 +108,13 @@ impl PrivateCallDataBuilder { } } - pub fn append_private_call_requests(&mut self, num_requests: Field, is_delegate_call: bool) { + pub fn append_private_call_requests(&mut self, num_requests: u64, is_delegate_call: bool) { let (hashes, call_requests) = self.generate_call_requests(self.private_call_stack, num_requests, is_delegate_call); self.public_inputs.private_call_stack_hashes.extend_from_bounded_vec(hashes); self.private_call_stack.extend_from_bounded_vec(call_requests); } - pub fn append_public_call_requests(&mut self, num_requests: Field, is_delegate_call: bool) { + pub fn append_public_call_requests(&mut self, num_requests: u64, is_delegate_call: bool) { let (hashes, call_requests) = self.generate_call_requests(self.public_call_stack, num_requests, is_delegate_call); self.public_inputs.public_call_stack_hashes.extend_from_bounded_vec(hashes); self.public_call_stack.extend_from_bounded_vec(call_requests); @@ -123,7 +123,7 @@ impl PrivateCallDataBuilder { fn generate_call_requests( self, requests: BoundedVec, - num_requests: Field, + num_requests: u64, is_delegate_call: bool ) -> (BoundedVec, BoundedVec) { let value_offset = requests.len(); @@ -133,14 +133,14 @@ impl PrivateCallDataBuilder { caller_context.msg_sender = call_context.msg_sender; caller_context.storage_contract_address = call_context.storage_contract_address; } - let mut call_requests: BoundedVec = BoundedVec::new(CallRequest::empty()); - let mut hashes: BoundedVec = BoundedVec::new(0); + let mut call_requests: BoundedVec = BoundedVec::new(); + let mut hashes: BoundedVec = BoundedVec::new(); let mut exceeded_len = false; for i in 0..N { exceeded_len |= i == num_requests; if !exceeded_len { // The default hash is its index + 7788. 
- let hash = value_offset + 7788; + let hash = (value_offset + 7788) as Field; let request = CallRequest { hash, caller_contract_address: self.contract_address, @@ -156,13 +156,13 @@ impl PrivateCallDataBuilder { (hashes, call_requests) } - pub fn append_read_requests(&mut self, num_read_requests: Field) { + pub fn append_read_requests(&mut self, num_read_requests: u64) { let (read_requests, read_request_membership_witnesses) = fixtures::read_requests::generate_read_requests(num_read_requests); self.public_inputs.read_requests.extend_from_bounded_vec(read_requests); self.read_request_membership_witnesses.extend_from_bounded_vec(read_request_membership_witnesses); } - pub fn append_transient_read_requests(&mut self, num_read_requests: Field) { + pub fn append_transient_read_requests(&mut self, num_read_requests: u64) { let (read_requests, read_request_membership_witnesses) = fixtures::read_requests::generate_transient_read_requests(num_read_requests); self.public_inputs.read_requests.extend_from_bounded_vec(read_requests); self.read_request_membership_witnesses.extend_from_bounded_vec(read_request_membership_witnesses); diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr index 9d47e973824..7ac3af533b5 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr @@ -75,15 +75,15 @@ impl PublicCallDataBuilder { *self } - pub fn append_public_call_requests_for_regular_calls(&mut self, num_requests: Field) { + pub fn append_public_call_requests_for_regular_calls(&mut self, num_requests: u64) { self.append_public_call_requests(num_requests, false); } - pub fn append_public_call_requests_for_delegate_calls(&mut self, num_requests: Field) { + pub fn append_public_call_requests_for_delegate_calls(&mut self, num_requests: u64) { self.append_public_call_requests(num_requests, true); } - pub fn append_public_call_requests(&mut self, num_requests: Field, is_delegate_call: bool) { + pub fn append_public_call_requests(&mut self, num_requests: u64, is_delegate_call: bool) { let value_offset = self.public_inputs.public_call_stack_hashes.len(); let mut caller_context = CallerContext::empty(); if is_delegate_call { @@ -96,7 +96,7 @@ impl PublicCallDataBuilder { exceeded_len |= i == num_requests; if !exceeded_len { // The default hash is its index + 7788. - let hash = value_offset + 7788; + let hash = (value_offset + 7788) as Field; let call_request = CallRequest { hash, caller_contract_address: self.contract_address, @@ -117,9 +117,9 @@ impl PublicCallDataBuilder { if i as u64 < num_reads as u64 { let read_request = StorageRead { // The default storage slot is its index + 1. - storage_slot: value_offset + i + 1, + storage_slot: (value_offset + i + 1) as Field, // The default value is its index + 999. - current_value: value_offset + i + 999 + current_value: (value_offset + i + 999) as Field }; self.public_inputs.contract_storage_reads.push(read_request); } @@ -140,9 +140,9 @@ impl PublicCallDataBuilder { if i as u64 < num_updates as u64 { let update_request = StorageUpdateRequest { // The default storage slot is its index + 1. - storage_slot: value_offset + i + 1, + storage_slot: (value_offset + i + 1) as Field, // The default value is its index + 890. 
- new_value: value_offset + i + 890 + new_value: (value_offset + i + 890) as Field }; self.public_inputs.contract_storage_update_requests.push(update_request); } diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/transaction/tx_context.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/transaction/tx_context.nr index 2534e047535..d33ee842c3a 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/transaction/tx_context.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/transaction/tx_context.nr @@ -28,7 +28,7 @@ impl Eq for TxContext { impl Serialize for TxContext { fn serialize(self) -> [Field; TX_CONTEXT_DATA_LENGTH] { - let mut fields: BoundedVec = BoundedVec::new(0); + let mut fields: BoundedVec = BoundedVec::new(); fields.push(self.is_fee_payment_tx as Field); fields.push(self.is_rebate_payment_tx as Field); diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/transaction/tx_request.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/transaction/tx_request.nr index ebbe212d5ea..17b604530f4 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/transaction/tx_request.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/transaction/tx_request.nr @@ -29,7 +29,7 @@ impl Hash for TxRequest { impl Serialize for TxRequest { fn serialize(self) -> [Field; TX_REQUEST_LENGTH] { // TODO(#4390): This should accept a reader ^ to avoid copying data. - let mut fields: BoundedVec = BoundedVec::new(0); + let mut fields: BoundedVec = BoundedVec::new(); fields.push(self.origin.to_field()); fields.extend_from_array(self.function_data.serialize()); diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/utils.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/utils.nr index 7b1496afb46..547ff3ea09e 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/utils.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/utils.nr @@ -12,7 +12,7 @@ pub fn conditional_assign(predicate: bool, lhs: Field, rhs: Field) -> Field { if predicate { lhs } else { rhs } } -pub fn arr_copy_slice(src: [T; N], mut dst: [T; M], offset: Field) -> [T; M] { +pub fn arr_copy_slice(src: [T; N], mut dst: [T; M], offset: u64) -> [T; M] { for i in 0..dst.len() { dst[i] = src[i + offset]; } diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/utils/arrays.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/utils/arrays.nr index 7cfcfc011a5..4cd8dd3e196 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/utils/arrays.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/utils/arrays.nr @@ -10,7 +10,7 @@ pub fn array_to_bounded_vec(array: [T; N]) -> BoundedVec where T: Em } } - BoundedVec { storage: array, len, empty_value: T::empty() } + BoundedVec { storage: array, len } } // Routine which validates that all zero values of an array form a contiguous region at the end, i.e., @@ -35,7 +35,7 @@ pub fn validate_array(array: [T; N]) where T: Empty + Eq { // Helper method to determine the number of non-zero/empty elements in a validated array (ie, validate_array(array) // should be true). 
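Alongside the integer-typing changes, every BoundedVec::new(empty_value) call in this diff becomes BoundedVec::new(), and array_to_bounded_vec above now constructs the struct without an empty_value field. A small usage sketch of the zero-argument constructor (illustrative values only, assuming the 0.24-era stdlib semantics where unused slots are implicitly zero-initialized):

#[test]
fn bounded_vec_new_takes_no_arguments() {
    // No placeholder argument: unused storage slots default to zero.
    let mut fields: BoundedVec<Field, 4> = BoundedVec::new();
    fields.push(1);
    fields.extend_from_array([2, 3]);
    assert(fields.len() == 3);
}

The array_length helper below is unchanged in spirit; only its return type moves from Field to u64.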
-pub fn array_length(array: [T; N]) -> Field where T: Empty + Eq { +pub fn array_length(array: [T; N]) -> u64 where T: Empty + Eq { let mut length = 0; let mut end = false; for elem in array { diff --git a/noir-projects/noir-protocol-circuits/src/crates/types/src/utils/reader.nr b/noir-projects/noir-protocol-circuits/src/crates/types/src/utils/reader.nr index 6546b4aced6..dffc118e7d8 100644 --- a/noir-projects/noir-protocol-circuits/src/crates/types/src/utils/reader.nr +++ b/noir-projects/noir-protocol-circuits/src/crates/types/src/utils/reader.nr @@ -1,6 +1,6 @@ struct Reader { data: [Field; N], - offset: Field, + offset: u64, } impl Reader { diff --git a/noir/.github/ACVM_NOT_PUBLISHABLE.md b/noir/.github/ACVM_NOT_PUBLISHABLE.md new file mode 100644 index 00000000000..e7eacb3b523 --- /dev/null +++ b/noir/.github/ACVM_NOT_PUBLISHABLE.md @@ -0,0 +1,13 @@ +--- +title: "ACVM crates are not publishable" +assignees: TomAFrench kevaundray savio-sou +--- + + +The ACVM crates are currently unpublishable; making a release will NOT push our crates to crates.io. + +This is likely due to a crate we depend on bumping its MSRV above our own. Our lockfile is not taken into account when publishing to crates.io (as people downloading our crate don't use it), so we need to be able to build with the most up-to-date versions of our dependencies (including transitive dependencies). + +Check the [MSRV check]({{env.WORKFLOW_URL}}) workflow for details. + +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/noir/.github/CRATES_IO_PUBLISH_FAILED.md b/noir/.github/CRATES_IO_PUBLISH_FAILED.md new file mode 100644 index 00000000000..ec4de319772 --- /dev/null +++ b/noir/.github/CRATES_IO_PUBLISH_FAILED.md @@ -0,0 +1,10 @@ +--- +title: "ACVM crates failed to publish" +assignees: TomAFrench kevaundray savio-sou +--- + +The {{env.CRATE_VERSION}} release of the ACVM crates failed. + +Check the [Publish ACVM]({{env.WORKFLOW_URL}}) workflow for details. + +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/noir/.github/JS_PUBLISH_FAILED.md b/noir/.github/JS_PUBLISH_FAILED.md new file mode 100644 index 00000000000..5b9f79aac1f --- /dev/null +++ b/noir/.github/JS_PUBLISH_FAILED.md @@ -0,0 +1,11 @@ +--- +title: "JS packages failed to publish" +assignees: TomAFrench kevaundray savio-sou +labels: js +--- + +The {{env.NPM_TAG}} release of the JS packages failed. + +Check the [Publish JS packages]({{env.WORKFLOW_URL}}) workflow for details.
+ +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/noir/.github/scripts/integration-test-browser.sh b/noir/.github/scripts/integration-test-browser.sh index c9cda58aab8..12195a88928 100755 --- a/noir/.github/scripts/integration-test-browser.sh +++ b/noir/.github/scripts/integration-test-browser.sh @@ -2,4 +2,4 @@ set -eu ./.github/scripts/playwright-install.sh -yarn workspace integration-tests test +yarn workspace integration-tests test:browser \ No newline at end of file diff --git a/noir/.github/scripts/integration-test-node.sh b/noir/.github/scripts/integration-test-node.sh index 7260ca4bb0f..b7f00c65620 100755 --- a/noir/.github/scripts/integration-test-node.sh +++ b/noir/.github/scripts/integration-test-node.sh @@ -2,4 +2,4 @@ set -eu apt-get install libc++-dev -y -yarn workspace integration-tests test +yarn workspace integration-tests test:node diff --git a/noir/.github/scripts/wasm-opt-install.sh b/noir/.github/scripts/wasm-opt-install.sh new file mode 100755 index 00000000000..cbdeb8f2bfe --- /dev/null +++ b/noir/.github/scripts/wasm-opt-install.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -eu + +cd $(dirname "$0") + +./cargo-binstall-install.sh + +cargo-binstall wasm-opt --version 0.116.0 -y diff --git a/noir/.github/workflows/docs-pr.yml b/noir/.github/workflows/docs-pr.yml index 712fb100ba6..5d0b72c6ad8 100644 --- a/noir/.github/workflows/docs-pr.yml +++ b/noir/.github/workflows/docs-pr.yml @@ -55,7 +55,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -114,3 +114,16 @@ jobs: NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} timeout-minutes: 1 + + add_comment: + needs: [deploy_preview] + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Tag dev rel in comment + uses: marocchino/sticky-pull-request-comment@v2 + with: + message: | + FYI @noir-lang/developerrelations on Noir doc changes. 
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/noir/.github/workflows/formatting.yml b/noir/.github/workflows/formatting.yml index 97736e2415e..279e90f5f6f 100644 --- a/noir/.github/workflows/formatting.yml +++ b/noir/.github/workflows/formatting.yml @@ -32,7 +32,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: ${{ matrix.target }} components: clippy, rustfmt @@ -63,3 +63,68 @@ - name: Run `yarn lint` run: yarn lint + + build-nargo: + runs-on: ubuntu-22.04 + timeout-minutes: 30 + + steps: + - name: Checkout Noir repo + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.73.0 + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Build Nargo + run: cargo build --package nargo_cli --release + + - name: Package artifacts + run: | + mkdir dist + cp ./target/release/nargo ./dist/nargo + 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: nargo + path: ./dist/* + retention-days: 3 + + nargo_fmt: + needs: [build-nargo] + name: Nargo fmt + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Format stdlib + working-directory: ./noir_stdlib + run: nargo fmt --check + + - name: Format test suite + working-directory: ./test_programs + run: ./format.sh check diff --git a/noir/.github/workflows/gates_report.yml b/noir/.github/workflows/gates_report.yml index 39416e628a9..f3f798fc5ea 100644 --- a/noir/.github/workflows/gates_report.yml +++ b/noir/.github/workflows/gates_report.yml @@ -18,7 +18,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/noir/.github/workflows/publish-acvm.yml b/noir/.github/workflows/publish-acvm.yml index 0251aaa0377..959cd8e4bca 100644 --- a/noir/.github/workflows/publish-acvm.yml +++ b/noir/.github/workflows/publish-acvm.yml @@ -18,7 +18,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 # These steps are in a specific order so crate dependencies are updated first - name: Publish acir_field @@ -62,3 +62,16 @@ cargo publish --package acvm env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} + + # Raise an issue if any package failed to publish + - name: Alert on failed publish + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + CRATE_VERSION: ${{ inputs.noir-ref }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/CRATES_IO_PUBLISH_FAILED.md \ No newline at end of file diff --git a/noir/.github/workflows/publish-es-packages.yml 
b/noir/.github/workflows/publish-es-packages.yml index 231a6124785..f72a97b2684 100644 --- a/noir/.github/workflows/publish-es-packages.yml +++ b/noir/.github/workflows/publish-es-packages.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -32,6 +32,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noirc_abi run: ./.github/scripts/noirc-abi-build.sh @@ -51,7 +54,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -61,6 +64,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noir_js_types run: yarn workspace @noir-lang/types build @@ -83,7 +89,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -93,6 +99,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build acvm_js run: ./.github/scripts/acvm_js-build.sh @@ -114,7 +123,7 @@ jobs: - uses: actions/download-artifact@v4 with: - name: acvm_js + name: acvm-js path: acvm-repo/acvm_js - uses: actions/download-artifact@v4 @@ -143,3 +152,16 @@ jobs: - name: Publish ES Packages run: yarn publish:all --access public --tag ${{ inputs.npm-tag }} + + # Raise an issue if any package failed to publish + - name: Alert on failed publish + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TAG: ${{ inputs.npm-tag }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/JS_PUBLISH_FAILED.md \ No newline at end of file diff --git a/noir/.github/workflows/publish-nargo.yml b/noir/.github/workflows/publish-nargo.yml index 085ab013e4e..e47e1a13053 100644 --- a/noir/.github/workflows/publish-nargo.yml +++ b/noir/.github/workflows/publish-nargo.yml @@ -46,7 +46,7 @@ jobs: echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx$(sw_vers -productVersion) --show-sdk-platform-version)" >> $GITHUB_ENV - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: ${{ matrix.target }} @@ -120,7 +120,7 @@ jobs: ref: ${{ inputs.tag || env.GITHUB_REF }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: ${{ matrix.target }} diff --git a/noir/.github/workflows/pull-request-title.yml b/noir/.github/workflows/pull-request-title.yml index 4b8a626a94e..7e9b729da28 100644 --- a/noir/.github/workflows/pull-request-title.yml +++ b/noir/.github/workflows/pull-request-title.yml @@ -27,3 +27,22 @@ jobs: fix feat chore + + force-push-comment: + name: Warn external contributors about force-pushing + runs-on: ubuntu-latest + if: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.repo.full_name != 'noir-lang/noir' }} + permissions: + pull-requests: write + + steps: + - name: Post comment on force pushes + uses: 
marocchino/sticky-pull-request-comment@v2 + with: + message: | + Thank you for your contribution to the Noir language. + + Please **do not force push to this branch** after the Noir team have started review of this PR. Doing so will only delay us merging your PR as we will need to start the review process from scratch. + + Thanks for your understanding. + \ No newline at end of file diff --git a/noir/.github/workflows/test-js-packages.yml b/noir/.github/workflows/test-js-packages.yml index 1afd11c94fa..b3908ee5d3e 100644 --- a/noir/.github/workflows/test-js-packages.yml +++ b/noir/.github/workflows/test-js-packages.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -55,7 +55,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -66,6 +66,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noirc_abi run: ./.github/scripts/noirc-abi-build.sh @@ -86,7 +89,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -97,6 +100,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noir_js_types run: yarn workspace @noir-lang/types build @@ -121,7 +127,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -132,6 +138,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build acvm_js run: ./.github/scripts/acvm_js-build.sh diff --git a/noir/.github/workflows/test-rust-workspace-msrv.yml b/noir/.github/workflows/test-rust-workspace-msrv.yml new file mode 100644 index 00000000000..0b2855fa834 --- /dev/null +++ b/noir/.github/workflows/test-rust-workspace-msrv.yml @@ -0,0 +1,125 @@ +name: Test (MSRV check) + +# TL;DR https://github.com/noir-lang/noir/issues/4384 +# +# This workflow acts to ensure that we can publish to crates.io, we need this extra check as libraries don't respect the Cargo.lock file committed in this repository. +# We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. + +on: + schedule: + # Run a nightly check at 2 AM UTC + - cron: "0 2 * * *" + push: + branches: + - master + +# This will cancel previous runs when a branch or PR is updated +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + build-test-artifacts: + name: Build test artifacts + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.73.0 + with: + targets: x86_64-unknown-linux-gnu + + # We force the ACVM crate and all of its dependencies to update their dependencies + # This ensures that we'll be able to build the crates when they're being published. 
+ - name: Update Cargo.lock + run: | + cargo update --package acvm --aggressive + cargo update --package bn254_blackbox_solver --aggressive + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu-msrv-check + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Install nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest@0.9.67 + + - name: Build and archive tests + run: cargo nextest archive --workspace --release --archive-file nextest-archive.tar.zst + + - name: Upload archive to workflow + uses: actions/upload-artifact@v4 + with: + name: nextest-archive + path: nextest-archive.tar.zst + + run-tests: + name: "Run tests (partition ${{matrix.partition}})" + runs-on: ubuntu-latest + needs: [build-test-artifacts] + strategy: + fail-fast: false + matrix: + partition: [1, 2, 3, 4] + steps: + - uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.73.0 + with: + targets: x86_64-unknown-linux-gnu + + - name: Install nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest@0.9.67 + + - name: Download archive + uses: actions/download-artifact@v4 + with: + name: nextest-archive + - name: Run tests + run: | + cargo nextest run --archive-file nextest-archive.tar.zst \ + --partition count:${{ matrix.partition }}/4 + + # This is a job which depends on all test jobs and reports the overall status. + # This allows us to add/remove test jobs without having to update the required workflows. + tests-end: + name: Rust End + runs-on: ubuntu-latest + # We want this job to always run (even if the dependent jobs fail) as we want this job to fail rather than skipping. + if: ${{ always() }} + needs: + - run-tests + + steps: + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + + # Raise an issue if the tests failed + - name: Alert on failed MSRV check + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/ACVM_NOT_PUBLISHABLE.md \ No newline at end of file diff --git a/noir/.github/workflows/test-rust-workspace.yml b/noir/.github/workflows/test-rust-workspace.yml index bb31ab7873a..22684de3044 100644 --- a/noir/.github/workflows/test-rust-workspace.yml +++ b/noir/.github/workflows/test-rust-workspace.yml @@ -23,7 +23,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu @@ -59,7 +59,7 @@ - uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu @@ -88,13 +88,13 @@ - run-tests steps: - - name: Report overall success - run: | - if [[ $FAIL == true ]]; then - exit 1 - else - exit 0 - fi - env: - # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. 
- FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} diff --git a/noir/.gitrepo b/noir/.gitrepo index 8b34412d5de..48acdbfe88c 100644 --- a/noir/.gitrepo +++ b/noir/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/noir-lang/noir branch = master - commit = c44ef14847a436733206b6dd9590a7ab214ecd97 - parent = 382626cddaa175041695e2eb70ad3c350351ffe3 + commit = e80c5f73a4cdcba3f5cf44576c605ba1e611a2ab + parent = 8307dadd853d5091841e169c841ab6b09c223efb method = merge cmdver = 0.4.6 diff --git a/noir/Cargo.lock b/noir/Cargo.lock index 84d7f5052db..c0438eaf81f 100644 --- a/noir/Cargo.lock +++ b/noir/Cargo.lock @@ -213,9 +213,6 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arena" version = "0.24.0" -dependencies = [ - "generational-arena", -] [[package]] name = "ark-bls12-381" @@ -593,6 +590,7 @@ dependencies = [ "js-sys", "noir_grumpkin", "num-bigint", + "num-traits", "pkg-config", "reqwest", "rust-embed", @@ -1843,15 +1841,6 @@ dependencies = [ "byteorder", ] -[[package]] -name = "generational-arena" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877e94aff08e743b651baaea359664321055749b398adff8740a7399af7796e7" -dependencies = [ - "cfg-if 1.0.0", -] - [[package]] name = "generic-array" version = "0.14.7" @@ -2960,6 +2949,7 @@ dependencies = [ "noirc_macros", "rust-embed", "serde", + "thiserror", "tracing", ] diff --git a/noir/Cargo.toml b/noir/Cargo.toml index 4f95e3b0821..7d5da7b00d0 100644 --- a/noir/Cargo.toml +++ b/noir/Cargo.toml @@ -45,7 +45,7 @@ version = "0.24.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" -rust-version = "1.71.1" +rust-version = "1.73.0" license = "MIT OR Apache-2.0" repository = "https://github.com/noir-lang/noir/" @@ -69,12 +69,10 @@ noirc_errors = { path = "compiler/noirc_errors" } noirc_evaluator = { path = "compiler/noirc_evaluator" } noirc_frontend = { path = "compiler/noirc_frontend" } noirc_printable_type = { path = "compiler/noirc_printable_type" } -noir_wasm = { path = "compiler/wasm" } # Noir tooling workspace dependencies nargo = { path = "tooling/nargo" } nargo_fmt = { path = "tooling/nargo_fmt" } -nargo_cli = { path = "tooling/nargo_cli" } nargo_toml = { path = "tooling/nargo_toml" } noir_lsp = { path = "tooling/lsp" } noir_debugger = { path = "tooling/debugger" } @@ -97,8 +95,6 @@ getrandom = "0.2" # Debugger dap = "0.4.1-alpha1" - -cfg-if = "1.0.0" clap = { version = "4.3.19", features = ["derive", "env"] } codespan = { version = "0.11.1", features = ["serialization"] } codespan-lsp = "0.11.1" diff --git a/noir/Dockerfile.ci b/noir/Dockerfile.ci index a73ce4ab969..e0dc030980c 100644 --- a/noir/Dockerfile.ci +++ b/noir/Dockerfile.ci @@ -1,4 +1,4 @@ -FROM rust:1.71.1-slim-bookworm as base +FROM rust:1.73.0-slim-bookworm as base RUN apt-get update && apt-get upgrade -y && apt-get install build-essential git -y WORKDIR /usr/src/noir ENV PATH="${PATH}:/usr/src/noir/target/release" diff --git a/noir/README.md b/noir/README.md index 771c3f1c74d..5c93512ae26 100644 --- a/noir/README.md +++ 
b/noir/README.md @@ -54,7 +54,7 @@ Concretely the following items are on the road map: ## Minimum Rust version -This crate's minimum supported rustc version is 1.71.1. +This crate's minimum supported rustc version is 1.73.0. ## Working on this project diff --git a/noir/acvm-repo/acir_field/src/generic_ark.rs b/noir/acvm-repo/acir_field/src/generic_ark.rs index dc54d271beb..3178011a075 100644 --- a/noir/acvm-repo/acir_field/src/generic_ark.rs +++ b/noir/acvm-repo/acir_field/src/generic_ark.rs @@ -429,63 +429,6 @@ impl SubAssign for FieldElement { } } -#[cfg(test)] -mod tests { - #[test] - fn and() { - let max = 10_000u32; - - let num_bits = (std::mem::size_of::() * 8) as u32 - max.leading_zeros(); - - for x in 0..max { - let x = crate::generic_ark::FieldElement::::from(x as i128); - let res = x.and(&x, num_bits); - assert_eq!(res.to_be_bytes(), x.to_be_bytes()); - } - } - - #[test] - fn serialize_fixed_test_vectors() { - // Serialized field elements from of 0, -1, -2, -3 - let hex_strings = vec![ - "0000000000000000000000000000000000000000000000000000000000000000", - "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", - "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff", - "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe", - ]; - - for (i, string) in hex_strings.into_iter().enumerate() { - let minus_i_field_element = - -crate::generic_ark::FieldElement::::from(i as i128); - assert_eq!(minus_i_field_element.to_hex(), string); - } - } - - #[test] - fn deserialize_even_and_odd_length_hex() { - // Test cases of (odd, even) length hex strings - let hex_strings = - vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")]; - for (i, case) in hex_strings.into_iter().enumerate() { - let i_field_element = - crate::generic_ark::FieldElement::::from(i as i128); - let odd_field_element = - crate::generic_ark::FieldElement::::from_hex(case.0).unwrap(); - let even_field_element = - crate::generic_ark::FieldElement::::from_hex(case.1).unwrap(); - - assert_eq!(i_field_element, odd_field_element); - assert_eq!(odd_field_element, even_field_element); - } - } - - #[test] - fn max_num_bits_smoke() { - let max_num_bits_bn254 = crate::generic_ark::FieldElement::::max_num_bits(); - assert_eq!(max_num_bits_bn254, 254); - } -} - fn mask_vector_le(bytes: &mut [u8], num_bits: usize) { // reverse to big endian format bytes.reverse(); @@ -543,3 +486,60 @@ fn superscript(n: u64) -> String { panic!("{}", n.to_string() + " can't be converted to superscript."); } } + +#[cfg(test)] +mod tests { + #[test] + fn and() { + let max = 10_000u32; + + let num_bits = (std::mem::size_of::() * 8) as u32 - max.leading_zeros(); + + for x in 0..max { + let x = crate::generic_ark::FieldElement::::from(x as i128); + let res = x.and(&x, num_bits); + assert_eq!(res.to_be_bytes(), x.to_be_bytes()); + } + } + + #[test] + fn serialize_fixed_test_vectors() { + // Serialized field elements from of 0, -1, -2, -3 + let hex_strings = vec![ + "0000000000000000000000000000000000000000000000000000000000000000", + "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", + "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff", + "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe", + ]; + + for (i, string) in hex_strings.into_iter().enumerate() { + let minus_i_field_element = + -crate::generic_ark::FieldElement::::from(i as i128); + assert_eq!(minus_i_field_element.to_hex(), string); + } + } + + #[test] + fn 
deserialize_even_and_odd_length_hex() { + // Test cases of (odd, even) length hex strings + let hex_strings = + vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")]; + for (i, case) in hex_strings.into_iter().enumerate() { + let i_field_element = + crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(i as i128); + let odd_field_element = + crate::generic_ark::FieldElement::<ark_bn254::Fr>::from_hex(case.0).unwrap(); + let even_field_element = + crate::generic_ark::FieldElement::<ark_bn254::Fr>::from_hex(case.1).unwrap(); + + assert_eq!(i_field_element, odd_field_element); + assert_eq!(odd_field_element, even_field_element); + } + } + + #[test] + fn max_num_bits_smoke() { + let max_num_bits_bn254 = crate::generic_ark::FieldElement::<ark_bn254::Fr>::max_num_bits(); + assert_eq!(max_num_bits_bn254, 254); + } +}
diff --git a/noir/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/noir/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 64fe5291cc6..c6ca18d30ae 100644 --- a/noir/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/noir/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -72,12 +72,9 @@ impl RangeOptimizer { } } - Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { input: FunctionInput { witness, num_bits }, - }) => { - Some((*witness, *num_bits)) - } + }) => Some((*witness, *num_bits)), _ => None, }) else {
diff --git a/noir/acvm-repo/acvm/src/pwg/blackbox/bigint.rs b/noir/acvm-repo/acvm/src/pwg/blackbox/bigint.rs index 986afaa3ce7..f094bb1ba20 100644 --- a/noir/acvm-repo/acvm/src/pwg/blackbox/bigint.rs +++ b/noir/acvm-repo/acvm/src/pwg/blackbox/bigint.rs @@ -69,7 +69,7 @@ impl BigIntSolver { pub(crate) fn bigint_to_bytes( &self, input: u32, - outputs: &Vec<Witness>, + outputs: &[Witness], initial_witness: &mut WitnessMap, ) -> Result<(), OpcodeResolutionError> { let bigint = self.get_bigint(input, BlackBoxFunc::BigIntToLeBytes)?;
diff --git a/noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 06489822c92..1bc26f06188 100644 --- a/noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -3,7 +3,7 @@ use acir::{ native_types::{Witness, WitnessMap}, BlackBoxFunc, FieldElement, }; -use acvm_blackbox_solver::{sha256compression, BlackBoxResolutionError}; +use acvm_blackbox_solver::{sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError}; use crate::pwg::{insert_value, witness_to_value}; use crate::OpcodeResolutionError; @@ -131,3 +131,37 @@ pub(crate) fn solve_sha_256_permutation_opcode( Ok(()) } + +pub(crate) fn solve_poseidon2_permutation_opcode( + backend: &impl BlackBoxFunctionSolver, + initial_witness: &mut WitnessMap, + inputs: &[FunctionInput], + outputs: &[Witness], + len: u32, +) -> Result<(), OpcodeResolutionError> { + if len as usize != inputs.len() { + return Err(OpcodeResolutionError::BlackBoxFunctionFailed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!( + "the number of inputs does not match specified length. {} > {}", + inputs.len(), + len + ), + )); + } + + // Read witness assignments + let mut state = Vec::new(); + for input in inputs.iter() { + let witness_assignment = witness_to_value(initial_witness, input.witness)?; + state.push(*witness_assignment); + } + + let state = backend.poseidon2_permutation(&state, len)?; + + // Write witness assignments + for (output_witness, value) in outputs.iter().zip(state.into_iter()) { + insert_value(output_witness, value, initial_witness)?; + } + Ok(()) +}
diff --git a/noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 7ae92fd84fc..4309cad1b2e 100644 --- a/noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -5,7 +5,9 @@ use acir::{ }; use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; -use self::{bigint::BigIntSolver, pedersen::pedersen_hash}; +use self::{ + bigint::BigIntSolver, hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, +}; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; use crate::{pwg::witness_to_value, BlackBoxFunctionSolver}; @@ -204,7 +206,6 @@ pub(crate) fn solve( BlackBoxFuncCall::BigIntToLeBytes { input, outputs } => { bigint_solver.bigint_to_bytes(*input, outputs, initial_witness) } - BlackBoxFuncCall::Poseidon2Permutation { .. } => todo!(), BlackBoxFuncCall::Sha256Compression { inputs, hash_values, outputs } => { solve_sha_256_permutation_opcode( initial_witness, @@ -214,5 +215,8 @@ pub(crate) fn solve( bb_func.get_black_box_func(), ) } + BlackBoxFuncCall::Poseidon2Permutation { inputs, outputs, len } => { + solve_poseidon2_permutation_opcode(backend, initial_witness, inputs, outputs, *len) + } } }
diff --git a/noir/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/noir/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index 2234710dec0..f0ab4561229 100644 --- a/noir/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ b/noir/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -36,6 +36,11 @@ pub trait BlackBoxFunctionSolver { input2_x: &FieldElement, input2_y: &FieldElement, ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result<Vec<FieldElement>, BlackBoxResolutionError>; } pub struct StubbedBlackBoxSolver; @@ -89,4 +94,11 @@ impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Err(Self::fail(BlackBoxFunc::EmbeddedCurveAdd)) } + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result<Vec<FieldElement>, BlackBoxResolutionError> { + Err(Self::fail(BlackBoxFunc::Poseidon2Permutation)) + } }
diff --git a/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml index ef80e2c1c0f..ea601a6b80f 100644 --- a/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -16,6 +16,7 @@ repository.workspace = true acir.workspace = true acvm_blackbox_solver.workspace = true thiserror.workspace = true +num-traits.workspace = true rust-embed = { version = "6.6.0", features = [ "debug-embed",
diff --git a/noir/acvm-repo/bn254_blackbox_solver/src/lib.rs b/noir/acvm-repo/bn254_blackbox_solver/src/lib.rs index 13aa956f9e1..be0e60ada96 100644 --- a/noir/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/noir/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -6,9 +6,11 @@ use acir::{BlackBoxFunc, FieldElement}; use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError}; mod fixed_base_scalar_mul; +mod poseidon2; mod wasm; pub use fixed_base_scalar_mul::{embedded_curve_add, fixed_base_scalar_mul}; +use poseidon2::Poseidon2; use wasm::Barretenberg; use self::wasm::{Pedersen, SchnorrSig}; @@ -97,4 +99,13 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { embedded_curve_add(*input1_x, *input1_y, *input2_x, *input2_y) } + + fn poseidon2_permutation( + &self, + inputs: &[FieldElement], + len: u32, + ) -> Result<Vec<FieldElement>, BlackBoxResolutionError> { + let poseidon = Poseidon2::new(); + poseidon.permutation(inputs, len) + } }
diff --git a/noir/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs b/noir/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs new file mode 100644 index 00000000000..e0ed5bcd053 --- /dev/null +++ b/noir/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs @@ -0,0 +1,1043 @@ +use acir::FieldElement; +use acvm_blackbox_solver::BlackBoxResolutionError; +use num_bigint::BigUint; +use num_traits::Num; + +pub(crate) struct Poseidon2 { + t: u32, + rounds_f: u32, + rounds_p: u32, + internal_matrix_diagonal: [FieldElement; 4], + round_constant: [[FieldElement; 4]; 64], +} + +impl Poseidon2 { + pub(crate) fn new() -> Self { + Poseidon2 { + t: 4, + rounds_f: 8, + rounds_p: 56, + internal_matrix_diagonal: [ + Poseidon2::field_from_hex( + "0x10dc6e9c006ea38b04b1e03b4bd9490c0d03f98929ca1d7fb56821fd19d3b6e7", + ), + Poseidon2::field_from_hex( + "0x0c28145b6a44df3e0149b3d0a30b3bb599df9756d4dd9b84a86b38cfb45a740b", + ), + Poseidon2::field_from_hex( + "0x00544b8338791518b2c7645a50392798b21f75bb60e3596170067d00141cac15", + ), + Poseidon2::field_from_hex( + "0x222c01175718386f2e2e82eb122789e352e105a3b8fa852613bc534433ee428b", + ), + ], + round_constant: [ + [ + Poseidon2::field_from_hex( + "0x19b849f69450b06848da1d39bd5e4a4302bb86744edc26238b0878e269ed23e5", + ), + Poseidon2::field_from_hex( + "0x265ddfe127dd51bd7239347b758f0a1320eb2cc7450acc1dad47f80c8dcf34d6", + ), + Poseidon2::field_from_hex( + "0x199750ec472f1809e0f66a545e1e51624108ac845015c2aa3dfc36bab497d8aa", + ), + Poseidon2::field_from_hex( + "0x157ff3fe65ac7208110f06a5f74302b14d743ea25067f0ffd032f787c7f1cdf8", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2e49c43c4569dd9c5fd35ac45fca33f10b15c590692f8beefe18f4896ac94902", + ), + Poseidon2::field_from_hex( + "0x0e35fb89981890520d4aef2b6d6506c3cb2f0b6973c24fa82731345ffa2d1f1e", + ), + Poseidon2::field_from_hex( + "0x251ad47cb15c4f1105f109ae5e944f1ba9d9e7806d667ffec6fe723002e0b996", + ), + Poseidon2::field_from_hex( + "0x13da07dc64d428369873e97160234641f8beb56fdd05e5f3563fa39d9c22df4e", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c009b84e650e6d23dc00c7dccef7483a553939689d350cd46e7b89055fd4738", + ), + Poseidon2::field_from_hex( + "0x011f16b1c63a854f01992e3956f42d8b04eb650c6d535eb0203dec74befdca06", + ), + Poseidon2::field_from_hex( + "0x0ed69e5e383a688f209d9a561daa79612f3f78d0467ad45485df07093f367549", + ), + Poseidon2::field_from_hex( + "0x04dba94a7b0ce9e221acad41472b6bbe3aec507f5eb3d33f463672264c9f789b", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0a3f2637d840f3a16eb094271c9d237b6036757d4bb50bf7ce732ff1d4fa28e8", + ), + Poseidon2::field_from_hex( + "0x259a666f129eea198f8a1c502fdb38fa39b1f075569564b6e54a485d1182323f", + ), + Poseidon2::field_from_hex( + "0x28bf7459c9b2f4c6d8e7d06a4ee3a47f7745d4271038e5157a32fdf7ede0d6a1", + ), + Poseidon2::field_from_hex( +
"0x0a1ca941f057037526ea200f489be8d4c37c85bbcce6a2aeec91bd6941432447", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c6f8f958be0e93053d7fd4fc54512855535ed1539f051dcb43a26fd926361cf", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x123106a93cd17578d426e8128ac9d90aa9e8a00708e296e084dd57e69caaf811", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x26e1ba52ad9285d97dd3ab52f8e840085e8fa83ff1e8f1877b074867cd2dee75", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1cb55cad7bd133de18a64c5c47b9c97cbe4d8b7bf9e095864471537e6a4ae2c5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1dcd73e46acd8f8e0e2c7ce04bde7f6d2a53043d5060a41c7143f08e6e9055d0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x011003e32f6d9c66f5852f05474a4def0cda294a0eb4e9b9b12b9bb4512e5574", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2b1e809ac1d10ab29ad5f20d03a57dfebadfe5903f58bafed7c508dd2287ae8c", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2539de1785b735999fb4dac35ee17ed0ef995d05ab2fc5faeaa69ae87bcec0a5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + 
Poseidon2::field_from_hex( + "0x0c246c5a2ef8ee0126497f222b3e0a0ef4e1c3d41c86d46e43982cb11d77951d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x192089c4974f68e95408148f7c0632edbb09e6a6ad1a1c2f3f0305f5d03b527b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1eae0ad8ab68b2f06a0ee36eeb0d0c058529097d91096b756d8fdc2fb5a60d85", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x179190e5d0e22179e46f8282872abc88db6e2fdc0dee99e69768bd98c5d06bfb", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x29bb9e2c9076732576e9a81c7ac4b83214528f7db00f31bf6cafe794a9b3cd1c", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x225d394e42207599403efd0c2464a90d52652645882aac35b10e590e6e691e08", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x064760623c25c8cf753d238055b444532be13557451c087de09efd454b23fd59", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x10ba3a0e01df92e87f301c4b716d8a394d67f4bf42a75c10922910a78f6b5b87", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x0e070bf53f8451b24f9c6e96b0c2a801cb511bc0c242eb9d361b77693f21471c", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1b94cd61b051b04dd39755ff93821a73ccd6cb11d2491d8aa7f921014de252fb", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1d7cb39bafb8c744e148787a2e70230f9d4e917d5713bb050487b5aa7d74070b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2ec93189bd1ab4f69117d0fe980c80ff8785c2961829f701bb74ac1f303b17db", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2db366bfdd36d277a692bb825b86275beac404a19ae07a9082ea46bd83517926", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x062100eb485db06269655cf186a68532985275428450359adc99cec6960711b8", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0761d33c66614aaa570e7f1e8244ca1120243f92fa59e4f900c567bf41f5a59b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x20fc411a114d13992c2705aa034e3f315d78608a0f7de4ccf7a72e494855ad0d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x25b5c004a4bdfcb5add9ec4e9ab219ba102c67e8b3effb5fc3a30f317250bc5a", + ), + 
Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x23b1822d278ed632a494e58f6df6f5ed038b186d8474155ad87e7dff62b37f4b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x22734b4c5c3f9493606c4ba9012499bf0f14d13bfcfcccaa16102a29cc2f69e0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x26c0c8fe09eb30b7e27a74dc33492347e5bdff409aa3610254413d3fad795ce5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x070dd0ccb6bd7bbae88eac03fa1fbb26196be3083a809829bbd626df348ccad9", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x12b6595bdb329b6fb043ba78bb28c3bec2c0a6de46d8c5ad6067c4ebfd4250da", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x248d97d7f76283d63bec30e7a5876c11c06fca9b275c671c5e33d95bb7e8d729", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1a306d439d463b0816fc6fd64cc939318b45eb759ddde4aa106d15d9bd9baaaa", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x28a8f8372e3c38daced7c00421cb4621f4f1b54ddc27821b0d62d3d6ec7c56cf", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0094975717f9a8a8bb35152f24d43294071ce320c829f388bc852183e1e2ce7e", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x04d5ee4c3aa78f7d80fde60d716480d3593f74d4f653ae83f4103246db2e8d65", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2a6cf5e9aa03d4336349ad6fb8ed2269c7bef54b8822cc76d08495c12efde187", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2304d31eaab960ba9274da43e19ddeb7f792180808fd6e43baae48d7efcba3f3", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x03fd9ac865a4b2a6d5e7009785817249bff08a7e0726fcb4e1c11d39d199f0b0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x00b7258ded52bbda2248404d55ee5044798afc3a209193073f7954d4d63b0b64", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x159f81ada0771799ec38fca2d4bf65ebb13d3a74f3298db36272c5ca65e92d9a", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1ef90e67437fbc8550237a75bc28e3bb9000130ea25f0c5471e144cf4264431f", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + 
Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1e65f838515e5ff0196b49aa41a2d2568df739bc176b08ec95a79ed82932e30d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2b1b045def3a166cec6ce768d079ba74b18c844e570e1f826575c1068c94c33f", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0832e5753ceb0ff6402543b1109229c165dc2d73bef715e3f1c6e07c168bb173", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x02f614e9cedfb3dc6b762ae0a37d41bab1b841c2e8b6451bc5a8e3c390b6ad16", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0e2427d38bd46a60dd640b8e362cad967370ebb777bedff40f6a0be27e7ed705", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0493630b7c670b6deb7c84d414e7ce79049f0ec098c3c7c50768bbe29214a53a", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x22ead100e8e482674decdab17066c5a26bb1515355d5461a3dc06cc85327cea9", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x25b3e56e655b42cdaae2626ed2554d48583f1ae35626d04de5084e0b6d2a6f16", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1e32752ada8836ef5837a6cde8ff13dbb599c336349e4c584b4fdc0a0cf6f9d0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2fa2a871c15a387cc50f68f6f3c3455b23c00995f05078f672a9864074d412e5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2f569b8a9a4424c9278e1db7311e889f54ccbf10661bab7fcd18e7c7a7d83505", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x044cb455110a8fdd531ade530234c518a7df93f7332ffd2144165374b246b43d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x227808de93906d5d420246157f2e42b191fe8c90adfe118178ddc723a5319025", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x02fcca2934e046bc623adead873579865d03781ae090ad4a8579d2e7a6800355", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0ef915f0ac120b876abccceb344a1d36bad3f3c5ab91a8ddcbec2e060d8befac", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1797130f4b7a3e1777eb757bc6f287f6ab0fb85f6be63b09f3b16ef2b1405d38", + ), + Poseidon2::field_from_hex( + "0x0a76225dc04170ae3306c85abab59e608c7f497c20156d4d36c668555decc6e5", + ), + Poseidon2::field_from_hex( + "0x1fffb9ec1992d66ba1e77a7b93209af6f8fa76d48acb664796174b5326a31a5c", + ), + 
Poseidon2::field_from_hex( + "0x25721c4fc15a3f2853b57c338fa538d85f8fbba6c6b9c6090611889b797b9c5f", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c817fd42d5f7a41215e3d07ba197216adb4c3790705da95eb63b982bfcaf75a", + ), + Poseidon2::field_from_hex( + "0x13abe3f5239915d39f7e13c2c24970b6df8cf86ce00a22002bc15866e52b5a96", + ), + Poseidon2::field_from_hex( + "0x2106feea546224ea12ef7f39987a46c85c1bc3dc29bdbd7a92cd60acb4d391ce", + ), + Poseidon2::field_from_hex( + "0x21ca859468a746b6aaa79474a37dab49f1ca5a28c748bc7157e1b3345bb0f959", + ), + ], + [ + Poseidon2::field_from_hex( + "0x05ccd6255c1e6f0c5cf1f0df934194c62911d14d0321662a8f1a48999e34185b", + ), + Poseidon2::field_from_hex( + "0x0f0e34a64b70a626e464d846674c4c8816c4fb267fe44fe6ea28678cb09490a4", + ), + Poseidon2::field_from_hex( + "0x0558531a4e25470c6157794ca36d0e9647dbfcfe350d64838f5b1a8a2de0d4bf", + ), + Poseidon2::field_from_hex( + "0x09d3dca9173ed2faceea125157683d18924cadad3f655a60b72f5864961f1455", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0328cbd54e8c0913493f866ed03d218bf23f92d68aaec48617d4c722e5bd4335", + ), + Poseidon2::field_from_hex( + "0x2bf07216e2aff0a223a487b1a7094e07e79e7bcc9798c648ee3347dd5329d34b", + ), + Poseidon2::field_from_hex( + "0x1daf345a58006b736499c583cb76c316d6f78ed6a6dffc82111e11a63fe412df", + ), + Poseidon2::field_from_hex( + "0x176563472456aaa746b694c60e1823611ef39039b2edc7ff391e6f2293d2c404", + ), + ], + ], + } + } + fn field_from_hex(hex: &str) -> FieldElement { + let bigint = BigUint::from_str_radix(hex.strip_prefix("0x").unwrap(), 16).unwrap(); + FieldElement::from_be_bytes_reduce(&bigint.to_bytes_be()) + } + + fn single_box(x: FieldElement) -> FieldElement { + let s = x * x; + s * s * x + } + + fn s_box(input: &mut [FieldElement]) { + for i in input { + *i = Self::single_box(*i); + } + } + + fn add_round_constants(&self, state: &mut [FieldElement], round: usize) { + for (state_element, constant_element) in state.iter_mut().zip(self.round_constant[round]) { + *state_element += constant_element; + } + } + + /// Algorithm is taken directly from the Poseidon2 implementation in Barretenberg crypto module. + fn matrix_multiplication_4x4(input: &mut [FieldElement]) { + assert!(input.len() == 4); + let t0 = input[0] + input[1]; // A + B + let t1 = input[2] + input[3]; // C + D + let mut t2 = input[1] + input[1]; // 2B + t2 += t1; // 2B + C + D + let mut t3 = input[3] + input[3]; // 2D + t3 += t0; // 2D + A + B + let mut t4 = t1 + t1; + t4 += t4; + t4 += t3; // A + B + 4C + 6D + let mut t5 = t0 + t0; + t5 += t5; + t5 += t2; // 4A + 6B + C + D + let t6 = t3 + t5; // 5A + 7B + C + 3D + let t7 = t2 + t4; // A + 3B + 5C + 7D + input[0] = t6; + input[1] = t5; + input[2] = t7; + input[3] = t4; + } + + fn internal_m_multiplication(&self, input: &mut [FieldElement]) { + let mut sum = FieldElement::zero(); + for i in input.iter() { + sum += *i; + } + for (index, i) in input.iter_mut().enumerate() { + *i = *i * self.internal_matrix_diagonal[index]; + *i += sum; + } + } + + pub(crate) fn permutation( + &self, + inputs: &[FieldElement], + len: u32, + ) -> Result, BlackBoxResolutionError> { + if len as usize != inputs.len() { + return Err(BlackBoxResolutionError::Failed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!( + "the number of inputs does not match specified length. 
{} > {}", + inputs.len(), + len + ), + )); + } + if len != self.t { + return Err(BlackBoxResolutionError::Failed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!("Expected {} values but encountered {}", self.t, len), + )); + } + // Read witness assignments + let mut state = [FieldElement::zero(); 4]; + for (index, input) in inputs.iter().enumerate() { + state[index] = *input; + } + // Apply 1st linear layer + Self::matrix_multiplication_4x4(&mut state); + + // First set of external rounds + let rf_first = self.rounds_f / 2; + for r in 0..rf_first { + self.add_round_constants(&mut state, r as usize); + Self::s_box(&mut state); + Self::matrix_multiplication_4x4(&mut state); + } + // Internal rounds + let p_end = rf_first + self.rounds_p; + for r in rf_first..p_end { + state[0] += self.round_constant[r as usize][0]; + state[0] = Self::single_box(state[0]); + self.internal_m_multiplication(&mut state); + } + + // Remaining external rounds + let num_rounds = self.rounds_f + self.rounds_p; + for i in p_end..num_rounds { + self.add_round_constants(&mut state, i as usize); + Self::s_box(&mut state); + Self::matrix_multiplication_4x4(&mut state); + } + Ok(state.into()) + } +} diff --git a/noir/acvm-repo/brillig_vm/src/arithmetic.rs b/noir/acvm-repo/brillig_vm/src/arithmetic.rs index 263a733e3c4..9d7b6fe8f02 100644 --- a/noir/acvm-repo/brillig_vm/src/arithmetic.rs +++ b/noir/acvm-repo/brillig_vm/src/arithmetic.rs @@ -36,18 +36,20 @@ pub(crate) fn evaluate_binary_bigint_op( BinaryIntOp::UnsignedDiv => { let b_mod = b % bit_modulo; if b_mod.is_zero() { - return Err("Division by zero".to_owned()); + BigUint::zero() + } else { + (a % bit_modulo) / b_mod } - (a % bit_modulo) / b_mod } // Perform signed division by first converting a and b to signed integers and then back to unsigned after the operation. BinaryIntOp::SignedDiv => { let b_signed = to_big_signed(b, bit_size); if b_signed.is_zero() { - return Err("Division by zero".to_owned()); + BigUint::zero() + } else { + let signed_div = to_big_signed(a, bit_size) / b_signed; + to_big_unsigned(signed_div, bit_size) } - let signed_div = to_big_signed(a, bit_size) / b_signed; - to_big_unsigned(signed_div, bit_size) } // Perform a == operation, returning 0 or 1 BinaryIntOp::Equals => { diff --git a/noir/acvm-repo/brillig_vm/src/black_box.rs b/noir/acvm-repo/brillig_vm/src/black_box.rs index 5b2680465ab..73b57b907f3 100644 --- a/noir/acvm-repo/brillig_vm/src/black_box.rs +++ b/noir/acvm-repo/brillig_vm/src/black_box.rs @@ -184,7 +184,18 @@ pub(crate) fn evaluate_black_box( BlackBoxOp::BigIntDiv { .. } => todo!(), BlackBoxOp::BigIntFromLeBytes { .. } => todo!(), BlackBoxOp::BigIntToLeBytes { .. } => todo!(), - BlackBoxOp::Poseidon2Permutation { .. 
} => todo!(), + BlackBoxOp::Poseidon2Permutation { message, output, len } => { + let input = read_heap_vector(memory, message); + let input: Vec<FieldElement> = input.iter().map(|x| x.to_field()).collect(); + let len = memory.read(*len).to_u128() as u32; + let result = solver.poseidon2_permutation(&input, len)?; + let mut values = Vec::new(); + for i in result { + values.push(Value::from(i)); + } + memory.write_slice(memory.read_ref(output.pointer), &values); + Ok(()) + } BlackBoxOp::Sha256Compression { input, hash_values, output } => { let mut message = [0; 16]; let inputs = read_heap_vector(memory, input);
diff --git a/noir/acvm-repo/brillig_vm/src/lib.rs b/noir/acvm-repo/brillig_vm/src/lib.rs index 13accbeacb3..c7bf014f068 100644 --- a/noir/acvm-repo/brillig_vm/src/lib.rs +++ b/noir/acvm-repo/brillig_vm/src/lib.rs @@ -568,6 +568,13 @@ impl BlackBoxFunctionSolver for DummyBlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Ok((5_u128.into(), 6_u128.into())) } + fn poseidon2_permutation( + &self, + _input: &[FieldElement], + len: u32, + ) -> Result<Vec<FieldElement>, BlackBoxResolutionError> { + Ok(vec![0_u128.into(); len as usize]) + } } #[cfg(test)]
diff --git a/noir/aztec_macros/src/lib.rs b/noir/aztec_macros/src/lib.rs index 07dca820fd9..0ccc421d3bc 100644 --- a/noir/aztec_macros/src/lib.rs +++ b/noir/aztec_macros/src/lib.rs @@ -711,7 +711,7 @@ fn collect_traits(context: &HirContext) -> Vec<TraitId> { crates .flat_map(|crate_id| context.def_map(&crate_id).map(|def_map| def_map.modules())) .flatten() - .flat_map(|(_, module)| { + .flat_map(|module| { module.type_definitions().filter_map(|typ| { if let ModuleDefId::TraitId(struct_id) = typ { Some(struct_id) @@ -777,11 +777,11 @@ fn transform_event( HirExpression::Literal(HirLiteral::Str(signature)) if signature == SIGNATURE_PLACEHOLDER => { - let selector_literal_id = first_arg_id; + let selector_literal_id = *first_arg_id; let structure = interner.get_struct(struct_id); let signature = event_signature(&structure.borrow()); - interner.update_expression(*selector_literal_id, |expr| { + interner.update_expression(selector_literal_id, |expr| { *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); }); @@ -823,7 +823,7 @@ fn get_serialized_length( ) -> Result<u64, AztecMacroError> { let (struct_name, maybe_stored_in_state) = match typ { Type::Struct(struct_type, generics) => { - Ok((struct_type.borrow().name.0.contents.clone(), generics.get(0))) + Ok((struct_type.borrow().name.0.contents.clone(), generics.first())) } _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some("State storage variable must be a struct".to_string()), @@ -847,7 +847,7 @@ fn get_serialized_length( let serialized_trait_impl_kind = traits .iter() - .filter_map(|&trait_id| { + .find_map(|&trait_id| { let r#trait = interner.get_trait(trait_id); if r#trait.borrow().name.0.contents == "Serialize" && r#trait.borrow().generics.len() == 1 @@ -860,7 +860,6 @@ fn get_serialized_length( None } }) - .next() .ok_or(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some("Stored data must implement Serialize trait".to_string()), })?; @@ -873,7 +872,7 @@ fn get_serialized_length( let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id); let serialized_trait_impl = serialized_trait_impl_shared.borrow(); - match serialized_trait_impl.trait_generics.get(0).unwrap() { + match serialized_trait_impl.trait_generics.first().unwrap() { Type::Constant(value) => Ok(*value), _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), } } @@ -960,9 +959,7 @@ fn assign_storage_slots( let slot_arg_expression = interner.expression(&new_call_expression.arguments[1]); let current_storage_slot = match slot_arg_expression { - HirExpression::Literal(HirLiteral::Integer(slot, _)) => { - Ok(slot.borrow().to_u128()) - } + HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()), _ => Err(( AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some(
diff --git a/noir/compiler/noirc_driver/Cargo.toml b/noir/compiler/noirc_driver/Cargo.toml index d9b240101d8..681976735f3 100644 --- a/noir/compiler/noirc_driver/Cargo.toml +++ b/noir/compiler/noirc_driver/Cargo.toml @@ -23,6 +23,7 @@ serde.workspace = true fxhash.workspace = true rust-embed.workspace = true tracing.workspace = true +thiserror.workspace = true aztec_macros = { path = "../../aztec_macros" } noirc_macros = { path = "../../noirc_macros" }
diff --git a/noir/compiler/noirc_driver/src/lib.rs b/noir/compiler/noirc_driver/src/lib.rs index 8b0fc5dc97a..11f53cdb749 100644 --- a/noir/compiler/noirc_driver/src/lib.rs +++ b/noir/compiler/noirc_driver/src/lib.rs @@ -16,9 +16,10 @@ use noirc_frontend::graph::{CrateId, CrateName}; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; use noirc_frontend::macros_api::MacroProcessor; -use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug}; +use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug, MonomorphizationError}; use noirc_frontend::node_interner::FuncId; use std::path::Path; +use thiserror::Error; use tracing::info; mod abi_gen; @@ -107,6 +108,24 @@ fn parse_expression_width(input: &str) -> Result<ExpressionWidth, std::io::Error> { +#[derive(Debug, Error)] +pub enum CompileError { + #[error(transparent)] + RuntimeError(#[from] RuntimeError), + + #[error(transparent)] + MonomorphizationError(#[from] MonomorphizationError), +} + +impl From<CompileError> for FileDiagnostic { + fn from(error: CompileError) -> FileDiagnostic { + match error { + CompileError::RuntimeError(err) => err.into(), + CompileError::MonomorphizationError(err) => err.into(), + } + } +} + /// Helper type used to signify where only warnings are expected in file diagnostics pub type Warnings = Vec<FileDiagnostic>; @@ -436,11 +455,11 @@ pub fn compile_no_check( main_function: FuncId, cached_program: Option<CompiledProgram>, force_compile: bool, -) -> Result<CompiledProgram, RuntimeError> { +) -> Result<CompiledProgram, CompileError> { let program = if options.instrument_debug { - monomorphize_debug(main_function, &mut context.def_interner, &context.debug_instrumenter) + monomorphize_debug(main_function, &mut context.def_interner, &context.debug_instrumenter)? } else { - monomorphize(main_function, &mut context.def_interner) + monomorphize(main_function, &mut context.def_interner)?
}; let hash = fxhash::hash64(&program); diff --git a/noir/compiler/noirc_errors/src/debug_info.rs b/noir/compiler/noirc_errors/src/debug_info.rs index 25722aac57f..67ec851d46d 100644 --- a/noir/compiler/noirc_errors/src/debug_info.rs +++ b/noir/compiler/noirc_errors/src/debug_info.rs @@ -90,7 +90,7 @@ impl DebugInfo { for (opcode_location, locations) in self.locations.iter() { for location in locations.iter() { - let opcodes = accumulator.entry(*location).or_insert(Vec::new()); + let opcodes = accumulator.entry(*location).or_default(); opcodes.push(opcode_location); } } diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index dfe23b45034..d542240a40c 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -56,12 +56,12 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Keccak256 => { if let ( - [message, BrilligVariable::Simple(array_size)], + [message, BrilligVariable::SingleAddr(array_size)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let mut message_vector = convert_array_or_vector(brillig_context, message, bb_func); - message_vector.size = *array_size; + message_vector.size = array_size.address; brillig_context.black_box_op_instruction(BlackBoxOp::Keccak256 { message: message_vector.to_heap_vector(), @@ -88,7 +88,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::EcdsaSecp256k1 => { if let ( [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash_vector = @@ -98,7 +98,7 @@ pub(crate) fn convert_black_box_call( public_key_x: public_key_x.to_heap_array(), public_key_y: public_key_y.to_heap_array(), signature: signature.to_heap_array(), - result: *result_register, + result: result_register.address, }); } else { unreachable!( @@ -109,7 +109,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::EcdsaSecp256r1 => { if let ( [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash_vector = @@ -119,7 +119,7 @@ pub(crate) fn convert_black_box_call( public_key_x: public_key_x.to_heap_array(), public_key_y: public_key_y.to_heap_array(), signature: signature.to_heap_array(), - result: *result_register, + result: result_register.address, }); } else { unreachable!( @@ -130,14 +130,14 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::PedersenCommitment => { if let ( - [message, BrilligVariable::Simple(domain_separator)], + [message, BrilligVariable::SingleAddr(domain_separator)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenCommitment { inputs: message_vector.to_heap_vector(), - domain_separator: *domain_separator, + domain_separator: domain_separator.address, output: result_array.to_heap_array(), }); } else { @@ -146,15 +146,15 @@ pub(crate) 
fn convert_black_box_call( } BlackBoxFunc::PedersenHash => { if let ( - [message, BrilligVariable::Simple(domain_separator)], - [BrilligVariable::Simple(result)], + [message, BrilligVariable::SingleAddr(domain_separator)], + [BrilligVariable::SingleAddr(result)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenHash { inputs: message_vector.to_heap_vector(), - domain_separator: *domain_separator, - output: *result, + domain_separator: domain_separator.address, + output: result.address, }); } else { unreachable!("ICE: Pedersen hash expects one array argument, a register for the domain separator, and one register result") @@ -162,18 +162,18 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::SchnorrVerify => { if let ( - [BrilligVariable::Simple(public_key_x), BrilligVariable::Simple(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(public_key_x), BrilligVariable::SingleAddr(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash = convert_array_or_vector(brillig_context, message, bb_func); let signature = brillig_context.array_to_vector(signature); brillig_context.black_box_op_instruction(BlackBoxOp::SchnorrVerify { - public_key_x: *public_key_x, - public_key_y: *public_key_y, + public_key_x: public_key_x.address, + public_key_y: public_key_y.address, message: message_hash.to_heap_vector(), signature: signature.to_heap_vector(), - result: *result_register, + result: result_register.address, }); } else { unreachable!("ICE: Schnorr verify expects two registers for the public key, an array for signature, an array for the message hash and one result register") @@ -181,13 +181,13 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::FixedBaseScalarMul => { if let ( - [BrilligVariable::Simple(low), BrilligVariable::Simple(high)], + [BrilligVariable::SingleAddr(low), BrilligVariable::SingleAddr(high)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::FixedBaseScalarMul { - low: *low, - high: *high, + low: low.address, + high: high.address, result: result_array.to_heap_array(), }); } else { @@ -198,15 +198,15 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::EmbeddedCurveAdd => { if let ( - [BrilligVariable::Simple(input1_x), BrilligVariable::Simple(input1_y), BrilligVariable::Simple(input2_x), BrilligVariable::Simple(input2_y)], + [BrilligVariable::SingleAddr(input1_x), BrilligVariable::SingleAddr(input1_y), BrilligVariable::SingleAddr(input2_x), BrilligVariable::SingleAddr(input2_y)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::EmbeddedCurveAdd { - input1_x: *input1_x, - input1_y: *input1_y, - input2_x: *input2_x, - input2_y: *input2_y, + input1_x: input1_x.address, + input1_y: input1_y.address, + input2_x: input2_x.address, + input2_y: input2_y.address, result: result_array.to_heap_array(), }); } else { @@ -229,14 +229,14 @@ pub(crate) fn convert_black_box_call( ), BlackBoxFunc::BigIntAdd => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), 
BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntAdd { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -246,14 +246,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntSub => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntSub { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -263,14 +263,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntMul => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntMul { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -280,14 +280,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntDiv => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntDiv { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -296,7 +296,7 @@ pub(crate) fn convert_black_box_call( } } BlackBoxFunc::BigIntFromLeBytes => { - if let ([inputs, modulus], [BrilligVariable::Simple(output)]) = + if let ([inputs, modulus], [BrilligVariable::SingleAddr(output)]) = (function_arguments, function_results) { let inputs_vector = convert_array_or_vector(brillig_context, inputs, bb_func); @@ -304,7 +304,7 @@ pub(crate) fn convert_black_box_call( brillig_context.black_box_op_instruction(BlackBoxOp::BigIntFromLeBytes { inputs: inputs_vector.to_heap_vector(), modulus: modulus_vector.to_heap_vector(), - output: *output, + output: output.address, }); } else { unreachable!( @@ -314,12 +314,12 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntToLeBytes => { if let ( - [BrilligVariable::Simple(input)], + [BrilligVariable::SingleAddr(input)], [BrilligVariable::BrilligVector(result_vector)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntToLeBytes { - input: *input, + input: input.address, output: result_vector.to_heap_vector(), }); } else { @@ -330,7 +330,7 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Poseidon2Permutation => { if let ( - [message, BrilligVariable::Simple(state_len)], + [message, BrilligVariable::SingleAddr(state_len)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { @@ -338,7 +338,7 @@ pub(crate) fn convert_black_box_call( brillig_context.black_box_op_instruction(BlackBoxOp::Poseidon2Permutation { message: 
message_vector.to_heap_vector(), output: result_array.to_heap_array(), - len: *state_len, + len: state_len.address, }); } else { unreachable!("ICE: Poseidon2Permutation expects one array argument, a length and one array result") diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 7697d7e65fa..f01f60252f6 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1,5 +1,5 @@ use crate::brillig::brillig_ir::brillig_variable::{ - type_to_heap_value_type, BrilligArray, BrilligVariable, BrilligVector, + type_to_heap_value_type, BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable, }; use crate::brillig::brillig_ir::{ BrilligBinaryOp, BrilligContext, BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, @@ -117,9 +117,9 @@ impl<'block> BrilligBlock<'block> { ) { match terminator_instruction { TerminatorInstruction::JmpIf { condition, then_destination, else_destination } => { - let condition = self.convert_ssa_register_value(*condition, dfg); + let condition = self.convert_ssa_single_addr_value(*condition, dfg); self.brillig_context.jump_if_instruction( - condition, + condition.address, self.create_block_label_for_current_function(*then_destination), ); self.brillig_context.jump_instruction( @@ -164,10 +164,10 @@ impl<'block> BrilligBlock<'block> { fn pass_variable(&mut self, source: BrilligVariable, destination: BrilligVariable) { match (source, destination) { ( - BrilligVariable::Simple(source_register), - BrilligVariable::Simple(destination_register), + BrilligVariable::SingleAddr(source_var), + BrilligVariable::SingleAddr(destination_var), ) => { - self.brillig_context.mov_instruction(destination_register, source_register); + self.brillig_context.mov_instruction(destination_var.address, source_var.address); } ( BrilligVariable::BrilligArray(BrilligArray { @@ -241,16 +241,19 @@ impl<'block> BrilligBlock<'block> { match instruction { Instruction::Binary(binary) => { - let result_register = self.variables.define_register_variable( + let result_var = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, ); - self.convert_ssa_binary(binary, dfg, result_register); + self.convert_ssa_binary(binary, dfg, result_var); } Instruction::Constrain(lhs, rhs, assert_message) => { - let condition = self.brillig_context.allocate_register(); + let condition = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: 1, + }; self.convert_ssa_binary( &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, @@ -266,7 +269,7 @@ impl<'block> BrilligBlock<'block> { unreachable!("expected a call instruction") }; - let Value::Function(func_id) = &dfg[*func] else { + let Value::Function(func_id) = &dfg[*func] else { unreachable!("expected a function value") }; @@ -281,12 +284,12 @@ impl<'block> BrilligBlock<'block> { None }; - self.brillig_context.constrain_instruction(condition, assert_message); - self.brillig_context.deallocate_register(condition); + self.brillig_context.constrain_instruction(condition.address, assert_message); + self.brillig_context.deallocate_register(condition.address); } Instruction::Allocate => { let result_value = dfg.instruction_results(instruction_id)[0]; - let address_register = self.variables.define_register_variable( + let address_register = 
self.variables.define_single_addr_variable( self.function_context, self.brillig_context, result_value, @@ -296,15 +299,16 @@ impl<'block> BrilligBlock<'block> { Type::Reference(element) => match *element { Type::Array(..) => { self.brillig_context - .allocate_array_reference_instruction(address_register); + .allocate_array_reference_instruction(address_register.address); } Type::Slice(..) => { self.brillig_context - .allocate_vector_reference_instruction(address_register); + .allocate_vector_reference_instruction(address_register.address); } _ => { - self.brillig_context - .allocate_simple_reference_instruction(address_register); + self.brillig_context.allocate_single_addr_reference_instruction( + address_register.address, + ); } }, _ => { @@ -313,10 +317,11 @@ impl<'block> BrilligBlock<'block> { } } Instruction::Store { address, value } => { - let address_register = self.convert_ssa_register_value(*address, dfg); + let address_var = self.convert_ssa_single_addr_value(*address, dfg); let source_variable = self.convert_ssa_value(*value, dfg); - self.brillig_context.store_variable_instruction(address_register, source_variable); + self.brillig_context + .store_variable_instruction(address_var.address, source_variable); } Instruction::Load { address } => { let target_variable = self.variables.define_variable( @@ -326,34 +331,34 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let address_register = self.convert_ssa_register_value(*address, dfg); + let address_variable = self.convert_ssa_single_addr_value(*address, dfg); - self.brillig_context.load_variable_instruction(target_variable, address_register); + self.brillig_context + .load_variable_instruction(target_variable, address_variable.address); } Instruction::Not(value) => { - let condition_register = self.convert_ssa_register_value(*value, dfg); - let result_register = self.variables.define_register_variable( + let condition_register = self.convert_ssa_single_addr_value(*value, dfg); + let result_register = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, ); - let bit_size = get_bit_size_from_ssa_type(&dfg.type_of_value(*value)); - self.brillig_context.not_instruction(condition_register, bit_size, result_register); + self.brillig_context.not_instruction(condition_register, result_register); } Instruction::Call { func, arguments } => match &dfg[*func] { Value::ForeignFunction(func_name) => { let result_ids = dfg.instruction_results(instruction_id); let input_registers = vecmap(arguments, |value_id| { - self.convert_ssa_value(*value_id, dfg).to_register_or_memory() + self.convert_ssa_value(*value_id, dfg).to_value_or_array() }); let input_value_types = vecmap(arguments, |value_id| { let value_type = dfg.type_of_value(*value_id); type_to_heap_value_type(&value_type) }); let output_registers = vecmap(result_ids, |value_id| { - self.allocate_external_call_result(*value_id, dfg).to_register_or_memory() + self.allocate_external_call_result(*value_id, dfg).to_value_or_array() }); let output_value_types = vecmap(result_ids, |value_id| { let value_type = dfg.type_of_value(*value_id); @@ -431,7 +436,7 @@ impl<'block> BrilligBlock<'block> { ); } Value::Intrinsic(Intrinsic::ArrayLen) => { - let result_register = self.variables.define_register_variable( + let result_variable = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], @@ -443,10 +448,11 @@ impl<'block> BrilligBlock<'block> { 
// or an array in the case of an array. if let Type::Numeric(_) = dfg.type_of_value(param_id) { let len_variable = self.convert_ssa_value(arguments[0], dfg); - let len_register_index = len_variable.extract_register(); - self.brillig_context.mov_instruction(result_register, len_register_index); + let length = len_variable.extract_single_addr(); + self.brillig_context + .mov_instruction(result_variable.address, length.address); } else { - self.convert_ssa_array_len(arguments[0], result_register, dfg); + self.convert_ssa_array_len(arguments[0], result_variable.address, dfg); } } Value::Intrinsic( @@ -465,13 +471,13 @@ impl<'block> BrilligBlock<'block> { ); } Value::Intrinsic(Intrinsic::ToRadix(endianness)) => { - let source = self.convert_ssa_register_value(arguments[0], dfg); - let radix = self.convert_ssa_register_value(arguments[1], dfg); - let limb_count = self.convert_ssa_register_value(arguments[2], dfg); + let source = self.convert_ssa_single_addr_value(arguments[0], dfg); + let radix = self.convert_ssa_single_addr_value(arguments[1], dfg); + let limb_count = self.convert_ssa_single_addr_value(arguments[2], dfg); let results = dfg.instruction_results(instruction_id); - let target_len = self.variables.define_register_variable( + let target_len = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, results[0], @@ -489,19 +495,19 @@ impl<'block> BrilligBlock<'block> { .extract_vector(); // Update the user-facing slice length - self.brillig_context.mov_instruction(target_len, limb_count); + self.brillig_context.mov_instruction(target_len.address, limb_count.address); self.brillig_context.radix_instruction( - source, + source.address, target_vector, - radix, - limb_count, + radix.address, + limb_count.address, matches!(endianness, Endian::Big), ); } Value::Intrinsic(Intrinsic::ToBits(endianness)) => { - let source = self.convert_ssa_register_value(arguments[0], dfg); - let limb_count = self.convert_ssa_register_value(arguments[1], dfg); + let source = self.convert_ssa_single_addr_value(arguments[0], dfg); + let limb_count = self.convert_ssa_single_addr_value(arguments[1], dfg); let results = dfg.instruction_results(instruction_id); @@ -511,7 +517,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ); - let target_len = target_len_variable.extract_register(); + let target_len = target_len_variable.extract_single_addr(); let target_vector = match self.variables.define_variable( self.function_context, @@ -523,7 +529,7 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.array_to_vector(&array) } BrilligVariable::BrilligVector(vector) => vector, - BrilligVariable::Simple(..) => unreachable!("ICE: ToBits on non-array"), + BrilligVariable::SingleAddr(..) => unreachable!("ICE: ToBits on non-array"), }; let radix = self @@ -531,13 +537,13 @@ impl<'block> BrilligBlock<'block> { .make_constant(2_usize.into(), FieldElement::max_num_bits()); // Update the user-facing slice length - self.brillig_context.mov_instruction(target_len, limb_count); + self.brillig_context.mov_instruction(target_len.address, limb_count.address); self.brillig_context.radix_instruction( - source, + source.address, target_vector, radix, - limb_count, + limb_count.address, matches!(endianness, Endian::Big), ); @@ -549,29 +555,29 @@ impl<'block> BrilligBlock<'block> { }, Instruction::Truncate { value, bit_size, .. 
} => { let result_ids = dfg.instruction_results(instruction_id); - let destination_register = self.variables.define_register_variable( + let destination_register = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, result_ids[0], dfg, ); - let source_register = self.convert_ssa_register_value(*value, dfg); + let source_register = self.convert_ssa_single_addr_value(*value, dfg); self.brillig_context.truncate_instruction( destination_register, source_register, *bit_size, ); } - Instruction::Cast(value, typ) => { + Instruction::Cast(value, _) => { let result_ids = dfg.instruction_results(instruction_id); - let destination_register = self.variables.define_register_variable( + let destination_variable = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, result_ids[0], dfg, ); - let source_register = self.convert_ssa_register_value(*value, dfg); - self.convert_cast(destination_register, source_register, typ); + let source_variable = self.convert_ssa_single_addr_value(*value, dfg); + self.convert_cast(destination_variable, source_variable); } Instruction::ArrayGet { array, index } => { let result_ids = dfg.instruction_results(instruction_id); @@ -589,17 +595,17 @@ impl<'block> BrilligBlock<'block> { _ => unreachable!("ICE: array get on non-array"), }; - let index_register = self.convert_ssa_register_value(*index, dfg); - self.validate_array_index(array_variable, index_register); + let index_variable = self.convert_ssa_single_addr_value(*index, dfg); + self.validate_array_index(array_variable, index_variable); self.retrieve_variable_from_array( array_pointer, - index_register, + index_variable.address, destination_variable, ); } Instruction::ArraySet { array, index, value, .. } => { let source_variable = self.convert_ssa_value(*array, dfg); - let index_register = self.convert_ssa_register_value(*index, dfg); + let index_register = self.convert_ssa_single_addr_value(*index, dfg); let value_variable = self.convert_ssa_value(*value, dfg); let result_ids = dfg.instruction_results(instruction_id); @@ -614,15 +620,18 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_array_set( source_variable, destination_variable, - index_register, + index_register.address, value_variable, ); } Instruction::RangeCheck { value, max_bit_size, assert_message } => { - let value = self.convert_ssa_register_value(*value, dfg); + let value = self.convert_ssa_single_addr_value(*value, dfg); // Cast original value to field - let left = self.brillig_context.allocate_register(); - self.convert_cast(left, value, &Type::field()); + let left = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: FieldElement::max_num_bits(), + }; + self.convert_cast(left, value); // Create a field constant with the max let max = BigUint::from(2_u128).pow(*max_bit_size) - BigUint::from(1_u128); @@ -637,11 +646,16 @@ impl<'block> BrilligBlock<'block> { bit_size: FieldElement::max_num_bits(), }; let condition = self.brillig_context.allocate_register(); - self.brillig_context.binary_instruction(left, right, condition, brillig_binary_op); + self.brillig_context.binary_instruction( + left.address, + right, + condition, + brillig_binary_op, + ); self.brillig_context.constrain_instruction(condition, assert_message.clone()); self.brillig_context.deallocate_register(condition); - self.brillig_context.deallocate_register(left); + self.brillig_context.deallocate_register(left.address); self.brillig_context.deallocate_register(right); } 
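The RangeCheck lowering above casts the checked value to a full-width field variable and compares it against the constant 2^max_bit_size - 1 before constraining the comparison result. A minimal arithmetic sketch of the predicate being enforced, in plain Rust integers rather than the compiler's types:

```rust
// Sketch of the range-check predicate: after widening, the lowered code
// effectively asserts value <= 2^max_bit_size - 1.
fn range_check_holds(value: u128, max_bit_size: u32) -> bool {
    let max = (1u128 << max_bit_size) - 1; // the BigUint constant built in the hunk above
    value <= max
}

fn main() {
    assert!(range_check_holds(255, 8)); // 255 fits in 8 bits
    assert!(!range_check_holds(256, 8)); // 256 needs 9 bits, so the constraint would fail
}
```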
Instruction::IncrementRc { value } => { @@ -730,7 +744,7 @@ impl<'block> BrilligBlock<'block> { fn validate_array_index( &mut self, array_variable: BrilligVariable, - index_register: MemoryAddress, + index_register: SingleAddrVariable, ) { let (size_as_register, should_deallocate_size) = match array_variable { BrilligVariable::BrilligArray(BrilligArray { size, .. }) => { @@ -743,7 +757,7 @@ impl<'block> BrilligBlock<'block> { let condition = self.brillig_context.allocate_register(); self.brillig_context.memory_op( - index_register, + index_register.address, size_as_register, condition, BinaryIntOp::LessThan, @@ -765,8 +779,12 @@ impl<'block> BrilligBlock<'block> { destination_variable: BrilligVariable, ) { match destination_variable { - BrilligVariable::Simple(destination_register) => { - self.brillig_context.array_get(array_pointer, index_register, destination_register); + BrilligVariable::SingleAddr(destination_register) => { + self.brillig_context.array_get( + array_pointer, + index_register, + destination_register.address, + ); } BrilligVariable::BrilligArray(..) | BrilligVariable::BrilligVector(..) => { let reference = self.brillig_context.allocate_register(); @@ -868,8 +886,8 @@ impl<'block> BrilligBlock<'block> { value_variable: BrilligVariable, ) { match value_variable { - BrilligVariable::Simple(value_register) => { - ctx.array_set(destination_pointer, index_register, value_register); + BrilligVariable::SingleAddr(value_variable) => { + ctx.array_set(destination_pointer, index_register, value_variable.address); } BrilligVariable::BrilligArray(_) => { let reference: MemoryAddress = ctx.allocate_register(); @@ -924,7 +942,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -940,7 +958,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); self.slice_push_back_operation(target_vector, source_vector, &item_values); } @@ -951,7 +969,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -966,7 +984,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); self.slice_push_front_operation(target_vector, source_vector, &item_values); } @@ -977,7 +995,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -999,7 +1017,7 @@ impl<'block> BrilligBlock<'block> { ) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_pop_back_operation(target_vector, source_vector, &pop_variables); } @@ -1010,7 +1028,7 @@ impl<'block> BrilligBlock<'block> { results[element_size], dfg, ) { - 
BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1031,7 +1049,7 @@ impl<'block> BrilligBlock<'block> { ); let target_vector = target_variable.extract_vector(); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_pop_front_operation(target_vector, source_vector, &pop_variables); } @@ -1042,7 +1060,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1058,13 +1076,13 @@ impl<'block> BrilligBlock<'block> { // Remove if indexing in insert is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 - let user_index = self.convert_ssa_register_value(arguments[2], dfg); + let user_index = self.convert_ssa_single_addr_value(arguments[2], dfg); let converted_index = self.brillig_context.make_usize_constant(element_size.into()); self.brillig_context.memory_op( converted_index, - user_index, + user_index.address, converted_index, BinaryIntOp::Mul, ); @@ -1073,7 +1091,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); self.slice_insert_operation(target_vector, source_vector, converted_index, &items); self.brillig_context.deallocate_register(converted_index); @@ -1085,7 +1103,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1101,12 +1119,12 @@ impl<'block> BrilligBlock<'block> { // Remove if indexing in remove is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 - let user_index = self.convert_ssa_register_value(arguments[2], dfg); + let user_index = self.convert_ssa_single_addr_value(arguments[2], dfg); let converted_index = self.brillig_context.make_usize_constant(element_size.into()); self.brillig_context.memory_op( converted_index, - user_index, + user_index.address, converted_index, BinaryIntOp::Mul, ); @@ -1120,7 +1138,7 @@ impl<'block> BrilligBlock<'block> { ) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_remove_operation( target_vector, @@ -1152,18 +1170,18 @@ impl<'block> BrilligBlock<'block> { binary_op: BinaryIntOp, ) { let source_len_variable = self.convert_ssa_value(source_value, dfg); - let source_len = source_len_variable.extract_register(); + let source_len = source_len_variable.extract_single_addr(); - self.brillig_context.usize_op(source_len, target_len, binary_op, 1); + self.brillig_context.usize_op(source_len.address, target_len, binary_op, 1); } /// Converts an SSA cast to a sequence of Brillig opcodes. /// Casting is only necessary when shrinking the bit size of a numeric value. 
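The slice insert/remove hunks above scale the user-facing index by the flattened element size before touching vector memory. A short sketch of that index conversion (plain Rust, hypothetical helper name):

```rust
// A slice of compound items stores its fields flattened, so item index i
// begins at flat offset element_size * i (the BinaryIntOp::Mul emitted above).
fn flattened_index(user_index: usize, element_size: usize) -> usize {
    element_size * user_index
}

fn main() {
    // For a slice of (Field, Field) pairs, item 3 starts at flat offset 6.
    assert_eq!(flattened_index(3, 2), 6);
}
```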
- fn convert_cast(&mut self, destination: MemoryAddress, source: MemoryAddress, typ: &Type) { + fn convert_cast(&mut self, destination: SingleAddrVariable, source: SingleAddrVariable) { // We assume that `source` is a valid `target_type` as it's expected that a truncate instruction was emitted // to ensure this is the case. - self.brillig_context.cast_instruction(destination, source, get_bit_size_from_ssa_type(typ)); + self.brillig_context.cast_instruction(destination, source); } /// Converts the Binary instruction into a sequence of Brillig opcodes. @@ -1171,18 +1189,23 @@ impl<'block> BrilligBlock<'block> { &mut self, binary: &Binary, dfg: &DataFlowGraph, - result_register: MemoryAddress, + result_variable: SingleAddrVariable, ) { let binary_type = type_of_binary_operation(dfg[binary.lhs].get_type(), dfg[binary.rhs].get_type()); - let left = self.convert_ssa_register_value(binary.lhs, dfg); - let right = self.convert_ssa_register_value(binary.rhs, dfg); + let left = self.convert_ssa_single_addr_value(binary.lhs, dfg); + let right = self.convert_ssa_single_addr_value(binary.rhs, dfg); let brillig_binary_op = convert_ssa_binary_op_to_brillig_binary_op(binary.operator, &binary_type); - self.brillig_context.binary_instruction(left, right, result_register, brillig_binary_op); + self.brillig_context.binary_instruction( + left.address, + right.address, + result_variable.address, + brillig_binary_op, + ); } /// Converts an SSA `ValueId` into a `RegisterOrMemory`. Initializes if necessary. @@ -1204,10 +1227,10 @@ impl<'block> BrilligBlock<'block> { } else { let new_variable = self.variables.allocate_constant(self.brillig_context, value_id, dfg); - let register_index = new_variable.extract_register(); + let register_index = new_variable.extract_single_addr(); self.brillig_context.const_instruction( - register_index, + register_index.address, (*constant).into(), get_bit_size_from_ssa_type(typ), ); @@ -1273,10 +1296,10 @@ impl<'block> BrilligBlock<'block> { // value. let new_variable = self.variables.allocate_constant(self.brillig_context, value_id, dfg); - let register_index = new_variable.extract_register(); + let register_index = new_variable.extract_single_addr(); self.brillig_context.const_instruction( - register_index, + register_index.address, value_id.to_usize().into(), 32, ); @@ -1289,13 +1312,13 @@ impl<'block> BrilligBlock<'block> { } /// Converts an SSA `ValueId` into a `MemoryAddress`. Initializes if necessary. 
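Most of the churn in this file is one mechanical change: the Brillig IR's `Simple(MemoryAddress)` variable becomes `SingleAddr(SingleAddrVariable)`, pairing the register address with the bit size of the value held there, so instructions such as cast and not no longer take a separate bit-size argument. A standalone sketch of the shape of that change, using hypothetical mirror types rather than the compiler's own:

```rust
// Hypothetical mirrors of the IR types introduced by this diff.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct MemoryAddress(usize);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct SingleAddrVariable {
    address: MemoryAddress,
    bit_size: u32,
}

fn main() {
    // A constrain condition is a 1-bit value living in one register.
    let condition = SingleAddrVariable { address: MemoryAddress(7), bit_size: 1 };
    // Call sites that used to pass (address, bit_size) pairs now pass the
    // variable and read `.address` / `.bit_size` where needed.
    assert_eq!((condition.address.0, condition.bit_size), (7, 1));
}
```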
- fn convert_ssa_register_value( + fn convert_ssa_single_addr_value( &mut self, value_id: ValueId, dfg: &DataFlowGraph, - ) -> MemoryAddress { + ) -> SingleAddrVariable { let variable = self.convert_ssa_value(value_id, dfg); - variable.extract_register() + variable.extract_single_addr() } fn allocate_external_call_result( diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index b4c96de1969..f463bd4de4d 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -1,10 +1,9 @@ -use acvm::brillig_vm::brillig::MemoryAddress; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::{ brillig::brillig_ir::{ - brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, - BrilligContext, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable}, + BrilligContext, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, }, ssa::ir::{ basic_block::BasicBlockId, @@ -71,15 +70,15 @@ impl BlockVariables { } /// Defines a variable that fits in a single register and returns the allocated register. - pub(crate) fn define_register_variable( + pub(crate) fn define_single_addr_variable( &mut self, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, value: ValueId, dfg: &DataFlowGraph, - ) -> MemoryAddress { + ) -> SingleAddrVariable { let variable = self.define_variable(function_context, brillig_context, value, dfg); - variable.extract_register() + variable.extract_single_addr() } /// Removes a variable so it's not used anymore within this block. @@ -190,12 +189,22 @@ pub(crate) fn allocate_value( let typ = dfg.type_of_value(value_id); match typ { - Type::Numeric(_) | Type::Reference(_) | Type::Function => { + Type::Numeric(numeric_type) => BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: numeric_type.bit_size(), + }), + Type::Reference(_) => BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }), + Type::Function => { // NB. 
function references are converted to a constant when // translating from SSA to Brillig (to allow for debugger // instrumentation to work properly) - let register = brillig_context.allocate_register(); - BrilligVariable::Simple(register) + BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: 32, + }) } Type::Array(item_typ, elem_count) => { let pointer_register = brillig_context.allocate_register(); diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index e96a756a9ee..b5da8296ba5 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -74,7 +74,7 @@ impl FunctionContext { fn ssa_type_to_parameter(typ: &Type) -> BrilligParameter { match typ { Type::Numeric(_) | Type::Reference(_) => { - BrilligParameter::Simple(get_bit_size_from_ssa_type(typ)) + BrilligParameter::SingleAddr(get_bit_size_from_ssa_type(typ)) } Type::Array(item_type, size) => BrilligParameter::Array( vecmap(item_type.iter(), |item_typ| { diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs index 933396be0cb..3fc0e981165 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs @@ -334,7 +334,7 @@ mod tests { use crate::brillig::brillig_gen::brillig_fn::FunctionContext; use crate::brillig::brillig_ir::artifact::BrilligParameter; use crate::brillig::brillig_ir::brillig_variable::{ - BrilligArray, BrilligVariable, BrilligVector, + BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable, }; use crate::brillig::brillig_ir::tests::{ create_and_run_vm, create_context, create_entry_point_bytecode, @@ -379,13 +379,13 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() + 1, )]; @@ -397,7 +397,10 @@ mod tests { size: array.len(), rc: context.allocate_register(), }; - let item_to_insert = context.allocate_register(); + let item_to_insert = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; // Cast the source array to a vector let source_vector = context.array_to_vector(&array_variable); @@ -415,13 +418,13 @@ mod tests { block.slice_push_back_operation( target_vector, source_vector, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); } else { block.slice_push_front_operation( target_vector, source_vector, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); } @@ -472,15 +475,15 @@ mod tests { expected_return_item: Value, ) { let arguments = vec![BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + 
vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), )]; let returns = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() - 1, ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let (_, mut function_context, mut context) = create_test_environment(); @@ -501,7 +504,10 @@ mod tests { size: context.allocate_register(), rc: context.allocate_register(), }; - let removed_item = context.allocate_register(); + let removed_item = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let mut block = create_brillig_block(&mut function_context, &mut context); @@ -509,17 +515,21 @@ mod tests { block.slice_pop_back_operation( target_vector, source_vector, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); } else { block.slice_pop_front_operation( target_vector, source_vector, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); } - context.return_instruction(&[target_vector.pointer, target_vector.rc, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + removed_item.address, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let expected_return: Vec<_> = @@ -559,14 +569,14 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() + 1, )]; @@ -578,7 +588,10 @@ mod tests { size: array.len(), rc: context.allocate_register(), }; - let item_to_insert = context.allocate_register(); + let item_to_insert = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let index_to_insert = context.allocate_register(); // Cast the source array to a vector @@ -597,7 +610,7 @@ mod tests { target_vector, source_vector, index_to_insert, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); context.return_instruction(&[target_vector.pointer, target_vector.rc]); @@ -676,17 +689,17 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() - 1, ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + 
BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let (_, mut function_context, mut context) = create_test_environment(); @@ -708,7 +721,10 @@ mod tests { size: context.allocate_register(), rc: context.allocate_register(), }; - let removed_item = context.allocate_register(); + let removed_item = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let mut block = create_brillig_block(&mut function_context, &mut context); @@ -716,10 +732,14 @@ mod tests { target_vector, source_vector, index_to_insert, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); - context.return_instruction(&[target_vector.pointer, target_vector.size, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.size, + removed_item.address, + ]); let calldata: Vec<_> = array.into_iter().chain(vec![index]).collect(); diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 8bbde88c89e..90608974f98 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -15,7 +15,7 @@ use crate::ssa::ir::dfg::CallStack; use self::{ artifact::{BrilligArtifact, UnresolvedJumpLocation}, - brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable}, registers::BrilligRegistersContext, }; use acvm::{ @@ -27,6 +27,7 @@ use acvm::{ FieldElement, }; use debug_show::DebugShow; +use num_bigint::BigUint; /// Integer arithmetic in Brillig is limited to 127 bit /// integers. @@ -189,7 +190,7 @@ impl BrilligContext { self.deallocate_register(size_register); } - pub(crate) fn allocate_simple_reference_instruction( + pub(crate) fn allocate_single_addr_reference_instruction( &mut self, pointer_register: MemoryAddress, ) { @@ -295,18 +296,21 @@ impl BrilligContext { // Loop body // Check if iterator < iteration_count - let iterator_less_than_iterations = self.allocate_register(); + let iterator_less_than_iterations = + SingleAddrVariable { address: self.allocate_register(), bit_size: 1 }; + self.memory_op( iterator_register, iteration_count, - iterator_less_than_iterations, + iterator_less_than_iterations.address, BinaryIntOp::LessThan, ); let (exit_loop_section, exit_loop_label) = self.reserve_next_section_label(); - self.not_instruction(iterator_less_than_iterations, 1, iterator_less_than_iterations); - self.jump_if_instruction(iterator_less_than_iterations, exit_loop_label); + self.not_instruction(iterator_less_than_iterations, iterator_less_than_iterations); + + self.jump_if_instruction(iterator_less_than_iterations.address, exit_loop_label); // Call the on iteration function on_iteration(self, iterator_register); @@ -320,7 +324,7 @@ impl BrilligContext { self.enter_section(exit_loop_section); // Deallocate our temporary registers - self.deallocate_register(iterator_less_than_iterations); + self.deallocate_register(iterator_less_than_iterations.address); self.deallocate_register(iterator_register); } @@ -507,12 +511,15 @@ impl BrilligContext { /// Cast truncates the value to the given bit size and converts the type of the value in memory to that bit size. 
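The loop helper above now models its exit condition as a 1-bit `SingleAddrVariable`: it computes `iterator < iteration_count`, negates it, and jumps to the exit label when the negation is true. The control flow, sketched in plain Rust:

```rust
// Sketch of the emitted loop shape: while iterator < iteration_count,
// run the body, then increment the iterator by one.
fn run_loop(iteration_count: usize, mut on_iteration: impl FnMut(usize)) {
    let mut iterator = 0usize;
    loop {
        let lt = iterator < iteration_count; // BinaryIntOp::LessThan, 1-bit result
        if !lt {
            break; // not_instruction + jump_if_instruction to the exit label
        }
        on_iteration(iterator);
        iterator += 1; // usize_op(.., BinaryIntOp::Add, 1)
    }
}

fn main() {
    let mut sum = 0;
    run_loop(4, |i| sum += i);
    assert_eq!(sum, 0 + 1 + 2 + 3);
}
```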
pub(crate) fn cast_instruction( &mut self, - destination: MemoryAddress, - source: MemoryAddress, - bit_size: u32, + destination: SingleAddrVariable, + source: SingleAddrVariable, ) { - self.debug_show.cast_instruction(destination, source, bit_size); - self.push_opcode(BrilligOpcode::Cast { destination, source, bit_size }); + self.debug_show.cast_instruction(destination.address, source.address, destination.bit_size); + self.push_opcode(BrilligOpcode::Cast { + destination: destination.address, + source: source.address, + bit_size: destination.bit_size, + }); } /// Processes a binary instruction according `operation`. @@ -564,21 +571,20 @@ impl BrilligContext { /// in Brillig. pub(crate) fn not_instruction( &mut self, - input: MemoryAddress, - bit_size: u32, - result: MemoryAddress, + input: SingleAddrVariable, + result: SingleAddrVariable, ) { - self.debug_show.not_instruction(input, bit_size, result); + self.debug_show.not_instruction(input.address, input.bit_size, result.address); // Compile !x as ((-1) - x) - let u_max = FieldElement::from(2_i128).pow(&FieldElement::from(bit_size as i128)) + let u_max = FieldElement::from(2_i128).pow(&FieldElement::from(input.bit_size as i128)) - FieldElement::one(); - let max = self.make_constant(Value::from(u_max), bit_size); + let max = self.make_constant(Value::from(u_max), input.bit_size); let opcode = BrilligOpcode::BinaryIntOp { - destination: result, + destination: result.address, op: BinaryIntOp::Sub, - bit_size, + bit_size: input.bit_size, lhs: max, - rhs: input, + rhs: input.address, }; self.push_opcode(opcode); self.deallocate_register(max); @@ -626,8 +632,8 @@ impl BrilligContext { variable_pointer: MemoryAddress, ) { match destination { - BrilligVariable::Simple(register_index) => { - self.load_instruction(register_index, variable_pointer); + BrilligVariable::SingleAddr(single_addr) => { + self.load_instruction(single_addr.address, variable_pointer); } BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.load_instruction(pointer, variable_pointer); @@ -676,8 +682,8 @@ impl BrilligContext { source: BrilligVariable, ) { match source { - BrilligVariable::Simple(register_index) => { - self.store_instruction(variable_pointer, register_index); + BrilligVariable::SingleAddr(single_addr) => { + self.store_instruction(variable_pointer, single_addr.address); } BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.store_instruction(variable_pointer, pointer); @@ -717,31 +723,36 @@ impl BrilligContext { /// For Brillig, all integer operations will overflow as its cheap. 
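`not_instruction` compiles negation as subtraction from the all-ones value of the input's width, which is why the separate bit-size parameter could be dropped once the variable carries it. A worked check of the identity for 8-bit values:

```rust
// !x == (2^bit_size - 1) - x, shown here at bit_size = 8.
fn brillig_not(x: u8) -> u8 {
    let u_max = u8::MAX; // the 2^8 - 1 constant the instruction materializes
    u_max - x // BinaryIntOp::Sub
}

fn main() {
    assert_eq!(brillig_not(0x0F), 0xF0);
    assert_eq!(brillig_not(0x0F), !0x0F_u8); // agrees with native bitwise NOT
}
```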
pub(crate) fn truncate_instruction( &mut self, - destination_of_truncated_value: MemoryAddress, - value_to_truncate: MemoryAddress, + destination_of_truncated_value: SingleAddrVariable, + value_to_truncate: SingleAddrVariable, bit_size: u32, ) { self.debug_show.truncate_instruction( - destination_of_truncated_value, - value_to_truncate, + destination_of_truncated_value.address, + value_to_truncate.address, bit_size, ); assert!( - bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, - "tried to truncate to a bit size greater than allowed {bit_size}" + bit_size <= value_to_truncate.bit_size, + "tried to truncate to a bit size {} greater than the variable size {}", + bit_size, + value_to_truncate.bit_size + ); + + let mask = BigUint::from(2_u32).pow(bit_size) - BigUint::from(1_u32); + let mask_constant = self.make_constant( + FieldElement::from_be_bytes_reduce(&mask.to_bytes_be()).into(), + value_to_truncate.bit_size, ); - // The brillig VM performs all arithmetic operations modulo 2**bit_size - // So to truncate any value to a target bit size we can just issue a no-op arithmetic operation - // With bit size equal to target_bit_size - let zero_register = self.make_constant(Value::from(FieldElement::zero()), bit_size); self.binary_instruction( - value_to_truncate, - zero_register, - destination_of_truncated_value, - BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size }, + value_to_truncate.address, + mask_constant, + destination_of_truncated_value.address, + BrilligBinaryOp::Integer { op: BinaryIntOp::And, bit_size: value_to_truncate.bit_size }, ); - self.deallocate_register(zero_register); + + self.deallocate_register(mask_constant); } /// Emits a stop instruction @@ -1140,6 +1151,14 @@ pub(crate) mod tests { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { panic!("Path not trodden by this test") } + + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result<Vec<FieldElement>, BlackBoxResolutionError> { + Ok(vec![0_u128.into(), 1_u128.into(), 2_u128.into(), 3_u128.into()]) + } } pub(crate) fn create_context() -> BrilligContext { diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs index 4ef8c9d1dfc..d10dcf13d9f 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs @@ -6,8 +6,8 @@ use crate::ssa::ir::dfg::CallStack; /// Represents a parameter or a return value of a function. #[derive(Debug, Clone)] pub(crate) enum BrilligParameter { - /// A simple parameter or return value. Holds the bit size of the parameter. - Simple(u32), + /// A single address parameter or return value. Holds the bit size of the parameter. + SingleAddr(u32), /// An array parameter or return value. Holds the type of an array item and its size. Array(Vec<BrilligParameter>, usize), /// A slice parameter or return value. Holds the type of a slice item.
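The truncation rewrite above replaces the old "add zero at the target bit size" trick with an explicit AND against the mask 2^bit_size - 1, evaluated at the source variable's width. A worked example of the new scheme in plain Rust:

```rust
// Truncate a value to `bit_size` bits by masking with 2^bit_size - 1
// (valid here for bit_size < 64; the compiler builds the mask as a BigUint).
fn truncate(value: u64, bit_size: u32) -> u64 {
    let mask = (1u64 << bit_size) - 1;
    value & mask // BinaryIntOp::And against the mask constant
}

fn main() {
    assert_eq!(truncate(0x1FF, 8), 0xFF); // a 9-bit value truncated to 8 bits
    assert_eq!(truncate(0x42, 8), 0x42); // in-range values pass through unchanged
}
```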
diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs index 856fb709fa9..48ad3c5bae4 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs @@ -5,6 +5,12 @@ use serde::{Deserialize, Serialize}; use crate::ssa::ir::types::Type; +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) struct SingleAddrVariable { + pub(crate) address: MemoryAddress, + pub(crate) bit_size: u32, +} + /// The representation of a noir array in the Brillig IR #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] pub(crate) struct BrilligArray { @@ -52,15 +58,15 @@ impl BrilligVector { /// The representation of a noir value in the Brillig IR #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] pub(crate) enum BrilligVariable { - Simple(MemoryAddress), + SingleAddr(SingleAddrVariable), BrilligArray(BrilligArray), BrilligVector(BrilligVector), } impl BrilligVariable { - pub(crate) fn extract_register(self) -> MemoryAddress { + pub(crate) fn extract_single_addr(self) -> SingleAddrVariable { match self { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(single_addr) => single_addr, _ => unreachable!("ICE: Expected register, got {self:?}"), } } @@ -81,15 +87,17 @@ impl BrilligVariable { pub(crate) fn extract_registers(self) -> Vec<MemoryAddress> { match self { - BrilligVariable::Simple(register_index) => vec![register_index], + BrilligVariable::SingleAddr(single_addr) => vec![single_addr.address], BrilligVariable::BrilligArray(array) => array.extract_registers(), BrilligVariable::BrilligVector(vector) => vector.extract_registers(), } } - pub(crate) fn to_register_or_memory(self) -> ValueOrArray { + pub(crate) fn to_value_or_array(self) -> ValueOrArray { match self { - BrilligVariable::Simple(register_index) => ValueOrArray::MemoryAddress(register_index), + BrilligVariable::SingleAddr(single_addr) => { + ValueOrArray::MemoryAddress(single_addr.address) + } BrilligVariable::BrilligArray(array) => ValueOrArray::HeapArray(array.to_heap_array()), BrilligVariable::BrilligVector(vector) => { ValueOrArray::HeapVector(vector.to_heap_vector()) diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index 0eb4c8c31bd..9d186f9bc60 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -1,6 +1,6 @@ use super::{ artifact::{BrilligArtifact, BrilligParameter}, - brillig_variable::{BrilligArray, BrilligVariable}, + brillig_variable::{BrilligArray, BrilligVariable, SingleAddrVariable}, debug_show::DebugShow, registers::BrilligRegistersContext, BrilligContext, ReservedRegisters, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, @@ -63,10 +63,13 @@ impl BrilligContext { let mut argument_variables: Vec<_> = arguments .iter() .map(|argument| match argument { - BrilligParameter::Simple(_) => { - let simple_address = self.allocate_register(); - let var = BrilligVariable::Simple(simple_address); - self.mov_instruction(simple_address, MemoryAddress(current_calldata_pointer)); + BrilligParameter::SingleAddr(bit_size) => { + let single_address = self.allocate_register(); + let var = BrilligVariable::SingleAddr(SingleAddrVariable { + address: single_address, + bit_size:
*bit_size, + }); + self.mov_instruction(single_address, MemoryAddress(current_calldata_pointer)); current_calldata_pointer += 1; var } @@ -116,7 +119,7 @@ impl BrilligContext { fn flat_bit_sizes(param: &BrilligParameter) -> Box + '_> { match param { - BrilligParameter::Simple(bit_size) => Box::new(std::iter::once(*bit_size)), + BrilligParameter::SingleAddr(bit_size) => Box::new(std::iter::once(*bit_size)), BrilligParameter::Array(item_types, item_count) => Box::new( (0..*item_count).flat_map(move |_| item_types.iter().flat_map(flat_bit_sizes)), ), @@ -139,7 +142,7 @@ impl BrilligContext { /// Computes the size of a parameter if it was flattened fn flattened_size(param: &BrilligParameter) -> usize { match param { - BrilligParameter::Simple(_) => 1, + BrilligParameter::SingleAddr(_) => 1, BrilligParameter::Array(item_types, item_count) => { let item_size: usize = item_types.iter().map(BrilligContext::flattened_size).sum(); item_count * item_size @@ -157,7 +160,7 @@ impl BrilligContext { /// Computes the size of a parameter if it was flattened fn has_nested_arrays(tuple: &[BrilligParameter]) -> bool { - tuple.iter().any(|param| !matches!(param, BrilligParameter::Simple(_))) + tuple.iter().any(|param| !matches!(param, BrilligParameter::SingleAddr(_))) } /// Deflatten an array by recursively allocating nested arrays and copying the plain values. @@ -194,7 +197,7 @@ impl BrilligContext { self.make_usize_constant((target_item_base_index + subitem_index).into()); match subitem { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.array_get( flattened_array_pointer, source_index, @@ -279,7 +282,12 @@ impl BrilligContext { let returned_variables: Vec<_> = return_parameters .iter() .map(|return_parameter| match return_parameter { - BrilligParameter::Simple(_) => BrilligVariable::Simple(self.allocate_register()), + BrilligParameter::SingleAddr(bit_size) => { + BrilligVariable::SingleAddr(SingleAddrVariable { + address: self.allocate_register(), + bit_size: *bit_size, + }) + } BrilligParameter::Array(item_types, item_count) => { BrilligVariable::BrilligArray(BrilligArray { pointer: self.allocate_register(), @@ -301,10 +309,10 @@ impl BrilligContext { for (return_param, returned_variable) in return_parameters.iter().zip(&returned_variables) { match return_param { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.mov_instruction( MemoryAddress(return_data_index), - returned_variable.extract_register(), + returned_variable.extract_single_addr().address, ); return_data_index += 1; } @@ -359,7 +367,7 @@ impl BrilligContext { self.make_usize_constant((target_item_base_index + target_offset).into()); match subitem { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.array_get( deflattened_array_pointer, source_index, @@ -468,12 +476,12 @@ mod tests { ]; let arguments = vec![BrilligParameter::Array( vec![ - BrilligParameter::Array(vec![BrilligParameter::Simple(8)], 2), - BrilligParameter::Simple(8), + BrilligParameter::Array(vec![BrilligParameter::SingleAddr(8)], 2), + BrilligParameter::SingleAddr(8), ], 2, )]; - let returns = vec![BrilligParameter::Simple(8)]; + let returns = vec![BrilligParameter::SingleAddr(8)]; let mut context = create_context(); @@ -506,8 +514,8 @@ mod tests { ]; let array_param = BrilligParameter::Array( vec![ - BrilligParameter::Array(vec![BrilligParameter::Simple(8)], 2), - BrilligParameter::Simple(8), + BrilligParameter::Array(vec![BrilligParameter::SingleAddr(8)], 2), + 
BrilligParameter::SingleAddr(8), ], 2, ); diff --git a/noir/compiler/noirc_evaluator/src/errors.rs b/noir/compiler/noirc_evaluator/src/errors.rs index ed94adac28e..40f4336e0b5 100644 --- a/noir/compiler/noirc_evaluator/src/errors.rs +++ b/noir/compiler/noirc_evaluator/src/errors.rs @@ -158,7 +158,7 @@ impl RuntimeError { RuntimeError::InternalError(cause) => { Diagnostic::simple_error( "Internal Consistency Evaluators Errors: \n - This is likely a bug. Consider Opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), + This is likely a bug. Consider opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), cause.to_string(), noirc_errors::Span::inclusive(0, 0) ) diff --git a/noir/compiler/noirc_evaluator/src/ssa.rs b/noir/compiler/noirc_evaluator/src/ssa.rs index d19c4467235..0bb81efe977 100644 --- a/noir/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/compiler/noirc_evaluator/src/ssa.rs @@ -54,11 +54,6 @@ pub(crate) fn optimize_into_acir( .try_run_pass(Ssa::evaluate_assert_constant, "After Assert Constant:")? .try_run_pass(Ssa::unroll_loops, "After Unrolling:")? .run_pass(Ssa::simplify_cfg, "After Simplifying:") - // Run mem2reg before flattening to handle any promotion - // of values that can be accessed after loop unrolling. - // If there are slice mergers uncovered by loop unrolling - // and this pass is missed, slice merging will fail inside of flattening. - .run_pass(Ssa::mem2reg, "After Mem2Reg:") .run_pass(Ssa::flatten_cfg, "After Flattening:") .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts:") // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 2360d053887..fb11bae556c 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -67,6 +67,13 @@ impl AcirType { pub(crate) fn unsigned(bit_size: u32) -> Self { AcirType::NumericType(NumericType::Unsigned { bit_size }) } + + pub(crate) fn to_numeric_type(&self) -> NumericType { + match self { + AcirType::NumericType(numeric_type) => *numeric_type, + AcirType::Array(_, _) => unreachable!("cannot fetch a numeric type for an array type"), + } + } } impl From<FieldElement> for AcirType { @@ -88,6 +95,12 @@ impl<'a> From<&'a SsaType> for AcirType { } } +impl From<NumericType> for AcirType { + fn from(value: NumericType) -> Self { + AcirType::NumericType(value) + } +} + #[derive(Debug, Default)] /// Context object which holds the relationship between /// `Variables`(AcirVar) and types such as `Expression` and `Witness` @@ -1415,13 +1428,13 @@ impl AcirContext { } Ok(values) } - AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. }) => { + AcirValue::DynamicArray(AcirDynamicArray { block_id, len, value_types, ..
}) => { try_vecmap(0..len, |i| { let index_var = self.add_constant(i); Ok::<(AcirVar, AcirType), InternalError>(( self.read_from_memory(block_id, &index_var)?, - AcirType::field(), + value_types[i].into(), )) }) } diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 46be6efcadd..8d4d0668534 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -99,6 +99,18 @@ pub(crate) struct AcirDynamicArray { block_id: BlockId, /// Length of the array len: usize, + /// An ACIR dynamic array is a flat structure, so we use + /// the inner structure of an `AcirType::NumericType` directly. + /// Some usages of ACIR arrays (e.g. black box functions) require the bit size + /// of every value to be known, thus we store the types as part of the dynamic + /// array definition. + /// + /// A dynamic non-homogenous array can potentially have values of differing types. + /// Thus, we store a vector of types rather than a single type, as a dynamic non-homogenous array + /// is still represented in ACIR by a single `AcirDynamicArray` structure. + /// + /// The length of the value types vector must match the `len` field in this structure. + value_types: Vec<NumericType>, /// Identification for the ACIR dynamic array /// inner element type sizes array element_type_sizes: Option<BlockId>, } @@ -150,6 +162,16 @@ impl AcirValue { AcirValue::DynamicArray(_) => unimplemented!("Cannot flatten a dynamic array"), } } + + fn flat_numeric_types(self) -> Vec<NumericType> { + match self { + AcirValue::Array(_) => { + self.flatten().into_iter().map(|(_, typ)| typ.to_numeric_type()).collect() + } + AcirValue::DynamicArray(AcirDynamicArray { value_types, .. }) => value_types, + _ => unreachable!("An AcirValue::Var cannot be used as an array value"), + } + } } impl Ssa { @@ -1007,9 +1029,15 @@ impl Context { } else { None }; + + let value_types = self.convert_value(array_id, dfg).flat_numeric_types(); + // Compiler sanity check + assert_eq!(value_types.len(), array_len, "ICE: The length of the flattened type array should match the length of the dynamic array"); + let result_value = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: array_len, + value_types, element_type_sizes, }); self.define_result(dfg, instruction, result_value); @@ -1093,7 +1121,7 @@ impl Context { &mut self, array_typ: &Type, array_id: ValueId, - array_acir_value: Option<AcirValue>, + supplied_acir_value: Option<&AcirValue>, dfg: &DataFlowGraph, ) -> Result<BlockId, RuntimeError> { let element_type_sizes = self.internal_block_id(&array_id); @@ -1119,26 +1147,23 @@ impl Context { Value::Instruction { .. } | Value::Param { .. } => { // An instruction representing the slice means it has been processed previously during ACIR gen. // Use the previously defined result of an array operation to fetch the internal type information. - let array_acir_value = if let Some(array_acir_value) = array_acir_value { - array_acir_value - } else { - self.convert_value(array_id, dfg) - }; + let array_acir_value = &self.convert_value(array_id, dfg); + let array_acir_value = supplied_acir_value.unwrap_or(array_acir_value); match array_acir_value { AcirValue::DynamicArray(AcirDynamicArray { element_type_sizes: inner_elem_type_sizes, ..
}) => { if let Some(inner_elem_type_sizes) = inner_elem_type_sizes { - if self.initialized_arrays.contains(&inner_elem_type_sizes) { - let type_sizes_array_len = self.internal_mem_block_lengths.get(&inner_elem_type_sizes).copied().ok_or_else(|| + if self.initialized_arrays.contains(inner_elem_type_sizes) { + let type_sizes_array_len = *self.internal_mem_block_lengths.get(inner_elem_type_sizes).ok_or_else(|| InternalError::General { message: format!("Array {array_id}'s inner element type sizes array does not have a tracked length"), call_stack: self.acir_context.get_call_stack(), } )?; self.copy_dynamic_array( - inner_elem_type_sizes, + *inner_elem_type_sizes, element_type_sizes, type_sizes_array_len, )?; @@ -1683,15 +1708,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) } else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + new_elem_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: new_elem_size, + value_types, element_type_sizes, }); Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) @@ -1738,15 +1772,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) } else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + new_slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: new_slice_size, + value_types, element_type_sizes, }); @@ -1943,15 +1986,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(slice), + Some(&slice), dfg, )?) } else { None }; + + let value_types = slice.flat_numeric_types(); + assert_eq!( + value_types.len(), + slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: slice_size, + value_types, element_type_sizes, }); @@ -2059,15 +2111,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) } else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: slice_size, + value_types, element_type_sizes, }); diff --git a/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 9e17595a033..9d27554dcaa 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -115,6 +115,11 @@ impl FunctionBuilder { self.numeric_constant(value.into(), Type::field()) } + /// Insert a numeric constant into the current function of type Type::length_type() + pub(crate) fn length_constant(&mut self, value: impl Into<FieldElement>) -> ValueId { + self.numeric_constant(value.into(), Type::length_type()) + } + /// Insert an array constant into the current function with the given element values.
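Every site that builds an `AcirDynamicArray` now also computes `value_types`, one numeric type per flattened element, and asserts that its length matches `len`. A minimal sketch of that invariant using hypothetical mirror types rather than the acir_gen module's own:

```rust
// Hypothetical mirrors of the ACIR-gen types; the real ones live in
// noirc_evaluator's acir_gen module.
#[derive(Debug, Clone, Copy)]
enum NumericType {
    NativeField,
    Unsigned { bit_size: u32 },
}

struct AcirDynamicArray {
    len: usize,
    value_types: Vec<NumericType>,
}

impl AcirDynamicArray {
    fn new(len: usize, value_types: Vec<NumericType>) -> Self {
        // Mirrors the `assert_eq!(value_types.len(), len, ..)` sanity checks above.
        assert_eq!(value_types.len(), len, "value types must match the array length");
        Self { len, value_types }
    }
}

fn main() {
    let arr = AcirDynamicArray::new(
        2,
        vec![NumericType::NativeField, NumericType::Unsigned { bit_size: 32 }],
    );
    assert_eq!(arr.value_types.len(), arr.len);
}
```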
pub(crate) fn array_constant(&mut self, elements: im::Vector<ValueId>, typ: Type) -> ValueId { self.current_function.dfg.make_array(elements, typ) @@ -216,6 +221,11 @@ impl FunctionBuilder { operator: BinaryOp, rhs: ValueId, ) -> ValueId { + assert_eq!( + self.type_of_value(lhs), + self.type_of_value(rhs), + "ICE - Binary instruction operands must have the same type" + ); let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); self.insert_instruction(instruction, None).first() } diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 4217a3d4710..9349d58c4d9 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -77,7 +77,7 @@ pub(super) fn simplify_call( Intrinsic::ArrayLen => { if let Some(length) = dfg.try_get_array_length(arguments[0]) { let length = FieldElement::from(length as u128); - SimplifyResult::SimplifiedTo(dfg.make_constant(length, Type::field())) + SimplifyResult::SimplifiedTo(dfg.make_constant(length, Type::length_type())) } else if matches!(dfg.type_of_value(arguments[1]), Type::Slice(_)) { SimplifyResult::SimplifiedTo(arguments[0]) } else { @@ -283,7 +283,7 @@ fn update_slice_length( operator: BinaryOp, block: BasicBlockId, ) -> ValueId { - let one = dfg.make_constant(FieldElement::one(), Type::field()); + let one = dfg.make_constant(FieldElement::one(), Type::length_type()); let instruction = Instruction::Binary(Binary { lhs: slice_len, operator, rhs: one }); let call_stack = dfg.get_value_call_stack(slice_len); dfg.insert_instruction_and_results(instruction, block, None, call_stack).first() } @@ -296,8 +296,8 @@ fn simplify_slice_push_back( dfg: &mut DataFlowGraph, block: BasicBlockId, ) -> SimplifyResult { - // The capacity must be an integer so that we can compare it against the slice length which is represented as a field - let capacity = dfg.make_constant((slice.len() as u128).into(), Type::unsigned(64)); + // The capacity must be an integer so that we can compare it against the slice length + let capacity = dfg.make_constant((slice.len() as u128).into(), Type::length_type()); let len_equals_capacity_instr = Instruction::Binary(Binary { lhs: arguments[0], operator: BinaryOp::Eq, rhs: capacity }); let call_stack = dfg.get_value_call_stack(arguments[0]); @@ -362,7 +362,7 @@ fn simplify_slice_pop_back( let new_slice_length = update_slice_length(arguments[0], dfg, BinaryOp::Sub, block); - let element_size = dfg.make_constant((element_count as u128).into(), Type::field()); + let element_size = dfg.make_constant((element_count as u128).into(), Type::length_type()); let flattened_len_instr = Instruction::binary(BinaryOp::Mul, arguments[0], element_size); let mut flattened_len = dfg .insert_instruction_and_results(flattened_len_instr, block, None, CallStack::new()) .first(); @@ -478,7 +478,7 @@ fn make_constant_slice( let typ = Type::Slice(Rc::new(vec![typ])); let length = FieldElement::from(result_constants.len() as u128); - (dfg.make_constant(length, Type::field()), dfg.make_array(result_constants.into(), typ)) + (dfg.make_constant(length, Type::length_type()), dfg.make_array(result_constants.into(), typ)) } /// Returns a slice (represented by a tuple (len, slice)) of constants corresponding to the limbs of the radix decomposition.
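The new `Type::length_type()` (an alias for `u64`, defined in the types.rs hunk below) replaces `Type::field()` for slice and array lengths. Together with the operand-type assertion added to `insert_binary`, this guarantees that length arithmetic such as `len == capacity` compares values of the same SSA type. A small sketch of why the assertion forces the migration, with hypothetical mirror types:

```rust
// Hypothetical mirror of the SSA numeric types involved.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Type {
    Field,
    Unsigned { bit_size: u32 },
}

fn length_type() -> Type {
    Type::Unsigned { bit_size: 64 } // what Type::length_type() returns in this diff
}

// Mirrors the "ICE - Binary instruction operands must have the same type" assert.
fn check_binary_operands(lhs: Type, rhs: Type) {
    assert_eq!(lhs, rhs, "ICE - Binary instruction operands must have the same type");
}

fn main() {
    let slice_len = length_type();
    let capacity = length_type(); // previously Type::field(), which would now trip the assert
    check_binary_operands(slice_len, capacity);
    let _unused = Type::Field; // lengths no longer use the field type
}
```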
diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs index 8dc9e67db79..ea3f5393245 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -90,6 +90,11 @@ impl Type { Type::Numeric(NumericType::NativeField) } + /// Creates the type of an array's length. + pub(crate) fn length_type() -> Type { + Type::unsigned(64) + } + /// Returns the bit size of the provided numeric type. /// /// # Panics diff --git a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 845ffd15413..9c760c013a9 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -192,7 +192,7 @@ impl<'a> FunctionContext<'a> { ast::Type::Slice(elements) => { let element_types = Self::convert_type(elements).flatten(); Tree::Branch(vec![ - Tree::Leaf(f(Type::field())), + Tree::Leaf(f(Type::length_type())), Tree::Leaf(f(Type::Slice(Rc::new(element_types)))), ]) } @@ -640,13 +640,13 @@ impl<'a> FunctionContext<'a> { let result_alloc = self.builder.set_location(location).insert_allocate(Type::bool()); let true_value = self.builder.numeric_constant(1u128, Type::bool()); self.builder.insert_store(result_alloc, true_value); - let zero = self.builder.field_constant(0u128); + let zero = self.builder.length_constant(0u128); self.builder.terminate_with_jmp(loop_start, vec![zero]); // loop_start self.builder.switch_to_block(loop_start); - let i = self.builder.add_block_parameter(loop_start, Type::field()); - let array_length = self.builder.field_constant(array_length as u128); + let i = self.builder.add_block_parameter(loop_start, Type::length_type()); + let array_length = self.builder.length_constant(array_length as u128); let v0 = self.builder.insert_binary(i, BinaryOp::Lt, array_length); self.builder.terminate_with_jmpif(v0, loop_body, loop_end); @@ -658,7 +658,7 @@ impl<'a> FunctionContext<'a> { let v4 = self.builder.insert_load(result_alloc, Type::bool()); let v5 = self.builder.insert_binary(v4, BinaryOp::And, v3); self.builder.insert_store(result_alloc, v5); - let one = self.builder.field_constant(1u128); + let one = self.builder.length_constant(1u128); let v6 = self.builder.insert_binary(i, BinaryOp::Add, one); self.builder.terminate_with_jmp(loop_start, vec![v6]); diff --git a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index ecc8bf87597..d95295ae3c9 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -196,7 +196,7 @@ impl<'a> FunctionContext<'a> { } ast::Type::Slice(_) => { let slice_length = - self.builder.field_constant(array.contents.len() as u128); + self.builder.length_constant(array.contents.len() as u128); let slice_contents = self.codegen_array_checked(elements, typ[1].clone())?; Tree::Branch(vec![slice_length.into(), slice_contents]) @@ -221,7 +221,7 @@ impl<'a> FunctionContext<'a> { // A caller needs multiple pieces of information to make use of a format string // The message string, the number of fields to be formatted, and the fields themselves let string = self.codegen_string(string); - let field_count = self.builder.field_constant(*number_of_fields as u128); + let field_count = self.builder.length_constant(*number_of_fields as u128); let fields = self.codegen_expression(fields)?; Ok(Tree::Branch(vec![string, 
field_count.into(), fields])) @@ -615,7 +615,7 @@ impl<'a> FunctionContext<'a> { { match intrinsic { Intrinsic::SliceInsert => { - let one = self.builder.field_constant(1u128); + let one = self.builder.length_constant(1u128); // We add one here in the case of a slice insert as a slice insert at the length of the slice // can be converted to a slice push back @@ -684,9 +684,7 @@ impl<'a> FunctionContext<'a> { &mut self, assert_message: &Option>, ) -> Result>, RuntimeError> { - let Some(assert_message_expr) = assert_message else { - return Ok(None) - }; + let Some(assert_message_expr) = assert_message else { return Ok(None) }; if let ast::Expression::Literal(ast::Literal::Str(assert_message)) = assert_message_expr.as_ref() diff --git a/noir/compiler/noirc_frontend/src/ast/expression.rs b/noir/compiler/noirc_frontend/src/ast/expression.rs index c78deaf6dbb..2a252633a29 100644 --- a/noir/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/compiler/noirc_frontend/src/ast/expression.rs @@ -236,7 +236,15 @@ impl BinaryOpKind { } pub fn is_valid_for_field_type(self) -> bool { - matches!(self, BinaryOpKind::Equal | BinaryOpKind::NotEqual) + matches!( + self, + BinaryOpKind::Add + | BinaryOpKind::Subtract + | BinaryOpKind::Multiply + | BinaryOpKind::Divide + | BinaryOpKind::Equal + | BinaryOpKind::NotEqual + ) } pub fn as_string(self) -> &'static str { @@ -280,14 +288,6 @@ impl BinaryOpKind { BinaryOpKind::Modulo => Token::Percent, } } - - pub fn is_bit_shift(&self) -> bool { - matches!(self, BinaryOpKind::ShiftRight | BinaryOpKind::ShiftLeft) - } - - pub fn is_modulo(&self) -> bool { - matches!(self, BinaryOpKind::Modulo) - } } #[derive(PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, Clone)] diff --git a/noir/compiler/noirc_frontend/src/debug/mod.rs b/noir/compiler/noirc_frontend/src/debug/mod.rs index 9056e821e8d..a88567fcaf9 100644 --- a/noir/compiler/noirc_frontend/src/debug/mod.rs +++ b/noir/compiler/noirc_frontend/src/debug/mod.rs @@ -96,7 +96,7 @@ impl DebugInstrumenter { self.walk_scope(&mut func.body.0, func.span); // prepend fn params: - func.body.0 = vec![set_fn_params, func.body.0.clone()].concat(); + func.body.0 = [set_fn_params, func.body.0.clone()].concat(); } // Modify a vector of statements in-place, adding instrumentation for sets and drops. @@ -130,7 +130,7 @@ impl DebugInstrumenter { let span = Span::empty(span.end()); // drop scope variables - let scope_vars = self.scope.pop().unwrap_or(HashMap::default()); + let scope_vars = self.scope.pop().unwrap_or_default(); let drop_vars_stmts = scope_vars.values().map(|var_id| build_drop_var_stmt(*var_id, span)); statements.extend(drop_vars_stmts); diff --git a/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs index 0372edfd031..8721bdb6c3c 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -31,7 +31,7 @@ pub struct LocalModuleId(pub Index); impl LocalModuleId { pub fn dummy_id() -> LocalModuleId { - LocalModuleId(Index::from_raw_parts(std::usize::MAX, std::u64::MAX)) + LocalModuleId(Index::dummy()) } } diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs index d4aae133b35..7f9e48353a7 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -1463,8 +1463,8 @@ impl<'a> Resolver<'a> { // they're used in expressions. 
We must do this here since the type // checker does not check definition kinds and otherwise expects // parameters to already be typed. - if self.interner.id_type(hir_ident.id) == Type::Error { - let typ = Type::polymorphic_integer(self.interner); + if self.interner.definition_type(hir_ident.id) == Type::Error { + let typ = Type::polymorphic_integer_or_field(self.interner); self.interner.push_definition_type(hir_ident.id, typ); } } diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs index 96a79152f69..b78f07c88f2 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -65,7 +65,7 @@ impl<'interner> TypeChecker<'interner> { let elem_types = vecmap(&arr, |arg| self.check_expression(arg)); let first_elem_type = elem_types - .get(0) + .first() .cloned() .unwrap_or_else(|| self.interner.next_type_variable()); @@ -104,7 +104,7 @@ impl<'interner> TypeChecker<'interner> { Type::Array(Box::new(length), Box::new(elem_type)) } HirLiteral::Bool(_) => Type::Bool, - HirLiteral::Integer(_, _) => Type::polymorphic_integer(self.interner), + HirLiteral::Integer(_, _) => Type::polymorphic_integer_or_field(self.interner), HirLiteral::Str(string) => { let len = Type::Constant(string.len() as u64); Type::String(Box::new(len)) @@ -284,8 +284,9 @@ impl<'interner> TypeChecker<'interner> { Type::Tuple(vecmap(&elements, |elem| self.check_expression(elem))) } HirExpression::Lambda(lambda) => { - let captured_vars = - vecmap(lambda.captures, |capture| self.interner.id_type(capture.ident.id)); + let captured_vars = vecmap(lambda.captures, |capture| { + self.interner.definition_type(capture.ident.id) + }); let env_type: Type = if captured_vars.is_empty() { Type::Unit } else { Type::Tuple(captured_vars) }; @@ -308,7 +309,7 @@ impl<'interner> TypeChecker<'interner> { } }; - self.interner.push_expr_type(expr_id, typ.clone()); + self.interner.push_expr_type(*expr_id, typ.clone()); typ } @@ -459,7 +460,7 @@ impl<'interner> TypeChecker<'interner> { operator: UnaryOp::MutableReference, rhs: method_call.object, })); - self.interner.push_expr_type(&new_object, new_type); + self.interner.push_expr_type(new_object, new_type); self.interner.push_expr_location(new_object, location.span, location.file); new_object }); @@ -485,7 +486,7 @@ impl<'interner> TypeChecker<'interner> { operator: UnaryOp::Dereference { implicitly_added: true }, rhs: object, })); - self.interner.push_expr_type(&object, element.as_ref().clone()); + self.interner.push_expr_type(object, element.as_ref().clone()); self.interner.push_expr_location(object, location.span, location.file); // Recursively dereference to allow for converting &mut &mut T to T @@ -528,13 +529,15 @@ impl<'interner> TypeChecker<'interner> { let index_type = self.check_expression(&index_expr.index); let span = self.interner.expr_span(&index_expr.index); - index_type.unify(&Type::polymorphic_integer(self.interner), &mut self.errors, || { - TypeCheckError::TypeMismatch { + index_type.unify( + &Type::polymorphic_integer_or_field(self.interner), + &mut self.errors, + || TypeCheckError::TypeMismatch { expected_typ: "an integer".to_owned(), expr_typ: index_type.to_string(), expr_span: span, - } - }); + }, + ); // When writing `a[i]`, if `a : &mut ...` then automatically dereference `a` as many // times as needed to get the underlying array. 
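The widened `is_valid_for_field_type` above is now the single predicate deciding which binary operators `Field` supports, which is what lets `is_bit_shift` and `is_modulo` be deleted from `ast/expression.rs` and `hir_def/expr.rs`. A sketch with a trimmed stand-in enum (the real `BinaryOpKind` lives in `noirc_frontend::ast::expression`):

```rust
// Fields support ring arithmetic plus division and (in)equality, but have no
// fixed bit width, so bitwise ops, shifts, modulo and ordered comparisons
// stay excluded from the predicate.
#![allow(dead_code)]

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum BinaryOpKind {
    Add, Subtract, Multiply, Divide, Equal, NotEqual,
    Modulo, And, Or, Xor, ShiftLeft, ShiftRight, Less,
}

impl BinaryOpKind {
    fn is_valid_for_field_type(self) -> bool {
        matches!(
            self,
            BinaryOpKind::Add
                | BinaryOpKind::Subtract
                | BinaryOpKind::Multiply
                | BinaryOpKind::Divide
                | BinaryOpKind::Equal
                | BinaryOpKind::NotEqual
        )
    }
}

fn main() {
    assert!(BinaryOpKind::Add.is_valid_for_field_type());
    assert!(!BinaryOpKind::Modulo.is_valid_for_field_type());
    assert!(!BinaryOpKind::ShiftLeft.is_valid_for_field_type());
}
```

The same predicate is reused later in this diff: it picks the target type variable kind for infix expressions and drives the `FieldElement`/`FieldElement` error arm.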
@@ -682,8 +685,8 @@ impl<'interner> TypeChecker<'interner> { operator: crate::UnaryOp::Dereference { implicitly_added: true }, rhs: old_lhs, })); - this.interner.push_expr_type(&old_lhs, lhs_type); - this.interner.push_expr_type(access_lhs, element); + this.interner.push_expr_type(old_lhs, lhs_type); + this.interner.push_expr_type(*access_lhs, element); let old_location = this.interner.id_location(old_lhs); this.interner.push_expr_location(*access_lhs, span, old_location.file); @@ -806,43 +809,13 @@ impl<'interner> TypeChecker<'interner> { // Matches on TypeVariable must be first to follow any type // bindings. - (TypeVariable(int, int_kind), other) | (other, TypeVariable(int, int_kind)) => { - if let TypeBinding::Bound(binding) = &*int.borrow() { + (TypeVariable(var, _), other) | (other, TypeVariable(var, _)) => { + if let TypeBinding::Bound(binding) = &*var.borrow() { return self.comparator_operand_type_rules(other, binding, op, span); } - if !op.kind.is_valid_for_field_type() && (other.is_bindable() || other.is_field()) { - let other = other.follow_bindings(); - - self.push_delayed_type_check(Box::new(move || { - if other.is_field() || other.is_bindable() { - Err(TypeCheckError::InvalidComparisonOnField { span }) - } else { - Ok(()) - } - })); - } - - let mut bindings = TypeBindings::new(); - if other - .try_bind_to_polymorphic_int( - int, - &mut bindings, - *int_kind == TypeVariableKind::Integer, - ) - .is_ok() - || other == &Type::Error - { - Type::apply_type_bindings(bindings); - Ok((Bool, false)) - } else { - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - span, - source: Source::Binary, - }) - } + self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + Ok((Bool, false)) } (Alias(alias, args), other) | (other, Alias(alias, args)) => { let alias = alias.borrow().get_type(args); @@ -1070,6 +1043,38 @@ impl<'interner> TypeChecker<'interner> { } } + fn bind_type_variables_for_infix( + &mut self, + lhs_type: &Type, + op: &HirBinaryOp, + rhs_type: &Type, + span: Span, + ) { + self.unify(lhs_type, rhs_type, || TypeCheckError::TypeMismatchWithSource { + expected: lhs_type.clone(), + actual: rhs_type.clone(), + source: Source::Binary, + span, + }); + + // In addition to unifying both types, we also have to bind either + // the lhs or rhs to an integer type variable. This ensures if both lhs + // and rhs are type variables, that they will have the correct integer + // type variable kind instead of TypeVariableKind::Normal. + let target = if op.kind.is_valid_for_field_type() { + Type::polymorphic_integer_or_field(self.interner) + } else { + Type::polymorphic_integer(self.interner) + }; + + self.unify(lhs_type, &target, || TypeCheckError::TypeMismatchWithSource { + expected: lhs_type.clone(), + actual: rhs_type.clone(), + source: Source::Binary, + span, + }); + } + // Given a binary operator and another type. This method will produce the output type // and a boolean indicating whether to use the trait impl corresponding to the operator // or not. A value of false indicates the caller to use a primitive operation for this @@ -1092,58 +1097,15 @@ impl<'interner> TypeChecker<'interner> { // Matches on TypeVariable must be first so that we follow any type // bindings. 
- (TypeVariable(int, int_kind), other) | (other, TypeVariable(int, int_kind)) => { + (TypeVariable(int, _), other) | (other, TypeVariable(int, _)) => { if let TypeBinding::Bound(binding) = &*int.borrow() { return self.infix_operand_type_rules(binding, op, other, span); } - if (op.is_modulo() || op.is_bitwise()) && (other.is_bindable() || other.is_field()) - { - let other = other.follow_bindings(); - let kind = op.kind; - // This will be an error if these types later resolve to a Field, or stay - // polymorphic as the bit size will be unknown. Delay this error until the function - // finishes resolving so we can still allow cases like `let x: u8 = 1 << 2;`. - self.push_delayed_type_check(Box::new(move || { - if other.is_field() { - if kind == BinaryOpKind::Modulo { - Err(TypeCheckError::FieldModulo { span }) - } else { - Err(TypeCheckError::InvalidBitwiseOperationOnField { span }) - } - } else if other.is_bindable() { - Err(TypeCheckError::AmbiguousBitWidth { span }) - } else if kind.is_bit_shift() && other.is_signed() { - Err(TypeCheckError::TypeCannotBeUsed { - typ: other, - place: "bit shift", - span, - }) - } else { - Ok(()) - } - })); - } - let mut bindings = TypeBindings::new(); - if other - .try_bind_to_polymorphic_int( - int, - &mut bindings, - *int_kind == TypeVariableKind::Integer, - ) - .is_ok() - || other == &Type::Error - { - Type::apply_type_bindings(bindings); - Ok((other.clone(), false)) - } else { - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::Binary, - span, - }) - } + self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + + // Both types are unified so the choice of which to return is arbitrary + Ok((other.clone(), false)) } (Alias(alias, args), other) | (other, Alias(alias, args)) => { let alias = alias.borrow().get_type(args); @@ -1168,11 +1130,12 @@ impl<'interner> TypeChecker<'interner> { } // The result of two Fields is always a witness (FieldElement, FieldElement) => { - if op.is_bitwise() { - return Err(TypeCheckError::InvalidBitwiseOperationOnField { span }); - } - if op.is_modulo() { - return Err(TypeCheckError::FieldModulo { span }); + if !op.kind.is_valid_for_field_type() { + if op.kind == BinaryOpKind::Modulo { + return Err(TypeCheckError::FieldModulo { span }); + } else { + return Err(TypeCheckError::InvalidBitwiseOperationOnField { span }); + } } Ok((FieldElement, false)) } @@ -1212,7 +1175,7 @@ impl<'interner> TypeChecker<'interner> { self.errors .push(TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span }); } - let expected = Type::polymorphic_integer(self.interner); + let expected = Type::polymorphic_integer_or_field(self.interner); rhs_type.unify(&expected, &mut self.errors, || TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span, diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs b/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs index 8952ba83586..21d1c75a0f2 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -21,10 +21,7 @@ use crate::{ use self::errors::Source; -type TypeCheckFn = Box Result<(), TypeCheckError>>; - pub struct TypeChecker<'interner> { - delayed_type_checks: Vec, interner: &'interner mut NodeInterner, errors: Vec, current_function: Option, @@ -80,15 +77,7 @@ pub fn type_check_func(interner: &mut NodeInterner, func_id: FuncId) -> Vec (noirc_e impl<'interner> TypeChecker<'interner> { fn new(interner: &'interner mut 
NodeInterner) -> Self { - Self { - delayed_type_checks: Vec::new(), - interner, - errors: Vec::new(), - trait_constraints: Vec::new(), - current_function: None, - } - } - - pub fn push_delayed_type_check(&mut self, f: TypeCheckFn) { - self.delayed_type_checks.push(f); + Self { interner, errors: Vec::new(), trait_constraints: Vec::new(), current_function: None } } - fn check_function_body(&mut self, body: &ExprId) -> (Type, Vec) { - let body_type = self.check_expression(body); - (body_type, std::mem::take(&mut self.delayed_type_checks)) + fn check_function_body(&mut self, body: &ExprId) -> Type { + self.check_expression(body) } pub fn check_global( @@ -198,7 +176,6 @@ impl<'interner> TypeChecker<'interner> { interner: &'interner mut NodeInterner, ) -> Vec { let mut this = Self { - delayed_type_checks: Vec::new(), interner, errors: Vec::new(), trait_constraints: Vec::new(), @@ -458,7 +435,7 @@ mod test { } fn local_module_id(&self) -> LocalModuleId { - LocalModuleId(arena::Index::from_raw_parts(0, 0)) + LocalModuleId(arena::Index::unsafe_zeroed()) } fn module_id(&self) -> ModuleId { @@ -509,7 +486,7 @@ mod test { let mut def_maps = BTreeMap::new(); let file = FileId::default(); - let mut modules = arena::Arena::new(); + let mut modules = arena::Arena::default(); let location = Location::new(Default::default(), file); modules.insert(ModuleData::new(None, location, false)); diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/noir/compiler/noirc_frontend/src/hir/type_check/stmt.rs index 03d61b93e0c..358bea86922 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -73,13 +73,10 @@ impl<'interner> TypeChecker<'interner> { let expected_type = Type::polymorphic_integer(self.interner); - self.unify(&start_range_type, &expected_type, || { - TypeCheckError::TypeCannotBeUsed { - typ: start_range_type.clone(), - place: "for loop", - span: range_span, - } - .add_context("The range of a loop must be known at compile-time") + self.unify(&start_range_type, &expected_type, || TypeCheckError::TypeCannotBeUsed { + typ: start_range_type.clone(), + place: "for loop", + span: range_span, }); self.interner.push_definition_type(for_loop.identifier.id, start_range_type); @@ -192,7 +189,7 @@ impl<'interner> TypeChecker<'interner> { mutable = definition.mutable; } - let typ = self.interner.id_type(ident.id).instantiate(self.interner).0; + let typ = self.interner.definition_type(ident.id).instantiate(self.interner).0; typ.follow_bindings() }; @@ -235,7 +232,7 @@ impl<'interner> TypeChecker<'interner> { let expr_span = self.interner.expr_span(index); index_type.unify( - &Type::polymorphic_integer(self.interner), + &Type::polymorphic_integer_or_field(self.interner), &mut self.errors, || TypeCheckError::TypeMismatch { expected_typ: "an integer".to_owned(), diff --git a/noir/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/compiler/noirc_frontend/src/hir_def/expr.rs index 75ed68af0f6..b4c590de491 100644 --- a/noir/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/compiler/noirc_frontend/src/hir_def/expr.rs @@ -94,19 +94,6 @@ impl HirBinaryOp { let location = Location::new(op.span(), file); HirBinaryOp { location, kind } } - - pub fn is_bitwise(&self) -> bool { - use BinaryOpKind::*; - matches!(self.kind, And | Or | Xor | ShiftRight | ShiftLeft) - } - - pub fn is_bit_shift(&self) -> bool { - self.kind.is_bit_shift() - } - - pub fn is_modulo(&self) -> bool { - self.kind.is_modulo() - } } #[derive(Debug, Clone)] diff 
--git a/noir/compiler/noirc_frontend/src/hir_def/types.rs b/noir/compiler/noirc_frontend/src/hir_def/types.rs index 98b47f17cd4..e105da1ccf0 100644 --- a/noir/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/compiler/noirc_frontend/src/hir_def/types.rs @@ -571,13 +571,20 @@ impl Type { Type::TypeVariable(var, kind) } - pub fn polymorphic_integer(interner: &mut NodeInterner) -> Type { + pub fn polymorphic_integer_or_field(interner: &mut NodeInterner) -> Type { let id = interner.next_type_variable_id(); let kind = TypeVariableKind::IntegerOrField; let var = TypeVariable::unbound(id); Type::TypeVariable(var, kind) } + pub fn polymorphic_integer(interner: &mut NodeInterner) -> Type { + let id = interner.next_type_variable_id(); + let kind = TypeVariableKind::Integer; + let var = TypeVariable::unbound(id); + Type::TypeVariable(var, kind) + } + /// A bit of an awkward name for this function - this function returns /// true for type variables or polymorphic integers which are unbound. /// NamedGenerics will always be false as although they are bindable, @@ -964,7 +971,7 @@ impl Type { /// Try to bind a PolymorphicInt variable to self, succeeding if self is an integer, field, /// other PolymorphicInt type, or type variable. If successful, the binding is placed in the /// given TypeBindings map rather than linked immediately. - pub fn try_bind_to_polymorphic_int( + fn try_bind_to_polymorphic_int( &self, var: &TypeVariable, bindings: &mut TypeBindings, @@ -977,7 +984,11 @@ impl Type { let this = self.substitute(bindings).follow_bindings(); match &this { - Type::FieldElement | Type::Integer(..) => { + Type::Integer(..) => { + bindings.insert(target_id, (var.clone(), this)); + Ok(()) + } + Type::FieldElement if !only_integer => { bindings.insert(target_id, (var.clone(), this)); Ok(()) } @@ -1672,11 +1683,10 @@ fn convert_array_expression_to_slice( interner.push_expr_location(call, location.span, location.file); interner.push_expr_location(func, location.span, location.file); - interner.push_expr_type(&call, target_type.clone()); - interner.push_expr_type( - &func, - Type::Function(vec![array_type], Box::new(target_type), Box::new(Type::Unit)), - ); + interner.push_expr_type(call, target_type.clone()); + + let func_type = Type::Function(vec![array_type], Box::new(target_type), Box::new(Type::Unit)); + interner.push_expr_type(func, func_type); } impl BinaryTypeOperator { diff --git a/noir/compiler/noirc_frontend/src/lexer/token.rs b/noir/compiler/noirc_frontend/src/lexer/token.rs index f7c07c5f5db..fe12132e202 100644 --- a/noir/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/compiler/noirc_frontend/src/lexer/token.rs @@ -774,6 +774,27 @@ impl Keyword { } } +pub struct Tokens(pub Vec); + +type TokenMapIter = Map, fn(SpannedToken) -> (Token, Span)>; + +impl<'a> From for chumsky::Stream<'a, Token, Span, TokenMapIter> { + fn from(tokens: Tokens) -> Self { + let end_of_input = match tokens.0.last() { + Some(spanned_token) => spanned_token.to_span(), + None => Span::single_char(0), + }; + + fn get_span(token: SpannedToken) -> (Token, Span) { + let span = token.to_span(); + (token.into_token(), span) + } + + let iter = tokens.0.into_iter().map(get_span as fn(_) -> _); + chumsky::Stream::from_iter(end_of_input, iter) + } +} + #[cfg(test)] mod keywords { use strum::IntoEnumIterator; @@ -796,24 +817,3 @@ mod keywords { } } } - -pub struct Tokens(pub Vec); - -type TokenMapIter = Map, fn(SpannedToken) -> (Token, Span)>; - -impl<'a> From for chumsky::Stream<'a, Token, Span, TokenMapIter> { - fn 
from(tokens: Tokens) -> Self { - let end_of_input = match tokens.0.last() { - Some(spanned_token) => spanned_token.to_span(), - None => Span::single_char(0), - }; - - fn get_span(token: SpannedToken) -> (Token, Span) { - let span = token.to_span(); - (token.into_token(), span) - } - - let iter = tokens.0.into_iter().map(get_span as fn(_) -> _); - chumsky::Stream::from_iter(end_of_input, iter) - } -} diff --git a/noir/compiler/noirc_frontend/src/monomorphization/debug.rs b/noir/compiler/noirc_frontend/src/monomorphization/debug.rs index d36816e3d37..a8ff4399f99 100644 --- a/noir/compiler/noirc_frontend/src/monomorphization/debug.rs +++ b/noir/compiler/noirc_frontend/src/monomorphization/debug.rs @@ -8,7 +8,7 @@ use crate::hir_def::expr::*; use crate::node_interner::ExprId; use super::ast::{Expression, Ident}; -use super::Monomorphizer; +use super::{MonomorphizationError, Monomorphizer}; const DEBUG_MEMBER_ASSIGN_PREFIX: &str = "__debug_member_assign_"; const DEBUG_VAR_ID_ARG_SLOT: usize = 0; @@ -39,18 +39,19 @@ impl<'interner> Monomorphizer<'interner> { &mut self, call: &HirCallExpression, arguments: &mut [Expression], - ) { - let original_func = Box::new(self.expr(call.func)); + ) -> Result<(), MonomorphizationError> { + let original_func = Box::new(self.expr(call.func)?); if let Expression::Ident(Ident { name, .. }) = original_func.as_ref() { if name == "__debug_var_assign" { - self.patch_debug_var_assign(call, arguments); + self.patch_debug_var_assign(call, arguments)?; } else if name == "__debug_var_drop" { - self.patch_debug_var_drop(call, arguments); + self.patch_debug_var_drop(call, arguments)?; } else if let Some(arity) = name.strip_prefix(DEBUG_MEMBER_ASSIGN_PREFIX) { let arity = arity.parse::().expect("failed to parse member assign arity"); - self.patch_debug_member_assign(call, arguments, arity); + self.patch_debug_member_assign(call, arguments, arity)?; } } + Ok(()) } /// Update instrumentation code inserted on variable assignment. We need to @@ -59,7 +60,11 @@ impl<'interner> Monomorphizer<'interner> { /// variable are possible if using generic functions, hence the temporary ID /// created when injecting the instrumentation code can map to multiple IDs /// at runtime. - fn patch_debug_var_assign(&mut self, call: &HirCallExpression, arguments: &mut [Expression]) { + fn patch_debug_var_assign( + &mut self, + call: &HirCallExpression, + arguments: &mut [Expression], + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -73,13 +78,18 @@ impl<'interner> Monomorphizer<'interner> { // then update the ID used for tracking at runtime let var_id = self.debug_type_tracker.insert_var(source_var_id, var_type); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } /// Update instrumentation code for a variable being dropped out of scope. /// Given the source_var_id we search for the last assigned debug var_id and /// replace it instead. 
- fn patch_debug_var_drop(&mut self, call: &HirCallExpression, arguments: &mut [Expression]) { + fn patch_debug_var_drop( + &mut self, + call: &HirCallExpression, + arguments: &mut [Expression], + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -92,7 +102,8 @@ impl<'interner> Monomorphizer<'interner> { .get_var_id(source_var_id) .unwrap_or_else(|| unreachable!("failed to find debug variable")); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } /// Update instrumentation code inserted when assigning to a member of an @@ -106,7 +117,7 @@ impl<'interner> Monomorphizer<'interner> { call: &HirCallExpression, arguments: &mut [Expression], arity: usize, - ) { + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -143,13 +154,13 @@ impl<'interner> Monomorphizer<'interner> { let index_id = self.interner.push_expr(HirExpression::Literal( HirLiteral::Integer(field_index.into(), false), )); - self.interner.push_expr_type(&index_id, crate::Type::FieldElement); + self.interner.push_expr_type(index_id, crate::Type::FieldElement); self.interner.push_expr_location( index_id, call.location.span, call.location.file, ); - arguments[DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT + i] = self.expr(index_id); + arguments[DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT + i] = self.expr(index_id)?; } else { // array/string element using constant index cursor_type = element_type_at_index(cursor_type, index as usize); @@ -165,13 +176,14 @@ impl<'interner> Monomorphizer<'interner> { .get_var_id(source_var_id) .unwrap_or_else(|| unreachable!("failed to find debug variable")); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } fn intern_var_id(&mut self, var_id: DebugVarId, location: &Location) -> ExprId { let var_id_literal = HirLiteral::Integer((var_id.0 as u128).into(), false); let expr_id = self.interner.push_expr(HirExpression::Literal(var_id_literal)); - self.interner.push_expr_type(&expr_id, crate::Type::FieldElement); + self.interner.push_expr_type(expr_id, crate::Type::FieldElement); self.interner.push_expr_location(expr_id, location.span, location.file); expr_id } diff --git a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs index f691a0c9065..ce880401d77 100644 --- a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -9,13 +9,14 @@ //! The entry point to this pass is the `monomorphize` function which, starting from a given //! function, will monomorphize the entire reachable program. 
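Stepping back to the `type_check/expr.rs` hunks above: both deleted match arms collapse into `bind_type_variables_for_infix`, which eagerly unifies the operands and then pins them to a fresh type variable whose kind (`Integer` vs `IntegerOrField`) comes from `is_valid_for_field_type`. Below is a compressed, self-contained model of that flow; the toy unifier stands in for `Type::unify`/`TypeBindings` and is far simpler than the real one.

```rust
// Toy model of `bind_type_variables_for_infix`. A kind-`Integer` variable
// refusing to bind to Field is what replaces the old delayed type checks.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Kind { Integer, IntegerOrField }

#[derive(Debug, Clone, PartialEq)]
enum Ty { Field, Unsigned(u8), Var(usize, Kind) }

#[derive(Default)]
struct Ctx { bindings: Vec<Option<Ty>> }

impl Ctx {
    fn fresh(&mut self, kind: Kind) -> Ty {
        self.bindings.push(None);
        Ty::Var(self.bindings.len() - 1, kind)
    }

    fn resolve(&self, t: &Ty) -> Ty {
        match t {
            Ty::Var(id, _) => match &self.bindings[*id] {
                Some(bound) => self.resolve(bound),
                None => t.clone(),
            },
            other => other.clone(),
        }
    }

    fn unify(&mut self, a: &Ty, b: &Ty) -> Result<(), String> {
        match (self.resolve(a), self.resolve(b)) {
            (x, y) if x == y => Ok(()),
            // Mirrors `try_bind_to_polymorphic_int` with `only_integer = true`:
            // a Field may not satisfy a kind-Integer variable.
            (Ty::Var(_, Kind::Integer), Ty::Field) | (Ty::Field, Ty::Var(_, Kind::Integer)) => {
                Err("Field used where an integer is required".into())
            }
            (Ty::Var(id, _), other) | (other, Ty::Var(id, _)) => {
                self.bindings[id] = Some(other);
                Ok(())
            }
            (x, y) => Err(format!("type mismatch: {x:?} vs {y:?}")),
        }
    }

    // Unify lhs with rhs, then pin both to an integer(-or-field) variable
    // chosen by whether the operator is valid for Field.
    fn infix(&mut self, lhs: &Ty, rhs: &Ty, valid_for_field: bool) -> Result<(), String> {
        self.unify(lhs, rhs)?;
        let kind = if valid_for_field { Kind::IntegerOrField } else { Kind::Integer };
        let target = self.fresh(kind);
        self.unify(lhs, &target)
    }
}

fn main() {
    // `x % y` where one operand is a Field: now rejected eagerly.
    let mut ctx = Ctx::default();
    let x = ctx.fresh(Kind::IntegerOrField);
    assert!(ctx.infix(&x, &Ty::Field, false).is_err());

    // `x % (y: u8)`: both sides resolve to u8.
    let mut ctx = Ctx::default();
    let x = ctx.fresh(Kind::IntegerOrField);
    assert!(ctx.infix(&x, &Ty::Unsigned(8), false).is_ok());
    assert_eq!(ctx.resolve(&x), Ty::Unsigned(8));
}
```

Because mismatches now surface during unification, the `delayed_type_checks` queue and `push_delayed_type_check` can be removed wholesale in `type_check/mod.rs`, as the hunks above show.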
use acvm::FieldElement; -use iter_extended::{btree_map, vecmap}; -use noirc_errors::Location; +use iter_extended::{btree_map, try_vecmap, vecmap}; +use noirc_errors::{CustomDiagnostic, FileDiagnostic, Location}; use noirc_printable_type::PrintableType; use std::{ collections::{BTreeMap, HashMap, VecDeque}, unreachable, }; +use thiserror::Error; use crate::{ debug::DebugInstrumenter, @@ -27,8 +28,8 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, IntegerBitSize, Type, TypeBinding, TypeBindings, - TypeVariable, TypeVariableKind, UnaryOp, Visibility, + ContractFunctionType, FunctionKind, IntegerBitSize, Signedness, Type, TypeBinding, + TypeBindings, TypeVariable, TypeVariableKind, UnaryOp, Visibility, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -87,6 +88,40 @@ struct Monomorphizer<'interner> { type HirType = crate::Type; +#[derive(Debug, Error)] +pub enum MonomorphizationError { + #[error("Length of generic array could not be determined.")] + UnknownArrayLength { location: Location }, +} + +impl MonomorphizationError { + fn call_stack(&self) -> Vec { + match self { + MonomorphizationError::UnknownArrayLength { location } => vec![*location], + } + } +} + +impl From for FileDiagnostic { + fn from(error: MonomorphizationError) -> FileDiagnostic { + let call_stack = error.call_stack(); + let file_id = call_stack.last().map(|location| location.file).unwrap_or_default(); + let diagnostic = error.into_diagnostic(); + diagnostic.in_file(file_id).with_call_stack(call_stack) + } +} + +impl MonomorphizationError { + fn into_diagnostic(self) -> CustomDiagnostic { + CustomDiagnostic::simple_error( + "Internal Consistency Evaluators Errors: \n + This is likely a bug. Consider opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), + self.to_string(), + noirc_errors::Span::inclusive(0, 0) + ) + } +} + /// Starting from the given `main` function, monomorphize the entire program, /// replacing all references to type variables and NamedGenerics with concrete /// types, duplicating definitions as necessary to do so. @@ -99,7 +134,10 @@ type HirType = crate::Type; /// this function. Typically, this is the function named "main" in the source project, /// but it can also be, for example, an arbitrary test function for running `nargo test`. 
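The new `MonomorphizationError` above is the reason `thiserror` enters the dependency tree: each variant carries a `Location` so the error can be lowered into a diagnostic pointing at the offending expression. A self-contained sketch of that shape follows, with local stand-ins for `Location`, `FileId` and the diagnostic type; only the `thiserror` usage mirrors the diff, and it assumes `thiserror` is available as a dependency.

```rust
use thiserror::Error;

#[derive(Debug, Clone, Copy)]
struct FileId(u32);

#[derive(Debug, Clone, Copy)]
struct Location {
    file: FileId,
}

// `#[error(...)]` gives us Display (and hence `to_string`) for free.
#[derive(Debug, Error)]
enum MonomorphizationError {
    #[error("Length of generic array could not be determined.")]
    UnknownArrayLength { location: Location },
}

impl MonomorphizationError {
    // Every variant knows where it happened, so the conversion below can
    // attach the right file instead of a generic "compilation failed".
    fn location(&self) -> Location {
        match self {
            MonomorphizationError::UnknownArrayLength { location } => *location,
        }
    }
}

#[derive(Debug)]
struct Diagnostic {
    file: FileId,
    message: String,
}

impl From<MonomorphizationError> for Diagnostic {
    fn from(error: MonomorphizationError) -> Diagnostic {
        Diagnostic { file: error.location().file, message: error.to_string() }
    }
}

fn main() {
    let err = MonomorphizationError::UnknownArrayLength { location: Location { file: FileId(7) } };
    let diag: Diagnostic = err.into();
    println!("{diag:?}");
}
```

The same pattern threads through the rest of this file: `expr`, `statement`, `function` and friends all become `Result`-returning so the error can bubble up to `monomorphize` and be reported as a `FileDiagnostic`.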
#[tracing::instrument(level = "trace", skip(main, interner))] -pub fn monomorphize(main: node_interner::FuncId, interner: &mut NodeInterner) -> Program { +pub fn monomorphize( + main: node_interner::FuncId, + interner: &mut NodeInterner, +) -> Result { monomorphize_debug(main, interner, &DebugInstrumenter::default()) } @@ -107,10 +145,10 @@ pub fn monomorphize_debug( main: node_interner::FuncId, interner: &mut NodeInterner, debug_instrumenter: &DebugInstrumenter, -) -> Program { +) -> Result { let debug_type_tracker = DebugTypeTracker::build_from_debug_instrumenter(debug_instrumenter); let mut monomorphizer = Monomorphizer::new(interner, debug_type_tracker); - let function_sig = monomorphizer.compile_main(main); + let function_sig = monomorphizer.compile_main(main)?; while !monomorphizer.queue.is_empty() { let (next_fn_id, new_id, bindings, trait_method) = monomorphizer.queue.pop_front().unwrap(); @@ -118,7 +156,7 @@ pub fn monomorphize_debug( perform_instantiation_bindings(&bindings); let impl_bindings = monomorphizer.perform_impl_bindings(trait_method, next_fn_id); - monomorphizer.function(next_fn_id, new_id); + monomorphizer.function(next_fn_id, new_id)?; undo_instantiation_bindings(impl_bindings); undo_instantiation_bindings(bindings); } @@ -128,7 +166,7 @@ pub fn monomorphize_debug( monomorphizer.interner.function_meta(&main); let (debug_variables, debug_types) = monomorphizer.debug_type_tracker.extract_vars_and_types(); - Program::new( + let program = Program::new( functions, function_sig, *return_distinctness, @@ -137,7 +175,8 @@ pub fn monomorphize_debug( *kind == FunctionKind::Recursive, debug_variables, debug_types, - ) + ); + Ok(program) } impl<'interner> Monomorphizer<'interner> { @@ -233,10 +272,13 @@ impl<'interner> Monomorphizer<'interner> { self.globals.entry(id).or_default().insert(typ, new_id); } - fn compile_main(&mut self, main_id: node_interner::FuncId) -> FunctionSignature { + fn compile_main( + &mut self, + main_id: node_interner::FuncId, + ) -> Result { let new_main_id = self.next_function_id(); assert_eq!(new_main_id, Program::main_id()); - self.function(main_id, new_main_id); + self.function(main_id, new_main_id)?; self.return_location = self.interner.function(&main_id).block(self.interner).statements().last().and_then( |x| match self.interner.statement(x) { @@ -245,10 +287,14 @@ impl<'interner> Monomorphizer<'interner> { }, ); let main_meta = self.interner.function_meta(&main_id); - main_meta.function_signature() + Ok(main_meta.function_signature()) } - fn function(&mut self, f: node_interner::FuncId, id: FuncId) { + fn function( + &mut self, + f: node_interner::FuncId, + id: FuncId, + ) -> Result<(), MonomorphizationError> { if let Some((self_type, trait_id)) = self.interner.get_function_trait(&f) { let the_trait = self.interner.get_trait(trait_id); the_trait.self_type_typevar.force_bind(self_type); @@ -268,10 +314,11 @@ impl<'interner> Monomorphizer<'interner> { || matches!(modifiers.contract_function_type, Some(ContractFunctionType::Open)); let parameters = self.parameters(&meta.parameters); - let body = self.expr(body_expr_id); + let body = self.expr(body_expr_id)?; let function = ast::Function { id, name, parameters, body, return_type, unconstrained }; self.push_function(id, function); + Ok(()) } fn push_function(&mut self, id: FuncId, function: ast::Function) { @@ -331,15 +378,18 @@ impl<'interner> Monomorphizer<'interner> { } } - fn expr(&mut self, expr: node_interner::ExprId) -> ast::Expression { + fn expr( + &mut self, + expr: node_interner::ExprId, + ) 
-> Result { use ast::Expression::Literal; use ast::Literal::*; - match self.interner.expression(&expr) { - HirExpression::Ident(ident) => self.ident(ident, expr), + let expr = match self.interner.expression(&expr) { + HirExpression::Ident(ident) => self.ident(ident, expr)?, HirExpression::Literal(HirLiteral::Str(contents)) => Literal(Str(contents)), HirExpression::Literal(HirLiteral::FmtStr(contents, idents)) => { - let fields = vecmap(idents, |ident| self.expr(ident)); + let fields = try_vecmap(idents, |ident| self.expr(ident))?; Literal(FmtStr( contents, fields.len() as u64, @@ -367,27 +417,27 @@ impl<'interner> Monomorphizer<'interner> { } } HirExpression::Literal(HirLiteral::Array(array)) => match array { - HirArrayLiteral::Standard(array) => self.standard_array(expr, array), + HirArrayLiteral::Standard(array) => self.standard_array(expr, array)?, HirArrayLiteral::Repeated { repeated_element, length } => { - self.repeated_array(expr, repeated_element, length) + self.repeated_array(expr, repeated_element, length)? } }, HirExpression::Literal(HirLiteral::Unit) => ast::Expression::Block(vec![]), - HirExpression::Block(block) => self.block(block.0), + HirExpression::Block(block) => self.block(block.0)?, HirExpression::Prefix(prefix) => { let location = self.interner.expr_location(&expr); ast::Expression::Unary(ast::Unary { operator: prefix.operator, - rhs: Box::new(self.expr(prefix.rhs)), + rhs: Box::new(self.expr(prefix.rhs)?), result_type: self.convert_type(&self.interner.id_type(expr)), location, }) } HirExpression::Infix(infix) => { - let lhs = self.expr(infix.lhs); - let rhs = self.expr(infix.rhs); + let lhs = self.expr(infix.lhs)?; + let rhs = self.expr(infix.rhs)?; let operator = infix.operator.kind; let location = self.interner.expr_location(&expr); if self.interner.get_selected_impl_for_expression(expr).is_some() { @@ -418,26 +468,27 @@ impl<'interner> Monomorphizer<'interner> { } } - HirExpression::Index(index) => self.index(expr, index), + HirExpression::Index(index) => self.index(expr, index)?, HirExpression::MemberAccess(access) => { let field_index = self.interner.get_field_index(expr); - let expr = Box::new(self.expr(access.lhs)); + let expr = Box::new(self.expr(access.lhs)?); ast::Expression::ExtractTupleField(expr, field_index) } - HirExpression::Call(call) => self.function_call(call, expr), + HirExpression::Call(call) => self.function_call(call, expr)?, HirExpression::Cast(cast) => ast::Expression::Cast(ast::Cast { - lhs: Box::new(self.expr(cast.lhs)), + lhs: Box::new(self.expr(cast.lhs)?), r#type: self.convert_type(&cast.r#type), location: self.interner.expr_location(&expr), }), HirExpression::If(if_expr) => { - let cond = self.expr(if_expr.condition); - let then = self.expr(if_expr.consequence); - let else_ = if_expr.alternative.map(|alt| Box::new(self.expr(alt))); + let cond = self.expr(if_expr.condition)?; + let then = self.expr(if_expr.consequence)?; + let else_ = + if_expr.alternative.map(|alt| self.expr(alt)).transpose()?.map(Box::new); ast::Expression::If(ast::If { condition: Box::new(cond), consequence: Box::new(then), @@ -447,28 +498,30 @@ impl<'interner> Monomorphizer<'interner> { } HirExpression::Tuple(fields) => { - let fields = vecmap(fields, |id| self.expr(id)); + let fields = try_vecmap(fields, |id| self.expr(id))?; ast::Expression::Tuple(fields) } - HirExpression::Constructor(constructor) => self.constructor(constructor, expr), + HirExpression::Constructor(constructor) => self.constructor(constructor, expr)?, - HirExpression::Lambda(lambda) => 
self.lambda(lambda, expr), + HirExpression::Lambda(lambda) => self.lambda(lambda, expr)?, HirExpression::MethodCall(hir_method_call) => { unreachable!("Encountered HirExpression::MethodCall during monomorphization {hir_method_call:?}") } HirExpression::Error => unreachable!("Encountered Error node during monomorphization"), - } + }; + + Ok(expr) } fn standard_array( &mut self, array: node_interner::ExprId, array_elements: Vec, - ) -> ast::Expression { + ) -> Result { let typ = self.convert_type(&self.interner.id_type(array)); - let contents = vecmap(array_elements, |id| self.expr(id)); - ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ })) + let contents = try_vecmap(array_elements, |id| self.expr(id))?; + Ok(ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ }))) } fn repeated_array( @@ -476,48 +529,56 @@ impl<'interner> Monomorphizer<'interner> { array: node_interner::ExprId, repeated_element: node_interner::ExprId, length: HirType, - ) -> ast::Expression { + ) -> Result { let typ = self.convert_type(&self.interner.id_type(array)); - let length = length - .evaluate_to_u64() - .expect("Length of array is unknown when evaluating numeric generic"); + let length = length.evaluate_to_u64().ok_or_else(|| { + let location = self.interner.expr_location(&array); + MonomorphizationError::UnknownArrayLength { location } + })?; - let contents = vecmap(0..length, |_| self.expr(repeated_element)); - ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ })) + let contents = try_vecmap(0..length, |_| self.expr(repeated_element))?; + Ok(ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ }))) } - fn index(&mut self, id: node_interner::ExprId, index: HirIndexExpression) -> ast::Expression { + fn index( + &mut self, + id: node_interner::ExprId, + index: HirIndexExpression, + ) -> Result { let element_type = self.convert_type(&self.interner.id_type(id)); - let collection = Box::new(self.expr(index.collection)); - let index = Box::new(self.expr(index.index)); + let collection = Box::new(self.expr(index.collection)?); + let index = Box::new(self.expr(index.index)?); let location = self.interner.expr_location(&id); - ast::Expression::Index(ast::Index { collection, index, element_type, location }) + Ok(ast::Expression::Index(ast::Index { collection, index, element_type, location })) } - fn statement(&mut self, id: StmtId) -> ast::Expression { + fn statement(&mut self, id: StmtId) -> Result { match self.interner.statement(&id) { HirStatement::Let(let_statement) => self.let_statement(let_statement), HirStatement::Constrain(constrain) => { - let expr = self.expr(constrain.0); + let expr = self.expr(constrain.0)?; let location = self.interner.expr_location(&constrain.0); - let assert_message = - constrain.2.map(|assert_msg_expr| Box::new(self.expr(assert_msg_expr))); - ast::Expression::Constrain(Box::new(expr), location, assert_message) + let assert_message = constrain + .2 + .map(|assert_msg_expr| self.expr(assert_msg_expr)) + .transpose()? 
+ .map(Box::new); + Ok(ast::Expression::Constrain(Box::new(expr), location, assert_message)) } HirStatement::Assign(assign) => self.assign(assign), HirStatement::For(for_loop) => { self.is_range_loop = true; - let start = self.expr(for_loop.start_range); - let end = self.expr(for_loop.end_range); + let start = self.expr(for_loop.start_range)?; + let end = self.expr(for_loop.end_range)?; self.is_range_loop = false; let index_variable = self.next_local_id(); self.define_local(for_loop.identifier.id, index_variable); - let block = Box::new(self.expr(for_loop.block)); + let block = Box::new(self.expr(for_loop.block)?); - ast::Expression::For(ast::For { + Ok(ast::Expression::For(ast::For { index_variable, index_name: self.interner.definition_name(for_loop.identifier.id).to_owned(), index_type: self.convert_type(&self.interner.id_type(for_loop.start_range)), @@ -526,25 +587,30 @@ impl<'interner> Monomorphizer<'interner> { start_range_location: self.interner.expr_location(&for_loop.start_range), end_range_location: self.interner.expr_location(&for_loop.end_range), block, - }) + })) } HirStatement::Expression(expr) => self.expr(expr), - HirStatement::Semi(expr) => ast::Expression::Semi(Box::new(self.expr(expr))), + HirStatement::Semi(expr) => { + self.expr(expr).map(|expr| ast::Expression::Semi(Box::new(expr))) + } HirStatement::Error => unreachable!(), } } - fn let_statement(&mut self, let_statement: HirLetStatement) -> ast::Expression { - let expr = self.expr(let_statement.expression); + fn let_statement( + &mut self, + let_statement: HirLetStatement, + ) -> Result { + let expr = self.expr(let_statement.expression)?; let expected_type = self.interner.id_type(let_statement.expression); - self.unpack_pattern(let_statement.pattern, expr, &expected_type) + Ok(self.unpack_pattern(let_statement.pattern, expr, &expected_type)) } fn constructor( &mut self, constructor: HirConstructorExpression, id: node_interner::ExprId, - ) -> ast::Expression { + ) -> Result { let typ = self.interner.id_type(id); let field_types = unwrap_struct_type(&typ); @@ -561,7 +627,7 @@ impl<'interner> Monomorphizer<'interner> { let typ = self.convert_type(field_type); field_vars.insert(field_name.0.contents.clone(), (new_id, typ)); - let expression = Box::new(self.expr(expr_id)); + let expression = Box::new(self.expr(expr_id)?); new_exprs.push(ast::Expression::Let(ast::Let { id: new_id, @@ -586,11 +652,15 @@ impl<'interner> Monomorphizer<'interner> { // Finally we can return the created Tuple from the new block new_exprs.push(ast::Expression::Tuple(field_idents)); - ast::Expression::Block(new_exprs) + Ok(ast::Expression::Block(new_exprs)) } - fn block(&mut self, statement_ids: Vec) -> ast::Expression { - ast::Expression::Block(vecmap(statement_ids, |id| self.statement(id))) + fn block( + &mut self, + statement_ids: Vec, + ) -> Result { + let stmts = try_vecmap(statement_ids, |id| self.statement(id)); + stmts.map(ast::Expression::Block) } fn unpack_pattern( @@ -696,25 +766,28 @@ impl<'interner> Monomorphizer<'interner> { let mutable = definition.mutable; let definition = self.lookup_local(ident.id)?; - let typ = self.convert_type(&self.interner.id_type(ident.id)); + let typ = self.convert_type(&self.interner.definition_type(ident.id)); Some(ast::Ident { location: Some(ident.location), mutable, definition, name, typ }) } - fn ident(&mut self, ident: HirIdent, expr_id: node_interner::ExprId) -> ast::Expression { + fn ident( + &mut self, + ident: HirIdent, + expr_id: node_interner::ExprId, + ) -> Result { let typ = 
self.interner.id_type(expr_id); if let ImplKind::TraitMethod(method, _, _) = ident.impl_kind { - return self.resolve_trait_method_reference(expr_id, typ, method); + return Ok(self.resolve_trait_method_reference(expr_id, typ, method)); } let definition = self.interner.definition(ident.id); - match &definition.kind { + let ident = match &definition.kind { DefinitionKind::Function(func_id) => { let mutable = definition.mutable; let location = Some(ident.location); let name = definition.name.clone(); - let typ = self.interner.id_type(expr_id); let definition = self.lookup_function(*func_id, expr_id, &typ, None); let typ = self.convert_type(&typ); let ident = ast::Ident { location, mutable, definition, name, typ: typ.clone() }; @@ -733,9 +806,11 @@ impl<'interner> Monomorphizer<'interner> { } DefinitionKind::Global(global_id) => { let Some(let_) = self.interner.get_global_let_statement(*global_id) else { - unreachable!("Globals should have a corresponding let statement by monomorphization") + unreachable!( + "Globals should have a corresponding let statement by monomorphization" + ) }; - self.expr(let_.expression) + self.expr(let_.expression)? } DefinitionKind::Local(_) => self.lookup_captured_expr(ident.id).unwrap_or_else(|| { let ident = self.local_ident(&ident).unwrap(); @@ -753,9 +828,12 @@ impl<'interner> Monomorphizer<'interner> { let value = FieldElement::from(value as u128); let location = self.interner.id_location(expr_id); - ast::Expression::Literal(ast::Literal::Integer(value, ast::Type::Field, location)) + let typ = self.convert_type(&typ); + ast::Expression::Literal(ast::Literal::Integer(value, typ, location)) } - } + }; + + Ok(ident) } /// Convert a non-tuple/struct type to a monomorphized type @@ -947,12 +1025,12 @@ impl<'interner> Monomorphizer<'interner> { &mut self, call: HirCallExpression, id: node_interner::ExprId, - ) -> ast::Expression { - let original_func = Box::new(self.expr(call.func)); - let mut arguments = vecmap(&call.arguments, |id| self.expr(*id)); + ) -> Result { + let original_func = Box::new(self.expr(call.func)?); + let mut arguments = try_vecmap(&call.arguments, |id| self.expr(*id))?; let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); - self.patch_debug_instrumentation_call(&call, &mut arguments); + self.patch_debug_instrumentation_call(&call, &mut arguments)?; let return_type = self.interner.id_type(id); let return_type = self.convert_type(&return_type); @@ -967,7 +1045,7 @@ impl<'interner> Monomorphizer<'interner> { // The second argument is expected to always be an ident self.append_printable_type_info(&hir_arguments[1], &mut arguments); } else if name.as_str() == "assert_message" { - // The first argument to the `assert_message` oracle is the expression passed as a mesage to an `assert` or `assert_eq` statement + // The first argument to the `assert_message` oracle is the expression passed as a message to an `assert` or `assert_eq` statement self.append_printable_type_info(&hir_arguments[0], &mut arguments); } } @@ -1015,9 +1093,9 @@ impl<'interner> Monomorphizer<'interner> { if !block_expressions.is_empty() { block_expressions.push(call); - ast::Expression::Block(block_expressions) + Ok(ast::Expression::Block(block_expressions)) } else { - call + Ok(call) } } @@ -1038,7 +1116,7 @@ impl<'interner> Monomorphizer<'interner> { ) { match hir_argument { HirExpression::Ident(ident) => { - let typ = self.interner.id_type(ident.id); + let typ = self.interner.definition_type(ident.id); let typ: Type = typ.follow_bindings(); let 
is_fmt_str = match typ { // A format string has many different possible types that need to be handled. @@ -1105,7 +1183,8 @@ impl<'interner> Monomorphizer<'interner> { return match opcode.as_str() { "modulus_num_bits" => { let bits = (FieldElement::max_num_bits() as u128).into(); - let typ = ast::Type::Field; + let typ = + ast::Type::Integer(Signedness::Unsigned, IntegerBitSize::SixtyFour); Some(ast::Expression::Literal(ast::Literal::Integer(bits, typ, location))) } "zeroed" => { @@ -1187,47 +1266,59 @@ impl<'interner> Monomorphizer<'interner> { .collect() } - fn assign(&mut self, assign: HirAssignStatement) -> ast::Expression { - let expression = Box::new(self.expr(assign.expression)); - let lvalue = self.lvalue(assign.lvalue); - ast::Expression::Assign(ast::Assign { expression, lvalue }) + fn assign( + &mut self, + assign: HirAssignStatement, + ) -> Result { + let expression = Box::new(self.expr(assign.expression)?); + let lvalue = self.lvalue(assign.lvalue)?; + Ok(ast::Expression::Assign(ast::Assign { expression, lvalue })) } - fn lvalue(&mut self, lvalue: HirLValue) -> ast::LValue { - match lvalue { + fn lvalue(&mut self, lvalue: HirLValue) -> Result { + let value = match lvalue { HirLValue::Ident(ident, _) => self .lookup_captured_lvalue(ident.id) .unwrap_or_else(|| ast::LValue::Ident(self.local_ident(&ident).unwrap())), HirLValue::MemberAccess { object, field_index, .. } => { let field_index = field_index.unwrap(); - let object = Box::new(self.lvalue(*object)); + let object = Box::new(self.lvalue(*object)?); ast::LValue::MemberAccess { object, field_index } } HirLValue::Index { array, index, typ } => { let location = self.interner.expr_location(&index); - let array = Box::new(self.lvalue(*array)); - let index = Box::new(self.expr(index)); + let array = Box::new(self.lvalue(*array)?); + let index = Box::new(self.expr(index)?); let element_type = self.convert_type(&typ); ast::LValue::Index { array, index, element_type, location } } HirLValue::Dereference { lvalue, element_type } => { - let reference = Box::new(self.lvalue(*lvalue)); + let reference = Box::new(self.lvalue(*lvalue)?); let element_type = self.convert_type(&element_type); ast::LValue::Dereference { reference, element_type } } - } + }; + + Ok(value) } - fn lambda(&mut self, lambda: HirLambda, expr: node_interner::ExprId) -> ast::Expression { + fn lambda( + &mut self, + lambda: HirLambda, + expr: node_interner::ExprId, + ) -> Result { if lambda.captures.is_empty() { self.lambda_no_capture(lambda) } else { - let (setup, closure_variable) = self.lambda_with_setup(lambda, expr); - ast::Expression::Block(vec![setup, closure_variable]) + let (setup, closure_variable) = self.lambda_with_setup(lambda, expr)?; + Ok(ast::Expression::Block(vec![setup, closure_variable])) } } - fn lambda_no_capture(&mut self, lambda: HirLambda) -> ast::Expression { + fn lambda_no_capture( + &mut self, + lambda: HirLambda, + ) -> Result { let ret_type = self.convert_type(&lambda.return_type); let lambda_name = "lambda"; let parameter_types = vecmap(&lambda.parameters, |(_, typ)| self.convert_type(typ)); @@ -1237,7 +1328,7 @@ impl<'interner> Monomorphizer<'interner> { vecmap(lambda.parameters, |(pattern, typ)| (pattern, typ, Visibility::Private)).into(); let parameters = self.parameters(¶meters); - let body = self.expr(lambda.body); + let body = self.expr(lambda.body)?; let id = self.next_function_id(); let return_type = ret_type.clone(); @@ -1251,20 +1342,20 @@ impl<'interner> Monomorphizer<'interner> { ast::Type::Function(parameter_types, 
Box::new(ret_type), Box::new(ast::Type::Unit)); let name = lambda_name.to_owned(); - ast::Expression::Ident(ast::Ident { + Ok(ast::Expression::Ident(ast::Ident { definition: Definition::Function(id), mutable: false, location: None, name, typ, - }) + })) } fn lambda_with_setup( &mut self, lambda: HirLambda, expr: node_interner::ExprId, - ) -> (ast::Expression, ast::Expression) { + ) -> Result<(ast::Expression, ast::Expression), MonomorphizationError> { // returns (, ) // which can be used directly in callsites or transformed // directly to a single `Expression` @@ -1340,7 +1431,7 @@ impl<'interner> Monomorphizer<'interner> { self.lambda_envs_stack .push(LambdaContext { env_ident: env_ident.clone(), captures: lambda.captures }); - let body = self.expr(lambda.body); + let body = self.expr(lambda.body)?; self.lambda_envs_stack.pop(); let lambda_fn_typ: ast::Type = @@ -1382,7 +1473,7 @@ impl<'interner> Monomorphizer<'interner> { typ: ast::Type::Tuple(vec![env_typ, lambda_fn_typ]), }); - (block_let_stmt, closure_ident) + Ok((block_let_stmt, closure_ident)) } /// Implements std::unsafe::zeroed by returning an appropriate zeroed diff --git a/noir/compiler/noirc_frontend/src/node_interner.rs b/noir/compiler/noirc_frontend/src/node_interner.rs index c2eeb2c0c50..5de43e59254 100644 --- a/noir/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/compiler/noirc_frontend/src/node_interner.rs @@ -75,13 +75,14 @@ pub struct NodeInterner { // Type checking map // - // Notice that we use `Index` as the Key and not an ExprId or IdentId - // Therefore, If a raw index is passed in, then it is not safe to assume that it will have - // a Type, as not all Ids have types associated to them. - // Further note, that an ExprId and an IdentId will never have the same underlying Index - // Because we use one Arena to store all Definitions/Nodes + // This should only be used with indices from the `nodes` arena. + // Otherwise the indices used may overwrite other existing indices. + // Each type for each index is filled in during type checking. id_to_type: HashMap, + // Similar to `id_to_type` but maps definitions to their type + definition_to_type: HashMap, + // Struct map. // // Each struct definition is possibly shared across multiple type nodes. @@ -277,12 +278,6 @@ impl DefinitionId { } } -impl From for Index { - fn from(id: DefinitionId) -> Self { - Index::from_raw_parts(id.0, u64::MAX) - } -} - /// An ID for a global value #[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)] pub struct GlobalId(usize); @@ -302,7 +297,7 @@ impl StmtId { // This can be anything, as the program will ultimately fail // after resolution pub fn dummy_id() -> StmtId { - StmtId(Index::from_raw_parts(std::usize::MAX, 0)) + StmtId(Index::dummy()) } } @@ -311,7 +306,7 @@ pub struct ExprId(Index); impl ExprId { pub fn empty_block_id() -> ExprId { - ExprId(Index::from_raw_parts(0, 0)) + ExprId(Index::unsafe_zeroed()) } } #[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)] @@ -322,7 +317,7 @@ impl FuncId { // This can be anything, as the program will ultimately fail // after resolution pub fn dummy_id() -> FuncId { - FuncId(Index::from_raw_parts(std::usize::MAX, 0)) + FuncId(Index::dummy()) } } @@ -396,23 +391,9 @@ macro_rules! into_index { }; } -macro_rules! 
partialeq { - ($id_type:ty) => { - impl PartialEq for &$id_type { - fn eq(&self, other: &usize) -> bool { - let (index, _) = self.0.into_raw_parts(); - index == *other - } - } - }; -} - into_index!(ExprId); into_index!(StmtId); -partialeq!(ExprId); -partialeq!(StmtId); - /// A Definition enum specifies anything that we can intern in the NodeInterner /// We use one Arena for all types that can be interned as that has better cache locality /// This data structure is never accessed directly, so API wise there is no difference between using @@ -496,6 +477,7 @@ impl Default for NodeInterner { id_to_location: HashMap::new(), definitions: vec![], id_to_type: HashMap::new(), + definition_to_type: HashMap::new(), structs: HashMap::new(), struct_attributes: HashMap::new(), type_aliases: Vec::new(), @@ -545,10 +527,15 @@ impl NodeInterner { } /// Store the type for an interned expression - pub fn push_expr_type(&mut self, expr_id: &ExprId, typ: Type) { + pub fn push_expr_type(&mut self, expr_id: ExprId, typ: Type) { self.id_to_type.insert(expr_id.into(), typ); } + /// Store the type for an interned expression + pub fn push_definition_type(&mut self, definition_id: DefinitionId, typ: Type) { + self.definition_to_type.insert(definition_id, typ); + } + pub fn push_empty_trait(&mut self, type_id: TraitId, unresolved_trait: &UnresolvedTrait) { let self_type_typevar_id = self.next_type_variable_id(); @@ -660,11 +647,6 @@ impl NodeInterner { } } - /// Store the type for an interned Identifier - pub fn push_definition_type(&mut self, definition_id: DefinitionId, typ: Type) { - self.id_to_type.insert(definition_id.into(), typ); - } - /// Store [Location] of [Type] reference pub fn push_type_ref_location(&mut self, typ: Type, location: Location) { self.type_ref_locations.push((typ, location)); @@ -980,8 +962,13 @@ impl NodeInterner { self.id_to_type.get(&index.into()).cloned().unwrap_or(Type::Error) } + /// Returns the type of the definition or `Type::Error` if it was not found. + pub fn definition_type(&self, id: DefinitionId) -> Type { + self.definition_to_type.get(&id).cloned().unwrap_or(Type::Error) + } + pub fn id_type_substitute_trait_as_type(&self, def_id: DefinitionId) -> Type { - let typ = self.id_type(def_id); + let typ = self.definition_type(def_id); if let Type::Function(args, ret, env) = &typ { let def = self.definition(def_id); if let Type::TraitAsType(..) = ret.as_ref() { @@ -1147,7 +1134,7 @@ impl NodeInterner { }) .collect() }) - .unwrap_or(vec![]) + .unwrap_or_default() } /// Similar to `lookup_trait_implementation` but does not apply any type bindings on success. 
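The `node_interner.rs` hunks above split what used to be a single `id_to_type` map: expression types stay keyed by the node arena `Index`, while definition types move into their own `definition_to_type` map keyed by `DefinitionId`. The old design faked an arena `Index` out of a `DefinitionId` (the deleted `From` impl using `u64::MAX` as the generation), which the new `Vec`-backed arena can no longer support. A sketch with stand-in types:

```rust
use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct Index(usize);

#[derive(Debug, Clone, Copy)]
struct ExprId(Index);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct DefinitionId(usize);

#[derive(Debug, Clone, PartialEq)]
enum Type { Error, FieldElement, Bool }

#[derive(Default)]
struct Interner {
    // Only safe with indices from the node arena, as the new comment warns.
    id_to_type: HashMap<Index, Type>,
    // Definitions get their own keyspace; no Index-faking required.
    definition_to_type: HashMap<DefinitionId, Type>,
}

impl Interner {
    fn push_expr_type(&mut self, expr_id: ExprId, typ: Type) {
        self.id_to_type.insert(expr_id.0, typ);
    }

    fn push_definition_type(&mut self, id: DefinitionId, typ: Type) {
        self.definition_to_type.insert(id, typ);
    }

    // Stand-in for the real `id_type`: missing entries fall back to Error.
    fn expr_type(&self, expr_id: ExprId) -> Type {
        self.id_to_type.get(&expr_id.0).cloned().unwrap_or(Type::Error)
    }

    fn definition_type(&self, id: DefinitionId) -> Type {
        self.definition_to_type.get(&id).cloned().unwrap_or(Type::Error)
    }
}

fn main() {
    let mut interner = Interner::default();
    interner.push_expr_type(ExprId(Index(0)), Type::Bool);
    interner.push_definition_type(DefinitionId(0), Type::FieldElement);
    assert_eq!(interner.expr_type(ExprId(Index(0))), Type::Bool);
    assert_eq!(interner.definition_type(DefinitionId(0)), Type::FieldElement);
    assert_eq!(interner.definition_type(DefinitionId(1)), Type::Error);
}
```

Note also that `push_expr_type` now takes `ExprId` by value; the ids are `Copy`, so the old `&ExprId` parameter only added noise at every call site.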
@@ -1670,7 +1657,7 @@ impl Methods { for method in self.iter() { match interner.function_meta(&method).typ.instantiate(interner).0 { Type::Function(args, _, _) => { - if let Some(object) = args.get(0) { + if let Some(object) = args.first() { let mut bindings = TypeBindings::new(); if object.try_unify(typ, &mut bindings).is_ok() { diff --git a/noir/compiler/noirc_frontend/src/parser/parser.rs b/noir/compiler/noirc_frontend/src/parser/parser.rs index 8bcd7670716..1cb81e26a0a 100644 --- a/noir/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/compiler/noirc_frontend/src/parser/parser.rs @@ -833,7 +833,7 @@ where ignore_then_commit(keyword(Keyword::Assert), parenthesized(argument_parser)) .labelled(ParsingRuleLabel::Statement) .validate(|expressions, span, _| { - let condition = expressions.get(0).unwrap_or(&Expression::error(span)).clone(); + let condition = expressions.first().unwrap_or(&Expression::error(span)).clone(); let message = expressions.get(1).cloned(); StatementKind::Constrain(ConstrainStatement(condition, message, ConstrainKind::Assert)) }) @@ -851,7 +851,7 @@ where .validate(|exprs: Vec, span, _| { let predicate = Expression::new( ExpressionKind::Infix(Box::new(InfixExpression { - lhs: exprs.get(0).unwrap_or(&Expression::error(span)).clone(), + lhs: exprs.first().unwrap_or(&Expression::error(span)).clone(), rhs: exprs.get(1).unwrap_or(&Expression::error(span)).clone(), operator: Spanned::from(span, BinaryOpKind::Equal), })), @@ -2483,7 +2483,7 @@ mod test { #[test] fn return_validation() { - let cases = vec![ + let cases = [ Case { source: "{ return 42; }", expect: concat!("{\n", " Error\n", "}",), @@ -2512,7 +2512,7 @@ mod test { #[test] fn expr_no_constructors() { - let cases = vec![ + let cases = [ Case { source: "{ if structure { a: 1 } {} }", expect: concat!( @@ -2567,10 +2567,10 @@ mod test { #[test] fn parse_raw_string_expr() { let cases = vec![ - Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r#" r"foo" "#, expect: r#"r"foo""#, errors: 0 }, Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, // backslash - Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r#" r"\\" "#, expect: r#"r"\\""#, errors: 0 }, Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, @@ -2582,27 +2582,27 @@ mod test { }, Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, // mismatch - errors: - Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r###" r#"foo"## "###, expect: r##"r#"foo"#"##, errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, // mismatch: short: - Case { source: r###" r"foo"# "###, expect: r###"r"foo""###, errors: 1 }, - Case { source: r###" r#"foo" "###, expect: "(none)", errors: 2 }, + Case { source: r##" r"foo"# "##, expect: r#"r"foo""#, errors: 1 }, + Case { source: r#" r#"foo" "#, expect: "(none)", errors: 2 }, // empty string - Case { source: r####"r"""####, expect: r####"r"""####, errors: 0 }, + Case { source: r#"r"""#, expect: r#"r"""#, errors: 0 }, Case { source: r####"r###""###"####, expect: r####"r###""###"####, errors: 0 }, // miscellaneous - Case { source: r###" r#\"foo\"# "###, expect: "plain::r", errors: 2 }, - Case { source: r###" r\"foo\" "###, 
expect: "plain::r", errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r##" r#\"foo\"# "##, expect: "plain::r", errors: 2 }, + Case { source: r#" r\"foo\" "#, expect: "plain::r", errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, // missing 'r' letter - Case { source: r###" ##"foo"# "###, expect: r#""foo""#, errors: 2 }, - Case { source: r###" #"foo" "###, expect: "plain::foo", errors: 2 }, + Case { source: r##" ##"foo"# "##, expect: r#""foo""#, errors: 2 }, + Case { source: r#" #"foo" "#, expect: "plain::foo", errors: 2 }, // whitespace - Case { source: r###" r #"foo"# "###, expect: "plain::r", errors: 2 }, - Case { source: r###" r# "foo"# "###, expect: "plain::r", errors: 3 }, - Case { source: r###" r#"foo" # "###, expect: "(none)", errors: 2 }, + Case { source: r##" r #"foo"# "##, expect: "plain::r", errors: 2 }, + Case { source: r##" r# "foo"# "##, expect: "plain::r", errors: 3 }, + Case { source: r#" r#"foo" # "#, expect: "(none)", errors: 2 }, // after identifier - Case { source: r###" bar#"foo"# "###, expect: "plain::bar", errors: 2 }, + Case { source: r##" bar#"foo"# "##, expect: "plain::bar", errors: 2 }, // nested Case { source: r###"r##"foo r#"bar"# r"baz" ### bye"##"###, @@ -2617,10 +2617,10 @@ mod test { #[test] fn parse_raw_string_lit() { let lit_cases = vec![ - Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r#" r"foo" "#, expect: r#"r"foo""#, errors: 0 }, Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, // backslash - Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r#" r"\\" "#, expect: r#"r"\\""#, errors: 0 }, Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, @@ -2632,8 +2632,8 @@ mod test { }, Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, // mismatch - errors: - Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r###" r#"foo"## "###, expect: r##"r#"foo"#"##, errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, ]; check_cases_with_errors(&lit_cases[..], literal()); diff --git a/noir/compiler/noirc_frontend/src/tests.rs b/noir/compiler/noirc_frontend/src/tests.rs index 8a56b337398..c661cc92eef 100644 --- a/noir/compiler/noirc_frontend/src/tests.rs +++ b/noir/compiler/noirc_frontend/src/tests.rs @@ -956,7 +956,7 @@ mod test { #[test] fn resolve_for_expr() { let src = r#" - fn main(x : Field) { + fn main(x : u64) { for i in 1..20 { let _z = x + i; }; @@ -1130,7 +1130,7 @@ mod test { fn check_rewrite(src: &str, expected: &str) { let (_program, mut context, _errors) = get_program(src); let main_func_id = context.def_interner.find_function("main").unwrap(); - let program = monomorphize(main_func_id, &mut context.def_interner); + let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); assert!(format!("{}", program) == expected); } diff --git a/noir/compiler/utils/arena/Cargo.toml b/noir/compiler/utils/arena/Cargo.toml index e82201a2cf4..41c6ebc9a8b 100644 --- a/noir/compiler/utils/arena/Cargo.toml +++ b/noir/compiler/utils/arena/Cargo.toml @@ -4,8 +4,3 @@ version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true - 
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -generational-arena = "0.2.8" diff --git a/noir/compiler/utils/arena/src/lib.rs b/noir/compiler/utils/arena/src/lib.rs index fc19f44ab6e..2d117304e16 100644 --- a/noir/compiler/utils/arena/src/lib.rs +++ b/noir/compiler/utils/arena/src/lib.rs @@ -3,5 +3,89 @@ #![warn(unreachable_pub)] #![warn(clippy::semicolon_if_nothing_returned)] -// For now we use a wrapper around generational-arena -pub use generational_arena::{Arena, Index}; +#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)] +pub struct Index(usize); + +impl Index { + #[cfg(test)] + pub fn test_new(index: usize) -> Index { + Self(index) + } + + /// Return a dummy index (max value internally). + /// This should be avoided over `Option<Index>` if possible. + pub fn dummy() -> Self { + Self(usize::MAX) + } + + /// Return the zeroed index. This is unsafe since we don't know + /// if this is a valid index for any particular map yet. + pub fn unsafe_zeroed() -> Self { + Self(0) + } +} + +#[derive(Clone, Debug)] +pub struct Arena<T> { + pub vec: Vec<T>, +} + +impl<T> Default for Arena<T> { + fn default() -> Self { + Self { vec: Vec::new() } + } +} + +impl<T> core::ops::Index<Index> for Arena<T> { + type Output = T; + + fn index(&self, index: Index) -> &Self::Output { + self.vec.index(index.0) + } +} + +impl<T> core::ops::IndexMut<Index> for Arena<T> { + fn index_mut(&mut self, index: Index) -> &mut Self::Output { + self.vec.index_mut(index.0) + } +} + +impl<T> IntoIterator for Arena<T> { + type Item = T; + + type IntoIter = <Vec<T> as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.vec.into_iter() + } +} + +impl<'a, T> IntoIterator for &'a Arena<T> { + type Item = &'a T; + + type IntoIter = <&'a Vec<T> as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.vec.iter() + } +} + +impl<T> Arena<T> { + pub fn insert(&mut self, item: T) -> Index { + let index = self.vec.len(); + self.vec.push(item); + Index(index) + } + + pub fn get(&self, index: Index) -> Option<&T> { + self.vec.get(index.0) + } + + pub fn get_mut(&mut self, index: Index) -> Option<&mut T> { + self.vec.get_mut(index.0) + } + + pub fn iter(&self) -> impl Iterator<Item = (Index, &T)> { + self.vec.iter().enumerate().map(|(index, item)| (Index(index), item)) + } +} diff --git a/noir/compiler/wasm/package.json b/noir/compiler/wasm/package.json index b71058b3367..67584a2def1 100644 --- a/noir/compiler/wasm/package.json +++ b/noir/compiler/wasm/package.json @@ -61,10 +61,10 @@ "assert": "^2.1.0", "browserify-fs": "^1.0.0", "chai": "^4.3.10", - "copy-webpack-plugin": "^11.0.0", + "copy-webpack-plugin": "^12.0.2", "eslint": "^8.56.0", "eslint-plugin-prettier": "^5.0.0", - "html-webpack-plugin": "^5.5.4", + "html-webpack-plugin": "^5.6.0", "memfs": "^4.6.0", "mocha": "^10.2.0", "mocha-each": "^2.0.1", @@ -78,8 +78,9 @@ "typescript": "~5.2.2", "unzipit": "^1.4.3", "url": "^0.11.3", - "webpack": "^5.49.0", - "webpack-cli": "^4.7.2" + "webpack": "^5.90.1", + "webpack-cli": "^5.1.4", + "webpack-dev-server": "^5.0.0" }, "dependencies": { "@noir-lang/types": "workspace:*", diff --git a/noir/compiler/wasm/src/compile.rs b/noir/compiler/wasm/src/compile.rs index c8b1680bc00..9e6fca1126e 100644 --- a/noir/compiler/wasm/src/compile.rs +++ b/noir/compiler/wasm/src/compile.rs @@ -1,19 +1,21 @@ use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; -use nargo::artifacts::{ - contract::{ContractArtifact, ContractFunctionArtifact}, - program::ProgramArtifact, +use nargo::{ + 
artifacts::{ + contract::{ContractArtifact, ContractFunctionArtifact}, + program::ProgramArtifact, + }, + parse_all, }; use noirc_driver::{ - add_dep, compile_contract, compile_main, file_manager_with_stdlib, prepare_crate, - prepare_dependency, CompileOptions, CompiledContract, CompiledProgram, + add_dep, file_manager_with_stdlib, prepare_crate, prepare_dependency, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, }; use noirc_evaluator::errors::SsaReport; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::{def_map::parse_file, Context, ParsedFiles}, + hir::Context, }; use serde::Deserialize; use std::{collections::HashMap, path::Path}; @@ -60,51 +62,64 @@ extern "C" { #[derive(Clone, Debug, PartialEq, Eq)] pub type JsDependencyGraph; - #[wasm_bindgen(extends = Object, js_name = "CompileResult", typescript_type = "CompileResult")] + #[wasm_bindgen(extends = Object, js_name = "ProgramCompileResult", typescript_type = "ProgramCompileResult")] #[derive(Clone, Debug, PartialEq, Eq)] - pub type JsCompileResult; + pub type JsCompileProgramResult; #[wasm_bindgen(constructor, js_class = "Object")] - fn constructor() -> JsCompileResult; + fn constructor() -> JsCompileProgramResult; + + #[wasm_bindgen(extends = Object, js_name = "ContractCompileResult", typescript_type = "ContractCompileResult")] + #[derive(Clone, Debug, PartialEq, Eq)] + pub type JsCompileContractResult; + + #[wasm_bindgen(constructor, js_class = "Object")] + fn constructor() -> JsCompileContractResult; } -impl JsCompileResult { - const CONTRACT_PROP: &'static str = "contract"; +impl JsCompileProgramResult { const PROGRAM_PROP: &'static str = "program"; const WARNINGS_PROP: &'static str = "warnings"; - pub fn new(resp: CompileResult) -> JsCompileResult { - let obj = JsCompileResult::constructor(); - match resp { - CompileResult::Contract { contract, warnings } => { - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::CONTRACT_PROP), - &::from_serde(&contract).unwrap(), - ) - .unwrap(); - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::WARNINGS_PROP), - &::from_serde(&warnings).unwrap(), - ) - .unwrap(); - } - CompileResult::Program { program, warnings } => { - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::PROGRAM_PROP), - &::from_serde(&program).unwrap(), - ) - .unwrap(); - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::WARNINGS_PROP), - &::from_serde(&warnings).unwrap(), - ) - .unwrap(); - } - }; + pub fn new(program: ProgramArtifact, warnings: Vec) -> JsCompileProgramResult { + let obj = JsCompileProgramResult::constructor(); + + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileProgramResult::PROGRAM_PROP), + &::from_serde(&program).unwrap(), + ) + .unwrap(); + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileProgramResult::WARNINGS_PROP), + &::from_serde(&warnings).unwrap(), + ) + .unwrap(); + + obj + } +} + +impl JsCompileContractResult { + const CONTRACT_PROP: &'static str = "contract"; + const WARNINGS_PROP: &'static str = "warnings"; + + pub fn new(contract: ContractArtifact, warnings: Vec) -> JsCompileContractResult { + let obj = JsCompileContractResult::constructor(); + + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileContractResult::CONTRACT_PROP), + &::from_serde(&contract).unwrap(), + ) + .unwrap(); + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileContractResult::WARNINGS_PROP), + &::from_serde(&warnings).unwrap(), + ) + .unwrap(); obj } @@ -140,77 +155,98 @@ impl PathToFileSourceMap { } } -pub(crate) fn 
parse_all(fm: &FileManager) -> ParsedFiles { - fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect() -} - -pub enum CompileResult { - Contract { contract: ContractArtifact, warnings: Vec }, - Program { program: ProgramArtifact, warnings: Vec }, -} - #[wasm_bindgen] -pub fn compile( +pub fn compile_program( entry_point: String, - contracts: Option, dependency_graph: Option, file_source_map: PathToFileSourceMap, -) -> Result { +) -> Result { console_error_panic_hook::set_once(); - - let dependency_graph: DependencyGraph = if let Some(dependency_graph) = dependency_graph { - ::into_serde(&JsValue::from(dependency_graph)) - .map_err(|err| err.to_string())? - } else { - DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() } - }; - - let fm = file_manager_with_source_map(file_source_map); - let parsed_files = parse_all(&fm); - let mut context = Context::new(fm, parsed_files); - - let path = Path::new(&entry_point); - let crate_id = prepare_crate(&mut context, path); - - process_dependency_graph(&mut context, dependency_graph); + let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; - if contracts.unwrap_or_default() { - let compiled_contract = compile_contract(&mut context, crate_id, &compile_options) + let compiled_program = + noirc_driver::compile_main(&mut context, crate_id, &compile_options, None) .map_err(|errs| { CompileError::with_file_diagnostics( - "Failed to compile contract", + "Failed to compile program", errs, &context.file_manager, ) })? .0; - let optimized_contract = - nargo::ops::transform_contract(compiled_contract, expression_width); + let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let warnings = optimized_program.warnings.clone(); - let compile_output = generate_contract_artifact(optimized_contract); - Ok(JsCompileResult::new(compile_output)) - } else { - let compiled_program = compile_main(&mut context, crate_id, &compile_options, None) + Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) +} + +#[wasm_bindgen] +pub fn compile_contract( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { + console_error_panic_hook::set_once(); + let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; + + let compile_options = CompileOptions::default(); + // For now we default to a bounded width of 3, though we can add it as a parameter + let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + + let compiled_contract = + noirc_driver::compile_contract(&mut context, crate_id, &compile_options) .map_err(|errs| { CompileError::with_file_diagnostics( - "Failed to compile program", + "Failed to compile contract", errs, &context.file_manager, ) })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let optimized_contract = nargo::ops::transform_contract(compiled_contract, expression_width); - let compile_output = generate_program_artifact(optimized_program); - Ok(JsCompileResult::new(compile_output)) - } + let functions = + optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); + + let contract_artifact = ContractArtifact { + noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), + name: optimized_contract.name, + functions, + events: optimized_contract.events, + file_map: optimized_contract.file_map, + }; + + Ok(JsCompileContractResult::new(contract_artifact, optimized_contract.warnings)) +} + +fn prepare_context( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result<(CrateId, Context<'static, 'static>), JsCompileError> { + let dependency_graph: DependencyGraph = if let Some(dependency_graph) = dependency_graph { + ::into_serde(&JsValue::from(dependency_graph)) + .map_err(|err| err.to_string())? + } else { + DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() } + }; + + let fm = file_manager_with_source_map(file_source_map); + let parsed_files = parse_all(&fm); + let mut context = Context::new(fm, parsed_files); + + let path = Path::new(&entry_point); + let crate_id = prepare_crate(&mut context, path); + + process_dependency_graph(&mut context, dependency_graph); + + Ok((crate_id, context)) } // Create a new FileManager with the given source map @@ -270,35 +306,15 @@ fn add_noir_lib(context: &mut Context, library_name: &CrateName) -> CrateId { prepare_dependency(context, &path_to_lib) } -pub(crate) fn generate_program_artifact(program: CompiledProgram) -> CompileResult { - let warnings = program.warnings.clone(); - CompileResult::Program { program: program.into(), warnings } -} - -pub(crate) fn generate_contract_artifact(contract: CompiledContract) -> CompileResult { - let functions = contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); - - let contract_artifact = ContractArtifact { - noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), - name: contract.name, - functions, - events: contract.events, - file_map: contract.file_map, - }; - - CompileResult::Contract { contract: contract_artifact, warnings: contract.warnings } -} - #[cfg(test)] mod test { + use nargo::parse_all; use noirc_driver::prepare_crate; use noirc_frontend::{graph::CrateName, hir::Context}; use crate::compile::PathToFileSourceMap; - use super::{ - file_manager_with_source_map, parse_all, process_dependency_graph, DependencyGraph, - }; + use super::{file_manager_with_source_map, process_dependency_graph, DependencyGraph}; use std::{collections::HashMap, path::Path}; fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static, 'static> { diff --git a/noir/compiler/wasm/src/compile_new.rs b/noir/compiler/wasm/src/compile_new.rs index f8fbed4f470..d6b382f669f 100644 --- a/noir/compiler/wasm/src/compile_new.rs +++ b/noir/compiler/wasm/src/compile_new.rs @@ -1,10 +1,13 @@ use crate::compile::{ - file_manager_with_source_map, generate_contract_artifact, generate_program_artifact, parse_all, - JsCompileResult, PathToFileSourceMap, + file_manager_with_source_map, JsCompileContractResult, JsCompileProgramResult, + PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; +use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; +use 
nargo::parse_all; use noirc_driver::{ add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::{CrateId, CrateName}, @@ -92,7 +95,7 @@ impl CompilerContext { pub fn compile_program( mut self, program_width: usize, - ) -> Result { + ) -> Result { let compile_options = CompileOptions::default(); let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; @@ -110,15 +113,15 @@ impl CompilerContext { .0; let optimized_program = nargo::ops::transform_program(compiled_program, np_language); + let warnings = optimized_program.warnings.clone(); - let compile_output = generate_program_artifact(optimized_program); - Ok(JsCompileResult::new(compile_output)) + Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) } pub fn compile_contract( mut self, program_width: usize, - ) -> Result { + ) -> Result { let compile_options = CompileOptions::default(); let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; let root_crate_id = *self.context.root_crate_id(); @@ -136,24 +139,64 @@ impl CompilerContext { let optimized_contract = nargo::ops::transform_contract(compiled_contract, np_language); - let compile_output = generate_contract_artifact(optimized_contract); - Ok(JsCompileResult::new(compile_output)) + let functions = + optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); + + let contract_artifact = ContractArtifact { + noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), + name: optimized_contract.name, + functions, + events: optimized_contract.events, + file_map: optimized_contract.file_map, + }; + + Ok(JsCompileContractResult::new(contract_artifact, optimized_contract.warnings)) } } /// This is a method that exposes the same API as `compile` /// But uses the Context based APi internally #[wasm_bindgen] -pub fn compile_( +pub fn compile_program_( entry_point: String, - contracts: Option, dependency_graph: Option, file_source_map: PathToFileSourceMap, -) -> Result { - use std::collections::HashMap; +) -> Result { + console_error_panic_hook::set_once(); + + let compiler_context = + prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; + let program_width = 3; + compiler_context.compile_program(program_width) +} + +/// This is a method that exposes the same API as `compile` +/// But uses the Context based APi internally +#[wasm_bindgen] +pub fn compile_contract_( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { console_error_panic_hook::set_once(); + let compiler_context = + prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; + let program_width = 3; + + compiler_context.compile_contract(program_width) +} + +/// This is a method that exposes the same API as `prepare_context` +/// But uses the Context based API internally +fn prepare_compiler_context( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { + use std::collections::HashMap; + let dependency_graph: crate::compile::DependencyGraph = if let Some(dependency_graph) = dependency_graph { ::into_serde( @@ -218,22 +261,16 @@ pub fn compile_( } } - let is_contract = contracts.unwrap_or(false); - let program_width = 3; - - if is_contract { - compiler_context.compile_contract(program_width) - } else { - compiler_context.compile_program(program_width) - } + Ok(compiler_context) } #[cfg(test)] 
mod test { + use nargo::parse_all; use noirc_driver::prepare_crate; use noirc_frontend::hir::Context; - use crate::compile::{file_manager_with_source_map, parse_all, PathToFileSourceMap}; + use crate::compile::{file_manager_with_source_map, PathToFileSourceMap}; use std::path::Path; diff --git a/noir/compiler/wasm/src/index.cts b/noir/compiler/wasm/src/index.cts index 7c707e662d8..234bfa7280c 100644 --- a/noir/compiler/wasm/src/index.cts +++ b/noir/compiler/wasm/src/index.cts @@ -2,7 +2,7 @@ import { FileManager } from './noir/file-manager/file-manager'; import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; import { LogData, LogFn } from './utils'; -import { CompilationResult } from './types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from './types/noir_artifact'; import { inflateDebugSymbols } from './noir/debug'; /** @@ -17,36 +17,86 @@ import { inflateDebugSymbols } from './noir/debug'; * ```typescript * // Node.js * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager(myProjectPath); - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` * * ```typescript * // Browser * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager('/'); * for (const path of files) { * await fm.writeFile(path, await getFileAsStream(path)); * } - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` */ -async function compile( +async function compile_program( fileManager: FileManager, projectPath?: string, logFn?: LogFn, debugLogFn?: LogFn, -): Promise { +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_program(); +} + +/** + * Compiles a Noir project + * + * @param fileManager - The file manager to use + * @param projectPath - The path to the project inside the file manager. Defaults to the root of the file manager + * @param logFn - A logging function. If not provided, console.log will be used + * @param debugLogFn - A debug logging function. 
If not provided, logFn will be used + * + * @example + * ```typescript + * // Node.js + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager(myProjectPath); + * const myCompiledCode = await compile_contract(fm); + * ``` + * + * ```typescript + * // Browser + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager('/'); + * for (const path of files) { + * await fm.writeFile(path, await getFileAsStream(path)); + * } + * const myCompiledCode = await compile_contract(fm); + * ``` + */ +async function compile_contract( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_contract(); +} + +async function setup_compiler( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { if (logFn && !debugLogFn) { debugLogFn = logFn; } const cjs = await require('../build/cjs'); - const compiler = await NoirWasmCompiler.new( + return await NoirWasmCompiler.new( fileManager, projectPath ?? fileManager.getDataDir(), cjs, @@ -72,9 +122,16 @@ async function compile( }, }, ); - return await compiler.compile(); } const createFileManager = createNodejsFileManager; -export { compile, createFileManager, inflateDebugSymbols, CompilationResult }; +export { + compile_program as compile, + compile_program, + compile_contract, + createFileManager, + inflateDebugSymbols, + ProgramCompilationArtifacts, + ContractCompilationArtifacts, +}; diff --git a/noir/compiler/wasm/src/index.mts b/noir/compiler/wasm/src/index.mts index d4ed0beccfc..326a7337117 100644 --- a/noir/compiler/wasm/src/index.mts +++ b/noir/compiler/wasm/src/index.mts @@ -2,7 +2,7 @@ import { FileManager } from './noir/file-manager/file-manager'; import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; import { LogData, LogFn } from './utils'; -import { CompilationResult } from './types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from './types/noir_artifact'; import { inflateDebugSymbols } from './noir/debug'; /** @@ -17,30 +17,80 @@ import { inflateDebugSymbols } from './noir/debug'; * ```typescript * // Node.js * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager(myProjectPath); - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` * * ```typescript * // Browser * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager('/'); * for (const path of files) { * await fm.writeFile(path, await getFileAsStream(path)); * } - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` */ -async function compile( +async function compile_program( fileManager: FileManager, projectPath?: string, logFn?: LogFn, debugLogFn?: LogFn, -): Promise { +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_program(); +} + +/** + * Compiles a Noir project + * + * @param fileManager - The 
file manager to use + * @param projectPath - The path to the project inside the file manager. Defaults to the root of the file manager + * @param logFn - A logging function. If not provided, console.log will be used + * @param debugLogFn - A debug logging function. If not provided, logFn will be used + * + * @example + * ```typescript + * // Node.js + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager(myProjectPath); + * const myCompiledCode = await compile_contract(fm); + * ``` + * + * ```typescript + * // Browser + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager('/'); + * for (const path of files) { + * await fm.writeFile(path, await getFileAsStream(path)); + * } + * const myCompiledCode = await compile_contract(fm); + * ``` + */ +async function compile_contract( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_contract(); +} + +async function setup_compiler( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { if (logFn && !debugLogFn) { debugLogFn = logFn; } @@ -48,7 +98,7 @@ async function compile( const esm = await import(/* webpackMode: "eager" */ '../build/esm'); await esm.default(); - const compiler = await NoirWasmCompiler.new( + return await NoirWasmCompiler.new( fileManager, projectPath ?? fileManager.getDataDir(), esm, @@ -74,9 +124,16 @@ async function compile( }, }, ); - return await compiler.compile(); } const createFileManager = createNodejsFileManager; -export { compile, createFileManager, inflateDebugSymbols, CompilationResult }; +export { + compile_program as compile, + compile_program, + compile_contract, + createFileManager, + inflateDebugSymbols, + ProgramCompilationArtifacts, + ContractCompilationArtifacts, +}; diff --git a/noir/compiler/wasm/src/lib.rs b/noir/compiler/wasm/src/lib.rs index 6d737a0ea6d..6753faf2009 100644 --- a/noir/compiler/wasm/src/lib.rs +++ b/noir/compiler/wasm/src/lib.rs @@ -18,10 +18,10 @@ mod compile; mod compile_new; mod errors; -pub use compile::compile; +pub use compile::{compile_contract, compile_program}; // Expose the new Context-Centric API -pub use compile_new::{compile_, CompilerContext, CrateIDWrapper}; +pub use compile_new::{compile_contract_, compile_program_, CompilerContext, CrateIDWrapper}; use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; #[derive(Serialize, Deserialize)] @@ -32,12 +32,12 @@ pub struct BuildInfo { } #[wasm_bindgen] -pub fn init_log_level(filter: String) { +pub fn init_log_level(level: String) { // Set the static variable from Rust use std::sync::Once; - let filter: EnvFilter = - filter.parse().expect("Could not parse log filter while initializing logger"); + let level_filter: EnvFilter = + level.parse().expect("Could not parse log filter while initializing logger"); static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { @@ -46,7 +46,7 @@ pub fn init_log_level(filter: String) { .without_time() .with_writer(MakeWebConsoleWriter::new()); - tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); + tracing_subscriber::registry().with(fmt_layer.with_filter(level_filter)).init(); }); } diff --git a/noir/compiler/wasm/src/noir/noir-wasm-compiler.ts b/noir/compiler/wasm/src/noir/noir-wasm-compiler.ts index 2a0af5d8fee..1ec3af1fd65 
100644 --- a/noir/compiler/wasm/src/noir/noir-wasm-compiler.ts +++ b/noir/compiler/wasm/src/noir/noir-wasm-compiler.ts @@ -6,7 +6,7 @@ import { LocalDependencyResolver } from './dependencies/local-dependency-resolve import { FileManager } from './file-manager/file-manager'; import { Package } from './package'; import { LogFn } from '../utils'; -import { CompilationResult } from '../types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from '../types/noir_artifact'; /** Compilation options */ export type NoirWasmCompileOptions = { @@ -84,21 +84,64 @@ export class NoirWasmCompiler { /** * Compile EntryPoint */ + public async compile_program(): Promise<ProgramCompilationArtifacts> { + console.log(`Compiling at ${this.#package.getEntryPointPath()}`); + + if (this.#package.getType() !== 'bin') { + throw new Error(`Expected to find package type "bin" but found ${this.#package.getType()}`); + } + await this.#dependencyManager.resolveDependencies(); + this.#debugLog(`Dependencies: ${this.#dependencyManager.getPackageNames().join(', ')}`); + + try { + const entrypoint = this.#package.getEntryPointPath(); + const deps = { + /* eslint-disable camelcase */ + root_dependencies: this.#dependencyManager.getEntrypointDependencies(), + library_dependencies: this.#dependencyManager.getLibraryDependencies(), + /* eslint-enable camelcase */ + }; + const packageSources = await this.#package.getSources(this.#fm); + const librarySources = ( + await Promise.all( + this.#dependencyManager + .getLibraries() + .map(async ([alias, library]) => await library.package.getSources(this.#fm, alias)), + ) + ).flat(); + [...packageSources, ...librarySources].forEach((sourceFile) => { + this.#debugLog(`Adding source ${sourceFile.path}`); + this.#sourceMap.add_source_code(sourceFile.path, sourceFile.source); + }); + const result = this.#wasmCompiler.compile_program(entrypoint, deps, this.#sourceMap); + + return result; + } catch (err) { + if (err instanceof Error && err.name === 'CompileError') { + const logs = await this.#processCompileError(err); + for (const log of logs) { + this.#log(log); + } + throw new Error(logs.join('\n')); + } + + throw err; + } + } + /** * Compile EntryPoint */ - public async compile(): Promise<CompilationResult> { + public async compile_contract(): Promise<ContractCompilationArtifacts> { console.log(`Compiling at ${this.#package.getEntryPointPath()}`); - if (!(this.#package.getType() === 'contract' || this.#package.getType() === 'bin')) { - throw new Error(`Only supports compiling "contract" and "bin" package types (${this.#package.getType()})`); + if (this.#package.getType() !== 'contract') { + throw new Error(`Expected to find package type "contract" but found ${this.#package.getType()}`); } await this.#dependencyManager.resolveDependencies(); this.#debugLog(`Dependencies: ${this.#dependencyManager.getPackageNames().join(', ')}`); try { - const isContract: boolean = this.#package.getType() === 'contract'; - const entrypoint = this.#package.getEntryPointPath(); const deps = { /* eslint-disable camelcase */ @@ -118,11 +161,7 @@ export class NoirWasmCompiler { this.#debugLog(`Adding source ${sourceFile.path}`); this.#sourceMap.add_source_code(sourceFile.path, sourceFile.source); }); - const result = this.#wasmCompiler.compile(entrypoint, isContract, deps, this.#sourceMap); - - if ((isContract && !('contract' in result)) || (!isContract && !('program' in result))) { - throw new Error('Invalid compilation result'); - } + const result = this.#wasmCompiler.compile_contract(entrypoint, deps, this.#sourceMap); return result; } catch (err) { 
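The `NoirWasmCompiler` changes above follow the same pattern as the Rust-side split earlier in this diff: one `compile` entry point gated by a boolean/`Option` flag becomes two typed entry points that share a common setup step, so callers no longer have to inspect the result shape. A rough Rust sketch of that refactor pattern, using stub types rather than the real compiler context or artifacts:

```rust
// Stub types standing in for the real compiler context and artifacts.
struct Context;
struct ProgramArtifact;
struct ContractArtifact;

// Shared setup: dependency resolution, file-manager wiring, crate preparation.
fn prepare_context(entry_point: &str) -> Result<Context, String> {
    if entry_point.is_empty() {
        return Err("missing entry point".to_string());
    }
    Ok(Context)
}

// Before the split, a single `compile(entry_point, is_contract, ...)` returned
// an untagged union the caller had to probe with `'program' in result` checks.
// After it, each entry point has a precise return type.
fn compile_program(entry_point: &str) -> Result<ProgramArtifact, String> {
    let _context = prepare_context(entry_point)?;
    Ok(ProgramArtifact)
}

fn compile_contract(entry_point: &str) -> Result<ContractArtifact, String> {
    let _context = prepare_context(entry_point)?;
    Ok(ContractArtifact)
}

fn main() {
    assert!(compile_program("script/main.nr").is_ok());
    assert!(compile_contract("").is_err());
}
```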
diff --git a/noir/compiler/wasm/src/noir/package.ts b/noir/compiler/wasm/src/noir/package.ts index a2496a03b3a..81178e6ae96 100644 --- a/noir/compiler/wasm/src/noir/package.ts +++ b/noir/compiler/wasm/src/noir/package.ts @@ -91,7 +91,7 @@ export class Package { * Gets this package's dependencies. */ public getDependencies(): Record { - return this.#config.dependencies; + return this.#config.dependencies ?? {}; } /** diff --git a/noir/compiler/wasm/src/types/noir_artifact.ts b/noir/compiler/wasm/src/types/noir_artifact.ts index 350a4053a9a..832a6ed9bf9 100644 --- a/noir/compiler/wasm/src/types/noir_artifact.ts +++ b/noir/compiler/wasm/src/types/noir_artifact.ts @@ -73,6 +73,8 @@ export interface ContractArtifact { * The compilation result of an Noir contract. */ export interface ProgramArtifact { + /** Version of noir used for the build. */ + noir_version: string; /** The hash of the circuit. */ hash?: number; /** * The ABI of the function. */ @@ -178,22 +180,3 @@ export interface ProgramCompilationArtifacts { /** Compilation warnings. */ warnings: Warning[]; } - -/** - * output of Noir Wasm compilation, can be for a contract or lib/binary - */ -export type CompilationResult = ContractCompilationArtifacts | ProgramCompilationArtifacts; - -/** - * Check if it has Contract unique property - */ -export function isContractCompilationArtifacts(artifact: CompilationResult): artifact is ContractCompilationArtifacts { - return (artifact as ContractCompilationArtifacts).contract !== undefined; -} - -/** - * Check if it has Contract unique property - */ -export function isProgramCompilationArtifacts(artifact: CompilationResult): artifact is ProgramCompilationArtifacts { - return (artifact as ProgramCompilationArtifacts).program !== undefined; -} diff --git a/noir/compiler/wasm/src/types/noir_package_config.ts b/noir/compiler/wasm/src/types/noir_package_config.ts index 5f07c380cf3..0203763039a 100644 --- a/noir/compiler/wasm/src/types/noir_package_config.ts +++ b/noir/compiler/wasm/src/types/noir_package_config.ts @@ -20,7 +20,7 @@ type NoirPackageConfigSchema = { backend?: string; license?: string; }; - dependencies: Record; + dependencies?: Record; }; /** diff --git a/noir/compiler/wasm/test/compiler/browser/compile.test.ts b/noir/compiler/wasm/test/compiler/browser/compile.test.ts new file mode 100644 index 00000000000..7d4b3da55aa --- /dev/null +++ b/noir/compiler/wasm/test/compiler/browser/compile.test.ts @@ -0,0 +1,79 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { getPaths } from '../../shared'; +import { expect } from '@esm-bundle/chai'; +import { compile_program, compile_contract, createFileManager } from '@noir-lang/noir_wasm'; +import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; +import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from '../shared/compile.test'; + +const paths = getPaths('.'); + +async function getFile(path: string) { + // @ts-ignore + const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); + const url = `${basePath}${path.replace('.', '')}`; + const response = await fetch(url); + return response; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const response = await getFile(path); + const compiledData = await response.text(); + return JSON.parse(compiledData); +} + +describe('noir-compiler/browser', () => { + shouldCompileProgramIdentically( + async () => { + const { 
simpleScriptExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(simpleScriptExpectedArtifact)) as ProgramArtifact; + const noirWasmArtifact = await compile_program(fm, '/fixtures/simple'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); + + shouldCompileProgramIdentically( + async () => { + const { depsScriptExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(depsScriptExpectedArtifact)) as ProgramArtifact; + const noirWasmArtifact = await compile_program(fm, '/fixtures/with-deps'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); + + shouldCompileContractIdentically( + async () => { + const { contractExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; + const noirWasmArtifact = await compile_contract(fm, '/fixtures/noir-contract'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); +}); diff --git a/noir/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts b/noir/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts deleted file mode 100644 index 0d1e22e288f..00000000000 --- a/noir/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts +++ /dev/null @@ -1,43 +0,0 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ -import { getPaths } from '../../shared'; -import { expect } from '@esm-bundle/chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; -import { ContractArtifact } from '../../../src/types/noir_artifact'; -import { shouldCompileIdentically } from '../shared/compile_with_deps.test'; - -const paths = getPaths('.'); - -async function getFile(path: string) { - // @ts-ignore - const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); - const url = `${basePath}${path.replace('.', '')}`; - const response = await fetch(url); - return response; -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -async function getPrecompiledSource(path: string): Promise { - const response = await getFile(path); - const compiledData = await response.text(); - return JSON.parse(compiledData); -} - -describe('noir-compiler/browser', () => { - shouldCompileIdentically( - async () => { - const { contractExpectedArtifact } = paths; - const fm = createFileManager('/'); - const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); - for (const path of files) { - console.log(path); - await fm.writeFile(path, (await getFile(path)).body as ReadableStream); - } - const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; - const noirWasmArtifact = await compile(fm, 
'/fixtures/noir-contract'); - - return { nargoArtifact, noirWasmArtifact }; - }, - expect, - 60 * 20e3, - ); -}); diff --git a/noir/compiler/wasm/test/compiler/node/compile.test.ts b/noir/compiler/wasm/test/compiler/node/compile.test.ts new file mode 100644 index 00000000000..811dc95ce16 --- /dev/null +++ b/noir/compiler/wasm/test/compiler/node/compile.test.ts @@ -0,0 +1,39 @@ +import { join, resolve } from 'path'; +import { getPaths } from '../../shared'; + +import { expect } from 'chai'; +import { compile_program, compile_contract, createFileManager } from '@noir-lang/noir_wasm'; +import { readFile } from 'fs/promises'; +import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; +import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from '../shared/compile.test'; + +const basePath = resolve(join(__dirname, '../../')); + +describe('noir-compiler/node', () => { + shouldCompileProgramIdentically(async () => { + const { simpleScriptProjectPath, simpleScriptExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(simpleScriptProjectPath); + const nargoArtifact = JSON.parse((await readFile(simpleScriptExpectedArtifact)).toString()) as ProgramArtifact; + const noirWasmArtifact = await compile_program(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); + + shouldCompileProgramIdentically(async () => { + const { depsScriptProjectPath, depsScriptExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(depsScriptProjectPath); + const nargoArtifact = JSON.parse((await readFile(depsScriptExpectedArtifact)).toString()) as ProgramArtifact; + const noirWasmArtifact = await compile_program(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); + + shouldCompileContractIdentically(async () => { + const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(contractProjectPath); + const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; + const noirWasmArtifact = await compile_contract(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); +}); diff --git a/noir/compiler/wasm/test/compiler/node/compile_with_deps.test.ts b/noir/compiler/wasm/test/compiler/node/compile_with_deps.test.ts deleted file mode 100644 index 2a402dc9d02..00000000000 --- a/noir/compiler/wasm/test/compiler/node/compile_with_deps.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { join, resolve } from 'path'; -import { getPaths } from '../../shared'; - -import { expect } from 'chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; -import { readFile } from 'fs/promises'; -import { ContractArtifact } from '../../../src/types/noir_artifact'; -import { shouldCompileIdentically } from '../shared/compile_with_deps.test'; - -const basePath = resolve(join(__dirname, '../../')); -const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); - -describe('noir-compiler/node', () => { - shouldCompileIdentically(async () => { - const fm = createFileManager(contractProjectPath); - const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; - const noirWasmArtifact = await compile(fm); - return { nargoArtifact, noirWasmArtifact }; - }, expect); -}); diff --git a/noir/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts b/noir/compiler/wasm/test/compiler/shared/compile.test.ts similarity index 51% rename from 
noir/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts rename to noir/compiler/wasm/test/compiler/shared/compile.test.ts index 0960cba0665..52cef14968b 100644 --- a/noir/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts +++ b/noir/compiler/wasm/test/compiler/shared/compile.test.ts @@ -1,4 +1,4 @@ -import { CompilationResult, inflateDebugSymbols } from '@noir-lang/noir_wasm'; +import { inflateDebugSymbols } from '@noir-lang/noir_wasm'; import { type expect as Expect } from 'chai'; import { ContractArtifact, @@ -6,10 +6,12 @@ import { DebugFileMap, DebugInfo, NoirFunctionEntry, + ProgramArtifact, + ProgramCompilationArtifacts, } from '../../../src/types/noir_artifact'; -export function shouldCompileIdentically( - compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: CompilationResult }>, +export function shouldCompileProgramIdentically( + compileFn: () => Promise<{ nargoArtifact: ProgramArtifact; noirWasmArtifact: ProgramCompilationArtifacts }>, expect: typeof Expect, timeout = 5000, ) { @@ -18,13 +20,49 @@ export function shouldCompileIdentically( const { nargoArtifact, noirWasmArtifact } = await compileFn(); // Prepare nargo artifact - const [nargoDebugInfos, nargoFileMap] = deleteDebugMetadata(nargoArtifact); + const [_nargoDebugInfos, nargoFileMap] = deleteProgramDebugMetadata(nargoArtifact); normalizeVersion(nargoArtifact); // Prepare noir-wasm artifact - const noirWasmContract = (noirWasmArtifact as ContractCompilationArtifacts).contract; + const noirWasmProgram = noirWasmArtifact.program; + expect(noirWasmProgram).not.to.be.undefined; + const [_noirWasmDebugInfos, norWasmFileMap] = deleteProgramDebugMetadata(noirWasmProgram); + normalizeVersion(noirWasmProgram); + + // We first compare both programs without considering debug info + delete (noirWasmProgram as Partial<ProgramArtifact>).hash; + delete (nargoArtifact as Partial<ProgramArtifact>).hash; + expect(nargoArtifact).to.deep.eq(noirWasmProgram); + + // Compare the file maps, ignoring keys, since those depend on the order in which files are visited, + // which may change depending on the file manager implementation. Also ignores paths, since the base + // path is reported differently between nargo and noir-wasm. + expect(getSources(nargoFileMap)).to.have.members(getSources(norWasmFileMap)); + + // Compare the debug symbol information, ignoring the actual ids used for file identifiers. + // Debug symbol info looks like the following; what we need is to ignore the 'file' identifiers + // {"locations":{"0":[{"span":{"start":141,"end":156},"file":39},{"span":{"start":38,"end":76},"file":38},{"span":{"start":824,"end":862},"file":23}]}} + // expect(nargoDebugInfos).to.deep.eq(noirWasmDebugInfos); + }).timeout(timeout); +} + +export function shouldCompileContractIdentically( + compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: ContractCompilationArtifacts }>, + expect: typeof Expect, + timeout = 5000, +) { + it('both nargo and noir_wasm should compile identically', async () => { + // Compile! 
+ const { nargoArtifact, noirWasmArtifact } = await compileFn(); + + // Prepare nargo artifact + const [nargoDebugInfos, nargoFileMap] = deleteContractDebugMetadata(nargoArtifact); + normalizeVersion(nargoArtifact); + + // Prepare noir-wasm artifact + const noirWasmContract = noirWasmArtifact.contract; expect(noirWasmContract).not.to.be.undefined; - const [noirWasmDebugInfos, norWasmFileMap] = deleteDebugMetadata(noirWasmContract); + const [noirWasmDebugInfos, norWasmFileMap] = deleteContractDebugMetadata(noirWasmContract); normalizeVersion(noirWasmContract); // We first compare both contracts without considering debug info @@ -43,7 +81,7 @@ export function shouldCompileIdentically( } /** Remove commit identifier from version, which may not match depending on cached nargo and noir-wasm */ -function normalizeVersion(contract: ContractArtifact) { +function normalizeVersion(contract: ProgramArtifact | ContractArtifact) { contract.noir_version = contract.noir_version.replace(/\+.+$/, ''); } @@ -57,8 +95,18 @@ function extractDebugInfos(fns: NoirFunctionEntry[]) { }); } +/** Deletes all debug info from a program and returns it. */ +function deleteProgramDebugMetadata(program: ProgramArtifact) { + const debugSymbols = inflateDebugSymbols(program.debug_symbols); + const fileMap = program.file_map; + + delete (program as Partial).debug_symbols; + delete (program as Partial).file_map; + return [debugSymbols, fileMap]; +} + /** Deletes all debug info from a contract and returns it. */ -function deleteDebugMetadata(contract: ContractArtifact) { +function deleteContractDebugMetadata(contract: ContractArtifact) { contract.functions.sort((a, b) => a.name.localeCompare(b.name)); const fileMap = contract.file_map; delete (contract as Partial).file_map; diff --git a/noir/compiler/wasm/test/shared.ts b/noir/compiler/wasm/test/shared.ts index 9181919ff39..9f4d417a614 100644 --- a/noir/compiler/wasm/test/shared.ts +++ b/noir/compiler/wasm/test/shared.ts @@ -1,14 +1,23 @@ export function getPaths(basePath: string) { const fixtures = `${basePath}/fixtures`; - const simpleScriptSourcePath = `${fixtures}/simple/src/main.nr`; - const simpleScriptExpectedArtifact = `${fixtures}/simple/target/noir_wasm_testing.json`; + const simpleScriptProjectPath = `${fixtures}/simple`; + const simpleScriptSourcePath = `${simpleScriptProjectPath}/src/main.nr`; + const simpleScriptTOMLPath = `${simpleScriptProjectPath}/Nargo.toml`; + const simpleScriptExpectedArtifact = `${simpleScriptProjectPath}/target/noir_wasm_testing.json`; - const depsScriptSourcePath = `${fixtures}/with-deps/src/main.nr`; - const depsScriptExpectedArtifact = `${fixtures}/with-deps/target/noir_wasm_testing.json`; + const depsScriptProjectPath = `${fixtures}/with-deps`; + const depsScriptSourcePath = `${depsScriptProjectPath}/src/main.nr`; + const depsScriptTOMLPath = `${depsScriptProjectPath}/Nargo.toml`; + const depsScriptExpectedArtifact = `${depsScriptProjectPath}/target/noir_wasm_testing.json`; - const libASourcePath = `${fixtures}/deps/lib-a/src/lib.nr`; - const libBSourcePath = `${fixtures}/deps/lib-b/src/lib.nr`; + const libAProjectPath = `${fixtures}/deps/lib-a`; + const libASourcePath = `${libAProjectPath}/src/lib.nr`; + const libATOMLPath = `${libAProjectPath}/Nargo.toml`; + + const libBProjectPath = `${fixtures}/deps/lib-b`; + const libBSourcePath = `${libBProjectPath}/src/lib.nr`; + const libBTOMLPath = `${libBProjectPath}/Nargo.toml`; const contractProjectPath = `${fixtures}/noir-contract`; const contractSourcePath = 
`${contractProjectPath}/src/main.nr`; @@ -22,12 +31,18 @@ export function getPaths(basePath: string) { const libCTOMLPath = `${libCProjectPath}/Nargo.toml`; return { + simpleScriptProjectPath, simpleScriptSourcePath, + simpleScriptTOMLPath, simpleScriptExpectedArtifact, + depsScriptProjectPath, depsScriptSourcePath, + depsScriptTOMLPath, depsScriptExpectedArtifact, libASourcePath, + libATOMLPath, libBSourcePath, + libBTOMLPath, contractProjectPath, contractSourcePath, contractTOMLPath, diff --git a/noir/compiler/wasm/test/wasm/browser/index.test.ts b/noir/compiler/wasm/test/wasm/browser/index.test.ts index 3122fa57945..b59b4ae417a 100644 --- a/noir/compiler/wasm/test/wasm/browser/index.test.ts +++ b/noir/compiler/wasm/test/wasm/browser/index.test.ts @@ -2,7 +2,7 @@ import { getPaths } from '../../shared'; import { expect } from '@esm-bundle/chai'; -import init, { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/esm'; +import init, { compile_program, PathToFileSourceMap, compile_program_, CompilerContext } from '../../../build/esm'; // @ts-ignore await init(); @@ -35,7 +35,7 @@ describe('noir wasm compilation', () => { it('matching nargos compilation', async () => { const sourceMap = new PathToFileSourceMap(); sourceMap.add_source_code('script/main.nr', await getFileAsString(simpleScriptSourcePath)); - const wasmCircuit = compile('script/main.nr', undefined, undefined, sourceMap); + const wasmCircuit = compile_program('script/main.nr', undefined, sourceMap); const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); if (!('program' in wasmCircuit)) { @@ -58,9 +58,8 @@ describe('noir wasm compilation', () => { }); it('matching nargos compilation', async () => { - const wasmCircuit = compile( + const wasmCircuit = compile_program( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { @@ -132,9 +131,8 @@ describe('noir wasm compilation', () => { }).timeout(60 * 20e3); it('matching nargos compilation - context-implementation-compile-api', async () => { - const wasmCircuit = await compile_( + const wasmCircuit = await compile_program_( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { diff --git a/noir/compiler/wasm/test/wasm/node/index.test.ts b/noir/compiler/wasm/test/wasm/node/index.test.ts index c73ce7477e5..23c87cc059a 100644 --- a/noir/compiler/wasm/test/wasm/node/index.test.ts +++ b/noir/compiler/wasm/test/wasm/node/index.test.ts @@ -3,7 +3,7 @@ import { readFileSync } from 'fs'; import { join, resolve } from 'path'; import { expect } from 'chai'; -import { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/cjs'; +import { compile_program, PathToFileSourceMap, compile_program_, CompilerContext } from '../../../build/cjs'; const basePath = resolve(join(__dirname, '../../')); const { @@ -26,7 +26,7 @@ describe('noir wasm compilation', () => { it('matching nargos compilation', async () => { const sourceMap = new PathToFileSourceMap(); sourceMap.add_source_code(simpleScriptSourcePath, readFileSync(simpleScriptSourcePath, 'utf-8')); - const wasmCircuit = compile(simpleScriptSourcePath, undefined, undefined, sourceMap); + const wasmCircuit = compile_program(simpleScriptSourcePath, undefined, sourceMap); const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); if (!('program' in wasmCircuit)) { @@ -49,9 +49,8 @@ describe('noir wasm compilation', () => { }); it('matching nargos compilation', async () => { - const wasmCircuit = compile( + 
const wasmCircuit = compile_program( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { @@ -123,9 +122,8 @@ describe('noir wasm compilation', () => { }).timeout(60 * 20e3); it('matching nargos compilation - context-implementation-compile-api', async () => { - const wasmCircuit = await compile_( + const wasmCircuit = await compile_program_( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { diff --git a/noir/compiler/wasm/webpack.config.ts b/noir/compiler/wasm/webpack.config.ts index d5d70df2b8a..456c5d82dca 100644 --- a/noir/compiler/wasm/webpack.config.ts +++ b/noir/compiler/wasm/webpack.config.ts @@ -1,6 +1,6 @@ import { resolve, join } from 'path'; import webpack from 'webpack'; -import 'webpack-dev-server'; +import type { Configuration as DevServerConfiguration } from 'webpack-dev-server'; import WasmPackPlugin from '@wasm-tool/wasm-pack-plugin'; import HtmlWebpackPlugin from 'html-webpack-plugin'; import CopyWebpackPlugin from 'copy-webpack-plugin'; @@ -25,6 +25,10 @@ const config: webpack.Configuration = { }, }; +const devServerConfig: DevServerConfiguration = { + static: join(__dirname, 'dist'), +}; + const webConfig: webpack.Configuration = { name: 'web', entry: './src/index.mts', @@ -74,9 +78,7 @@ const webConfig: webpack.Configuration = { }, ], }, - devServer: { - static: join(__dirname, 'dist'), - }, + devServer: devServerConfig, resolve: { ...config.resolve, alias: { diff --git a/noir/cspell.json b/noir/cspell.json index 2acca0633d3..23659b39c68 100644 --- a/noir/cspell.json +++ b/noir/cspell.json @@ -90,6 +90,7 @@ "indexmap", "injective", "Inlines", + "instrumenter", "interner", "intrinsics", "jmp", @@ -117,6 +118,7 @@ "monomorphizes", "monomorphizing", "montcurve", + "MSRV", "nand", "nargo", "neovim", diff --git a/noir/deny.toml b/noir/deny.toml index a3e506984c9..72150f08a3c 100644 --- a/noir/deny.toml +++ b/noir/deny.toml @@ -54,7 +54,7 @@ allow = [ "LicenseRef-ring", # https://github.com/rustls/webpki/blob/main/LICENSE ISC Style "LicenseRef-rustls-webpki", - # bitmaps 2.1.0, generational-arena 0.2.9,im 15.1.0 + # bitmaps 2.1.0, im 15.1.0 "MPL-2.0", # Boost Software License "BSL-1.0", diff --git a/noir/docs/docs/getting_started/installation/other_install_methods.md b/noir/docs/docs/getting_started/installation/other_install_methods.md index 489f1eda802..a35e34aaf9c 100644 --- a/noir/docs/docs/getting_started/installation/other_install_methods.md +++ b/noir/docs/docs/getting_started/installation/other_install_methods.md @@ -112,7 +112,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -122,7 +122,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L 
https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -132,7 +132,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc @@ -212,7 +212,7 @@ code . #### Building and testing Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.73.0 at the time of this writing. If you want to build the entire project in an isolated sandbox, you can use Nix commands: diff --git a/noir/docs/docs/noir/concepts/data_types/vectors.mdx b/noir/docs/docs/noir/standard_library/containers/vec.mdx similarity index 71% rename from noir/docs/docs/noir/concepts/data_types/vectors.mdx rename to noir/docs/docs/noir/standard_library/containers/vec.mdx index aed13183719..1954f05bc76 100644 --- a/noir/docs/docs/noir/concepts/data_types/vectors.mdx +++ b/noir/docs/docs/noir/standard_library/containers/vec.mdx @@ -1,6 +1,6 @@ --- title: Vectors -description: Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +description: Delve into the Vec data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. keywords: [noir, vector type, methods, examples, dynamic arrays] sidebar_position: 6 --- @@ -9,7 +9,7 @@ import Experimental from '@site/src/components/Notes/_experimental.mdx'; -A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. +A vector is a collection type similar to Rust's `Vec` type. In Noir, it is a convenient way to use slices as mutable arrays. Example: @@ -28,9 +28,7 @@ assert(vector.len() == 5); Creates a new, empty vector. ```rust -pub fn new() -> Self { - Self { slice: [] } -} +pub fn new() -> Self ``` Example: @@ -45,9 +43,7 @@ assert(empty_vector.len() == 0); Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. ```rust -pub fn from_slice(slice: [T]) -> Self { - Self { slice } -} +pub fn from_slice(slice: [T]) -> Self ``` Example: @@ -58,14 +54,27 @@ let vector_from_slice = Vec::from_slice(arr); assert(vector_from_slice.len() == 3); ``` +### len + +Returns the number of elements in the vector. 
+ +```rust +pub fn len(self) -> Field +``` + +Example: + +```rust +let empty_vector: Vec<Field> = Vec::new(); +assert(empty_vector.len() == 0); +``` + ### get Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. ```rust -pub fn get(self, index: Field) -> T { - self.slice[index] -} +pub fn get(self, index: Field) -> T ``` Example: @@ -80,9 +89,7 @@ assert(vector.get(1) == 20); ### push Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. ```rust -pub fn push(&mut self, elem: T) { - self.slice = self.slice.push_back(elem); -} +pub fn push(&mut self, elem: T) ``` Example: @@ -98,11 +105,7 @@ assert(vector.len() == 1); ### pop Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. ```rust -pub fn pop(&mut self) -> T { - let (popped_slice, last_elem) = self.slice.pop_back(); - self.slice = popped_slice; - last_elem -} +pub fn pop(&mut self) -> T ``` Example: @@ -119,9 +122,7 @@ assert(vector.len() == 1); ### insert Inserts an element at a specified index, shifting subsequent elements to the right. ```rust -pub fn insert(&mut self, index: Field, elem: T) { - self.slice = self.slice.insert(index, elem); -} +pub fn insert(&mut self, index: Field, elem: T) ``` Example: @@ -137,11 +138,7 @@ assert(vector.get(1) == 20); ### remove Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. ```rust -pub fn remove(&mut self, index: Field) -> T { - let (new_slice, elem) = self.slice.remove(index); - self.slice = new_slice; - elem -} +pub fn remove(&mut self, index: Field) -> T ``` Example: @@ -152,20 +149,3 @@ let removed_elem = vector.remove(1); assert(removed_elem == 20); assert(vector.len() == 2); ``` - -### len - -Returns the number of elements in the vector. - -```rust -pub fn len(self) -> Field { - self.slice.len() -} -``` - -Example: - -```rust -let empty_vector: Vec<Field> = Vec::new(); -assert(empty_vector.len() == 0); -``` diff --git a/noir/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx b/noir/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx index 85706384eee..b9239f822e8 100644 --- a/noir/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx +++ b/noir/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -114,6 +114,19 @@ example: #include_code poseidon test_programs/execution_success/poseidon_bn254_hash/src/main.nr rust +## poseidon 2 + +Given an array of Fields, returns a new Field with the Poseidon2 hash. Unlike the Poseidon function, there is only one hash variant, and you can specify a `message_size` to hash only the first `message_size` elements of the input: + +```rust +// example for hashing the first three elements of the input +Poseidon2::hash(input, 3); +``` + +The Poseidon example linked above also covers Poseidon2. + ## mimc_bn254 and mimc `mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve.
You can use it by diff --git a/noir/docs/docs/noir/standard_library/recursion.md b/noir/docs/docs/noir/standard_library/recursion.md index f252150c8b5..9337499dac8 100644 --- a/noir/docs/docs/noir/standard_library/recursion.md +++ b/noir/docs/docs/noir/standard_library/recursion.md @@ -8,6 +8,26 @@ Noir supports recursively verifying proofs, meaning you verify the proof of a No Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. + +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. + +## Verifying Recursive Proofs + ```rust #[foreign(verify_proof)] fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field) {} diff --git a/noir/docs/docusaurus.config.ts b/noir/docs/docusaurus.config.ts index 1b6c65d5139..49566c5c380 100644 --- a/noir/docs/docusaurus.config.ts +++ b/noir/docs/docusaurus.config.ts @@ -38,7 +38,7 @@ export default { }, }, editUrl: ({ versionDocsDirPath, docPath }) => - `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath}/${docPath}`, + `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath.replace('processed-docs', 'docs')}/${docPath}`, }, blog: false, theme: { diff --git a/noir/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md b/noir/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md index a532f83750e..746633b628d 100644 --- a/noir/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md +++ b/noir/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md @@ -48,7 +48,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -58,7 +58,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L 
https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -68,7 +68,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc diff --git a/noir/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md b/noir/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md index ad76dd255cc..82899217e61 100644 --- a/noir/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md +++ b/noir/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md @@ -14,9 +14,9 @@ You can find the complete app code for this guide [here](https://github.com/noir :::note -Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.19.x matches `noir_js@0.19.x`, etc. +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.23.x matches `noir_js@0.23.x`, etc. -In this guide, we will be pinned to 0.19.4. +In this guide, we will be pinned to 0.23.0. ::: @@ -80,7 +80,7 @@ To do this this, go back to the previous folder (`cd ..`) and create a new vite You should see `vite-project` appear in your root folder. This seems like a good time to `cd` into it and install our NoirJS packages: ```bash -npm i @noir-lang/backend_barretenberg@0.19.4 @noir-lang/noir_js@0.19.4 +npm i @noir-lang/backend_barretenberg@0.23.0 @noir-lang/noir_js@0.23.0 vite-plugin-top-level-await ``` :::info @@ -99,6 +99,22 @@ At this point in the tutorial, your folder structure should look like this: #### Some cleanup +Add a `vite.config.js` file containing the following: + +```js +import { defineConfig } from 'vite'; +import topLevelAwait from "vite-plugin-top-level-await"; + +export default defineConfig({ + plugins: [ + topLevelAwait({ + promiseExportName: "__tla", + promiseImportName: i => `__tla_${i}` + }) + ] +}) +``` + `npx create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `index.html`, `main.js` and `package.json`. I feel lighter already. 
![my heart is ready for you, noir.js](@site/static/img/memes/titanic.jpeg) diff --git a/noir/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md b/noir/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md index 489f1eda802..076f26dfd94 100644 --- a/noir/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md +++ b/noir/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md @@ -112,7 +112,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -122,7 +122,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -132,7 +132,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc diff --git a/noir/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md b/noir/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md index f252150c8b5..9337499dac8 100644 --- a/noir/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md +++ b/noir/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md @@ -8,6 +8,26 @@ Noir supports recursively verifying proofs, meaning you verify the proof of a No Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. 
+ +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. + +## Verifying Recursive Proofs + ```rust #[foreign(verify_proof)] fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field) {} diff --git a/noir/flake.nix b/noir/flake.nix index 659df12f260..4c5db8bfaae 100644 --- a/noir/flake.nix +++ b/noir/flake.nix @@ -44,7 +44,7 @@ rustToolchain = fenix.packages.${system}.fromToolchainFile { file = ./rust-toolchain.toml; - sha256 = "sha256-R0F0Risbr74xg9mEYydyebx/z0Wu6HI0/KWwrV30vZo="; + sha256 = "sha256-rLP8+fTxnPHoR96ZJiCa/5Ans1OojI7MLsmSqR2ip8o="; }; craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; diff --git a/noir/noir_stdlib/src/array.nr b/noir/noir_stdlib/src/array.nr index 995af6c4c6f..3da4b649174 100644 --- a/noir/noir_stdlib/src/array.nr +++ b/noir/noir_stdlib/src/array.nr @@ -4,10 +4,10 @@ use crate::cmp::{Ord}; // by the methods in the `slice` module impl [T; N] { #[builtin(array_len)] - pub fn len(self) -> Field {} + pub fn len(self) -> u64 {} pub fn sort(self) -> Self where T: Ord { - self.sort_via(|a, b| a <= b) + self.sort_via(|a: T, b: T| a <= b) } pub fn sort_via(self, ordering: fn[Env](T, T) -> bool) -> Self { @@ -17,27 +17,27 @@ impl [T; N] { for i in 0..N { let pos = find_index(sorted_index, i); assert(sorted_index[pos] == i); - } + } // Sort the array using the indexes - for i in 0..N { + for i in 0..N { result[i] = self[sorted_index[i]]; - } + } // Ensure the array is sorted - for i in 0..N-1 { - assert(ordering(result[i], result[i+1])); + for i in 0..N - 1 { + assert(ordering(result[i], result[i + 1])); } result } /// Returns the index of the elements in the array that would sort it, using the provided custom sorting function. - unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [Field; N] { - let mut result = [0;N]; + unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [u64; N] { + let mut result = [0; N]; let mut a = self; for i in 0..N { result[i] = i; } - for i in 1 .. N { + for i in 1..N { for j in 0..i { if ordering(a[i], a[j]) { let old_a_j = a[j]; @@ -45,14 +45,13 @@ impl [T; N] { a[i] = old_a_j; let old_j = result[j]; result[j] = result[i]; - result[i] = old_j; + result[i] = old_j; } } } result } - // Converts an array into a slice. pub fn as_slice(self) -> [T] { let mut slice = []; @@ -68,7 +67,7 @@ impl [T; N] { let first_elem = f(self[0]); let mut ret = [first_elem; N]; - for i in 1 .. self.len() { + for i in 1..self.len() { ret[i] = f(self[i]); } @@ -90,7 +89,7 @@ impl [T; N] { // element of the given array as its starting accumulator value. pub fn reduce(self, f: fn[Env](T, T) -> T) -> T { let mut accumulator = self[0]; - for i in 1 .. 
self.len() { + for i in 1..self.len() { accumulator = f(accumulator, self[i]); } accumulator @@ -117,12 +116,12 @@ impl [T; N] { // helper function used to look up the position of a value in an array of Field // Note that function returns 0 if the value is not found -unconstrained fn find_index(a: [Field;N], find: Field) -> Field { +unconstrained fn find_index(a: [u64; N], find: u64) -> u64 { let mut result = 0; for i in 0..a.len() { if a[i] == find { result = i; - } + } } result -} \ No newline at end of file +} diff --git a/noir/noir_stdlib/src/bigint.nr b/noir/noir_stdlib/src/bigint.nr index 61051da7116..98237a54779 100644 --- a/noir/noir_stdlib/src/bigint.nr +++ b/noir/noir_stdlib/src/bigint.nr @@ -13,7 +13,6 @@ global secpr1_fq = [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF]; global secpr1_fr = [0x51, 0x25, 0x63, 0xFC, 0xC2, 0xCA, 0xB9, 0xF3, 0x84, 0x9E, 0x17, 0xA7, 0xAD, 0xFA, 0xE6, 0xBC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,0xFF, 0xFF, 0xFF, 0xFF]; - struct BigInt { pointer: u32, @@ -22,17 +21,13 @@ struct BigInt { impl BigInt { #[builtin(bigint_add)] - fn bigint_add(self, other: BigInt) -> BigInt { - } + fn bigint_add(self, other: BigInt) -> BigInt {} #[builtin(bigint_sub)] - fn bigint_sub(self, other: BigInt) -> BigInt { - } + fn bigint_sub(self, other: BigInt) -> BigInt {} #[builtin(bigint_mul)] - fn bigint_mul(self, other: BigInt) -> BigInt { - } + fn bigint_mul(self, other: BigInt) -> BigInt {} #[builtin(bigint_div)] - fn bigint_div(self, other: BigInt) -> BigInt { - } + fn bigint_div(self, other: BigInt) -> BigInt {} #[builtin(bigint_from_le_bytes)] fn from_le_bytes(bytes: [u8], modulus: [u8]) -> BigInt {} #[builtin(bigint_to_le_bytes)] @@ -42,14 +37,13 @@ impl BigInt { let bytes = self.to_le_bytes(); let o_bytes = other.to_le_bytes(); let mut result = true; - for i in 0..32{ + for i in 0..32 { result = result & (bytes[i] == o_bytes[i]); } result } } - trait BigField { fn from_le_bytes(bytes: [u8]) -> Self; fn to_le_bytes(self) -> [u8]; diff --git a/noir/noir_stdlib/src/collections.nr b/noir/noir_stdlib/src/collections.nr index 177ca96816f..2d952f4d6cd 100644 --- a/noir/noir_stdlib/src/collections.nr +++ b/noir/noir_stdlib/src/collections.nr @@ -1,2 +1,3 @@ mod vec; mod bounded_vec; +mod map; diff --git a/noir/noir_stdlib/src/collections/bounded_vec.nr b/noir/noir_stdlib/src/collections/bounded_vec.nr index 332fefa63f9..752b96d6591 100644 --- a/noir/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir/noir_stdlib/src/collections/bounded_vec.nr @@ -1,37 +1,35 @@ struct BoundedVec { storage: [T; MaxLen], - // TODO: change this to return a u64 as Noir now - // uses u64 for indexing - len: Field, - empty_value: T, + len: u64, } impl BoundedVec { - pub fn new(initial_value: T) -> Self { - BoundedVec { storage: [initial_value; MaxLen], len: 0, empty_value: initial_value } + pub fn new() -> Self { + let zeroed = crate::unsafe::zeroed(); + BoundedVec { storage: [zeroed; MaxLen], len: 0 } } - pub fn get(mut self: Self, index: Field) -> T { - assert(index as u64 < self.len as u64); + pub fn get(mut self: Self, index: u64) -> T { + assert(index as u64 < self.len); self.storage[index] } - pub fn get_unchecked(mut self: Self, index: Field) -> T { + pub fn get_unchecked(mut self: Self, index: u64) -> T { self.storage[index] } pub fn push(&mut self, elem: T) { - assert(self.len as u64 < MaxLen as u64, "push out of bounds"); + assert(self.len < 
MaxLen as u64, "push out of bounds"); self.storage[self.len] = elem; self.len += 1; } - pub fn len(self) -> Field { + pub fn len(self) -> u64 { self.len } - pub fn max_len(_self: BoundedVec<T, MaxLen>) -> Field { + pub fn max_len(_self: BoundedVec<T, MaxLen>) -> u64 { MaxLen } @@ -59,7 +57,7 @@ impl BoundedVec { for i in 0..Len { exceeded_len |= i == append_len; if !exceeded_len { - self.storage[self.len + (i as Field)] = vec.get_unchecked(i as Field); + self.storage[self.len + i] = vec.get_unchecked(i); } } self.len = new_len; @@ -70,7 +68,7 @@ impl BoundedVec { self.len -= 1; let elem = self.storage[self.len]; - self.storage[self.len] = self.empty_value; + self.storage[self.len] = crate::unsafe::zeroed(); elem } @@ -79,10 +77,10 @@ impl BoundedVec { let mut exceeded_len = false; for i in 0..MaxLen { exceeded_len |= i == self.len; - if (!exceeded_len) { + if !exceeded_len { ret |= predicate(self.storage[i]); } } ret } -} \ No newline at end of file +} diff --git a/noir/noir_stdlib/src/collections/map.nr b/noir/noir_stdlib/src/collections/map.nr new file mode 100644 index 00000000000..056299b4238 --- /dev/null +++ b/noir/noir_stdlib/src/collections/map.nr @@ -0,0 +1,456 @@ +use crate::cmp::Eq; +use crate::collections::vec::Vec; +use crate::option::Option; +use crate::default::Default; +use crate::hash::{Hash, Hasher, BuildHasher}; + +// We use load factor α_max = 0.75. +// Upon exceeding it, an assert will fail in order to inform the user +// about performance degradation, so that they can adjust the capacity. +global MAX_LOAD_FACTOR_NUMERATOR = 3; +global MAX_LOAD_FACTOR_DENOMINATOR = 4; + +// Hash table with open addressing and quadratic probing. +// Size of the underlying table must be known at compile time. +// It is advised to select the capacity N as a power of two or a prime number, +// because the probing scheme used is best tailored to such sizes. +struct HashMap<K, V, N, B> { + _table: [Slot<K, V>; N], + + // Number of valid elements in the map. + _len: u64, + + _build_hasher: B +} + +// Data unit in the HashMap table. +// In case Noir adds support for enums in the future, this +// should be refactored to have three states: +// 1. (key, value) +// 2. (empty) +// 3. (deleted) +struct Slot<K, V> { + _key_value: Option<(K, V)>, + _is_deleted: bool, +} + +impl<K, V> Default for Slot<K, V>{ + fn default() -> Self{ + Slot{ + _key_value: Option::none(), + _is_deleted: false + } + } +} + +impl<K, V> Slot<K, V> { + fn is_valid(self) -> bool { + !self._is_deleted & self._key_value.is_some() + } + + fn is_available(self) -> bool { + self._is_deleted | self._key_value.is_none() + } + + fn key_value(self) -> Option<(K, V)> { + self._key_value + } + + fn key_value_unchecked(self) -> (K, V) { + self._key_value.unwrap_unchecked() + } + + fn set(&mut self, key: K, value: V) { + self._key_value = Option::some((key, value)); + self._is_deleted = false; + } + + // Shall not override `_key_value` with Option::none(), + // because we must be able to differentiate empty + // and deleted slots for lookup. + fn mark_deleted(&mut self) { + self._is_deleted = true; + } +} + +// During lookup we iterate attempt from 0 to N - 1, on the heuristic that +// if we have gone that far without finding the key, +// it is very unlikely to appear later - performance will be heavily degraded by then. +impl<K, V, N, B> HashMap<K, V, N, B> { + // Creates a new instance of HashMap with the specified BuildHasher. + pub fn with_hasher<H>(_build_hasher: B) -> Self + where + B: BuildHasher<H> { + let _table = [Slot::default(); N]; + let _len = 0; + Self { _table, _len, _build_hasher } + } + + // Clears the map, removing all key-value entries.
+ pub fn clear(&mut self) { + self._table = [Slot::default(); N]; + self._len = 0; + } + + // Returns true if the map contains a value for the specified key. + pub fn contains_key<H>( + self, + key: K + ) -> bool + where + K: Hash + Eq, + B: BuildHasher<H>, + H: Hasher { + self.get(key).is_some() + } + + // Returns true if the map contains no elements. + pub fn is_empty(self) -> bool { + self._len == 0 + } + + // Get the Option<(K, V)> array of valid entries + // with a length of map capacity. First len() elements + // are safe to unwrap_unchecked(), whilst the remaining + // are guaranteed to be Option::none(). + // + // This design is motivated by compile-time limitations and + // the temporary ban on nested slices. + pub fn entries(self) -> [Option<(K, V)>; N] { + let mut entries = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + entries[valid_amount] = slot.key_value(); + valid_amount += 1; + } + } + + let msg = f"Expected {self._len} valid elements, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + entries + } + + // Get the Option<K> array of valid keys + // with a length of map capacity. First len() elements + // are safe to unwrap_unchecked(), whilst the remaining + // are guaranteed to be Option::none(). + // + // This design is motivated by compile-time limitations and + // the temporary ban on nested slices. + pub fn keys(self) -> [Option<K>; N] { + let mut keys = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + let (key, _) = slot.key_value_unchecked(); + keys[valid_amount] = Option::some(key); + valid_amount += 1; + } + } + + let msg = f"Expected {self._len} valid elements, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + keys + } + + // Get the Option<V> array of valid values + // with a length of map capacity. First len() elements + // are safe to unwrap_unchecked(), whilst the remaining + // are guaranteed to be Option::none(). + // + // This design is motivated by compile-time limitations and + // the temporary ban on nested slices. + pub fn values(self) -> [Option<V>; N] { + let mut values = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + let (_, value) = slot.key_value_unchecked(); + values[valid_amount] = Option::some(value); + valid_amount += 1; + } + } + + let msg = f"Expected {self._len} valid elements, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + values + } + + // Applies the mutator function to each key-value entry. + pub fn iter_mut<H>( + &mut self, + f: fn(K, V) -> (K, V) + ) + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + let mut entries = self.entries(); + let mut new_map = HashMap::with_hasher(self._build_hasher); + + for i in 0..N { + if i < self._len { + let entry = entries[i].unwrap_unchecked(); + let (key, value) = f(entry.0, entry.1); + new_map.insert(key, value); + } + } + + self._table = new_map._table; + } + + // Applies the mutator function to each key.
+ pub fn iter_keys_mut<H>( + &mut self, + f: fn(K) -> K + ) + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + let mut entries = self.entries(); + let mut new_map = HashMap::with_hasher(self._build_hasher); + + for i in 0..N { + if i < self._len { + let entry = entries[i].unwrap_unchecked(); + let (key, value) = (f(entry.0), entry.1); + new_map.insert(key, value); + } + } + + self._table = new_map._table; + } + + // Applies the mutator function to each value. + pub fn iter_values_mut(&mut self, f: fn(V) -> V) { + for i in 0..N { + let mut slot = self._table[i]; + if slot.is_valid() { + let (key, value) = slot.key_value_unchecked(); + slot.set(key, f(value)); + self._table[i] = slot; + } + } + } + + // Retains only the elements specified by the predicate. + pub fn retain(&mut self, f: fn(K, V) -> bool) { + for index in 0..N { + let mut slot = self._table[index]; + if slot.is_valid() { + let (key, value) = slot.key_value_unchecked(); + if !f(key, value) { + slot.mark_deleted(); + self._len -= 1; + self._table[index] = slot; + } + } + } + } + + // Number of active key-value entries. + pub fn len(self) -> u64 { + self._len + } + + // Get the compile-time map capacity. + pub fn capacity(_self: Self) -> u64 { + N + } + + // Get the value by key. If it does not exist, returns none(). + pub fn get<H>( + self, + key: K + ) -> Option<V> + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + let mut result = Option::none(); + + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let slot = self._table[index]; + + // Not marked as deleted and has key-value. + if slot.is_valid() { + let (current_key, value) = slot.key_value_unchecked(); + if current_key == key { + result = Option::some(value); + break = true; + } + } + } + } + + result + } + + // Inserts a key-value entry. If the key was already present, the value is overridden. + pub fn insert<H>( + &mut self, + key: K, + value: V + ) + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + self.assert_load_factor(); + + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let mut slot = self._table[index]; + let mut insert = false; + + // Either marked as deleted or has unset key-value. + if slot.is_available() { + insert = true; + self._len += 1; + } else { + let (current_key, _) = slot.key_value_unchecked(); + if current_key == key { + insert = true; + } + } + + if insert { + slot.set(key, value); + self._table[index] = slot; + break = true; + } + } + } + } + + // Removes a key-value entry. If the key is not present, the HashMap remains unchanged. + pub fn remove<H>( + &mut self, + key: K + ) + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let mut slot = self._table[index]; + + // Not marked as deleted and has key-value. + if slot.is_valid() { + let (current_key, _) = slot.key_value_unchecked(); + if current_key == key { + slot.mark_deleted(); + self._table[index] = slot; + self._len -= 1; + break = true; + } + } + } + } + } + + // Applies the HashMap's hasher to the key to obtain a pre-hash for probing.
+ fn hash<H>( + self, + key: K + ) -> u64 + where + K: Hash, + B: BuildHasher<H>, + H: Hasher { + let mut hasher = self._build_hasher.build_hasher(); + key.hash(&mut hasher); + hasher.finish() as u64 + } + + // Probing scheme: quadratic function. + // We use a coefficient of 0.5 on both the attempt and attempt^2 terms, + // i.e. the probe offset is (attempt + attempt^2) / 2. + // This ensures good uniformity of distribution for table sizes + // equal to prime numbers or powers of two. + fn quadratic_probe(_self: Self, hash: u64, attempt: u64) -> u64 { + (hash + (attempt + attempt * attempt) / 2) % N + } + + // Asserts that the amount of elements in the table in relation to available slots does not exceed α_max. + // To avoid a comparatively more expensive division operation + // we conduct cross-multiplication instead. + // n / m >= MAX_LOAD_FACTOR_NUMERATOR / MAX_LOAD_FACTOR_DENOMINATOR + // n * MAX_LOAD_FACTOR_DENOMINATOR >= m * MAX_LOAD_FACTOR_NUMERATOR + fn assert_load_factor(self) { + let lhs = self._len * MAX_LOAD_FACTOR_DENOMINATOR; + let rhs = self._table.len() * MAX_LOAD_FACTOR_NUMERATOR; + let exceeded = lhs >= rhs; + assert(!exceeded, "Load factor is exceeded, consider increasing the capacity."); + } +} + +// Equality on HashMaps has to test that they have +// equal sets of key-value entries: +// each is a subset of the other and vice versa. +impl<K, V, N, B, H> Eq for HashMap<K, V, N, B> +where + K: Eq + Hash, + V: Eq, + B: BuildHasher<H>, + H: Hasher +{ + fn eq(self, other: HashMap<K, V, N, B>) -> bool{ + let mut equal = false; + + if self.len() == other.len(){ + equal = true; + for slot in self._table{ + // Not marked as deleted and has key-value. + if equal & slot.is_valid(){ + let (key, value) = slot.key_value_unchecked(); + let other_value = other.get(key); + + if other_value.is_none(){ + equal = false; + }else{ + let other_value = other_value.unwrap_unchecked(); + if value != other_value{ + equal = false; + } + } + } + } + } + + equal + } +} + +impl<K, V, N, B, H> Default for HashMap<K, V, N, B> +where + B: BuildHasher<H> + Default, + H: Hasher + Default +{ + fn default() -> Self{ + let _build_hasher = B::default(); + let map: HashMap<K, V, N, B> = HashMap::with_hasher(_build_hasher); + map + } +} diff --git a/noir/noir_stdlib/src/collections/vec.nr b/noir/noir_stdlib/src/collections/vec.nr index 43d68e1d1e7..deec98185ff 100644 --- a/noir/noir_stdlib/src/collections/vec.nr +++ b/noir/noir_stdlib/src/collections/vec.nr @@ -17,14 +17,14 @@ impl Vec { /// Get an element from the vector at the given index. /// Panics if the given index /// points beyond the end of the vector. - pub fn get(self, index: Field) -> T { + pub fn get(self, index: u64) -> T { self.slice[index] - } + } /// Push a new element to the end of the vector, returning a /// new vector with a length one greater than the /// original unmodified vector. - pub fn push(&mut self, elem: T) { + pub fn push(&mut self, elem: T) { self.slice = self.slice.push_back(elem); } /// Pop an element from the end of the given vector, returning /// a new vector with a length of one less than the given vector, /// as well as the popped element. /// Panics if the given vector's length is zero.
- pub fn pop(&mut self) -> T { + pub fn pop(&mut self) -> T { let (popped_slice, last_elem) = self.slice.pop_back(); self.slice = popped_slice; last_elem @@ -40,20 +40,20 @@ impl Vec { /// Insert an element at a specified index, shifting all elements /// after it to the right - pub fn insert(&mut self, index: Field, elem: T) { + pub fn insert(&mut self, index: u64, elem: T) { self.slice = self.slice.insert(index, elem); - } + } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the removed element - pub fn remove(&mut self, index: Field) -> T { + pub fn remove(&mut self, index: u64) -> T { let (new_slice, elem) = self.slice.remove(index); self.slice = new_slice; elem } /// Returns the number of elements in the vector - pub fn len(self) -> Field { + pub fn len(self) -> u64 { self.slice.len() } } diff --git a/noir/noir_stdlib/src/ec/montcurve.nr b/noir/noir_stdlib/src/ec/montcurve.nr index 83a17bae322..7dc756781c0 100644 --- a/noir/noir_stdlib/src/ec/montcurve.nr +++ b/noir/noir_stdlib/src/ec/montcurve.nr @@ -31,7 +31,7 @@ mod affine { impl Point { // Point constructor pub fn new(x: Field, y: Field) -> Self { - Self {x, y, infty: false} + Self { x, y, infty: false } } // Check if zero @@ -45,30 +45,30 @@ mod affine { curvegroup::Point::zero() } else { let (x,y) = (self.x, self.y); - curvegroup::Point::new(x,y,1) + curvegroup::Point::new(x, y, 1) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, infty: true} + Self { x: 0, y: 0, infty: true } } // Negation fn negate(self) -> Self { let Self {x, y, infty} = self; - Self {x, y: 0-y, infty} + Self { x, y: 0 - y, infty } } // Map into equivalent Twisted Edwards curve fn into_tecurve(self) -> TEPoint { let Self {x, y, infty} = self; - - if infty | (y*(x+1) == 0) { + + if infty | (y * (x + 1) == 0) { TEPoint::zero() } else { - TEPoint::new(x/y, (x-1)/(x+1)) + TEPoint::new(x / y, (x - 1) / (x + 1)) } } } @@ -84,9 +84,9 @@ mod affine { pub fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients assert(k != 0); - assert(j*j != 4); + assert(j * j != 4); - let curve = Self {j, k, gen}; + let curve = Self { j, k, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -103,8 +103,8 @@ mod affine { pub fn contains(self, p: Point) -> bool { let Self {j, k, gen: _gen} = self; let Point {x, y, infty: infty} = p; - - infty | (k*y*y == x*(x*x + j*x + 1)) + + infty | (k * y * y == x * (x * x + j * x + 1)) } // Point addition @@ -122,7 +122,7 @@ mod affine { fn mul(self, n: Field, p: Point) -> Point { self.into_tecurve().mul(n, p.into_tecurve()).into_montcurve() } - + // Multi-scalar multiplication (n[0]*p[0] + ... 
+ n[N]*p[N], where * denotes scalar multiplication) fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); @@ -142,15 +142,15 @@ mod affine { // Conversion to equivalent Twisted Edwards curve fn into_tecurve(self) -> TECurve { let Self {j, k, gen} = self; - TECurve::new((j+2)/k, (j-2)/k, gen.into_tecurve()) + TECurve::new((j + 2) / k, (j - 2) / k, gen.into_tecurve()) } // Conversion to equivalent Short Weierstraß curve pub fn into_swcurve(self) -> SWCurve { let j = self.j; let k = self.k; - let a0 = (3-j*j)/(3*k*k); - let b0 = (2*j*j*j - 9*j)/(27*k*k*k); + let a0 = (3 - j * j) / (3 * k * k); + let b0 = (2 * j * j * j - 9 * j) / (27 * k * k * k); SWCurve::new(a0, b0, self.map_into_swcurve(self.gen)) } @@ -160,8 +160,7 @@ mod affine { if p.is_zero() { SWPoint::zero() } else { - SWPoint::new((3*p.x + self.j)/(3*self.k), - p.y/self.k) + SWPoint::new((3 * p.x + self.j) / (3 * self.k), p.y / self.k) } } @@ -170,8 +169,8 @@ mod affine { let SWPoint {x, y, infty} = p; let j = self.j; let k = self.k; - - Point {x: (3*k*x - j)/3, y: y*k, infty} + + Point { x: (3 * k * x - j) / 3, y: y * k, infty } } // Elligator 2 map-to-curve method; see . @@ -179,18 +178,18 @@ mod affine { let j = self.j; let k = self.k; let z = ZETA; // Non-square Field element required for map - + // Check whether curve is admissible assert(j != 0); - let l = (j*j - 4)/(k*k); + let l = (j * j - 4) / (k * k); assert(l != 0); assert(is_square(l) == false); - let x1 = safe_inverse(1+z*u*u)*(0 - (j/k)); - - let gx1 = x1*x1*x1 + (j/k)*x1*x1 + x1/(k*k); - let x2 = 0 - x1 - (j/k); - let gx2 = x2*x2*x2 + (j/k)*x2*x2 + x2/(k*k); + let x1 = safe_inverse(1 + z * u * u) * (0 - (j / k)); + + let gx1 = x1 * x1 * x1 + (j / k) * x1 * x1 + x1 / (k * k); + let x2 = 0 - x1 - (j / k); + let gx2 = x2 * x2 * x2 + (j / k) * x2 * x2 + x2 / (k * k); let x = if is_square(gx1) { x1 } else { x2 }; @@ -202,13 +201,12 @@ mod affine { if y0.sgn0() == 0 { y0 } else { 0 - y0 } }; - Point::new(x*k, y*k) - + Point::new(x * k, y * k) } // SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.map_from_swcurve(self.into_swcurve().swu_map(z,u)) + self.map_from_swcurve(self.into_swcurve().swu_map(z, u)) } } } @@ -240,7 +238,7 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, z: Field) -> Self { - Self {x, y, z} + Self { x, y, z } } // Check if zero @@ -254,20 +252,20 @@ mod curvegroup { affine::Point::zero() } else { let (x,y,z) = (self.x, self.y, self.z); - affine::Point::new(x/z, y/z) + affine::Point::new(x / z, y / z) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 1,z: 0} + Self { x: 0, y: 1, z: 0 } } // Negation fn negate(self) -> Self { let Self {x, y, z} = self; - Point::new(x, 0-y, z) + Point::new(x, 0 - y, z) } // Map into equivalent Twisted Edwards curve @@ -287,9 +285,9 @@ mod curvegroup { pub fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients assert(k != 0); - assert(j*j != 4); + assert(j * j != 4); - let curve = Self {j, k, gen}; + let curve = Self { j, k, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -306,8 +304,8 @@ mod curvegroup { pub fn contains(self, p: Point) -> bool { let Self {j, k, gen: _gen} = self; let Point {x, y, z} = p; - - k*y*y*z == x*(x*x + j*x*z + z*z) + + k * y * y * z == x * (x * x + j * x * z + z * z) } // Point addition @@ -320,12 +318,12 @@ mod curvegroup { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_tecurve().bit_mul(bits, 
p.into_tecurve()).into_montcurve() } - + // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { self.into_tecurve().mul(n, p.into_tecurve()).into_montcurve() } - + // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); @@ -345,18 +343,17 @@ mod curvegroup { // Conversion to equivalent Twisted Edwards curve fn into_tecurve(self) -> TECurve { let Self {j, k, gen} = self; - TECurve::new((j+2)/k, (j-2)/k, gen.into_tecurve()) + TECurve::new((j + 2) / k, (j - 2) / k, gen.into_tecurve()) } // Conversion to equivalent Short Weierstraß curve fn into_swcurve(self) -> SWCurve { let j = self.j; let k = self.k; - let a0 = (3-j*j)/(3*k*k); - let b0 = (2*j*j*j - 9*j)/(27*k*k*k); + let a0 = (3 - j * j) / (3 * k * k); + let b0 = (2 * j * j * j - 9 * j) / (27 * k * k * k); - SWCurve::new(a0, b0, - self.map_into_swcurve(self.gen)) + SWCurve::new(a0, b0, self.map_into_swcurve(self.gen)) } // Point mapping into equivalent Short Weierstraß curve @@ -373,10 +370,10 @@ mod curvegroup { fn elligator2_map(self, u: Field) -> Point { self.into_affine().elligator2_map(u).into_group() } - + // SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_affine().swu_map(z,u).into_group() + self.into_affine().swu_map(z, u).into_group() } } } diff --git a/noir/noir_stdlib/src/ec/swcurve.nr b/noir/noir_stdlib/src/ec/swcurve.nr index e64f5a7be02..9dd324f3085 100644 --- a/noir/noir_stdlib/src/ec/swcurve.nr +++ b/noir/noir_stdlib/src/ec/swcurve.nr @@ -27,14 +27,14 @@ mod affine { impl Point { // Point constructor pub fn new(x: Field, y: Field) -> Self { - Self {x, y, infty: false} + Self { x, y, infty: false } } // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) } - + // Conversion to CurveGroup coordinates fn into_group(self) -> curvegroup::Point { let Self {x, y, infty} = self; @@ -45,16 +45,16 @@ mod affine { curvegroup::Point::new(x, y, 1) } } - + // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, infty: true} + Self { x: 0, y: 0, infty: true } } - + // Negation fn negate(self) -> Self { let Self {x, y, infty} = self; - Self {x, y: 0-y, infty} + Self { x, y: 0 - y, infty } } } @@ -72,8 +72,8 @@ mod affine { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - assert(4*a*a*a + 27*b*b != 0); - + assert(4 * a * a * a + 27 * b * b != 0); + let curve = Curve { a, b, gen }; // gen should be on the curve @@ -85,16 +85,16 @@ mod affine { // Conversion to CurveGroup coordinates fn into_group(self) -> curvegroup::Curve { let Curve{a, b, gen} = self; - - curvegroup::Curve {a, b, gen: gen.into_group()} + + curvegroup::Curve { a, b, gen: gen.into_group() } } // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y, infty} = p; - infty | (y*y == x*x*x + self.a*x + self.b) + infty | (y * y == x * x * x + self.a * x + self.b) } - + // Point addition, implemented in terms of mixed addition for reasons of efficiency pub fn add(self, p1: Point, p2: Point) -> Point { self.mixed_add(p1, p2.into_group()).into_affine() @@ -109,9 +109,9 @@ mod affine { } else { let Point {x: x1, y: y1, infty: _inf} = p1; let curvegroup::Point {x: x2, y: y2, z: z2} = p2; - let you1 = x1*z2*z2; + let you1 = x1 * z2 * z2; let you2 = x2; - let s1 = y1*z2*z2*z2; + let s1 = y1 * z2 * z2 * z2; let s2 = y2; if you1 == you2 { @@ -120,15 +120,14 @@ mod affine 
{ } else { self.into_group().double(p2) } - } else - { + } else { let h = you2 - you1; let r = s2 - s1; - let x3 = r*r - h*h*h - 2*you1*h*h; - let y3 = r*(you1*h*h - x3) - s1*h*h*h; - let z3 = h*z2; + let x3 = r * r - h * h * h - 2 * you1 * h * h; + let y3 = r * (you1 * h * h - x3) - s1 * h * h * h; + let z3 = h * z2; - curvegroup::Point::new(x3,y3,z3) + curvegroup::Point::new(x3, y3, z3) } } } @@ -138,7 +137,7 @@ mod affine { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } - + // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { self.into_group().mul(n, p.into_group()).into_affine() @@ -165,17 +164,25 @@ mod affine { // where g(x) = x^3 + a*x + b. swu_map(c,z,.) then maps a Field element to a point on curve c. fn swu_map(self, z: Field, u: Field) -> Point { // Check whether curve is admissible - assert(self.a*self.b != 0); - + assert(self.a * self.b != 0); + let Curve {a, b, gen: _gen} = self; - - let tv1 = safe_inverse(z*z*u*u*u*u + u*u*z); - let x1 = if tv1 == 0 {b/(z*a)} else {(0-b/a)*(1 + tv1)}; - let gx1 = x1*x1*x1 + a*x1 + b; - let x2 = z*u*u*x1; - let gx2 = x2*x2*x2 + a*x2 + b; - let (x,y) = if is_square(gx1) {(x1, sqrt(gx1))} else {(x2, sqrt(gx2))}; - Point::new(x, if u.sgn0() != y.sgn0() {0-y} else {y}) + + let tv1 = safe_inverse(z * z * u * u * u * u + u * u * z); + let x1 = if tv1 == 0 { + b / (z * a) + } else { + (0 - b / a) * (1 + tv1) + }; + let gx1 = x1 * x1 * x1 + a * x1 + b; + let x2 = z * u * u * x1; + let gx2 = x2 * x2 * x2 + a * x2 + b; + let (x,y) = if is_square(gx1) { + (x1, sqrt(gx1)) + } else { + (x2, sqrt(gx2)) + }; + Point::new(x, if u.sgn0() != y.sgn0() { 0 - y } else { y }) } } } @@ -205,14 +212,14 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, z: Field) -> Self { - Self {x, y, z} + Self { x, y, z } } // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) } - + // Conversion to affine coordinates pub fn into_affine(self) -> affine::Point { let Self {x, y, z} = self; @@ -220,20 +227,19 @@ mod curvegroup { if z == 0 { affine::Point::zero() } else { - affine::Point::new(x/(z*z), y/(z*z*z)) + affine::Point::new(x / (z * z), y / (z * z * z)) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, z: 0} + Self { x: 0, y: 0, z: 0 } } - - + // Negation fn negate(self) -> Self { let Self {x, y, z} = self; - Self {x, y: 0-y, z} + Self { x, y: 0 - y, z } } } @@ -250,8 +256,8 @@ mod curvegroup { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - assert(4*a*a*a + 27*b*b != 0); - + assert(4 * a * a * a + 27 * b * b != 0); + let curve = Curve { a, b, gen }; // gen should be on the curve @@ -264,7 +270,7 @@ mod curvegroup { pub fn into_affine(self) -> affine::Curve { let Curve{a, b, gen} = self; - affine::Curve {a, b, gen: gen.into_affine()} + affine::Curve { a, b, gen: gen.into_affine() } } // Membership check @@ -273,13 +279,12 @@ mod curvegroup { if z == 0 { true } else { - y*y == x*x*x + self.a*x*z*z*z*z + self.b*z*z*z*z*z*z + y * y == x * x * x + self.a * x * z * z * z * z + self.b * z * z * z * z * z * z } } - + // Addition pub fn add(self, p1: Point, p2: Point) -> Point { - if p1.is_zero() { p2 } else if p2.is_zero() { @@ -287,10 +292,10 @@ mod curvegroup { } else { let Point {x: x1, y: y1, z: z1} = p1; let Point {x: x2, y: y2, z: z2} = p2; - let you1 = x1*z2*z2; - let you2 = x2*z1*z1; - let s1 = y1*z2*z2*z2; - let s2 = y2*z1*z1*z1; + let 
you1 = x1 * z2 * z2; + let you2 = x2 * z1 * z1; + let s1 = y1 * z2 * z2 * z2; + let s2 = y2 * z1 * z1 * z1; if you1 == you2 { if s1 != s2 { @@ -301,11 +306,11 @@ mod curvegroup { } else { let h = you2 - you1; let r = s2 - s1; - let x3 = r*r - h*h*h - 2*you1*h*h; - let y3 = r*(you1*h*h - x3) - s1*h*h*h; - let z3 = h*z1*z2; + let x3 = r * r - h * h * h - 2 * you1 * h * h; + let y3 = r * (you1 * h * h - x3) - s1 * h * h * h; + let z3 = h * z1 * z2; - Point::new(x3,y3,z3) + Point::new(x3, y3, z3) } } } @@ -313,19 +318,19 @@ mod curvegroup { // Point doubling pub fn double(self, p: Point) -> Point { let Point {x, y, z} = p; - + if p.is_zero() { p } else if y == 0 { Point::zero() } else { - let s = 4*x*y*y; - let m = 3*x*x + self.a*z*z*z*z; - let x0 = m*m - 2*s; - let y0 = m*(s-x0) - 8*y*y*y*y; - let z0 = 2*y*z; + let s = 4 * x * y * y; + let m = 3 * x * x + self.a * z * z * z * z; + let x0 = m * m - 2 * s; + let y0 = m * (s - x0) - 8 * y * y * y * y; + let z0 = 2 * y * z; - Point::new(x0,y0,z0) + Point::new(x0, y0, z0) } } @@ -351,7 +356,7 @@ mod curvegroup { let mut n_as_bits: [u1; 254] = [0; 254]; let tmp = n.to_le_bits(N_BITS as u32); for i in 0..254 { - n_as_bits[i] = tmp[i]; + n_as_bits[i] = tmp[i]; } self.bit_mul(n_as_bits, p) @@ -375,7 +380,7 @@ mod curvegroup { // Simplified SWU map-to-curve method fn swu_map(self, z: Field, u: Field) -> Point { - self.into_affine().swu_map(z,u).into_group() + self.into_affine().swu_map(z, u).into_group() } } } diff --git a/noir/noir_stdlib/src/ec/tecurve.nr b/noir/noir_stdlib/src/ec/tecurve.nr index 5333ece4c4a..506fe89313a 100644 --- a/noir/noir_stdlib/src/ec/tecurve.nr +++ b/noir/noir_stdlib/src/ec/tecurve.nr @@ -40,18 +40,18 @@ mod affine { fn into_group(self) -> curvegroup::Point { let Self {x, y} = self; - curvegroup::Point::new(x, y, x*y, 1) + curvegroup::Point::new(x, y, x * y, 1) } // Additive identity pub fn zero() -> Self { - Point::new(0,1) + Point::new(0, 1) } // Negation fn negate(self) -> Self { let Self {x, y} = self; - Point::new(0-x, y) + Point::new(0 - x, y) } // Map into prime-order subgroup of equivalent Montgomery curve @@ -60,10 +60,10 @@ mod affine { MPoint::zero() } else { let Self {x, y} = self; - let x0 = (1+y)/(1-y); - let y0 = (1+y)/(x*(1-y)); + let x0 = (1 + y) / (1 - y); + let y0 = (1 + y) / (x * (1 - y)); - MPoint::new(x0,y0) + MPoint::new(x0, y0) } } } @@ -81,9 +81,9 @@ mod affine { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - assert(a*d*(a-d) != 0); - - let curve = Curve {a, d, gen}; + assert(a * d * (a - d) != 0); + + let curve = Curve { a, d, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -95,15 +95,15 @@ mod affine { fn into_group(self) -> curvegroup::Curve { let Curve{a, d, gen} = self; - curvegroup::Curve {a, d, gen: gen.into_group()} + curvegroup::Curve { a, d, gen: gen.into_group() } } - + // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y} = p; - self.a*x*x + y*y == 1 + self.d*x*x*y*y + self.a * x * x + y * y == 1 + self.d * x * x * y * y } - + // Point addition, implemented in terms of mixed addition for reasons of efficiency pub fn add(self, p1: Point, p2: Point) -> Point { self.mixed_add(p1, p2.into_group()).into_affine() @@ -114,20 +114,20 @@ mod affine { let Point{x: x1, y: y1} = p1; let curvegroup::Point{x: x2, y: y2, t: t2, z: z2} = p2; - let a = x1*x2; - let b = y1*y2; - let c = self.d*x1*y1*t2; - let e = (x1 + y1)*(x2 + y2) - a - b; + let a = x1 * x2; + let b = y1 * y2; + let c = self.d * 
x1 * y1 * t2; + let e = (x1 + y1) * (x2 + y2) - a - b; let f = z2 - c; let g = z2 + c; - let h = b - self.a*a; + let h = b - self.a * a; - let x = e*f; - let y = g*h; - let t = e*h; - let z = f*g; + let x = e * f; + let y = g * h; + let t = e * h; + let z = f * g; - curvegroup::Point::new(x,y,t,z) + curvegroup::Point::new(x, y, t, z) } // Scalar multiplication with scalar represented by a bit array (little-endian convention). @@ -135,7 +135,7 @@ mod affine { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } - + // Scalar multiplication (p + ... + p n times) fn mul(self, n: Field, p: Point) -> Point { self.into_group().mul(n, p.into_group()).into_affine() @@ -159,10 +159,10 @@ mod affine { // Conversion to equivalent Montgomery curve pub fn into_montcurve(self) -> MCurve { - let j = 2*(self.a + self.d)/(self.a - self.d); - let k = 4/(self.a - self.d); + let j = 2 * (self.a + self.d) / (self.a - self.d); + let k = 4 / (self.a - self.d); let gen_montcurve = self.gen.into_montcurve(); - + MCurve::new(j, k, gen_montcurve) } @@ -188,7 +188,7 @@ mod affine { // Simplified SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_montcurve().swu_map(z,u).into_tecurve() + self.into_montcurve().swu_map(z, u).into_tecurve() } } } @@ -222,7 +222,7 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, t: Field, z: Field) -> Self { - Self {x, y, t, z} + Self { x, y, t, z } } // Check if zero @@ -235,19 +235,19 @@ mod curvegroup { pub fn into_affine(self) -> affine::Point { let Self {x, y, t: _t, z} = self; - affine::Point::new(x/z, y/z) + affine::Point::new(x / z, y / z) } // Additive identity pub fn zero() -> Self { - Point::new(0,1,0,1) + Point::new(0, 1, 0, 1) } // Negation fn negate(self) -> Self { let Self {x, y, t, z} = self; - Point::new(0-x, y, 0-t, z) + Point::new(0 - x, y, 0 - t, z) } // Map into prime-order subgroup of equivalent Montgomery curve @@ -269,8 +269,8 @@ mod curvegroup { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - assert(a*d*(a-d) != 0); - + assert(a * d * (a - d) != 0); + let curve = Curve { a, d, gen }; // gen should be on the curve @@ -283,14 +283,16 @@ mod curvegroup { pub fn into_affine(self) -> affine::Curve { let Curve{a, d, gen} = self; - affine::Curve {a, d, gen: gen.into_affine()} + affine::Curve { a, d, gen: gen.into_affine() } } // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y, t, z} = p; - (z != 0) & (z*t == x*y) & (z*z*(self.a*x*x + y*y) == z*z*z*z + self.d*x*x*y*y) + (z != 0) + & (z * t == x * y) + & (z * z * (self.a * x * x + y * y) == z * z * z * z + self.d * x * x * y * y) } // Point addition @@ -298,40 +300,40 @@ mod curvegroup { let Point{x: x1, y: y1, t: t1, z: z1} = p1; let Point{x: x2, y: y2, t: t2, z: z2} = p2; - let a = x1*x2; - let b = y1*y2; - let c = self.d*t1*t2; - let d = z1*z2; - let e = (x1 + y1)*(x2 + y2) - a - b; + let a = x1 * x2; + let b = y1 * y2; + let c = self.d * t1 * t2; + let d = z1 * z2; + let e = (x1 + y1) * (x2 + y2) - a - b; let f = d - c; let g = d + c; - let h = b - self.a*a; + let h = b - self.a * a; - let x = e*f; - let y = g*h; - let t = e*h; - let z = f*g; + let x = e * f; + let y = g * h; + let t = e * h; + let z = f * g; - Point::new(x,y,t,z) + Point::new(x, y, t, z) } // Point doubling, cf. 
§3.3 pub fn double(self, p: Point) -> Point { let Point{x, y, t: _t, z} = p; - let a = x*x; - let b = y*y; - let c = 2*z*z; - let d = self.a*a; - let e = (x + y)*(x + y) - a - b; + let a = x * x; + let b = y * y; + let c = 2 * z * z; + let d = self.a * a; + let e = (x + y) * (x + y) - a - b; let g = d + b; let f = g - c; let h = d - b; - let x0 = e*f; - let y0 = g*h; - let t0 = e*h; - let z0 = f*g; + let x0 = e * f; + let y0 = g * h; + let t0 = e * h; + let z0 = f * g; Point::new(x0, y0, t0, z0) } @@ -340,7 +342,7 @@ mod curvegroup { // If k is the natural number represented by `bits`, then this computes p + ... + p k times. fn bit_mul(self, bits: [u1; N], p: Point) -> Point { let mut out = Point::zero(); - + for i in 0..N { out = self.add( self.add(out, out), @@ -349,7 +351,7 @@ mod curvegroup { out } - + // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { let N_BITS = crate::field::modulus_num_bits(); @@ -358,7 +360,7 @@ mod curvegroup { let mut n_as_bits: [u1; 254] = [0; 254]; let tmp = n.to_le_bits(N_BITS as u32); for i in 0..254 { - n_as_bits[i] = tmp[i]; + n_as_bits[i] = tmp[i]; } self.bit_mul(n_as_bits, p) @@ -407,7 +409,7 @@ mod curvegroup { // Simplified SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_montcurve().swu_map(z,u).into_tecurve() + self.into_montcurve().swu_map(z, u).into_tecurve() } } } diff --git a/noir/noir_stdlib/src/ecdsa_secp256k1.nr b/noir/noir_stdlib/src/ecdsa_secp256k1.nr index e8d9af2230f..b72a1acd041 100644 --- a/noir/noir_stdlib/src/ecdsa_secp256k1.nr +++ b/noir/noir_stdlib/src/ecdsa_secp256k1.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message_hash: [u8; N] ) -> bool // docs:end:ecdsa_secp256k1 -{} \ No newline at end of file +{} diff --git a/noir/noir_stdlib/src/ecdsa_secp256r1.nr b/noir/noir_stdlib/src/ecdsa_secp256r1.nr index 9fe932a2f3d..ef92bf24ae4 100644 --- a/noir/noir_stdlib/src/ecdsa_secp256r1.nr +++ b/noir/noir_stdlib/src/ecdsa_secp256r1.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message_hash: [u8; N] ) -> bool // docs:end:ecdsa_secp256r1 -{} \ No newline at end of file +{} diff --git a/noir/noir_stdlib/src/field.nr b/noir/noir_stdlib/src/field.nr index 66fb50119f9..0f4c2caffdf 100644 --- a/noir/noir_stdlib/src/field.nr +++ b/noir/noir_stdlib/src/field.nr @@ -6,7 +6,7 @@ impl Field { crate::assert_constant(bit_size); self.__to_le_bits(bit_size) } - + pub fn to_be_bits(self: Self, bit_size: u32) -> [u1] { crate::assert_constant(bit_size); self.__to_be_bits(bit_size) @@ -14,7 +14,7 @@ impl Field { #[builtin(to_le_bits)] fn __to_le_bits(self, _bit_size: u32) -> [u1] {} - + #[builtin(to_be_bits)] fn __to_be_bits(self, bit_size: u32) -> [u1] {} @@ -35,7 +35,6 @@ impl Field { self.to_be_radix(256, byte_size) } - pub fn to_le_radix(self: Self, radix: u32, result_len: u32) -> [u8] { crate::assert_constant(radix); crate::assert_constant(result_len); @@ -48,17 +47,14 @@ impl Field { self.__to_be_radix(radix, result_len) } - - // decompose `_self` into a `_result_len` vector over the `_radix` basis // `_radix` must be less than 256 #[builtin(to_le_radix)] fn __to_le_radix(self, radix: u32, result_len: u32) -> [u8] {} - + #[builtin(to_be_radix)] fn __to_be_radix(self, radix: u32, result_len: u32) -> [u8] {} - // Returns self to the power of the given exponent value. 
// Caution: we assume the exponent fits into 32 bits // using a bigger bit size impacts negatively the performance and should be done only if the exponent does not fit in 32 bits @@ -85,11 +81,10 @@ impl Field { lt_fallback(self, another) } } - } #[builtin(modulus_num_bits)] -pub fn modulus_num_bits() -> Field {} +pub fn modulus_num_bits() -> u64 {} #[builtin(modulus_be_bits)] pub fn modulus_be_bits() -> [u1] {} diff --git a/noir/noir_stdlib/src/hash.nr b/noir/noir_stdlib/src/hash.nr index cc864039a90..fcf21436197 100644 --- a/noir/noir_stdlib/src/hash.nr +++ b/noir/noir_stdlib/src/hash.nr @@ -1,5 +1,9 @@ mod poseidon; mod mimc; +mod poseidon2; +mod pedersen; + +use crate::default::Default; #[foreign(sha256)] // docs:start:sha256 @@ -70,7 +74,56 @@ pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] {} #[foreign(poseidon2_permutation)] -pub fn poseidon2_permutation(_input: [u8; N], _state_length: u32) -> [u8; N] {} +pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} #[foreign(sha256_compression)] pub fn sha256_compression(_input: [u32; 16], _state: [u32; 8]) -> [u32; 8] {} + +// Generic hashing support. +// Partially ported and impacted by rust. + +// Hash trait shall be implemented per type. +trait Hash{ + fn hash(self, state: &mut H) where H: Hasher; +} + +// Hasher trait shall be implemented by algorithms to provide hash-agnostic means. +// TODO: consider making the types generic here ([u8], [Field], etc.) +trait Hasher{ + fn finish(self) -> Field; + + fn write(&mut self, input: [Field]); +} + +// BuildHasher is a factory trait, responsible for production of specific Hasher. +trait BuildHasher where H: Hasher{ + fn build_hasher(self) -> H; +} + +struct BuildHasherDefault; + +impl BuildHasher for BuildHasherDefault +where + H: Hasher + Default +{ + fn build_hasher(_self: Self) -> H{ + H::default() + } +} + +impl Default for BuildHasherDefault +where + H: Hasher + Default +{ + fn default() -> Self{ + BuildHasherDefault{} + } +} + +// TODO: add implementations for the remainder of primitive types. 
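The `Hash`, `Hasher`, and `BuildHasher` traits above follow Rust's `std::hash` split: a type only describes how to feed itself into a hasher, the hasher owns the algorithm, and `BuildHasherDefault` manufactures hashers for containers such as `HashMap`. As a minimal sketch of how a user-defined type could opt in under these trait definitions (the `Point2` struct and its fields are hypothetical, not part of this change):

struct Point2 {
    x: Field,
    y: Field,
}

impl Hash for Point2 {
    // Feed both coordinates to whichever hasher the caller supplies,
    // mirroring the `impl Hash for Field` added below.
    fn hash<H>(self, state: &mut H) where H: Hasher {
        let input: [Field] = [self.x, self.y];
        H::write(state, input);
    }
}

Any `Hasher` implementation then works unchanged for such a type, which is what lets the new `HashMap` take its hasher as a type parameter.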
+impl Hash for Field{ + fn hash(self, state: &mut H) where H: Hasher{ + let input: [Field] = [self]; + H::write(state, input); + } +} diff --git a/noir/noir_stdlib/src/hash/pedersen.nr b/noir/noir_stdlib/src/hash/pedersen.nr new file mode 100644 index 00000000000..ace6851099d --- /dev/null +++ b/noir/noir_stdlib/src/hash/pedersen.nr @@ -0,0 +1,24 @@ +use crate::hash::{Hasher, pedersen_hash}; +use crate::default::Default; + +struct PedersenHasher{ + _state: [Field] +} + +impl Hasher for PedersenHasher { + fn finish(self) -> Field { + pedersen_hash(self._state) + } + + fn write(&mut self, input: [Field]){ + self._state = self._state.append(input); + } +} + +impl Default for PedersenHasher{ + fn default() -> Self{ + PedersenHasher{ + _state: [] + } + } +} diff --git a/noir/noir_stdlib/src/hash/poseidon.nr b/noir/noir_stdlib/src/hash/poseidon.nr index 3f4de73c0db..b1a7c4a2367 100644 --- a/noir/noir_stdlib/src/hash/poseidon.nr +++ b/noir/noir_stdlib/src/hash/poseidon.nr @@ -21,7 +21,7 @@ pub fn config( // Input checks let mul = crate::wrapping_mul(t as u8, (rf + rp)); assert(mul == ark.len() as u8); - assert(t * t == mds.len()); + assert(t * t == mds.len() as Field); assert(alpha != 0); PoseidonConfig { t, rf, rp, alpha, ark, mds } @@ -30,7 +30,7 @@ pub fn config( fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [Field; O] { let PoseidonConfig {t, rf, rp, alpha, ark, mds} = pos_conf; - assert(t == state.len()); + assert(t == state.len() as Field); let mut count = 0; // for r in 0..rf + rp @@ -47,7 +47,7 @@ fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [F } state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } state @@ -85,7 +85,7 @@ fn absorb( fn check_security(rate: Field, width: Field, security: Field) -> bool { let n = modulus_num_bits(); - ((n - 1) * (width - rate) / 2) as u8 > security as u8 + ((n - 1) as Field * (width - rate) / 2) as u8 > security as u8 } // A*x where A is an n x n matrix in row-major order and x an n-vector fn apply_matrix(a: [Field; M], x: [Field; N]) -> [Field; N] { diff --git a/noir/noir_stdlib/src/hash/poseidon/bn254.nr b/noir/noir_stdlib/src/hash/poseidon/bn254.nr index 0db6d9546dc..37b08e3c8fb 100644 --- a/noir/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir/noir_stdlib/src/hash/poseidon/bn254.nr @@ -9,12 +9,12 @@ use crate::hash::poseidon::apply_matrix; #[field(bn254)] pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [Field; O] { let PoseidonConfig {t, rf: config_rf, rp: config_rp, alpha, ark, mds} = pos_conf; - let rf = 8; - let rp = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68][state.len() - 2]; + let rf: u8 = 8; + let rp: u8 = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68][state.len() - 2]; - assert(t == state.len()); - assert(rf == config_rf as Field); - assert(rp == config_rp as Field); + assert(t == state.len() as Field); + assert(rf == config_rf); + assert(rp == config_rp); let mut count = 0; // First half of full rounds @@ -27,7 +27,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - } state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } // Partial rounds for _r in 0..rp { @@ -37,7 +37,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - state[0] = state[0].pow_32(alpha); state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } // Second half of full rounds for _r in 0..rf / 2 { @@ 
-49,7 +49,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - } state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } state diff --git a/noir/noir_stdlib/src/hash/poseidon2.nr b/noir/noir_stdlib/src/hash/poseidon2.nr new file mode 100644 index 00000000000..64c1876b4e2 --- /dev/null +++ b/noir/noir_stdlib/src/hash/poseidon2.nr @@ -0,0 +1,112 @@ +global RATE = 3; + +struct Poseidon2 { + cache: [Field;3], + state: [Field;4], + cache_size: u32, + squeeze_mode: bool, // 0 => absorb, 1 => squeeze +} + +impl Poseidon2 { + + pub fn hash(input: [Field; N], message_size: u32) -> Field { + if message_size == N { + Poseidon2::hash_internal(input, N, false) + } else { + Poseidon2::hash_internal(input, message_size, true) + } + } + + fn new(iv: Field) -> Poseidon2 { + let mut result = Poseidon2 { cache: [0; 3], state: [0; 4], cache_size: 0, squeeze_mode: false }; + result.state[RATE] = iv; + result + } + + fn perform_duplex(&mut self) -> [Field; RATE] { + // zero-pad the cache + for i in 0..RATE { + if i >= self.cache_size { + self.cache[i] = 0; + } + } + // add the cache into sponge state + for i in 0..RATE { + self.state[i] += self.cache[i]; + } + self.state = crate::hash::poseidon2_permutation(self.state, 4); + // return `RATE` number of field elements from the sponge state. + let mut result = [0; RATE]; + for i in 0..RATE { + result[i] = self.state[i]; + } + result + } + + fn absorb(&mut self, input: Field) { + if (!self.squeeze_mode) & (self.cache_size == RATE) { + // If we're absorbing, and the cache is full, apply the sponge permutation to compress the cache + let _ = self.perform_duplex(); + self.cache[0] = input; + self.cache_size = 1; + } else if (!self.squeeze_mode) & (self.cache_size != RATE) { + // If we're absorbing, and the cache is not full, add the input into the cache + self.cache[self.cache_size] = input; + self.cache_size += 1; + } else if self.squeeze_mode { + // If we're in squeeze mode, switch to absorb mode and add the input into the cache. + // N.B. I don't think this code path can be reached?! + self.cache[0] = input; + self.cache_size = 1; + self.squeeze_mode = false; + } + } + + fn squeeze(&mut self) -> Field { + if self.squeeze_mode & (self.cache_size == 0) { + // If we're in squeze mode and the cache is empty, there is nothing left to squeeze out of the sponge! + // Switch to absorb mode. + self.squeeze_mode = false; + self.cache_size = 0; + } + if !self.squeeze_mode { + // If we're in absorb mode, apply sponge permutation to compress the cache, populate cache with compressed + // state and switch to squeeze mode. Note: this code block will execute if the previous `if` condition was + // matched + let new_output_elements = self.perform_duplex(); + self.squeeze_mode = true; + for i in 0..RATE { + self.cache[i] = new_output_elements[i]; + } + self.cache_size = RATE; + } + // By this point, we should have a non-empty cache. Pop one item off the top of the cache and return it. 
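The `absorb` logic above and the `squeeze` logic around it form a duplex sponge with rate `RATE = 3`: `absorb` buffers up to three field elements in `cache`, `perform_duplex` adds the cache into the four-element state and applies `poseidon2_permutation`, and `squeeze` drains the permuted state back out through the cache. A hedged usage sketch of the public entry point defined at the top of this new file (input values are illustrative):

use dep::std::hash::poseidon2::Poseidon2;

fn poseidon2_sponge_demo(input: [Field; 4]) {
    // Fixed-length hash: message_size equals the array length, so no
    // variable-length marker is absorbed.
    let full = Poseidon2::hash(input, 4);

    // Variable-length hash of a 3-element prefix: hash_internal absorbs a
    // trailing 1, so it is domain-separated from fixed-length hashing.
    let prefix = Poseidon2::hash(input, 3);

    // The sponge is deterministic for identical inputs and lengths.
    assert(full == Poseidon2::hash(input, 4));
    assert(prefix == Poseidon2::hash(input, 3));
}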
+ let result = self.cache[0]; + for i in 1..RATE { + if i < self.cache_size { + self.cache[i - 1] = self.cache[i]; + } + } + self.cache_size -= 1; + self.cache[self.cache_size] = 0; + result + } + + fn hash_internal(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { + let iv : Field = (in_len as Field) * 18446744073709551616; + let mut sponge = Poseidon2::new(iv); + for i in 0..input.len() { + if i as u32 < in_len { + sponge.absorb(input[i]); + } + } + + // In the case where the hash preimage is variable-length, we append `1` to the end of the input, to distinguish + // from fixed-length hashes. (the combination of this additional field element + the hash IV ensures + // fixed-length and variable-length hashes do not collide) + if is_variable_length { + sponge.absorb(1); + } + sponge.squeeze() + } +} diff --git a/noir/noir_stdlib/src/option.nr b/noir/noir_stdlib/src/option.nr index cab95731d05..1c32f758af7 100644 --- a/noir/noir_stdlib/src/option.nr +++ b/noir/noir_stdlib/src/option.nr @@ -39,11 +39,7 @@ impl Option { /// Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. pub fn unwrap_or(self, default: T) -> T { - if self._is_some { - self._value - } else { - default - } + if self._is_some { self._value } else { default } } /// Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return @@ -112,31 +108,19 @@ impl Option { /// If self is Some, return self. Otherwise, return `other`. pub fn or(self, other: Self) -> Self { - if self._is_some { - self - } else { - other - } + if self._is_some { self } else { other } } /// If self is Some, return self. Otherwise, return `default()`. pub fn or_else(self, default: fn[Env]() -> Self) -> Self { - if self._is_some { - self - } else { - default() - } + if self._is_some { self } else { default() } } // If only one of the two Options is Some, return that option. // Otherwise, if both options are Some or both are None, None is returned. pub fn xor(self, other: Self) -> Self { if self._is_some { - if other._is_some { - Option::none() - } else { - self - } + if other._is_some { Option::none() } else { self } } else if other._is_some { other } else { diff --git a/noir/noir_stdlib/src/scalar_mul.nr b/noir/noir_stdlib/src/scalar_mul.nr index 26378e4839a..eee7aac39f2 100644 --- a/noir/noir_stdlib/src/scalar_mul.nr +++ b/noir/noir_stdlib/src/scalar_mul.nr @@ -6,7 +6,7 @@ struct EmbeddedCurvePoint { } impl EmbeddedCurvePoint { - fn double(self) -> EmbeddedCurvePoint { + fn double(self) -> EmbeddedCurvePoint { embedded_curve_add(self, self) } } @@ -32,5 +32,14 @@ pub fn fixed_base_embedded_curve( // docs:end:fixed_base_embedded_curve {} +// This is a hack as returning an `EmbeddedCurvePoint` from a foreign function in brillig returns a [BrilligVariable::SingleAddr; 2] rather than BrilligVariable::BrilligArray +// as is defined in the brillig bytecode format. This is a workaround which allows us to fix this without modifying the serialization format. 
+fn embedded_curve_add(point1: EmbeddedCurvePoint, point2: EmbeddedCurvePoint) -> EmbeddedCurvePoint { + let point_array = embedded_curve_add_array_return(point1, point2); + let x = point_array[0]; + let y = point_array[1]; + EmbeddedCurvePoint { x, y } +} + #[foreign(embedded_curve_add)] -fn embedded_curve_add(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> EmbeddedCurvePoint {} +fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 2] {} diff --git a/noir/noir_stdlib/src/schnorr.nr b/noir/noir_stdlib/src/schnorr.nr index 33656254550..757963d40d7 100644 --- a/noir/noir_stdlib/src/schnorr.nr +++ b/noir/noir_stdlib/src/schnorr.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message: [u8; N] ) -> bool // docs:end:schnorr_verify -{} \ No newline at end of file +{} diff --git a/noir/noir_stdlib/src/sha256.nr b/noir/noir_stdlib/src/sha256.nr index 6bcc5ea74c6..2f686a64165 100644 --- a/noir/noir_stdlib/src/sha256.nr +++ b/noir/noir_stdlib/src/sha256.nr @@ -57,7 +57,7 @@ pub fn digest(msg: [u8; N]) -> [u8; 32] { msg_block[i as Field] = 0; i = i + 1; } else if i < 64 { - let mut len = 8 * msg.len() as u64; + let mut len = 8 * msg.len(); for j in 0..8 { msg_block[63 - j] = len as u8; len >>= 8; diff --git a/noir/noir_stdlib/src/sha512.nr b/noir/noir_stdlib/src/sha512.nr index 155ba593bba..f3155dd7528 100644 --- a/noir/noir_stdlib/src/sha512.nr +++ b/noir/noir_stdlib/src/sha512.nr @@ -136,7 +136,7 @@ pub fn digest(msg: [u8; N]) -> [u8; 64] { msg_block[i as Field] = 0; i += 1; } else if i < 128 { - let mut len = 8 * msg.len() as u64; // u128 unsupported + let mut len = 8 * msg.len(); // u128 unsupported for j in 0..16 { msg_block[127 - j] = len as u8; len >>= 8; diff --git a/noir/noir_stdlib/src/slice.nr b/noir/noir_stdlib/src/slice.nr index aa4b73edc1a..ea8d09d14ce 100644 --- a/noir/noir_stdlib/src/slice.nr +++ b/noir/noir_stdlib/src/slice.nr @@ -3,34 +3,34 @@ impl [T] { /// new slice with a length one greater than the /// original unmodified slice. #[builtin(slice_push_back)] - pub fn push_back(self, elem: T) -> Self { } + pub fn push_back(self, elem: T) -> Self {} /// Push a new element to the front of the slice, returning a /// new slice with a length one greater than the /// original unmodified slice. #[builtin(slice_push_front)] - pub fn push_front(self, elem: T) -> Self { } + pub fn push_front(self, elem: T) -> Self {} /// Remove the last element of the slice, returning the /// popped slice and the element in a tuple #[builtin(slice_pop_back)] - pub fn pop_back(self) -> (Self, T) { } + pub fn pop_back(self) -> (Self, T) {} /// Remove the first element of the slice, returning the /// element and the popped slice in a tuple #[builtin(slice_pop_front)] - pub fn pop_front(self) -> (T, Self) { } + pub fn pop_front(self) -> (T, Self) {} /// Insert an element at a specified index, shifting all elements /// after it to the right #[builtin(slice_insert)] - pub fn insert(self, index: Field, elem: T) -> Self { } + pub fn insert(self, index: u64, elem: T) -> Self {} /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the altered slice and /// the removed element #[builtin(slice_remove)] - pub fn remove(self, index: Field) -> (Self, T) { } + pub fn remove(self, index: u64) -> (Self, T) {} // Append each element of the `other` slice to the end of `self`. // This returns a new slice and leaves both input slices unchanged. 
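With this hunk, `insert` and `remove` take their index as `u64` rather than `Field`, in line with the PR-wide move of lengths and indices off `Field`. A small sketch of the slice API as it reads after the change (values are illustrative):

fn slice_api_demo() {
    let mut s: [Field] = [];
    s = s.push_back(10); // [10]
    s = s.push_front(5); // [5, 10]
    s = s.insert(1, 7); // [5, 7, 10]; the index is a u64 now
    let (s, seven) = s.remove(1); // back to [5, 10]
    assert(seven == 7);
    let (s, ten) = s.pop_back(); // [5]
    assert(ten == 10);
    assert(s.len() == 1);
}

Each builtin returns a fresh slice and leaves its input unchanged, so the bindings above are rebound rather than mutated in place.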
diff --git a/noir/noir_stdlib/src/string.nr b/noir/noir_stdlib/src/string.nr index ad6fd19e2de..12b5a1e75ec 100644 --- a/noir/noir_stdlib/src/string.nr +++ b/noir/noir_stdlib/src/string.nr @@ -2,7 +2,7 @@ use crate::collections::vec::Vec; impl str { /// Converts the given string into a byte array #[builtin(str_as_bytes)] - pub fn as_bytes(self) -> [u8; N] { } + pub fn as_bytes(self) -> [u8; N] {} /// return a byte vector of the str content pub fn as_bytes_vec(self: Self) -> Vec { diff --git a/noir/noir_stdlib/src/test.nr b/noir/noir_stdlib/src/test.nr index 560cfde741c..e1c320215de 100644 --- a/noir/noir_stdlib/src/test.nr +++ b/noir/noir_stdlib/src/test.nr @@ -19,9 +19,7 @@ struct OracleMock { impl OracleMock { unconstrained pub fn mock(name: str) -> Self { - Self { - id: create_mock_oracle(name), - } + Self { id: create_mock_oracle(name) } } unconstrained pub fn with_params
(self, params: P) -> Self { diff --git a/noir/noir_stdlib/src/uint128.nr b/noir/noir_stdlib/src/uint128.nr index e2ee8013d48..b91ed5c4cb2 100644 --- a/noir/noir_stdlib/src/uint128.nr +++ b/noir/noir_stdlib/src/uint128.nr @@ -13,14 +13,11 @@ impl U128 { pub fn from_u64s_le(lo: u64, hi: u64) -> U128 { // in order to handle multiplication, we need to represent the product of two u64 without overflow assert(crate::field::modulus_num_bits() as u32 > 128); - U128 { - lo: lo as Field, - hi: hi as Field, - } + U128 { lo: lo as Field, hi: hi as Field } } pub fn from_u64s_be(hi: u64, lo: u64) -> U128 { - U128::from_u64s_le(lo,hi) + U128::from_u64s_le(lo, hi) } pub fn from_le_bytes(bytes: [u8; 16]) -> U128 { @@ -36,16 +33,13 @@ impl U128 { hi += (bytes[i] as Field)*base; base *= 256; } - U128 { - lo, - hi, - } + U128 { lo, hi } } pub fn to_be_bytes(self: Self) -> [u8; 16] { let lo = self.lo.to_be_bytes(8); let hi = self.hi.to_be_bytes(8); - let mut bytes = [0;16]; + let mut bytes = [0; 16]; for i in 0..8 { bytes[i] = hi[i]; bytes[i+8] = lo[i]; @@ -56,7 +50,7 @@ impl U128 { pub fn to_le_bytes(self: Self) -> [u8; 16] { let lo = self.lo.to_le_bytes(8); let hi = self.hi.to_le_bytes(8); - let mut bytes = [0;16]; + let mut bytes = [0; 16]; for i in 0..8 { bytes[i] = lo[i]; bytes[i+8] = hi[i]; @@ -73,9 +67,9 @@ impl U128 { let mut lo = 0; let mut hi = 0; - let mut base = 1; + let mut base = 1; if N <= 18 { - for i in 0..N-2 { + for i in 0..N - 2 { lo += U128::decode_ascii(bytes[N-i-1])*base; base = base*16; } @@ -85,27 +79,21 @@ impl U128 { base = base*16; } base = 1; - for i in 17..N-1 { + for i in 17..N - 1 { hi += U128::decode_ascii(bytes[N-i])*base; base = base*16; } } - U128 { - lo: lo as Field, - hi: hi as Field, - } + U128 { lo: lo as Field, hi: hi as Field } } fn decode_ascii(ascii: u8) -> Field { if ascii < 58 { ascii - 48 + } else if ascii < 71 { + ascii - 55 } else { - if ascii < 71 { - ascii - 55 - } else { - ascii - 87 - } - + ascii - 87 } as Field } @@ -114,15 +102,14 @@ impl U128 { (U128::from_u64s_le(0, 0), self) } else { //TODO check if this can overflow? 
- let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2,0)); - let q_mul_2 = q * U128::from_u64s_le(2,0); + let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2, 0)); + let q_mul_2 = q * U128::from_u64s_le(2, 0); if r < b { (q_mul_2, r) } else { - (q_mul_2 + U128::from_u64s_le(1,0), r - b) + (q_mul_2 + U128::from_u64s_le(1, 0), r - b) } - - } + } } pub fn from_integer(i: T) -> U128 { @@ -130,31 +117,25 @@ impl U128 { // Reject values which would overflow a u128 f.assert_max_bit_size(128); let lo = f as u64 as Field; - let hi = (f-lo) / pow64; - U128 { - lo, - hi, - } + let hi = (f - lo) / pow64; + U128 { lo, hi } } pub fn to_integer(self) -> T { - crate::from_field(self.lo+self.hi*pow64) + crate::from_field(self.lo + self.hi * pow64) } fn wrapping_mul(self: Self, b: U128) -> U128 { - let low = self.lo*b.lo; + let low = self.lo * b.lo; let lo = low as u64 as Field; let carry = (low - lo) / pow64; let high = if crate::field::modulus_num_bits() as u32 > 196 { - (self.lo+self.hi)*(b.lo+b.hi) - low + carry + (self.lo + self.hi) * (b.lo + b.hi) - low + carry } else { - self.lo*b.hi + self.hi*b.lo + carry + self.lo * b.hi + self.hi * b.lo + carry }; let hi = high as u64 as Field; - U128 { - lo, - hi, - } + U128 { lo, hi } } } diff --git a/noir/rust-toolchain.toml b/noir/rust-toolchain.toml index b6f7edc4bde..0e5ac891ce9 100644 --- a/noir/rust-toolchain.toml +++ b/noir/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.71.1" +channel = "1.73.0" components = [ "rust-src" ] targets = [ "wasm32-unknown-unknown", "wasm32-wasi", "aarch64-apple-darwin" ] profile = "default" diff --git a/noir/test_programs/.gitignore b/noir/test_programs/.gitignore index a229df6197f..e98a2fb38b6 100644 --- a/noir/test_programs/.gitignore +++ b/noir/test_programs/.gitignore @@ -1,2 +1,3 @@ acir_artifacts -execution_success/**/crs \ No newline at end of file +execution_success/**/crs +Nargo.toml diff --git a/noir/test_programs/compile_failure/hashmap_load_factor/Nargo.toml b/noir/test_programs/compile_failure/hashmap_load_factor/Nargo.toml new file mode 100644 index 00000000000..92da5a357f4 --- /dev/null +++ b/noir/test_programs/compile_failure/hashmap_load_factor/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "hashmap_load_factor" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/compile_failure/hashmap_load_factor/Prover.toml b/noir/test_programs/compile_failure/hashmap_load_factor/Prover.toml new file mode 100644 index 00000000000..e54319c61e9 --- /dev/null +++ b/noir/test_programs/compile_failure/hashmap_load_factor/Prover.toml @@ -0,0 +1,26 @@ +# Expected 6 key-value entries for hashmap capacity of 8. +# These must be distinct (both key-to-key, and value-to-value) for correct testing. 
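Returning to the `U128` division reformatted above: `unconstrained_div` recurses on `2 * b` and then corrects the doubled quotient, which is only affordable because none of those steps are constrained. A constrained caller can accept the witness and enforce just the Euclidean identity. A sketch of that pattern, assuming `unconstrained_div` is visible to the caller and that `U128`'s `Eq`, `Ord`, and arithmetic operators behave as used elsewhere in this file:

fn div_rem_checked(a: U128, b: U128) -> (U128, U128) {
    // Compute the candidate quotient and remainder without constraints...
    let (q, r) = a.unconstrained_div(b);
    // ...then constrain a = q*b + r with r < b, which pins down (q, r).
    assert(q * b + r == a);
    assert(r < b);
    (q, r)
}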
+ +[[input]] +key = 2 +value = 17 + +[[input]] +key = 3 +value = 19 + +[[input]] +key = 5 +value = 23 + +[[input]] +key = 7 +value = 29 + +[[input]] +key = 11 +value = 31 + +[[input]] +key = 41 +value = 43 \ No newline at end of file diff --git a/noir/test_programs/compile_failure/hashmap_load_factor/src/main.nr b/noir/test_programs/compile_failure/hashmap_load_factor/src/main.nr new file mode 100644 index 00000000000..ade43f898e1 --- /dev/null +++ b/noir/test_programs/compile_failure/hashmap_load_factor/src/main.nr @@ -0,0 +1,35 @@ +use dep::std::collections::map::HashMap; +use dep::std::hash::BuildHasherDefault; +use dep::std::hash::pedersen::PedersenHasher; + +struct Entry{ + key: Field, + value: Field +} + +global HASHMAP_CAP = 8; +global HASHMAP_LEN = 6; + +fn allocate_hashmap() -> HashMap> { + HashMap::default() +} + +fn main(input: [Entry; HASHMAP_LEN]) { + test_load_factor(input); +} + +// In this test we exceed load factor: +// α_max = 0.75, thus for capacity of 8 and lenght of 6 +// insertion of new unique key (7-th) should throw assertion error. +fn test_load_factor(input: [Entry; HASHMAP_LEN]) { + let mut hashmap = allocate_hashmap(); + + for entry in input { + hashmap.insert(entry.key, entry.value); + } + + // We use prime numbers for testing, + // therefore it is guaranteed that doubling key we get unique value. + let key = input[0].key * 2; + hashmap.insert(key, input[0].value); +} diff --git a/noir/test_programs/compile_success_empty/closure_explicit_types/src/main.nr b/noir/test_programs/compile_success_empty/closure_explicit_types/src/main.nr index eec2b90b5b2..b6c8a6b7b3c 100644 --- a/noir/test_programs/compile_success_empty/closure_explicit_types/src/main.nr +++ b/noir/test_programs/compile_success_empty/closure_explicit_types/src/main.nr @@ -7,13 +7,13 @@ fn ret_closure1() -> fn[(Field,)]() -> Field { || x + 10 } // return lamda that captures two things -fn ret_closure2() -> fn[(Field,Field)]() -> Field { +fn ret_closure2() -> fn[(Field, Field)]() -> Field { let x = 20; let y = 10; || x + y + 10 } // return lamda that captures two things with different types -fn ret_closure3() -> fn[(u32,u64)]() -> u64 { +fn ret_closure3() -> fn[(u32, u64)]() -> u64 { let x: u32 = 20; let y: u64 = 10; || x as u64 + y + 10 diff --git a/noir/test_programs/compile_success_empty/conditional_regression_579/src/main.nr b/noir/test_programs/compile_success_empty/conditional_regression_579/src/main.nr index a479a7a6fbf..a517f4fdb70 100644 --- a/noir/test_programs/compile_success_empty/conditional_regression_579/src/main.nr +++ b/noir/test_programs/compile_success_empty/conditional_regression_579/src/main.nr @@ -12,9 +12,7 @@ struct MyStruct579 { impl MyStruct579 { fn new(array_param: [u32; 2]) -> MyStruct579 { - MyStruct579 { - array_param: array_param - } + MyStruct579 { array_param } } } diff --git a/noir/test_programs/compile_success_empty/reexports/src/main.nr b/noir/test_programs/compile_success_empty/reexports/src/main.nr index bb94b21b221..ed469ff77d0 100644 --- a/noir/test_programs/compile_success_empty/reexports/src/main.nr +++ b/noir/test_programs/compile_success_empty/reexports/src/main.nr @@ -1,8 +1,6 @@ use dep::reexporting_lib::{FooStruct, MyStruct, lib}; fn main() { - let x: FooStruct = MyStruct { - inner: 0 - }; + let x: FooStruct = MyStruct { inner: 0 }; assert(lib::is_struct_zero(x)); } diff --git a/noir/test_programs/compile_success_empty/specialization/src/main.nr b/noir/test_programs/compile_success_empty/specialization/src/main.nr index 9cd32e0f1eb..30116330a86 100644 --- 
a/noir/test_programs/compile_success_empty/specialization/src/main.nr +++ b/noir/test_programs/compile_success_empty/specialization/src/main.nr @@ -1,11 +1,15 @@ struct Foo {} impl Foo { - fn foo(_self: Self) -> Field { 1 } + fn foo(_self: Self) -> Field { + 1 + } } impl Foo { - fn foo(_self: Self) -> Field { 2 } + fn foo(_self: Self) -> Field { + 2 + } } fn main() { diff --git a/noir/test_programs/execution_success/3_add/src/main.nr b/noir/test_programs/execution_success/3_add/src/main.nr index 9c77af868b6..480348dc1cf 100644 --- a/noir/test_programs/execution_success/3_add/src/main.nr +++ b/noir/test_programs/execution_success/3_add/src/main.nr @@ -5,4 +5,4 @@ fn main(mut x: u32, y: u32, z: u32) { x *= 8; assert(x > 9); -} \ No newline at end of file +} diff --git a/noir/test_programs/execution_success/array_dynamic/src/main.nr b/noir/test_programs/execution_success/array_dynamic/src/main.nr index dde7bacc455..6b51095bd8c 100644 --- a/noir/test_programs/execution_success/array_dynamic/src/main.nr +++ b/noir/test_programs/execution_success/array_dynamic/src/main.nr @@ -2,8 +2,8 @@ fn main( x: [u32; 5], mut z: u32, t: u32, - index: [Field;5], - index2: [Field;5], + index: [Field; 5], + index2: [Field; 5], offset: Field, sublen: Field ) { diff --git a/noir/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml b/noir/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml new file mode 100644 index 00000000000..03da304acc3 --- /dev/null +++ b/noir/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "array_dynamic_blackbox_input" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml b/noir/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml new file mode 100644 index 00000000000..cc60eb8a8ba --- /dev/null +++ b/noir/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml @@ -0,0 +1,4 @@ +index = "1" +leaf = ["51", "109", "224", "175", "60", "42", "79", "222", "117", "255", "174", "79", "126", "242", "74", "34", "100", "35", "20", "200", "109", "89", "191", "219", "41", "10", "118", "217", "165", "224", "215", "109"] +path = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63"] +root = [79, 230, 126, 184, 98, 125, 226, 58, 117, 45, 140, 15, 72, 118, 89, 173, 117, 161, 166, 0, 214, 125, 13, 16, 113, 81, 173, 156, 97, 15, 57, 216] diff --git a/noir/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr b/noir/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr new file mode 100644 index 00000000000..4cbf1bd8e6d --- /dev/null +++ b/noir/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr @@ -0,0 +1,27 @@ +fn main(leaf: [u8; 32], path: [u8; 64], index: u32, root: [u8; 32]) { + compute_root(leaf, path, index, root); +} + +fn compute_root(leaf: [u8; 32], path: [u8; 64], _index: u32, root: [u8; 32]) { + let mut current = leaf; + let mut index = _index; + + for i in 0..2 { + let mut hash_input = [0; 64]; + let offset = i * 32; + let is_right = (index & 1) != 0; 
+ let a = if is_right { 32 } else { 0 }; + let b = if is_right { 0 } else { 32 }; + + for j in 0..32 { + hash_input[j + a] = current[j]; + hash_input[j + b] = path[offset + j]; + } + + current = dep::std::hash::sha256(hash_input); + index = index >> 1; + } + + // Regression for issue #4258 + assert(root == current); +} diff --git a/noir/test_programs/execution_success/array_dynamic_main_output/src/main.nr b/noir/test_programs/execution_success/array_dynamic_main_output/src/main.nr index ccb7016a190..50feb71f983 100644 --- a/noir/test_programs/execution_success/array_dynamic_main_output/src/main.nr +++ b/noir/test_programs/execution_success/array_dynamic_main_output/src/main.nr @@ -1,4 +1,4 @@ fn main(mut x: [Field; 10], index: u8) -> pub [Field; 10] { x[index] = 0; x -} \ No newline at end of file +} diff --git a/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml b/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml new file mode 100644 index 00000000000..07d867d433f --- /dev/null +++ b/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "array_dynamic_nested_blackbox_input" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml b/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml new file mode 100644 index 00000000000..1f291532414 --- /dev/null +++ b/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml @@ -0,0 +1,23 @@ +y = "3" +hash_result = [50, 53, 90, 252, 105, 236, 223, 30, 135, 229, 193, 172, 51, 139, 8, 32, 188, 104, 151, 115, 129, 168, 27, 71, 203, 47, 40, 228, 89, 177, 129, 100] + +[[x]] +a = "1" +b = ["2", "3", "20"] + +[x.bar] +inner = ["100", "101", "102"] + +[[x]] +a = "4" # idx = 3, flattened start idx = 7 +b = ["5", "6", "21"] # idx = 4, flattened start idx = 8 + +[x.bar] +inner = ["103", "104", "105"] # idx = 5, flattened start idx = 11 + +[[x]] +a = "7" +b = ["8", "9", "22"] + +[x.bar] +inner = ["106", "107", "108"] \ No newline at end of file diff --git a/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr b/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr new file mode 100644 index 00000000000..8faaf69dfc8 --- /dev/null +++ b/noir/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr @@ -0,0 +1,20 @@ +struct Bar { + inner: [u8; 3], +} + +struct Foo { + a: Field, + b: [Field; 3], + bar: Bar, +} + +fn main(mut x: [Foo; 3], y: pub Field, hash_result: pub [u8; 32]) { + // Simple dynamic array set for entire inner most array + x[y - 1].bar.inner = [106, 107, 10]; + let mut hash_input = x[y - 1].bar.inner; + // Make sure that we are passing a dynamic array to the black box function call + // by setting the array using a dynamic index here + hash_input[y - 1] = 0; + let hash = dep::std::hash::sha256(hash_input); + assert_eq(hash, hash_result); +} diff --git a/noir/test_programs/execution_success/array_len/src/main.nr b/noir/test_programs/execution_success/array_len/src/main.nr index b60762f4636..f846cfb9844 100644 --- a/noir/test_programs/execution_success/array_len/src/main.nr +++ b/noir/test_programs/execution_success/array_len/src/main.nr @@ -1,12 +1,12 @@ -fn len_plus_1(array: [T; N]) -> Field { +fn len_plus_1(array: [T; N]) -> u64 { 
array.len() + 1 } -fn add_lens(a: [T; N], b: [Field; M]) -> Field { +fn add_lens(a: [T; N], b: [Field; M]) -> u64 { a.len() + b.len() } -fn nested_call(b: [Field; N]) -> Field { +fn nested_call(b: [Field; N]) -> u64 { len_plus_1(b) } diff --git a/noir/test_programs/execution_success/assert_statement_recursive/src/main.nr b/noir/test_programs/execution_success/assert_statement_recursive/src/main.nr index 687a0d324ba..d89ea3d35bb 100644 --- a/noir/test_programs/execution_success/assert_statement_recursive/src/main.nr +++ b/noir/test_programs/execution_success/assert_statement_recursive/src/main.nr @@ -8,4 +8,4 @@ fn main(x: Field, y: pub Field) { assert(x == y, "x and y are not equal"); assert_eq(x, y, "x and y are not equal"); -} \ No newline at end of file +} diff --git a/noir/test_programs/execution_success/bigint/src/main.nr b/noir/test_programs/execution_success/bigint/src/main.nr index 4b873506287..b93fec370e5 100644 --- a/noir/test_programs/execution_success/bigint/src/main.nr +++ b/noir/test_programs/execution_success/bigint/src/main.nr @@ -1,8 +1,8 @@ use dep::std::bigint; -fn main(mut x: [u8;5], y: [u8;5]) { - let a = bigint::Secpk1Fq::from_le_bytes([x[0],x[1],x[2],x[3],x[4]]); - let b = bigint::Secpk1Fq::from_le_bytes([y[0],y[1],y[2],y[3],y[4]]); +fn main(mut x: [u8; 5], y: [u8; 5]) { + let a = bigint::Secpk1Fq::from_le_bytes([x[0], x[1], x[2], x[3], x[4]]); + let b = bigint::Secpk1Fq::from_le_bytes([y[0], y[1], y[2], y[3], y[4]]); let a_bytes = a.to_le_bytes(); let b_bytes = b.to_le_bytes(); for i in 0..5 { @@ -10,8 +10,7 @@ fn main(mut x: [u8;5], y: [u8;5]) { assert(b_bytes[i] == y[i]); } - let d = a*b - b; + let d = a * b - b; let d1 = bigint::Secpk1Fq::from_le_bytes(597243850900842442924.to_le_bytes(10)); assert(d1 == d); - } diff --git a/noir/test_programs/execution_success/brillig_cow/src/main.nr b/noir/test_programs/execution_success/brillig_cow/src/main.nr index 7d847e085fe..52ce8b8be3c 100644 --- a/noir/test_programs/execution_success/brillig_cow/src/main.nr +++ b/noir/test_programs/execution_success/brillig_cow/src/main.nr @@ -10,42 +10,37 @@ struct ExecutionResult { impl ExecutionResult { fn is_equal(self, other: ExecutionResult) -> bool { - (self.original == other.original) & - (self.modified_once == other.modified_once) & - (self.modified_twice == other.modified_twice) + (self.original == other.original) + & (self.modified_once == other.modified_once) + & (self.modified_twice == other.modified_twice) } } fn modify_in_inlined_constrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { let mut modified = original; - + modified[index] = 27; let modified_once = modified; modified[index+1] = 27; - ExecutionResult { - original, - modified_once, - modified_twice: modified - } + ExecutionResult { original, modified_once, modified_twice: modified } } -unconstrained fn modify_in_unconstrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { +unconstrained fn modify_in_unconstrained( + original: [Field; ARRAY_SIZE], + index: u64 +) -> ExecutionResult { let mut modified = original; - + modified[index] = 27; let modified_once = modified; modified[index+1] = 27; - ExecutionResult { - original, - modified_once, - modified_twice: modified - } + ExecutionResult { original, modified_once, modified_twice: modified } } unconstrained fn main(original: [Field; ARRAY_SIZE], index: u64, expected_result: ExecutionResult) { diff --git a/noir/test_programs/execution_success/brillig_cow_regression/src/main.nr 
b/noir/test_programs/execution_success/brillig_cow_regression/src/main.nr index 457abde32d5..ba51548d9dd 100644 --- a/noir/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/noir/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -1,12 +1,12 @@ // Tests a performance regression found in aztec-packages with brillig cow optimization -global MAX_NEW_NOTE_HASHES_PER_TX: Field = 64; -global MAX_NEW_NULLIFIERS_PER_TX: Field = 64; -global MAX_NEW_L2_TO_L1_MSGS_PER_TX: Field = 2; -global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: Field = 16; -global MAX_NEW_CONTRACTS_PER_TX: Field = 1; -global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: Field = 1; -global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: Field = 1; +global MAX_NEW_NOTE_HASHES_PER_TX: u64 = 64; +global MAX_NEW_NULLIFIERS_PER_TX: u64 = 64; +global MAX_NEW_L2_TO_L1_MSGS_PER_TX: u64 = 2; +global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: u64 = 16; +global MAX_NEW_CONTRACTS_PER_TX: u64 = 1; +global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; +global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; global NUM_FIELDS_PER_SHA256 = 2; global CALLDATA_HASH_INPUT_SIZE = 169; global CALL_DATA_HASH_LOG_FIELDS = 4; diff --git a/noir/test_programs/execution_success/brillig_fns_as_values/src/main.nr b/noir/test_programs/execution_success/brillig_fns_as_values/src/main.nr index 2f5d14583d5..ea3148915b8 100644 --- a/noir/test_programs/execution_success/brillig_fns_as_values/src/main.nr +++ b/noir/test_programs/execution_success/brillig_fns_as_values/src/main.nr @@ -14,7 +14,7 @@ fn main(x: u32) { assert(increment(x) == x + 1); } -unconstrained fn wrapper(func: fn (u32) -> u32, param: u32) -> u32 { +unconstrained fn wrapper(func: fn(u32) -> u32, param: u32) -> u32 { func(param) } diff --git a/noir/test_programs/execution_success/brillig_oracle/Prover.toml b/noir/test_programs/execution_success/brillig_oracle/Prover.toml index 2b26a4ce471..161f4fb62c0 100644 --- a/noir/test_programs/execution_success/brillig_oracle/Prover.toml +++ b/noir/test_programs/execution_success/brillig_oracle/Prover.toml @@ -1,2 +1,2 @@ -x = "10" +_x = "10" diff --git a/noir/test_programs/execution_success/brillig_oracle/src/main.nr b/noir/test_programs/execution_success/brillig_oracle/src/main.nr index 490b7b605e3..6a9e5806621 100644 --- a/noir/test_programs/execution_success/brillig_oracle/src/main.nr +++ b/noir/test_programs/execution_success/brillig_oracle/src/main.nr @@ -2,7 +2,7 @@ use dep::std::slice; use dep::std::test::OracleMock; // Tests oracle usage in brillig/unconstrained functions -fn main(x: Field) { +fn main(_x: Field) { let size = 20; // TODO: Add a method along the lines of `(0..size).to_array()`. 
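One note on the `fn(u32) -> u32` signature fix above: plain named functions coerce to unclosured `fn` types, which is what allows brillig code to pass functions as values. A minimal sketch in the spirit of `brillig_fns_as_values` (names are illustrative):

fn double(x: u32) -> u32 {
    x * 2
}

unconstrained fn apply_twice(f: fn(u32) -> u32, x: u32) -> u32 {
    f(f(x))
}

unconstrained fn fn_values_demo() {
    // double(double(3)) == 12
    assert(apply_twice(double, 3) == 12);
}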
let mut mock_oracle_response = [0; 20]; @@ -17,7 +17,7 @@ fn main(x: Field) { let _ = OracleMock::mock("get_number_sequence").with_params(size).returns((20, mock_oracle_response)); let _ = OracleMock::mock("get_reverse_number_sequence").with_params(size).returns((20, reversed_mock_oracle_response)); - get_number_sequence_wrapper(size); + get_number_sequence_wrapper(size as Field); } // Define oracle functions which we have mocked above diff --git a/noir/test_programs/execution_success/brillig_scalar_mul/src/main.nr b/noir/test_programs/execution_success/brillig_scalar_mul/src/main.nr index ab2f79eb815..c7c3a85a4ff 100644 --- a/noir/test_programs/execution_success/brillig_scalar_mul/src/main.nr +++ b/noir/test_programs/execution_success/brillig_scalar_mul/src/main.nr @@ -20,4 +20,13 @@ unconstrained fn main( let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); assert(res[0] == pub_x); assert(res[1] == pub_y); + + let pub_point= std::scalar_mul::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let g1_y = 17631683881184975370165255887551781615748388533673675138860; + let g1= std::scalar_mul::EmbeddedCurvePoint { x: 1, y: g1_y }; + + let res = pub_point.double(); + let double = g1.add(g1); + + assert(double.x == res.x); } diff --git a/noir/test_programs/execution_success/brillig_slices/src/main.nr b/noir/test_programs/execution_success/brillig_slices/src/main.nr index 48bc8a76bb8..847c41de25c 100644 --- a/noir/test_programs/execution_success/brillig_slices/src/main.nr +++ b/noir/test_programs/execution_success/brillig_slices/src/main.nr @@ -131,7 +131,7 @@ unconstrained fn merge_slices_mutate_in_loop(x: Field, y: Field) -> [Field] { let mut slice = [0; 2]; if x != y { for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } } else { slice = slice.push_back(x); diff --git a/noir/test_programs/execution_success/conditional_regression_661/src/main.nr b/noir/test_programs/execution_success/conditional_regression_661/src/main.nr index 03102eb775e..26521a88358 100644 --- a/noir/test_programs/execution_success/conditional_regression_661/src/main.nr +++ b/noir/test_programs/execution_success/conditional_regression_661/src/main.nr @@ -16,11 +16,11 @@ fn test5(a: u32) { } } -fn issue_661_foo(array: [u32;4], b: u32) -> [u32;1] { +fn issue_661_foo(array: [u32; 4], b: u32) -> [u32; 1] { [array[0] + b] } -fn issue_661_bar(a: [u32;4]) -> [u32;4] { +fn issue_661_bar(a: [u32; 4]) -> [u32; 4] { let mut b: [u32; 4] = [0; 4]; b[0]=a[0]+1; b diff --git a/noir/test_programs/execution_success/databus/src/main.nr b/noir/test_programs/execution_success/databus/src/main.nr index 61a9637f5fe..1cf95be8a22 100644 --- a/noir/test_programs/execution_success/databus/src/main.nr +++ b/noir/test_programs/execution_success/databus/src/main.nr @@ -1,12 +1,12 @@ use dep::std; -fn main(mut x: u32, y: call_data u32, z: call_data [u32;4]) -> return_data u32 { - let a = z[x]; - a+foo(y) +fn main(mut x: u32, y: call_data u32, z: call_data [u32; 4]) -> return_data u32 { + let a = z[x]; + a + foo(y) } // Use an unconstrained function to force the compiler to avoid inlining unconstrained fn foo(x: u32) -> u32 { - x+1 + x + 1 } diff --git a/noir/test_programs/execution_success/debug_logs/src/main.nr b/noir/test_programs/execution_success/debug_logs/src/main.nr index 49e0041594a..ec24b0cc8e8 100644 --- a/noir/test_programs/execution_success/debug_logs/src/main.nr +++ b/noir/test_programs/execution_success/debug_logs/src/main.nr @@ -1,77 +1,76 @@ -use dep::std; - fn main(x: Field, y: pub Field) { let 
string = "i: {i}, j: {j}"; - std::println(string); + println(string); // TODO: fmtstr cannot be printed // let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; // let fmt_fmt_str = f"fmtstr: {fmt_str}, i: {x}"; - // std::println(fmt_fmt_str); + // println(fmt_fmt_str); // A `fmtstr` lets you easily perform string interpolation. let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; let fmt_str = string_identity(fmt_str); - std::println(fmt_str); + println(fmt_str); let fmt_str_no_type = f"i: {x}, j: {y}"; - std::println(fmt_str_no_type); + println(fmt_str_no_type); let fmt_str_generic = string_with_generics(fmt_str_no_type); - std::println(fmt_str_generic); + println(fmt_str_generic); let s = myStruct { y: x, x: y }; - std::println(s); + println(s); - std::println(f"randomstring{x}{x}"); + println(f"randomstring{x}{x}"); let fmt_str = string_with_partial_generics(f"i: {x}, s: {s}"); - std::println(fmt_str); + println(fmt_str); - std::println(x); - std::println([x, y]); + println(x); + println([x, y]); let foo = fooStruct { my_struct: s, foo: 15 }; - std::println(f"s: {s}, foo: {foo}"); + println(f"s: {s}, foo: {foo}"); - std::println(f"x: 0, y: 1"); + println(f"x: 0, y: 1"); let s_2 = myStruct { x: 20, y: 30 }; - std::println(f"s1: {s}, s2: {s_2}"); + println(f"s1: {s}, s2: {s_2}"); let bar = fooStruct { my_struct: s_2, foo: 20 }; - std::println(f"foo1: {foo}, foo2: {bar}"); + println(f"foo1: {foo}, foo2: {bar}"); let struct_string = if x != 5 { f"{foo}" } else { f"{bar}" }; - std::println(struct_string); + println(struct_string); let one_tuple = (1, 2, 3); let another_tuple = (4, 5, 6); - std::println(f"one_tuple: {one_tuple}, another_tuple: {another_tuple}"); - std::println(one_tuple); + println(f"one_tuple: {one_tuple}, another_tuple: {another_tuple}"); + println(one_tuple); let tuples_nested = (one_tuple, another_tuple); - std::println(f"tuples_nested: {tuples_nested}"); - std::println(tuples_nested); + println(f"tuples_nested: {tuples_nested}"); + println(tuples_nested); + regression_2903(); regression_2906(); let first_array = [1, 2, 3]; let second_array = [4, 5, 6]; let arrays_nested = [first_array, second_array]; - std::println(f"first_array: {first_array}, second_array: {second_array}"); - std::println(f"arrays_nested: {arrays_nested}"); + println(f"first_array: {first_array}, second_array: {second_array}"); + println(f"arrays_nested: {arrays_nested}"); let free_lambda = |x| x + 1; let sentinel: u32 = 8888; - std::println(f"free_lambda: {free_lambda}, sentinel: {sentinel}"); - std::println(free_lambda); + println(f"free_lambda: {free_lambda}, sentinel: {sentinel}"); + println(free_lambda); let one = 1; let closured_lambda = |x| x + one; - std::println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); - std::println(closured_lambda); + println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); + println(closured_lambda); } fn string_identity(string: fmtstr<14, (Field, Field)>) -> fmtstr<14, (Field, Field)> { @@ -96,19 +95,30 @@ struct fooStruct { foo: Field, } +fn regression_2903() { + let v : [str<1>; 1] = ["1"; 1]; + println(v); // will print [1] + + let a = v[0]; + println(a); // will print `1` + + let bytes = ["aaa", "bbb", "ccc"]; + println(bytes); +} + fn regression_2906() { let array_two_vals = [1, 2]; - dep::std::println(f"array_two_vals: {array_two_vals}"); + println(f"array_two_vals: {array_two_vals}"); let label_two_vals = "12"; - dep::std::println(f"label_two_vals: {label_two_vals}"); + println(f"label_two_vals: {label_two_vals}"); 
let array_five_vals = [1, 2, 3, 4, 5]; - dep::std::println(f"array_five_vals: {array_five_vals}"); + println(f"array_five_vals: {array_five_vals}"); let label_five_vals = "12345"; - dep::std::println(f"label_five_vals: {label_five_vals}"); + println(f"label_five_vals: {label_five_vals}"); - dep::std::println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); + println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); } diff --git a/noir/test_programs/execution_success/distinct_keyword/src/main.nr b/noir/test_programs/execution_success/distinct_keyword/src/main.nr index 0e55a011a48..8e9b5c008ed 100644 --- a/noir/test_programs/execution_success/distinct_keyword/src/main.nr +++ b/noir/test_programs/execution_success/distinct_keyword/src/main.nr @@ -1,4 +1,4 @@ // Example that uses the distinct keyword -fn main(x: pub Field) -> distinct pub [Field;2] { +fn main(x: pub Field) -> distinct pub [Field; 2] { [x + 1, x] } diff --git a/noir/test_programs/execution_success/ecdsa_secp256k1/src/main.nr b/noir/test_programs/execution_success/ecdsa_secp256k1/src/main.nr index 2f410755f74..ac0359e4bb8 100644 --- a/noir/test_programs/execution_success/ecdsa_secp256k1/src/main.nr +++ b/noir/test_programs/execution_success/ecdsa_secp256k1/src/main.nr @@ -1,11 +1,11 @@ use dep::std; fn main( - message: [u8;38], - hashed_message: [u8;32], - pub_key_x: [u8;32], - pub_key_y: [u8;32], - signature: [u8;64] + message: [u8; 38], + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] ) { // Hash the message, since secp256k1 expects a hashed_message let expected = std::hash::sha256(message); diff --git a/noir/test_programs/execution_success/ecdsa_secp256r1/src/main.nr b/noir/test_programs/execution_success/ecdsa_secp256r1/src/main.nr index d23573d13a6..c64e390d652 100644 --- a/noir/test_programs/execution_success/ecdsa_secp256r1/src/main.nr +++ b/noir/test_programs/execution_success/ecdsa_secp256r1/src/main.nr @@ -1,6 +1,6 @@ use dep::std; -fn main(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) { +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); assert(valid_signature); } diff --git a/noir/test_programs/execution_success/global_consts/src/baz.nr b/noir/test_programs/execution_success/global_consts/src/baz.nr index 4271de81118..384cf9d3569 100644 --- a/noir/test_programs/execution_success/global_consts/src/baz.nr +++ b/noir/test_programs/execution_success/global_consts/src/baz.nr @@ -1,5 +1,5 @@ pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) { for i in 0..crate::foo::MAGIC_NUMBER { - assert(x[i] == crate::foo::MAGIC_NUMBER); + assert(x[i] == crate::foo::MAGIC_NUMBER as Field); } } diff --git a/noir/test_programs/execution_success/global_consts/src/foo.nr b/noir/test_programs/execution_success/global_consts/src/foo.nr index 7b0ae75b74b..413b9c3a74b 100644 --- a/noir/test_programs/execution_success/global_consts/src/foo.nr +++ b/noir/test_programs/execution_success/global_consts/src/foo.nr @@ -1,11 +1,11 @@ mod bar; -global N: Field = 5; -global MAGIC_NUMBER: Field = 3; +global N: u64 = 5; +global MAGIC_NUMBER: u64 = 3; global TYPE_INFERRED = 42; pub fn from_foo(x: [Field; bar::N]) { for i in 0..bar::N { - assert(x[i] == bar::N); + assert(x[i] == bar::N as Field); } } diff --git 
a/noir/test_programs/execution_success/global_consts/src/foo/bar.nr b/noir/test_programs/execution_success/global_consts/src/foo/bar.nr index b8d0b85b0f3..5404c9cf1e3 100644 --- a/noir/test_programs/execution_success/global_consts/src/foo/bar.nr +++ b/noir/test_programs/execution_success/global_consts/src/foo/bar.nr @@ -1,5 +1,5 @@ -global N: Field = 5; +global N: u64 = 5; pub fn from_bar(x: Field) -> Field { - x * N + x * N as Field } diff --git a/noir/test_programs/execution_success/global_consts/src/main.nr b/noir/test_programs/execution_success/global_consts/src/main.nr index 25cc0e4dd36..3c8ecc67a0c 100644 --- a/noir/test_programs/execution_success/global_consts/src/main.nr +++ b/noir/test_programs/execution_success/global_consts/src/main.nr @@ -3,7 +3,7 @@ mod baz; global M: Field = 32; global L: Field = 10; // Unused globals currently allowed -global N: Field = 5; +global N: u64 = 5; global T_LEN = 2; // Type inference is allowed on globals // Globals can reference other globals @@ -36,12 +36,12 @@ fn main( let test_struct = Dummy { x: d, y: c }; for i in 0..foo::MAGIC_NUMBER { - assert(c[i] == foo::MAGIC_NUMBER); - assert(test_struct.y[i] == foo::MAGIC_NUMBER); + assert(c[i] == foo::MAGIC_NUMBER as Field); + assert(test_struct.y[i] == foo::MAGIC_NUMBER as Field); assert(test_struct.y[i] != NESTED[1][0].v); } - assert(N != M); + assert(N as Field != M); let expected: u32 = 42; assert(foo::TYPE_INFERRED == expected); @@ -62,12 +62,12 @@ fn main( arrays_neq(a, b); - let t: [Field; T_LEN] = [N, M]; + let t: [Field; T_LEN] = [N as Field, M]; assert(t[1] == 32); assert(15 == my_submodule::my_helper()); - let add_submodules_N = my_submodule::N + foo::bar::N; + let add_submodules_N = my_submodule::N + foo::bar::N as Field; assert(15 == add_submodules_N); let add_from_bar_N = my_submodule::N + foo::bar::from_bar(1); assert(15 == add_from_bar_N); @@ -75,7 +75,7 @@ fn main( let sugared = [0; my_submodule::N + 2]; assert(sugared[my_submodule::N + 1] == 0); - let arr: [Field; my_submodule::N] = [N; 10]; + let arr: [Field; my_submodule::N] = [N as Field; 10]; assert((arr[0] == 5) & (arr[9] == 5)); foo::from_foo(d); diff --git a/noir/test_programs/execution_success/hashmap/Nargo.toml b/noir/test_programs/execution_success/hashmap/Nargo.toml new file mode 100644 index 00000000000..c09debc9833 --- /dev/null +++ b/noir/test_programs/execution_success/hashmap/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "hashmap" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/noir/test_programs/execution_success/hashmap/Prover.toml b/noir/test_programs/execution_success/hashmap/Prover.toml new file mode 100644 index 00000000000..84d4c0733e4 --- /dev/null +++ b/noir/test_programs/execution_success/hashmap/Prover.toml @@ -0,0 +1,26 @@ +# Input: 6 key-value entries for hashmap capacity of 8. +# These must be distinct (both key-to-key, and value-to-value) for correct testing. 
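The `global_consts` edits above condense to one recurring pattern in this PR: once a global such as `N` is typed `u64` so it can size arrays and bound loops, every spot that mixes it into `Field` arithmetic needs an explicit `as Field` cast. A hedged sketch of both sides of that rule:

global N: u64 = 5;

fn globals_demo() -> Field {
    // A u64 global sizes arrays and drives ranges directly...
    let arr: [Field; N] = [N as Field; N];
    let mut total = 0;
    for i in 0..N {
        total += arr[i];
    }
    // ...but must be cast explicitly wherever a Field is expected.
    assert(total == (N * N) as Field);
    total
}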
+ +[[input]] +key = 2 +value = 17 + +[[input]] +key = 3 +value = 19 + +[[input]] +key = 5 +value = 23 + +[[input]] +key = 7 +value = 29 + +[[input]] +key = 11 +value = 31 + +[[input]] +key = 41 +value = 43 \ No newline at end of file diff --git a/noir/test_programs/execution_success/hashmap/src/main.nr b/noir/test_programs/execution_success/hashmap/src/main.nr new file mode 100644 index 00000000000..597a5c0b7de --- /dev/null +++ b/noir/test_programs/execution_success/hashmap/src/main.nr @@ -0,0 +1,192 @@ +mod utils; + +use dep::std::collections::map::HashMap; +use dep::std::hash::BuildHasherDefault; +use dep::std::hash::pedersen::PedersenHasher; +use dep::std::cmp::Eq; + +use utils::cut; + +type K = Field; +type V = Field; + +// It is more convenient and readable to use structs as input. +struct Entry{ + key: Field, + value: Field +} + +global HASHMAP_CAP = 8; +global HASHMAP_LEN = 6; + +global FIELD_CMP = |a: Field, b: Field| a.lt(b); + +global K_CMP = FIELD_CMP; +global V_CMP = FIELD_CMP; +global KV_CMP = |a: (K, V), b: (K, V)| a.0.lt(b.0); + +global ALLOCATE_HASHMAP = || -> HashMap> + HashMap::default(); + +fn main(input: [Entry; HASHMAP_LEN]) { + test_sequential(input[0].key, input[0].value); + test_multiple_equal_insert(input[1].key, input[1].value); + test_value_override(input[2].key, input[2].value, input[3].value); + test_insert_and_methods(input); + test_hashmaps_equality(input); + test_retain(); + test_iterators(); + test_mut_iterators(); +} + +// Insert, get, remove. +fn test_sequential(key: K, value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + hashmap.insert(key, value); + assert(hashmap.len() == 1, "HashMap after one insert should have a length of 1 element."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(value == got, f"Inserted {value} but got {got} for the same key."); + + hashmap.remove(key); + assert(hashmap.is_empty(), "HashMap after one insert and corresponding removal should be empty."); + let got = hashmap.get(key); + assert(got.is_none(), "Value has been removed, but is still available (not none)."); +} + +// Insert same pair several times. +fn test_multiple_equal_insert(key: K, value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + for _ in 0..HASHMAP_LEN { + hashmap.insert(key, value); + } + + let len = hashmap.len(); + assert(len == 1, f"HashMap length must be 1, got {len}."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(value == got, f"Inserted {value} but got {got} for the same key."); +} + +// Override value for existing pair. +fn test_value_override(key: K, value: V, new_value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New hashmap should be empty."); + + hashmap.insert(key, value); + hashmap.insert(key, new_value); + assert(hashmap.len() == 1, "HashMap length is invalid."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(got == new_value, f"Expected {new_value}, but got {got}."); +} + +// Insert several distinct pairs and test auxiliary methods. 
+fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + for entry in input { + hashmap.insert(entry.key, entry.value); + } + + assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input lenght."); + + for entry in input { + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + } + + hashmap.clear(); + assert(hashmap.is_empty(), "HashMap after clear() should be empty."); +} + +// Insert several pairs and test retaining. +fn test_retain() { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + let (key, value) = (5, 11); + hashmap.insert(key, value); + let (key, value) = (2, 13); + hashmap.insert(key, value); + let (key, value) = (11, 5); + hashmap.insert(key, value); + + let predicate = |key: K, value: V| -> bool {key * value == 55}; + hashmap.retain(predicate); + + assert(hashmap.len() == 2, "HashMap should have retained 2 elements."); + assert(hashmap.get(2).is_none(), "Pair should have been removed, since it does not match predicate."); +} + +// Equality trait check. +fn test_hashmaps_equality(input: [Entry; HASHMAP_LEN]) { + let mut hashmap_1 = ALLOCATE_HASHMAP(); + let mut hashmap_2 = ALLOCATE_HASHMAP(); + + for entry in input { + hashmap_1.insert(entry.key, entry.value); + hashmap_2.insert(entry.key, entry.value); + } + + assert(hashmap_1 == hashmap_2, "HashMaps should be equal."); + + hashmap_2.remove(input[0].key); + + assert(hashmap_1 != hashmap_2, "HashMaps should not be equal."); +} + +// Test entries, keys, values. +fn test_iterators() { + let mut hashmap = ALLOCATE_HASHMAP(); + + hashmap.insert(2, 3); + hashmap.insert(5, 7); + hashmap.insert(11, 13); + + let keys: [K; 3] = cut(hashmap.keys()).map(|k: Option| k.unwrap_unchecked()).sort_via(K_CMP); + let values: [V; 3] = cut(hashmap.values()).map(|v: Option| v.unwrap_unchecked()).sort_via(V_CMP); + let entries: [(K, V); 3] = cut(hashmap.entries()).map(|e: Option<(K, V)>| e.unwrap_unchecked()).sort_via(KV_CMP); + + assert(keys == [2, 5, 11], "Got incorrect iteration of keys."); + assert(values == [3, 7, 13], "Got incorrect iteration of values."); + assert(entries == [(2, 3), (5, 7), (11, 13)], "Got incorrect iteration of entries."); +} + +// Test mutable iteration over keys, values and entries. 
+fn test_mut_iterators() {
+    let mut hashmap = ALLOCATE_HASHMAP();
+
+    hashmap.insert(2, 3);
+    hashmap.insert(5, 7);
+    hashmap.insert(11, 13);
+
+    let f = |k: K| -> K{ k * 3};
+    hashmap.iter_keys_mut(f);
+
+    let f = |v: V| -> V{ v * 5};
+    hashmap.iter_values_mut(f);
+
+    let keys: [K; 3] = cut(hashmap.keys()).map(|k: Option<K>| k.unwrap_unchecked()).sort_via(K_CMP);
+    let values: [V; 3] = cut(hashmap.values()).map(|v: Option<V>| v.unwrap_unchecked()).sort_via(V_CMP);
+
+    assert(keys == [6, 15, 33], f"Got incorrect iteration of keys: {keys}");
+    assert(values == [15, 35, 65], "Got incorrect iteration of values.");
+
+    let f = |k: K, v: V| -> (K, V){(k * 2, v * 2)};
+    hashmap.iter_mut(f);
+
+    let entries: [(K, V); 3] = cut(hashmap.entries()).map(|e: Option<(K, V)>| e.unwrap_unchecked()).sort_via(KV_CMP);
+
+    assert(entries == [(12, 30), (30, 70), (66, 130)], "Got incorrect iteration of entries.");
+}
diff --git a/noir/test_programs/execution_success/hashmap/src/utils.nr b/noir/test_programs/execution_success/hashmap/src/utils.nr
new file mode 100644
index 00000000000..45c9ca9bbf7
--- /dev/null
+++ b/noir/test_programs/execution_success/hashmap/src/utils.nr
@@ -0,0 +1,10 @@
+// Compile-time: cuts the first M elements from the [T; N] array.
+pub(crate) fn cut<T, M, N>(input: [T; N]) -> [T; M] {
+    assert(M as u64 < N as u64, "M should be less than N.");
+
+    let mut new = [dep::std::unsafe::zeroed(); M];
+    for i in 0..M {
+        new[i] = input[i];
+    }
+    new
+}
diff --git a/noir/test_programs/execution_success/main_bool_arg/src/main.nr b/noir/test_programs/execution_success/main_bool_arg/src/main.nr
index 111a23ec0c2..2c50d7dee16 100644
--- a/noir/test_programs/execution_success/main_bool_arg/src/main.nr
+++ b/noir/test_programs/execution_success/main_bool_arg/src/main.nr
@@ -1,4 +1,4 @@
-fn main(x: bool, y: [bool;2]) {
+fn main(x: bool, y: [bool; 2]) {
     if x {
         assert(1 != 2);
     }
diff --git a/noir/test_programs/execution_success/operator_overloading/src/main.nr b/noir/test_programs/execution_success/operator_overloading/src/main.nr
index 3867531abca..d61e1da170e 100644
--- a/noir/test_programs/execution_success/operator_overloading/src/main.nr
+++ b/noir/test_programs/execution_success/operator_overloading/src/main.nr
@@ -1,4 +1,4 @@
-use dep::std::ops::{ Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr };
+use dep::std::ops::{Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr};
 use dep::std::cmp::Ordering;
 
 // x = 3, y = 9
@@ -126,10 +126,6 @@ impl Ord for Wrapper {
     }
 }
 
-
-
-
-
 struct Pair {
     x: Wrapper,
     y: Wrapper,
diff --git a/noir/test_programs/execution_success/poseidon_bn254_hash/Prover.toml b/noir/test_programs/execution_success/poseidon_bn254_hash/Prover.toml
index 8eecf9a3db2..fa6fd05b0a3 100644
--- a/noir/test_programs/execution_success/poseidon_bn254_hash/Prover.toml
+++ b/noir/test_programs/execution_success/poseidon_bn254_hash/Prover.toml
@@ -2,3 +2,8 @@ x1 = [1,2]
 y1 = "0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a"
 x2 = [1,2,3,4]
 y2 = "0x299c867db6c1fdd79dcefa40e4510b9837e60ebb1ce0663dbaa525df65250465"
+x3 = ["4218458030232820015255714794613421442512497197372123294583664908262453897094",
+    "4218458030232820015255714794613421442512497197372123294583664908262453897094",
+    "4218458030232820015255714794613421442512497197372123294583664908262453897094",
+    "4218458030232820015255714794613421442512497197372123294583664908262453897094"]
+ y3 = "0x2f43a0f83b51a6f5fc839dea0ecec74947637802a579fa9841930a25a0bcec11"
diff --git
a/noir/test_programs/execution_success/poseidon_bn254_hash/src/main.nr b/noir/test_programs/execution_success/poseidon_bn254_hash/src/main.nr index e742a440d1c..939b99595c7 100644 --- a/noir/test_programs/execution_success/poseidon_bn254_hash/src/main.nr +++ b/noir/test_programs/execution_success/poseidon_bn254_hash/src/main.nr @@ -1,11 +1,15 @@ // docs:start:poseidon use dep::std::hash::poseidon; +use dep::std::hash::poseidon2; -fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field, x3: [Field; 4], y3: Field) { let hash1 = poseidon::bn254::hash_2(x1); assert(hash1 == y1); let hash2 = poseidon::bn254::hash_4(x2); assert(hash2 == y2); + + let hash3 = poseidon2::Poseidon2::hash(x3, x3.len() as u32); + assert(hash3 == y3); } // docs:end:poseidon diff --git a/noir/test_programs/execution_success/regression/src/main.nr b/noir/test_programs/execution_success/regression/src/main.nr index c70e2e75fa8..8dc42cb5f10 100644 --- a/noir/test_programs/execution_success/regression/src/main.nr +++ b/noir/test_programs/execution_success/regression/src/main.nr @@ -1,4 +1,4 @@ -global NIBBLE_LENGTH: Field = 16; +global NIBBLE_LENGTH: u64 = 16; struct U4 { inner: u8, @@ -21,8 +21,8 @@ impl Eq for U4 { } fn compact_decode(input: [u8; N], length: Field) -> ([U4; NIBBLE_LENGTH], Field) { - assert(2 * input.len() as u64 <= NIBBLE_LENGTH as u64); - assert(length as u64 <= input.len() as u64); + assert(2 * input.len() <= NIBBLE_LENGTH as u64); + assert(length as u64 <= input.len()); let mut nibble = [U4::zero(); NIBBLE_LENGTH]; diff --git a/noir/test_programs/execution_success/regression_3394/src/main.nr b/noir/test_programs/execution_success/regression_3394/src/main.nr index cc45487b98b..94b6c818ff2 100644 --- a/noir/test_programs/execution_success/regression_3394/src/main.nr +++ b/noir/test_programs/execution_success/regression_3394/src/main.nr @@ -3,4 +3,4 @@ use dep::std; fn main() { let x : i8 = -128; std::println(x); -} \ No newline at end of file +} diff --git a/noir/test_programs/execution_success/regression_3607/src/main.nr b/noir/test_programs/execution_success/regression_3607/src/main.nr index c09211c2810..9c7ef243f60 100644 --- a/noir/test_programs/execution_success/regression_3607/src/main.nr +++ b/noir/test_programs/execution_success/regression_3607/src/main.nr @@ -5,4 +5,4 @@ fn main(mut x: u32) { x = (x+1) / x; } assert(x != 0); -} \ No newline at end of file +} diff --git a/noir/test_programs/execution_success/regression_3889/src/main.nr b/noir/test_programs/execution_success/regression_3889/src/main.nr index 10b8ecabee3..402a69a10da 100644 --- a/noir/test_programs/execution_success/regression_3889/src/main.nr +++ b/noir/test_programs/execution_success/regression_3889/src/main.nr @@ -17,7 +17,6 @@ mod Baz { use crate::Bar::NewType; } - fn main(works: Baz::Works, fails: Baz::BarStruct, also_fails: Bar::NewType) -> pub Field { works.a + fails.a + also_fails.a } diff --git a/noir/test_programs/execution_success/side_effects_constrain_array/src/main.nr b/noir/test_programs/execution_success/side_effects_constrain_array/src/main.nr index fb3c346a460..c4a62603bc3 100644 --- a/noir/test_programs/execution_success/side_effects_constrain_array/src/main.nr +++ b/noir/test_programs/execution_success/side_effects_constrain_array/src/main.nr @@ -7,11 +7,11 @@ fn main(y: pub u32) { // The assert inside the if should be hit if y < 10 { - assert(bar.inner == [100, 101, 102]); + assert(bar.inner == [100, 101, 102]); } // The 
assert inside the if should not be hit if y > 10 { assert(bar.inner == [0, 1, 2]); } -} \ No newline at end of file +} diff --git a/noir/test_programs/execution_success/slice_dynamic_index/src/main.nr b/noir/test_programs/execution_success/slice_dynamic_index/src/main.nr index 374d2ba4c26..41fc9a645c1 100644 --- a/noir/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/noir/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -6,7 +6,7 @@ fn main(x: Field) { fn regression_dynamic_slice_index(x: Field, y: Field) { let mut slice = []; for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } assert(slice.len() == 5); @@ -124,12 +124,12 @@ fn dynamic_slice_merge_if(mut slice: [Field], x: Field) { assert(first_elem == 12); assert(rest_of_slice.len() == 6); - slice = rest_of_slice.insert(x - 2, 20); + slice = rest_of_slice.insert(x as u64 - 2, 20); assert(slice[2] == 20); assert(slice[6] == 30); assert(slice.len() == 7); - let (removed_slice, removed_elem) = slice.remove(x - 1); + let (removed_slice, removed_elem) = slice.remove(x as u64 - 1); // The deconstructed tuple assigns to the slice but is not seen outside of the if statement // without a direct assignment slice = removed_slice; diff --git a/noir/test_programs/execution_success/slices/src/main.nr b/noir/test_programs/execution_success/slices/src/main.nr index c377d2e5b2f..eca42a660c4 100644 --- a/noir/test_programs/execution_success/slices/src/main.nr +++ b/noir/test_programs/execution_success/slices/src/main.nr @@ -167,7 +167,7 @@ fn merge_slices_mutate_in_loop(x: Field, y: Field) -> [Field] { let mut slice = [0; 2]; if x != y { for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } } else { slice = slice.push_back(x); diff --git a/noir/test_programs/execution_success/struct/src/main.nr b/noir/test_programs/execution_success/struct/src/main.nr index 45c5e347e5a..de08f42f79d 100644 --- a/noir/test_programs/execution_success/struct/src/main.nr +++ b/noir/test_programs/execution_success/struct/src/main.nr @@ -9,8 +9,8 @@ struct Pair { } impl Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: 0, array: [x,y] } + fn default(x: Field, y: Field) -> Self { + Self { bar: 0, array: [x, y] } } } diff --git a/noir/test_programs/format.sh b/noir/test_programs/format.sh new file mode 100755 index 00000000000..3c679b8689e --- /dev/null +++ b/noir/test_programs/format.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash +set -e + +# These tests are incompatible with gas reporting +excluded_dirs=("workspace" "workspace_default_member" "workspace_reexport_bug") + +# These tests cause failures in CI with a stack overflow for some reason. +ci_excluded_dirs=("eddsa") + +current_dir=$(pwd) + +# We generate a Noir workspace which contains all of the test cases +# This allows us to generate a gates report using `nargo info` for all of them at once. 
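
For illustration only (this is not part of the diff): the script's approach, synthesizing a throwaway [workspace] Nargo.toml that lists every test program, running a single nargo command over it, then deleting the manifest, could be sketched in Rust roughly as follows. The directory names mirror the script; assuming `nargo` is available on PATH.

// Illustrative sketch of format.sh's generate-workspace-then-run idea.
use std::fs;
use std::io::Write;
use std::process::Command;

fn main() -> std::io::Result<()> {
    let excluded = ["workspace", "workspace_default_member", "workspace_reexport_bug"];
    let mut members = Vec::new();

    // Collect every test program directory, mirroring collect_dirs.
    for group in ["compile_success_empty", "execution_success"] {
        for entry in fs::read_dir(group)? {
            let entry = entry?;
            let name = entry.file_name().to_string_lossy().into_owned();
            if entry.path().is_dir() && !excluded.contains(&name.as_str()) {
                members.push(format!("    \"{group}/{name}\","));
            }
        }
    }

    // Write a throwaway workspace manifest covering all members at once.
    let mut manifest = fs::File::create("Nargo.toml")?;
    writeln!(manifest, "[workspace]\nmembers = [\n{}\n]", members.join("\n"))?;
    drop(manifest);

    // A single `nargo fmt` now formats every member.
    let status = Command::new("nargo").arg("fmt").status()?;
    fs::remove_file("Nargo.toml")?;
    assert!(status.success(), "nargo fmt failed");
    Ok(())
}

The design point is the same as in the script: one synthetic workspace lets one `nargo fmt` (or `nargo info`) invocation cover every test program instead of invoking it per directory.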
+ + +function collect_dirs { + test_dirs=$(ls $current_dir/$1) + + for dir in $test_dirs; do + if [[ " ${excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + if [[ ${CI-false} = "true" ]] && [[ " ${ci_excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + echo " \"$1/$dir\"," >> Nargo.toml +done +} + +echo "[workspace]" > Nargo.toml +echo "members = [" >> Nargo.toml + +collect_dirs compile_success_empty +collect_dirs execution_success + +echo "]" >> Nargo.toml + +if [ "$1" == "check" ]; then + nargo fmt --check +else + nargo fmt +fi + + +rm Nargo.toml diff --git a/noir/test_programs/gates_report.sh b/noir/test_programs/gates_report.sh index 4192c581376..3b0b4d9e148 100755 --- a/noir/test_programs/gates_report.sh +++ b/noir/test_programs/gates_report.sh @@ -2,7 +2,7 @@ set -e # These tests are incompatible with gas reporting -excluded_dirs=("workspace" "workspace_default_member") +excluded_dirs=("workspace" "workspace_default_member" "double_verify_nested_proof") # These tests cause failures in CI with a stack overflow for some reason. ci_excluded_dirs=("eddsa") diff --git a/noir/test_programs/noir_test_success/bounded_vec/src/main.nr b/noir/test_programs/noir_test_success/bounded_vec/src/main.nr index d51d2cc3685..0e2c89c9064 100644 --- a/noir/test_programs/noir_test_success/bounded_vec/src/main.nr +++ b/noir/test_programs/noir_test_success/bounded_vec/src/main.nr @@ -1,6 +1,6 @@ #[test] fn test_vec_push_pop() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); assert(vec.len == 0); vec.push(2); assert(vec.len == 1); @@ -17,7 +17,7 @@ fn test_vec_push_pop() { #[test] fn test_vec_extend_from_array() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); assert(vec.len == 2); assert(vec.get(0) == 2); @@ -26,13 +26,13 @@ fn test_vec_extend_from_array() { #[test(should_fail_with="extend_from_array out of bounds")] fn test_vec_extend_from_array_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4, 6]); } #[test(should_fail_with="extend_from_array out of bounds")] fn test_vec_extend_from_array_twice_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2]); assert(vec.len == 1); vec.extend_from_array([4, 6]); @@ -40,36 +40,36 @@ fn test_vec_extend_from_array_twice_out_of_bound() { #[test(should_fail)] fn test_vec_get_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); let _x = vec.get(2); } #[test(should_fail)] fn test_vec_get_not_declared() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2]); let _x = vec.get(1); } #[test(should_fail)] fn test_vec_get_uninitialized() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); let _x = vec.get(0); } #[test(should_fail_with="push out of bounds")] fn test_vec_push_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.push(1); vec.push(2); } #[test(should_fail_with="extend_from_bounded_vec out of bounds")] fn test_vec_extend_from_bounded_vec_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); - let mut another_vec: BoundedVec = 
BoundedVec::new(0); + let mut another_vec: BoundedVec = BoundedVec::new(); another_vec.extend_from_array([1, 2, 3]); vec.extend_from_bounded_vec(another_vec); @@ -77,10 +77,10 @@ fn test_vec_extend_from_bounded_vec_out_of_bound() { #[test(should_fail_with="extend_from_bounded_vec out of bounds")] fn test_vec_extend_from_bounded_vec_twice_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([1, 2]); - let mut another_vec: BoundedVec = BoundedVec::new(0); + let mut another_vec: BoundedVec = BoundedVec::new(); another_vec.push(3); vec.extend_from_bounded_vec(another_vec); @@ -88,7 +88,7 @@ fn test_vec_extend_from_bounded_vec_twice_out_of_bound() { #[test] fn test_vec_any() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4, 6]); assert(vec.any(|v| v == 2) == true); assert(vec.any(|v| v == 4) == true); @@ -98,8 +98,8 @@ fn test_vec_any() { #[test] fn test_vec_any_not_default() { - let default_value = 1; - let mut vec: BoundedVec = BoundedVec::new(default_value); + let default_value = 0; + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); assert(vec.any(|v| v == default_value) == false); -} \ No newline at end of file +} diff --git a/noir/tooling/debugger/ignored-tests.txt b/noir/tooling/debugger/ignored-tests.txt index 7ac440c335b..c472e828739 100644 --- a/noir/tooling/debugger/ignored-tests.txt +++ b/noir/tooling/debugger/ignored-tests.txt @@ -7,6 +7,7 @@ brillig_nested_arrays brillig_references brillig_to_bytes_integration debug_logs +double_verify_nested_proof double_verify_proof modulus nested_array_dynamic diff --git a/noir/tooling/debugger/src/context.rs b/noir/tooling/debugger/src/context.rs index 5ab2c63c365..515edf0bb06 100644 --- a/noir/tooling/debugger/src/context.rs +++ b/noir/tooling/debugger/src/context.rs @@ -138,7 +138,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { }) .collect() }) - .unwrap_or(vec![]) + .unwrap_or_default() } /// Returns the current call stack with expanded source locations. In diff --git a/noir/tooling/debugger/src/dap.rs b/noir/tooling/debugger/src/dap.rs index dd9a30d50da..7e67a26b257 100644 --- a/noir/tooling/debugger/src/dap.rs +++ b/noir/tooling/debugger/src/dap.rs @@ -115,7 +115,8 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { let source_location = source_locations[0]; let span = source_location.span; let file_id = source_location.file; - let Ok(line_index) = &simple_files[&file_id].line_index((), span.start() as usize) else { + let Ok(line_index) = &simple_files[&file_id].line_index((), span.start() as usize) + else { return; }; let line_number = line_index + 1; @@ -143,7 +144,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { pub fn run_loop(&mut self) -> Result<(), ServerError> { self.running = self.context.get_current_opcode_location().is_some(); - if self.running && matches!(self.context.get_current_source_location(), None) { + if self.running && self.context.get_current_source_location().is_none() { // TODO: remove this? 
This is to ensure that the tool has a proper // source location to show when first starting the debugger, but // maybe the default behavior should be to start executing until the @@ -297,7 +298,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { } } // the actual opcodes - while count > 0 && !matches!(opcode_location, None) { + while count > 0 && opcode_location.is_some() { instructions.push(DisassembledInstruction { address: format!("{}", opcode_location.unwrap()), instruction: self.context.render_opcode_at_location(&opcode_location), @@ -446,29 +447,31 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { // compute breakpoints to set and return let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; - let breakpoints: Vec = args.breakpoints.iter().map(|breakpoint| { - let Ok(location) = OpcodeLocation::from_str(breakpoint.instruction_reference.as_str()) else { - return Breakpoint { - verified: false, - message: Some(String::from("Missing instruction reference")), - ..Breakpoint::default() - }; - }; - if !self.context.is_valid_opcode_location(&location) { - return Breakpoint { - verified: false, - message: Some(String::from("Invalid opcode location")), - ..Breakpoint::default() + let breakpoints: Vec = args + .breakpoints + .iter() + .map(|breakpoint| { + let Ok(location) = + OpcodeLocation::from_str(breakpoint.instruction_reference.as_str()) + else { + return Breakpoint { + verified: false, + message: Some(String::from("Missing instruction reference")), + ..Breakpoint::default() + }; }; - } - let id = self.get_next_breakpoint_id(); - breakpoints_to_set.push((location, id)); - Breakpoint { - id: Some(id), - verified: true, - ..Breakpoint::default() - } - }).collect(); + if !self.context.is_valid_opcode_location(&location) { + return Breakpoint { + verified: false, + message: Some(String::from("Invalid opcode location")), + ..Breakpoint::default() + }; + } + let id = self.get_next_breakpoint_id(); + breakpoints_to_set.push((location, id)); + Breakpoint { id: Some(id), verified: true, ..Breakpoint::default() } + }) + .collect(); // actually set the computed breakpoints self.instruction_breakpoints = breakpoints_to_set; @@ -510,7 +513,12 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { }; let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { Ok(index) => line_to_opcodes[index].1, - Err(index) => line_to_opcodes[index].1, + Err(index) => { + if index >= line_to_opcodes.len() { + return None; + } + line_to_opcodes[index].1 + } }; Some(found_index) } @@ -534,7 +542,9 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { let Some(location) = self.find_opcode_for_source_location(&file_id, line) else { return Breakpoint { verified: false, - message: Some(String::from("Source location cannot be matched to opcode location")), + message: Some(String::from( + "Source location cannot be matched to opcode location", + )), ..Breakpoint::default() }; }; diff --git a/noir/tooling/debugger/src/errors.rs b/noir/tooling/debugger/src/errors.rs new file mode 100644 index 00000000000..4578987d715 --- /dev/null +++ b/noir/tooling/debugger/src/errors.rs @@ -0,0 +1,19 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum DapError { + #[error("{0}")] + PreFlightGenericError(String), + + #[error(transparent)] + LoadError(#[from] LoadError), + + #[error(transparent)] + ServerError(#[from] dap::errors::ServerError), +} + +#[derive(Debug, Error)] 
+pub enum LoadError { + #[error("{0}")] + Generic(String), +} diff --git a/noir/tooling/debugger/src/foreign_calls.rs b/noir/tooling/debugger/src/foreign_calls.rs index 01676adfef3..68c4d3947b0 100644 --- a/noir/tooling/debugger/src/foreign_calls.rs +++ b/noir/tooling/debugger/src/foreign_calls.rs @@ -100,7 +100,7 @@ impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { Ok(ForeignCallResult::default().into()) } Some(DebugForeignCall::MemberAssign(arity)) => { - if let Some(ForeignCallParam::Single(var_id_value)) = foreign_call.inputs.get(0) { + if let Some(ForeignCallParam::Single(var_id_value)) = foreign_call.inputs.first() { let arity = arity as usize; let var_id = debug_var_id(var_id_value); let n = foreign_call.inputs.len(); @@ -116,7 +116,11 @@ impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { .collect(); let values: Vec = (0..n - 1 - arity) .flat_map(|i| { - foreign_call.inputs.get(1 + i).map(|fci| fci.values()).unwrap_or(vec![]) + foreign_call + .inputs + .get(1 + i) + .map(|fci| fci.values()) + .unwrap_or_default() }) .collect(); self.debug_vars.assign_field(var_id, indexes, &values); diff --git a/noir/tooling/debugger/src/lib.rs b/noir/tooling/debugger/src/lib.rs index 35014f9a8c8..4a25e3417a0 100644 --- a/noir/tooling/debugger/src/lib.rs +++ b/noir/tooling/debugger/src/lib.rs @@ -1,5 +1,6 @@ mod context; mod dap; +pub mod errors; mod foreign_calls; mod repl; mod source_code_printer; diff --git a/noir/tooling/lsp/src/lib.rs b/noir/tooling/lsp/src/lib.rs index a0e024c70fd..be9b83e02f6 100644 --- a/noir/tooling/lsp/src/lib.rs +++ b/noir/tooling/lsp/src/lib.rs @@ -222,11 +222,15 @@ pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result ParsedFiles { cache_misses .into_iter() .map(|(id, _, _, parse_results)| (id, parse_results)) - .chain(cache_hits.into_iter()) + .chain(cache_hits) .collect() } else { parse_all(file_manager) diff --git a/noir/tooling/lsp/src/solver.rs b/noir/tooling/lsp/src/solver.rs index f001cebaa4d..d0acbf1aec5 100644 --- a/noir/tooling/lsp/src/solver.rs +++ b/noir/tooling/lsp/src/solver.rs @@ -49,4 +49,12 @@ impl BlackBoxFunctionSolver for WrapperSolver { ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { self.0.ec_add(input1_x, input1_y, input2_x, input2_y) } + + fn poseidon2_permutation( + &self, + inputs: &[acvm::FieldElement], + len: u32, + ) -> Result, acvm::BlackBoxResolutionError> { + self.0.poseidon2_permutation(inputs, len) + } } diff --git a/noir/tooling/nargo/build.rs b/noir/tooling/nargo/build.rs index 4fa7f58892a..ab2b7579132 100644 --- a/noir/tooling/nargo/build.rs +++ b/noir/tooling/nargo/build.rs @@ -2,8 +2,8 @@ use rustc_version::{version, Version}; fn check_rustc_version() { assert!( - version().unwrap() >= Version::parse("1.71.1").unwrap(), - "The minimal supported rustc version is 1.71.1." + version().unwrap() >= Version::parse("1.73.0").unwrap(), + "The minimal supported rustc version is 1.73.0." 
     );
 }
diff --git a/noir/tooling/nargo/src/artifacts/debug_vars.rs b/noir/tooling/nargo/src/artifacts/debug_vars.rs
index b5559ca53c8..20f2637f7d6 100644
--- a/noir/tooling/nargo/src/artifacts/debug_vars.rs
+++ b/noir/tooling/nargo/src/artifacts/debug_vars.rs
@@ -18,23 +18,25 @@ impl DebugVars {
         self.active
             .iter()
             .filter_map(|var_id| {
-                self.variables
-                    .get(var_id)
-                    .and_then(|debug_var| {
-                        let Some(value) = self.values.get(var_id) else { return None; };
-                        let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { return None; };
-                        Some((debug_var.name.as_str(), value, ptype))
-                    })
+                self.variables.get(var_id).and_then(|debug_var| {
+                    let Some(value) = self.values.get(var_id) else {
+                        return None;
+                    };
+                    let Some(ptype) = self.types.get(&debug_var.debug_type_id) else {
+                        return None;
+                    };
+                    Some((debug_var.name.as_str(), value, ptype))
+                })
             })
             .collect()
     }
 
     pub fn insert_variables(&mut self, vars: &DebugVariables) {
-        self.variables.extend(vars.clone().into_iter());
+        self.variables.extend(vars.clone());
    }
 
     pub fn insert_types(&mut self, types: &DebugTypes) {
-        self.types.extend(types.clone().into_iter());
+        self.types.extend(types.clone());
     }
 
     pub fn assign_var(&mut self, var_id: DebugVarId, values: &[Value]) {
diff --git a/noir/tooling/nargo/src/lib.rs b/noir/tooling/nargo/src/lib.rs
index 0fdff8b202f..3deced041f8 100644
--- a/noir/tooling/nargo/src/lib.rs
+++ b/noir/tooling/nargo/src/lib.rs
@@ -16,7 +16,7 @@ pub mod workspace;
 
 use std::collections::BTreeMap;
 
-use fm::FileManager;
+use fm::{FileManager, FILE_EXTENSION};
 use noirc_driver::{add_dep, prepare_crate, prepare_dependency};
 use noirc_frontend::{
     graph::{CrateId, CrateName},
@@ -65,12 +65,11 @@ fn insert_all_files_for_package_into_file_manager(
     let entry_path_parent = package
         .entry_path
         .parent()
-        .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path))
-        .clone();
+        .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path));
 
     // Get all files in the package and add them to the file manager
-    let paths =
-        get_all_paths_in_dir(entry_path_parent).expect("could not get all paths in the package");
+    let paths = get_all_noir_source_in_dir(entry_path_parent)
+        .expect("could not get all paths in the package");
     for path in paths {
         let source = std::fs::read_to_string(path.as_path())
             .unwrap_or_else(|_| panic!("could not read file {:?} into string", path));
@@ -125,6 +124,15 @@ pub fn prepare_package<'file_manager, 'parsed_files>(
     (context, crate_id)
 }
 
+// Get all Noir source files in the directory and subdirectories.
+//
+// Panics: If the path is not a path to a directory.
+fn get_all_noir_source_in_dir(dir: &std::path::Path) -> std::io::Result<Vec<std::path::PathBuf>> {
+    get_all_paths_in_dir(dir, |path| {
+        path.extension().map_or(false, |extension| extension == FILE_EXTENSION)
+    })
+}
+
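
The new helper narrows the recursive directory walk to `.nr` sources by handing a predicate down to get_all_paths_in_dir. Since part of the predicate-taking body is garbled in the hunk that follows, here is a hedged, self-contained sketch of the likely shape; the function below is a simplified stand-in, not the actual nargo code.

// Standalone sketch of a predicate-driven recursive directory walk.
use std::io;
use std::path::{Path, PathBuf};

fn get_all_paths_in_dir(dir: &Path, predicate: fn(&Path) -> bool) -> io::Result<Vec<PathBuf>> {
    assert!(dir.is_dir(), "directory {dir:?} is not a path to a directory");
    let mut paths = Vec::new();
    for entry in std::fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            // Recurse into subdirectories, keeping the same filter.
            paths.extend(get_all_paths_in_dir(&path, predicate)?);
        } else if predicate(&path) {
            paths.push(path);
        }
    }
    Ok(paths)
}

fn main() -> io::Result<()> {
    // Collect only Noir sources, mirroring get_all_noir_source_in_dir above.
    let noir_files = get_all_paths_in_dir(Path::new("."), |path| {
        path.extension().map_or(false, |extension| extension == "nr")
    })?;
    println!("{noir_files:?}");
    Ok(())
}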
 // Get all paths in the directory and subdirectories.
 //
 // Panics: If the path is not a path to a directory.
@@ -132,7 +140,10 @@ pub(crate) fn prepare_package<'file_manager, 'parsed_files>(
 // TODO: Along with prepare_package, this function is an abstraction leak
 // TODO: given that this crate should not know about the file manager.
 // TODO: We can clean this up in a future refactor
-fn get_all_paths_in_dir(dir: &std::path::Path) -> std::io::Result<Vec<std::path::PathBuf>> {
+fn get_all_paths_in_dir(
+    dir: &std::path::Path,
+    predicate: fn(&std::path::Path) -> bool,
+) -> std::io::Result<Vec<std::path::PathBuf>> {
     assert!(dir.is_dir(), "directory {dir:?} is not a path to a directory");
 
     let mut paths = Vec::new();
@@ -142,9 +153,9 @@ fn get_all_paths_in_dir(dir: &std::path::Path) -> std::io::Result( /// that a constraint was never satisfiable.
 /// An example of this is the program `assert(false)`
 /// In that case, we check if the test function should fail, and if so, we return `TestStatus::Pass`.
-fn test_status_program_compile_fail(err: RuntimeError, test_function: TestFunction) -> TestStatus {
+fn test_status_program_compile_fail(err: CompileError, test_function: TestFunction) -> TestStatus {
     // The test has failed compilation, but it should never fail. Report error.
     if !test_function.should_fail() {
         return TestStatus::CompileError(err.into());
     }
 
     // The test has failed compilation, extract the assertion message if present and check if it's expected.
-    let assert_message = if let RuntimeError::FailedConstraint { assert_message, .. } = &err {
+    let assert_message = if let CompileError::RuntimeError(RuntimeError::FailedConstraint {
+        assert_message,
+        ..
+    }) = &err
+    {
         assert_message.clone()
     } else {
         None
diff --git a/noir/tooling/nargo_cli/build.rs b/noir/tooling/nargo_cli/build.rs
index 57aa487f66a..1ca12b75dfb 100644
--- a/noir/tooling/nargo_cli/build.rs
+++ b/noir/tooling/nargo_cli/build.rs
@@ -6,8 +6,8 @@ use std::{env, fs};
 
 fn check_rustc_version() {
     assert!(
-        version().unwrap() >= Version::parse("1.71.1").unwrap(),
-        "The minimal supported rustc version is 1.71.1."
+        version().unwrap() >= Version::parse("1.73.0").unwrap(),
+        "The minimal supported rustc version is 1.73.0."
     );
 }
 
diff --git a/noir/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/tooling/nargo_cli/src/cli/check_cmd.rs
index a8b9dbdeeb2..4da06d2536a 100644
--- a/noir/tooling/nargo_cli/src/cli/check_cmd.rs
+++ b/noir/tooling/nargo_cli/src/cli/check_cmd.rs
@@ -142,6 +142,24 @@ fn create_input_toml_template(
     toml::to_string(&map).unwrap()
 }
 
+/// Run the lexing, parsing, name resolution, and type checking passes and report any warnings
+/// and errors found.
+pub(crate) fn check_crate_and_report_errors(
+    context: &mut Context,
+    crate_id: CrateId,
+    deny_warnings: bool,
+    disable_macros: bool,
+    silence_warnings: bool,
+) -> Result<(), CompileError> {
+    let result = check_crate(context, crate_id, deny_warnings, disable_macros);
+    super::compile_cmd::report_errors(
+        result,
+        &context.file_manager,
+        deny_warnings,
+        silence_warnings,
+    )
+}
+
 #[cfg(test)]
 mod tests {
     use noirc_abi::{AbiParameter, AbiType, AbiVisibility, Sign};
@@ -189,21 +207,3 @@ d2 = ["", "", ""]
         assert_eq!(toml_str, expected_toml_str);
     }
 }
-
-/// Run the lexing, parsing, name resolution, and type checking passes and report any warnings
-/// and errors found.
-pub(crate) fn check_crate_and_report_errors( - context: &mut Context, - crate_id: CrateId, - deny_warnings: bool, - disable_macros: bool, - silence_warnings: bool, -) -> Result<(), CompileError> { - let result = check_crate(context, crate_id, deny_warnings, disable_macros); - super::compile_cmd::report_errors( - result, - &context.file_manager, - deny_warnings, - silence_warnings, - ) -} diff --git a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs index 7c7e6056901..ba4f91609ef 100644 --- a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -3,38 +3,52 @@ use acvm::acir::native_types::WitnessMap; use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; -use nargo::ops::compile_program_with_debug_instrumenter; use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; use std::io::{BufReader, BufWriter, Read, Write}; use std::path::Path; -use dap::errors::ServerError; use dap::requests::Command; use dap::responses::ResponseBody; use dap::server::Server; use dap::types::Capabilities; use serde_json::Value; -use super::compile_cmd::report_errors; -use super::debug_cmd::instrument_package_files; +use super::debug_cmd::compile_bin_package_for_debugging; use super::fs::inputs::read_inputs_from_file; use crate::errors::CliError; use super::NargoConfig; +use noir_debugger::errors::{DapError, LoadError}; + #[derive(Debug, Clone, Args)] pub(crate) struct DapCommand { /// Override the expression width requested by the backend. 
     #[arg(long, value_parser = parse_expression_width)]
     expression_width: Option<ExpressionWidth>,
+
+    #[clap(long)]
+    preflight_check: bool,
+
+    #[clap(long)]
+    preflight_project_folder: Option<String>,
+
+    #[clap(long)]
+    preflight_package: Option<String>,
+
+    #[clap(long)]
+    preflight_prover_name: Option<String>,
+
+    #[clap(long)]
+    preflight_generate_acir: bool,
+
+    #[clap(long)]
+    preflight_skip_instrumentation: bool,
 }
 
 fn parse_expression_width(input: &str) -> Result<ExpressionWidth, std::io::Error> {
@@ -50,8 +64,6 @@ fn parse_expression_width(input: &str) -> Result<ExpressionWidth, std::io::Error> {
 }
 
-#[derive(Debug)]
-struct LoadError(&'static str);
 
 fn find_workspace(project_folder: &str, package: Option<&str>) -> Option<Workspace> {
     let Ok(toml_path) = get_package_manifest(Path::new(project_folder)) else {
         eprintln!("ERROR: Failed to get package manifest");
@@ -72,55 +84,51 @@ fn find_workspace(project_folder: &str, package: Option<&str>) -> Option<Workspace> {
 
+fn workspace_not_found_error_msg(project_folder: &str, package: Option<&str>) -> String {
+    match package {
+        Some(pkg) => format!(
+            r#"Noir Debugger could not load program from {}, package {}"#,
+            project_folder, pkg
+        ),
+        None => format!(r#"Noir Debugger could not load program from {}"#, project_folder),
+    }
+}
+
 fn load_and_compile_project(
     project_folder: &str,
     package: Option<&str>,
     prover_name: &str,
     expression_width: ExpressionWidth,
+    acir_mode: bool,
+    skip_instrumentation: bool,
 ) -> Result<(CompiledProgram, WitnessMap), LoadError> {
-    let workspace =
-        find_workspace(project_folder, package).ok_or(LoadError("Cannot open workspace"))?;
-
+    let workspace = find_workspace(project_folder, package)
+        .ok_or(LoadError::Generic(workspace_not_found_error_msg(project_folder, package)))?;
     let package = workspace
         .into_iter()
         .find(|p| p.is_binary())
-        .ok_or(LoadError("No matching binary packages found in workspace"))?;
-
-    let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new(""));
-    insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
-    let mut parsed_files = parse_all(&workspace_file_manager);
+        .ok_or(LoadError::Generic("No matching binary packages found in workspace".into()))?;
 
-    let compile_options =
-        CompileOptions { instrument_debug: true, force_brillig: true, ..CompileOptions::default() };
-
-    let debug_state = instrument_package_files(&mut parsed_files, &workspace_file_manager, package);
-
-    let compilation_result = compile_program_with_debug_instrumenter(
-        &workspace_file_manager,
-        &parsed_files,
+    let compiled_program = compile_bin_package_for_debugging(
+        &workspace,
         package,
-        &compile_options,
-        None,
-        debug_state,
-    );
-
-    let compiled_program = report_errors(
-        compilation_result,
-        &workspace_file_manager,
-        compile_options.deny_warnings,
-        compile_options.silence_warnings,
+        acir_mode,
+        skip_instrumentation,
+        CompileOptions::default(),
     )
-    .map_err(|_| LoadError("Failed to compile project"))?;
+    .map_err(|_| LoadError::Generic("Failed to compile project".into()))?;
 
     let compiled_program = nargo::ops::transform_program(compiled_program, expression_width);
 
     let (inputs_map, _) =
         read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)
-            .map_err(|_| LoadError("Failed to read program inputs"))?;
+            .map_err(|_| {
+                LoadError::Generic(format!("Failed to read program inputs from {}", prover_name))
+            })?;
     let initial_witness = compiled_program
         .abi
         .encode(&inputs_map, None)
-        .map_err(|_| LoadError("Failed to encode inputs"))?;
+        .map_err(|_| LoadError::Generic("Failed to encode inputs".into()))?;
 
     Ok((compiled_program, initial_witness))
 }
@@ -128,7 +136,7 @@ fn load_and_compile_project(
 fn loop_uninitialized_dap<R: Read, W: Write>(
     mut server: Server<R, W>,
     expression_width: ExpressionWidth,
-) -> Result<(), ServerError> {
+) -> Result<(), DapError> {
loop { let req = match server.poll_request()? { Some(req) => req, @@ -151,7 +159,8 @@ fn loop_uninitialized_dap( server.respond(req.error("Missing launch arguments"))?; continue; }; - let Some(Value::String(ref project_folder)) = additional_data.get("projectFolder") else { + let Some(Value::String(ref project_folder)) = additional_data.get("projectFolder") + else { server.respond(req.error("Missing project folder argument"))?; continue; }; @@ -163,6 +172,13 @@ fn loop_uninitialized_dap( .and_then(|v| v.as_str()) .unwrap_or(PROVER_INPUT_FILE); + let generate_acir = + additional_data.get("generateAcir").and_then(|v| v.as_bool()).unwrap_or(false); + let skip_instrumentation = additional_data + .get("skipInstrumentation") + .and_then(|v| v.as_bool()) + .unwrap_or(generate_acir); + eprintln!("Project folder: {}", project_folder); eprintln!("Package: {}", package.unwrap_or("(default)")); eprintln!("Prover name: {}", prover_name); @@ -172,6 +188,8 @@ fn loop_uninitialized_dap( package, prover_name, expression_width, + generate_acir, + skip_instrumentation, ) { Ok((compiled_program, initial_witness)) => { server.respond(req.ack()?)?; @@ -186,8 +204,8 @@ fn loop_uninitialized_dap( )?; break; } - Err(LoadError(message)) => { - server.respond(req.error(message))?; + Err(LoadError::Generic(message)) => { + server.respond(req.error(message.as_str()))?; } } } @@ -206,17 +224,58 @@ fn loop_uninitialized_dap( Ok(()) } +fn run_preflight_check( + expression_width: ExpressionWidth, + args: DapCommand, +) -> Result<(), DapError> { + let project_folder = if let Some(project_folder) = args.preflight_project_folder { + project_folder + } else { + return Err(DapError::PreFlightGenericError("Noir Debugger could not initialize because the IDE (for example, VS Code) did not specify a project folder to debug.".into())); + }; + + let package = args.preflight_package.as_deref(); + let prover_name = args.preflight_prover_name.as_deref().unwrap_or(PROVER_INPUT_FILE); + + let _ = load_and_compile_project( + project_folder.as_str(), + package, + prover_name, + expression_width, + args.preflight_generate_acir, + args.preflight_skip_instrumentation, + )?; + + Ok(()) +} + pub(crate) fn run( backend: &Backend, args: DapCommand, _config: NargoConfig, ) -> Result<(), CliError> { + let expression_width = + args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); + + // When the --preflight-check flag is present, we run Noir's DAP server in "pre-flight mode", which test runs + // the DAP initialization code without actually starting the DAP server. + // + // This lets the client IDE present any initialization issues (compiler version mismatches, missing prover files, etc) + // in its own interface. + // + // This was necessary due to the VS Code project being reluctant to let extension authors capture + // stderr output generated by a DAP server wrapped in DebugAdapterExecutable. + // + // Exposing this preflight mode lets us gracefully handle errors that happen *before* + // the DAP loop is established, which otherwise are considered "out of band" by the maintainers of the DAP spec. 
+ // More details here: https://github.com/microsoft/vscode/issues/108138 + if args.preflight_check { + return run_preflight_check(expression_width, args).map_err(CliError::DapError); + } + let output = BufWriter::new(std::io::stdout()); let input = BufReader::new(std::io::stdin()); let server = Server::new(input, output); - let expression_width = - args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); - loop_uninitialized_dap(server, expression_width).map_err(CliError::DapError) } diff --git a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs index b3ee9137530..130a07b5c90 100644 --- a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -7,8 +7,10 @@ use clap::Args; use fm::FileManager; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; -use nargo::ops::compile_program_with_debug_instrumenter; +use nargo::errors::CompileError; +use nargo::ops::{compile_program, compile_program_with_debug_instrumenter}; use nargo::package::Package; +use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; @@ -42,6 +44,14 @@ pub(crate) struct DebugCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// Force ACIR output (disabling instrumentation) + #[clap(long)] + acir_mode: bool, + + /// Disable vars debug instrumentation (enabled by default) + #[clap(long)] + skip_instrumentation: Option, } pub(crate) fn run( @@ -49,9 +59,8 @@ pub(crate) fn run( args: DebugCommand, config: NargoConfig, ) -> Result<(), CliError> { - // Override clap default for compiler option flag - let mut args = args.clone(); - args.compile_options.instrument_debug = true; + let acir_mode = args.acir_mode; + let skip_instrumentation = args.skip_instrumentation.unwrap_or(acir_mode); let toml_path = get_package_manifest(&config.program_dir)?; let selection = args.package.map_or(PackageSelection::DefaultOrAll, PackageSelection::Selected); @@ -66,10 +75,6 @@ pub(crate) fn run( .expression_width .unwrap_or_else(|| backend.get_backend_info_or_default()); - let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let mut parsed_files = parse_all(&workspace_file_manager); - let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( "No matching binary packages found in workspace. Only binary packages can be debugged." 
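
The new debugging flags resolve against each other: when --skip-instrumentation (or the DAP client's skipInstrumentation) is absent, it defaults to the value of acir_mode / generateAcir, so plain ACIR debugging skips variable instrumentation unless explicitly overridden. A tiny illustrative sketch of that resolution follows; resolve_flags is hypothetical, not a function from this diff.

// Sketch of the flag-defaulting logic: skip_instrumentation falls back to
// acir_mode when not given explicitly.
fn resolve_flags(acir_mode: bool, skip_instrumentation: Option<bool>) -> (bool, bool) {
    (acir_mode, skip_instrumentation.unwrap_or(acir_mode))
}

fn main() {
    // Default Brillig debugging: instrumentation stays on.
    assert_eq!(resolve_flags(false, None), (false, false));
    // ACIR mode implies skipping vars instrumentation unless overridden.
    assert_eq!(resolve_flags(true, None), (true, true));
    // An explicit override wins over the implied default.
    assert_eq!(resolve_flags(true, Some(false)), (true, false));
}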
@@ -77,23 +82,12 @@ pub(crate) fn run(
         return Ok(());
     };
 
-    let debug_instrumenter =
-        instrument_package_files(&mut parsed_files, &workspace_file_manager, package);
-
-    let compilation_result = compile_program_with_debug_instrumenter(
-        &workspace_file_manager,
-        &parsed_files,
+    let compiled_program = compile_bin_package_for_debugging(
+        &workspace,
         package,
-        &args.compile_options,
-        None,
-        debug_instrumenter,
-    );
-
-    let compiled_program = report_errors(
-        compilation_result,
-        &workspace_file_manager,
-        args.compile_options.deny_warnings,
-        args.compile_options.silence_warnings,
+        acir_mode,
+        skip_instrumentation,
+        args.compile_options.clone(),
     )?;
 
     let compiled_program = nargo::ops::transform_program(compiled_program, expression_width);
@@ -101,9 +95,50 @@ pub(crate) fn run(
     run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir)
 }
 
+pub(crate) fn compile_bin_package_for_debugging(
+    workspace: &Workspace,
+    package: &Package,
+    acir_mode: bool,
+    skip_instrumentation: bool,
+    compile_options: CompileOptions,
+) -> Result<CompiledProgram, CompileError> {
+    let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new(""));
+    insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager);
+    let mut parsed_files = parse_all(&workspace_file_manager);
+
+    let compile_options = CompileOptions {
+        instrument_debug: !skip_instrumentation,
+        force_brillig: !acir_mode,
+        ..compile_options
+    };
+
+    let compilation_result = if !skip_instrumentation {
+        let debug_state =
+            instrument_package_files(&mut parsed_files, &workspace_file_manager, package);
+
+        compile_program_with_debug_instrumenter(
+            &workspace_file_manager,
+            &parsed_files,
+            package,
+            &compile_options,
+            None,
+            debug_state,
+        )
+    } else {
+        compile_program(&workspace_file_manager, &parsed_files, package, &compile_options, None)
+    };
+
+    report_errors(
+        compilation_result,
+        &workspace_file_manager,
+        compile_options.deny_warnings,
+        compile_options.silence_warnings,
+    )
+}
+
 /// Add debugging instrumentation to all parsed files belonging to the package
 /// being compiled
-pub(crate) fn instrument_package_files(
+fn instrument_package_files(
     parsed_files: &mut ParsedFiles,
     file_manager: &FileManager,
     package: &Package,
@@ -112,8 +147,7 @@ pub(crate) fn instrument_package_files(
     let entry_path_parent = package
         .entry_path
         .parent()
-        .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path))
-        .clone();
+        .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path));
 
     let mut debug_instrumenter = DebugInstrumenter::default();
 
diff --git a/noir/tooling/nargo_cli/src/errors.rs b/noir/tooling/nargo_cli/src/errors.rs
index 4636772231b..c2996f53420 100644
--- a/noir/tooling/nargo_cli/src/errors.rs
+++ b/noir/tooling/nargo_cli/src/errors.rs
@@ -2,6 +2,7 @@ use acvm::acir::native_types::WitnessMapError;
 use hex::FromHexError;
 use nargo::{errors::CompileError, NargoError};
 use nargo_toml::ManifestError;
+use noir_debugger::errors::DapError;
 use noirc_abi::errors::{AbiError, InputParserError};
 use std::path::PathBuf;
 use thiserror::Error;
@@ -54,7 +55,7 @@ pub(crate) enum CliError {
     LspError(#[from] async_lsp::Error),
 
     #[error(transparent)]
-    DapError(#[from] dap::errors::ServerError),
+    DapError(#[from] DapError),
 
     /// Error from Nargo
     #[error(transparent)]
diff --git a/noir/tooling/nargo_fmt/src/rewrite/infix.rs b/noir/tooling/nargo_fmt/src/rewrite/infix.rs
index 15f5fe23aae..5d2b387496a 100644
--- a/noir/tooling/nargo_fmt/src/rewrite/infix.rs
+++ b/noir/tooling/nargo_fmt/src/rewrite/infix.rs
@@ -96,7 +96,9 @@ pub(crate) fn flatten(
         result.push(rewrite);
 
-        let Some(pop) = stack.pop() else { break; };
+        let Some(pop) = stack.pop() else {
+            break;
+        };
 
         match &pop.kind {
             ExpressionKind::Infix(infix) => {
diff --git a/noir/tooling/nargo_fmt/src/utils.rs b/noir/tooling/nargo_fmt/src/utils.rs
index 5874ebdebbc..94969d45e81 100644
--- a/noir/tooling/nargo_fmt/src/utils.rs
+++ b/noir/tooling/nargo_fmt/src/utils.rs
@@ -1,3 +1,5 @@
+use std::borrow::Cow;
+
 use crate::items::HasItem;
 use crate::rewrite;
 use crate::visitor::{FmtVisitor, Shape};
@@ -143,7 +145,7 @@ impl HasItem for Param {
     fn format(self, visitor: &FmtVisitor, shape: Shape) -> String {
         let pattern = visitor.slice(self.pattern.span());
         let visibility = match self.visibility {
-            Visibility::Public => "pub ",
+            Visibility::Public => "pub",
             Visibility::Private => "",
             Visibility::DataBus => "call_data",
         };
@@ -152,6 +154,7 @@ impl HasItem for Param {
             pattern.to_string()
         } else {
             let ty = rewrite::typ(visitor, shape, self.typ);
+            let visibility = append_space_if_nonempty(visibility.into());
             format!("{pattern}: {visibility}{ty}")
         }
     }
@@ -183,6 +186,15 @@ pub(crate) fn last_line_contains_single_line_comment(s: &str) -> bool {
     s.lines().last().map_or(false, |line| line.contains("//"))
 }
 
+pub(crate) fn append_space_if_nonempty(mut string: Cow<str>) -> Cow<str> {
+    if !string.is_empty() {
+        let inner = string.to_mut();
+        inner.push(' ');
+    }
+
+    string
+}
+
 pub(crate) fn last_line_used_width(s: &str, offset: usize) -> usize {
     if s.contains('\n') {
         last_line_width(s)
diff --git a/noir/tooling/nargo_fmt/src/visitor/expr.rs b/noir/tooling/nargo_fmt/src/visitor/expr.rs
index 9b36911b1af..2cd0e881e84 100644
--- a/noir/tooling/nargo_fmt/src/visitor/expr.rs
+++ b/noir/tooling/nargo_fmt/src/visitor/expr.rs
@@ -202,7 +202,6 @@ pub(crate) fn format_seq(
     reduce: bool,
 ) -> String {
     let mut nested_indent = shape;
-    let shape = shape;
 
     nested_indent.indent.block_indent(visitor.config);
 
diff --git a/noir/tooling/nargo_fmt/src/visitor/item.rs b/noir/tooling/nargo_fmt/src/visitor/item.rs
index 1825a6e05b0..28aad3c551f 100644
--- a/noir/tooling/nargo_fmt/src/visitor/item.rs
+++ b/noir/tooling/nargo_fmt/src/visitor/item.rs
@@ -7,7 +7,10 @@ use noirc_frontend::{
 
 use crate::{
     rewrite::{self, UseTree},
-    utils::{last_line_contains_single_line_comment, last_line_used_width, FindToken},
+    utils::{
+        append_space_if_nonempty, last_line_contains_single_line_comment, last_line_used_width,
+        FindToken,
+    },
     visitor::expr::{format_seq, NewlineMode},
 };
 
@@ -119,9 +122,12 @@ impl super::FmtVisitor<'_> {
             result.push_str("distinct ");
         }
 
-        if let Visibility::Public = func.def.return_visibility {
-            result.push_str("pub ");
-        }
+        let visibility = match func.def.return_visibility {
+            Visibility::Public => "pub",
+            Visibility::DataBus => "return_data",
+            Visibility::Private => "",
+        };
+        result.push_str(&append_space_if_nonempty(visibility.into()));
 
         let typ = rewrite::typ(self, self.shape(), func.return_type());
         result.push_str(&typ);
diff --git a/noir/tooling/nargo_fmt/tests/expected/databus.nr b/noir/tooling/nargo_fmt/tests/expected/databus.nr
new file mode 100644
index 00000000000..60934b60b2f
--- /dev/null
+++ b/noir/tooling/nargo_fmt/tests/expected/databus.nr
@@ -0,0 +1,2 @@
+fn main(x: pub u8, y: call_data u8) -> return_data u32 {}
+
diff --git a/noir/tooling/nargo_fmt/tests/input/databus.nr
b/noir/tooling/nargo_fmt/tests/input/databus.nr new file mode 100644 index 00000000000..60934b60b2f --- /dev/null +++ b/noir/tooling/nargo_fmt/tests/input/databus.nr @@ -0,0 +1,2 @@ +fn main(x: pub u8, y: call_data u8) -> return_data u32 {} + diff --git a/noir/tooling/nargo_toml/src/errors.rs b/noir/tooling/nargo_toml/src/errors.rs index 440895056c3..77fe77bcdbb 100644 --- a/noir/tooling/nargo_toml/src/errors.rs +++ b/noir/tooling/nargo_toml/src/errors.rs @@ -28,7 +28,7 @@ pub enum ManifestError { #[error("Nargo.toml is badly formed, could not parse.\n\n {0}")] MalformedFile(#[from] toml::de::Error), - #[error("Unexpected workspace definition found in {0}")] + #[error("Unexpected workspace definition found in {0}. If you're attempting to load this as a dependency, you may need to add a `directory` field to your `Nargo.toml` to show which package within the workspace to use")] UnexpectedWorkspace(PathBuf), #[error("Cannot find file {entry} which was specified as the `entry` field in {toml}")] diff --git a/noir/yarn.lock b/noir/yarn.lock index 84cf3e593c6..ace7959279f 100644 --- a/noir/yarn.lock +++ b/noir/yarn.lock @@ -4050,6 +4050,16 @@ __metadata: languageName: node linkType: hard +"@jridgewell/trace-mapping@npm:^0.3.20": + version: 0.3.22 + resolution: "@jridgewell/trace-mapping@npm:0.3.22" + dependencies: + "@jridgewell/resolve-uri": ^3.1.0 + "@jridgewell/sourcemap-codec": ^1.4.14 + checksum: ac7dd2cfe0b479aa1b81776d40d789243131cc792dc8b6b6a028c70fcd6171958ae1a71bf67b618ffe3c0c3feead9870c095ee46a5e30319410d92976b28f498 + languageName: node + linkType: hard + "@leichtgewicht/ip-codec@npm:^2.0.1": version: 2.0.4 resolution: "@leichtgewicht/ip-codec@npm:2.0.4" @@ -4466,10 +4476,10 @@ __metadata: assert: ^2.1.0 browserify-fs: ^1.0.0 chai: ^4.3.10 - copy-webpack-plugin: ^11.0.0 + copy-webpack-plugin: ^12.0.2 eslint: ^8.56.0 eslint-plugin-prettier: ^5.0.0 - html-webpack-plugin: ^5.5.4 + html-webpack-plugin: ^5.6.0 memfs: ^4.6.0 mocha: ^10.2.0 mocha-each: ^2.0.1 @@ -4484,8 +4494,9 @@ __metadata: typescript: ~5.2.2 unzipit: ^1.4.3 url: ^0.11.3 - webpack: ^5.49.0 - webpack-cli: ^4.7.2 + webpack: ^5.90.1 + webpack-cli: ^5.1.4 + webpack-dev-server: ^5.0.0 languageName: unknown linkType: soft @@ -5267,6 +5278,13 @@ __metadata: languageName: node linkType: hard +"@sindresorhus/merge-streams@npm:^2.1.0": + version: 2.1.0 + resolution: "@sindresorhus/merge-streams@npm:2.1.0" + checksum: 8aa91a3fca68d4ba78f81cad80f2dc280fa82b6c49c9fa5fe37438b6b9082cf993adb2309163f924bef9d7173b2fae6bb40fc4070a344cbab8bcc19eb1ee0b7c + languageName: node + linkType: hard + "@sinonjs/commons@npm:^2.0.0": version: 2.0.0 resolution: "@sinonjs/commons@npm:2.0.0" @@ -5621,7 +5639,7 @@ __metadata: languageName: node linkType: hard -"@types/bonjour@npm:^3.5.9": +"@types/bonjour@npm:^3.5.13, @types/bonjour@npm:^3.5.9": version: 3.5.13 resolution: "@types/bonjour@npm:3.5.13" dependencies: @@ -5663,7 +5681,7 @@ __metadata: languageName: node linkType: hard -"@types/connect-history-api-fallback@npm:^1.3.5": +"@types/connect-history-api-fallback@npm:^1.3.5, @types/connect-history-api-fallback@npm:^1.5.4": version: 1.5.4 resolution: "@types/connect-history-api-fallback@npm:1.5.4" dependencies: @@ -5753,7 +5771,7 @@ __metadata: languageName: node linkType: hard -"@types/estree@npm:*, @types/estree@npm:1.0.5, @types/estree@npm:^1.0.0": +"@types/estree@npm:*, @types/estree@npm:1.0.5, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.5": version: 1.0.5 resolution: "@types/estree@npm:1.0.5" checksum: 
dd8b5bed28e6213b7acd0fb665a84e693554d850b0df423ac8076cc3ad5823a6bc26b0251d080bdc545af83179ede51dd3f6fa78cad2c46ed1f29624ddf3e41a @@ -5779,7 +5797,7 @@ __metadata: languageName: node linkType: hard -"@types/express@npm:*, @types/express@npm:^4.17.13": +"@types/express@npm:*, @types/express@npm:^4.17.13, @types/express@npm:^4.17.21": version: 4.17.21 resolution: "@types/express@npm:4.17.21" dependencies: @@ -6233,6 +6251,13 @@ __metadata: languageName: node linkType: hard +"@types/retry@npm:0.12.2": + version: 0.12.2 + resolution: "@types/retry@npm:0.12.2" + checksum: e5675035717b39ce4f42f339657cae9637cf0c0051cf54314a6a2c44d38d91f6544be9ddc0280587789b6afd056be5d99dbe3e9f4df68c286c36321579b1bf4a + languageName: node + linkType: hard + "@types/sax@npm:^1.2.1": version: 1.2.7 resolution: "@types/sax@npm:1.2.7" @@ -6275,7 +6300,7 @@ __metadata: languageName: node linkType: hard -"@types/serve-index@npm:^1.9.1": +"@types/serve-index@npm:^1.9.1, @types/serve-index@npm:^1.9.4": version: 1.9.4 resolution: "@types/serve-index@npm:1.9.4" dependencies: @@ -6284,7 +6309,7 @@ __metadata: languageName: node linkType: hard -"@types/serve-static@npm:*, @types/serve-static@npm:^1.13.10": +"@types/serve-static@npm:*, @types/serve-static@npm:^1.13.10, @types/serve-static@npm:^1.15.5": version: 1.15.5 resolution: "@types/serve-static@npm:1.15.5" dependencies: @@ -6311,7 +6336,7 @@ __metadata: languageName: node linkType: hard -"@types/sockjs@npm:^0.3.33": +"@types/sockjs@npm:^0.3.33, @types/sockjs@npm:^0.3.36": version: 0.3.36 resolution: "@types/sockjs@npm:0.3.36" dependencies: @@ -6343,7 +6368,7 @@ __metadata: languageName: node linkType: hard -"@types/ws@npm:^8.5.5": +"@types/ws@npm:^8.5.10, @types/ws@npm:^8.5.5": version: 8.5.10 resolution: "@types/ws@npm:8.5.10" dependencies: @@ -7181,36 +7206,36 @@ __metadata: languageName: node linkType: hard -"@webpack-cli/configtest@npm:^1.2.0": - version: 1.2.0 - resolution: "@webpack-cli/configtest@npm:1.2.0" +"@webpack-cli/configtest@npm:^2.1.1": + version: 2.1.1 + resolution: "@webpack-cli/configtest@npm:2.1.1" peerDependencies: - webpack: 4.x.x || 5.x.x - webpack-cli: 4.x.x - checksum: a2726cd9ec601d2b57e5fc15e0ebf5200a8892065e735911269ac2038e62be4bfc176ea1f88c2c46ff09b4d05d4c10ae045e87b3679372483d47da625a327e28 + webpack: 5.x.x + webpack-cli: 5.x.x + checksum: 9f9f9145c2d05471fc83d426db1df85cf49f329836b0c4b9f46b6948bed4b013464c00622b136d2a0a26993ce2306976682592245b08ee717500b1db45009a72 languageName: node linkType: hard -"@webpack-cli/info@npm:^1.5.0": - version: 1.5.0 - resolution: "@webpack-cli/info@npm:1.5.0" - dependencies: - envinfo: ^7.7.3 +"@webpack-cli/info@npm:^2.0.2": + version: 2.0.2 + resolution: "@webpack-cli/info@npm:2.0.2" peerDependencies: - webpack-cli: 4.x.x - checksum: 7f56fe037cd7d1fd5c7428588519fbf04a0cad33925ee4202ffbafd00f8ec1f2f67d991245e687d50e0f3e23f7b7814273d56cb9f7da4b05eed47c8d815c6296 + webpack: 5.x.x + webpack-cli: 5.x.x + checksum: 8f9a178afca5c82e113aed1efa552d64ee5ae4fdff63fe747c096a981ec74f18a5d07bd6e89bbe6715c3e57d96eea024a410e58977169489fe1df044c10dd94e languageName: node linkType: hard -"@webpack-cli/serve@npm:^1.7.0": - version: 1.7.0 - resolution: "@webpack-cli/serve@npm:1.7.0" +"@webpack-cli/serve@npm:^2.0.5": + version: 2.0.5 + resolution: "@webpack-cli/serve@npm:2.0.5" peerDependencies: - webpack-cli: 4.x.x + webpack: 5.x.x + webpack-cli: 5.x.x peerDependenciesMeta: webpack-dev-server: optional: true - checksum: 
d475e8effa23eb7ff9a48b14d4de425989fd82f906ce71c210921cc3852327c22873be00c35e181a25a6bd03d424ae2b83e7f3b3f410ac7ee31b128ab4ac7713 + checksum: 75f0e54681796d567a71ac3e2781d2901a8d8cf1cdfc82f261034dddac59a8343e8c3bc5e32b4bb9d6766759ba49fb29a5cd86ef1701d79c506fe886bb63ac75 languageName: node linkType: hard @@ -8050,6 +8075,16 @@ __metadata: languageName: node linkType: hard +"bonjour-service@npm:^1.2.1": + version: 1.2.1 + resolution: "bonjour-service@npm:1.2.1" + dependencies: + fast-deep-equal: ^3.1.3 + multicast-dns: ^7.2.5 + checksum: b65b3e6e3a07e97f2da5806afb76f3946d5a6426b72e849a0236dc3c9d3612fb8c5359ebade4be7eb63f74a37670c53a53be2ff17f4f709811fda77f600eb25b + languageName: node + linkType: hard + "boolbase@npm:^1.0.0": version: 1.0.0 resolution: "boolbase@npm:1.0.0" @@ -8300,6 +8335,15 @@ __metadata: languageName: node linkType: hard +"bundle-name@npm:^4.1.0": + version: 4.1.0 + resolution: "bundle-name@npm:4.1.0" + dependencies: + run-applescript: ^7.0.0 + checksum: 1d966c8d2dbf4d9d394e53b724ac756c2414c45c01340b37743621f59cc565a435024b394ddcb62b9b335d1c9a31f4640eb648c3fec7f97ee74dc0694c9beb6c + languageName: node + linkType: hard + "bytes@npm:3.0.0": version: 3.0.0 resolution: "bytes@npm:3.0.0" @@ -8669,6 +8713,25 @@ __metadata: languageName: node linkType: hard +"chokidar@npm:^3.6.0": + version: 3.6.0 + resolution: "chokidar@npm:3.6.0" + dependencies: + anymatch: ~3.1.2 + braces: ~3.0.2 + fsevents: ~2.3.2 + glob-parent: ~5.1.2 + is-binary-path: ~2.1.0 + is-glob: ~4.0.1 + normalize-path: ~3.0.0 + readdirp: ~3.6.0 + dependenciesMeta: + fsevents: + optional: true + checksum: d2f29f499705dcd4f6f3bbed79a9ce2388cf530460122eed3b9c48efeab7a4e28739c6551fd15bec9245c6b9eeca7a32baa64694d64d9b6faeb74ddb8c4a413d + languageName: node + linkType: hard + "chownr@npm:^1.1.1": version: 1.1.4 resolution: "chownr@npm:1.1.4" @@ -9101,7 +9164,7 @@ __metadata: languageName: node linkType: hard -"commander@npm:^7.0.0, commander@npm:^7.2.0": +"commander@npm:^7.2.0": version: 7.2.0 resolution: "commander@npm:7.2.0" checksum: 53501cbeee61d5157546c0bef0fedb6cdfc763a882136284bed9a07225f09a14b82d2a84e7637edfd1a679fb35ed9502fd58ef1d091e6287f60d790147f68ddc @@ -9327,6 +9390,22 @@ __metadata: languageName: node linkType: hard +"copy-webpack-plugin@npm:^12.0.2": + version: 12.0.2 + resolution: "copy-webpack-plugin@npm:12.0.2" + dependencies: + fast-glob: ^3.3.2 + glob-parent: ^6.0.1 + globby: ^14.0.0 + normalize-path: ^3.0.0 + schema-utils: ^4.2.0 + serialize-javascript: ^6.0.2 + peerDependencies: + webpack: ^5.1.0 + checksum: 98127735336c6db5924688486d3a1854a41835963d0c0b81695b2e3d58c6675164be7d23dee7090b84a56d3c9923175d3d0863ac1942bcc3317d2efc1962b927 + languageName: node + linkType: hard + "core-js-compat@npm:^3.31.0, core-js-compat@npm:^3.33.1": version: 3.34.0 resolution: "core-js-compat@npm:3.34.0" @@ -9949,6 +10028,13 @@ __metadata: languageName: node linkType: hard +"default-browser-id@npm:^5.0.0": + version: 5.0.0 + resolution: "default-browser-id@npm:5.0.0" + checksum: 185bfaecec2c75fa423544af722a3469b20704c8d1942794a86e4364fe7d9e8e9f63241a5b769d61c8151993bc65833a5b959026fa1ccea343b3db0a33aa6deb + languageName: node + linkType: hard + "default-browser@npm:^4.0.0": version: 4.0.0 resolution: "default-browser@npm:4.0.0" @@ -9961,6 +10047,16 @@ __metadata: languageName: node linkType: hard +"default-browser@npm:^5.2.1": + version: 5.2.1 + resolution: "default-browser@npm:5.2.1" + dependencies: + bundle-name: ^4.1.0 + default-browser-id: ^5.0.0 + checksum: 
afab7eff7b7f5f7a94d9114d1ec67273d3fbc539edf8c0f80019879d53aa71e867303c6f6d7cffeb10a6f3cfb59d4f963dba3f9c96830b4540cc7339a1bf9840 + languageName: node + linkType: hard + "default-gateway@npm:^6.0.3": version: 6.0.3 resolution: "default-gateway@npm:6.0.3" @@ -12202,6 +12298,20 @@ __metadata: languageName: node linkType: hard +"globby@npm:^14.0.0": + version: 14.0.1 + resolution: "globby@npm:14.0.1" + dependencies: + "@sindresorhus/merge-streams": ^2.1.0 + fast-glob: ^3.3.2 + ignore: ^5.2.4 + path-type: ^5.0.0 + slash: ^5.1.0 + unicorn-magic: ^0.1.0 + checksum: 33568444289afb1135ad62d52d5e8412900cec620e3b6ece533afa46d004066f14b97052b643833d7cf4ee03e7fac571430130cde44c333df91a45d313105170 + languageName: node + linkType: hard + "gopd@npm:^1.0.1": version: 1.0.1 resolution: "gopd@npm:1.0.1" @@ -12791,7 +12901,7 @@ __metadata: languageName: node linkType: hard -"html-entities@npm:^2.3.2": +"html-entities@npm:^2.3.2, html-entities@npm:^2.4.0": version: 2.4.0 resolution: "html-entities@npm:2.4.0" checksum: 25bea32642ce9ebd0eedc4d24381883ecb0335ccb8ac26379a0958b9b16652fdbaa725d70207ce54a51db24103436a698a8e454397d3ba8ad81460224751f1dc @@ -12860,7 +12970,7 @@ __metadata: languageName: node linkType: hard -"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3, html-webpack-plugin@npm:^5.5.4": +"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3": version: 5.5.4 resolution: "html-webpack-plugin@npm:5.5.4" dependencies: @@ -12875,6 +12985,27 @@ __metadata: languageName: node linkType: hard +"html-webpack-plugin@npm:^5.6.0": + version: 5.6.0 + resolution: "html-webpack-plugin@npm:5.6.0" + dependencies: + "@types/html-minifier-terser": ^6.0.0 + html-minifier-terser: ^6.0.2 + lodash: ^4.17.21 + pretty-error: ^4.0.0 + tapable: ^2.0.0 + peerDependencies: + "@rspack/core": 0.x || 1.x + webpack: ^5.20.0 + peerDependenciesMeta: + "@rspack/core": + optional: true + webpack: + optional: true + checksum: 32a6e41da538e798fd0be476637d7611a5e8a98a3508f031996e9eb27804dcdc282cb01f847cf5d066f21b49cfb8e21627fcf977ffd0c9bea81cf80e5a65070d + languageName: node + linkType: hard + "htmlparser2@npm:^6.1.0": version: 6.1.0 resolution: "htmlparser2@npm:6.1.0" @@ -13305,10 +13436,10 @@ __metadata: languageName: node linkType: hard -"interpret@npm:^2.2.0": - version: 2.2.0 - resolution: "interpret@npm:2.2.0" - checksum: f51efef7cb8d02da16408ffa3504cd6053014c5aeb7bb8c223727e053e4235bf565e45d67028b0c8740d917c603807aa3c27d7bd2f21bf20b6417e2bb3e5fd6e +"interpret@npm:^3.1.1": + version: 3.1.1 + resolution: "interpret@npm:3.1.1" + checksum: 35cebcf48c7351130437596d9ab8c8fe131ce4038da4561e6d665f25640e0034702a031cf7e3a5cea60ac7ac548bf17465e0571ede126f3d3a6933152171ac82 languageName: node linkType: hard @@ -13351,7 +13482,7 @@ __metadata: languageName: node linkType: hard -"ipaddr.js@npm:^2.0.1": +"ipaddr.js@npm:^2.0.1, ipaddr.js@npm:^2.1.0": version: 2.1.0 resolution: "ipaddr.js@npm:2.1.0" checksum: 807a054f2bd720c4d97ee479d6c9e865c233bea21f139fb8dabd5a35c4226d2621c42e07b4ad94ff3f82add926a607d8d9d37c625ad0319f0e08f9f2bd1968e2 @@ -13609,6 +13740,13 @@ __metadata: languageName: node linkType: hard +"is-network-error@npm:^1.0.0": + version: 1.0.1 + resolution: "is-network-error@npm:1.0.1" + checksum: 165d61500c4186c62db5a3a693d6bfa14ca40fe9b471ef4cd4f27b20ef6760880faf5386dc01ca9867531631782941fedaa94521d09959edf71f046e393c7b91 + languageName: node + linkType: hard + "is-npm@npm:^5.0.0": version: 5.0.0 resolution: "is-npm@npm:5.0.0" @@ -13792,6 +13930,15 @@ __metadata: languageName: node linkType: hard 
+"is-wsl@npm:^3.1.0": + version: 3.1.0 + resolution: "is-wsl@npm:3.1.0" + dependencies: + is-inside-container: ^1.0.0 + checksum: f9734c81f2f9cf9877c5db8356bfe1ff61680f1f4c1011e91278a9c0564b395ae796addb4bf33956871041476ec82c3e5260ed57b22ac91794d4ae70a1d2f0a9 + languageName: node + linkType: hard + "is-yarn-global@npm:^0.3.0": version: 0.3.0 resolution: "is-yarn-global@npm:0.3.0" @@ -14330,7 +14477,7 @@ __metadata: languageName: node linkType: hard -"launch-editor@npm:^2.6.0": +"launch-editor@npm:^2.6.0, launch-editor@npm:^2.6.1": version: 2.6.1 resolution: "launch-editor@npm:2.6.1" dependencies: @@ -16565,6 +16712,18 @@ __metadata: languageName: node linkType: hard +"open@npm:^10.0.3": + version: 10.0.3 + resolution: "open@npm:10.0.3" + dependencies: + default-browser: ^5.2.1 + define-lazy-prop: ^3.0.0 + is-inside-container: ^1.0.0 + is-wsl: ^3.1.0 + checksum: 3c4b4eb3c08210f7b7b3f3311d36440f4b83f0641ac70e5e56d637f48d4a7736e0fd49a604eebe0a55c51223d77f9ced11912223cab12d5e9fdc866727c6cb1d + languageName: node + linkType: hard + "open@npm:^8.0.2, open@npm:^8.0.9, open@npm:^8.4.0": version: 8.4.2 resolution: "open@npm:8.4.2" @@ -16748,6 +16907,17 @@ __metadata: languageName: node linkType: hard +"p-retry@npm:^6.2.0": + version: 6.2.0 + resolution: "p-retry@npm:6.2.0" + dependencies: + "@types/retry": 0.12.2 + is-network-error: ^1.0.0 + retry: ^0.13.1 + checksum: 6003573c559ee812329c9c3ede7ba12a783fdc8dd70602116646e850c920b4597dc502fe001c3f9526fca4e93275045db7a27341c458e51db179c1374a01ac44 + languageName: node + linkType: hard + "p-try@npm:^1.0.0": version: 1.0.0 resolution: "p-try@npm:1.0.0" @@ -17043,6 +17213,13 @@ __metadata: languageName: node linkType: hard +"path-type@npm:^5.0.0": + version: 5.0.0 + resolution: "path-type@npm:5.0.0" + checksum: 15ec24050e8932c2c98d085b72cfa0d6b4eeb4cbde151a0a05726d8afae85784fc5544f733d8dfc68536587d5143d29c0bd793623fad03d7e61cc00067291cd5 + languageName: node + linkType: hard + "pathval@npm:^1.1.1": version: 1.1.1 resolution: "pathval@npm:1.1.1" @@ -18273,12 +18450,12 @@ __metadata: languageName: node linkType: hard -"rechoir@npm:^0.7.0": - version: 0.7.1 - resolution: "rechoir@npm:0.7.1" +"rechoir@npm:^0.8.0": + version: 0.8.0 + resolution: "rechoir@npm:0.8.0" dependencies: - resolve: ^1.9.0 - checksum: 2a04aab4e28c05fcd6ee6768446bc8b859d8f108e71fc7f5bcbc5ef25e53330ce2c11d10f82a24591a2df4c49c4f61feabe1fd11f844c66feedd4cd7bb61146a + resolve: ^1.20.0 + checksum: ad3caed8afdefbc33fbc30e6d22b86c35b3d51c2005546f4e79bcc03c074df804b3640ad18945e6bef9ed12caedc035655ec1082f64a5e94c849ff939dc0a788 languageName: node linkType: hard @@ -18730,7 +18907,7 @@ __metadata: languageName: node linkType: hard -"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, resolve@npm:^1.22.1, resolve@npm:^1.3.2, resolve@npm:^1.9.0": +"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, resolve@npm:^1.20.0, resolve@npm:^1.22.1, resolve@npm:^1.3.2": version: 1.22.8 resolution: "resolve@npm:1.22.8" dependencies: @@ -18752,7 +18929,7 @@ __metadata: languageName: node linkType: hard -"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.3.2#~builtin, resolve@patch:resolve@^1.9.0#~builtin": +"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.20.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.3.2#~builtin": version: 1.22.8 
resolution: "resolve@patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=c3c19d" dependencies: @@ -18966,6 +19143,13 @@ __metadata: languageName: node linkType: hard +"run-applescript@npm:^7.0.0": + version: 7.0.0 + resolution: "run-applescript@npm:7.0.0" + checksum: b02462454d8b182ad4117e5d4626e9e6782eb2072925c9fac582170b0627ae3c1ea92ee9b2df7daf84b5e9ffe14eb1cf5fb70bc44b15c8a0bfcdb47987e2410c + languageName: node + linkType: hard + "run-parallel-limit@npm:^1.1.0": version: 1.1.0 resolution: "run-parallel-limit@npm:1.1.0" @@ -19070,7 +19254,7 @@ __metadata: languageName: node linkType: hard -"schema-utils@npm:^4.0.0": +"schema-utils@npm:^4.0.0, schema-utils@npm:^4.2.0": version: 4.2.0 resolution: "schema-utils@npm:4.2.0" dependencies: @@ -19118,7 +19302,7 @@ __metadata: languageName: node linkType: hard -"selfsigned@npm:^2.1.1": +"selfsigned@npm:^2.1.1, selfsigned@npm:^2.4.1": version: 2.4.1 resolution: "selfsigned@npm:2.4.1" dependencies: @@ -19223,6 +19407,15 @@ __metadata: languageName: node linkType: hard +"serialize-javascript@npm:^6.0.2": + version: 6.0.2 + resolution: "serialize-javascript@npm:6.0.2" + dependencies: + randombytes: ^2.1.0 + checksum: c4839c6206c1d143c0f80763997a361310305751171dd95e4b57efee69b8f6edd8960a0b7fbfc45042aadff98b206d55428aee0dc276efe54f100899c7fa8ab7 + languageName: node + linkType: hard + "serve-handler@npm:6.1.5, serve-handler@npm:^6.1.3, serve-handler@npm:^6.1.5": version: 6.1.5 resolution: "serve-handler@npm:6.1.5" @@ -19490,6 +19683,13 @@ __metadata: languageName: node linkType: hard +"slash@npm:^5.1.0": + version: 5.1.0 + resolution: "slash@npm:5.1.0" + checksum: 70434b34c50eb21b741d37d455110258c42d2cf18c01e6518aeb7299f3c6e626330c889c0c552b5ca2ef54a8f5a74213ab48895f0640717cacefeef6830a1ba4 + languageName: node + linkType: hard + "slice-ansi@npm:^4.0.0": version: 4.0.0 resolution: "slice-ansi@npm:4.0.0" @@ -20114,6 +20314,28 @@ __metadata: languageName: node linkType: hard +"terser-webpack-plugin@npm:^5.3.10": + version: 5.3.10 + resolution: "terser-webpack-plugin@npm:5.3.10" + dependencies: + "@jridgewell/trace-mapping": ^0.3.20 + jest-worker: ^27.4.5 + schema-utils: ^3.1.1 + serialize-javascript: ^6.0.1 + terser: ^5.26.0 + peerDependencies: + webpack: ^5.1.0 + peerDependenciesMeta: + "@swc/core": + optional: true + esbuild: + optional: true + uglify-js: + optional: true + checksum: bd6e7596cf815f3353e2a53e79cbdec959a1b0276f5e5d4e63e9d7c3c5bb5306df567729da287d1c7b39d79093e56863c569c42c6c24cc34c76aa313bd2cbcea + languageName: node + linkType: hard + "terser-webpack-plugin@npm:^5.3.3, terser-webpack-plugin@npm:^5.3.7, terser-webpack-plugin@npm:^5.3.9": version: 5.3.9 resolution: "terser-webpack-plugin@npm:5.3.9" @@ -20150,6 +20372,20 @@ __metadata: languageName: node linkType: hard +"terser@npm:^5.26.0": + version: 5.27.0 + resolution: "terser@npm:5.27.0" + dependencies: + "@jridgewell/source-map": ^0.3.3 + acorn: ^8.8.2 + commander: ^2.20.0 + source-map-support: ~0.5.20 + bin: + terser: bin/terser + checksum: c165052cfea061e8512e9b9ba42a098c2ff6382886ae122b040fd5b6153443070cc2dcb4862269f1669c09c716763e856125a355ff984aa72be525d6fffd8729 + languageName: node + linkType: hard + "text-table@npm:^0.2.0": version: 0.2.0 resolution: "text-table@npm:0.2.0" @@ -20765,6 +21001,13 @@ __metadata: languageName: node linkType: hard +"unicorn-magic@npm:^0.1.0": + version: 0.1.0 + resolution: "unicorn-magic@npm:0.1.0" + checksum: 48c5882ca3378f380318c0b4eb1d73b7e3c5b728859b060276e0a490051d4180966beeb48962d850fd0c6816543bcdfc28629dcd030bb62a286a2ae2acb5acb6 + 
languageName: node + linkType: hard + "unified@npm:9.2.0": version: 9.2.0 resolution: "unified@npm:9.2.0" @@ -21409,36 +21652,35 @@ __metadata: languageName: node linkType: hard -"webpack-cli@npm:^4.7.2": - version: 4.10.0 - resolution: "webpack-cli@npm:4.10.0" +"webpack-cli@npm:^5.1.4": + version: 5.1.4 + resolution: "webpack-cli@npm:5.1.4" dependencies: "@discoveryjs/json-ext": ^0.5.0 - "@webpack-cli/configtest": ^1.2.0 - "@webpack-cli/info": ^1.5.0 - "@webpack-cli/serve": ^1.7.0 + "@webpack-cli/configtest": ^2.1.1 + "@webpack-cli/info": ^2.0.2 + "@webpack-cli/serve": ^2.0.5 colorette: ^2.0.14 - commander: ^7.0.0 + commander: ^10.0.1 cross-spawn: ^7.0.3 + envinfo: ^7.7.3 fastest-levenshtein: ^1.0.12 import-local: ^3.0.2 - interpret: ^2.2.0 - rechoir: ^0.7.0 + interpret: ^3.1.1 + rechoir: ^0.8.0 webpack-merge: ^5.7.3 peerDependencies: - webpack: 4.x.x || 5.x.x + webpack: 5.x.x peerDependenciesMeta: "@webpack-cli/generators": optional: true - "@webpack-cli/migrate": - optional: true webpack-bundle-analyzer: optional: true webpack-dev-server: optional: true bin: webpack-cli: bin/cli.js - checksum: 2ff5355ac348e6b40f2630a203b981728834dca96d6d621be96249764b2d0fc01dd54edfcc37f02214d02935de2cf0eefd6ce689d970d154ef493f01ba922390 + checksum: 3a4ad0d0342a6815c850ee4633cc2a8a5dae04f918e7847f180bf24ab400803cf8a8943707ffbed03eb20fe6ce647f996f60a2aade87b0b4a9954da3da172ce0 languageName: node linkType: hard @@ -21457,6 +21699,24 @@ __metadata: languageName: node linkType: hard +"webpack-dev-middleware@npm:^7.0.0": + version: 7.0.0 + resolution: "webpack-dev-middleware@npm:7.0.0" + dependencies: + colorette: ^2.0.10 + memfs: ^4.6.0 + mime-types: ^2.1.31 + range-parser: ^1.2.1 + schema-utils: ^4.0.0 + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + webpack: + optional: true + checksum: 90f6c87c80bd5849c34f3a1761ac7dc1b123def2e6e9922f55102ff4b7532538641fa8c7169ce8254b0d471c27d882cdf4a1c32979952474fc8eacc8b3447915 + languageName: node + linkType: hard + "webpack-dev-server@npm:^4.15.1, webpack-dev-server@npm:^4.9.3": version: 4.15.1 resolution: "webpack-dev-server@npm:4.15.1" @@ -21504,6 +21764,53 @@ __metadata: languageName: node linkType: hard +"webpack-dev-server@npm:^5.0.0": + version: 5.0.0 + resolution: "webpack-dev-server@npm:5.0.0" + dependencies: + "@types/bonjour": ^3.5.13 + "@types/connect-history-api-fallback": ^1.5.4 + "@types/express": ^4.17.21 + "@types/serve-index": ^1.9.4 + "@types/serve-static": ^1.15.5 + "@types/sockjs": ^0.3.36 + "@types/ws": ^8.5.10 + ansi-html-community: ^0.0.8 + bonjour-service: ^1.2.1 + chokidar: ^3.6.0 + colorette: ^2.0.10 + compression: ^1.7.4 + connect-history-api-fallback: ^2.0.0 + default-gateway: ^6.0.3 + express: ^4.17.3 + graceful-fs: ^4.2.6 + html-entities: ^2.4.0 + http-proxy-middleware: ^2.0.3 + ipaddr.js: ^2.1.0 + launch-editor: ^2.6.1 + open: ^10.0.3 + p-retry: ^6.2.0 + rimraf: ^5.0.5 + schema-utils: ^4.2.0 + selfsigned: ^2.4.1 + serve-index: ^1.9.1 + sockjs: ^0.3.24 + spdy: ^4.0.2 + webpack-dev-middleware: ^7.0.0 + ws: ^8.16.0 + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + webpack: + optional: true + webpack-cli: + optional: true + bin: + webpack-dev-server: bin/webpack-dev-server.js + checksum: 419d1af6b6164900fb01168c3ef965fe8d27a78939ef8f5c602f82af5be8a2b68a0b015df564623dd69996d5265c679202c5970b59797e83cf322e47bbcd6022 + languageName: node + linkType: hard + "webpack-merge@npm:^5.7.3, webpack-merge@npm:^5.8.0, webpack-merge@npm:^5.9.0": version: 5.10.0 resolution: "webpack-merge@npm:5.10.0" @@ -21522,7 +21829,7 @@ 
__metadata: languageName: node linkType: hard -"webpack@npm:^5.49.0, webpack@npm:^5.73.0, webpack@npm:^5.88.1": +"webpack@npm:^5.73.0, webpack@npm:^5.88.1": version: 5.89.0 resolution: "webpack@npm:5.89.0" dependencies: @@ -21559,6 +21866,43 @@ __metadata: languageName: node linkType: hard +"webpack@npm:^5.90.1": + version: 5.90.1 + resolution: "webpack@npm:5.90.1" + dependencies: + "@types/eslint-scope": ^3.7.3 + "@types/estree": ^1.0.5 + "@webassemblyjs/ast": ^1.11.5 + "@webassemblyjs/wasm-edit": ^1.11.5 + "@webassemblyjs/wasm-parser": ^1.11.5 + acorn: ^8.7.1 + acorn-import-assertions: ^1.9.0 + browserslist: ^4.21.10 + chrome-trace-event: ^1.0.2 + enhanced-resolve: ^5.15.0 + es-module-lexer: ^1.2.1 + eslint-scope: 5.1.1 + events: ^3.2.0 + glob-to-regexp: ^0.4.1 + graceful-fs: ^4.2.9 + json-parse-even-better-errors: ^2.3.1 + loader-runner: ^4.2.0 + mime-types: ^2.1.27 + neo-async: ^2.6.2 + schema-utils: ^3.2.0 + tapable: ^2.1.1 + terser-webpack-plugin: ^5.3.10 + watchpack: ^2.4.0 + webpack-sources: ^3.2.3 + peerDependenciesMeta: + webpack-cli: + optional: true + bin: + webpack: bin/webpack.js + checksum: a7be844d5720a0c6282fec012e6fa34b1137dff953c5d48bf2ef066a6c27c1dbc92a9b9effc05ee61c9fe269499266db9782073f2d82a589d3c5c966ffc56584 + languageName: node + linkType: hard + "webpackbar@npm:^5.0.2": version: 5.0.2 resolution: "webpackbar@npm:5.0.2" @@ -21833,6 +22177,21 @@ __metadata: languageName: node linkType: hard +"ws@npm:^8.16.0": + version: 8.16.0 + resolution: "ws@npm:8.16.0" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ">=5.0.2" + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: feb3eecd2bae82fa8a8beef800290ce437d8b8063bdc69712725f21aef77c49cb2ff45c6e5e7fce622248f9c7abaee506bae0a9064067ffd6935460c7357321b + languageName: node + linkType: hard + "xdg-basedir@npm:^4.0.0": version: 4.0.0 resolution: "xdg-basedir@npm:4.0.0" diff --git a/yarn-project/circuits.js/fixtures/Benchmarking.test.json b/yarn-project/circuits.js/fixtures/Benchmarking.test.json index 4ee8cebbe8b..cc99240365a 100644 --- a/yarn-project/circuits.js/fixtures/Benchmarking.test.json +++ b/yarn-project/circuits.js/fixtures/Benchmarking.test.json @@ -3388,7 +3388,7 @@ "path": "/home/santiago/Projects/aztec3-packages/noir-contracts/contracts/benchmarking_contract/src/main.nr" }, "40": { - "source": "use crate::{\n abi::{\n PrivateContextInputs,\n PublicContextInputs,\n },\n key::nullifier_key::validate_nullifier_key_against_address,\n messaging::process_l1_to_l2_message,\n oracle::{\n arguments,\n call_private_function::call_private_function_internal,\n public_call::call_public_function_internal,\n enqueue_public_function_call::enqueue_public_function_call_internal,\n context::get_portal_address,\n get_block_header::get_block_header,\n nullifier_key::get_nullifier_key_pair,\n },\n types::vec::BoundedVec,\n utils::Reader,\n};\nuse dep::protocol_types::{\n abis::{\n block_header::BlockHeader,\n call_context::CallContext,\n function_data::FunctionData,\n function_selector::FunctionSelector,\n private_circuit_public_inputs::PrivateCircuitPublicInputs,\n public_circuit_public_inputs::PublicCircuitPublicInputs,\n call_stack_item::PrivateCallStackItem,\n call_stack_item::PublicCallStackItem,\n side_effect::{SideEffect, SideEffectLinkedToNoteHash},\n },\n address::{\n AztecAddress,\n EthAddress,\n },\n constants::{\n MAX_NEW_NOTE_HASHES_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n 
MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_READ_REQUESTS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n RETURN_VALUES_LENGTH,\n },\n contrakt::{\n deployment_data::ContractDeploymentData,\n storage_read::StorageRead,\n storage_update_request::StorageUpdateRequest,\n },\n hash::hash_args,\n grumpkin_point::GrumpkinPoint,\n};\nuse dep::std::{\n grumpkin_scalar::GrumpkinScalar,\n option::Option,\n};\n\n// TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n// use dep::std::collections::vec::Vec;\n\n// When finished, one can call .finish() to convert back to the abi\nstruct PrivateContext {\n // docs:start:private-context\n inputs: PrivateContextInputs,\n side_effect_counter: u32,\n\n args_hash : Field,\n return_values : BoundedVec,\n\n read_requests: BoundedVec,\n\n new_note_hashes: BoundedVec,\n new_nullifiers: BoundedVec,\n\n private_call_stack_hashes : BoundedVec,\n public_call_stack_hashes : BoundedVec,\n new_l2_to_l1_msgs : BoundedVec,\n // docs:end:private-context\n\n block_header: BlockHeader,\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec,\n // unencrypted_logs_preimages: Vec,\n}\n\nimpl PrivateContext {\n pub fn new(inputs: PrivateContextInputs, args_hash: Field) -> PrivateContext {\n PrivateContext {\n inputs: inputs,\n side_effect_counter: inputs.call_context.start_side_effect_counter,\n\n args_hash: args_hash,\n return_values: BoundedVec::new(0),\n\n read_requests: BoundedVec::new(SideEffect::empty()),\n\n new_note_hashes: BoundedVec::new(SideEffect::empty()),\n new_nullifiers: BoundedVec::new(SideEffectLinkedToNoteHash::empty()),\n\n block_header: inputs.block_header,\n\n private_call_stack_hashes: BoundedVec::new(0),\n public_call_stack_hashes: BoundedVec::new(0),\n new_l2_to_l1_msgs: BoundedVec::new(0),\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec::new(),\n // unencrypted_logs_preimages: Vec::new(),\n }\n }\n\n pub fn msg_sender(self) -> AztecAddress {\n self.inputs.call_context.msg_sender\n }\n\n pub fn this_address(self) -> AztecAddress {\n self.inputs.call_context.storage_contract_address\n }\n\n pub fn this_portal_address(self) -> EthAddress {\n self.inputs.call_context.portal_contract_address\n }\n\n pub fn chain_id(self) -> Field {\n self.inputs.private_global_variables.chain_id\n }\n\n pub fn version(self) -> Field {\n self.inputs.private_global_variables.version\n }\n\n pub fn selector(self) -> FunctionSelector {\n self.inputs.call_context.function_selector\n }\n\n pub fn get_block_header(self, block_number: u32) -> BlockHeader {\n get_block_header(block_number, self)\n }\n\n pub fn finish(self) -> PrivateCircuitPublicInputs {\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n let encrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let encrypted_log_preimages_length = 0;\n let unencrypted_log_preimages_length = 0;\n\n let priv_circuit_pub_inputs = PrivateCircuitPublicInputs {\n call_context: self.inputs.call_context,\n args_hash: self.args_hash,\n return_values: self.return_values.storage,\n read_requests: self.read_requests.storage,\n new_note_hashes: self.new_note_hashes.storage,\n new_nullifiers: self.new_nullifiers.storage,\n private_call_stack_hashes: self.private_call_stack_hashes.storage,\n public_call_stack_hashes: 
self.public_call_stack_hashes.storage,\n new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,\n end_side_effect_counter: self.side_effect_counter,\n encrypted_logs_hash: encrypted_logs_hash,\n unencrypted_logs_hash: unencrypted_logs_hash,\n encrypted_log_preimages_length: encrypted_log_preimages_length,\n unencrypted_log_preimages_length: unencrypted_log_preimages_length,\n block_header: self.block_header,\n contract_deployment_data: self.inputs.contract_deployment_data,\n chain_id: self.inputs.private_global_variables.chain_id,\n version: self.inputs.private_global_variables.version,\n };\n priv_circuit_pub_inputs\n }\n\n pub fn push_read_request(&mut self, read_request: Field) {\n let side_effect = SideEffect {\n value: read_request,\n counter: self.side_effect_counter,\n };\n self.read_requests.push(side_effect);\n self.side_effect_counter = self.side_effect_counter + 1;\n }\n\n pub fn push_new_note_hash(&mut self, note_hash: Field) {\n let side_effect = SideEffect {\n value: note_hash,\n counter: self.side_effect_counter,\n };\n self.new_note_hashes.push(side_effect);\n self.side_effect_counter = self.side_effect_counter + 1;\n }\n\n pub fn push_new_nullifier(&mut self, nullifier: Field, nullified_commitment: Field) {\n let side_effect = SideEffectLinkedToNoteHash {\n value: nullifier,\n note_hash: nullified_commitment,\n counter: self.side_effect_counter,\n };\n self.new_nullifiers.push(side_effect);\n self.side_effect_counter = self.side_effect_counter + 1;\n }\n\n pub fn request_nullifier_secret_key(&mut self, account: AztecAddress) -> GrumpkinScalar {\n let key_pair = get_nullifier_key_pair(account);\n validate_nullifier_key_against_address(account, key_pair.public_key, key_pair.secret_key);\n // TODO: Add request to context.\n // self.context.push_nullifier_key_validation_request(public_key, secret_key);\n key_pair.secret_key\n }\n\n // docs:start:context_message_portal\n pub fn message_portal(&mut self, content: Field) \n // docs:end:context_message_portal\n {\n self.new_l2_to_l1_msgs.push(content);\n }\n\n // PrivateContextInputs must be temporarily passed in to prevent too many unknowns\n // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned\n // docs:start:context_consume_l1_to_l2_message\n // docs:start:consume_l1_to_l2_message\n pub fn consume_l1_to_l2_message(\n &mut self,\n msg_key: Field,\n content: Field,\n secret: Field\n ) \n // docs:end:context_consume_l1_to_l2_message\n {\n let nullifier = process_l1_to_l2_message(self.block_header.l1_to_l2_message_tree_root, self.this_address(), self.this_portal_address(), self.chain_id(), self.version(), msg_key, content, secret);\n\n // Push nullifier (and the \"commitment\" corresponding to this can be \"empty\")\n self.push_new_nullifier(nullifier, 0)\n }\n // docs:end:consume_l1_to_l2_message\n\n pub fn accumulate_encrypted_logs(&mut self, log: [Field; N]) {\n let _void1 = self.inputs;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn accumulate_unencrypted_logs(&mut self, log: T) {\n let _void1 = self.inputs;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn call_private_function(\n &mut self,\n contract_address: AztecAddress, \n function_selector: FunctionSelector, \n args: [Field; ARGS_COUNT]\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n 
self.call_private_function_with_packed_args(contract_address, function_selector, args_hash)\n }\n\n pub fn call_private_function_no_args(\n &mut self,\n contract_address: AztecAddress, \n function_selector: FunctionSelector, \n ) -> [Field; RETURN_VALUES_LENGTH] {\n self.call_private_function_with_packed_args(contract_address, function_selector, 0)\n }\n\n pub fn call_private_function_with_packed_args(\n &mut self,\n contract_address: AztecAddress,\n function_selector: FunctionSelector,\n args_hash: Field\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let fields = call_private_function_internal(\n contract_address,\n function_selector, \n args_hash,\n self.side_effect_counter,\n );\n let mut reader = Reader::new(fields);\n\n let item = PrivateCallStackItem {\n contract_address: AztecAddress::from_field(reader.read()),\n function_data: FunctionData {\n selector: FunctionSelector::from_field(reader.read()),\n is_internal: reader.read() as bool,\n is_private: reader.read() as bool,\n is_constructor: reader.read() as bool,\n },\n public_inputs: PrivateCircuitPublicInputs {\n call_context: CallContext {\n msg_sender : AztecAddress::from_field(reader.read()),\n storage_contract_address : AztecAddress::from_field(reader.read()),\n portal_contract_address : EthAddress::from_field(reader.read()),\n function_selector: FunctionSelector::from_field(reader.read()), // practically same as fields[1]\n is_delegate_call : reader.read() as bool,\n is_static_call : reader.read() as bool,\n is_contract_deployment: reader.read() as bool,\n start_side_effect_counter: reader.read() as u32,\n },\n args_hash: reader.read(),\n return_values: reader.read_array([0; RETURN_VALUES_LENGTH]), // +1\n read_requests: reader.read_struct_array(SideEffect::deserialise, [SideEffect::empty(); MAX_READ_REQUESTS_PER_CALL]),\n new_note_hashes: reader.read_struct_array(SideEffect::deserialise, [SideEffect::empty(); MAX_NEW_NOTE_HASHES_PER_CALL]),\n new_nullifiers: reader.read_struct_array(SideEffectLinkedToNoteHash::deserialise, [SideEffectLinkedToNoteHash::empty(); MAX_NEW_NULLIFIERS_PER_CALL]),\n private_call_stack_hashes: reader.read_array([0; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL]),\n public_call_stack_hashes: reader.read_array([0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL]),\n new_l2_to_l1_msgs: reader.read_array([0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL]),\n end_side_effect_counter: reader.read() as u32,\n encrypted_logs_hash: reader.read_array([0; NUM_FIELDS_PER_SHA256]),\n unencrypted_logs_hash: reader.read_array([0; NUM_FIELDS_PER_SHA256]),\n encrypted_log_preimages_length: reader.read(),\n unencrypted_log_preimages_length: reader.read(),\n block_header: BlockHeader{\n // Must match order in `private_circuit_public_inputs.hpp`\n note_hash_tree_root : reader.read(),\n nullifier_tree_root : reader.read(),\n contract_tree_root : reader.read(),\n l1_to_l2_message_tree_root : reader.read(),\n archive_root : reader.read(),\n public_data_tree_root: reader.read(),\n global_variables_hash: reader.read(),\n },\n contract_deployment_data: ContractDeploymentData {\n deployer_public_key: GrumpkinPoint {\n x: reader.read(), \n y: reader.read()\n },\n constructor_vk_hash : reader.read(),\n function_tree_root : reader.read(),\n contract_address_salt : reader.read(),\n portal_contract_address : EthAddress::from_field(reader.read()),\n },\n chain_id: reader.read(),\n version: reader.read(),\n },\n is_execution_request: reader.read() as bool,\n };\n\n reader.finish();\n\n assert_eq(item.public_inputs.call_context.start_side_effect_counter, 
self.side_effect_counter);\n self.side_effect_counter = item.public_inputs.end_side_effect_counter + 1;\n \n assert(contract_address.eq(item.contract_address));\n assert(function_selector.eq(item.function_data.selector));\n\n assert(args_hash == item.public_inputs.args_hash);\n\n assert(item.is_execution_request == false);\n\n // Assert that the call context of the enqueued call generated by the oracle matches our request.\n // We are issuing a regular call which is not delegate, static, or deployment. We also constrain\n // the msg_sender in the nested call to be equal to our address, and the execution context address\n // for the nested call to be equal to the address we actually called.\n assert(item.public_inputs.call_context.is_delegate_call == false);\n assert(item.public_inputs.call_context.is_static_call == false);\n assert(item.public_inputs.call_context.is_contract_deployment == false);\n assert(item.public_inputs.call_context.msg_sender.eq(self.inputs.call_context.storage_contract_address));\n assert(item.public_inputs.call_context.storage_contract_address.eq(contract_address));\n\n self.private_call_stack_hashes.push(item.hash());\n\n item.public_inputs.return_values\n }\n\n pub fn call_public_function(\n &mut self,\n contract_address: AztecAddress, \n function_selector: FunctionSelector, \n args: [Field; ARGS_COUNT]\n ) {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n self.call_public_function_with_packed_args(contract_address, function_selector, args_hash)\n }\n\n pub fn call_public_function_no_args(\n &mut self,\n contract_address: AztecAddress, \n function_selector: FunctionSelector,\n ) {\n self.call_public_function_with_packed_args(contract_address, function_selector, 0)\n }\n\n pub fn call_public_function_with_packed_args(\n &mut self,\n contract_address: AztecAddress,\n function_selector: FunctionSelector,\n args_hash: Field\n ) {\n let fields = enqueue_public_function_call_internal(\n contract_address, \n function_selector, \n args_hash,\n self.side_effect_counter\n );\n\n let mut reader = Reader::new(fields);\n\n let item = PublicCallStackItem {\n contract_address: AztecAddress::from_field(reader.read()),\n function_data: FunctionData {\n selector: FunctionSelector::from_field(reader.read()),\n is_internal: reader.read() as bool,\n is_private: reader.read() as bool,\n is_constructor: reader.read() as bool,\n },\n public_inputs: PublicCircuitPublicInputs {\n call_context: CallContext {\n msg_sender : AztecAddress::from_field(reader.read()),\n storage_contract_address : AztecAddress::from_field(reader.read()),\n portal_contract_address : EthAddress::from_field(reader.read()),\n function_selector: FunctionSelector::from_field(reader.read()), // practically same as fields[1]\n is_delegate_call : reader.read() as bool,\n is_static_call : reader.read() as bool,\n is_contract_deployment: reader.read() as bool,\n start_side_effect_counter: reader.read() as u32,\n },\n args_hash: reader.read(),\n return_values: [0; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [StorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_reads: [StorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack_hashes: [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_note_hashes: [SideEffect::empty(); MAX_NEW_NOTE_HASHES_PER_CALL],\n new_nullifiers: [SideEffectLinkedToNoteHash::empty(); MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs:[0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n 
unencrypted_logs_hash:[0; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: 0,\n block_header: BlockHeader::empty(),\n prover_address: AztecAddress::zero(),\n },\n is_execution_request: true,\n };\n reader.finish();\n\n assert(contract_address.eq(item.contract_address));\n assert(function_selector.eq(item.function_data.selector));\n\n assert_eq(item.public_inputs.call_context.start_side_effect_counter, self.side_effect_counter);\n // We increment the sideffect counter by one, to account for the call itself being a side effect.\n self.side_effect_counter = self.side_effect_counter + 1;\n \n assert(args_hash == item.public_inputs.args_hash);\n\n // Assert that the call context of the enqueued call generated by the oracle matches our request.\n // We are issuing a regular call which is not delegate, static, or deployment. We also constrain\n // the msg_sender in the nested call to be equal to our address, and the execution context address\n // for the nested call to be equal to the address we actually called.\n assert(item.public_inputs.call_context.is_delegate_call == false);\n assert(item.public_inputs.call_context.is_static_call == false);\n assert(item.public_inputs.call_context.is_contract_deployment == false);\n assert(item.public_inputs.call_context.msg_sender.eq(self.inputs.call_context.storage_contract_address));\n assert(item.public_inputs.call_context.storage_contract_address.eq(contract_address));\n\n self.public_call_stack_hashes.push(item.hash());\n }\n}\n\nstruct PublicContext {\n inputs: PublicContextInputs,\n side_effect_counter: u32,\n\n args_hash : Field,\n return_values : BoundedVec,\n\n contract_storage_update_requests: BoundedVec,\n contract_storage_reads: BoundedVec,\n public_call_stack_hashes: BoundedVec,\n\n new_note_hashes: BoundedVec,\n new_nullifiers: BoundedVec,\n\n new_l2_to_l1_msgs: BoundedVec,\n\n unencrypted_logs_hash: BoundedVec,\n unencrypted_logs_preimages_length: Field,\n\n block_header: BlockHeader,\n prover_address: AztecAddress,\n}\n\nimpl PublicContext {\n pub fn new(inputs: PublicContextInputs, args_hash: Field) -> PublicContext {\n let empty_storage_read = StorageRead::empty();\n let empty_storage_update = StorageUpdateRequest::empty();\n PublicContext {\n inputs: inputs,\n side_effect_counter: inputs.call_context.start_side_effect_counter,\n\n args_hash: args_hash,\n return_values: BoundedVec::new(0),\n\n contract_storage_update_requests: BoundedVec::new(empty_storage_update),\n contract_storage_reads: BoundedVec::new(empty_storage_read),\n public_call_stack_hashes: BoundedVec::new(0),\n\n new_note_hashes: BoundedVec::new(SideEffect::empty()),\n new_nullifiers: BoundedVec::new(SideEffectLinkedToNoteHash::empty()),\n\n new_l2_to_l1_msgs: BoundedVec::new(0),\n\n \n unencrypted_logs_hash: BoundedVec::new(0),\n unencrypted_logs_preimages_length: 0,\n\n block_header: inputs.block_header,\n prover_address: AztecAddress::zero(),\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec::new(),\n // unencrypted_logs_preimages: Vec::new(),\n }\n }\n\n pub fn msg_sender(self) -> AztecAddress {\n self.inputs.call_context.msg_sender\n }\n\n pub fn this_address(self) -> AztecAddress {\n self.inputs.call_context.storage_contract_address\n }\n\n pub fn this_portal_address(self) -> EthAddress {\n self.inputs.call_context.portal_contract_address\n }\n\n pub fn chain_id(self) -> Field {\n self.inputs.public_global_variables.chain_id\n }\n\n pub fn version(self) -> Field {\n 
self.inputs.public_global_variables.version\n }\n\n pub fn selector(self) -> FunctionSelector {\n self.inputs.call_context.function_selector\n }\n\n pub fn block_number(self) -> Field {\n self.inputs.public_global_variables.block_number\n }\n\n pub fn timestamp(self) -> Field {\n self.inputs.public_global_variables.timestamp\n }\n\n pub fn finish(self) -> PublicCircuitPublicInputs {\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let unencrypted_log_preimages_length = 0;\n\n\n // Compute the public call stack hashes\n let pub_circuit_pub_inputs = PublicCircuitPublicInputs {\n call_context: self.inputs.call_context, // Done\n args_hash: self.args_hash, // Done\n contract_storage_update_requests: self.contract_storage_update_requests.storage,\n contract_storage_reads: self.contract_storage_reads.storage,\n return_values: self.return_values.storage,\n new_note_hashes: self.new_note_hashes.storage,\n new_nullifiers: self.new_nullifiers.storage,\n public_call_stack_hashes: self.public_call_stack_hashes.storage,\n new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,\n unencrypted_logs_hash: unencrypted_logs_hash,\n unencrypted_log_preimages_length: unencrypted_log_preimages_length,\n block_header: self.inputs.block_header,\n prover_address: self.prover_address,\n };\n pub_circuit_pub_inputs\n }\n\n pub fn push_new_note_hash(&mut self, note_hash: Field) {\n let side_effect = SideEffect {\n value: note_hash,\n counter: self.side_effect_counter\n };\n self.new_note_hashes.push(side_effect);\n self.side_effect_counter = self.side_effect_counter + 1;\n }\n\n pub fn push_new_nullifier(&mut self, nullifier: Field, _nullified_commitment: Field) {\n let side_effect = SideEffectLinkedToNoteHash {\n value: nullifier,\n note_hash: 0, // cannot nullify pending notes in public context\n counter: self.side_effect_counter\n };\n self.new_nullifiers.push(side_effect);\n self.side_effect_counter = self.side_effect_counter + 1;\n }\n\n pub fn message_portal(&mut self, content: Field) {\n self.new_l2_to_l1_msgs.push(content);\n }\n\n // PrivateContextInputs must be temporarily passed in to prevent too many unknowns\n // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned\n pub fn consume_l1_to_l2_message(&mut self, msg_key: Field, content: Field, secret: Field) {\n let this = (*self).this_address();\n let nullifier = process_l1_to_l2_message(self.block_header.l1_to_l2_message_tree_root, this, self.this_portal_address(), self.chain_id(), self.version(), msg_key, content, secret);\n\n // Push nullifier (and the \"commitment\" corresponding to this can be \"empty\")\n self.push_new_nullifier(nullifier, 0)\n }\n\n pub fn accumulate_encrypted_logs(&mut self, log: [Field; N]) {\n let _void1 = self;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn accumulate_unencrypted_logs(&mut self, log: T) {\n let _void1 = self;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn call_public_function(\n _self: Self,\n contract_address: AztecAddress, \n function_selector: FunctionSelector,\n args: [Field; ARGS_COUNT],\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n call_public_function_internal(\n contract_address, \n function_selector, \n args_hash,\n )\n }\n\n pub fn 
call_public_function_no_args(\n _self: Self,\n contract_address: AztecAddress, \n function_selector: FunctionSelector,\n ) -> [Field; RETURN_VALUES_LENGTH] {\n call_public_function_internal(\n contract_address, \n function_selector, \n 0,\n )\n }\n\n}\n\nstruct Context {\n private: Option<&mut PrivateContext>,\n public: Option<&mut PublicContext>,\n}\n\nimpl Context {\n pub fn private(context: &mut PrivateContext) -> Context {\n Context {\n private: Option::some(context),\n public: Option::none()\n }\n }\n\n pub fn public(context: &mut PublicContext) -> Context {\n Context {\n public: Option::some(context),\n private: Option::none()\n }\n }\n\n pub fn none() -> Context {\n Context {\n public: Option::none(),\n private: Option::none()\n }\n }\n}\n", + "source": "use crate::{\n abi::{\n PrivateContextInputs,\n PublicContextInputs,\n },\n key::nullifier_key::validate_nullifier_key_against_address,\n messaging::process_l1_to_l2_message,\n oracle::{\n arguments,\n call_private_function::call_private_function_internal,\n public_call::call_public_function_internal,\n enqueue_public_function_call::enqueue_public_function_call_internal,\n context::get_portal_address,\n get_block_header::get_block_header,\n nullifier_key::get_nullifier_key_pair,\n },\n types::vec::BoundedVec,\n utils::Reader,\n};\nuse dep::protocol_types::{\n abis::{\n block_header::BlockHeader,\n call_context::CallContext,\n function_data::FunctionData,\n function_selector::FunctionSelector,\n private_circuit_public_inputs::PrivateCircuitPublicInputs,\n public_circuit_public_inputs::PublicCircuitPublicInputs,\n call_stack_item::PrivateCallStackItem,\n call_stack_item::PublicCallStackItem,\n side_effect::{SideEffect, SideEffectLinkedToNoteHash},\n },\n address::{\n AztecAddress,\n EthAddress,\n },\n constants::{\n MAX_NEW_NOTE_HASHES_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_READ_REQUESTS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n RETURN_VALUES_LENGTH,\n },\n contrakt::{\n deployment_data::ContractDeploymentData,\n storage_read::StorageRead,\n storage_update_request::StorageUpdateRequest,\n },\n hash::hash_args,\n grumpkin_point::GrumpkinPoint,\n};\nuse dep::std::{\n grumpkin_scalar::GrumpkinScalar,\n option::Option,\n};\n\n// TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n// use dep::std::collections::vec::Vec;\n\n// When finished, one can call .finish() to convert back to the abi\nstruct PrivateContext {\n // docs:start:private-context\n inputs: PrivateContextInputs,\n side_effect_counter: u32,\n\n args_hash : Field,\n return_values : BoundedVec,\n\n read_requests: BoundedVec,\n\n new_note_hashes: BoundedVec,\n new_nullifiers: BoundedVec,\n\n private_call_stack_hashes : BoundedVec,\n public_call_stack_hashes : BoundedVec,\n new_l2_to_l1_msgs : BoundedVec,\n // docs:end:private-context\n\n block_header: BlockHeader,\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec,\n // unencrypted_logs_preimages: Vec,\n}\n\nimpl PrivateContext {\n pub fn new(inputs: PrivateContextInputs, args_hash: Field) -> PrivateContext {\n PrivateContext {\n inputs: inputs,\n side_effect_counter: inputs.call_context.start_side_effect_counter,\n\n args_hash: args_hash,\n return_values: BoundedVec::new();\n call_public_function_internal(\n contract_address, \n function_selector, 
\n args_hash,\n )\n }\n\n pub fn call_public_function_no_args(\n _self: Self,\n contract_address: AztecAddress, \n function_selector: FunctionSelector,\n ) -> [Field; RETURN_VALUES_LENGTH] {\n call_public_function_internal(\n contract_address, \n function_selector, \n 0,\n )\n }\n\n}\n\nstruct Context {\n private: Option<&mut PrivateContext>,\n public: Option<&mut PublicContext>,\n}\n\nimpl Context {\n pub fn private(context: &mut PrivateContext) -> Context {\n Context {\n private: Option::some(context),\n public: Option::none()\n }\n }\n\n pub fn public(context: &mut PublicContext) -> Context {\n Context {\n public: Option::some(context),\n private: Option::none()\n }\n }\n\n pub fn none() -> Context {\n Context {\n public: Option::none(),\n private: Option::none()\n }\n }\n}\n", "path": "/home/santiago/Projects/aztec3-packages/aztec-nr/aztec/src/context.nr" }, "43": { @@ -3468,7 +3468,7 @@ "path": "/home/santiago/Projects/aztec3-packages/aztec-nr/aztec/src/note/utils.nr" }, "94": { - "source": "struct BoundedVec {\n storage: [T; MaxLen],\n len: Field,\n}\n\nimpl BoundedVec {\n pub fn new(initial_value: T) -> Self {\n BoundedVec { storage: [initial_value; MaxLen], len: 0 }\n }\n\n pub fn get(mut self: Self, index: Field) -> T {\n assert(index as u64 < self.len as u64);\n self.storage[index]\n }\n\n pub fn get_unchecked(mut self: Self, index: Field) -> T {\n self.storage[index]\n }\n\n pub fn push(&mut self, elem: T) {\n assert(self.len as u64 < MaxLen as u64);\n\n self.storage[self.len] = elem;\n self.len += 1;\n }\n\n pub fn push_array(&mut self, array: [T; Len]) {\n let newLen = self.len + array.len();\n assert(newLen as u64 <= MaxLen as u64);\n for i in 0..array.len() {\n self.storage[self.len + i] = array[i];\n }\n self.len = newLen;\n }\n\n pub fn pop(&mut self) -> T {\n assert(self.len as u64 > 0);\n\n let elem = self.storage[self.len - 1];\n self.len -= 1;\n elem\n }\n\n pub fn any(self, predicate: fn[Env](T) -> bool) -> bool {\n let mut ret = false;\n let mut exceeded_len = false;\n for i in 0..MaxLen {\n exceeded_len |= i == self.len;\n if (!exceeded_len) {\n ret |= predicate(self.storage[i]);\n }\n }\n ret\n }\n}\n\n#[test]\nfn test_vec_push_pop() {\n let mut vec: BoundedVec = BoundedVec::new(0);\n assert(vec.len == 0);\n vec.push(2);\n assert(vec.len == 1);\n vec.push(4);\n assert(vec.len == 2);\n vec.push(6);\n assert(vec.len == 3);\n let x = vec.pop();\n assert(x == 6);\n assert(vec.len == 2);\n assert(vec.get(0) == 2);\n assert(vec.get(1) == 4);\n}\n\n#[test]\nfn test_vec_push_array() {\n let mut vec: BoundedVec = BoundedVec::new(0);\n vec.push_array([2, 4]);\n assert(vec.len == 2);\n assert(vec.get(0) == 2);\n assert(vec.get(1) == 4);\n}\n\n#[test(should_fail)]\nfn test_vec_get_out_of_bound() {\n let mut vec: BoundedVec = BoundedVec::new(0);\n vec.push_array([2, 4]);\n let _x = vec.get(2);\n}\n\n#[test(should_fail)]\nfn test_vec_get_not_declared() {\n let mut vec: BoundedVec = BoundedVec::new(0);\n vec.push_array([2]);\n let _x = vec.get(1);\n}\n\n#[test(should_fail)]\nfn test_vec_get_uninitialized() {\n let mut vec: BoundedVec = BoundedVec::new(0);\n let _x = vec.get(0);\n}\n\n#[test(should_fail)]\nfn test_vec_push_overflow() {\n let mut vec: BoundedVec = BoundedVec::new(0);\n vec.push(1);\n vec.push(2);\n}\n\n#[test]\nfn test_vec_any() {\n let mut vec: BoundedVec = BoundedVec::new(0);\n vec.push_array([2, 4, 6]);\n assert(vec.any(|v| v == 2) == true);\n assert(vec.any(|v| v == 4) == true);\n assert(vec.any(|v| v == 6) == true);\n assert(vec.any(|v| v == 3) == 
false);\n}\n\n#[test]\nfn test_vec_any_not_default() {\n let default_value = 1;\n let mut vec: BoundedVec = BoundedVec::new(default_value);\n vec.push_array([2, 4]);\n assert(vec.any(|v| v == default_value) == false);\n}\n", + "source": "struct BoundedVec {\n storage: [T; MaxLen],\n len: Field,\n}\n\nimpl BoundedVec {\n pub fn new(initial_value: T) -> Self {\n BoundedVec { storage: [initial_value; MaxLen], len: 0 }\n }\n\n pub fn get(mut self: Self, index: Field) -> T {\n assert(index as u64 < self.len as u64);\n self.storage[index]\n }\n\n pub fn get_unchecked(mut self: Self, index: Field) -> T {\n self.storage[index]\n }\n\n pub fn push(&mut self, elem: T) {\n assert(self.len as u64 < MaxLen as u64);\n\n self.storage[self.len] = elem;\n self.len += 1;\n }\n\n pub fn push_array(&mut self, array: [T; Len]) {\n let newLen = self.len + array.len();\n assert(newLen as u64 <= MaxLen as u64);\n for i in 0..array.len() {\n self.storage[self.len + i] = array[i];\n }\n self.len = newLen;\n }\n\n pub fn pop(&mut self) -> T {\n assert(self.len as u64 > 0);\n\n let elem = self.storage[self.len - 1];\n self.len -= 1;\n elem\n }\n\n pub fn any(self, predicate: fn[Env](T) -> bool) -> bool {\n let mut ret = false;\n let mut exceeded_len = false;\n for i in 0..MaxLen {\n exceeded_len |= i == self.len;\n if (!exceeded_len) {\n ret |= predicate(self.storage[i]);\n }\n }\n ret\n }\n}\n\n#[test]\nfn test_vec_push_pop() {\n let mut vec: BoundedVec = BoundedVec::new();\n}\n", "path": "/home/santiago/Projects/aztec3-packages/aztec-nr/aztec/src/types/vec.nr" }, "97": { @@ -3480,7 +3480,7 @@ "path": "/home/santiago/Projects/aztec3-packages/noir-protocol-circuits/src/crates/types/src/constants.nr" }, "111": { - "source": "use crate::address::{AztecAddress, EthAddress};\nuse crate::mocked::VerificationKey;\nuse crate::abis::function_selector::FunctionSelector;\nuse crate::abis::function_leaf_preimage::FunctionLeafPreimage;\nuse crate::abis::new_contract_data::NewContractData as ContractLeafPreimage;\nuse crate::abis::function_data::FunctionData;\nuse crate::abis::side_effect::{SideEffect};\nuse crate::utils::uint256::U256;\nuse crate::utils::bounded_vec::BoundedVec;\nuse crate::constants::{\n ARGS_HASH_CHUNK_COUNT,\n ARGS_HASH_CHUNK_LENGTH,\n CONTRACT_TREE_HEIGHT, \n FUNCTION_TREE_HEIGHT, \n NOTE_HASH_TREE_HEIGHT,\n NUM_FIELDS_PER_SHA256,\n GENERATOR_INDEX__SILOED_NOTE_HASH,\n GENERATOR_INDEX__OUTER_NULLIFIER,\n GENERATOR_INDEX__VK,\n GENERATOR_INDEX__CONSTRUCTOR,\n GENERATOR_INDEX__PARTIAL_ADDRESS,\n GENERATOR_INDEX__CONTRACT_ADDRESS,\n GENERATOR_INDEX__NOTE_HASH_NONCE,\n GENERATOR_INDEX__UNIQUE_NOTE_HASH,\n GENERATOR_INDEX__FUNCTION_ARGS,\n};\n\nuse dep::std::hash::{pedersen_hash_with_separator, sha256};\n\npub fn sha256_to_field(bytes_to_hash: [u8; N]) -> Field {\n let sha256_hashed = sha256(bytes_to_hash);\n\n // Convert it to a field element\n let mut v = 1;\n let mut high = 0 as Field;\n let mut low = 0 as Field;\n\n for i in 0..16 {\n high = high + (sha256_hashed[15 - i] as Field) * v;\n low = low + (sha256_hashed[16 + 15 - i] as Field) * v;\n v = v * 256;\n }\n\n // Abuse that a % p + b % p = (a + b) % p and that low < p\n let hash_in_a_field = low + high * v;\n\n hash_in_a_field\n}\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = 
[0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n\n// Checks that `value` is a member of a merkle tree with root `root` at position `index`\n// The witness being the `sibling_path`\npub fn assert_check_membership(value: Field, index: Field, sibling_path: [Field; N], root: Field) {\n let calculated_root = root_from_sibling_path(value, index, sibling_path);\n assert(calculated_root == root, \"membership check failed\");\n}\n\n// Calculate the Merkle tree root from the sibling path and leaf.\n//\n// The leaf is hashed with its sibling, and then the result is hashed\n// with the next sibling etc in the path. The last hash is the root.\n//\n// TODO(David/Someone): The cpp code is using a uint256, whereas its\n// TODO a bit simpler in Noir to just have a bit array.\n// TODO: I'd generally like to avoid u256 for algorithms like \n// this because it means we never even need to consider cases where \n// the index is greater than p.\npub fn root_from_sibling_path(leaf: Field, leaf_index: Field, sibling_path: [Field; N]) -> Field {\n let mut node = leaf;\n let indices = leaf_index.to_le_bits(N);\n\n for i in 0..N {\n let (hash_left, hash_right) = if indices[i] == 1 {\n (sibling_path[i], node)\n } else {\n (node, sibling_path[i])\n };\n node = merkle_hash(hash_left, hash_right);\n }\n node\n}\n\n// Calculate the function tree root from the sibling path and leaf preimage.\n//\n// TODO: The cpp code passes in components of the FunctionLeafPreimage and then \n// builds it up. 
We should build it up and then pass the leaf preimage as a parameter.\n// We can then choose to have a general method that takes in anything hashable\n// and deduplicate the logic in `contract_tree_root_from_siblings`\npub fn function_tree_root_from_siblings(\n selector: FunctionSelector,\n is_internal: bool,\n is_private: bool,\n vk_hash: Field,\n acir_hash: Field,\n function_leaf_index: Field,\n function_leaf_sibling_path: [Field; FUNCTION_TREE_HEIGHT]\n) -> Field {\n let function_leaf_preimage = FunctionLeafPreimage { selector, is_internal, is_private, vk_hash, acir_hash };\n\n let function_leaf = function_leaf_preimage.hash();\n\n let function_tree_root = root_from_sibling_path(function_leaf, function_leaf_index, function_leaf_sibling_path);\n\n function_tree_root\n}\n\n// Calculate the contract tree root from the sibling path and leaf preimage.\npub fn contract_tree_root_from_siblings(\n function_tree_root: Field,\n storage_contract_address: AztecAddress,\n portal_contract_address: EthAddress,\n contract_leaf_index: Field,\n contract_leaf_sibling_path: [Field; CONTRACT_TREE_HEIGHT]\n) -> Field {\n //TODO(Kev): if we use shorthand syntax here, we get an error as expected,\n // since variable name is `storage_contract_address` but the span is incorrect.\n let contract_leaf_preimage = ContractLeafPreimage { contract_address: storage_contract_address, portal_contract_address, function_tree_root };\n\n let contract_leaf = contract_leaf_preimage.hash();\n\n let computed_contract_tree_root = root_from_sibling_path(contract_leaf, contract_leaf_index, contract_leaf_sibling_path);\n\n computed_contract_tree_root\n}\n\npub fn read_request_root_from_siblings(\n read_request: Field,\n leaf_index: Field,\n sibling_path: [Field; NOTE_HASH_TREE_HEIGHT]\n) -> Field {\n root_from_sibling_path(read_request, leaf_index, sibling_path)\n}\n\npub fn silo_note_hash(address: AztecAddress, inner_commitment: Field) -> Field {\n pedersen_hash(\n [\n address.to_field(),\n inner_commitment\n ],\n GENERATOR_INDEX__SILOED_NOTE_HASH\n )\n}\n\npub fn silo_nullifier(address: AztecAddress, nullifier: Field) -> Field {\n pedersen_hash(\n [\n address.to_field(),\n nullifier\n ],\n GENERATOR_INDEX__OUTER_NULLIFIER\n )\n}\n\nfn merkle_hash(left: Field, right: Field) -> Field {\n pedersen_hash([left, right], 0)\n}\n\npub fn stdlib_recursion_verification_key_compress_native_vk(_vk: VerificationKey) -> Field {\n // Original cpp code\n // stdlib::recursion::verification_key::compress_native(private_call.vk, GeneratorIndex::VK);\n // The above cpp method is only ever called on verification key, so it has been special cased here\n let _hash_index = GENERATOR_INDEX__VK;\n 0\n}\n\n// TODO CPP uses blake2s for this\npub fn compute_new_contract_address_hash(new_contract_address: AztecAddress) -> Field {\n dep::std::hash::pedersen_hash([new_contract_address.to_field()])\n}\n\npub fn compute_l2_to_l1_hash(\n contract_address: AztecAddress,\n rollup_version_id: Field,\n portal_contract_address: EthAddress,\n chain_id: Field,\n content: Field\n) -> Field {\n let mut bytes: BoundedVec = BoundedVec::new(0);\n\n let inputs = [\n contract_address.to_field(), rollup_version_id, portal_contract_address.to_field(), chain_id, content\n ];\n for i in 0..inputs.len() {\n // TODO are bytes be in fr.to_buffer() ?\n let item_bytes = inputs[i].to_be_bytes(32);\n for j in 0..32 {\n bytes.push(item_bytes[j]);\n }\n }\n\n sha256_to_field(bytes.storage)\n}\n\npub fn compute_constructor_hash(\n function_data: FunctionData,\n args_hash: Field,\n 
constructor_vk_hash: Field\n) -> Field {\n let function_data_hash = function_data.hash();\n\n pedersen_hash(\n [\n function_data_hash,\n args_hash,\n constructor_vk_hash\n ],\n GENERATOR_INDEX__CONSTRUCTOR\n )\n}\n\n// Computes sha256 hash of 2 input hashes stored in 4 fields.\n// \n// This method is bn254 specific. Two fields is needed in order to \n// encode the sha256 output. It can be abstracted away with any 4-2 hash function.\n//\n// TODO(Jan and David): This is used for the encrypted_log hashes.\n// Can we check to see if we can just use hash_to_field or pedersen_compress here?\n//\n// Returning a Field would be desirable because then this can be replaced with \n// poseidon without changing the rest of the code\n//\npub fn accumulate_sha256(input: [U128; 4]) -> [Field; NUM_FIELDS_PER_SHA256] {\n // This is a note about the cpp code, since it takes an array of Fields\n // instead of a U128.\n // 4 Field elements when converted to bytes will usually \n // occupy 4 * 32 = 128 bytes.\n // However, this function is making the assumption that each Field \n // only occupies 128 bits.\n //\n // TODO(David): This does not seem to be getting guaranteed anywhere in the code?\n //\n // Concatenate 4 u128 bit integers into a byte array.\n let mut hash_input_flattened = [0; 64];\n for offset in 0..4 {\n let input_as_bytes = input[offset].to_be_bytes();\n for byte_index in 0..16 {\n hash_input_flattened[offset * 16 + byte_index] = input_as_bytes[byte_index];\n }\n }\n\n let sha_digest = dep::std::hash::sha256(hash_input_flattened);\n\n U256::from_bytes32(sha_digest).to_u128_limbs()\n}\n\npub fn compute_logs_hash(\n previous_log_hash: [Field; 2],\n current_log_hash: [Field; 2]\n) -> [Field; NUM_FIELDS_PER_SHA256] {\n accumulate_sha256(\n [\n U128::from_integer(previous_log_hash[0]),\n U128::from_integer(previous_log_hash[1]),\n U128::from_integer(current_log_hash[0]),\n U128::from_integer(current_log_hash[1])\n ]\n )\n}\n\npub fn compute_note_hash_nonce(first_nullifier: Field, commitment_index: Field) -> Field {\n pedersen_hash(\n [\n first_nullifier,\n commitment_index\n ],\n GENERATOR_INDEX__NOTE_HASH_NONCE\n )\n}\n\npub fn compute_unique_siloed_note_hash(nonce: Field, siloed_note_hash: Field) -> Field {\n pedersen_hash(\n [\n nonce,\n siloed_note_hash\n ],\n GENERATOR_INDEX__UNIQUE_NOTE_HASH\n )\n}\n\npub fn compute_unique_siloed_note_hashes(\n first_nullifier: Field,\n siloed_note_hashes: [SideEffect; N]\n) -> [SideEffect; N] {\n let mut unique_siloed_note_hashes = [SideEffect::empty(); N];\n for i in 0..N {\n let siloed_note_hash = siloed_note_hashes[i];\n if siloed_note_hash.value != 0 {\n let nonce = compute_note_hash_nonce(first_nullifier, i);\n unique_siloed_note_hashes[i] = SideEffect {\n value: compute_unique_siloed_note_hash(nonce, siloed_note_hash.value),\n counter: siloed_note_hash.counter\n };\n }\n }\n unique_siloed_note_hashes\n}\n\npub fn pedersen_hash(inputs: [Field; N], hash_index: u32) -> Field {\n dep::std::hash::pedersen_hash_with_separator(inputs, hash_index)\n}\n", + "source": "use crate::address::{AztecAddress, EthAddress};\nuse crate::mocked::VerificationKey;\nuse crate::abis::function_selector::FunctionSelector;\nuse crate::abis::function_leaf_preimage::FunctionLeafPreimage;\nuse crate::abis::new_contract_data::NewContractData as ContractLeafPreimage;\nuse crate::abis::function_data::FunctionData;\nuse crate::abis::side_effect::{SideEffect};\nuse crate::utils::uint256::U256;\nuse crate::utils::bounded_vec::BoundedVec;\nuse crate::constants::{\n ARGS_HASH_CHUNK_COUNT,\n 
ARGS_HASH_CHUNK_LENGTH,\n CONTRACT_TREE_HEIGHT,\n FUNCTION_TREE_HEIGHT,\n NOTE_HASH_TREE_HEIGHT,\n NUM_FIELDS_PER_SHA256,\n GENERATOR_INDEX__SILOED_NOTE_HASH,\n GENERATOR_INDEX__OUTER_NULLIFIER,\n GENERATOR_INDEX__VK,\n GENERATOR_INDEX__CONSTRUCTOR,\n GENERATOR_INDEX__PARTIAL_ADDRESS,\n GENERATOR_INDEX__CONTRACT_ADDRESS,\n GENERATOR_INDEX__NOTE_HASH_NONCE,\n GENERATOR_INDEX__UNIQUE_NOTE_HASH,\n GENERATOR_INDEX__FUNCTION_ARGS,\n};\n\nuse dep::std::hash::{pedersen_hash_with_separator, sha256};\n\npub fn sha256_to_field<N>(bytes_to_hash: [u8; N]) -> Field {\n let sha256_hashed = sha256(bytes_to_hash);\n\n // Convert the digest to a field element\n let mut v = 1;\n let mut high = 0 as Field;\n let mut low = 0 as Field;\n\n for i in 0..16 {\n high = high + (sha256_hashed[15 - i] as Field) * v;\n low = low + (sha256_hashed[16 + 15 - i] as Field) * v;\n v = v * 256;\n }\n\n // Abuse the fact that a % p + b % p = (a + b) % p, and that low < p\n let hash_in_a_field = low + high * v;\n\n hash_in_a_field\n}\n\npub fn hash_args<N>(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n\n// Checks that `value` is a member of a Merkle tree with root `root` at position `index`,\n// the witness being the `sibling_path`.\npub fn assert_check_membership<N>(value: Field, index: Field, sibling_path: [Field; N], root: Field) {\n let calculated_root = root_from_sibling_path(value, index, sibling_path);\n assert(calculated_root == root, \"membership check failed\");\n}\n\n// Calculate the Merkle tree root from the sibling path and leaf.\n//\n// The leaf is hashed with its sibling, and then the result is hashed\n// with the next sibling etc in the path. The last hash is the root.\n//\n// TODO(David/Someone): The cpp code is using a uint256, whereas it's\n// a bit simpler in Noir to just have a bit array.\n// TODO: I'd generally like to avoid u256 for algorithms like\n// this, because then we never even need to consider cases where\n// the index is greater than p.\npub fn root_from_sibling_path<N>(leaf: Field, leaf_index: Field, sibling_path: [Field; N]) -> Field {\n let mut node = leaf;\n let indices = leaf_index.to_le_bits(N);\n\n for i in 0..N {\n let (hash_left, hash_right) = if indices[i] == 1 {\n (sibling_path[i], node)\n } else {\n (node, sibling_path[i])\n };\n node = merkle_hash(hash_left, hash_right);\n }\n node\n}\n\n// Calculate the function tree root from the sibling path and leaf preimage.\n//\n// TODO: The cpp code passes in components of the FunctionLeafPreimage and then\n// builds it up.
We should build it up and then pass the leaf preimage as a parameter.\n// We can then choose to have a general method that takes in anything hashable\n// and deduplicate the logic in `contract_tree_root_from_siblings`\npub fn function_tree_root_from_siblings(\n selector: FunctionSelector,\n is_internal: bool,\n is_private: bool,\n vk_hash: Field,\n acir_hash: Field,\n function_leaf_index: Field,\n function_leaf_sibling_path: [Field; FUNCTION_TREE_HEIGHT]\n) -> Field {\n let function_leaf_preimage = FunctionLeafPreimage { selector, is_internal, is_private, vk_hash, acir_hash };\n\n let function_leaf = function_leaf_preimage.hash();\n\n let function_tree_root = root_from_sibling_path(function_leaf, function_leaf_index, function_leaf_sibling_path);\n\n function_tree_root\n}\n\n// Calculate the contract tree root from the sibling path and leaf preimage.\npub fn contract_tree_root_from_siblings(\n function_tree_root: Field,\n storage_contract_address: AztecAddress,\n portal_contract_address: EthAddress,\n contract_leaf_index: Field,\n contract_leaf_sibling_path: [Field; CONTRACT_TREE_HEIGHT]\n) -> Field {\n // TODO(Kev): if we use shorthand syntax here, we get an error as expected,\n // since the variable name is `storage_contract_address` but the span is incorrect.\n let contract_leaf_preimage = ContractLeafPreimage { contract_address: storage_contract_address, portal_contract_address, function_tree_root };\n\n let contract_leaf = contract_leaf_preimage.hash();\n\n let computed_contract_tree_root = root_from_sibling_path(contract_leaf, contract_leaf_index, contract_leaf_sibling_path);\n\n computed_contract_tree_root\n}\n\npub fn read_request_root_from_siblings(\n read_request: Field,\n leaf_index: Field,\n sibling_path: [Field; NOTE_HASH_TREE_HEIGHT]\n) -> Field {\n root_from_sibling_path(read_request, leaf_index, sibling_path)\n}\n\npub fn silo_note_hash(address: AztecAddress, inner_commitment: Field) -> Field {\n pedersen_hash(\n [\n address.to_field(),\n inner_commitment\n ],\n GENERATOR_INDEX__SILOED_NOTE_HASH\n )\n}\n\npub fn silo_nullifier(address: AztecAddress, nullifier: Field) -> Field {\n pedersen_hash(\n [\n address.to_field(),\n nullifier\n ],\n GENERATOR_INDEX__OUTER_NULLIFIER\n )\n}\n\nfn merkle_hash(left: Field, right: Field) -> Field {\n pedersen_hash([left, right], 0)\n}\n\npub fn stdlib_recursion_verification_key_compress_native_vk(_vk: VerificationKey) -> Field {\n // Original cpp code:\n // stdlib::recursion::verification_key::compress_native(private_call.vk, GeneratorIndex::VK);\n // The above cpp method is only ever called on a verification key, so it has been special-cased here.\n let _hash_index = GENERATOR_INDEX__VK;\n 0\n}\n\n// TODO: cpp uses blake2s for this\npub fn compute_new_contract_address_hash(new_contract_address: AztecAddress) -> Field {\n dep::std::hash::pedersen_hash([new_contract_address.to_field()])\n}\n\npub fn compute_l2_to_l1_hash(\n contract_address: AztecAddress,\n rollup_version_id: Field,\n portal_contract_address: EthAddress,\n chain_id: Field,\n content: Field\n) -> Field {\n let mut bytes: BoundedVec<u8, 160> = BoundedVec::new();\n\n let inputs = [\n contract_address.to_field(), rollup_version_id, portal_contract_address.to_field(), chain_id, content\n ];\n for i in 0..inputs.len() {\n // TODO: should these bytes match fr.to_buffer()?\n let item_bytes = inputs[i].to_be_bytes(32);\n for j in 0..32 {\n bytes.push(item_bytes[j]);\n }\n }\n\n sha256_to_field(bytes.storage)\n}\n\npub fn compute_constructor_hash(\n function_data: FunctionData,\n args_hash: Field,\n constructor_vk_hash: Field\n) -> Field {\n let function_data_hash = function_data.hash();\n\n pedersen_hash(\n [\n function_data_hash,\n args_hash,\n constructor_vk_hash\n ],\n GENERATOR_INDEX__CONSTRUCTOR\n )\n}\n\n// Computes the sha256 hash of 2 input hashes stored in 4 fields.\n//\n// This method is bn254 specific. Two fields are needed in order to\n// encode the sha256 output. It can be abstracted away with any 4-2 hash function.\n//\n// TODO(Jan and David): This is used for the encrypted_log hashes.\n// Can we check to see if we can just use hash_to_field or pedersen_compress here?\n//\n// Returning a Field would be desirable because then this can be replaced with\n// poseidon without changing the rest of the code.\n//\npub fn accumulate_sha256(input: [U128; 4]) -> [Field; NUM_FIELDS_PER_SHA256] {\n // This is a note about the cpp code, since it takes an array of Fields\n // instead of a U128.\n // 4 Field elements, when converted to bytes, will usually\n // occupy 4 * 32 = 128 bytes.\n // However, this function is making the assumption that each Field\n // only occupies 128 bits.\n //\n // TODO(David): This does not seem to be getting guaranteed anywhere in the code?\n //\n // Concatenate 4 u128 integers into a byte array.\n let mut hash_input_flattened = [0; 64];\n for offset in 0..4 {\n let input_as_bytes = input[offset].to_be_bytes();\n for byte_index in 0..16 {\n hash_input_flattened[offset * 16 + byte_index] = input_as_bytes[byte_index];\n }\n }\n\n let sha_digest = dep::std::hash::sha256(hash_input_flattened);\n\n U256::from_bytes32(sha_digest).to_u128_limbs()\n}\n\npub fn compute_logs_hash(\n previous_log_hash: [Field; 2],\n current_log_hash: [Field; 2]\n) -> [Field; NUM_FIELDS_PER_SHA256] {\n accumulate_sha256(\n [\n U128::from_integer(previous_log_hash[0]),\n U128::from_integer(previous_log_hash[1]),\n U128::from_integer(current_log_hash[0]),\n U128::from_integer(current_log_hash[1])\n ]\n )\n}\n\npub fn compute_note_hash_nonce(first_nullifier: Field, commitment_index: Field) -> Field {\n pedersen_hash(\n [\n first_nullifier,\n commitment_index\n ],\n GENERATOR_INDEX__NOTE_HASH_NONCE\n )\n}\n\npub fn compute_unique_siloed_note_hash(nonce: Field, siloed_note_hash: Field) -> Field {\n pedersen_hash(\n [\n nonce,\n siloed_note_hash\n ],\n GENERATOR_INDEX__UNIQUE_NOTE_HASH\n )\n}\n\npub fn compute_unique_siloed_note_hashes<N>(\n first_nullifier: Field,\n siloed_note_hashes: [SideEffect; N]\n) -> [SideEffect; N] {\n let mut unique_siloed_note_hashes = [SideEffect::empty(); N];\n for i in 0..N {\n let siloed_note_hash = siloed_note_hashes[i];\n if siloed_note_hash.value != 0 {\n let nonce = compute_note_hash_nonce(first_nullifier, i);\n unique_siloed_note_hashes[i] = SideEffect {\n value: compute_unique_siloed_note_hash(nonce, siloed_note_hash.value),\n counter: siloed_note_hash.counter\n };\n }\n }\n unique_siloed_note_hashes\n}\n\npub fn pedersen_hash<N>(inputs: [Field; N], hash_index: u32) -> Field {\n dep::std::hash::pedersen_hash_with_separator(inputs, hash_index)\n}\n", "path": 
"/home/santiago/Projects/aztec3-packages/noir-protocol-circuits/src/crates/types/src/hash.nr" }, "126": {