From e2150a7e5fc84932b65af07025514fc3c57f1cbc Mon Sep 17 00:00:00 2001 From: Facundo Date: Wed, 4 Sep 2024 11:49:51 +0100 Subject: [PATCH 01/18] chore(avm): remove some unused deps (#8366) --- .../src/barretenberg/vm/avm/generated/circuit_builder.hpp | 2 -- .../cpp/src/barretenberg/vm/avm/generated/composer.cpp | 5 ----- .../cpp/src/barretenberg/vm/avm/generated/composer.hpp | 1 - bb-pilcom/bb-pil-backend/templates/circuit_builder.hpp.hbs | 4 +--- bb-pilcom/bb-pil-backend/templates/composer.cpp.hbs | 5 ----- bb-pilcom/bb-pil-backend/templates/composer.hpp.hbs | 1 - 6 files changed, 1 insertion(+), 17 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.hpp index 3e196511ba2..1c3eff83f94 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.hpp @@ -3,8 +3,6 @@ #include -#include "barretenberg/stdlib_circuit_builders/circuit_builder_base.hpp" - #include "barretenberg/vm/avm/generated/flavor.hpp" #include "barretenberg/vm/avm/generated/full_row.hpp" diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/composer.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/composer.cpp index 613a96aa9f7..495f24e07d9 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/composer.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/composer.cpp @@ -1,11 +1,6 @@ // AUTOGENERATED FILE #include "barretenberg/vm/avm/generated/composer.hpp" -#include "barretenberg/plonk_honk_shared/composer/composer_lib.hpp" -#include "barretenberg/plonk_honk_shared/composer/permutation_lib.hpp" -#include "barretenberg/vm/avm/generated/circuit_builder.hpp" -#include "barretenberg/vm/avm/generated/verifier.hpp" - namespace bb { using Flavor = AvmFlavor; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/composer.hpp 
b/barretenberg/cpp/src/barretenberg/vm/avm/generated/composer.hpp index 590921b908c..a205c890ecf 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/composer.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/composer.hpp @@ -1,7 +1,6 @@ // AUTOGENERATED FILE #pragma once -#include "barretenberg/plonk_honk_shared/composer/composer_lib.hpp" #include "barretenberg/srs/global_crs.hpp" #include "barretenberg/vm/avm/generated/circuit_builder.hpp" #include "barretenberg/vm/avm/generated/prover.hpp" diff --git a/bb-pilcom/bb-pil-backend/templates/circuit_builder.hpp.hbs b/bb-pilcom/bb-pil-backend/templates/circuit_builder.hpp.hbs index 4be6ec2eccd..8b034e983b7 100644 --- a/bb-pilcom/bb-pil-backend/templates/circuit_builder.hpp.hbs +++ b/bb-pilcom/bb-pil-backend/templates/circuit_builder.hpp.hbs @@ -3,8 +3,6 @@ #include -#include "barretenberg/stdlib_circuit_builders/circuit_builder_base.hpp" - #include "barretenberg/vm/{{snakeCase name}}/generated/full_row.hpp" #include "barretenberg/vm/{{snakeCase name}}/generated/flavor.hpp" @@ -27,7 +25,7 @@ class {{name}}CircuitBuilder { ProverPolynomials compute_polynomials() const; bool check_circuit() const; - + size_t get_num_gates() const { return rows.size(); } size_t get_circuit_subgroup_size() const diff --git a/bb-pilcom/bb-pil-backend/templates/composer.cpp.hbs b/bb-pilcom/bb-pil-backend/templates/composer.cpp.hbs index 707d4a0bfd1..76307d0ec27 100644 --- a/bb-pilcom/bb-pil-backend/templates/composer.cpp.hbs +++ b/bb-pilcom/bb-pil-backend/templates/composer.cpp.hbs @@ -1,11 +1,6 @@ // AUTOGENERATED FILE #include "barretenberg/vm/{{snakeCase name}}/generated/composer.hpp" -#include "barretenberg/plonk_honk_shared/composer/composer_lib.hpp" -#include "barretenberg/plonk_honk_shared/composer/permutation_lib.hpp" -#include "barretenberg/vm/{{snakeCase name}}/generated/circuit_builder.hpp" -#include "barretenberg/vm/{{snakeCase name}}/generated/verifier.hpp" - namespace bb { using Flavor = {{name}}Flavor; 
diff --git a/bb-pilcom/bb-pil-backend/templates/composer.hpp.hbs b/bb-pilcom/bb-pil-backend/templates/composer.hpp.hbs index af7acae3728..c91ddbe8fe3 100644 --- a/bb-pilcom/bb-pil-backend/templates/composer.hpp.hbs +++ b/bb-pilcom/bb-pil-backend/templates/composer.hpp.hbs @@ -1,7 +1,6 @@ // AUTOGENERATED FILE #pragma once -#include "barretenberg/plonk_honk_shared/composer/composer_lib.hpp" #include "barretenberg/srs/global_crs.hpp" #include "barretenberg/vm/{{snakeCase name}}/generated/circuit_builder.hpp" #include "barretenberg/vm/{{snakeCase name}}/generated/prover.hpp" From 0330ced124d5455cc584694255a3ceed9c35b69f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Wed, 4 Sep 2024 13:47:59 +0200 Subject: [PATCH 02/18] feat: Liveness analysis for constants (#8294) Bytecode size change:
Changes in bytecode size ``` contracts: Transpiling AppSubscription::assert_block_number with size 40 => 40 contracts: Transpiling AppSubscription::assert_not_expired with size 37 => 37 contracts: Transpiling AppSubscription::constructor with size 1311 => 1209 contracts: Transpiling Auth::constructor with size 812 => 753 contracts: Transpiling Auth::get_authorized with size 143 => 143 contracts: Transpiling Auth::get_authorized_delay with size 191 => 184 contracts: Transpiling Auth::get_scheduled_authorized with size 115 => 116 contracts: Transpiling Auth::set_authorized with size 1093 => 1040 contracts: Transpiling Auth::set_authorized_delay with size 1047 => 994 contracts: Transpiling AuthRegistry::_set_authorized with size 78 => 78 contracts: Transpiling AuthRegistry::consume with size 731 => 683 contracts: Transpiling AuthRegistry::is_consumable with size 131 => 130 contracts: Transpiling AuthRegistry::is_reject_all with size 116 => 115 contracts: Transpiling AuthRegistry::set_authorized with size 73 => 73 contracts: Transpiling AuthRegistry::set_reject_all with size 58 => 58 contracts: Transpiling AuthWitTest::consume_public with size 1038 => 989 contracts: Transpiling AvmInitializerTest::constructor with size 806 => 749 contracts: Transpiling AvmInitializerTest::read_storage_immutable with size 100 => 99 contracts: Transpiling AvmTest::add_args_return with size 15 => 15 contracts: Transpiling AvmTest::add_storage_map with size 182 => 176 contracts: Transpiling AvmTest::add_u128 with size 32 => 32 contracts: Transpiling AvmTest::assert_nullifier_exists with size 16 => 16 contracts: Transpiling AvmTest::assert_same with size 18 => 18 contracts: Transpiling AvmTest::assert_timestamp with size 15 => 15 contracts: Transpiling AvmTest::assertion_failure with size 14 => 14 contracts: Transpiling AvmTest::check_selector with size 966 => 924 contracts: Transpiling AvmTest::create_different_nullifier_in_nested_call with size 135 => 126 contracts: Transpiling 
AvmTest::create_same_nullifier_in_nested_call with size 133 => 124 contracts: Transpiling AvmTest::debug_logging with size 205 => 236 contracts: Transpiling AvmTest::elliptic_curve_add_and_double with size 33 => 33 contracts: Transpiling AvmTest::emit_nullifier_and_check with size 17 => 17 contracts: Transpiling AvmTest::emit_unencrypted_log with size 502 => 508 contracts: Transpiling AvmTest::get_address with size 13 => 13 contracts: Transpiling AvmTest::get_args_hash with size 18 => 18 contracts: Transpiling AvmTest::get_block_number with size 13 => 13 contracts: Transpiling AvmTest::get_chain_id with size 13 => 13 contracts: Transpiling AvmTest::get_da_gas_left with size 13 => 13 contracts: Transpiling AvmTest::get_fee_per_da_gas with size 13 => 13 contracts: Transpiling AvmTest::get_fee_per_l2_gas with size 13 => 13 contracts: Transpiling AvmTest::get_function_selector with size 13 => 13 contracts: Transpiling AvmTest::get_l2_gas_left with size 13 => 13 contracts: Transpiling AvmTest::get_sender with size 13 => 13 contracts: Transpiling AvmTest::get_storage_address with size 13 => 13 contracts: Transpiling AvmTest::get_timestamp with size 13 => 13 contracts: Transpiling AvmTest::get_transaction_fee with size 13 => 13 contracts: Transpiling AvmTest::get_version with size 13 => 13 contracts: Transpiling AvmTest::keccak_f1600 with size 61 => 61 contracts: Transpiling AvmTest::keccak_hash with size 840 => 796 contracts: Transpiling AvmTest::l1_to_l2_msg_exists with size 17 => 17 contracts: Transpiling AvmTest::modulo2 with size 16 => 16 contracts: Transpiling AvmTest::nested_call_to_add with size 220 => 206 contracts: Transpiling AvmTest::nested_call_to_add_with_gas with size 213 => 206 contracts: Transpiling AvmTest::nested_static_call_to_add with size 220 => 206 contracts: Transpiling AvmTest::nested_static_call_to_set_storage with size 132 => 123 contracts: Transpiling AvmTest::new_note_hash with size 11 => 11 contracts: Transpiling AvmTest::new_nullifier with 
size 11 => 11 contracts: Transpiling AvmTest::note_hash_exists with size 17 => 17 contracts: Transpiling AvmTest::nullifier_collision with size 12 => 12 contracts: Transpiling AvmTest::nullifier_exists with size 17 => 17 contracts: Transpiling AvmTest::pedersen_commit with size 81 => 78 contracts: Transpiling AvmTest::pedersen_hash with size 19 => 19 contracts: Transpiling AvmTest::pedersen_hash_with_index with size 19 => 19 contracts: Transpiling AvmTest::poseidon2_hash with size 444 => 432 contracts: Transpiling AvmTest::read_storage_list with size 124 => 123 contracts: Transpiling AvmTest::read_storage_map with size 112 => 111 contracts: Transpiling AvmTest::read_storage_single with size 93 => 92 contracts: Transpiling AvmTest::send_l2_to_l1_msg with size 12 => 12 contracts: Transpiling AvmTest::set_opcode_big_field with size 19 => 19 contracts: Transpiling AvmTest::set_opcode_small_field with size 13 => 13 contracts: Transpiling AvmTest::set_opcode_u32 with size 13 => 13 contracts: Transpiling AvmTest::set_opcode_u64 with size 13 => 13 contracts: Transpiling AvmTest::set_opcode_u8 with size 13 => 13 contracts: Transpiling AvmTest::set_read_storage_single with size 132 => 127 contracts: Transpiling AvmTest::set_storage_list with size 43 => 43 contracts: Transpiling AvmTest::set_storage_map with size 72 => 71 contracts: Transpiling AvmTest::set_storage_single with size 40 => 39 contracts: Transpiling AvmTest::sha256_hash with size 46 => 46 contracts: Transpiling AvmTest::test_get_contract_instance with size 215 => 208 contracts: Transpiling AvmTest::test_get_contract_instance_raw with size 82 => 82 contracts: Transpiling AvmTest::to_radix_le with size 136 => 137 contracts: Transpiling AvmTest::u128_addition_overflow with size 355 => 270 contracts: Transpiling AvmTest::u128_from_integer_overflow with size 140 => 139 contracts: Transpiling AvmTest::variable_base_msm with size 92 => 90 contracts: Transpiling Benchmarking::broadcast with size 114 => 112 contracts: 
Transpiling Benchmarking::increment_balance with size 282 => 266 contracts: Transpiling CardGame::on_card_played with size 1162 => 1122 contracts: Transpiling CardGame::on_cards_claimed with size 961 => 917 contracts: Transpiling CardGame::on_game_joined with size 893 => 874 contracts: Transpiling CardGame::start_game with size 1396 => 1353 contracts: Transpiling Child::pub_get_value with size 22 => 22 contracts: Transpiling Child::pub_inc_value with size 140 => 135 contracts: Transpiling Child::pub_inc_value_internal with size 145 => 140 contracts: Transpiling Child::pub_set_value with size 51 => 49 contracts: Transpiling Child::set_value_twice_with_nested_first with size 178 => 166 contracts: Transpiling Child::set_value_twice_with_nested_last with size 178 => 166 contracts: Transpiling Child::set_value_with_two_nested_calls with size 120 => 98 contracts: Transpiling Claim::constructor with size 933 => 864 contracts: Transpiling Crowdfunding::_check_deadline with size 123 => 121 contracts: Transpiling Crowdfunding::_publish_donation_receipts with size 1106 => 1066 contracts: Transpiling Crowdfunding::init with size 1061 => 981 contracts: Transpiling DelegatedOn::public_set_value with size 43 => 42 contracts: Transpiling Delegator::public_delegate_set_value with size 97 => 95 contracts: Transpiling DocsExample::get_shared_immutable_constrained_public with size 112 => 111 contracts: Transpiling DocsExample::get_shared_immutable_constrained_public_indirect with size 79 => 72 contracts: Transpiling DocsExample::get_shared_immutable_constrained_public_multiple with size 143 => 142 contracts: Transpiling DocsExample::initialize_public_immutable with size 167 => 161 contracts: Transpiling DocsExample::initialize_shared_immutable with size 167 => 161 contracts: Transpiling DocsExample::spend_public_authwit with size 14 => 14 contracts: Transpiling DocsExample::update_leader with size 50 => 50 contracts: Transpiling EasyPrivateVoting::add_to_tally_public with size 239 => 
231 contracts: Transpiling EasyPrivateVoting::constructor with size 870 => 806 contracts: Transpiling EasyPrivateVoting::end_vote with size 138 => 135 contracts: Transpiling FeeJuice::_increase_public_balance with size 197 => 192 contracts: Transpiling FeeJuice::balance_of_public with size 125 => 124 contracts: Transpiling FeeJuice::check_balance with size 187 => 177 contracts: Transpiling FeeJuice::claim_public with size 1811 => 1699 contracts: Transpiling FeeJuice::set_portal with size 238 => 226 contracts: Transpiling FPC::constructor with size 807 => 749 contracts: Transpiling FPC::pay_refund with size 452 => 427 contracts: Transpiling FPC::pay_refund_with_shielded_rebate with size 452 => 427 contracts: Transpiling FPC::prepare_fee with size 360 => 338 contracts: Transpiling ImportTest::pub_call_public_fn with size 132 => 123 contracts: Transpiling InclusionProofs::constructor with size 712 => 661 contracts: Transpiling InclusionProofs::push_nullifier_public with size 18 => 18 contracts: Transpiling InclusionProofs::test_nullifier_inclusion_from_public with size 22 => 22 contracts: Transpiling KeyRegistry::register_initial_keys with size 1660 => 1533 contracts: Transpiling KeyRegistry::rotate_npk_m with size 2361 => 2211 contracts: Transpiling Lending::_borrow with size 2246 => 2307 contracts: Transpiling Lending::_deposit with size 249 => 236 contracts: Transpiling Lending::_repay with size 1331 => 1365 contracts: Transpiling Lending::_withdraw with size 2340 => 2360 contracts: Transpiling Lending::borrow_public with size 283 => 265 contracts: Transpiling Lending::deposit_public with size 616 => 567 contracts: Transpiling Lending::get_asset with size 183 => 182 contracts: Transpiling Lending::get_assets with size 195 => 191 contracts: Transpiling Lending::get_position with size 909 => 883 contracts: Transpiling Lending::init with size 320 => 300 contracts: Transpiling Lending::repay_public with size 544 => 499 contracts: Transpiling Lending::update_accumulator 
with size 2210 => 2409 contracts: Transpiling Lending::withdraw_public with size 283 => 265 contracts: Transpiling Parent::pub_entry_point with size 67 => 60 contracts: Transpiling Parent::pub_entry_point_twice with size 114 => 93 contracts: Transpiling Parent::public_nested_static_call with size 1081 => 1037 contracts: Transpiling Parent::public_static_call with size 91 => 84 contracts: Transpiling PriceFeed::get_price with size 125 => 124 contracts: Transpiling PriceFeed::set_price with size 68 => 69 contracts: Transpiling PrivateFPC::constructor with size 810 => 752 contracts: Transpiling StatefulTest::get_public_value with size 113 => 112 contracts: Transpiling StatefulTest::increment_public_value with size 143 => 140 contracts: Transpiling StatefulTest::increment_public_value_no_init_check with size 136 => 133 contracts: Transpiling StatefulTest::public_constructor with size 877 => 814 contracts: Transpiling StaticChild::pub_get_value with size 26 => 26 contracts: Transpiling StaticChild::pub_illegal_inc_value with size 144 => 139 contracts: Transpiling StaticChild::pub_inc_value with size 140 => 135 contracts: Transpiling StaticChild::pub_set_value with size 51 => 49 contracts: Transpiling StaticParent::public_call with size 67 => 60 contracts: Transpiling StaticParent::public_get_value_from_child with size 148 => 138 contracts: Transpiling StaticParent::public_nested_static_call with size 303 => 285 contracts: Transpiling StaticParent::public_static_call with size 91 => 84 contracts: Transpiling Test::assert_public_global_vars with size 40 => 40 contracts: Transpiling Test::consume_message_from_arbitrary_sender_public with size 1312 => 1230 contracts: Transpiling Test::consume_mint_public_message with size 1582 => 1483 contracts: Transpiling Test::create_l2_to_l1_message_arbitrary_recipient_public with size 12 => 12 contracts: Transpiling Test::create_l2_to_l1_message_public with size 24 => 24 contracts: Transpiling Test::dummy_public_call with size 14 => 14 
contracts: Transpiling Test::emit_nullifier_public with size 11 => 11 contracts: Transpiling Test::emit_unencrypted with size 210 => 217 contracts: Transpiling Test::is_time_equal with size 18 => 18 contracts: Transpiling TestLog::emit_unencrypted_events with size 2147 => 2029 contracts: Transpiling Token::_increase_public_balance with size 196 => 191 contracts: Transpiling Token::_reduce_total_supply with size 174 => 170 contracts: Transpiling Token::admin with size 104 => 103 contracts: Transpiling Token::assert_minter_and_mint with size 257 => 250 contracts: Transpiling Token::balance_of_public with size 132 => 131 contracts: Transpiling Token::burn_public with size 1786 => 1679 contracts: Transpiling Token::complete_refund with size 519 => 478 contracts: Transpiling Token::constructor with size 1310 => 1210 contracts: Transpiling Token::is_minter with size 123 => 122 contracts: Transpiling Token::mint_private with size 579 => 556 contracts: Transpiling Token::mint_public with size 400 => 381 contracts: Transpiling Token::public_get_decimals with size 107 => 106 contracts: Transpiling Token::public_get_name with size 104 => 103 contracts: Transpiling Token::public_get_symbol with size 104 => 103 contracts: Transpiling Token::set_admin with size 139 => 135 contracts: Transpiling Token::set_minter with size 157 => 154 contracts: Transpiling Token::shield with size 1998 => 1880 contracts: Transpiling Token::total_supply with size 116 => 115 contracts: Transpiling Token::transfer_public with size 1808 => 1698 contracts: Transpiling TokenBlacklist::_increase_public_balance with size 196 => 191 contracts: Transpiling TokenBlacklist::_reduce_total_supply with size 174 => 170 contracts: Transpiling TokenBlacklist::balance_of_public with size 132 => 131 contracts: Transpiling TokenBlacklist::burn_public with size 1935 => 1820 contracts: Transpiling TokenBlacklist::constructor with size 1791 => 1674 contracts: Transpiling TokenBlacklist::get_roles with size 193 => 192 
contracts: Transpiling TokenBlacklist::mint_private with size 644 => 619 contracts: Transpiling TokenBlacklist::mint_public with size 612 => 581 contracts: Transpiling TokenBlacklist::shield with size 2147 => 2021 contracts: Transpiling TokenBlacklist::total_supply with size 116 => 115 contracts: Transpiling TokenBlacklist::transfer_public with size 2112 => 1983 contracts: Transpiling TokenBlacklist::update_roles with size 1347 => 1266 contracts: Transpiling TokenBridge::_assert_token_is_same with size 106 => 105 contracts: Transpiling TokenBridge::_call_mint_on_token with size 295 => 279 contracts: Transpiling TokenBridge::claim_public with size 1913 => 1790 contracts: Transpiling TokenBridge::constructor with size 838 => 777 contracts: Transpiling TokenBridge::exit_to_l1_public with size 900 => 876 contracts: Transpiling TokenBridge::get_portal_address_public with size 111 => 110 contracts: Transpiling TokenBridge::get_token with size 104 => 103 contracts: Transpiling Uniswap::_approve_bridge_and_exit_input_asset_to_L1 with size 4804 => 4439 contracts: Transpiling Uniswap::_assert_token_is_same with size 78 => 71 contracts: Transpiling Uniswap::constructor with size 807 => 749 contracts: Transpiling Uniswap::swap_public with size 3294 => 3092 ```
--------- Co-authored-by: Maxim Vezenov --- .../src/brillig/brillig_gen.rs | 1 + .../src/brillig/brillig_gen/brillig_block.rs | 72 ++++++--- .../brillig_gen/brillig_block_variables.rs | 62 +++----- .../src/brillig/brillig_gen/brillig_fn.rs | 10 +- .../brillig_gen/constant_allocation.rs | 142 ++++++++++++++++++ .../brillig/brillig_gen/variable_liveness.rs | 86 +++++++---- .../noirc_evaluator/src/ssa/ir/dom.rs | 2 +- 7 files changed, 271 insertions(+), 104 deletions(-) create mode 100644 noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen.rs index b256c2b85ab..628ec9657f2 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen.rs @@ -4,6 +4,7 @@ pub(crate) mod brillig_block_variables; pub(crate) mod brillig_directive; pub(crate) mod brillig_fn; pub(crate) mod brillig_slice_ops; +mod constant_allocation; mod variable_liveness; use acvm::FieldElement; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 26abafe177f..55794c2b7dd 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -29,6 +29,7 @@ use std::sync::Arc; use super::brillig_black_box::convert_black_box_call; use super::brillig_block_variables::BlockVariables; use super::brillig_fn::FunctionContext; +use super::constant_allocation::InstructionLocation; /// Generate the compilation artifacts for compiling a function into brillig bytecode. 
pub(crate) struct BrilligBlock<'block> { @@ -117,6 +118,13 @@ impl<'block> BrilligBlock<'block> { terminator_instruction: &TerminatorInstruction, dfg: &DataFlowGraph, ) { + self.initialize_constants( + &self + .function_context + .constant_allocation + .allocated_at_location(self.block_id, InstructionLocation::Terminator), + dfg, + ); match terminator_instruction { TerminatorInstruction::JmpIf { condition, @@ -244,6 +252,13 @@ impl<'block> BrilligBlock<'block> { let instruction = &dfg[instruction_id]; self.brillig_context.set_call_stack(dfg.get_call_stack(instruction_id)); + self.initialize_constants( + &self.function_context.constant_allocation.allocated_at_location( + self.block_id, + InstructionLocation::Instruction(instruction_id), + ), + dfg, + ); match instruction { Instruction::Binary(binary) => { let result_var = self.variables.define_single_addr_variable( @@ -734,9 +749,6 @@ impl<'block> BrilligBlock<'block> { .brillig_context .codegen_pre_call_save_registers_prep_args(&argument_registers, &variables_to_save); - // We don't save and restore constants, so we dump them before a external call since the callee might use the registers where they are allocated. - self.variables.dump_constants(); - // Call instruction, which will interpret above registers 0..num args self.brillig_context.add_external_call_instruction(func_id); @@ -1478,6 +1490,12 @@ impl<'block> BrilligBlock<'block> { } } + fn initialize_constants(&mut self, constants: &[ValueId], dfg: &DataFlowGraph) { + for &constant_id in constants { + self.convert_ssa_value(constant_id, dfg); + } + } + /// Converts an SSA `ValueId` into a `RegisterOrMemory`. Initializes if necessary. fn convert_ssa_value(&mut self, value_id: ValueId, dfg: &DataFlowGraph) -> BrilligVariable { let value_id = dfg.resolve(value_id); @@ -1493,11 +1511,15 @@ impl<'block> BrilligBlock<'block> { Value::NumericConstant { constant, .. 
} => { // Constants might have been converted previously or not, so we get or create and // (re)initialize the value inside. - if let Some(variable) = self.variables.get_constant(value_id, dfg) { - variable + if self.variables.is_allocated(&value_id) { + self.variables.get_allocation(self.function_context, value_id, dfg) } else { - let new_variable = - self.variables.allocate_constant(self.brillig_context, value_id, dfg); + let new_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + value_id, + dfg, + ); self.brillig_context .const_instruction(new_variable.extract_single_addr(), *constant); @@ -1505,11 +1527,15 @@ impl<'block> BrilligBlock<'block> { } } Value::Array { array, typ } => { - if let Some(variable) = self.variables.get_constant(value_id, dfg) { - variable + if self.variables.is_allocated(&value_id) { + self.variables.get_allocation(self.function_context, value_id, dfg) } else { - let new_variable = - self.variables.allocate_constant(self.brillig_context, value_id, dfg); + let new_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + value_id, + dfg, + ); // Initialize the variable let pointer = match new_variable { @@ -1549,8 +1575,12 @@ impl<'block> BrilligBlock<'block> { // around values representing function pointers, even though // there is no interaction with the function possible given that // value. 
- let new_variable = - self.variables.allocate_constant(self.brillig_context, value_id, dfg); + let new_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + value_id, + dfg, + ); self.brillig_context.const_instruction( new_variable.extract_single_addr(), @@ -1698,18 +1728,10 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.mov_instruction(write_pointer_register, pointer); for (element_idx, element_id) in data.iter().enumerate() { - if let Some((constant, typ)) = dfg.get_numeric_constant_with_type(*element_id) { - self.brillig_context.indirect_const_instruction( - write_pointer_register, - typ.bit_size(), - constant, - ); - } else { - let element_variable = self.convert_ssa_value(*element_id, dfg); - // Store the item in memory - self.brillig_context - .codegen_store_variable_in_pointer(write_pointer_register, element_variable); - } + let element_variable = self.convert_ssa_value(*element_id, dfg); + // Store the item in memory + self.brillig_context + .codegen_store_variable_in_pointer(write_pointer_register, element_variable); if element_idx != data.len() - 1 { // Increment the write_pointer_register diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index 63b2073c654..90af2e42211 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -1,5 +1,5 @@ use acvm::FieldElement; -use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; +use fxhash::FxHashSet as HashSet; use crate::{ brillig::brillig_ir::{ @@ -22,16 +22,15 @@ use super::brillig_fn::FunctionContext; #[derive(Debug, Default)] pub(crate) struct BlockVariables { available_variables: HashSet, - available_constants: HashMap, } impl BlockVariables { /// Creates a 
BlockVariables instance. It uses the variables that are live in to the block and the global available variables (block parameters) pub(crate) fn new(live_in: HashSet) -> Self { - BlockVariables { available_variables: live_in, ..Default::default() } + BlockVariables { available_variables: live_in } } - /// Returns all non-constant variables that have not been removed at this point. + /// Returns all variables that have not been removed at this point. pub(crate) fn get_available_variables( &self, function_context: &FunctionContext, @@ -48,7 +47,7 @@ impl BlockVariables { .collect() } - /// For a given SSA non constant value id, define the variable and return the corresponding cached allocation. + /// For a given SSA value id, define the variable and return the corresponding cached allocation. pub(crate) fn define_variable( &mut self, function_context: &mut FunctionContext, @@ -97,6 +96,11 @@ impl BlockVariables { }); } + /// Checks if a variable is allocated. + pub(crate) fn is_allocated(&self, value_id: &ValueId) -> bool { + self.available_variables.contains(value_id) + } + /// For a given SSA value id, return the corresponding cached allocation. pub(crate) fn get_allocation( &mut self, @@ -105,48 +109,16 @@ impl BlockVariables { dfg: &DataFlowGraph, ) -> BrilligVariable { let value_id = dfg.resolve(value_id); - if let Some(constant) = self.available_constants.get(&value_id) { - *constant - } else { - assert!( - self.available_variables.contains(&value_id), - "ICE: ValueId {value_id:?} is not available" - ); - - *function_context - .ssa_value_allocations - .get(&value_id) - .unwrap_or_else(|| panic!("ICE: Value not found in cache {value_id}")) - } - } - - /// Creates a constant. Constants are a special case in SSA, since they are "defined" every time they are used. - /// We keep constants block-local. 
- pub(crate) fn allocate_constant( - &mut self, - brillig_context: &mut BrilligContext, - value_id: ValueId, - dfg: &DataFlowGraph, - ) -> BrilligVariable { - let value_id = dfg.resolve(value_id); - let constant = allocate_value(value_id, brillig_context, dfg); - self.available_constants.insert(value_id, constant); - constant - } - /// Gets a constant. - pub(crate) fn get_constant( - &mut self, - value_id: ValueId, - dfg: &DataFlowGraph, - ) -> Option { - let value_id = dfg.resolve(value_id); - self.available_constants.get(&value_id).cloned() - } + assert!( + self.available_variables.contains(&value_id), + "ICE: ValueId {value_id:?} is not available" + ); - /// Removes the allocations of all constants. Constants will need to be reallocated and reinitialized after this. - pub(crate) fn dump_constants(&mut self) { - self.available_constants.clear(); + *function_context + .ssa_value_allocations + .get(&value_id) + .unwrap_or_else(|| panic!("ICE: Value not found in cache {value_id}")) } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index c1abad17a8f..2779be103cd 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -15,7 +15,7 @@ use crate::{ }; use fxhash::FxHashMap as HashMap; -use super::variable_liveness::VariableLiveness; +use super::{constant_allocation::ConstantAllocation, variable_liveness::VariableLiveness}; pub(crate) struct FunctionContext { pub(crate) function_id: FunctionId, @@ -25,6 +25,8 @@ pub(crate) struct FunctionContext { pub(crate) blocks: Vec, /// Liveness information for each variable in the function. 
pub(crate) liveness: VariableLiveness, + /// Information on where to allocate constants + pub(crate) constant_allocation: ConstantAllocation, } impl FunctionContext { @@ -36,11 +38,15 @@ impl FunctionContext { reverse_post_order.extend_from_slice(PostOrder::with_function(function).as_slice()); reverse_post_order.reverse(); + let constants = ConstantAllocation::from_function(function); + let liveness = VariableLiveness::from_function(function, &constants); + Self { function_id: id, ssa_value_allocations: HashMap::default(), blocks: reverse_post_order, - liveness: VariableLiveness::from_function(function), + liveness, + constant_allocation: constants, } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs new file mode 100644 index 00000000000..cf484fa5038 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs @@ -0,0 +1,142 @@ +//! This module analyzes the usage of constants in a given function and decides an allocation point for them. +//! The allocation point will be the common dominator of all the places where the constant is used. +//! By allocating in the common dominator, we can cache the constants for all subsequent uses. 
+use fxhash::FxHashMap as HashMap; + +use crate::ssa::ir::{ + basic_block::BasicBlockId, + cfg::ControlFlowGraph, + dfg::DataFlowGraph, + dom::DominatorTree, + function::Function, + instruction::InstructionId, + post_order::PostOrder, + value::{Value, ValueId}, +}; + +use super::variable_liveness::{collect_variables_of_value, variables_used_in_instruction}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub(crate) enum InstructionLocation { + Instruction(InstructionId), + Terminator, +} + +pub(crate) struct ConstantAllocation { + constant_usage: HashMap>>, + allocation_points: HashMap>>, + dominator_tree: DominatorTree, +} + +impl ConstantAllocation { + pub(crate) fn from_function(func: &Function) -> Self { + let cfg = ControlFlowGraph::with_function(func); + let post_order = PostOrder::with_function(func); + let dominator_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order); + let mut instance = ConstantAllocation { + constant_usage: HashMap::default(), + allocation_points: HashMap::default(), + dominator_tree, + }; + instance.collect_constant_usage(func); + instance.decide_allocation_points(); + + instance + } + + pub(crate) fn allocated_in_block(&self, block_id: BasicBlockId) -> Vec { + self.allocation_points.get(&block_id).map_or(Vec::default(), |allocations| { + allocations.iter().flat_map(|(_, constants)| constants.iter()).copied().collect() + }) + } + + pub(crate) fn allocated_at_location( + &self, + block_id: BasicBlockId, + location: InstructionLocation, + ) -> Vec { + self.allocation_points.get(&block_id).map_or(Vec::default(), |allocations| { + allocations.get(&location).map_or(Vec::default(), |constants| constants.clone()) + }) + } + + fn collect_constant_usage(&mut self, func: &Function) { + let mut record_if_constant = + |block_id: BasicBlockId, value_id: ValueId, location: InstructionLocation| { + if is_constant_value(value_id, &func.dfg) { + self.constant_usage + .entry(value_id) + .or_default() + .entry(block_id) + .or_default() 
+ .push(location); + } + }; + for block_id in func.reachable_blocks() { + let block = &func.dfg[block_id]; + for &inst_id in block.instructions() { + let variables = variables_used_in_instruction(&func.dfg[inst_id], &func.dfg); + for variable in variables { + record_if_constant( + block_id, + variable, + InstructionLocation::Instruction(inst_id), + ); + } + } + if let Some(terminator_instruction) = block.terminator() { + terminator_instruction.for_each_value(|value_id| { + let variables = collect_variables_of_value(value_id, &func.dfg); + for variable in variables { + record_if_constant(block_id, variable, InstructionLocation::Terminator); + } + }); + } + } + } + + fn decide_allocation_points(&mut self) { + for (constant_id, usage_in_blocks) in self.constant_usage.iter() { + let block_ids: Vec<_> = usage_in_blocks.iter().map(|(block_id, _)| *block_id).collect(); + + let common_dominator = self.common_dominator(&block_ids); + + // If the common dominator is one of the places where it's used, we take the first usage in the common dominator. + // Otherwise, we allocate it at the terminator of the common dominator. 
+ let location = if let Some(locations_in_common_dominator) = + usage_in_blocks.get(&common_dominator) + { + *locations_in_common_dominator + .first() + .expect("At least one location must have been found") + } else { + InstructionLocation::Terminator + }; + + self.allocation_points + .entry(common_dominator) + .or_default() + .entry(location) + .or_default() + .push(*constant_id); + } + } + + fn common_dominator(&self, block_ids: &[BasicBlockId]) -> BasicBlockId { + if block_ids.len() == 1 { + return block_ids[0]; + } + + let mut common_dominator = block_ids[0]; + + for block_id in block_ids.iter().skip(1) { + common_dominator = self.dominator_tree.common_dominator(common_dominator, *block_id); + } + + common_dominator + } +} + +pub(crate) fn is_constant_value(id: ValueId, dfg: &DataFlowGraph) -> bool { + matches!(&dfg[dfg.resolve(id)], Value::NumericConstant { .. } | Value::Array { .. }) +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index 52eded81919..73e88cee676 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -1,5 +1,6 @@ //! This module analyzes the liveness of variables (non-constant values) throughout a function. //! It uses the approach detailed in the section 4.2 of this paper https://inria.hal.science/inria-00558509v2/document + use crate::ssa::ir::{ basic_block::{BasicBlock, BasicBlockId}, cfg::ControlFlowGraph, @@ -13,6 +14,8 @@ use crate::ssa::ir::{ use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; +use super::constant_allocation::ConstantAllocation; + /// A back edge is an edge from a node to one of its ancestors. It denotes a loop in the CFG. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] struct BackEdge { @@ -42,7 +45,7 @@ fn find_back_edges( } /// Collects the underlying variables inside a value id. It might be more than one, for example in constant arrays that are constructed with multiple vars. -fn collect_variables_of_value(value_id: ValueId, dfg: &DataFlowGraph) -> Vec { +pub(crate) fn collect_variables_of_value(value_id: ValueId, dfg: &DataFlowGraph) -> Vec { let value_id = dfg.resolve(value_id); let value = &dfg[value_id]; @@ -52,7 +55,7 @@ fn collect_variables_of_value(value_id: ValueId, dfg: &DataFlowGraph) -> Vec { - let mut value_ids = Vec::new(); + let mut value_ids = vec![value_id]; array.iter().for_each(|item_id| { let underlying_ids = collect_variables_of_value(*item_id, dfg); @@ -61,19 +64,21 @@ fn collect_variables_of_value(value_id: ValueId, dfg: &DataFlowGraph) -> Vec { + vec![value_id] + } // Functions are not variables in a defunctionalized SSA. Only constant function values should appear. - Value::ForeignFunction(_) - | Value::Function(_) - | Value::Intrinsic(..) - // Constants are not treated as variables for the variable liveness analysis, since they are defined every time they are used. - | Value::NumericConstant { .. } => { + Value::ForeignFunction(_) | Value::Function(_) | Value::Intrinsic(..) 
=> { vec![] } } } -fn variables_used_in_instruction(instruction: &Instruction, dfg: &DataFlowGraph) -> Vec { - let mut used = Vec::new(); +pub(crate) fn variables_used_in_instruction( + instruction: &Instruction, + dfg: &DataFlowGraph, +) -> Variables { + let mut used = HashSet::default(); instruction.for_each_value(|value_id| { let underlying_ids = collect_variables_of_value(value_id, dfg); @@ -83,8 +88,8 @@ fn variables_used_in_instruction(instruction: &Instruction, dfg: &DataFlowGraph) used } -fn variables_used_in_block(block: &BasicBlock, dfg: &DataFlowGraph) -> Vec { - let mut used: Vec = block +fn variables_used_in_block(block: &BasicBlock, dfg: &DataFlowGraph) -> Variables { + let mut used: Variables = block .instructions() .iter() .flat_map(|instruction_id| { @@ -124,6 +129,7 @@ type LastUses = HashMap; pub(crate) struct VariableLiveness { cfg: ControlFlowGraph, post_order: PostOrder, + dominator_tree: DominatorTree, /// The variables that are alive before the block starts executing live_in: HashMap, /// The variables that stop being alive after each specific instruction @@ -134,13 +140,15 @@ pub(crate) struct VariableLiveness { impl VariableLiveness { /// Computes the liveness of variables throughout a function. 
- pub(crate) fn from_function(func: &Function) -> Self { + pub(crate) fn from_function(func: &Function, constants: &ConstantAllocation) -> Self { let cfg = ControlFlowGraph::with_function(func); let post_order = PostOrder::with_function(func); + let dominator_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order); let mut instance = Self { cfg, post_order, + dominator_tree, live_in: HashMap::default(), last_uses: HashMap::default(), param_definitions: HashMap::default(), @@ -148,7 +156,7 @@ impl VariableLiveness { instance.compute_block_param_definitions(func); - instance.compute_live_in_of_blocks(func); + instance.compute_live_in_of_blocks(func, constants); instance.compute_last_uses(func); @@ -182,8 +190,6 @@ impl VariableLiveness { } fn compute_block_param_definitions(&mut self, func: &Function) { - let tree = DominatorTree::with_cfg_and_post_order(&self.cfg, &self.post_order); - // Going in reverse post order to process the entry block first let mut reverse_post_order = Vec::new(); reverse_post_order.extend_from_slice(self.post_order.as_slice()); @@ -191,18 +197,19 @@ impl VariableLiveness { for block in reverse_post_order { let params = func.dfg[block].parameters(); // If it has no dominator, it's the entry block - let dominator_block = tree.immediate_dominator(block).unwrap_or(func.entry_block()); + let dominator_block = + self.dominator_tree.immediate_dominator(block).unwrap_or(func.entry_block()); let definitions_for_the_dominator = self.param_definitions.entry(dominator_block).or_default(); definitions_for_the_dominator.extend(params.iter()); } } - fn compute_live_in_of_blocks(&mut self, func: &Function) { + fn compute_live_in_of_blocks(&mut self, func: &Function, constants: &ConstantAllocation) { let back_edges = find_back_edges(func, &self.cfg, &self.post_order); // First pass, propagate up the live_ins skipping back edges - self.compute_live_in_recursive(func, func.entry_block(), &back_edges); + self.compute_live_in_recursive(func, 
func.entry_block(), &back_edges, constants); // Second pass, propagate header live_ins to the loop bodies for back_edge in back_edges { @@ -215,8 +222,11 @@ impl VariableLiveness { func: &Function, block_id: BasicBlockId, back_edges: &HashSet, + constants: &ConstantAllocation, ) { - let defined = self.compute_defined_variables(block_id, &func.dfg); + let mut defined = self.compute_defined_variables(block_id, &func.dfg); + + defined.extend(constants.allocated_in_block(block_id)); let block: &BasicBlock = &func.dfg[block_id]; @@ -227,7 +237,7 @@ impl VariableLiveness { for successor_id in block.successors() { if !back_edges.contains(&BackEdge { start: block_id, header: successor_id }) { if !self.live_in.contains_key(&successor_id) { - self.compute_live_in_recursive(func, successor_id, back_edges); + self.compute_live_in_recursive(func, successor_id, back_edges, constants); } live_out.extend( self.live_in @@ -332,6 +342,7 @@ impl VariableLiveness { mod test { use fxhash::FxHashSet; + use crate::brillig::brillig_gen::constant_allocation::ConstantAllocation; use crate::brillig::brillig_gen::variable_liveness::VariableLiveness; use crate::ssa::function_builder::FunctionBuilder; use crate::ssa::ir::function::RuntimeType; @@ -403,11 +414,18 @@ mod test { let ssa = builder.finish(); let func = ssa.main(); - let liveness = VariableLiveness::from_function(func); + let constants = ConstantAllocation::from_function(func); + let liveness = VariableLiveness::from_function(func, &constants); assert!(liveness.get_live_in(&func.entry_block()).is_empty()); - assert_eq!(liveness.get_live_in(&b2), &FxHashSet::from_iter([v3, v0].into_iter())); - assert_eq!(liveness.get_live_in(&b1), &FxHashSet::from_iter([v3, v1].into_iter())); + assert_eq!( + liveness.get_live_in(&b2), + &FxHashSet::from_iter([v3, v0, twenty_seven].into_iter()) + ); + assert_eq!( + liveness.get_live_in(&b1), + &FxHashSet::from_iter([v3, v1, twenty_seven].into_iter()) + ); assert_eq!(liveness.get_live_in(&b3), 
&FxHashSet::from_iter([v3].into_iter())); let block_1 = &func.dfg[b1]; @@ -415,11 +433,11 @@ mod test { let block_3 = &func.dfg[b3]; assert_eq!( liveness.get_last_uses(&b1).get(&block_1.instructions()[0]), - Some(&FxHashSet::from_iter([v1].into_iter())) + Some(&FxHashSet::from_iter([v1, twenty_seven].into_iter())) ); assert_eq!( liveness.get_last_uses(&b2).get(&block_2.instructions()[0]), - Some(&FxHashSet::from_iter([v0].into_iter())) + Some(&FxHashSet::from_iter([v0, twenty_seven].into_iter())) ); assert_eq!( liveness.get_last_uses(&b3).get(&block_3.instructions()[0]), @@ -548,7 +566,8 @@ mod test { let ssa = builder.finish(); let func = ssa.main(); - let liveness = VariableLiveness::from_function(func); + let constants = ConstantAllocation::from_function(func); + let liveness = VariableLiveness::from_function(func, &constants); assert!(liveness.get_live_in(&func.entry_block()).is_empty()); assert_eq!(liveness.get_live_in(&b1), &FxHashSet::from_iter([v0, v1, v3, v4].into_iter())); @@ -558,18 +577,21 @@ mod test { liveness.get_live_in(&b4), &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7].into_iter()) ); - assert_eq!(liveness.get_live_in(&b6), &FxHashSet::from_iter([v0, v1, v3, v4].into_iter())); + assert_eq!( + liveness.get_live_in(&b6), + &FxHashSet::from_iter([v0, v1, v3, v4, one].into_iter()) + ); assert_eq!( liveness.get_live_in(&b5), - &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7].into_iter()) + &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7, one].into_iter()) ); assert_eq!( liveness.get_live_in(&b7), - &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7].into_iter()) + &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7, one].into_iter()) ); assert_eq!( liveness.get_live_in(&b8), - &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7].into_iter()) + &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7, one].into_iter()) ); let block_3 = &func.dfg[b3]; @@ -621,7 +643,9 @@ mod test { let ssa = builder.finish(); let func = ssa.main(); - let liveness = 
VariableLiveness::from_function(func); + + let constants = ConstantAllocation::from_function(func); + let liveness = VariableLiveness::from_function(func, &constants); // Entry point defines its own params and also b3's params. assert_eq!(liveness.defined_block_params(&func.entry_block()), vec![v0, v1, v2]); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dom.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dom.rs index 15fa3bad38d..94f7a405c05 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dom.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dom.rs @@ -214,7 +214,7 @@ impl DominatorTree { /// Compute the common dominator of two basic blocks. /// /// Both basic blocks are assumed to be reachable. - fn common_dominator( + pub(crate) fn common_dominator( &self, mut block_a_id: BasicBlockId, mut block_b_id: BasicBlockId, From 157dd118896e101d654110f8b519fb059c3e7f4f Mon Sep 17 00:00:00 2001 From: Gregorio Juliana Date: Wed, 4 Sep 2024 14:17:37 +0200 Subject: [PATCH 03/18] fix: TXE logs in docker (#8365) Closes https://github.com/AztecProtocol/aztec-packages/issues/8270 Changed strategy when running `TXE` from aztec binary, now using compose to avoid logs being swallowed. Env variables `DEBUG` and `LOG_LEVEL` can be used as usual to control verbosity. This allows using `dep::aztec::oracle::debug_log::debug_log_format` and variants when running `aztec test`. 
--- aztec-up/bin/aztec | 21 ++++++++++++------- aztec-up/bin/docker-compose.test.yml | 11 ++++++---- .../testing_contracts/testing.md | 12 +++++++++++ .../token_contract/src/test/access_control.nr | 4 ++-- yarn-project/txe/src/index.ts | 4 ++-- .../txe/src/txe_service/txe_service.ts | 12 ++++++++++- 6 files changed, 47 insertions(+), 17 deletions(-) diff --git a/aztec-up/bin/aztec b/aztec-up/bin/aztec index 0989bb64469..998d05478de 100755 --- a/aztec-up/bin/aztec +++ b/aztec-up/bin/aztec @@ -20,6 +20,10 @@ function parse_ts_file { grep -oE "\| '[^']+'" "$LOCAL_TS_FILE" | sed "s/| '//; s/'//g" >"$LOCAL_ENV_VAR_FILE" } +function cleanup { + get_compose $@ down +} + CALLED_FROM=$PWD if [ "${1:-}" == "test" ]; then @@ -27,22 +31,23 @@ if [ "${1:-}" == "test" ]; then cd $(dirname $0)/.. # Compose file to use FILE_ARG="-f $HOME/.aztec/docker-compose.test.yml" + + # Set trap to catch SIGINT and call the cleanup function. + trap "cleanup $FILE_ARG" SIGINT + # Aztec contract test args for nargo - TEST_ARGS="$@ --silence-warnings --oracle-resolver http://aztec:8081" - get_compose -p aztec-test $FILE_ARG run -e NARGO_FOREIGN_CALL_TIMEOUT=300000 --workdir $CALLED_FROM --rm -it aztec-nargo $TEST_ARGS + export TEST_ARGS="$@ --silence-warnings --oracle-resolver http://txe:8081" + export NARGO_FOREIGN_CALL_TIMEOUT=300000 + export WORKDIR=$CALLED_FROM + get_compose -p aztec-test $FILE_ARG up --force-recreate --remove-orphans --abort-on-container-exit elif [ $# == 2 ] && [ "$1" == "start" ] && [ "$2" == "--sandbox" ]; then # Change working dir, so relative volume mounts are in the right place. cd $(dirname $0)/.. # Compose file to use FILE_ARG="-f $HOME/.aztec/docker-compose.sandbox.yml" - # Function to be executed when SIGINT is received. - cleanup() { - get_compose $FILE_ARG down - } - # Set trap to catch SIGINT and call the cleanup function. 
- trap cleanup SIGINT + trap "cleanup $FILE_ARG" SIGINT get_compose -p sandbox $FILE_ARG up --force-recreate --remove-orphans elif [ "${1:-}" == "start" ]; then diff --git a/aztec-up/bin/docker-compose.test.yml b/aztec-up/bin/docker-compose.test.yml index 604d3294e3c..91b48742c93 100644 --- a/aztec-up/bin/docker-compose.test.yml +++ b/aztec-up/bin/docker-compose.test.yml @@ -1,20 +1,23 @@ services: - aztec: + txe: image: "aztecprotocol/aztec" environment: DEBUG: # Loaded from the user shell if explicitly set + LOG_LEVEL: # Loaded from the user shell if explicitly set HOST_WORKDIR: "${PWD}" # Loaded from the user shell to show log files absolute path in host volumes: - ./log:/usr/src/yarn-project/aztec/log:rw - ${HOME}:${HOME} command: start --txe --port 8081 - + aztec-nargo: image: "aztecprotocol/aztec-nargo" environment: HOME: # Loaded from the user shell - NARGO_FOREIGN_CALL_TIMEOUT: 300000 # To avoid timeouts when many tests run at once + NARGO_FOREIGN_CALL_TIMEOUT: "${NARGO_FOREIGN_CALL_TIMEOUT}" # To avoid timeouts when many tests run at once + working_dir: "${WORKDIR}" + command: "${TEST_ARGS}" volumes: - ${HOME}:${HOME} depends_on: - - aztec \ No newline at end of file + - txe \ No newline at end of file diff --git a/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md b/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md index b6463a0f222..dc933c32927 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md +++ b/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md @@ -210,6 +210,18 @@ You can also use the `assert_public_call_fails` or `assert_private_call_fails` m #include_code assert_public_fail /noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_public.nr rust +### Logging + +You can use `aztec.nr`'s oracles as usual for debug logging, as explained [here](../../../../reference/developer_references/debugging.md) + 
+:::warning +Remember to set the following environment variables to activate debug logging: +```bash +export DEBUG="aztec:*" +export LOG_LEVEL="debug" +``` +::: + ### All Cheatcodes You can find the full list of cheatcodes available in the TXE [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr) diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr index 588cc71d9a4..7e83b3c16fe 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr @@ -42,11 +42,11 @@ unconstrained fn access_control() { // Impersonate original admin env.impersonate(owner); - // Try to set ourselves as admin, fail miserably + // Try to set ourselves as admin, fail miserably let set_admin_call_interface = Token::at(token_contract_address).set_admin(recipient); env.assert_public_call_fails(set_admin_call_interface); - // Try to revoke minter status to recipient, fail miserably + // Try to revoke minter status to recipient, fail miserably let set_minter_call_interface = Token::at(token_contract_address).set_minter(recipient, false); env.assert_public_call_fails(set_minter_call_interface); } diff --git a/yarn-project/txe/src/index.ts b/yarn-project/txe/src/index.ts index 3af1dee03de..3b03e89c2e6 100644 --- a/yarn-project/txe/src/index.ts +++ b/yarn-project/txe/src/index.ts @@ -65,14 +65,14 @@ class TXEDispatcher { this.logger.debug(`Calling ${functionName} on session ${sessionId}`); if (!TXESessions.has(sessionId) && functionName != 'reset') { - this.logger.info(`Creating new session ${sessionId}`); + this.logger.debug(`Creating new session ${sessionId}`); TXESessions.set(sessionId, await TXEService.init(this.logger)); } switch (functionName) { case 'reset': { 
TXESessions.delete(sessionId) && - this.logger.info(`Called reset on session ${sessionId}, yeeting it out of existence`); + this.logger.debug(`Called reset on session ${sessionId}, yeeting it out of existence`); return toForeignCallResult([]); } case 'deploy': { diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts index 36a429e0fda..8b57088aade 100644 --- a/yarn-project/txe/src/txe_service/txe_service.ts +++ b/yarn-project/txe/src/txe_service/txe_service.ts @@ -45,7 +45,7 @@ export class TXEService { const noteCache = new ExecutionNoteCache(txHash); const keyStore = new KeyStore(store); const txeDatabase = new TXEDatabase(store); - logger.info(`TXE service initialized`); + logger.debug(`TXE service initialized`); const txe = new TXE(logger, trees, packedValuesCache, noteCache, keyStore, txeDatabase); const service = new TXEService(logger, txe); await service.advanceBlocksBy(toSingle(new Fr(1n))); @@ -308,6 +308,16 @@ export class TXEService { return toForeignCallResult([toSingle(functionSelector.toField())]); } + async avmOpcodeChainId() { + const chainId = await (this.typedOracle as TXE).getChainId(); + return toForeignCallResult([toSingle(chainId)]); + } + + async avmOpcodeVersion() { + const version = await (this.typedOracle as TXE).getVersion(); + return toForeignCallResult([toSingle(version)]); + } + async packArgumentsArray(args: ForeignCallArray) { const packed = await this.typedOracle.packArgumentsArray(fromArray(args)); return toForeignCallResult([toSingle(packed)]); From 4ee69acf8588adb46d2e9369d5541fb04380c652 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 4 Sep 2024 09:25:15 -0300 Subject: [PATCH 04/18] fix: Split stores per component and split merkle tree operations (#8299) This PR packs two changesets: 1. Spins off a `MerkleTreeAdminOperations` from `MerkleTreeOperations`, to have an interface that does not expose methods that commit changes to the underlying store. 
World state now exposes a method to get an `ephemeralFork` without these operations, that can be used for answering public simulation requests. Note that we do not yet enforce that no changes go into the store, that requires more changes to have "readonly" versions of the trees and store all the way down. 2. Moves creation of the underlying data store to the factory of each component, so each creates a new db as needed, instead of sharing a single one. This allows to keep separate db files for p2p, archive, and world state. As a bonus, it makes forking world state cheaper. --- yarn-project/archiver/src/factory.ts | 8 ++-- .../aztec-node/src/aztec-node/server.test.ts | 10 ++--- .../aztec-node/src/aztec-node/server.ts | 31 ++++---------- .../aztec/src/cli/cmds/start_archiver.ts | 3 +- .../src/interfaces/merkle_tree_operations.ts | 7 ++-- .../end-to-end/src/e2e_block_building.test.ts | 4 +- .../src/e2e_prover/e2e_prover_test.ts | 4 -- .../src/fixtures/snapshot_manager.ts | 22 ++-------- .../e2e_public_testnet_transfer.test.ts | 7 +--- yarn-project/kv-store/src/utils.ts | 26 ++++++++---- yarn-project/p2p/src/client/index.ts | 15 +++++-- .../reqresp/p2p_client.integration.test.ts | 15 +++++-- .../prover-client/src/mocks/test_context.ts | 5 ++- yarn-project/prover-node/src/factory.ts | 10 +---- .../prover-node/src/prover-node.test.ts | 8 ++-- .../pxe/src/pxe_service/create_pxe_service.ts | 12 +++--- .../src/sequencer/sequencer.test.ts | 7 ++-- .../world-state/src/synchronizer/factory.ts | 7 ++-- .../server_world_state_synchronizer.test.ts | 4 +- .../server_world_state_synchronizer.ts | 28 ++++++++----- .../synchronizer/world_state_synchronizer.ts | 22 +++++----- .../world-state/src/world-state-db/index.ts | 2 +- .../src/world-state-db/merkle_tree_db.ts | 30 +++++++++----- .../merkle_tree_operations_facade.ts | 41 +++++++++++-------- .../src/world-state-db/merkle_trees.ts | 39 +++++++++++------- 25 files changed, 187 insertions(+), 180 deletions(-) diff --git 
a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index ad7d6cfcdb2..302af46afb1 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -1,4 +1,5 @@ -import { type AztecKVStore } from '@aztec/kv-store'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { createStore } from '@aztec/kv-store/utils'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -7,14 +8,13 @@ import { type ArchiverConfig } from './archiver/config.js'; import { KVArchiverDataStore } from './archiver/index.js'; import { createArchiverClient } from './rpc/archiver_client.js'; -export function createArchiver( +export async function createArchiver( config: ArchiverConfig, - store: AztecKVStore, telemetry: TelemetryClient = new NoopTelemetryClient(), opts: { blockUntilSync: boolean } = { blockUntilSync: true }, ) { if (!config.archiverUrl) { - // first create and sync the archiver + const store = await createStore('archiver', config, createDebugLogger('aztec:archiver:lmdb')); const archiverStore = new KVArchiverDataStore(store, config.maxLogs); return Archiver.createAndSync(config, archiverStore, telemetry, opts.blockUntilSync); } else { diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 4a62d116f51..f753431b918 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -4,12 +4,11 @@ import { type L1ToL2MessageSource, type L2BlockSource, type L2LogsSource, + type MerkleTreeAdminOperations, MerkleTreeId, - type MerkleTreeOperations, mockTxForRollup, } from '@aztec/circuit-types'; import { AztecAddress, EthAddress, Fr, GasFees, GlobalVariables, MaxBlockNumber } from '@aztec/circuits.js'; -import { type AztecLmdbStore } from '@aztec/kv-store/lmdb'; import { type P2P } from '@aztec/p2p'; 
import { type GlobalVariableBuilder } from '@aztec/sequencer-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -24,7 +23,7 @@ import { AztecNodeService } from './server.js'; describe('aztec node', () => { let p2p: MockProxy; let globalVariablesBuilder: MockProxy; - let merkleTreeOps: MockProxy; + let merkleTreeOps: MockProxy; let lastBlockNumber: number; @@ -42,7 +41,7 @@ describe('aztec node', () => { p2p = mock(); globalVariablesBuilder = mock(); - merkleTreeOps = mock(); + merkleTreeOps = mock(); const worldState = mock({ getLatest: () => merkleTreeOps, @@ -59,8 +58,6 @@ describe('aztec node', () => { // all txs use the same allowed FPC class const contractSource = mock(); - const store = mock(); - const aztecNodeConfig: AztecNodeConfig = getConfigEnvVars(); node = new AztecNodeService( @@ -86,7 +83,6 @@ describe('aztec node', () => { 31337, 1, globalVariablesBuilder, - store, new TestCircuitVerifier(), new NoopTelemetryClient(), ); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index da98547c0bf..ed438882142 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -50,10 +50,9 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; import { padArrayEnd } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; -import { type AztecKVStore } from '@aztec/kv-store'; -import { createStore, openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/utils'; import { SHA256Trunc, StandardTree, UnbalancedTree } from '@aztec/merkle-tree'; -import { AztecKVTxPool, InMemoryAttestationPool, type P2P, createP2PClient } from '@aztec/p2p'; +import { InMemoryAttestationPool, type P2P, createP2PClient } from '@aztec/p2p'; import { getCanonicalClassRegisterer } from 
'@aztec/protocol-contracts/class-registerer'; import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getCanonicalInstanceDeployer } from '@aztec/protocol-contracts/instance-deployer'; @@ -77,7 +76,7 @@ import { type ProtocolContractAddresses, } from '@aztec/types/contracts'; import { createValidatorClient } from '@aztec/validator-client'; -import { MerkleTrees, type WorldStateSynchronizer, createWorldStateSynchronizer } from '@aztec/world-state'; +import { type WorldStateSynchronizer, createWorldStateSynchronizer } from '@aztec/world-state'; import { type AztecNodeConfig, getPackageInfo } from './config.js'; import { NodeMetrics } from './node_metrics.js'; @@ -104,7 +103,6 @@ export class AztecNodeService implements AztecNode { protected readonly l1ChainId: number, protected readonly version: number, protected readonly globalVariableBuilder: GlobalVariableBuilder, - protected readonly merkleTreesDb: AztecKVStore, private proofVerifier: ClientProtocolCircuitVerifier, private telemetry: TelemetryClient, private log = createDebugLogger('aztec:node'), @@ -131,7 +129,6 @@ export class AztecNodeService implements AztecNode { config: AztecNodeConfig, telemetry?: TelemetryClient, log = createDebugLogger('aztec:node'), - storeLog = createDebugLogger('aztec:node:lmdb'), ): Promise { telemetry ??= new NoopTelemetryClient(); const ethereumChain = createEthereumChain(config.l1RpcUrl, config.l1ChainId); @@ -142,25 +139,17 @@ export class AztecNodeService implements AztecNode { ); } - const store = await createStore(config, config.l1Contracts.rollupAddress, storeLog); - - const archiver = await createArchiver(config, store, telemetry, { blockUntilSync: true }); + const archiver = await createArchiver(config, telemetry, { blockUntilSync: true }); // we identify the P2P transaction protocol by using the rollup contract address. 
// this may well change in future config.transactionProtocol = `/aztec/tx/${config.l1Contracts.rollupAddress.toString()}`; // create the tx pool and the p2p client, which will need the l2 block source - const p2pClient = await createP2PClient( - config, - store, - new AztecKVTxPool(store, telemetry), - new InMemoryAttestationPool(), - archiver, - ); + const p2pClient = await createP2PClient(config, new InMemoryAttestationPool(), archiver, telemetry); // now create the merkle trees and the world state synchronizer - const worldStateSynchronizer = await createWorldStateSynchronizer(config, store, archiver, telemetry); + const worldStateSynchronizer = await createWorldStateSynchronizer(config, archiver, telemetry); // start both and wait for them to sync from the block source await Promise.all([p2pClient.start(), worldStateSynchronizer.start()]); @@ -199,7 +188,6 @@ export class AztecNodeService implements AztecNode { ethereumChain.chainInfo.id, config.version, new GlobalVariableBuilder(config), - store, proofVerifier, telemetry, log, @@ -726,13 +714,8 @@ export class AztecNodeService implements AztecNode { ); const prevHeader = (await this.blockSource.getBlock(-1))?.header; - // Instantiate merkle trees so uncommitted updates by this simulation are local to it. - // TODO we should be able to remove this after https://github.com/AztecProtocol/aztec-packages/issues/1869 - // So simulation of public functions doesn't affect the merkle trees. 
- const merkleTrees = await MerkleTrees.new(this.merkleTreesDb, new NoopTelemetryClient(), this.log); - const publicProcessorFactory = new PublicProcessorFactory( - merkleTrees.asLatest(), + await this.worldStateSynchronizer.ephemeralFork(), this.contractDataSource, new WASMSimulator(), this.telemetry, diff --git a/yarn-project/aztec/src/cli/cmds/start_archiver.ts b/yarn-project/aztec/src/cli/cmds/start_archiver.ts index a8675164ae1..d9e0c092aa7 100644 --- a/yarn-project/aztec/src/cli/cmds/start_archiver.ts +++ b/yarn-project/aztec/src/cli/cmds/start_archiver.ts @@ -21,8 +21,7 @@ export const startArchiver = async (options: any, signalHandlers: (() => Promise const archiverConfig = extractRelevantOptions(options, archiverConfigMappings, 'archiver'); const storeLog = createDebugLogger('aztec:archiver:lmdb'); - const rollupAddress = archiverConfig.l1Contracts.rollupAddress; - const store = await createStore(archiverConfig, rollupAddress, storeLog); + const store = await createStore('archiver', archiverConfig, storeLog); const archiverStore = new KVArchiverDataStore(store, archiverConfig.maxLogs); const telemetry = await createAndStartTelemetryClient(getTelemetryClientConfig()); diff --git a/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts b/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts index 2738dbc99e0..af16730d969 100644 --- a/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts +++ b/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts @@ -84,9 +84,7 @@ type LeafTypes = { export type MerkleTreeLeafType = LeafTypes[ID]; -/** - * Defines the interface for operations on a set of Merkle Trees. - */ +/** Defines the interface for operations on a set of Merkle Trees. */ export interface MerkleTreeOperations { /** * Appends leaves to a given tree. 
@@ -203,7 +201,10 @@ export interface MerkleTreeOperations { leaves: Buffer[], subtreeHeight: number, ): Promise>; +} +/** Operations on merkle trees world state that can modify the underlying store. */ +export interface MerkleTreeAdminOperations extends MerkleTreeOperations { /** * Handles a single L2 block (i.e. Inserts the new note hashes into the merkle tree). * @param block - The L2 block to handle. diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 92f830001ad..cb4655f5561 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -339,7 +339,7 @@ describe('e2e_block_building', () => { }); // Regression for https://github.com/AztecProtocol/aztec-packages/issues/8306 - it.skip('can simulate public txs while building a block', async () => { + it('can simulate public txs while building a block', async () => { ({ teardown, pxe, @@ -368,7 +368,7 @@ describe('e2e_block_building', () => { } logger.info('Waiting for txs to be mined'); - await Promise.all(txs.map(tx => tx.wait())); + await Promise.all(txs.map(tx => tx.wait({ proven: false, timeout: 600 }))); }); }); }); diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index b75763aa6dc..e53fb312ec5 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -17,7 +17,6 @@ import { deployL1Contract, } from '@aztec/aztec.js'; import { BBCircuitVerifier } from '@aztec/bb-prover'; -import { createStore } from '@aztec/kv-store/utils'; import { RollupAbi } from '@aztec/l1-artifacts'; import { TokenContract } from '@aztec/noir-contracts.js'; import { type ProverNode, type ProverNodeConfig, createProverNode } from '@aztec/prover-node'; @@ -225,11 +224,8 @@ export class FullProverTest { // Creating temp store 
and archiver for fully proven prover node this.logger.verbose('Starting archiver for new prover node'); - const store = await createStore({ dataDirectory: undefined }, this.l1Contracts.l1ContractAddresses.rollupAddress); - const archiver = await createArchiver( { ...this.context.aztecNodeConfig, dataDirectory: undefined }, - store, new NoopTelemetryClient(), { blockUntilSync: true }, ); diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 6b821026179..dac9a054a04 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -8,7 +8,6 @@ import { type CompleteAddress, type DebugLogger, type DeployL1Contracts, - type EthAddress, EthCheatCodes, Fr, GrumpkinScalar, @@ -23,7 +22,6 @@ import { asyncMap } from '@aztec/foundation/async-map'; import { type Logger, createDebugLogger } from '@aztec/foundation/log'; import { makeBackoff, retry } from '@aztec/foundation/retry'; import { resolver, reviver } from '@aztec/foundation/serialize'; -import { createStore } from '@aztec/kv-store/utils'; import { type ProverNode, type ProverNodeConfig, createProverNode } from '@aztec/prover-node'; import { type PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -235,21 +233,13 @@ async function teardown(context: SubsystemsContext | undefined) { } export async function createAndSyncProverNode( - rollupAddress: EthAddress, proverNodePrivateKey: `0x${string}`, aztecNodeConfig: AztecNodeConfig, aztecNode: AztecNode, ) { // Creating temp store and archiver for simulated prover node - - const store = await createStore({ dataDirectory: undefined }, rollupAddress); - - const archiver = await createArchiver( - { ...aztecNodeConfig, dataDirectory: undefined }, - store, - new NoopTelemetryClient(), - { blockUntilSync: true }, - ); + const archiverConfig = 
{ ...aztecNodeConfig, dataDirectory: undefined }; + const archiver = await createArchiver(archiverConfig, new NoopTelemetryClient(), { blockUntilSync: true }); // Prover node config is for simulated proofs const proverConfig: ProverNodeConfig = { @@ -335,7 +325,6 @@ async function setupFromFresh( logger.verbose('Creating and syncing a simulated prover node...'); const proverNode = await createAndSyncProverNode( - deployL1ContractsValues.l1ContractAddresses.rollupAddress, `0x${proverNodePrivateKey!.toString('hex')}`, aztecNodeConfig, aztecNode, @@ -433,12 +422,7 @@ async function setupFromState(statePath: string, logger: Logger): Promise { ? `0x${proverNodePrivateKey?.toString('hex')}` : proverConfig.publisherPrivateKey; - proverNode = await createAndSyncProverNode( - config.l1Contracts.rollupAddress, - proverConfig.publisherPrivateKey, - config, - aztecNode, - ); + proverNode = await createAndSyncProverNode(proverConfig.publisherPrivateKey, config, aztecNode); }, 600_000); afterEach(async () => { diff --git a/yarn-project/kv-store/src/utils.ts b/yarn-project/kv-store/src/utils.ts index 3c33d571e77..640f50932c3 100644 --- a/yarn-project/kv-store/src/utils.ts +++ b/yarn-project/kv-store/src/utils.ts @@ -1,17 +1,25 @@ import { type EthAddress } from '@aztec/foundation/eth-address'; import { type Logger, createDebugLogger } from '@aztec/foundation/log'; +import { join } from 'path'; + import { type AztecKVStore } from './interfaces/store.js'; import { AztecLmdbStore } from './lmdb/store.js'; -export function createStore( - config: { dataDirectory: string | undefined } | (string | undefined), - rollupAddress: EthAddress, - log: Logger = createDebugLogger('aztec:kv-store'), -) { - const dataDirectory = typeof config === 'string' ? config : config?.dataDirectory; - log.info(dataDirectory ? 
`Creating data store at directory ${dataDirectory}` : 'Creating ephemeral data store'); - return initStoreForRollup(AztecLmdbStore.open(dataDirectory, false), rollupAddress, log); +export type DataStoreConfig = { dataDirectory: string | undefined; l1Contracts: { rollupAddress: EthAddress } }; + +export function createStore(name: string, config: DataStoreConfig, log: Logger = createDebugLogger('aztec:kv-store')) { + let { dataDirectory } = config; + if (typeof dataDirectory !== 'undefined') { + dataDirectory = join(dataDirectory, name); + } + + log.info( + dataDirectory + ? `Creating ${name} data store at directory ${dataDirectory}` + : `Creating ${name} ephemeral data store`, + ); + return initStoreForRollup(AztecLmdbStore.open(dataDirectory, false), config.l1Contracts.rollupAddress, log); } /** @@ -21,7 +29,7 @@ export function createStore( * @param rollupAddress - The ETH address of the rollup contract * @returns A promise that resolves when the store is cleared, or rejects if the rollup address does not match */ -export async function initStoreForRollup( +async function initStoreForRollup( store: T, rollupAddress: EthAddress, log?: Logger, diff --git a/yarn-project/p2p/src/client/index.ts b/yarn-project/p2p/src/client/index.ts index d2316c4db76..518387edfa9 100644 --- a/yarn-project/p2p/src/client/index.ts +++ b/yarn-project/p2p/src/client/index.ts @@ -1,5 +1,9 @@ import { type L2BlockSource } from '@aztec/circuit-types'; +import { createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from '@aztec/kv-store'; +import { type DataStoreConfig, createStore } from '@aztec/kv-store/utils'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type AttestationPool } from '../attestation_pool/attestation_pool.js'; import { P2PClient } from '../client/p2p_client.js'; @@ -7,18 +11,21 @@ import { type P2PConfig } from '../config.js'; import { DiscV5Service } 
from '../service/discV5_service.js'; import { DummyP2PService } from '../service/dummy_service.js'; import { LibP2PService, createLibP2PPeerId } from '../service/index.js'; -import { type TxPool } from '../tx_pool/index.js'; +import { AztecKVTxPool, type TxPool } from '../tx_pool/index.js'; import { getPublicIp, resolveAddressIfNecessary, splitAddressPort } from '../util.js'; export * from './p2p_client.js'; export const createP2PClient = async ( - config: P2PConfig, - store: AztecKVStore, - txPool: TxPool, + config: P2PConfig & DataStoreConfig, attestationsPool: AttestationPool, l2BlockSource: L2BlockSource, + telemetry: TelemetryClient = new NoopTelemetryClient(), + deps: { txPool?: TxPool; store?: AztecKVStore } = {}, ) => { + const store = deps.store ?? (await createStore('p2p', config, createDebugLogger('aztec:p2p:lmdb'))); + const txPool = deps.txPool ?? new AztecKVTxPool(store, telemetry); + let p2pService; if (config.p2pEnabled) { diff --git a/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts b/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts index 307324f783f..8ec358ee2f4 100644 --- a/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts +++ b/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts @@ -1,9 +1,10 @@ // An integration test for the p2p client to test req resp protocols import { mockTx } from '@aztec/circuit-types'; +import { EthAddress } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { type DataStoreConfig, openTmpStore } from '@aztec/kv-store/utils'; import { describe, expect, it, jest } from '@jest/globals'; import { generatePrivateKey } from 'viem/accounts'; @@ -73,7 +74,7 @@ describe('Req Resp p2p client integration', () => { // Note these bindings are important const addr = 
`127.0.0.1:${i + 1 + BOOT_NODE_UDP_PORT}`; const listenAddr = `0.0.0.0:${i + 1 + BOOT_NODE_UDP_PORT}`; - const config: P2PConfig = { + const config: P2PConfig & DataStoreConfig = { p2pEnabled: true, peerIdPrivateKey: peerIdPrivateKeys[i], tcpListenAddress: listenAddr, // run on port 0 @@ -89,6 +90,8 @@ describe('Req Resp p2p client integration', () => { maxPeerCount: 10, keepProvenTxsInPoolFor: 0, queryForIp: false, + dataDirectory: undefined, + l1Contracts: { rollupAddress: EthAddress.ZERO }, }; txPool = { @@ -112,12 +115,16 @@ describe('Req Resp p2p client integration', () => { blockSource = new MockBlockSource(); kvStore = openTmpStore(); + const deps = { + txPool: txPool as unknown as TxPool, + store: kvStore, + }; const client = await createP2PClient( config, - kvStore, - txPool as unknown as TxPool, attestationPool as unknown as AttestationPool, blockSource, + undefined, + deps, ); await client.start(); diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 46b4f30a68a..16e19745b0a 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -1,6 +1,7 @@ import { type BBProverConfig } from '@aztec/bb-prover'; import { type BlockProver, + type MerkleTreeAdminOperations, type ProcessedTx, type PublicExecutionRequest, type ServerCircuitProver, @@ -23,7 +24,7 @@ import { type WorldStatePublicDB, } from '@aztec/simulator'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; -import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; +import { MerkleTrees } from '@aztec/world-state'; import * as fs from 'fs/promises'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -42,7 +43,7 @@ export class TestContext { public publicProcessor: PublicProcessor, public simulationProvider: SimulationProvider, public globalVariables: GlobalVariables, - public actualDb: MerkleTreeOperations, + public 
actualDb: MerkleTreeAdminOperations, public prover: ServerCircuitProver, public proverAgent: ProverAgent, public orchestrator: ProvingOrchestrator, diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index 48045ef33da..7e9c31e8cbf 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -1,7 +1,6 @@ import { type Archiver, createArchiver } from '@aztec/archiver'; import { type AztecNode } from '@aztec/circuit-types'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; -import { createStore } from '@aztec/kv-store/utils'; import { createProverClient } from '@aztec/prover-client'; import { L1Publisher } from '@aztec/sequencer-client'; import { createSimulationProvider } from '@aztec/simulator'; @@ -20,22 +19,17 @@ export async function createProverNode( deps: { telemetry?: TelemetryClient; log?: DebugLogger; - storeLog?: DebugLogger; aztecNodeTxProvider?: AztecNode; archiver?: Archiver; } = {}, ) { const telemetry = deps.telemetry ?? new NoopTelemetryClient(); const log = deps.log ?? createDebugLogger('aztec:prover'); - const storeLog = deps.storeLog ?? createDebugLogger('aztec:prover:lmdb'); - - const store = await createStore(config, config.l1Contracts.rollupAddress, storeLog); - - const archiver = deps.archiver ?? (await createArchiver(config, store, telemetry, { blockUntilSync: true })); + const archiver = deps.archiver ?? 
(await createArchiver(config, telemetry, { blockUntilSync: true })); log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`); const worldStateConfig = { ...config, worldStateProvenBlocksOnly: true }; - const worldStateSynchronizer = await createWorldStateSynchronizer(worldStateConfig, store, archiver, telemetry); + const worldStateSynchronizer = await createWorldStateSynchronizer(worldStateConfig, archiver, telemetry); await worldStateSynchronizer.start(); const simulationProvider = await createSimulationProvider(config, log); diff --git a/yarn-project/prover-node/src/prover-node.test.ts b/yarn-project/prover-node/src/prover-node.test.ts index 97fdda86c1b..b8330eff75c 100644 --- a/yarn-project/prover-node/src/prover-node.test.ts +++ b/yarn-project/prover-node/src/prover-node.test.ts @@ -1,7 +1,7 @@ import { type L1ToL2MessageSource, type L2BlockSource, - type MerkleTreeOperations, + type MerkleTreeAdminOperations, type ProverClient, type TxProvider, } from '@aztec/circuit-types'; @@ -32,7 +32,7 @@ describe('prover-node', () => { let jobs: { job: MockProxy; cleanUp: (job: BlockProvingJob) => Promise; - db: MerkleTreeOperations; + db: MerkleTreeAdminOperations; }[]; beforeEach(() => { @@ -47,7 +47,7 @@ describe('prover-node', () => { const telemetryClient = new NoopTelemetryClient(); // World state returns a new mock db every time it is asked to fork - worldState.syncImmediateAndFork.mockImplementation(() => Promise.resolve(mock())); + worldState.syncImmediateAndFork.mockImplementation(() => Promise.resolve(mock())); jobs = []; proverNode = new TestProverNode( @@ -161,7 +161,7 @@ describe('prover-node', () => { class TestProverNode extends ProverNode { protected override doCreateBlockProvingJob( - db: MerkleTreeOperations, + db: MerkleTreeAdminOperations, _publicProcessorFactory: PublicProcessorFactory, cleanUp: (job: BlockProvingJob) => Promise, ): BlockProvingJob { diff --git a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts 
b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts index 30d96288cca..e50e14f5339 100644 --- a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts @@ -11,8 +11,6 @@ import { getCanonicalInstanceDeployer } from '@aztec/protocol-contracts/instance import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; import { getCanonicalMultiCallEntrypointContract } from '@aztec/protocol-contracts/multi-call-entrypoint'; -import { join } from 'path'; - import { type PXEServiceConfig } from '../config/index.js'; import { KVPxeDatabase } from '../database/kv_pxe_database.js'; import { TestPrivateKernelProver } from '../kernel_prover/test/test_circuit_prover.js'; @@ -38,12 +36,12 @@ export async function createPXEService( const logSuffix = typeof useLogSuffix === 'boolean' ? (useLogSuffix ? randomBytes(3).toString('hex') : undefined) : useLogSuffix; - const pxeDbPath = config.dataDirectory ? join(config.dataDirectory, 'pxe_data') : undefined; - const keyStorePath = config.dataDirectory ? join(config.dataDirectory, 'pxe_key_store') : undefined; const l1Contracts = await aztecNode.getL1ContractAddresses(); - - const keyStore = new KeyStore(await createStore(keyStorePath, l1Contracts.rollupAddress)); - const db = new KVPxeDatabase(await createStore(pxeDbPath, l1Contracts.rollupAddress)); + const storeConfig = { dataDirectory: config.dataDirectory, l1Contracts }; + const keyStore = new KeyStore( + await createStore('pxe_key_store', storeConfig, createDebugLogger('aztec:pxe:keystore:lmdb')), + ); + const db = new KVPxeDatabase(await createStore('pxe_data', storeConfig, createDebugLogger('aztec:pxe:data:lmdb'))); const prover = proofCreator ?? 
(await createProver(config, logSuffix)); const server = new PXEService(keyStore, aztecNode, db, prover, config, logSuffix); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index d3a8ec1fc82..462d6687dad 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -5,6 +5,7 @@ import { type L1ToL2MessageSource, L2Block, type L2BlockSource, + type MerkleTreeAdminOperations, MerkleTreeId, PROVING_STATUS, type ProvingSuccess, @@ -34,7 +35,7 @@ import { type PublicProcessor, type PublicProcessorFactory } from '@aztec/simula import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type ContractDataSource } from '@aztec/types/contracts'; import { type ValidatorClient } from '@aztec/validator-client'; -import { type MerkleTreeOperations, WorldStateRunningState, type WorldStateSynchronizer } from '@aztec/world-state'; +import { WorldStateRunningState, type WorldStateSynchronizer } from '@aztec/world-state'; import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; @@ -51,7 +52,7 @@ describe('sequencer', () => { let p2p: MockProxy; let worldState: MockProxy; let blockSimulator: MockProxy; - let merkleTreeOps: MockProxy; + let merkleTreeOps: MockProxy; let publicProcessor: MockProxy; let l2BlockSource: MockProxy; let l1ToL2MessageSource: MockProxy; @@ -112,7 +113,7 @@ describe('sequencer', () => { publisher.validateBlockForSubmission.mockResolvedValue(); globalVariableBuilder = mock(); - merkleTreeOps = mock(); + merkleTreeOps = mock(); blockSimulator = mock(); p2p = mock({ diff --git a/yarn-project/world-state/src/synchronizer/factory.ts b/yarn-project/world-state/src/synchronizer/factory.ts index e2d37d4a542..323fe6382c2 100644 --- a/yarn-project/world-state/src/synchronizer/factory.ts +++ b/yarn-project/world-state/src/synchronizer/factory.ts @@ -1,5 +1,6 @@ import { type 
L1ToL2MessageSource, type L2BlockSource } from '@aztec/circuit-types'; -import { type AztecKVStore } from '@aztec/kv-store'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { type DataStoreConfig, createStore } from '@aztec/kv-store/utils'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { MerkleTrees } from '../world-state-db/merkle_trees.js'; @@ -7,11 +8,11 @@ import { type WorldStateConfig } from './config.js'; import { ServerWorldStateSynchronizer } from './server_world_state_synchronizer.js'; export async function createWorldStateSynchronizer( - config: WorldStateConfig, - store: AztecKVStore, + config: WorldStateConfig & DataStoreConfig, l2BlockSource: L2BlockSource & L1ToL2MessageSource, client: TelemetryClient, ) { + const store = await createStore('world-state', config, createDebugLogger('aztec:world-state:lmdb')); const merkleTrees = await MerkleTrees.new(store, client); return new ServerWorldStateSynchronizer(store, merkleTrees, l2BlockSource, config); } diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts index 7e585c12ae7..f46aa23879d 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts @@ -11,7 +11,7 @@ import { INITIAL_LEAF, Pedersen, SHA256Trunc, StandardTree } from '@aztec/merkle import { jest } from '@jest/globals'; import { mock } from 'jest-mock-extended'; -import { type MerkleTreeDb, type MerkleTrees, type WorldStateConfig } from '../index.js'; +import { type MerkleTreeAdminDb, type MerkleTrees, type WorldStateConfig } from '../index.js'; import { ServerWorldStateSynchronizer } from './server_world_state_synchronizer.js'; import { WorldStateRunningState } from './world_state_synchronizer.js'; @@ -39,7 +39,7 @@ 
describe('server_world_state_synchronizer', () => { getL1ToL2Messages: jest.fn(() => Promise.resolve(l1ToL2Messages)), }); - const merkleTreeDb = mock({ + const merkleTreeDb = mock({ getTreeInfo: jest.fn(() => Promise.resolve({ depth: 8, treeId: MerkleTreeId.NOTE_HASH_TREE, root: Buffer.alloc(32, 0), size: 0n }), ), diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts index a4512b41048..ce188d1eaa1 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts @@ -4,6 +4,7 @@ import { type L2Block, L2BlockDownloader, type L2BlockSource, + type MerkleTreeAdminOperations, } from '@aztec/circuit-types'; import { type L2BlockHandledStats } from '@aztec/circuit-types/stats'; import { L1_TO_L2_MSG_SUBTREE_HEIGHT } from '@aztec/circuits.js/constants'; @@ -16,8 +17,11 @@ import { type AztecKVStore, type AztecSingleton } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/utils'; import { SHA256Trunc, StandardTree } from '@aztec/merkle-tree'; -import { type MerkleTreeOperations, type MerkleTrees } from '../world-state-db/index.js'; -import { MerkleTreeOperationsFacade } from '../world-state-db/merkle_tree_operations_facade.js'; +import { type MerkleTrees } from '../world-state-db/index.js'; +import { + MerkleTreeAdminOperationsFacade, + MerkleTreeOperationsFacade, +} from '../world-state-db/merkle_tree_operations_facade.js'; import { MerkleTreeSnapshotOperationsFacade } from '../world-state-db/merkle_tree_snapshot_operations_facade.js'; import { type WorldStateConfig } from './config.js'; import { @@ -62,21 +66,25 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { }); } - public getLatest(): MerkleTreeOperations { - return new MerkleTreeOperationsFacade(this.merkleTreeDb, true); + public getLatest(): 
MerkleTreeAdminOperations { + return new MerkleTreeAdminOperationsFacade(this.merkleTreeDb, true); } - public getCommitted(): MerkleTreeOperations { - return new MerkleTreeOperationsFacade(this.merkleTreeDb, false); + public getCommitted(): MerkleTreeAdminOperations { + return new MerkleTreeAdminOperationsFacade(this.merkleTreeDb, false); } - public getSnapshot(blockNumber: number): MerkleTreeOperations { + public getSnapshot(blockNumber: number): MerkleTreeAdminOperations { return new MerkleTreeSnapshotOperationsFacade(this.merkleTreeDb, blockNumber); } - private async getFork(includeUncommitted: boolean): Promise { + public async ephemeralFork(): Promise { + return new MerkleTreeOperationsFacade(await this.merkleTreeDb.ephemeralFork(), true); + } + + private async getFork(includeUncommitted: boolean): Promise { this.log.verbose(`Forking world state at ${this.blockNumber.get()}`); - return new MerkleTreeOperationsFacade(await this.merkleTreeDb.fork(), includeUncommitted); + return new MerkleTreeAdminOperationsFacade(await this.merkleTreeDb.fork(), includeUncommitted); } public async start() { @@ -212,7 +220,7 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { public async syncImmediateAndFork( targetBlockNumber: number, forkIncludeUncommitted: boolean, - ): Promise { + ): Promise { try { await this.pause(); await this.syncImmediate(targetBlockNumber); diff --git a/yarn-project/world-state/src/synchronizer/world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/world_state_synchronizer.ts index 0411827e10d..0fb17dbe508 100644 --- a/yarn-project/world-state/src/synchronizer/world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/world_state_synchronizer.ts @@ -1,4 +1,4 @@ -import { type MerkleTreeOperations } from '../world-state-db/index.js'; +import { type MerkleTreeAdminOperations, type MerkleTreeOperations } from '../world-state-db/index.js'; /** * Defines the possible states of the world state 
synchronizer. @@ -58,24 +58,26 @@ export interface WorldStateSynchronizer { * @param forkIncludeUncommitted - Whether to include uncommitted data in the fork. * @returns The db forked at the requested target block number. */ - syncImmediateAndFork(targetBlockNumber: number, forkIncludeUncommitted: boolean): Promise; + syncImmediateAndFork(targetBlockNumber: number, forkIncludeUncommitted: boolean): Promise; /** - * Returns an instance of MerkleTreeOperations that will include uncommitted data. - * @returns An instance of MerkleTreeOperations that will include uncommitted data. + * Forks the current in-memory state based off the current committed state, and returns an instance that cannot modify the underlying data store. */ - getLatest(): MerkleTreeOperations; + ephemeralFork(): Promise; /** - * Returns an instance of MerkleTreeOperations that will not include uncommitted data. - * @returns An instance of MerkleTreeOperations that will not include uncommitted data. + * Returns an instance of MerkleTreeAdminOperations that will include uncommitted data. */ - getCommitted(): MerkleTreeOperations; + getLatest(): MerkleTreeAdminOperations; /** - * Returns a readonly instance of MerkleTreeOperations where the state is as it was at the given block number + * Returns an instance of MerkleTreeAdminOperations that will not include uncommitted data. 
+ */ + getCommitted(): MerkleTreeAdminOperations; + + /** + * Returns a readonly instance of MerkleTreeAdminOperations where the state is as it was at the given block number * @param block - The block number to look at - * @returns An instance of MerkleTreeOperations */ getSnapshot(block: number): MerkleTreeOperations; } diff --git a/yarn-project/world-state/src/world-state-db/index.ts b/yarn-project/world-state/src/world-state-db/index.ts index 63ec2e7ba65..44e00549965 100644 --- a/yarn-project/world-state/src/world-state-db/index.ts +++ b/yarn-project/world-state/src/world-state-db/index.ts @@ -3,4 +3,4 @@ export * from './merkle_tree_db.js'; export * from './merkle_tree_operations_facade.js'; export * from './merkle_tree_snapshot_operations_facade.js'; -export { MerkleTreeOperations } from '@aztec/circuit-types/interfaces'; +export { MerkleTreeOperations, MerkleTreeAdminOperations } from '@aztec/circuit-types/interfaces'; diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts index 6471a3e6e03..bf4d97e3924 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts @@ -1,5 +1,5 @@ import { type MerkleTreeId } from '@aztec/circuit-types'; -import { type MerkleTreeOperations } from '@aztec/circuit-types/interfaces'; +import { type MerkleTreeAdminOperations, type MerkleTreeOperations } from '@aztec/circuit-types/interfaces'; import { type Fr, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX } from '@aztec/circuits.js'; import { type IndexedTreeSnapshot, type TreeSnapshot } from '@aztec/merkle-tree'; @@ -32,13 +32,9 @@ type WithIncludeUncommitted = F extends (...args: [...infer Rest]) => infer R /** * Defines the names of the setters on Merkle Trees. 
*/ -type MerkleTreeSetters = - | 'appendLeaves' - | 'updateLeaf' - | 'commit' - | 'rollback' - | 'handleL2BlockAndMessages' - | 'batchInsert'; +type MerkleTreeSetters = 'appendLeaves' | 'updateLeaf' | 'batchInsert'; + +type MerkleTreeAdmin = 'commit' | 'rollback' | 'handleL2BlockAndMessages'; export type TreeSnapshots = { [MerkleTreeId.NULLIFIER_TREE]: IndexedTreeSnapshot; @@ -48,9 +44,7 @@ export type TreeSnapshots = { [MerkleTreeId.ARCHIVE]: TreeSnapshot; }; -/** - * Defines the interface for operations on a set of Merkle Trees configuring whether to return committed or uncommitted data. - */ +/** Defines the interface for operations on a set of Merkle Trees configuring whether to return committed or uncommitted data. */ export type MerkleTreeDb = { [Property in keyof MerkleTreeOperations as Exclude]: WithIncludeUncommitted< MerkleTreeOperations[Property] @@ -61,6 +55,20 @@ export type MerkleTreeDb = { * @param block - The block number to take the snapshot at. */ getSnapshot(block: number): Promise; + }; + +/** Extends operations on MerkleTreeDb to include modifying the underlying store */ +export type MerkleTreeAdminDb = { + [Property in keyof MerkleTreeAdminOperations as Exclude< + Property, + MerkleTreeSetters | MerkleTreeAdmin + >]: WithIncludeUncommitted; +} & Pick & { + /** + * Returns a snapshot of the current state of the trees. + * @param block - The block number to take the snapshot at. + */ + getSnapshot(block: number): Promise; /** * Forks the database at its current state. 
diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts index b96fc441178..39975c30551 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts @@ -2,6 +2,7 @@ import { type BatchInsertionResult, type L2Block, type MerkleTreeId, type Siblin import { type HandleL2BlockAndMessagesResult, type IndexedTreeId, + type MerkleTreeAdminOperations, type MerkleTreeLeafType, type MerkleTreeOperations, type TreeInfo, @@ -9,13 +10,13 @@ import { import { type Fr, type Header, type NullifierLeafPreimage, type StateReference } from '@aztec/circuits.js'; import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; -import { type MerkleTreeDb } from './merkle_tree_db.js'; +import { type MerkleTreeAdminDb, type MerkleTreeDb } from './merkle_tree_db.js'; /** * Wraps a MerkleTreeDbOperations to call all functions with a preset includeUncommitted flag. */ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { - constructor(private trees: MerkleTreeDb, private includeUncommitted: boolean) {} + constructor(protected trees: MerkleTreeDb, protected includeUncommitted: boolean) {} /** * Returns the tree info for the specified tree id. @@ -164,6 +165,27 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { return this.trees.updateArchive(header, this.includeUncommitted); } + /** + * Batch insert multiple leaves into the tree. + * @param treeId - The ID of the tree. + * @param leaves - Leaves to insert into the tree. + * @param subtreeHeight - Height of the subtree. + * @returns The data for the leaves to be updated when inserting the new ones. 
+ */ + public batchInsert( + treeId: IndexedTreeId, + leaves: Buffer[], + subtreeHeight: number, + ): Promise> { + return this.trees.batchInsert(treeId, leaves, subtreeHeight); + } +} + +export class MerkleTreeAdminOperationsFacade extends MerkleTreeOperationsFacade implements MerkleTreeAdminOperations { + constructor(protected override trees: MerkleTreeAdminDb, includeUncommitted: boolean) { + super(trees, includeUncommitted); + } + /** * Handles a single L2 block (i.e. Inserts the new note hashes into the merkle tree). * @param block - The L2 block to handle. @@ -190,21 +212,6 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { return await this.trees.rollback(); } - /** - * Batch insert multiple leaves into the tree. - * @param treeId - The ID of the tree. - * @param leaves - Leaves to insert into the tree. - * @param subtreeHeight - Height of the subtree. - * @returns The data for the leaves to be updated when inserting the new ones. - */ - public batchInsert( - treeId: IndexedTreeId, - leaves: Buffer[], - subtreeHeight: number, - ): Promise> { - return this.trees.batchInsert(treeId, leaves, subtreeHeight); - } - public delete(): Promise { return this.trees.delete(); } diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts index 1c9bd5b6877..f94cbec7dde 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts @@ -3,8 +3,8 @@ import { type BatchInsertionResult, type HandleL2BlockAndMessagesResult, type IndexedTreeId, + type MerkleTreeAdminOperations, type MerkleTreeLeafType, - type MerkleTreeOperations, type TreeInfo, } from '@aztec/circuit-types/interfaces'; import { @@ -56,7 +56,7 @@ import { type TreeSnapshots, } from './merkle_tree_db.js'; import { type MerkleTreeMap } from './merkle_tree_map.js'; -import { MerkleTreeOperationsFacade } from 
'./merkle_tree_operations_facade.js'; +import { MerkleTreeAdminOperationsFacade } from './merkle_tree_operations_facade.js'; import { WorldStateMetrics } from './metrics.js'; /** @@ -122,8 +122,8 @@ export class MerkleTrees implements MerkleTreeDb { /** * Initializes the collection of Merkle Trees. */ - async #init() { - const fromDb = this.#isDbPopulated(); + async #init(loadFromDb?: boolean) { + const fromDb = loadFromDb === undefined ? this.#isDbPopulated() : loadFromDb; const initializeTree = fromDb ? loadTree : newTree; const hasher = new Poseidon(); @@ -180,17 +180,14 @@ export class MerkleTrees implements MerkleTreeDb { const initialState = await this.getStateReference(true); await this.#saveInitialStateReference(initialState); await this.#updateArchive(this.getInitialHeader(), true); - } - await this.#commit(); + // And commit anything we did to initialize this set of trees + await this.#commit(); + } } public async fork(): Promise { const [ms, db] = await elapsed(async () => { - // TODO(palla/prover-node): If the underlying store is being shared with other components, we're unnecessarily - // copying a lot of data unrelated to merkle trees. This may be fine for now, and we may be able to ditch backup-based - // forking in favor of a more elegant proposal. But if we see this operation starts taking a lot of time, we may want - // to open separate stores for merkle trees and other components. const forked = await this.store.fork(); return MerkleTrees.new(forked, this.telemetryClient, this.log); }); @@ -199,6 +196,20 @@ export class MerkleTrees implements MerkleTreeDb { return db; } + // REFACTOR: We're hiding the `commit` operations in the tree behind a type check only, but + // we should make sure it's not accidentally called elsewhere by splitting this class into one + // that can work on a read-only store and one that actually writes to the store. This implies + // having read-only versions of the kv-stores, all kv-containers, and all trees. 
+ public async ephemeralFork(): Promise { + const forked = new MerkleTrees( + this.store, + this.telemetryClient, + createDebugLogger('aztec:merkle_trees:ephemeral_fork'), + ); + await forked.#init(true); + return forked; + } + public async delete() { await this.store.delete(); } @@ -218,16 +229,16 @@ export class MerkleTrees implements MerkleTreeDb { * Gets a view of this db that returns uncommitted data. * @returns - A facade for this instance. */ - public asLatest(): MerkleTreeOperations { - return new MerkleTreeOperationsFacade(this, true); + public asLatest(): MerkleTreeAdminOperations { + return new MerkleTreeAdminOperationsFacade(this, true); } /** * Gets a view of this db that returns committed data only. * @returns - A facade for this instance. */ - public asCommitted(): MerkleTreeOperations { - return new MerkleTreeOperationsFacade(this, false); + public asCommitted(): MerkleTreeAdminOperations { + return new MerkleTreeAdminOperationsFacade(this, false); } /** From 4dbad01c866b28f7d440d7b4e17631ed6a0469f3 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 4 Sep 2024 09:51:08 -0400 Subject: [PATCH 05/18] chore(bb): reinstate "chore: uncomment asserts in oink rec verifier"" (#8356) Fixes the base rollup test by making the input proof have the same circuit size, number of public inputs, and pub inputs offset. 
--------- Co-authored-by: lucasxia01 --- .../barretenberg/aztec_ivc/aztec_ivc.test.cpp | 26 ++++++++++++------- .../client_ivc/client_ivc.test.cpp | 11 +++----- .../honk_verifier/oink_recursive_verifier.cpp | 20 ++++++++------ yarn-project/README.md | 2 +- .../src/structs/verification_key.ts | 2 +- .../src/test/bb_prover_base_rollup.test.ts | 6 ++++- 6 files changed, 39 insertions(+), 28 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/aztec_ivc/aztec_ivc.test.cpp b/barretenberg/cpp/src/barretenberg/aztec_ivc/aztec_ivc.test.cpp index 84012b3cb85..09daa6708e1 100644 --- a/barretenberg/cpp/src/barretenberg/aztec_ivc/aztec_ivc.test.cpp +++ b/barretenberg/cpp/src/barretenberg/aztec_ivc/aztec_ivc.test.cpp @@ -151,10 +151,10 @@ TEST_F(AztecIVCTests, BasicFour) }; /** - * @brief Check that the IVC fails to verify if an intermediate fold proof is invalid + * @brief Check that the IVC fails if an intermediate fold proof is invalid * @details When accumulating 4 circuits, there are 3 fold proofs to verify (the first two are recursively verfied and - * the 3rd is verified as part of the IVC proof). Check that if any of one of these proofs is invalid, the IVC will fail - * to verify. + * the 3rd is verified as part of the IVC proof). Check that if any of one of these proofs is invalid, the IVC will + * fail. 
* */ TEST_F(AztecIVCTests, BadProofFailure) @@ -175,7 +175,7 @@ TEST_F(AztecIVCTests, BadProofFailure) EXPECT_TRUE(ivc.prove_and_verify()); } - // The IVC fails to verify if the FIRST fold proof is tampered with + // The IVC throws an exception if the FIRST fold proof is tampered with { AztecIVC ivc; ivc.trace_structure = TraceStructure::SMALL_TEST; @@ -185,6 +185,11 @@ TEST_F(AztecIVCTests, BadProofFailure) // Construct and accumulate a set of mocked private function execution circuits size_t NUM_CIRCUITS = 4; for (size_t idx = 0; idx < NUM_CIRCUITS; ++idx) { + if (idx == 3) { // At idx = 3, we've tampered with the one of the folding proofs so create the recursive + // folding verifier will throw an error. + EXPECT_ANY_THROW(circuit_producer.create_next_circuit(ivc, /*log2_num_gates=*/5)); + break; + } auto circuit = circuit_producer.create_next_circuit(ivc, /*log2_num_gates=*/5); ivc.accumulate(circuit); @@ -193,11 +198,9 @@ TEST_F(AztecIVCTests, BadProofFailure) tamper_with_proof(ivc.verification_queue[0].proof); // tamper with first proof } } - - EXPECT_FALSE(ivc.prove_and_verify()); } - // The IVC fails to verify if the SECOND fold proof is tampered with + // The IVC fails if the SECOND fold proof is tampered with { AztecIVC ivc; ivc.trace_structure = TraceStructure::SMALL_TEST; @@ -207,6 +210,11 @@ TEST_F(AztecIVCTests, BadProofFailure) // Construct and accumulate a set of mocked private function execution circuits size_t NUM_CIRCUITS = 4; for (size_t idx = 0; idx < NUM_CIRCUITS; ++idx) { + if (idx == 3) { // At idx = 3, we've tampered with the one of the folding proofs so create the recursive + // folding verifier will throw an error. 
+ EXPECT_ANY_THROW(circuit_producer.create_next_circuit(ivc, /*log2_num_gates=*/5)); + break; + } auto circuit = circuit_producer.create_next_circuit(ivc, /*log2_num_gates=*/5); ivc.accumulate(circuit); @@ -215,11 +223,9 @@ TEST_F(AztecIVCTests, BadProofFailure) tamper_with_proof(ivc.verification_queue[1].proof); // tamper with second proof } } - - EXPECT_FALSE(ivc.prove_and_verify()); } - // The IVC fails to verify if the 3rd/FINAL fold proof is tampered with + // The IVC fails if the 3rd/FINAL fold proof is tampered with { AztecIVC ivc; ivc.trace_structure = TraceStructure::SMALL_TEST; diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.test.cpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.test.cpp index 77685d2717c..7b8587c086a 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.test.cpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.test.cpp @@ -106,7 +106,7 @@ TEST_F(ClientIVCTests, BasicThree) }; /** - * @brief Check that the IVC fails to verify if an intermediate fold proof is invalid + * @brief Check that the IVC fails if an intermediate fold proof is invalid * */ TEST_F(ClientIVCTests, BasicFailure) @@ -128,13 +128,10 @@ TEST_F(ClientIVCTests, BasicFailure) break; } } - - // Accumulate another circuit; this involves recursive folding verification of the bad proof + // Accumulate another circuit; this involves recursive folding verification of the bad proof which throws an error + // because of circuit sizes don't match. 
Builder circuit_2 = create_mock_circuit(ivc); - ivc.accumulate(circuit_2); - - // The bad fold proof should result in an invalid witness in the final circuit and the IVC should fail to verify - EXPECT_FALSE(prove_and_verify(ivc)); + EXPECT_ANY_THROW(ivc.accumulate(circuit_2)); }; /** diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/oink_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/oink_recursive_verifier.cpp index 2a8401d577e..c058770bc2d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/oink_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/oink_recursive_verifier.cpp @@ -41,14 +41,18 @@ template void OinkRecursiveVerifier_::verify() CommitmentLabels labels; FF circuit_size = transcript->template receive_from_prover(domain_separator + "circuit_size"); - transcript->template receive_from_prover(domain_separator + "public_input_size"); - transcript->template receive_from_prover(domain_separator + "pub_inputs_offset"); - - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1032): Uncomment these once it doesn't cause issues - // with the flows - // ASSERT(static_cast(circuit_size.get_value()) == key->circuit_size); - // ASSERT(static_cast(public_input_size.get_value()) == key->num_public_inputs); - // ASSERT(static_cast(pub_inputs_offset.get_value()) == key->pub_inputs_offset); + FF public_input_size = transcript->template receive_from_prover(domain_separator + "public_input_size"); + FF pub_inputs_offset = transcript->template receive_from_prover(domain_separator + "pub_inputs_offset"); + + if (static_cast(circuit_size.get_value()) != instance->verification_key->circuit_size) { + throw_or_abort("OinkRecursiveVerifier::verify: proof circuit size does not match verification key"); + } + if (static_cast(public_input_size.get_value()) != instance->verification_key->num_public_inputs) { + throw_or_abort("OinkRecursiveVerifier::verify: proof public 
input size does not match verification key"); + } + if (static_cast(pub_inputs_offset.get_value()) != instance->verification_key->pub_inputs_offset) { + throw_or_abort("OinkRecursiveVerifier::verify: proof public input offset does not match verification key"); + } std::vector public_inputs; for (size_t i = 0; i < instance->verification_key->num_public_inputs; ++i) { diff --git a/yarn-project/README.md b/yarn-project/README.md index d3da1172535..8c5f0014094 100644 --- a/yarn-project/README.md +++ b/yarn-project/README.md @@ -86,6 +86,6 @@ COMMIT_TAG= - Extract `VERSION` as the script shows (in the eg it should be 0.8.8) - Skip the version existing checks like `if [ "$VERSION" == "$PUBLISHED_VERSION" ]` and `if [ "$VERSION" != "$HIGHER_VERSION" ]`. Since this is our first time deploying the package, `PUBLISHED_VERSION` and `HIGHER_VERSION` will be empty and hence these checks would fail. These checks are necessary in the CI for continual releases. - Locally update the package version in package.json using `jq` as shown in the script. - - Do a dry-run + - Do a dry-run. - If dry run succeeds, publish the package! 5. Create a PR by adding your package into the `deploy-npm` script so next release onwards, CI can cut releases for your package. 
diff --git a/yarn-project/circuits.js/src/structs/verification_key.ts b/yarn-project/circuits.js/src/structs/verification_key.ts index 331cc50ba10..d7f5f4706c7 100644 --- a/yarn-project/circuits.js/src/structs/verification_key.ts +++ b/yarn-project/circuits.js/src/structs/verification_key.ts @@ -79,7 +79,7 @@ export class CommitmentMap { // TODO: find better home for these constants export const CIRCUIT_SIZE_INDEX = 0; export const CIRCUIT_PUBLIC_INPUTS_INDEX = 1; -export const CIRCUIT_RECURSIVE_INDEX = 0; +export const CIRCUIT_RECURSIVE_INDEX = 3; /** * Provides a 'fields' representation of a circuit's verification key diff --git a/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts index ea572030820..411960efede 100644 --- a/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts @@ -41,9 +41,13 @@ describe('prover/bb_prover/base-rollup', () => { const tx = makePaddingProcessedTxFromTubeProof(paddingTxPublicInputsAndProof); logger.verbose('Building base rollup inputs'); + const baseRollupInputProof = makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH); + baseRollupInputProof.proof[0] = paddingTxPublicInputsAndProof.verificationKey.keyAsFields.key[0]; + baseRollupInputProof.proof[1] = paddingTxPublicInputsAndProof.verificationKey.keyAsFields.key[1]; + baseRollupInputProof.proof[2] = paddingTxPublicInputsAndProof.verificationKey.keyAsFields.key[2]; const baseRollupInputs = await buildBaseRollupInput( tx, - makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + baseRollupInputProof, context.globalVariables, context.actualDb, paddingTxPublicInputsAndProof.verificationKey, From 8ee8595d664d7c1dca65bd0496648bb4cf1a32f7 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 4 Sep 2024 10:07:53 -0400 Subject: [PATCH 06/18] chore(ci): Test lowering of non-persistent ebs provisions (#8360) - lower ebs for test 
instances, doesnt seem to affect test times much - hopefully fix race condition on startup 'command not found: brotli' --- .github/ensure-tester/action.yml | 5 ++++- .github/spot-runner-action/dist/index.js | 13 ++++++++++--- .github/spot-runner-action/src/ec2.ts | 4 ++-- .github/spot-runner-action/src/userdata.ts | 9 ++++++++- 4 files changed, 24 insertions(+), 7 deletions(-) diff --git a/.github/ensure-tester/action.yml b/.github/ensure-tester/action.yml index 9bdda082b79..ca85b7da853 100644 --- a/.github/ensure-tester/action.yml +++ b/.github/ensure-tester/action.yml @@ -70,7 +70,10 @@ runs: - name: Print Startup Log shell: bash - run: cat /run/log.out || true # don't fail + run: + # Try 6 times to see if .user-data-finished has been written to + for i in {0..6} ; do scripts/run_on_tester "cat /home/ubuntu/.user-data-finished" && break ; sleep 5 ; done ; + scripts/run_on_tester "cat /run/log.out" # Set up a context for this run - name: Copy Repo to Tester diff --git a/.github/spot-runner-action/dist/index.js b/.github/spot-runner-action/dist/index.js index d0bf221d435..7f72f808e4f 100644 --- a/.github/spot-runner-action/dist/index.js +++ b/.github/spot-runner-action/dist/index.js @@ -281,8 +281,8 @@ class Ec2Instance { Ebs: { VolumeSize: 64, VolumeType: 'gp3', - Throughput: 1000, - Iops: 5000 + Throughput: 250, + Iops: 3000 }, }, ], @@ -1040,7 +1040,14 @@ class UserData { `sudo service docker restart`, "sudo wget -q https://github.com/earthly/earthly/releases/download/v0.8.10/earthly-linux-$(dpkg --print-architecture) -O /usr/local/bin/earthly", "sudo chmod +x /usr/local/bin/earthly", - `sudo bash -c 'echo \'Acquire::Retries "3"; Acquire::https::Timeout "240"; Acquire::http::Timeout "240"; APT::Get::Assume-Yes "true"; APT::Install-Recommends "false"; APT::Install-Suggests "false";\' > /etc/apt/apt.conf.d/99-aztec-build'`, + `sudo bash -c 'cat < /etc/apt/apt.conf.d/99-aztec-build +Acquire::Retries "3"; +Acquire::https::Timeout "240"; 
+Acquire::http::Timeout "240"; +APT::Get::Assume-Yes "true"; +APT::Install-Recommends "false"; +APT::Install-Suggests "false"; +EOF'`, "sudo apt install -y brotli", 'echo "MaxStartups 1000" >> /etc/ssh/sshd_config', 'echo "ClientAliveInterval=30" >> /etc/ssh/sshd_config', diff --git a/.github/spot-runner-action/src/ec2.ts b/.github/spot-runner-action/src/ec2.ts index 5e6ed63a6ea..752bcdc8534 100644 --- a/.github/spot-runner-action/src/ec2.ts +++ b/.github/spot-runner-action/src/ec2.ts @@ -200,8 +200,8 @@ export class Ec2Instance { Ebs: { VolumeSize: 64, VolumeType: 'gp3', - Throughput: 1000, - Iops: 5000 + Throughput: 250, + Iops: 3000 }, }, ], diff --git a/.github/spot-runner-action/src/userdata.ts b/.github/spot-runner-action/src/userdata.ts index e15b7097e6b..c4ba6631e2b 100644 --- a/.github/spot-runner-action/src/userdata.ts +++ b/.github/spot-runner-action/src/userdata.ts @@ -22,7 +22,14 @@ export class UserData { `sudo service docker restart`, "sudo wget -q https://github.com/earthly/earthly/releases/download/v0.8.10/earthly-linux-$(dpkg --print-architecture) -O /usr/local/bin/earthly", "sudo chmod +x /usr/local/bin/earthly", - `sudo bash -c 'echo \'Acquire::Retries "3"; Acquire::https::Timeout "240"; Acquire::http::Timeout "240"; APT::Get::Assume-Yes "true"; APT::Install-Recommends "false"; APT::Install-Suggests "false";\' > /etc/apt/apt.conf.d/99-aztec-build'`, + `sudo bash -c 'cat < /etc/apt/apt.conf.d/99-aztec-build +Acquire::Retries "3"; +Acquire::https::Timeout "240"; +Acquire::http::Timeout "240"; +APT::Get::Assume-Yes "true"; +APT::Install-Recommends "false"; +APT::Install-Suggests "false"; +EOF'`, "sudo apt install -y brotli", 'echo "MaxStartups 1000" >> /etc/ssh/sshd_config', 'echo "ClientAliveInterval=30" >> /etc/ssh/sshd_config', From 8fe045ca3c25997f1eda874dec0da67b7d564d06 Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Wed, 4 Sep 2024 10:48:18 -0400 Subject: [PATCH 07/18] chore: move 
spartan network tests to nightly (#8369) --- .github/workflows/ci.yml | 31 ----- .github/workflows/spartan-test.yml | 175 +++++++++++++++++++++++++++++ 2 files changed, 175 insertions(+), 31 deletions(-) create mode 100644 .github/workflows/spartan-test.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b033f5c559c..f3420cf23b2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -161,37 +161,6 @@ jobs: export FORCE_COLOR=1 ../../scripts/earthly-ci -P --no-output +${{ matrix.test }} - network-e2e: - needs: [build, changes] - if: ${{ needs.changes.outputs.non-barretenberg-cpp == 'true' }} - runs-on: ubuntu-20.04 - strategy: - matrix: - values_file: ["default.yaml", "3-validators.yaml"] - steps: - - uses: actions/checkout@v4 - with: { ref: "${{ env.GIT_COMMIT }}" } - - uses: ./.github/ci-setup-action - - name: Setup and Test - uses: ./.github/ensure-tester-with-images - timeout-minutes: 45 - with: - runner_type: ${{ contains(matrix.test, 'prover') && '64core-tester-x86' || '16core-tester-x86' }} - builder_type: builder-x86 - # these are copied to the tester and expected by the earthly command below - # if they fail to copy, it will try to build them on the tester and fail - builder_images_to_copy: aztecprotocol/aztec:${{ env.GIT_COMMIT }} aztecprotocol/end-to-end:${{ env.GIT_COMMIT }} - # command to produce the images in case they don't exist - builder_command: scripts/earthly-ci ./yarn-project+export-e2e-test-images - tester_ttl: 40 - run: | - set -eux - cd ./yarn-project/end-to-end/ - ./scripts/setup_local_k8s.sh - export FORCE_COLOR=1 - export EARTHLY_BUILD_ARGS="${{ env.EARTHLY_BUILD_ARGS }}" - ../../scripts/earthly-ci --exec-stats -P --no-output ./+network-transfer --values-file=${{ matrix.values_file }} - # all the benchmarking end-to-end integration tests for aztec (not required to merge) bench-e2e: needs: [build, changes] diff --git a/.github/workflows/spartan-test.yml b/.github/workflows/spartan-test.yml new file 
mode 100644 index 00000000000..902c032b647 --- /dev/null +++ b/.github/workflows/spartan-test.yml @@ -0,0 +1,175 @@ +name: Run spartan network tests +on: + workflow_dispatch: + schedule: + - cron: "00 08 * * 1-5" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} + GIT_COMMIT: ${{ github.sha }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + +jobs: + setup: + uses: ./.github/workflows/setup-runner.yml + with: + username: ${{ github.event.pull_request.user.login || github.actor }} + runner_type: builder-x86 + secrets: inherit + + changes: + runs-on: ubuntu-20.04 + # Required permissions. + permissions: + pull-requests: read + # Set job outputs to values from filter step + outputs: + avm-transpiler: ${{ steps.filter.outputs.avm-transpiler }} + build-images: ${{ steps.filter.outputs.build-images }} + barretenberg: ${{ steps.filter.outputs.barretenberg }} + barretenberg-cpp: ${{ steps.filter.outputs.barretenberg-cpp }} + noir: ${{ steps.filter.outputs.noir }} + noir-projects: ${{ steps.filter.outputs.noir-projects }} + l1-contracts: ${{ steps.filter.outputs.l1-contracts }} + non-docs: ${{ steps.filter.outputs.non-docs }} + non-misc-ci: ${{ steps.filter.outputs.non-misc-ci }} + non-barretenberg-cpp: ${{ steps.filter.outputs.non-barretenberg-cpp }} + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 + id: filter + with: + filters: | + barretenberg: + - 'barretenberg/**' + barretenberg-cpp: + - 'barretenberg/cpp/**' + build-images: + - 'build-images/**' + noir: + - 'noir/**' + avm-transpiler: + - 'avm-transpiler/**' + l1-contracts: + - 'l1-contracts/**' + noir-projects: + - 'noir-projects/**' + non-barretenberg-cpp: + - '!(barretenberg/cpp/**)' + # don't consider AVM stuff 'core bb' + - 
barretenberg/cpp/pil/** + - barretenberg/cpp/src/barretenberg/vm/** + - barretenberg/cpp/src/barretenberg/**/generated/* + - barretenberg/cpp/src/barretenberg/client_ivc.{hpp,cpp} + non-docs: + - '!(docs/**)' + non-misc-ci: + - '!(.github/**)' + # Always rebuild when this file changes + - .github/workflows/ci.yml + + build-images: + needs: [setup, changes] + # Note we don't but the 'if' here as that would also apply to dependent jobs, instead we just copy it into each step + runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 + steps: + - uses: actions/checkout@v4 + if: ${{ needs.changes.outputs.build-images == 'true' }} + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + if: ${{ needs.changes.outputs.build-images == 'true' }} + with: + concurrency_key: build-images-x86 + - name: "Push Build Images If Changed" + if: ${{ needs.changes.outputs.build-images == 'true' }} + timeout-minutes: 40 + run: | + earthly-ci --push ./build-images/+build + + build: + needs: [build-images, changes] + if: ${{ needs.changes.outputs.non-docs == 'true' && needs.changes.outputs.non-misc-ci == 'true' && needs.changes.outputs.non-barretenberg-cpp == 'true' }} + runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 + outputs: + e2e_list: ${{ steps.e2e_list.outputs.list }} + bench_list: ${{ steps.bench_list.outputs.list }} + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + with: + concurrency_key: build-x86 + # prepare images locally, tagged by commit hash + - name: "Build E2E Image" + timeout-minutes: 40 + run: | + earthly-ci ./yarn-project+export-e2e-test-images + + network-e2e: + needs: [build, changes] + if: ${{ needs.changes.outputs.non-barretenberg-cpp == 'true' }} + runs-on: ubuntu-20.04 + strategy: + matrix: + values_file: ["default.yaml", "3-validators.yaml"] + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: 
./.github/ci-setup-action + - name: Setup and Test + uses: ./.github/ensure-tester-with-images + timeout-minutes: 45 + with: + runner_type: ${{ contains(matrix.test, 'prover') && '64core-tester-x86' || '16core-tester-x86' }} + builder_type: builder-x86 + # these are copied to the tester and expected by the earthly command below + # if they fail to copy, it will try to build them on the tester and fail + builder_images_to_copy: aztecprotocol/aztec:${{ env.GIT_COMMIT }} aztecprotocol/end-to-end:${{ env.GIT_COMMIT }} + # command to produce the images in case they don't exist + builder_command: scripts/earthly-ci ./yarn-project+export-e2e-test-images + tester_ttl: 40 + run: | + set -eux + cd ./yarn-project/end-to-end/ + ./scripts/setup_local_k8s.sh + export FORCE_COLOR=1 + export EARTHLY_BUILD_ARGS="${{ env.EARTHLY_BUILD_ARGS }}" + ../../scripts/earthly-ci --exec-stats -P --no-output ./+network-transfer --values-file=${{ matrix.values_file }} + + success-check: + runs-on: ubuntu-20.04 + needs: + - build-and-test + if: always() + steps: + - name: Report overall success + env: + # We treat any skipped or failing jobs as a failure for the workflow as a whole. + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') }} + run: | + if [[ $FAIL == true ]]; then + echo "Test failed." 
+ exit 1 + fi + + notify: + needs: + - success-check + runs-on: ubuntu-20.04 + if: ${{ github.ref == 'refs/heads/master' && failure() }} + steps: + - name: Send notification to aztec3-ci channel if workflow failed on master + uses: slackapi/slack-github-action@v1.25.0 + with: + payload: | + { + "url": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY_WORKFLOW_TRIGGER_URL }} From d6ebe3e674ea59acf810c9736aa908c63b5a9b85 Mon Sep 17 00:00:00 2001 From: PhilWindle <60546371+PhilWindle@users.noreply.github.com> Date: Wed, 4 Sep 2024 15:57:44 +0100 Subject: [PATCH 08/18] chore: Change efs volumes to use bursting throughput (#8370) This PR changes our EFS volumes to use the default (burst) throughput mode --- yarn-project/aztec/terraform/node/main.tf | 4 +--- yarn-project/aztec/terraform/prover-node/main.tf | 2 -- yarn-project/aztec/terraform/pxe/main.tf | 4 +--- 3 files changed, 2 insertions(+), 8 deletions(-) diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index af6b78adbd0..6304b37ab45 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -103,9 +103,7 @@ resource "aws_service_discovery_service" "aztec-node" { # Configure an EFS filesystem. resource "aws_efs_file_system" "node_data_store" { - creation_token = "${var.DEPLOY_TAG}-node-data" - throughput_mode = "provisioned" - provisioned_throughput_in_mibps = 20 + creation_token = "${var.DEPLOY_TAG}-node-data" tags = { Name = "${var.DEPLOY_TAG}-node-data" diff --git a/yarn-project/aztec/terraform/prover-node/main.tf b/yarn-project/aztec/terraform/prover-node/main.tf index e499eacb050..9ea84547ac0 100644 --- a/yarn-project/aztec/terraform/prover-node/main.tf +++ b/yarn-project/aztec/terraform/prover-node/main.tf @@ -104,8 +104,6 @@ resource "aws_service_discovery_service" "aztec-prover-node" { # Configure an EFS filesystem. 
resource "aws_efs_file_system" "prover_node_data_store" { creation_token = "${var.DEPLOY_TAG}-prover-node-data" - throughput_mode = "provisioned" - provisioned_throughput_in_mibps = 20 tags = { Name = "${var.DEPLOY_TAG}-prover-node-data" diff --git a/yarn-project/aztec/terraform/pxe/main.tf b/yarn-project/aztec/terraform/pxe/main.tf index a6f0e543147..689c86b5961 100644 --- a/yarn-project/aztec/terraform/pxe/main.tf +++ b/yarn-project/aztec/terraform/pxe/main.tf @@ -70,9 +70,7 @@ resource "aws_service_discovery_service" "aztec-pxe" { } resource "aws_efs_file_system" "pxe_data_store" { - creation_token = "${var.DEPLOY_TAG}-pxe-data" - throughput_mode = "provisioned" - provisioned_throughput_in_mibps = 20 + creation_token = "${var.DEPLOY_TAG}-pxe-data" tags = { Name = "${var.DEPLOY_TAG}-pxe-data" From 176bce6dd1a4dfbbd82d4f83fddbb02f84145765 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Wed, 4 Sep 2024 13:11:12 -0400 Subject: [PATCH 09/18] feat: Sync from noir (#8363) Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec. 
BEGIN_COMMIT_OVERRIDE feat: Sync from aztec-packages (https://github.com/noir-lang/noir/pull/5917) chore: bump some dependencies (https://github.com/noir-lang/noir/pull/5893) chore: make nested slice error more clear for `[[T]; N]` case (https://github.com/noir-lang/noir/pull/5906) feat: better println for Quoted (https://github.com/noir-lang/noir/pull/5896) feat: LSP diagnostics for all package files (https://github.com/noir-lang/noir/pull/5895) feat: LSP code action "Fill struct fields" (https://github.com/noir-lang/noir/pull/5885) chore: Cleanup str_as_bytes (https://github.com/noir-lang/noir/pull/5900) chore: update git user for release PRs (https://github.com/noir-lang/noir/pull/5894) END_COMMIT_OVERRIDE --------- Co-authored-by: TomAFrench --- .noir-sync-commit | 2 +- noir/noir-repo/.github/workflows/release.yml | 8 +- noir/noir-repo/Cargo.lock | 189 +++++------ .../src/brillig/brillig_gen/brillig_block.rs | 44 ++- .../compiler/noirc_evaluator/src/errors.rs | 2 +- .../compiler/noirc_evaluator/src/ssa.rs | 4 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 10 +- .../check_for_underconstrained_values.rs | 2 +- .../noirc_evaluator/src/ssa/ir/function.rs | 7 + .../noirc_evaluator/src/ssa/ir/map.rs | 10 +- .../noirc_evaluator/src/ssa/opt/inlining.rs | 13 +- .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 32 +- .../noirc_evaluator/src/ssa/opt/mod.rs | 1 + .../src/ssa/opt/normalize_value_ids.rs | 194 +++++++++++ .../src/hir/comptime/interpreter.rs | 16 +- .../src/hir/comptime/interpreter/builtin.rs | 28 +- .../interpreter/builtin/builtin_helpers.rs | 13 + .../noirc_frontend/src/hir/comptime/value.rs | 37 ++- .../src/hir/resolution/errors.rs | 6 +- noir/noir-repo/tooling/lsp/src/lib.rs | 11 +- .../tooling/lsp/src/notifications/mod.rs | 207 +++++++----- .../tooling/lsp/src/requests/code_action.rs | 136 ++------ .../code_action/fill_struct_fields.rs | 307 ++++++++++++++++++ 
.../requests/code_action/import_or_qualify.rs | 240 ++++++++++++++ .../lsp/src/requests/code_action/tests.rs | 150 +-------- .../noir-repo/tooling/lsp/src/requests/mod.rs | 10 +- .../tooling/nargo_cli/src/cli/lsp_cmd.rs | 2 - 27 files changed, 1165 insertions(+), 516 deletions(-) create mode 100644 noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs create mode 100644 noir/noir-repo/tooling/lsp/src/requests/code_action/fill_struct_fields.rs create mode 100644 noir/noir-repo/tooling/lsp/src/requests/code_action/import_or_qualify.rs diff --git a/.noir-sync-commit b/.noir-sync-commit index c253290bf18..f58ed005d42 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -45344bfe1148a2f592c2e432744d3fb3d46340cc +44cf9a2140bc06b550d4b46966f1637598ac11a7 diff --git a/noir/noir-repo/.github/workflows/release.yml b/noir/noir-repo/.github/workflows/release.yml index d27fac0e039..5124592a3fe 100644 --- a/noir/noir-repo/.github/workflows/release.yml +++ b/noir/noir-repo/.github/workflows/release.yml @@ -54,8 +54,8 @@ jobs: - name: Configure git run: | - git config user.name kevaundray - git config user.email kevtheappdev@gmail.com + git config user.name noirwhal + git config user.email tomfrench@aztecprotocol.com - name: Commit updates run: | @@ -100,8 +100,8 @@ jobs: - name: Configure git run: | - git config --local user.name 'kevaundray' - git config --local user.email 'kevtheappdev@gmail.com' + git config --local user.name noirwhal + git config --local user.email tomfrench@aztecprotocol.com - name: Commit new documentation version run: | diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index cd936e4bca2..3f56f5b6965 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -7,7 +7,7 @@ name = "acir" version = "0.49.0" dependencies = [ "acir_field", - "base64 0.21.2", + "base64 0.21.7", "bincode", "brillig", "criterion", @@ -488,9 +488,9 @@ checksum = 
"9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.2" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64ct" @@ -665,7 +665,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", - "regex-automata 0.3.3", + "regex-automata 0.3.9", "serde", ] @@ -771,9 +771,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.37" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", @@ -781,7 +781,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -1123,7 +1123,7 @@ dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", - "memoffset 0.9.0", + "memoffset 0.9.1", "scopeguard", ] @@ -1454,12 +1454,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -2428,7 +2428,7 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex-automata 0.4.5", + "regex-automata 0.4.7", ] [[package]] @@ -2471,9 +2471,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.3" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" @@ -2567,9 +2567,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" dependencies = [ "autocfg", ] @@ -2582,9 +2582,9 @@ checksum = "2145869435ace5ea6ea3d35f59be559317ec9a0d04e1812d5f185a87b6d36f1a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] @@ -2986,7 +2986,7 @@ name = "noirc_errors" version = "0.33.0" dependencies = [ "acvm", - "base64 0.21.2", + "base64 0.21.7", "chumsky", "codespan", "codespan-reporting", @@ -3027,7 +3027,7 @@ name = "noirc_frontend" version = "0.33.0" dependencies = [ "acvm", - "base64 0.21.2", + "base64 0.21.7", "bn254_blackbox_solver", "cfg-if 1.0.0", "chumsky", @@ -3272,7 +3272,7 @@ dependencies = [ "libc", "redox_syscall 0.3.5", "smallvec", - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -3743,9 +3743,9 @@ checksum = "977b1e897f9d764566891689e642653e5ed90c6895106acd005eb4c1d0203991" [[package]] name = "rayon" -version = "1.8.0" +version = 
"1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -3753,9 +3753,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.0" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -3813,7 +3813,7 @@ checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.5", + "regex-automata 0.4.7", "regex-syntax 0.8.2", ] @@ -3828,15 +3828,15 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.3" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" [[package]] name = "regex-automata" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", @@ -3945,15 +3945,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.4" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags 2.5.0", "errno", "libc", 
"linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -4113,9 +4113,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.202" +version = "1.0.209" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395" +checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09" dependencies = [ "serde_derive", ] @@ -4156,9 +4156,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.202" +version = "1.0.209" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838" +checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170" dependencies = [ "proc-macro2", "quote", @@ -4202,7 +4202,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1402f54f9a3b9e2efe71c1cea24e648acce55887983553eeb858cf3115acfd49" dependencies = [ - "base64 0.21.2", + "base64 0.21.7", "chrono", "hex", "indexmap 1.9.3", @@ -4670,9 +4670,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.36.0" +version = "1.38.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +checksum = "eb2caba9f80616f438e09748d5acda951967e1ea58508ef53d9c6402485a46df" dependencies = [ "backtrace", "bytes", @@ -4687,9 +4687,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ "proc-macro2", "quote", @@ -5215,7 +5215,7 @@ version = "0.50.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "af6041b3f84485c21b57acdc0fee4f4f0c93f426053dc05fa5d6fc262537bbff" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -5224,7 +5224,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -5233,122 +5233,129 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - 
"windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = 
"0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = 
"0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 55794c2b7dd..a38d8ef582d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -270,16 +270,40 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_binary(binary, dfg, result_var); } Instruction::Constrain(lhs, rhs, assert_message) => { - let condition = SingleAddrVariable { - address: self.brillig_context.allocate_register(), - bit_size: 1, + let (condition, deallocate) = match ( + dfg.get_numeric_constant_with_type(*lhs), + dfg.get_numeric_constant_with_type(*rhs), + ) { + // If the constraint is of the form `x == u1 1` then we can simply constrain `x` directly + ( + Some((constant, Type::Numeric(NumericType::Unsigned { bit_size: 1 }))), + 
None, + ) if constant == FieldElement::one() => { + (self.convert_ssa_single_addr_value(*rhs, dfg), false) + } + ( + None, + Some((constant, Type::Numeric(NumericType::Unsigned { bit_size: 1 }))), + ) if constant == FieldElement::one() => { + (self.convert_ssa_single_addr_value(*lhs, dfg), false) + } + + // Otherwise we need to perform the equality explicitly. + _ => { + let condition = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: 1, + }; + self.convert_ssa_binary( + &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, + dfg, + condition, + ); + + (condition, true) + } }; - self.convert_ssa_binary( - &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, - dfg, - condition, - ); match assert_message { Some(ConstrainError::Dynamic(selector, values)) => { let payload_values = @@ -302,7 +326,9 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.codegen_constrain(condition, None); } } - self.brillig_context.deallocate_single_addr(condition); + if deallocate { + self.brillig_context.deallocate_single_addr(condition); + } } Instruction::Allocate => { let result_value = dfg.instruction_results(instruction_id)[0]; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs index 2c7ec0f8e1a..c4f56d032f9 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs @@ -44,7 +44,7 @@ pub enum RuntimeError { StaticAssertDynamicPredicate { call_stack: CallStack }, #[error("Argument is false")] StaticAssertFailed { call_stack: CallStack }, - #[error("Nested slices are not supported")] + #[error("Nested slices, i.e. 
slices within an array or slice, are not supported")] NestedSlice { call_stack: CallStack }, #[error("Big Integer modulus do no match")] BigIntModulus { call_stack: CallStack }, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index 57bd76d4f78..ad6645df228 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -82,6 +82,7 @@ pub(crate) fn optimize_into_acir( ) -> Result { let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); let ssa_gen_span_guard = ssa_gen_span.enter(); + let mut ssa = SsaBuilder::new( program, options.enable_ssa_logging, @@ -418,8 +419,9 @@ impl SsaBuilder { Ok(self.print(msg)) } - fn print(self, msg: &str) -> Self { + fn print(mut self, msg: &str) -> Self { if self.print_ssa_passes { + self.ssa.normalize_ids(); println!("{msg}\n{}", self.ssa); } self diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index a2b9e46a15a..0360b15d950 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -770,10 +770,12 @@ impl<'a> Context<'a> { .map(|result_id| dfg.type_of_value(*result_id).flattened_size()) .sum(); - let acir_function_id = ssa - .entry_point_to_generated_index - .get(id) - .expect("ICE: should have an associated final index"); + let Some(acir_function_id) = + ssa.entry_point_to_generated_index.get(id) + else { + unreachable!("Expected an associated final index for call to acir function {id} with args {arguments:?}"); + }; + let output_vars = self.acir_context.call_acir_function( AcirFunctionId(*acir_function_id), inputs, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs index 
26eab290d4b..aa5f4c8df95 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs @@ -244,7 +244,7 @@ impl Context { } }, Value::ForeignFunction(..) => { - panic!("Should not be able to reach foreign function from non-brillig functions"); + panic!("Should not be able to reach foreign function from non-brillig functions, {func_id} in function {}", function.name()); } Value::Array { .. } | Value::Instruction { .. } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs index bae9f82e4f1..65a616ef612 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs @@ -72,6 +72,13 @@ impl Function { Self { name: another.name.clone(), id, entry_block, dfg, runtime: another.runtime } } + /// Takes the signature (function name & runtime) from a function but does not copy the body. + pub(crate) fn clone_signature(id: FunctionId, another: &Function) -> Self { + let mut new_function = Function::new(another.name.clone(), id); + new_function.runtime = another.runtime; + new_function + } + /// The name of the function. /// Used exclusively for debugging purposes. pub(crate) fn name(&self) -> &str { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs index 769d52e6e65..23f5380f030 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs @@ -1,6 +1,7 @@ use fxhash::FxHashMap as HashMap; use serde::{Deserialize, Serialize}; use std::{ + collections::BTreeMap, hash::Hash, str::FromStr, sync::atomic::{AtomicUsize, Ordering}, @@ -240,7 +241,7 @@ impl std::ops::IndexMut> for DenseMap { /// call to .remove(). 
#[derive(Debug)] pub(crate) struct SparseMap { - storage: HashMap, T>, + storage: BTreeMap, T>, } impl SparseMap { @@ -271,11 +272,16 @@ impl SparseMap { pub(crate) fn remove(&mut self, id: Id) -> Option { self.storage.remove(&id) } + + /// Unwraps the inner storage of this map + pub(crate) fn into_btree(self) -> BTreeMap, T> { + self.storage + } } impl Default for SparseMap { fn default() -> Self { - Self { storage: HashMap::default() } + Self { storage: Default::default() } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index 1ff593a1531..7843c55da65 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -2,7 +2,7 @@ //! The purpose of this pass is to inline the instructions of each function call //! within the function caller. If all function calls are known, there will only //! be a single function remaining when the pass finishes. -use std::collections::{BTreeSet, HashSet}; +use std::collections::{BTreeSet, HashSet, VecDeque}; use acvm::acir::AcirField; use iter_extended::{btree_map, vecmap}; @@ -372,14 +372,14 @@ impl<'function> PerFunctionContext<'function> { fn translate_block( &mut self, source_block: BasicBlockId, - block_queue: &mut Vec, + block_queue: &mut VecDeque, ) -> BasicBlockId { if let Some(block) = self.blocks.get(&source_block) { return *block; } // The block is not yet inlined, queue it - block_queue.push(source_block); + block_queue.push_back(source_block); // The block is not already present in the function being inlined into so we must create it. // The block's instructions are not copied over as they will be copied later in inlining. @@ -415,13 +415,14 @@ impl<'function> PerFunctionContext<'function> { /// Inline all reachable blocks within the source_function into the destination function. 
fn inline_blocks(&mut self, ssa: &Ssa) -> Vec { let mut seen_blocks = HashSet::new(); - let mut block_queue = vec![self.source_function.entry_block()]; + let mut block_queue = VecDeque::new(); + block_queue.push_back(self.source_function.entry_block()); // This Vec will contain each block with a Return instruction along with the // returned values of that block. let mut function_returns = vec![]; - while let Some(source_block_id) = block_queue.pop() { + while let Some(source_block_id) = block_queue.pop_front() { if seen_blocks.contains(&source_block_id) { continue; } @@ -609,7 +610,7 @@ impl<'function> PerFunctionContext<'function> { fn handle_terminator_instruction( &mut self, block_id: BasicBlockId, - block_queue: &mut Vec, + block_queue: &mut VecDeque, ) -> Option<(BasicBlockId, Vec)> { match self.source_function.dfg[block_id].unwrap_terminator() { TerminatorInstruction::Jmp { destination, arguments, call_stack } => { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 9d6582c0db7..3d98f4126cf 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -116,7 +116,7 @@ struct PerFunctionContext<'f> { /// Track a value's last load across all blocks. /// If a value is not used in anymore loads we can remove the last store to that value. - last_loads: HashMap, + last_loads: HashMap, } impl<'f> PerFunctionContext<'f> { @@ -152,9 +152,31 @@ impl<'f> PerFunctionContext<'f> { // This rule does not apply to reference parameters, which we must also check for before removing these stores. 
for (block_id, block) in self.blocks.iter() { let block_params = self.inserter.function.dfg.block_parameters(*block_id); - for (value, store_instruction) in block.last_stores.iter() { - let is_reference_param = block_params.contains(value); - if self.last_loads.get(value).is_none() && !is_reference_param { + for (store_address, store_instruction) in block.last_stores.iter() { + let is_reference_param = block_params.contains(store_address); + let terminator = self.inserter.function.dfg[*block_id].unwrap_terminator(); + + let is_return = matches!(terminator, TerminatorInstruction::Return { .. }); + let remove_load = if is_return { + // Determine whether the last store is used in the return value + let mut is_return_value = false; + terminator.for_each_value(|return_value| { + is_return_value = return_value == *store_address || is_return_value; + }); + + // If the last load of a store is not part of the block with a return terminator, + // we can safely remove this store. + let last_load_not_in_return = self + .last_loads + .get(store_address) + .map(|(_, last_load_block)| *last_load_block != *block_id) + .unwrap_or(true); + !is_return_value && last_load_not_in_return + } else { + self.last_loads.get(store_address).is_none() + }; + + if remove_load && !is_reference_param { self.instructions_to_remove.insert(*store_instruction); } } @@ -259,7 +281,7 @@ impl<'f> PerFunctionContext<'f> { } else { references.mark_value_used(address, self.inserter.function); - self.last_loads.insert(address, instruction); + self.last_loads.insert(address, (instruction, block_id)); } } Instruction::Store { address, value } => { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 4e5fa262696..bd9d0baff97 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -13,6 +13,7 @@ mod die; pub(crate) mod flatten_cfg; mod inlining; 
mod mem2reg; +mod normalize_value_ids; mod rc; mod remove_bit_shifts; mod remove_enable_side_effects; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs new file mode 100644 index 00000000000..f11b310494b --- /dev/null +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs @@ -0,0 +1,194 @@ +use std::collections::BTreeMap; + +use crate::ssa::{ + ir::{ + basic_block::BasicBlockId, + function::{Function, FunctionId}, + map::SparseMap, + post_order::PostOrder, + value::{Value, ValueId}, + }, + ssa_gen::Ssa, +}; +use fxhash::FxHashMap as HashMap; +use iter_extended::vecmap; + +impl Ssa { + /// This is a debugging pass which re-inserts each instruction + /// and block in a fresh DFG context for each function so that ValueIds, + /// BasicBlockIds, and FunctionIds are always identical for the same SSA code. + /// + /// During normal compilation this is often not the case since prior passes + /// may increase the ID counter so that later passes start at different offsets, + /// even if they contain the same SSA code. + pub(crate) fn normalize_ids(&mut self) { + let mut context = Context::default(); + context.populate_functions(&self.functions); + for function in self.functions.values_mut() { + context.normalize_ids(function); + } + self.functions = context.functions.into_btree(); + } +} + +#[derive(Default)] +struct Context { + functions: SparseMap, + + new_ids: IdMaps, +} + +/// Maps from old ids to new ones. +/// Separate from the rest of Context so we can call mutable methods on it +/// while Context gives out mutable references to functions within. +#[derive(Default)] +struct IdMaps { + // Maps old function id -> new function id + function_ids: HashMap, + + // Maps old block id -> new block id + // Cleared in between each function. + blocks: HashMap, + + // Maps old value id -> new value id + // Cleared in between each function. 
+ values: HashMap, +} + +impl Context { + fn populate_functions(&mut self, functions: &BTreeMap) { + for (id, function) in functions { + self.functions.insert_with_id(|new_id| { + self.new_ids.function_ids.insert(*id, new_id); + Function::clone_signature(new_id, function) + }); + } + } + + fn normalize_ids(&mut self, old_function: &mut Function) { + self.new_ids.blocks.clear(); + self.new_ids.values.clear(); + + let new_function_id = self.new_ids.function_ids[&old_function.id()]; + let new_function = &mut self.functions[new_function_id]; + + let mut reachable_blocks = PostOrder::with_function(old_function).into_vec(); + reachable_blocks.reverse(); + + self.new_ids.populate_blocks(&reachable_blocks, old_function, new_function); + + // Map each parameter, instruction, and terminator + for old_block_id in reachable_blocks { + let new_block_id = self.new_ids.blocks[&old_block_id]; + + let old_block = &mut old_function.dfg[old_block_id]; + for old_instruction_id in old_block.take_instructions() { + let instruction = old_function.dfg[old_instruction_id] + .map_values(|value| self.new_ids.map_value(new_function, old_function, value)); + + let call_stack = old_function.dfg.get_call_stack(old_instruction_id); + let old_results = old_function.dfg.instruction_results(old_instruction_id); + + let ctrl_typevars = instruction + .requires_ctrl_typevars() + .then(|| vecmap(old_results, |result| old_function.dfg.type_of_value(*result))); + + let new_results = new_function.dfg.insert_instruction_and_results( + instruction, + new_block_id, + ctrl_typevars, + call_stack, + ); + + assert_eq!(old_results.len(), new_results.len()); + for (old_result, new_result) in old_results.iter().zip(new_results.results().iter()) + { + let old_result = old_function.dfg.resolve(*old_result); + self.new_ids.values.insert(old_result, *new_result); + } + } + + let old_block = &mut old_function.dfg[old_block_id]; + let mut terminator = old_block + .take_terminator() + .map_values(|value| 
self.new_ids.map_value(new_function, old_function, value)); + terminator.mutate_blocks(|old_block| self.new_ids.blocks[&old_block]); + new_function.dfg.set_block_terminator(new_block_id, terminator); + } + } +} + +impl IdMaps { + fn populate_blocks( + &mut self, + reachable_blocks: &[BasicBlockId], + old_function: &mut Function, + new_function: &mut Function, + ) { + let old_entry = old_function.entry_block(); + self.blocks.insert(old_entry, new_function.entry_block()); + + for old_id in reachable_blocks { + if *old_id != old_entry { + let new_id = new_function.dfg.make_block(); + self.blocks.insert(*old_id, new_id); + } + + let new_id = self.blocks[old_id]; + let old_block = &mut old_function.dfg[*old_id]; + for old_parameter in old_block.take_parameters() { + let old_parameter = old_function.dfg.resolve(old_parameter); + let typ = old_function.dfg.type_of_value(old_parameter); + let new_parameter = new_function.dfg.add_block_parameter(new_id, typ); + self.values.insert(old_parameter, new_parameter); + } + } + } + + fn map_value( + &mut self, + new_function: &mut Function, + old_function: &Function, + old_value: ValueId, + ) -> ValueId { + let old_value = old_function.dfg.resolve(old_value); + match &old_function.dfg[old_value] { + value @ Value::Instruction { instruction, .. } => { + *self.values.get(&old_value).unwrap_or_else(|| { + let instruction = &old_function.dfg[*instruction]; + unreachable!("Unmapped value with id {old_value}: {value:?}\n from instruction: {instruction:?}, SSA: {old_function}") + }) + } + + value @ Value::Param { .. 
} => { + *self.values.get(&old_value).unwrap_or_else(|| { + unreachable!("Unmapped value with id {old_value}: {value:?}") + }) + } + + Value::Function(id) => { + let new_id = self.function_ids[id]; + new_function.dfg.import_function(new_id) + } + + Value::NumericConstant { constant, typ } => { + new_function.dfg.make_constant(*constant, typ.clone()) + } + Value::Array { array, typ } => { + if let Some(value) = self.values.get(&old_value) { + return *value; + } + + let array = array + .iter() + .map(|value| self.map_value(new_function, old_function, *value)) + .collect(); + let new_value = new_function.dfg.make_array(array, typ.clone()); + self.values.insert(old_value, new_value); + new_value + } + Value::Intrinsic(intrinsic) => new_function.dfg.import_intrinsic(*intrinsic), + Value::ForeignFunction(name) => new_function.dfg.import_foreign_function(name), + } + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 4980045c68d..d8e62b66eca 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -1653,10 +1653,20 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { assert_eq!(arguments.len(), 2); let print_newline = arguments[0].0 == Value::Bool(true); - if print_newline { - println!("{}", arguments[1].0.display(self.elaborator.interner)); + let contents = arguments[1].0.display(self.elaborator.interner); + if self.elaborator.interner.is_in_lsp_mode() { + // If we `println!` in LSP it gets mixed with the protocol stream and leads to crashing + // the connection. If we use `eprintln!` not only it doesn't crash, but the output + // appears in the "Noir Language Server" output window in case you want to see it. 
+ if print_newline { + eprintln!("{}", contents); + } else { + eprint!("{}", contents); + } + } else if print_newline { + println!("{}", contents); } else { - print!("{}", arguments[1].0.display(self.elaborator.interner)); + print!("{}", contents); } Ok(Value::Unit) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index e5b098b41ed..070749e45ba 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -39,7 +39,7 @@ use crate::{ QuotedType, Shared, Type, }; -use self::builtin_helpers::{get_array, get_u8}; +use self::builtin_helpers::{get_array, get_str, get_u8}; use super::Interpreter; pub(crate) mod builtin_helpers; @@ -248,25 +248,15 @@ fn str_as_bytes( arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - let (string, string_location) = check_one_argument(arguments, location)?; + let string = check_one_argument(arguments, location)?; + let string = get_str(interner, string)?; - match string { - Value::String(string) => { - let string_as_bytes = string.as_bytes(); - let bytes_vector: Vec = string_as_bytes.iter().cloned().map(Value::U8).collect(); - let byte_array_type = Type::Array( - Box::new(Type::Constant(string_as_bytes.len() as u32)), - Box::new(Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight)), - ); - Ok(Value::Array(bytes_vector.into(), byte_array_type)) - } - value => { - let type_var = Box::new(interner.next_type_variable()); - let expected = Type::Array(type_var.clone(), type_var); - let actual = value.get_type().into_owned(); - Err(InterpreterError::TypeMismatch { expected, actual, location: string_location }) - } - } + let bytes: im::Vector = string.bytes().map(Value::U8).collect(); + let byte_array_type = Type::Array( + Box::new(Type::Constant(bytes.len() as u32)), + 
Box::new(Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight)), + ); + Ok(Value::Array(bytes, byte_array_type)) } /// fn as_type(self) -> Type diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs index dd9ea51961e..14a0e177544 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs @@ -104,6 +104,19 @@ pub(crate) fn get_slice( } } +pub(crate) fn get_str( + interner: &NodeInterner, + (value, location): (Value, Location), +) -> IResult> { + match value { + Value::String(string) => Ok(string), + value => { + let expected = Type::String(Box::new(interner.next_type_variable())); + type_mismatch(value, expected, location) + } + } +} + pub(crate) fn get_tuple( interner: &NodeInterner, (value, location): (Value, Location), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs index b96c4852931..c5818c20c57 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -567,11 +567,33 @@ impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { Value::Quoted(tokens) => { write!(f, "quote {{")?; for token in tokens.iter() { + write!(f, " ")?; + match token { Token::QuotedType(id) => { - write!(f, " {}", self.interner.get_quoted_type(*id))?; + write!(f, "{}", self.interner.get_quoted_type(*id))?; + } + Token::InternedExpr(id) => { + let value = Value::expression(ExpressionKind::Interned(*id)); + value.display(self.interner).fmt(f)?; + } + Token::InternedStatement(id) => { + let value = Value::statement(StatementKind::Interned(*id)); + value.display(self.interner).fmt(f)?; + } + 
Token::InternedLValue(id) => { + let value = Value::lvalue(LValue::Interned(*id, Span::default())); + value.display(self.interner).fmt(f)?; } - other => write!(f, " {other}")?, + Token::InternedUnresolvedTypeData(id) => { + let value = Value::UnresolvedType(UnresolvedTypeData::Interned(*id)); + value.display(self.interner).fmt(f)?; + } + Token::UnquoteMarker(id) => { + let value = Value::TypedExpr(TypedExpr::ExprId(*id)); + value.display(self.interner).fmt(f)?; + } + other => write!(f, "{other}")?, } } write!(f, " }}") @@ -632,7 +654,16 @@ impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { Value::Expr(ExprValue::LValue(lvalue)) => { write!(f, "{}", remove_interned_in_lvalue(self.interner, lvalue.clone())) } - Value::TypedExpr(_) => write!(f, "(typed expr)"), + Value::TypedExpr(TypedExpr::ExprId(id)) => { + let hir_expr = self.interner.expression(id); + let expr = hir_expr.to_display_ast(self.interner, Span::default()); + write!(f, "{}", expr.kind) + } + Value::TypedExpr(TypedExpr::StmtId(id)) => { + let hir_statement = self.interner.statement(id); + let stmt = hir_statement.to_display_ast(self.interner, Span::default()); + write!(f, "{}", stmt.kind) + } Value::UnresolvedType(typ) => { if let UnresolvedTypeData::Interned(id) = typ { let typ = self.interner.get_unresolved_type_data(*id); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index cede04dd582..c2038c646b5 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -72,7 +72,7 @@ pub enum ResolverError { NumericConstantInFormatString { name: String, span: Span }, #[error("Closure environment must be a tuple or unit type")] InvalidClosureEnvironment { typ: Type, span: Span }, - #[error("Nested slices are not supported")] + #[error("Nested slices, i.e. 
slices within an array or slice, are not supported")] NestedSlices { span: Span }, #[error("#[recursive] attribute is only allowed on entry points to a program")] MisplacedRecursiveAttribute { ident: Ident }, @@ -323,8 +323,8 @@ impl<'a> From<&'a ResolverError> for Diagnostic { format!("{typ} is not a valid closure environment type"), "Closure environment must be a tuple or unit type".to_string(), *span), ResolverError::NestedSlices { span } => Diagnostic::simple_error( - "Nested slices are not supported".into(), - "Try to use a constant sized array instead".into(), + "Nested slices, i.e. slices within an array or slice, are not supported".into(), + "Try to use a constant sized array or BoundedVec instead".into(), *span, ), ResolverError::MisplacedRecursiveAttribute { ident } => { diff --git a/noir/noir-repo/tooling/lsp/src/lib.rs b/noir/noir-repo/tooling/lsp/src/lib.rs index 4a764f4268b..6557975743c 100644 --- a/noir/noir-repo/tooling/lsp/src/lib.rs +++ b/noir/noir-repo/tooling/lsp/src/lib.rs @@ -4,7 +4,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] use std::{ - collections::{BTreeMap, HashMap}, + collections::{BTreeMap, HashMap, HashSet}, future::Future, ops::{self, ControlFlow}, path::{Path, PathBuf}, @@ -91,10 +91,13 @@ pub struct LspState { open_documents_count: usize, input_files: HashMap, cached_lenses: HashMap>, - cached_definitions: HashMap, + cached_definitions: HashMap, cached_parsed_files: HashMap))>, - cached_def_maps: HashMap>, + cached_def_maps: HashMap>, options: LspInitializationOptions, + + // Tracks files that currently have errors, by package root. 
+ files_with_errors: HashMap>, } impl LspState { @@ -113,6 +116,8 @@ impl LspState { cached_parsed_files: HashMap::new(), cached_def_maps: HashMap::new(), options: Default::default(), + + files_with_errors: HashMap::new(), } } } diff --git a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs index d1ffdb55066..87e7bea8c3b 100644 --- a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs @@ -1,8 +1,12 @@ +use std::collections::HashSet; use std::ops::ControlFlow; +use std::path::PathBuf; use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use lsp_types::DiagnosticTag; +use fm::{FileManager, FileMap}; +use fxhash::FxHashMap as HashMap; +use lsp_types::{DiagnosticTag, Url}; use noirc_driver::{check_crate, file_manager_with_stdlib}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; @@ -105,7 +109,7 @@ pub(super) fn on_did_save_text_document( // caching code lenses and type definitions, and notifying about compilation errors. 
pub(crate) fn process_workspace_for_noir_document( state: &mut LspState, - document_uri: lsp_types::Url, + document_uri: Url, output_diagnostics: bool, ) -> Result<(), async_lsp::Error> { let file_path = document_uri.to_file_path().map_err(|_| { @@ -125,100 +129,123 @@ pub(crate) fn process_workspace_for_noir_document( let parsed_files = parse_diff(&workspace_file_manager, state); - let diagnostics: Vec<_> = workspace - .into_iter() - .flat_map(|package| -> Vec { - let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); - - let (mut context, crate_id) = - crate::prepare_package(&workspace_file_manager, &parsed_files, package); - - let file_diagnostics = match check_crate(&mut context, crate_id, &Default::default()) { - Ok(((), warnings)) => warnings, - Err(errors_and_warnings) => errors_and_warnings, - }; - - // We don't add test headings for a package if it contains no `#[test]` functions - if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) { - let _ = state.client.notify::(NargoPackageTests { - package: package.name.to_string(), - tests, - }); - } - - let collected_lenses = crate::requests::collect_lenses_for_package( - &context, - crate_id, - &workspace, - package, - Some(&file_path), - ); - state.cached_lenses.insert(document_uri.to_string(), collected_lenses); - state.cached_definitions.insert(package_root_dir.clone(), context.def_interner); - state.cached_def_maps.insert(package_root_dir.clone(), context.def_maps); - - let fm = &context.file_manager; - let files = fm.as_file_map(); - - if output_diagnostics { - file_diagnostics - .into_iter() - .filter_map(|FileDiagnostic { file_id, diagnostic, call_stack: _ }| { - // Ignore diagnostics for any file that wasn't the file we saved - // TODO: In the future, we could create "related" diagnostics for these files - if fm.path(file_id).expect("file must exist to have emitted diagnostic") - != file_path - { - return None; - } - - // TODO: Should this be 
the first item in secondaries? Should we bail when we find a range? - let range = diagnostic - .secondaries - .into_iter() - .filter_map(|sec| byte_span_to_range(files, file_id, sec.span.into())) - .last() - .unwrap_or_default(); - - let severity = match diagnostic.kind { - DiagnosticKind::Error => DiagnosticSeverity::ERROR, - DiagnosticKind::Warning => DiagnosticSeverity::WARNING, - DiagnosticKind::Info => DiagnosticSeverity::INFORMATION, - DiagnosticKind::Bug => DiagnosticSeverity::WARNING, - }; - - let mut tags = Vec::new(); - if diagnostic.unnecessary { - tags.push(DiagnosticTag::UNNECESSARY); - } - if diagnostic.deprecated { - tags.push(DiagnosticTag::DEPRECATED); - } - - Some(Diagnostic { - range, - severity: Some(severity), - message: diagnostic.message, - tags: if tags.is_empty() { None } else { Some(tags) }, - ..Default::default() - }) - }) - .collect() - } else { - vec![] - } - }) - .collect(); - - if output_diagnostics { + for package in workspace.into_iter() { + let (mut context, crate_id) = + crate::prepare_package(&workspace_file_manager, &parsed_files, package); + + let file_diagnostics = match check_crate(&mut context, crate_id, &Default::default()) { + Ok(((), warnings)) => warnings, + Err(errors_and_warnings) => errors_and_warnings, + }; + + // We don't add test headings for a package if it contains no `#[test]` functions + if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) { + let _ = state.client.notify::(NargoPackageTests { + package: package.name.to_string(), + tests, + }); + } + + let collected_lenses = crate::requests::collect_lenses_for_package( + &context, + crate_id, + &workspace, + package, + Some(&file_path), + ); + state.cached_lenses.insert(document_uri.to_string(), collected_lenses); + state.cached_definitions.insert(package.root_dir.clone(), context.def_interner); + state.cached_def_maps.insert(package.root_dir.clone(), context.def_maps); + + let fm = &context.file_manager; + let files = 
fm.as_file_map(); + + if output_diagnostics { + publish_diagnostics(state, &package.root_dir, files, fm, file_diagnostics); + } + } + + Ok(()) +} + +fn publish_diagnostics( + state: &mut LspState, + package_root_dir: &PathBuf, + files: &FileMap, + fm: &FileManager, + file_diagnostics: Vec, +) { + let mut diagnostics_per_url: HashMap> = HashMap::default(); + + for file_diagnostic in file_diagnostics.into_iter() { + let file_id = file_diagnostic.file_id; + let diagnostic = file_diagnostic_to_diagnostic(file_diagnostic, files); + + let path = fm.path(file_id).expect("file must exist to have emitted diagnostic"); + if let Ok(uri) = Url::from_file_path(path) { + diagnostics_per_url.entry(uri).or_default().push(diagnostic); + } + } + + let new_files_with_errors: HashSet<_> = diagnostics_per_url.keys().cloned().collect(); + + for (uri, diagnostics) in diagnostics_per_url { let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { - uri: document_uri, + uri, version: None, diagnostics, }); } - Ok(()) + // For files that previously had errors but no longer have errors we still need to publish empty diagnostics + if let Some(old_files_with_errors) = state.files_with_errors.get(package_root_dir) { + for uri in old_files_with_errors.difference(&new_files_with_errors) { + let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { + uri: uri.clone(), + version: None, + diagnostics: vec![], + }); + } + } + + // Remember which files currently have errors, for next time + state.files_with_errors.insert(package_root_dir.clone(), new_files_with_errors); +} + +fn file_diagnostic_to_diagnostic(file_diagnostic: FileDiagnostic, files: &FileMap) -> Diagnostic { + let file_id = file_diagnostic.file_id; + let diagnostic = file_diagnostic.diagnostic; + + // TODO: Should this be the first item in secondaries? Should we bail when we find a range? 
+ let range = diagnostic + .secondaries + .into_iter() + .filter_map(|sec| byte_span_to_range(files, file_id, sec.span.into())) + .last() + .unwrap_or_default(); + + let severity = match diagnostic.kind { + DiagnosticKind::Error => DiagnosticSeverity::ERROR, + DiagnosticKind::Warning => DiagnosticSeverity::WARNING, + DiagnosticKind::Info => DiagnosticSeverity::INFORMATION, + DiagnosticKind::Bug => DiagnosticSeverity::WARNING, + }; + + let mut tags = Vec::new(); + if diagnostic.unnecessary { + tags.push(DiagnosticTag::UNNECESSARY); + } + if diagnostic.deprecated { + tags.push(DiagnosticTag::DEPRECATED); + } + + Diagnostic { + range, + severity: Some(severity), + message: diagnostic.message, + tags: if tags.is_empty() { None } else { Some(tags) }, + ..Default::default() + } } pub(super) fn on_exit( diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action.rs index 8e153bb0b46..95cdc0b88b4 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/code_action.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/code_action.rs @@ -7,26 +7,26 @@ use async_lsp::ResponseError; use fm::{FileId, FileMap, PathString}; use lsp_types::{ CodeAction, CodeActionKind, CodeActionOrCommand, CodeActionParams, CodeActionResponse, - Position, Range, TextDocumentPositionParams, TextEdit, Url, WorkspaceEdit, + TextDocumentPositionParams, TextEdit, Url, WorkspaceEdit, }; -use noirc_errors::{Location, Span}; +use noirc_errors::Span; use noirc_frontend::{ - ast::{Ident, Path, Visitor}, + ast::{ConstructorExpression, Path, Visitor}, graph::CrateId, hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}, - macros_api::{ModuleDefId, NodeInterner}, + macros_api::NodeInterner, +}; +use noirc_frontend::{ parser::{Item, ItemKind, ParsedSubModule}, ParsedModule, }; -use crate::{ - byte_span_to_range, - modules::{get_parent_module_id, module_full_path, module_id_path}, - utils, LspState, -}; +use crate::{utils, LspState}; use 
super::{process_request, to_lsp_location}; +mod fill_struct_fields; +mod import_or_qualify; #[cfg(test)] mod tests; @@ -68,6 +68,7 @@ struct CodeActionFinder<'a> { uri: Url, files: &'a FileMap, file: FileId, + source: &'a str, lines: Vec<&'a str>, byte_index: usize, /// The module ID in scope. This might change as we traverse the AST @@ -108,6 +109,7 @@ impl<'a> CodeActionFinder<'a> { uri, files, file, + source, lines: source.lines().collect(), byte_index, module_id, @@ -137,46 +139,7 @@ impl<'a> CodeActionFinder<'a> { Some(code_actions) } - fn push_import_code_action(&mut self, full_path: &str) { - let line = self.auto_import_line as u32; - let character = (self.nesting * 4) as u32; - let indent = " ".repeat(self.nesting * 4); - let mut newlines = "\n"; - - // If the line we are inserting into is not an empty line, insert an extra line to make some room - if let Some(line_text) = self.lines.get(line as usize) { - if !line_text.trim().is_empty() { - newlines = "\n\n"; - } - } - - let title = format!("Import {}", full_path); - let text_edit = TextEdit { - range: Range { start: Position { line, character }, end: Position { line, character } }, - new_text: format!("use {};{}{}", full_path, newlines, indent), - }; - - let code_action = self.new_quick_fix(title, text_edit); - self.code_actions.push(CodeActionOrCommand::CodeAction(code_action)); - } - - fn push_qualify_code_action(&mut self, ident: &Ident, prefix: &str, full_path: &str) { - let Some(range) = byte_span_to_range( - self.files, - self.file, - ident.span().start() as usize..ident.span().start() as usize, - ) else { - return; - }; - - let title = format!("Qualify as {}", full_path); - let text_edit = TextEdit { range, new_text: format!("{}::", prefix) }; - - let code_action = self.new_quick_fix(title, text_edit); - self.code_actions.push(CodeActionOrCommand::CodeAction(code_action)); - } - - fn new_quick_fix(&self, title: String, text_edit: TextEdit) -> CodeAction { + fn new_quick_fix(&self, title: String, 
text_edit: TextEdit) -> CodeActionOrCommand { let mut changes = HashMap::new(); changes.insert(self.uri.clone(), vec![text_edit]); @@ -186,7 +149,7 @@ impl<'a> CodeActionFinder<'a> { change_annotations: None, }; - CodeAction { + CodeActionOrCommand::CodeAction(CodeAction { title, kind: Some(CodeActionKind::QUICKFIX), diagnostics: None, @@ -195,7 +158,7 @@ impl<'a> CodeActionFinder<'a> { is_preferred: None, disabled: None, data: None, - } + }) } fn includes_span(&self, span: Span) -> bool { @@ -244,69 +207,16 @@ impl<'a> Visitor for CodeActionFinder<'a> { } fn visit_path(&mut self, path: &Path) { - if path.segments.len() != 1 { - return; - } - - let ident = &path.segments[0].ident; - if !self.includes_span(ident.span()) { - return; - } - - let location = Location::new(ident.span(), self.file); - if self.interner.find_referenced(location).is_some() { - return; - } - - let current_module_parent_id = get_parent_module_id(self.def_maps, self.module_id); - - // The Path doesn't resolve to anything so it means it's an error and maybe we - // can suggest an import or to fully-qualify the path. - for (name, entries) in self.interner.get_auto_import_names() { - if name != &ident.0.contents { - continue; - } - - for (module_def_id, visibility, defining_module) in entries { - let module_full_path = if let Some(defining_module) = defining_module { - module_id_path( - *defining_module, - &self.module_id, - current_module_parent_id, - self.interner, - ) - } else { - let Some(module_full_path) = module_full_path( - *module_def_id, - *visibility, - self.module_id, - current_module_parent_id, - self.interner, - ) else { - continue; - }; - module_full_path - }; - - let full_path = if defining_module.is_some() - || !matches!(module_def_id, ModuleDefId::ModuleId(..)) - { - format!("{}::{}", module_full_path, name) - } else { - module_full_path.clone() - }; + self.import_or_qualify(path); + } - let qualify_prefix = if let ModuleDefId::ModuleId(..) 
= module_def_id { - let mut segments: Vec<_> = module_full_path.split("::").collect(); - segments.pop(); - segments.join("::") - } else { - module_full_path - }; + fn visit_constructor_expression( + &mut self, + constructor: &ConstructorExpression, + span: Span, + ) -> bool { + self.fill_struct_fields(constructor, span); - self.push_import_code_action(&full_path); - self.push_qualify_code_action(ident, &qualify_prefix, &full_path); - } - } + true } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action/fill_struct_fields.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action/fill_struct_fields.rs new file mode 100644 index 00000000000..f57fbc652ad --- /dev/null +++ b/noir/noir-repo/tooling/lsp/src/requests/code_action/fill_struct_fields.rs @@ -0,0 +1,307 @@ +use lsp_types::TextEdit; +use noirc_errors::{Location, Span}; +use noirc_frontend::{ast::ConstructorExpression, node_interner::ReferenceId}; + +use crate::byte_span_to_range; + +use super::CodeActionFinder; + +impl<'a> CodeActionFinder<'a> { + pub(super) fn fill_struct_fields(&mut self, constructor: &ConstructorExpression, span: Span) { + if !self.includes_span(span) { + return; + } + + // Find out which struct this is + let location = Location::new(constructor.type_name.last_ident().span(), self.file); + let Some(ReferenceId::Struct(struct_id)) = self.interner.find_referenced(location) else { + return; + }; + + let struct_type = self.interner.get_struct(struct_id); + let struct_type = struct_type.borrow(); + + // First get all of the struct's fields + let mut fields = struct_type.get_fields_as_written(); + + // Remove the ones that already exists in the constructor + for (field, _) in &constructor.fields { + fields.retain(|(name, _)| name != &field.0.contents); + } + + if fields.is_empty() { + return; + } + + // Some fields are missing. Let's suggest a quick fix that adds them. 
+ let bytes = self.source.as_bytes(); + let right_brace_index = span.end() as usize - 1; + let mut index = right_brace_index - 1; + while bytes[index].is_ascii_whitespace() { + index -= 1; + } + + let char_before_right_brace = bytes[index] as char; + + index += 1; + + let Some(range) = byte_span_to_range(self.files, self.file, index..index) else { + return; + }; + + // If the constructor spans multiple lines, we'll add the new fields in new lines too. + // Otherwise we'll add all the fields in a single line. + let constructor_range = + byte_span_to_range(self.files, self.file, span.start() as usize..span.end() as usize); + + // If it's multiline, find out the indent of the beginning line: we'll add new fields + // with that indent "plus one" (4 more spaces). + let line_indent = if let Some(constructor_range) = constructor_range { + if constructor_range.start.line == constructor_range.end.line { + None + } else { + let line = self.lines[constructor_range.start.line as usize]; + let whitespace_bytes = + line.bytes().take_while(|byte| byte.is_ascii_whitespace()).count(); + Some(whitespace_bytes) + } + } else { + None + }; + let line_indent = line_indent.map(|indent| " ".repeat(indent + 4)); + + let on_whitespace = bytes[index].is_ascii_whitespace(); + + let mut new_text = String::new(); + + // Add a comma if there's not a trailing one (if there are existing fields) + if !constructor.fields.is_empty() && char_before_right_brace != ',' { + new_text.push(','); + } + + // Add space or newline depending on whether it's multiline or not + if let Some(line_indent) = &line_indent { + new_text.push('\n'); + new_text.push_str(line_indent); + } else if !on_whitespace || constructor.fields.is_empty() { + new_text.push(' '); + } + + for (index, (name, _)) in fields.iter().enumerate() { + if index > 0 { + new_text.push(','); + if let Some(line_indent) = &line_indent { + new_text.push('\n'); + new_text.push_str(line_indent); + } else { + new_text.push(' '); + } + } + 
new_text.push_str(name); + new_text.push_str(": ()"); + } + + if !bytes[right_brace_index - 1].is_ascii_whitespace() { + new_text.push(' '); + } + + let title = "Fill struct fields".to_string(); + let text_edit = TextEdit { range, new_text }; + let code_action = self.new_quick_fix(title, text_edit); + self.code_actions.push(code_action); + } +} + +#[cfg(test)] +mod tests { + use tokio::test; + + use crate::requests::code_action::tests::assert_code_action; + + #[test] + async fn test_fill_struct_fields_code_action_no_space() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo {>|<} + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { one: (), two: () } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_space() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { >|<} + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { one: (), two: () } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_some_fields() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + three: Field, + } + + fn main() { + Foo { two: 1>|<} + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + three: Field, + } + + fn main() { + Foo { two: 1, one: (), three: () } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_some_fields_trailing_comma() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + three: Field, + } + + fn main() { + Foo { two: 1,>|<} + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, 
+ three: Field, + } + + fn main() { + Foo { two: 1, one: (), three: () } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_multiline_empty() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo {>|< + } + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { + one: (), + two: () + } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_multiline_some_fields() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo {>|< + one: 1, + } + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { + one: 1, + two: () + } + } + "#; + + assert_code_action(title, src, expected).await; + } +} diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action/import_or_qualify.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action/import_or_qualify.rs new file mode 100644 index 00000000000..d07d117a317 --- /dev/null +++ b/noir/noir-repo/tooling/lsp/src/requests/code_action/import_or_qualify.rs @@ -0,0 +1,240 @@ +use lsp_types::{Position, Range, TextEdit}; +use noirc_errors::Location; +use noirc_frontend::{ + ast::{Ident, Path}, + macros_api::ModuleDefId, +}; + +use crate::{ + byte_span_to_range, + modules::{get_parent_module_id, module_full_path, module_id_path}, +}; + +use super::CodeActionFinder; + +impl<'a> CodeActionFinder<'a> { + pub(super) fn import_or_qualify(&mut self, path: &Path) { + if path.segments.len() != 1 { + return; + } + + let ident = &path.segments[0].ident; + if !self.includes_span(ident.span()) { + return; + } + + let location = Location::new(ident.span(), self.file); + if self.interner.find_referenced(location).is_some() { + return; + } + + let current_module_parent_id = 
get_parent_module_id(self.def_maps, self.module_id); + + // The Path doesn't resolve to anything so it means it's an error and maybe we + // can suggest an import or to fully-qualify the path. + for (name, entries) in self.interner.get_auto_import_names() { + if name != &ident.0.contents { + continue; + } + + for (module_def_id, visibility, defining_module) in entries { + let module_full_path = if let Some(defining_module) = defining_module { + module_id_path( + *defining_module, + &self.module_id, + current_module_parent_id, + self.interner, + ) + } else { + let Some(module_full_path) = module_full_path( + *module_def_id, + *visibility, + self.module_id, + current_module_parent_id, + self.interner, + ) else { + continue; + }; + module_full_path + }; + + let full_path = if defining_module.is_some() + || !matches!(module_def_id, ModuleDefId::ModuleId(..)) + { + format!("{}::{}", module_full_path, name) + } else { + module_full_path.clone() + }; + + let qualify_prefix = if let ModuleDefId::ModuleId(..) 
= module_def_id { + let mut segments: Vec<_> = module_full_path.split("::").collect(); + segments.pop(); + segments.join("::") + } else { + module_full_path + }; + + self.push_import_code_action(&full_path); + self.push_qualify_code_action(ident, &qualify_prefix, &full_path); + } + } + } + + fn push_import_code_action(&mut self, full_path: &str) { + let line = self.auto_import_line as u32; + let character = (self.nesting * 4) as u32; + let indent = " ".repeat(self.nesting * 4); + let mut newlines = "\n"; + + // If the line we are inserting into is not an empty line, insert an extra line to make some room + if let Some(line_text) = self.lines.get(line as usize) { + if !line_text.trim().is_empty() { + newlines = "\n\n"; + } + } + + let title = format!("Import {}", full_path); + let text_edit = TextEdit { + range: Range { start: Position { line, character }, end: Position { line, character } }, + new_text: format!("use {};{}{}", full_path, newlines, indent), + }; + + let code_action = self.new_quick_fix(title, text_edit); + self.code_actions.push(code_action); + } + + fn push_qualify_code_action(&mut self, ident: &Ident, prefix: &str, full_path: &str) { + let Some(range) = byte_span_to_range( + self.files, + self.file, + ident.span().start() as usize..ident.span().start() as usize, + ) else { + return; + }; + + let title = format!("Qualify as {}", full_path); + let text_edit = TextEdit { range, new_text: format!("{}::", prefix) }; + + let code_action = self.new_quick_fix(title, text_edit); + self.code_actions.push(code_action); + } +} + +#[cfg(test)] +mod tests { + use tokio::test; + + use crate::requests::code_action::tests::assert_code_action; + + #[test] + async fn test_qualify_code_action_for_struct() { + let title = "Qualify as foo::bar::SomeTypeInBar"; + + let src = r#" + mod foo { + mod bar { + struct SomeTypeInBar {} + } + } + + fn foo(x: SomeType>|||| CodeActionResponse { .unwrap() } -async fn assert_code_action(title: &str, src: &str, expected: &str) { 
+pub(crate) async fn assert_code_action(title: &str, src: &str, expected: &str) { let actions = get_code_action(src).await; let action = actions .iter() @@ -87,150 +86,3 @@ fn apply_text_edit(src: &str, text_edit: &TextEdit) -> String { lines[text_edit.range.start.line as usize] = &line; lines.join("\n") } - -#[test] -async fn test_qualify_code_action_for_struct() { - let title = "Qualify as foo::bar::SomeTypeInBar"; - - let src = r#" - mod foo { - mod bar { - struct SomeTypeInBar {} - } - } - - fn foo(x: SomeType>||||| { location: noirc_errors::Location, files: &'a FileMap, interner: &'a NodeInterner, - interners: &'a HashMap, + interners: &'a HashMap, crate_id: CrateId, crate_name: String, dependencies: &'a Vec, @@ -432,8 +432,6 @@ where ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file") })?; - let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into(); - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager( state, @@ -447,9 +445,9 @@ where let interner; let def_maps; - if let Some(def_interner) = state.cached_definitions.get(&package_root_path) { + if let Some(def_interner) = state.cached_definitions.get(&package.root_dir) { interner = def_interner; - def_maps = state.cached_def_maps.get(&package_root_path).unwrap(); + def_maps = state.cached_def_maps.get(&package.root_dir).unwrap(); } else { // We ignore the warnings and errors produced by compilation while resolving the definition let _ = noirc_driver::check_crate(&mut context, crate_id, &Default::default()); @@ -479,7 +477,7 @@ where pub(crate) fn find_all_references_in_workspace( location: noirc_errors::Location, interner: &NodeInterner, - cached_interners: &HashMap, + cached_interners: &HashMap, files: &FileMap, include_declaration: bool, include_self_type_name: bool, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs 
b/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs index 9ff7a42e5f5..bfaa913b33a 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs @@ -35,8 +35,6 @@ pub(crate) fn run(_args: LspCommand, _config: NargoConfig) -> Result<(), CliErro .service(router) }); - eprintln!("LSP starting..."); - // Prefer truly asynchronous piped stdin/stdout without blocking tasks. #[cfg(unix)] let (stdin, stdout) = ( From e1dc9878de06a1f3d4cde9bbcf652ac342951d52 Mon Sep 17 00:00:00 2001 From: PhilWindle <60546371+PhilWindle@users.noreply.github.com> Date: Wed, 4 Sep 2024 19:30:29 +0100 Subject: [PATCH 10/18] chore: Merge provernet to master (#8373) This PR brings the provernet changes back to master --------- Co-authored-by: Santiago Palladino Co-authored-by: Alex Gherghisan Co-authored-by: spypsy --- .github/workflows/devnet-deploys.yml | 42 +++- iac/mainnet-fork/terraform/main.tf | 4 +- iac/mainnet-fork/terraform/variables.tf | 10 + yarn-project/archiver/package.json | 5 +- .../archiver/src/archiver/data_retrieval.ts | 24 +- .../archiver/src/archiver/eth_log_handlers.ts | 56 ++++- yarn-project/aztec/package.json | 1 + .../aztec/src/cli/aztec_start_options.ts | 10 + yarn-project/aztec/src/cli/cli.ts | 5 +- .../src/cli/cmds/start_proof_verifier.ts | 26 +++ yarn-project/aztec/terraform/bot/main.tf | 3 + yarn-project/aztec/terraform/bot/variables.tf | 15 ++ .../aztec/terraform/proof-verifier/main.tf | 221 ++++++++++++++++++ .../terraform/proof-verifier/variables.tf | 29 +++ yarn-project/aztec/terraform/prover/main.tf | 6 +- yarn-project/aztec/tsconfig.json | 3 + yarn-project/end-to-end/package.local.json | 2 +- yarn-project/foundation/src/config/env_var.ts | 2 + yarn-project/package.json | 1 + yarn-project/proof-verifier/.eslintrc.cjs | 1 + yarn-project/proof-verifier/package.json | 73 ++++++ yarn-project/proof-verifier/src/config.ts | 73 ++++++ yarn-project/proof-verifier/src/index.ts | 2 + 
.../proof-verifier/src/proof_verifier.ts | 112 +++++++++ yarn-project/proof-verifier/tsconfig.json | 35 +++ .../src/publisher/l1-publisher.ts | 2 +- yarn-project/telemetry-client/src/metrics.ts | 2 + yarn-project/tsconfig.json | 3 +- yarn-project/yarn.lock | 23 ++ 29 files changed, 776 insertions(+), 15 deletions(-) create mode 100644 yarn-project/aztec/src/cli/cmds/start_proof_verifier.ts create mode 100644 yarn-project/aztec/terraform/proof-verifier/main.tf create mode 100644 yarn-project/aztec/terraform/proof-verifier/variables.tf create mode 100644 yarn-project/proof-verifier/.eslintrc.cjs create mode 100644 yarn-project/proof-verifier/package.json create mode 100644 yarn-project/proof-verifier/src/config.ts create mode 100644 yarn-project/proof-verifier/src/index.ts create mode 100644 yarn-project/proof-verifier/src/proof_verifier.ts create mode 100644 yarn-project/proof-verifier/tsconfig.json diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index 518cf3d81d4..89166794d25 100644 --- a/.github/workflows/devnet-deploys.yml +++ b/.github/workflows/devnet-deploys.yml @@ -66,6 +66,8 @@ env: TF_VAR_FORK_MNEMONIC: ${{ secrets.FORK_MNEMONIC }} TF_VAR_INFURA_API_KEY: ${{ secrets.INFURA_API_KEY }} TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets.DEVNET_API_KEY }} + TF_VAR_MAINNET_FORK_CPU_UNITS: 2048 + TF_VAR_MAINNET_FORK_MEMORY_UNITS: 4096 # Faucet TF_VAR_FAUCET_ACCOUNT_INDEX: 9 @@ -123,6 +125,12 @@ jobs: min_txs_per_block: ${{ steps.set_network_vars.outputs.min_txs_per_block }} bot_flush_setup_txs: ${{ steps.set_network_vars.outputs.bot_flush_setup_txs }} bot_max_pending_txs: ${{ steps.set_network_vars.outputs.bot_max_pending_txs }} + mainnet_fork_cpu_units: ${{ steps.set_network_vars.outputs.mainnet_fork_cpu_units }} + mainnet_fork_memory_units: ${{ steps.set_network_vars.outputs.mainnet_fork_memory_units }} + bot_skip_simulation: ${{ steps.set_network_vars.outputs.bot_skip_simulation }} + bot_l2_gas_limit: ${{ 
steps.set_network_vars.outputs.bot_l2_gas_limit }} + bot_da_gas_limit: ${{ steps.set_network_vars.outputs.bot_da_gas_limit }} + bot_count: ${{ steps.set_network_vars.outputs.bot_count }} steps: - name: Set network vars shell: bash @@ -135,7 +143,7 @@ jobs: echo "branch_name=devnet" >> $GITHUB_OUTPUT echo "network_api_key=DEVNET_API_KEY" >> $GITHUB_OUTPUT echo "network_fork_admin_api_key=DEVNET_API_KEY" >> $GITHUB_OUTPUT - echo "agents_per_prover=4" >> $GITHUB_OUTPUT + echo "agents_per_prover=2" >> $GITHUB_OUTPUT echo "bot_interval=180" >> $GITHUB_OUTPUT echo "node_tcp_range_start=40100" >> $GITHUB_OUTPUT echo "node_udp_range_start=45100" >> $GITHUB_OUTPUT @@ -147,9 +155,15 @@ jobs: echo "faucet_lb_priority=601" >> $GITHUB_OUTPUT echo "min_txs_per_block=1" >> $GITHUB_OUTPUT echo "max_txs_per_block=64" >> $GITHUB_OUTPUT - echo "bot_follow_chain=NONE" >> $GITHUB_OUTPUT + echo "bot_follow_chain=PROVEN" >> $GITHUB_OUTPUT echo "bot_flush_setup_txs=false" >> $GITHUB_OUTPUT echo "bot_max_pending_txs=1" >> $GITHUB_OUTPUT + echo "mainnet_fork_cpu_units=2048" >> $GITHUB_OUTPUT + echo "mainnet_fork_memory_units=4096" >> $GITHUB_OUTPUT + echo "bot_skip_simulation=false" >> $GITHUB_OUTPUT + echo "bot_l2_gas_limit=" >> $GITHUB_OUTPUT + echo "bot_da_gas_limit=" >> $GITHUB_OUTPUT + echo "bot_count=1" >> $GITHUB_OUTPUT elif [ "$BRANCH_NAME" = "provernet" ] then echo "deploy_tag=provernet" >> $GITHUB_OUTPUT @@ -171,6 +185,12 @@ jobs: echo "bot_follow_chain=NONE" >> $GITHUB_OUTPUT echo "bot_flush_setup_txs=true" >> $GITHUB_OUTPUT echo "bot_max_pending_txs=32" >> $GITHUB_OUTPUT + echo "mainnet_fork_cpu_units=8192" >> $GITHUB_OUTPUT + echo "mainnet_fork_memory_units=32768" >> $GITHUB_OUTPUT + echo "bot_skip_simulation=true" >> $GITHUB_OUTPUT + echo "bot_l2_gas_limit=1000000000" >> $GITHUB_OUTPUT + echo "bot_da_gas_limit=1000000000" >> $GITHUB_OUTPUT + echo "bot_count=1" >> $GITHUB_OUTPUT elif [ "$BRANCH_NAME" = "alphanet" ] then echo "deploy_tag=alphanet" >> $GITHUB_OUTPUT @@ -178,7 
+198,7 @@ jobs: echo "network_api_key=ALPHANET_API_KEY" >> $GITHUB_OUTPUT echo "network_fork_admin_api_key=ALPHANET_API_KEY" >> $GITHUB_OUTPUT echo "agents_per_prover=1" >> $GITHUB_OUTPUT - echo "bot_interval=30" >> $GITHUB_OUTPUT + echo "bot_interval=10" >> $GITHUB_OUTPUT echo "node_tcp_range_start=40000" >> $GITHUB_OUTPUT echo "node_udp_range_start=45000" >> $GITHUB_OUTPUT echo "prover_node_tcp_range_start=41000" >> $GITHUB_OUTPUT @@ -192,6 +212,12 @@ jobs: echo "bot_follow_chain=PROVEN" >> $GITHUB_OUTPUT echo "bot_flush_setup_txs=false" >> $GITHUB_OUTPUT echo "bot_max_pending_txs=1" >> $GITHUB_OUTPUT + echo "mainnet_fork_cpu_units=2048" >> $GITHUB_OUTPUT + echo "mainnet_fork_memory_units=4096" >> $GITHUB_OUTPUT + echo "bot_skip_simulation=false" >> $GITHUB_OUTPUT + echo "bot_l2_gas_limit=" >> $GITHUB_OUTPUT + echo "bot_da_gas_limit=" >> $GITHUB_OUTPUT + echo "bot_count=1" >> $GITHUB_OUTPUT else echo "Unrecognized Branch!!" exit 1 @@ -462,6 +488,12 @@ jobs: TF_VAR_PROVER_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.prover_node_lb_priority_range_start }} TF_VAR_SEQ_MIN_TX_PER_BLOCK: 1 TF_VAR_SEQ_MAX_TX_PER_BLOCK: ${{ needs.set-network.outputs.max_txs_per_block }} + TF_VAR_MAINNET_FORK_CPU_UNITS: ${{ needs.set-network.outputs.mainnet_fork_cpu_units }} + TF_VAR_MAINNET_FORK_MEMORY_UNITS: ${{ needs.set-network.outputs.mainnet_fork_memory_units }} + TF_VAR_BOT_SKIP_PUBLIC_SIMULATION: ${{ needs.set-network.outputs.bot_skip_simulation }} + TF_VAR_BOT_L2_GAS_LIMIT: ${{ needs.set-network.outputs.bot_l2_gas_limit }} + TF_VAR_BOT_DA_GAS_LIMIT: ${{ needs.set-network.outputs.bot_da_gas_limit }} + TF_VAR_BOT_COUNT: ${{ needs.set-network.outputs.bot_count }} steps: - uses: actions/checkout@v4 with: @@ -679,6 +711,10 @@ jobs: TF_VAR_BOT_FOLLOW_CHAIN: ${{ needs.set-network.outputs.bot_follow_chain }} TF_VAR_PROVING_ENABLED: true TF_VAR_BOT_NO_START: false + TF_VAR_BOT_SKIP_PUBLIC_SIMULATION: ${{ needs.set-network.outputs.bot_skip_simulation }} + TF_VAR_BOT_L2_GAS_LIMIT: 
${{ needs.set-network.outputs.bot_l2_gas_limit }} + TF_VAR_BOT_DA_GAS_LIMIT: ${{ needs.set-network.outputs.bot_da_gas_limit }} + TF_VAR_BOT_COUNT: ${{ needs.set-network.outputs.bot_count }} steps: - uses: actions/checkout@v4 with: diff --git a/iac/mainnet-fork/terraform/main.tf b/iac/mainnet-fork/terraform/main.tf index 49c407db09c..737be2e298f 100644 --- a/iac/mainnet-fork/terraform/main.tf +++ b/iac/mainnet-fork/terraform/main.tf @@ -102,8 +102,8 @@ resource "aws_ecs_task_definition" "aztec_mainnet_fork" { family = "${var.DEPLOY_TAG}-mainnet-fork" requires_compatibilities = ["FARGATE"] network_mode = "awsvpc" - cpu = "2048" - memory = "4096" + cpu = var.MAINNET_FORK_CPU_UNITS + memory = var.MAINNET_FORK_MEMORY_UNITS execution_role_arn = data.terraform_remote_state.setup_iac.outputs.ecs_task_execution_role_arn volume { diff --git a/iac/mainnet-fork/terraform/variables.tf b/iac/mainnet-fork/terraform/variables.tf index 1ba3012169b..c64895845c9 100644 --- a/iac/mainnet-fork/terraform/variables.tf +++ b/iac/mainnet-fork/terraform/variables.tf @@ -25,3 +25,13 @@ variable "DEPLOY_TAG" { variable "L1_CHAIN_ID" { type = string } + +variable "MAINNET_FORK_CPU_UNITS" { + type = string + default = "2048" +} + +variable "MAINNET_FORK_MEMORY_UNITS" { + type = string + default = "4096" +} diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json index 9514bf2b7e4..c1571c1429c 100644 --- a/yarn-project/archiver/package.json +++ b/yarn-project/archiver/package.json @@ -2,7 +2,10 @@ "name": "@aztec/archiver", "version": "0.1.0", "type": "module", - "exports": "./dest/index.js", + "exports": { + ".": "./dest/index.js", + "./data-retrieval": "./dest/archiver/data_retrieval.js" + }, "typedocOptions": { "entryPoints": [ "./src/index.ts" diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 4ae92665b3d..81aca60a783 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ 
b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -1,5 +1,5 @@ import { type Body, type InboxLeaf } from '@aztec/circuit-types'; -import { type AppendOnlyTreeSnapshot, Fr, type Header } from '@aztec/circuits.js'; +import { type AppendOnlyTreeSnapshot, Fr, type Header, type Proof } from '@aztec/circuits.js'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; @@ -7,6 +7,7 @@ import { RollupAbi } from '@aztec/l1-artifacts'; import { type Hex, type PublicClient, getAbiItem } from 'viem'; import { + getBlockProofFromSubmitProofTx, getL2BlockProposedLogs, getMessageSentLogs, getTxsPublishedLogs, @@ -163,3 +164,24 @@ export async function retrieveL2ProofVerifiedEvents( txHash: log.transactionHash, })); } + +/** Retrieve submitted proofs from the rollup contract */ +export async function retrieveL2ProofsFromRollup( + publicClient: PublicClient, + rollupAddress: EthAddress, + searchStartBlock: bigint, + searchEndBlock?: bigint, +): Promise> { + const logs = await retrieveL2ProofVerifiedEvents(publicClient, rollupAddress, searchStartBlock, searchEndBlock); + const retrievedData: { proof: Proof; proverId: Fr; l2BlockNumber: bigint; txHash: `0x${string}` }[] = []; + const lastProcessedL1BlockNumber = logs.length > 0 ? 
logs.at(-1)!.l1BlockNumber : searchStartBlock - 1n; + + for (const { txHash, proverId, l2BlockNumber } of logs) { + const proofData = await getBlockProofFromSubmitProofTx(publicClient, txHash, l2BlockNumber, proverId); + retrievedData.push({ proof: proofData.proof, proverId: proofData.proverId, l2BlockNumber, txHash }); + } + return { + retrievedData, + lastProcessedL1BlockNumber, + }; +} diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index 04d8a8b5339..990cccde1ae 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -1,5 +1,5 @@ import { Body, InboxLeaf } from '@aztec/circuit-types'; -import { AppendOnlyTreeSnapshot, Header } from '@aztec/circuits.js'; +import { AppendOnlyTreeSnapshot, Header, Proof } from '@aztec/circuits.js'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { numToUInt32BE } from '@aztec/foundation/serialize'; @@ -257,3 +257,57 @@ export function getMessageSentLogs( toBlock: toBlock + 1n, // the toBlock argument in getLogs is exclusive }); } + +export type SubmitBlockProof = { + header: Header; + archiveRoot: Fr; + proverId: Fr; + aggregationObject: Buffer; + proof: Proof; +}; + +/** + * Gets block metadata (header and archive snapshot) from the calldata of an L1 transaction. + * Assumes that the block was published from an EOA. + * TODO: Add retries and error management. + * @param publicClient - The viem public client to use for transaction retrieval. + * @param txHash - Hash of the tx that published it. + * @param l2BlockNum - L2 block number. 
+ * @returns L2 block metadata (header and archive) from the calldata, deserialized + */ +export async function getBlockProofFromSubmitProofTx( + publicClient: PublicClient, + txHash: `0x${string}`, + l2BlockNum: bigint, + expectedProverId: Fr, +): Promise { + const { input: data } = await publicClient.getTransaction({ hash: txHash }); + const { functionName, args } = decodeFunctionData({ + abi: RollupAbi, + data, + }); + + if (!(functionName === 'submitBlockRootProof')) { + throw new Error(`Unexpected method called ${functionName}`); + } + const [headerHex, archiveHex, proverIdHex, aggregationObjectHex, proofHex] = args!; + + const header = Header.fromBuffer(Buffer.from(hexToBytes(headerHex))); + const proverId = Fr.fromString(proverIdHex); + + const blockNumberFromHeader = header.globalVariables.blockNumber.toBigInt(); + if (blockNumberFromHeader !== l2BlockNum) { + throw new Error(`Block number mismatch: expected ${l2BlockNum} but got ${blockNumberFromHeader}`); + } + if (!proverId.equals(expectedProverId)) { + throw new Error(`Prover ID mismatch: expected ${expectedProverId} but got ${proverId}`); + } + + return { + header, + proverId, + aggregationObject: Buffer.from(hexToBytes(aggregationObjectHex)), + archiveRoot: Fr.fromString(archiveHex), + proof: Proof.fromBuffer(Buffer.from(hexToBytes(proofHex))), + }; +} diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index fe0c37048ee..b986f0d167b 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -48,6 +48,7 @@ "@aztec/noir-protocol-circuits-types": "workspace:^", "@aztec/p2p": "workspace:^", "@aztec/p2p-bootstrap": "workspace:^", + "@aztec/proof-verifier": "workspace:^", "@aztec/protocol-contracts": "workspace:^", "@aztec/prover-client": "workspace:^", "@aztec/prover-node": "workspace:^", diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 1a405bb5475..953f07adcb3 100644 --- 
a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -9,6 +9,7 @@ import { isBooleanConfigValue, } from '@aztec/foundation/config'; import { bootnodeConfigMappings, p2pConfigMappings } from '@aztec/p2p'; +import { proofVerifierConfigMappings } from '@aztec/proof-verifier'; import { proverClientConfigMappings } from '@aztec/prover-client'; import { proverNodeConfigMappings } from '@aztec/prover-node'; import { allPxeConfigMappings } from '@aztec/pxe'; @@ -303,6 +304,15 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { }, ...getOptions('bot', botConfigMappings), ], + 'PROOF VERIFIER': [ + { + flag: '--proof-verifier', + description: 'Starts Aztec Proof Verifier with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('proofVerifier', proofVerifierConfigMappings), + ], TXE: [ { flag: '--txe', diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index d05cc439a67..fc8a0d226af 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -76,6 +76,9 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge if (options.node) { const { startNode } = await import('./cmds/start_node.js'); services = await startNode(options, signalHandlers, userLog); + } else if (options.proofVerifier) { + const { startProofVerifier } = await import('./cmds/start_proof_verifier.js'); + services = await startProofVerifier(options, signalHandlers, userLog); } else if (options.bot) { const { startBot } = await import('./cmds/start_bot.js'); services = await startBot(options, signalHandlers, userLog); @@ -101,7 +104,7 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge userLog(`Cannot run a standalone sequencer without a node`); process.exit(1); } else { - userLog(`No module specified to start ${JSON.stringify(options, null, 2)}`); + userLog(`No module specified to 
start`); process.exit(1); } } diff --git a/yarn-project/aztec/src/cli/cmds/start_proof_verifier.ts b/yarn-project/aztec/src/cli/cmds/start_proof_verifier.ts new file mode 100644 index 00000000000..89477caaffb --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/start_proof_verifier.ts @@ -0,0 +1,26 @@ +import { type ServerList } from '@aztec/foundation/json-rpc/server'; +import { type LogFn } from '@aztec/foundation/log'; +import { ProofVerifier, proofVerifierConfigMappings } from '@aztec/proof-verifier'; +import { createAndStartTelemetryClient, telemetryClientConfigMappings } from '@aztec/telemetry-client/start'; + +import { extractRelevantOptions } from '../util.js'; + +export async function startProofVerifier( + options: any, + signalHandlers: (() => Promise)[], + userLog: LogFn, +): Promise { + const services: ServerList = []; + + const config = extractRelevantOptions(options, proofVerifierConfigMappings, 'proofVerifier'); + + const telemetryConfig = extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'); + const telemetry = await createAndStartTelemetryClient(telemetryConfig); + const proofVerifier = await ProofVerifier.new(config, telemetry); + + userLog('Starting proof verifier'); + proofVerifier.start(); + + signalHandlers.push(() => proofVerifier.stop()); + return services; +} diff --git a/yarn-project/aztec/terraform/bot/main.tf b/yarn-project/aztec/terraform/bot/main.tf index 31f9d1cd9ba..4dfe0641bf4 100644 --- a/yarn-project/aztec/terraform/bot/main.tf +++ b/yarn-project/aztec/terraform/bot/main.tf @@ -173,6 +173,9 @@ resource "aws_ecs_task_definition" "aztec-bot" { { name = "NETWORK", value = var.DEPLOY_TAG }, { name = "BOT_FLUSH_SETUP_TRANSACTIONS", value = tostring(var.BOT_FLUSH_SETUP_TRANSACTIONS) }, { name = "BOT_MAX_PENDING_TXS", value = tostring(var.BOT_MAX_PENDING_TXS) }, + { name = "BOT_SKIP_PUBLIC_SIMULATION", value = tostring(var.BOT_SKIP_PUBLIC_SIMULATION) }, + { name = "BOT_L2_GAS_LIMIT", value = var.BOT_L2_GAS_LIMIT }, + { 
name = "BOT_DA_GAS_LIMIT", value = var.BOT_DA_GAS_LIMIT }, { name = "LOG_JSON", value = "1" } ] logConfiguration = { diff --git a/yarn-project/aztec/terraform/bot/variables.tf b/yarn-project/aztec/terraform/bot/variables.tf index 4d3d78100f4..679dd2187ab 100644 --- a/yarn-project/aztec/terraform/bot/variables.tf +++ b/yarn-project/aztec/terraform/bot/variables.tf @@ -66,3 +66,18 @@ variable "BOT_MAX_PENDING_TXS" { type = number default = 1 } + +variable "BOT_SKIP_PUBLIC_SIMULATION" { + type = bool + default = false +} + +variable "BOT_L2_GAS_LIMIT" { + type = string +} + +variable "BOT_DA_GAS_LIMIT" { + type = string +} + + diff --git a/yarn-project/aztec/terraform/proof-verifier/main.tf b/yarn-project/aztec/terraform/proof-verifier/main.tf new file mode 100644 index 00000000000..970bcc2f9fe --- /dev/null +++ b/yarn-project/aztec/terraform/proof-verifier/main.tf @@ -0,0 +1,221 @@ +terraform { + backend "s3" { + bucket = "aztec-terraform" + region = "eu-west-2" + } + required_providers { + aws = { + source = "hashicorp/aws" + version = "3.74.2" + } + } +} + +# Define provider and region +provider "aws" { + region = "eu-west-2" +} + +data "terraform_remote_state" "aztec2_iac" { + backend = "s3" + config = { + bucket = "aztec-terraform" + key = "aztec2/iac" + region = "eu-west-2" + } +} + +data "terraform_remote_state" "setup_iac" { + backend = "s3" + config = { + bucket = "aztec-terraform" + key = "setup/setup-iac" + region = "eu-west-2" + } +} + +resource "aws_cloudwatch_log_group" "aztec-proof-verifier-log-group" { + name = "/fargate/service/${var.DEPLOY_TAG}/aztec-proof-verifier" + retention_in_days = 14 +} + +resource "aws_service_discovery_service" "aztec-proof-verifier" { + name = "${var.DEPLOY_TAG}-aztec-proof-verifier" + + health_check_custom_config { + failure_threshold = 1 + } + + dns_config { + namespace_id = data.terraform_remote_state.setup_iac.outputs.local_service_discovery_id + + dns_records { + ttl = 60 + type = "A" + } + + dns_records { + ttl = 60 + 
type = "SRV" + } + + routing_policy = "MULTIVALUE" + } + + # Terraform just fails if this resource changes and you have registered instances. + provisioner "local-exec" { + when = destroy + command = "${path.module}/../servicediscovery-drain.sh ${self.id}" + } +} + +# Create a fleet. +data "template_file" "user_data" { + template = <> /etc/ecs/ecs.config +echo 'ECS_INSTANCE_ATTRIBUTES={"group": "${var.DEPLOY_TAG}-proof-verifier"}' >> /etc/ecs/ecs.config +EOF +} + +resource "aws_launch_template" "proof_verifier_launch_template" { + name = "${var.DEPLOY_TAG}-pf-launch-template" + image_id = "ami-0cd4858f2b923aa6b" + instance_type = "m4.2xlarge" // 8 cores, 32 GB + vpc_security_group_ids = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] + + iam_instance_profile { + name = data.terraform_remote_state.setup_iac.outputs.ecs_instance_profile_name + } + + key_name = data.terraform_remote_state.setup_iac.outputs.ecs_instance_key_pair_name + + user_data = base64encode(data.template_file.user_data.rendered) + + tag_specifications { + resource_type = "instance" + tags = { + Name = "${var.DEPLOY_TAG}-proof-verifier" + prometheus = "" + } + } +} + +resource "aws_ec2_fleet" "proof_verifier_fleet" { + launch_template_config { + launch_template_specification { + launch_template_id = aws_launch_template.proof_verifier_launch_template.id + version = aws_launch_template.proof_verifier_launch_template.latest_version + } + + override { + subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az1_private_id + availability_zone = "eu-west-2a" + max_price = "0.15" + } + + override { + subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az2_private_id + availability_zone = "eu-west-2b" + max_price = "0.15" + } + } + + target_capacity_specification { + default_target_capacity_type = "on-demand" + total_target_capacity = 1 + spot_target_capacity = 0 + on_demand_target_capacity = 1 + } + + terminate_instances = true + 
terminate_instances_with_expiration = true +} + +resource "aws_ecs_task_definition" "aztec-proof-verifier" { + family = "${var.DEPLOY_TAG}-aztec-proof-verifier" + network_mode = "awsvpc" + requires_compatibilities = ["EC2"] + execution_role_arn = data.terraform_remote_state.setup_iac.outputs.ecs_task_execution_role_arn + task_role_arn = data.terraform_remote_state.aztec2_iac.outputs.cloudwatch_logging_ecs_role_arn + + container_definitions = jsonencode([ + { + name = "${var.DEPLOY_TAG}-aztec-proof-verifier" + image = "${var.DOCKERHUB_ACCOUNT}/aztec:${var.DEPLOY_TAG}" + command = ["start", "--proof-verifier"] + essential = true + cpu = 8192 + memoryReservation = 30720 + portMappings = [ + { + containerPort = 80 + } + ] + environment = [ + { name = "PROOF_VERIFIER_L1_START_BLOCK", value = "15918000" }, + { name = "PROOF_VERIFIER_POLL_INTERVAL_MS", value = tostring(var.PROOF_VERIFIER_POLL_INTERVAL_MS) }, + { name = "ETHEREUM_HOST", value = var.ETHEREUM_HOST }, + { name = "L1_CHAIN_ID", value = tostring(var.L1_CHAIN_ID) }, + { name = "ROLLUP_CONTRACT_ADDRESS", value = var.ROLLUP_CONTRACT_ADDRESS }, + { + name = "OTEL_EXPORTER_OTLP_METRICS_ENDPOINT" + value = "http://aztec-otel.local:4318/v1/metrics" + }, + { + name = "OTEL_SERVICE_NAME" + value = "${var.DEPLOY_TAG}-aztec-proof-verifier" + }, + { name = "LOG_LEVEL", value = var.LOG_LEVEL }, + { name = "NETWORK", value = var.DEPLOY_TAG }, + { name = "LOG_JSON", value = "1" } + ] + logConfiguration = { + logDriver = "awslogs" + options = { + "awslogs-group" = aws_cloudwatch_log_group.aztec-proof-verifier-log-group.name + "awslogs-region" = "eu-west-2" + "awslogs-stream-prefix" = "ecs" + } + } + } + ]) +} + +resource "aws_ecs_service" "aztec-proof-verifier" { + name = "${var.DEPLOY_TAG}-aztec-proof-verifier" + cluster = data.terraform_remote_state.setup_iac.outputs.ecs_cluster_id + launch_type = "EC2" + desired_count = 1 + deployment_maximum_percent = 100 + deployment_minimum_healthy_percent = 0 + force_new_deployment = 
true + enable_execute_command = true + + network_configuration { + subnets = [ + data.terraform_remote_state.setup_iac.outputs.subnet_az1_private_id, + data.terraform_remote_state.setup_iac.outputs.subnet_az2_private_id + ] + security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] + } + + # load_balancer { + # target_group_arn = aws_alb_target_group.bot_http.arn + # container_name = "${var.DEPLOY_TAG}-aztec-proof-verifier" + # container_port = 80 + # } + + service_registries { + registry_arn = aws_service_discovery_service.aztec-proof-verifier.arn + container_name = "${var.DEPLOY_TAG}-aztec-proof-verifier" + container_port = 80 + } + + placement_constraints { + type = "memberOf" + expression = "attribute:group == ${var.DEPLOY_TAG}-proof-verifier" + } + + task_definition = aws_ecs_task_definition.aztec-proof-verifier.family +} diff --git a/yarn-project/aztec/terraform/proof-verifier/variables.tf b/yarn-project/aztec/terraform/proof-verifier/variables.tf new file mode 100644 index 00000000000..69aac96df1e --- /dev/null +++ b/yarn-project/aztec/terraform/proof-verifier/variables.tf @@ -0,0 +1,29 @@ +variable "DEPLOY_TAG" { + type = string +} + +variable "DOCKERHUB_ACCOUNT" { + type = string +} + +variable "LOG_LEVEL" { + type = string + default = "verbose" +} + +variable "ETHEREUM_HOST" { + type = string +} + +variable "L1_CHAIN_ID" { + type = number +} + +variable "ROLLUP_CONTRACT_ADDRESS" { + type = string +} + +variable "PROOF_VERIFIER_POLL_INTERVAL_MS" { + type = number + default = 60000 +} diff --git a/yarn-project/aztec/terraform/prover/main.tf b/yarn-project/aztec/terraform/prover/main.tf index 924aae3e6ea..537bc7e73f0 100644 --- a/yarn-project/aztec/terraform/prover/main.tf +++ b/yarn-project/aztec/terraform/prover/main.tf @@ -101,12 +101,12 @@ EOF } # Launch template for our prover agents -# 32 cores and 128 GB memory +# 16 cores and 128 GB memory resource "aws_launch_template" "proving-agent-launch-template" { count = 
local.node_count name = "${var.DEPLOY_TAG}-proving-agent-launch-template-${count.index + 1}" image_id = "ami-0cd4858f2b923aa6b" - instance_type = "m5.8xlarge" + instance_type = "r5a.4xlarge" vpc_security_group_ids = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] iam_instance_profile { @@ -237,7 +237,7 @@ resource "aws_ecs_task_definition" "aztec-proving-agent" { "image": "${var.DOCKERHUB_ACCOUNT}/aztec:${var.IMAGE_TAG}", "command": ["start", "--prover"], "essential": true, - "cpu": 32768, + "cpu": 16384, "memoryReservation": 122880, "portMappings": [ { diff --git a/yarn-project/aztec/tsconfig.json b/yarn-project/aztec/tsconfig.json index 72f82e9d805..570da13289f 100644 --- a/yarn-project/aztec/tsconfig.json +++ b/yarn-project/aztec/tsconfig.json @@ -66,6 +66,9 @@ { "path": "../p2p-bootstrap" }, + { + "path": "../proof-verifier" + }, { "path": "../protocol-contracts" }, diff --git a/yarn-project/end-to-end/package.local.json b/yarn-project/end-to-end/package.local.json index a5214893419..62f136fa45d 100644 --- a/yarn-project/end-to-end/package.local.json +++ b/yarn-project/end-to-end/package.local.json @@ -5,4 +5,4 @@ "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:unit": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest src/fixtures" } -} +} \ No newline at end of file diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 43072be644c..43f908c5d72 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -111,5 +111,7 @@ export type EnvVar = | 'VALIDATOR_DISABLED' | 'PROVER_NODE_DISABLE_AUTOMATIC_PROVING' | 'PROVER_NODE_MAX_PENDING_JOBS' + | 'PROOF_VERIFIER_POLL_INTERVAL_MS' + | 'PROOF_VERIFIER_L1_START_BLOCK' | 'LOG_LEVEL' | 'DEBUG'; diff --git a/yarn-project/package.json 
b/yarn-project/package.json index 5707c688028..316150e46fd 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -49,6 +49,7 @@ "noir-protocol-circuits-types", "p2p", "p2p-bootstrap", + "proof-verifier", "protocol-contracts", "prover-client", "prover-node", diff --git a/yarn-project/proof-verifier/.eslintrc.cjs b/yarn-project/proof-verifier/.eslintrc.cjs new file mode 100644 index 00000000000..e659927475c --- /dev/null +++ b/yarn-project/proof-verifier/.eslintrc.cjs @@ -0,0 +1 @@ +module.exports = require('@aztec/foundation/eslint'); diff --git a/yarn-project/proof-verifier/package.json b/yarn-project/proof-verifier/package.json new file mode 100644 index 00000000000..1a6c5f42afd --- /dev/null +++ b/yarn-project/proof-verifier/package.json @@ -0,0 +1,73 @@ +{ + "name": "@aztec/proof-verifier", + "type": "module", + "exports": { + ".": "./dest/index.js" + }, + "scripts": { + "build": "yarn clean && tsc -b", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + }, + "engines": { + "node": ">=18" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "dependencies": { + "@aztec/archiver": "workspace:^", + "@aztec/bb-prover": "workspace:^", + "@aztec/circuit-types": "workspace:^", + "@aztec/circuits.js": "workspace:^", + "@aztec/ethereum": "workspace:^", + "@aztec/foundation": "workspace:^", + "@aztec/noir-protocol-circuits-types": "workspace:^", + "@aztec/telemetry-client": "workspace:^", + "viem": "^2.7.15" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "@types/node": "^18.7.23", + "jest": "^29.5.0", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "jest": { + "extensionsToTreatAsEsm": [ + ".ts" + ], + "transform": { + 
"^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } + ] + }, + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ], + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src" + } +} diff --git a/yarn-project/proof-verifier/src/config.ts b/yarn-project/proof-verifier/src/config.ts new file mode 100644 index 00000000000..fcedde43e4c --- /dev/null +++ b/yarn-project/proof-verifier/src/config.ts @@ -0,0 +1,73 @@ +import { EthAddress } from '@aztec/circuits.js'; +import { + type ConfigMappingsType, + booleanConfigHelper, + getConfigFromMappings, + numberConfigHelper, +} from '@aztec/foundation/config'; +import { type TelemetryClientConfig, telemetryClientConfigMappings } from '@aztec/telemetry-client/start'; + +export type ProofVerifierConfig = { + /** The URL to an L1 node */ + l1Url: string; + /** The L1 chain ID */ + l1ChainId: number; + /** Start block number */ + l1StartBlock: number; + /** The address of the Rollup contract */ + rollupAddress: EthAddress; + /** How often to poll L1 for proof submission */ + pollIntervalMs: number; + /** The path to the bb binary */ + bbBinaryPath: string; + /** Where bb stores temporary files */ + bbWorkingDirectory: string; + /** Whether to skip cleanup of bb temporary files */ + bbSkipCleanup: boolean; +} & TelemetryClientConfig; + +export const proofVerifierConfigMappings: ConfigMappingsType = { + ...telemetryClientConfigMappings, + l1Url: { + env: 'ETHEREUM_HOST', + description: 'The URL to an L1 node', + }, + l1ChainId: { + env: 'L1_CHAIN_ID', + parseEnv: (val: string) => +val, + defaultValue: 31337, + description: 'The chain ID of the ethereum host.', + }, + l1StartBlock: { + env: 'PROOF_VERIFIER_L1_START_BLOCK', + description: 'Start block number', + ...numberConfigHelper(1), + }, + rollupAddress: { + env: 'ROLLUP_CONTRACT_ADDRESS', + description: 'The address 
of the Rollup contract', + parseEnv: EthAddress.fromString, + }, + pollIntervalMs: { + env: 'PROOF_VERIFIER_POLL_INTERVAL_MS', + description: 'How often to poll L1 for proof submission', + ...numberConfigHelper(60_000), + }, + bbBinaryPath: { + env: 'BB_BINARY_PATH', + description: 'The path to the bb binary', + }, + bbWorkingDirectory: { + env: 'BB_WORKING_DIRECTORY', + description: 'Where bb stores temporary files', + }, + bbSkipCleanup: { + env: 'BB_SKIP_CLEANUP', + description: 'Whether to skip cleanup of bb temporary files', + ...booleanConfigHelper(false), + }, +}; + +export function getProofVerifierConfigFromEnv(): ProofVerifierConfig { + return getConfigFromMappings(proofVerifierConfigMappings); +} diff --git a/yarn-project/proof-verifier/src/index.ts b/yarn-project/proof-verifier/src/index.ts new file mode 100644 index 00000000000..4b3ddaa6d8f --- /dev/null +++ b/yarn-project/proof-verifier/src/index.ts @@ -0,0 +1,2 @@ +export * from './config.js'; +export * from './proof_verifier.js'; diff --git a/yarn-project/proof-verifier/src/proof_verifier.ts b/yarn-project/proof-verifier/src/proof_verifier.ts new file mode 100644 index 00000000000..265a7e8aac5 --- /dev/null +++ b/yarn-project/proof-verifier/src/proof_verifier.ts @@ -0,0 +1,112 @@ +import { retrieveL2ProofsFromRollup } from '@aztec/archiver/data-retrieval'; +import { BBCircuitVerifier } from '@aztec/bb-prover'; +import { createEthereumChain } from '@aztec/ethereum'; +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; +import { RunningPromise } from '@aztec/foundation/running-promise'; +import { Attributes, Metrics, type TelemetryClient, type UpDownCounter, ValueType } from '@aztec/telemetry-client'; + +import { type PublicClient, createPublicClient, http } from 'viem'; + +import { type ProofVerifierConfig } from './config.js'; + +const EXPECTED_PROOF_SIZE = 13988; + +export class ProofVerifier { + private runningPromise: RunningPromise; + private synchedToL1Block = 0n; + + 
private proofVerified: UpDownCounter; + + constructor( + private config: ProofVerifierConfig, + private client: PublicClient, + private verifier: BBCircuitVerifier, + telemetryClient: TelemetryClient, + private logger: DebugLogger, + ) { + this.runningPromise = new RunningPromise(this.work.bind(this), config.pollIntervalMs); + this.proofVerified = telemetryClient.getMeter('ProofVerifier').createUpDownCounter(Metrics.PROOF_VERIFIER_COUNT, { + valueType: ValueType.INT, + description: 'The number of proofs verified by the block verifier bot', + }); + this.synchedToL1Block = BigInt(config.l1StartBlock - 1); + } + + static async new(config: ProofVerifierConfig, telemetryClient: TelemetryClient): Promise { + const logger = createDebugLogger('aztec:block-verifier-bot'); + const verifier = await BBCircuitVerifier.new(config, [], logger); + const client = createPublicClient({ + chain: createEthereumChain(config.l1Url, config.l1ChainId).chainInfo, + transport: http(config.l1Url), + }); + + return new ProofVerifier(config, client, verifier, telemetryClient, logger); + } + + start() { + this.logger.info(`Starting proof verifier monitoring rollup=${this.config.rollupAddress}`); + this.runningPromise.start(); + } + + async stop() { + await this.runningPromise.stop(); + } + + private async work() { + const startBlock = this.synchedToL1Block + 1n; + this.logger.debug(`Fetching proofs from L1 block ${startBlock}`); + const { lastProcessedL1BlockNumber, retrievedData } = await retrieveL2ProofsFromRollup( + this.client, + this.config.rollupAddress, + startBlock, + ); + + if (retrievedData.length === 0) { + this.logger.debug(`No proofs found since L1 block ${startBlock}`); + return; + } else { + this.logger.debug(`Fetched ${retrievedData.length} proofs since L1 block ${startBlock}`); + } + + for (const { l2BlockNumber, txHash, proof, proverId } of retrievedData) { + this.logger.debug( + `Proof size ${proof.buffer.length} for L2 block proverId=${proverId} l2Block=${l2BlockNumber} 
l1Tx=${txHash}`, + ); + + const invalidProofFormat = proof.buffer.length < EXPECTED_PROOF_SIZE; + if (invalidProofFormat) { + this.logger.warn( + `Invalid proof format detected: proof length=${proof.buffer.length}bytes proverId=${proverId} l2Block=${l2BlockNumber} l1Tx=${txHash}`, + ); + } + + try { + await this.verifier.verifyProofForCircuit('RootRollupArtifact', proof); + this.logger.info(`Verified proof for L2 block proverId=${proverId} l2Block=${l2BlockNumber} l1Tx=${txHash}`); + + this.proofVerified.add(1, { + [Attributes.ROLLUP_PROVER_ID]: proverId.toString(), + [Attributes.STATUS]: 'valid', + }); + } catch (err) { + this.logger.warn( + `Failed to verify proof for L2 block proverId=${proverId} l2Block=${l2BlockNumber} l1Tx=${txHash}`, + ); + + if (invalidProofFormat) { + this.proofVerified.add(1, { + [Attributes.ROLLUP_PROVER_ID]: proverId.toString(), + [Attributes.STATUS]: 'invalid_proof_format', + }); + } else { + this.proofVerified.add(1, { + [Attributes.ROLLUP_PROVER_ID]: proverId.toString(), + [Attributes.STATUS]: 'invalid', + }); + } + } + } + + this.synchedToL1Block = lastProcessedL1BlockNumber; + } +} diff --git a/yarn-project/proof-verifier/tsconfig.json b/yarn-project/proof-verifier/tsconfig.json new file mode 100644 index 00000000000..283f554f570 --- /dev/null +++ b/yarn-project/proof-verifier/tsconfig.json @@ -0,0 +1,35 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "references": [ + { + "path": "../archiver" + }, + { + "path": "../bb-prover" + }, + { + "path": "../circuit-types" + }, + { + "path": "../circuits.js" + }, + { + "path": "../ethereum" + }, + { + "path": "../foundation" + }, + { + "path": "../noir-protocol-circuits-types" + }, + { + "path": "../telemetry-client" + } + ], + "include": ["src"] +} diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 
5c617ab3624..ad5e091ab34 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -332,7 +332,7 @@ export class L1Publisher { archive: archiveRoot.toBuffer(), proverId: proverId.toBuffer(), aggregationObject: serializeToBuffer(aggregationObject), - proof: proof.withoutPublicInputs(), + proof: proof.toBuffer(), }; // Process block diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 48d8deda753..094044f416d 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -71,3 +71,5 @@ export const WORLD_STATE_FORK_DURATION = 'aztec.world_state.fork.duration'; export const WORLD_STATE_SYNC_DURATION = 'aztec.world_state.sync.duration'; export const WORLD_STATE_MERKLE_TREE_SIZE = 'aztec.world_state.merkle_tree_size'; export const WORLD_STATE_DB_SIZE = 'aztec.world_state.db_size'; + +export const PROOF_VERIFIER_COUNT = 'aztec.proof_verifier.count'; diff --git a/yarn-project/tsconfig.json b/yarn-project/tsconfig.json index 52f60af50fb..45e18ab8129 100644 --- a/yarn-project/tsconfig.json +++ b/yarn-project/tsconfig.json @@ -53,7 +53,8 @@ { "path": "scripts/tsconfig.json" }, { "path": "entrypoints/tsconfig.json" }, { "path": "cli/tsconfig.json" }, - { "path": "cli-wallet/tsconfig.json" } + { "path": "cli-wallet/tsconfig.json" }, + { "path": "proof-verifier/tsconfig.json" } ], "files": ["./@types/jest/index.d.ts"], "exclude": ["node_modules", "**/node_modules", "**/.*/"] diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 3c960d96f53..cc8bf108dd1 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -251,6 +251,7 @@ __metadata: "@aztec/noir-protocol-circuits-types": "workspace:^" "@aztec/p2p": "workspace:^" "@aztec/p2p-bootstrap": "workspace:^" + "@aztec/proof-verifier": "workspace:^" "@aztec/protocol-contracts": "workspace:^" "@aztec/prover-client": 
"workspace:^" "@aztec/prover-node": "workspace:^" @@ -877,6 +878,28 @@ __metadata: languageName: unknown linkType: soft +"@aztec/proof-verifier@workspace:^, @aztec/proof-verifier@workspace:proof-verifier": + version: 0.0.0-use.local + resolution: "@aztec/proof-verifier@workspace:proof-verifier" + dependencies: + "@aztec/archiver": "workspace:^" + "@aztec/bb-prover": "workspace:^" + "@aztec/circuit-types": "workspace:^" + "@aztec/circuits.js": "workspace:^" + "@aztec/ethereum": "workspace:^" + "@aztec/foundation": "workspace:^" + "@aztec/noir-protocol-circuits-types": "workspace:^" + "@aztec/telemetry-client": "workspace:^" + "@jest/globals": ^29.5.0 + "@types/jest": ^29.5.0 + "@types/node": ^18.7.23 + jest: ^29.5.0 + ts-node: ^10.9.1 + typescript: ^5.0.4 + viem: ^2.7.15 + languageName: unknown + linkType: soft + "@aztec/protocol-contracts@workspace:^, @aztec/protocol-contracts@workspace:protocol-contracts": version: 0.0.0-use.local resolution: "@aztec/protocol-contracts@workspace:protocol-contracts" From 138dc52a232f20248306aa9a99cf66f0ac7ec7eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Wed, 4 Sep 2024 22:53:52 +0200 Subject: [PATCH 11/18] feat: router contract (#8352) --- docs/docs/aztec/glossary/call_types.md | 22 ++- .../crowdfunding_contract.md | 30 +--- .../src/core/libraries/ConstantsGen.sol | 2 + .../aztec/src/note/note_getter/mod.nr | 28 ++-- .../aztec/src/note/note_getter/test.nr | 6 +- .../aztec/src/note/note_getter_options.nr | 19 +-- .../aztec/src/test/helpers/cheatcodes.nr | 4 +- .../aztec-nr/aztec/src/utils/collapse.nr | 84 ++++++++++ .../aztec-nr/aztec/src/utils/comparison.nr | 152 ++++++++++++++++++ noir-projects/aztec-nr/aztec/src/utils/mod.nr | 87 +--------- .../aztec-nr/aztec/src/utils/test.nr | 12 +- noir-projects/noir-contracts/Nargo.toml | 1 + .../app_subscription_contract/Nargo.toml | 1 + .../app_subscription_contract/src/main.nr | 43 ++--- .../contracts/counter_contract/src/main.nr | 2 +- 
.../crowdfunding_contract/Nargo.toml | 1 + .../crowdfunding_contract/src/main.nr | 30 ++-- .../contracts/lending_contract/src/main.nr | 7 +- .../contracts/router_contract/Nargo.toml | 8 + .../contracts/router_contract/src/main.nr | 43 +++++ .../contracts/router_contract/src/test.nr | 27 ++++ .../token_contract/src/test/access_control.nr | 1 - .../contracts/token_contract/src/test/burn.nr | 18 +-- .../src/test/transfer_private.nr | 2 +- .../token_contract/src/test/utils.nr | 3 +- .../crates/types/src/constants.nr | 1 + yarn-project/circuits.js/src/constants.gen.ts | 1 + .../circuits.js/src/contract/artifact_hash.ts | 1 + ...trained_function_broadcasted_event.test.ts | 2 +- .../src/fixtures/snapshot_manager.ts | 11 +- yarn-project/end-to-end/src/fixtures/utils.ts | 42 ++++- .../scripts/copy-contracts.sh | 1 + .../protocol-contracts/src/router/artifact.ts | 6 + .../src/router/index.test.ts | 17 ++ .../protocol-contracts/src/router/index.ts | 22 +++ .../pxe/src/pxe_service/create_pxe_service.ts | 2 + 36 files changed, 510 insertions(+), 229 deletions(-) create mode 100644 noir-projects/aztec-nr/aztec/src/utils/collapse.nr create mode 100644 noir-projects/aztec-nr/aztec/src/utils/comparison.nr create mode 100644 noir-projects/noir-contracts/contracts/router_contract/Nargo.toml create mode 100644 noir-projects/noir-contracts/contracts/router_contract/src/main.nr create mode 100644 noir-projects/noir-contracts/contracts/router_contract/src/test.nr create mode 100644 yarn-project/protocol-contracts/src/router/artifact.ts create mode 100644 yarn-project/protocol-contracts/src/router/index.test.ts create mode 100644 yarn-project/protocol-contracts/src/router/index.ts diff --git a/docs/docs/aztec/glossary/call_types.md b/docs/docs/aztec/glossary/call_types.md index 1f1b3cd8008..48303f44b81 100644 --- a/docs/docs/aztec/glossary/call_types.md +++ b/docs/docs/aztec/glossary/call_types.md @@ -108,21 +108,31 @@ Unlike the EVM however, private execution doesn't revert in the 
traditional way: Since public execution can only be performed by the sequencer, public functions cannot be executed in a private context. It is possible however to _enqueue_ a public function call during private execution, requesting the sequencer to run it during inclusion of the transaction. It will be [executed in public](#public-execution) normally, including the possibility to enqueue static public calls. -Since the public call is made asynchronously, any return values or side effects are not available during private execution. If the public function fails once executed, the entire transaction is reverted inncluding state changes caused by the private part, such as new notes or nullifiers. Note that this does result in gas being spent, like in the case of the EVM. +Since the public call is made asynchronously, any return values or side effects are not available during private execution. If the public function fails once executed, the entire transaction is reverted including state changes caused by the private part, such as new notes or nullifiers. Note that this does result in gas being spent, like in the case of the EVM. #include_code enqueue_public /noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust -It is also possible to create public functions that can _only_ be invoked by privately enqueing a call from the same contract, which can very useful to update public state after private exection (e.g. update a token's supply after privately minting). This is achieved by annotating functions with `#[aztec(internal)]`. +It is also possible to create public functions that can _only_ be invoked by privately enqueueing a call from the same contract, which can very useful to update public state after private execution (e.g. update a token's supply after privately minting). This is achieved by annotating functions with `#[aztec(internal)]`. A common pattern is to enqueue public calls to check some validity condition on public state, e.g. 
that a deadline has not expired or that some public value is set. +#include_code enqueueing /noir-projects/noir-contracts/contracts/router_contract/src/main.nr rust + +Note that this reveals what public function is being called on what contract. +For this reason we've created a canonical router contract which implements some of the checks commonly performed. +This conceals what contract performed the public call as the `context.msg_sender()` in the public function is the router itself (since the router's private function enqueued the public call). + +An example of how a deadline can be checked using the router contract follows: + #include_code call-check-deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust -#include_code deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust +This is what the implementation of the check timestamp functionality looks like: -:::warning -Calling public functions privately leaks some privacy! The caller of the function and all arguments will be revelead, so exercise care when mixing the private and public domains. To learn about alternative ways to access public state privately, look into [Shared State](../../reference/developer_references/smart_contract_reference/storage/shared_state.md). -::: +#include_code check_timestamp /noir-projects/noir-contracts/contracts/router_contract/src/main.nr rust + +Even with the router contract achieving good privacy is hard. +This is especially the case when the value being checked is unique and stored in the contract's public storage. +For this reason it is encouraged to try to avoid public function calls and instead privately read [Shared State](../../reference/developer_references/smart_contract_reference/storage/shared_state.md) when possible. 
### Public Execution diff --git a/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md index 198c1a3dfc2..e198b68d416 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md @@ -131,29 +131,13 @@ You can compile the code so far with `aztec-nargo compile`. To check that the donation occurs before the campaign deadline, we must access the public `timestamp`. It is one of several public global variables. -Declare an Aztec function that is public and internal +We read the deadline from public storage in private and use the router contract to assert that the current `timestamp` is before the deadline. -```rust -#include_code deadline-header /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr raw - //... -} -``` - -Read the deadline from storage and assert that the `timestamp` from this context is before the deadline - -#include_code deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust - ---- - -Since donations are to be private, the donate function will have the user's private context which has these private global variables. So from the private context there is a little extra to call the (public internal) `_check_deadline` function. - -```rust -#include_code call-check-deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr raw - //... -} -``` +#include_code call-check-deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust -Namely calling `enqueue` and passing the (mutable) context. +We do the check via the router contract to conceal which contract is performing the check (This is achieved by calling a private function on the router contract which then enqueues a call to a public function on the router contract. 
This then results in the msg_sender in the public call being the router contract.) +Note that the privacy here is dependent upon what deadline value is chosen by the Crowdfunding contract deployer. +If it's unique to this contract, then we are leaking privacy. Now conclude adding all dependencies to the `Crowdfunding` contract: @@ -193,7 +177,7 @@ Copy the last function into your Crowdfunding contract: #include_code operator-withdrawals /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust -You should be able to compile successfully with `aztec-nargo compile`. +You should be able to compile successfully with `aztec-nargo compile`. **Congratulations,** you have just built a multi-contract project on Aztec! @@ -206,7 +190,7 @@ See [claim_contract (GitHub link)](https://github.com/AztecProtocol/aztec-packag ## Next steps -### Build an accounts contract +### Build an accounts contract Follow the account contract tutorial on the [next page](./write_accounts_contract.md) and learn more about account abstraction.
diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index d954c8b778f..769a77c970c 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -143,6 +143,8 @@ library Constants { 2631409926445785927331173506476539962589925110142857699603561302478860342858; uint256 internal constant FEE_JUICE_ADDRESS = 10248142274714515101077825679585135641434041564851038865006795089686437446849; + uint256 internal constant ROUTER_ADDRESS = + 8135649085127523915405560812661632604783066942985338123941332115593181690668; uint256 internal constant AZTEC_ADDRESS_LENGTH = 1; uint256 internal constant GAS_FEES_LENGTH = 2; uint256 internal constant GAS_LENGTH = 2; diff --git a/noir-projects/aztec-nr/aztec/src/note/note_getter/mod.nr b/noir-projects/aztec-nr/aztec/src/note/note_getter/mod.nr index 8170ddf8f61..7638072fac2 100644 --- a/noir-projects/aztec-nr/aztec/src/note/note_getter/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/note/note_getter/mod.nr @@ -2,11 +2,12 @@ use dep::protocol_types::{constants::{MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, GET_ use crate::context::PrivateContext; use crate::note::{ constants::{GET_NOTE_ORACLE_RETURN_LENGTH, MAX_NOTES_PER_PAGE, VIEW_NOTE_ORACLE_RETURN_LENGTH}, - note_getter_options::{NoteGetterOptions, Select, Sort, SortOrder, Comparator, NoteStatus, PropertySelector}, + note_getter_options::{NoteGetterOptions, Select, Sort, SortOrder, NoteStatus, PropertySelector}, note_interface::NoteInterface, note_viewer_options::NoteViewerOptions, utils::compute_note_hash_for_read_request }; use crate::oracle; +use crate::utils::comparison::assert_comparison; mod test; @@ -50,23 +51,12 @@ fn check_note_fields( let select = selects.get_unchecked(i).unwrap_unchecked(); let value_field = extract_property_value_from_selector(serialized_note, select.property_selector); - // Values are computed ahead of time because circuits evaluate all branches - 
let is_equal = value_field == select.value.to_field(); - let is_lt = value_field.lt(select.value.to_field()); - - if (select.comparator == Comparator.EQ) { - assert(is_equal, "Mismatch return note field."); - } else if (select.comparator == Comparator.NEQ) { - assert(!is_equal, "Mismatch return note field."); - } else if (select.comparator == Comparator.LT) { - assert(is_lt, "Mismatch return note field."); - } else if (select.comparator == Comparator.LTE) { - assert(is_lt | is_equal, "Mismatch return note field."); - } else if (select.comparator == Comparator.GT) { - assert(!is_lt & !is_equal, "Mismatch return note field."); - } else if (select.comparator == Comparator.GTE) { - assert(!is_lt, "Mismatch return note field."); - } + assert_comparison( + value_field, + select.comparator, + select.value.to_field(), + "Mismatch return note field." + ); } } @@ -135,7 +125,7 @@ fn constrain_get_notes_internal = BoundedVec::new(); // We have now collapsed the sparse array of Options into a BoundedVec. 
This is a more ergonomic type and also diff --git a/noir-projects/aztec-nr/aztec/src/note/note_getter/test.nr b/noir-projects/aztec-nr/aztec/src/note/note_getter/test.nr index 0f8c81cf66f..34fd32c8ba4 100644 --- a/noir-projects/aztec-nr/aztec/src/note/note_getter/test.nr +++ b/noir-projects/aztec-nr/aztec/src/note/note_getter/test.nr @@ -1,16 +1,14 @@ use dep::protocol_types::constants::MAX_NOTE_HASH_READ_REQUESTS_PER_CALL; use crate::{ - context::PrivateContext, note::{ - note_header::NoteHeader, - note_getter_options::{NoteGetterOptions, Sort, SortOrder, Comparator, PropertySelector}, + note_getter_options::{NoteGetterOptions, SortOrder, PropertySelector}, note_getter::constrain_get_notes_internal }, oracle::execution::get_contract_address }; -use dep::protocol_types::address::AztecAddress; use crate::test::{helpers::test_environment::TestEnvironment, mocks::mock_note::MockNote}; +use crate::utils::comparison::Comparator; global storage_slot: Field = 42; diff --git a/noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr b/noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr index 88263962e7e..d4317f9edf0 100644 --- a/noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr +++ b/noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr @@ -1,6 +1,7 @@ use std::option::Option; use dep::protocol_types::{constants::MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, traits::ToField}; use crate::note::note_interface::NoteInterface; +use crate::utils::comparison::Comparator; struct PropertySelector { index: u8, @@ -8,24 +9,6 @@ struct PropertySelector { length: u8, } -struct ComparatorEnum { - EQ: u8, - NEQ: u8, - LT: u8, - LTE: u8, - GT: u8, - GTE: u8, -} - -global Comparator = ComparatorEnum { - EQ: 1, - NEQ: 2, - LT: 3, - LTE: 4, - GT: 5, - GTE: 6, -}; - struct Select { property_selector: PropertySelector, value: Field, diff --git a/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr b/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr 
index 12ba6715de8..ea5bca963c4 100644 --- a/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr +++ b/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr @@ -1,9 +1,9 @@ use dep::protocol_types::{ - abis::function_selector::FunctionSelector, address::{AztecAddress, PartialAddress}, + abis::function_selector::FunctionSelector, address::AztecAddress, constants::CONTRACT_INSTANCE_LENGTH, contract_instance::ContractInstance }; use crate::context::inputs::{PublicContextInputs, PrivateContextInputs}; -use crate::test::helpers::utils::{Deployer, TestAccount}; +use crate::test::helpers::utils::TestAccount; use crate::keys::public_keys::PublicKeys; unconstrained pub fn reset() { diff --git a/noir-projects/aztec-nr/aztec/src/utils/collapse.nr b/noir-projects/aztec-nr/aztec/src/utils/collapse.nr new file mode 100644 index 00000000000..aaa531b945e --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/utils/collapse.nr @@ -0,0 +1,84 @@ +use dep::protocol_types::traits::Eq; + +// Collapses an array of Options with sparse Some values into a BoundedVec, essentially unwrapping the Options and +// removing the None values. For example, given: +// input: [some(3), none(), some(1)] +// this returns +// collapsed: [3, 1] +pub fn collapse_array(input: [Option; N]) -> BoundedVec where T: Eq { + let (collapsed, collapsed_to_input_index_mapping) = unsafe { + get_collapse_hints(input) + }; + verify_collapse_hints(input, collapsed, collapsed_to_input_index_mapping); + collapsed +} + +pub fn verify_collapse_hints( + input: [Option; N], + collapsed: BoundedVec, + collapsed_to_input_index_mapping: BoundedVec +) where T: Eq { + // collapsed should be a BoundedVec with all the non-none elements in input, in the same order. We need to lay down + // multiple constraints to guarantee this. 
+ + // First we check that the number of elements is correct + let mut count = 0; + for i in 0..N { + if input[i].is_some() { + count += 1; + } + } + assert_eq(count, collapsed.len(), "Wrong collapsed vec length"); + + // Then we check that all elements exist in the original array, and are in the same order. To do this we use the + // auxiliary collapsed_to_input_index_mapping array, which at index n contains the index in the input array that + // corresponds to the collapsed entry at index n. + // Example: + // - input: [some(3), none(), some(1)] + // - collapsed: [3, 1] + // - collapsed_to_input_index_mapping: [0, 2] + // These two arrays should therefore have the same length. + assert_eq(collapsed.len(), collapsed_to_input_index_mapping.len(), "Collapse hint vec length mismatch"); + + // We now look at each collapsed entry and check that there is a valid equal entry in the input array. + let mut last_index = Option::none(); + for i in 0..N { + if i < collapsed.len() { + let input_index = collapsed_to_input_index_mapping.get_unchecked(i); + assert(input_index < N, "Out of bounds index hint"); + + assert_eq(collapsed.get_unchecked(i), input[input_index].unwrap(), "Wrong collapsed vec content"); + + // By requiring increasing input indices, we both guarantee that we're not looking at the same input + // element more than once, and that we're going over them in the original order. + if last_index.is_some() { + assert(input_index > last_index.unwrap_unchecked(), "Wrong collapsed vec order"); + } + last_index = Option::some(input_index); + } else { + // BoundedVec assumes that the unused parts of the storage are zeroed out (e.g. in the Eq impl), so we make + // sure that this property holds. 
+ assert_eq(collapsed.get_unchecked(i), std::mem::zeroed(), "Dirty collapsed vec storage"); + } + } + // We now know that: + // - all values in the collapsed array exist in the input array + // - the order of the collapsed values is the same as in the input array + // - no input value is present more than once in the collapsed array + // - the number of elements in the collapsed array is the same as in the input array. + // Therefore, the collapsed array is correct. +} + +unconstrained fn get_collapse_hints(input: [Option; N]) -> (BoundedVec, BoundedVec) { + let mut collapsed: BoundedVec = BoundedVec::new(); + let mut collapsed_to_input_index_mapping: BoundedVec = BoundedVec::new(); + + for i in 0..N { + if input[i].is_some() { + collapsed.push(input[i].unwrap_unchecked()); + collapsed_to_input_index_mapping.push(i); + } + } + + (collapsed, collapsed_to_input_index_mapping) +} diff --git a/noir-projects/aztec-nr/aztec/src/utils/comparison.nr b/noir-projects/aztec-nr/aztec/src/utils/comparison.nr new file mode 100644 index 00000000000..e5a91883775 --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/utils/comparison.nr @@ -0,0 +1,152 @@ +struct ComparatorEnum { + EQ: u8, + NEQ: u8, + LT: u8, + LTE: u8, + GT: u8, + GTE: u8, +} + +global Comparator = ComparatorEnum { + EQ: 1, + NEQ: 2, + LT: 3, + LTE: 4, + GT: 5, + GTE: 6, +}; + +pub fn assert_comparison(lhs: Field, operation: u8, rhs: Field, error_msg: str) { + // Values are computed ahead of time because circuits evaluate all branches + let is_equal = lhs == rhs; + let is_lt = lhs.lt(rhs); + + if (operation == Comparator.EQ) { + assert(is_equal, error_msg); + } else if (operation == Comparator.NEQ) { + assert(!is_equal, error_msg); + } else if (operation == Comparator.LT) { + assert(is_lt, error_msg); + } else if (operation == Comparator.LTE) { + assert(is_lt | is_equal, error_msg); + } else if (operation == Comparator.GT) { + assert(!is_lt & !is_equal, error_msg); + } else if (operation == Comparator.GTE) { + 
assert(!is_lt, error_msg); + } +} + +mod test { + use super::assert_comparison; + use super::Comparator; + + #[test] + fn test_assert_comparison_happy_path() { + let lhs = 10; + let rhs = 10; + assert_comparison(lhs, Comparator.EQ, rhs, "Expected lhs to be equal to rhs"); + + let lhs = 10; + let rhs = 11; + assert_comparison(lhs, Comparator.NEQ, rhs, "Expected lhs to be not equal to rhs"); + + let lhs = 10; + let rhs = 11; + assert_comparison(lhs, Comparator.LT, rhs, "Expected lhs to be less than rhs"); + + let lhs = 10; + let rhs = 10; + assert_comparison( + lhs, + Comparator.LTE, + rhs, + "Expected lhs to be less than or equal to rhs" + ); + + let lhs = 11; + let rhs = 10; + assert_comparison(lhs, Comparator.GT, rhs, "Expected lhs to be greater than rhs"); + + let lhs = 10; + let rhs = 10; + assert_comparison( + lhs, + Comparator.GTE, + rhs, + "Expected lhs to be greater than or equal to rhs" + ); + + let lhs = 11; + let rhs = 10; + assert_comparison( + lhs, + Comparator.GTE, + rhs, + "Expected lhs to be greater than or equal to rhs" + ); + } + + #[test(should_fail_with="Expected lhs to be equal to rhs")] + fn test_assert_comparison_fail_eq() { + let lhs = 10; + let rhs = 11; + assert_comparison(lhs, Comparator.EQ, rhs, "Expected lhs to be equal to rhs"); + } + + #[test(should_fail_with="Expected lhs to be not equal to rhs")] + fn test_assert_comparison_fail_neq() { + let lhs = 10; + let rhs = 10; + assert_comparison(lhs, Comparator.NEQ, rhs, "Expected lhs to be not equal to rhs"); + } + + #[test(should_fail_with="Expected lhs to be less than rhs")] + fn test_assert_comparison_fail_lt() { + let lhs = 11; + let rhs = 10; + assert_comparison(lhs, Comparator.LT, rhs, "Expected lhs to be less than rhs"); + } + + #[test(should_fail_with="Expected lhs to be less than or equal to rhs")] + fn test_assert_comparison_fail_lte() { + let lhs = 11; + let rhs = 10; + assert_comparison( + lhs, + Comparator.LTE, + rhs, + "Expected lhs to be less than or equal to rhs" + ); + } + 
+ #[test(should_fail_with="Expected lhs to be greater than rhs")] + fn test_assert_comparison_fail_gt() { + let lhs = 10; + let rhs = 10; + assert_comparison(lhs, Comparator.GT, rhs, "Expected lhs to be greater than rhs"); + } + + #[test(should_fail_with="Expected lhs to be greater than or equal to rhs")] + fn test_assert_comparison_fail_gte() { + let lhs = 10; + let rhs = 11; + assert_comparison( + lhs, + Comparator.GTE, + rhs, + "Expected lhs to be greater than or equal to rhs" + ); + } + + #[test(should_fail_with="Expected lhs to be greater than or equal to rhs")] + fn test_assert_comparison_fail_gte_2() { + let lhs = 10; + let rhs = 11; + assert_comparison( + lhs, + Comparator.GTE, + rhs, + "Expected lhs to be greater than or equal to rhs" + ); + } +} diff --git a/noir-projects/aztec-nr/aztec/src/utils/mod.nr b/noir-projects/aztec-nr/aztec/src/utils/mod.nr index 6d79c195f33..e670a6403bf 100644 --- a/noir-projects/aztec-nr/aztec/src/utils/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/utils/mod.nr @@ -1,85 +1,4 @@ -use dep::protocol_types::traits::Eq; - -mod test; +mod collapse; +mod comparison; mod point; - -// Collapses an array of Options with sparse Some values into a BoundedVec, essentially unwrapping the Options and -// removing the None values. For example, given: -// input: [some(3), none(), some(1)] -// this returns -// collapsed: [3, 1] -pub fn collapse(input: [Option; N]) -> BoundedVec where T: Eq { - let (collapsed, collapsed_to_input_index_mapping) = get_collapse_hints(input); - verify_collapse_hints(input, collapsed, collapsed_to_input_index_mapping); - collapsed -} - -fn verify_collapse_hints( - input: [Option; N], - collapsed: BoundedVec, - collapsed_to_input_index_mapping: BoundedVec -) where T: Eq { - // collapsed should be a BoundedVec with all the non-none elements in input, in the same order. We need to lay down - // multiple constraints to guarantee this. 
- - // First we check that the number of elements is correct - let mut count = 0; - for i in 0..N { - if input[i].is_some() { - count += 1; - } - } - assert_eq(count, collapsed.len(), "Wrong collapsed vec length"); - - // Then we check that all elements exist in the original array, and are in the same order. To do this we use the - // auxiliary collapsed_to_input_index_mapping array, which at index n contains the index in the input array that - // corresponds to the collapsed entry at index n. - // Example: - // - input: [some(3), none(), some(1)] - // - collapsed: [3, 1] - // - collapsed_to_input_index_mapping: [0, 2] - // These two arrays should therefore have the same length. - assert_eq(collapsed.len(), collapsed_to_input_index_mapping.len(), "Collapse hint vec length mismatch"); - - // We now look at each collapsed entry and check that there is a valid equal entry in the input array. - let mut last_index = Option::none(); - for i in 0..N { - if i < collapsed.len() { - let input_index = collapsed_to_input_index_mapping.get_unchecked(i); - assert(input_index < N, "Out of bounds index hint"); - - assert_eq(collapsed.get_unchecked(i), input[input_index].unwrap(), "Wrong collapsed vec content"); - - // By requiring increasing input indices, we both guarantee that we're not looking at the same input - // element more than once, and that we're going over them in the original order. - if last_index.is_some() { - assert(input_index > last_index.unwrap_unchecked(), "Wrong collapsed vec order"); - } - last_index = Option::some(input_index); - } else { - // BoundedVec assumes that the unused parts of the storage are zeroed out (e.g. in the Eq impl), so we make - // sure that this property holds. 
- assert_eq(collapsed.get_unchecked(i), std::mem::zeroed(), "Dirty collapsed vec storage"); - } - } - // We now know that: - // - all values in the collapsed array exist in the input array - // - the order of the collapsed values is the same as in the input array - // - no input value is present more than once in the collapsed array - // - the number of elements in the collapsed array is the same as in the input array. - // Therefore, the collapsed array is correct. -} - -unconstrained fn get_collapse_hints(input: [Option; N]) -> (BoundedVec, BoundedVec) { - let mut collapsed: BoundedVec = BoundedVec::new(); - let mut collapsed_to_input_index_mapping: BoundedVec = BoundedVec::new(); - - for i in 0..N { - if input[i].is_some() { - collapsed.push(input[i].unwrap_unchecked()); - collapsed_to_input_index_mapping.push(i); - } - } - - (collapsed, collapsed_to_input_index_mapping) -} +mod test; diff --git a/noir-projects/aztec-nr/aztec/src/utils/test.nr b/noir-projects/aztec-nr/aztec/src/utils/test.nr index 5ea5ad23624..85b827c792d 100644 --- a/noir-projects/aztec-nr/aztec/src/utils/test.nr +++ b/noir-projects/aztec-nr/aztec/src/utils/test.nr @@ -1,9 +1,9 @@ -use crate::utils::{collapse, verify_collapse_hints}; +use super::collapse::{collapse_array, verify_collapse_hints}; #[test] fn collapse_empty_array() { let original: [Option; 2] = [Option::none(), Option::none()]; - let collapsed = collapse(original); + let collapsed = collapse_array(original); assert_eq(collapsed.len(), 0); } @@ -11,7 +11,7 @@ fn collapse_empty_array() { #[test] fn collapse_non_sparse_array() { let original = [Option::some(7), Option::some(3), Option::none()]; - let collapsed = collapse(original); + let collapsed = collapse_array(original); assert_eq(collapsed.len(), 2); assert_eq(collapsed.get(0), 7); @@ -21,7 +21,7 @@ fn collapse_non_sparse_array() { #[test] fn collapse_sparse_array() { let original = [Option::some(7), Option::none(), Option::some(3)]; - let collapsed = collapse(original); + let 
collapsed = collapse_array(original); assert_eq(collapsed.len(), 2); assert_eq(collapsed.get(0), 7); @@ -31,7 +31,7 @@ fn collapse_sparse_array() { #[test] fn collapse_array_front_padding() { let original = [Option::none(), Option::none(), Option::some(7), Option::none(), Option::some(3)]; - let collapsed = collapse(original); + let collapsed = collapse_array(original); assert_eq(collapsed.len(), 2); assert_eq(collapsed.get(0), 7); @@ -41,7 +41,7 @@ fn collapse_array_front_padding() { #[test] fn collapse_array_back_padding() { let original = [Option::some(7), Option::none(), Option::some(3), Option::none(), Option::none()]; - let collapsed = collapse(original); + let collapsed = collapse_array(original); assert_eq(collapsed.len(), 2); assert_eq(collapsed.get(0), 7); diff --git a/noir-projects/noir-contracts/Nargo.toml b/noir-projects/noir-contracts/Nargo.toml index 1bb0fafff27..f3d2b637a44 100644 --- a/noir-projects/noir-contracts/Nargo.toml +++ b/noir-projects/noir-contracts/Nargo.toml @@ -32,6 +32,7 @@ members = [ "contracts/pending_note_hashes_contract", "contracts/price_feed_contract", "contracts/private_fpc_contract", + "contracts/router_contract", "contracts/schnorr_account_contract", "contracts/schnorr_hardcoded_account_contract", "contracts/schnorr_single_key_account_contract", diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/app_subscription_contract/Nargo.toml index 956f7855db0..c75ef918f92 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/Nargo.toml @@ -8,3 +8,4 @@ type = "contract" aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } token = { path = "../token_contract" } +router = { path = "../router_contract" } diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr 
b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index c695fc0841f..73fd1f257bb 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -2,18 +2,17 @@ mod subscription_note; mod dapp_payload; contract AppSubscription { - use crate::{dapp_payload::DAppPayload, subscription_note::{SubscriptionNote, SUBSCRIPTION_NOTE_LEN}}; + use crate::{dapp_payload::DAppPayload, subscription_note::SubscriptionNote}; use aztec::{ - prelude::{ - AztecAddress, FunctionSelector, PrivateContext, NoteHeader, Map, PrivateMutable, PublicMutable, - SharedImmutable - }, + prelude::{AztecAddress, Map, PrivateMutable, SharedImmutable}, encrypted_logs::encrypted_note_emission::{encode_and_encrypt_note, encode_and_encrypt_note_with_keys}, - keys::getters::get_current_public_keys, protocol_types::constants::MAX_FIELD_VALUE + keys::getters::get_current_public_keys, + protocol_types::constants::{MAX_FIELD_VALUE, ROUTER_ADDRESS}, utils::comparison::Comparator }; - use authwit::{auth_witness::get_auth_witness, auth::assert_current_call_valid_authwit}; + use authwit::auth::assert_current_call_valid_authwit; use token::Token; + use router::Router; #[aztec(storage)] struct Storage { @@ -53,7 +52,9 @@ contract AppSubscription { context.end_setup(); - AppSubscription::at(context.this_address()).assert_not_expired(note.expiry_block_number).enqueue_view(&mut context); + // We check that the note is not expired. We do that via the router contract to conceal which contract + // is performing the check. 
+ Router::at(ROUTER_ADDRESS).check_block_number(note.expiry_block_number, Comparator.GT).call(&mut context); payload.execute_calls(&mut context, storage.target_address.read_private()); } @@ -74,23 +75,6 @@ contract AppSubscription { storage.fee_juice_limit_per_tx.initialize(fee_juice_limit_per_tx); } - #[aztec(public)] - #[aztec(internal)] - #[aztec(view)] - fn assert_not_expired(expiry_block_number: Field) { - assert((context.block_number()) as u64 < expiry_block_number as u64); - } - - #[aztec(public)] - #[aztec(internal)] - #[aztec(view)] - fn assert_block_number(expiry_block_number: Field) { - assert( - (context.block_number() + SUBSCRIPTION_DURATION_IN_BLOCKS) as u64 - >= expiry_block_number as u64 - ); - } - #[aztec(private)] fn subscribe(subscriber: AztecAddress, nonce: Field, expiry_block_number: Field, tx_count: Field) { assert(tx_count as u64 <= SUBSCRIPTION_TXS as u64); @@ -102,8 +86,13 @@ contract AppSubscription { nonce ).call(&mut context); - // Assert that the given expiry_block_number < current_block_number + SUBSCRIPTION_DURATION_IN_BLOCKS. - AppSubscription::at(context.this_address()).assert_block_number(expiry_block_number).enqueue_view(&mut context); + // Assert that the `expiry_block_number - SUBSCRIPTION_DURATION_IN_BLOCKS < current_block_number`. + // --> We do that via the router contract to conceal which contract is performing the check. 
+ Router::at(ROUTER_ADDRESS).check_block_number( + expiry_block_number - SUBSCRIPTION_DURATION_IN_BLOCKS, + Comparator.LT + ).call(&mut context); + let subscriber_keys = get_current_public_keys(&mut context, subscriber); let msg_sender_ovpk_m = get_current_public_keys(&mut context, context.msg_sender()).ovpk_m; diff --git a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr index d5e554022cf..62a3afd057d 100644 --- a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr @@ -1,7 +1,7 @@ contract Counter { // docs:start:imports use dep::aztec::prelude::{AztecAddress, Map}; - use dep::value_note::{balance_utils, value_note::{ValueNote, VALUE_NOTE_LEN}}; + use dep::value_note::{balance_utils, value_note::ValueNote}; use dep::easy_private_state::EasyPrivateUint; // docs:end:imports diff --git a/noir-projects/noir-contracts/contracts/crowdfunding_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/crowdfunding_contract/Nargo.toml index aa72f2f65f4..69185126c38 100644 --- a/noir-projects/noir-contracts/contracts/crowdfunding_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/crowdfunding_contract/Nargo.toml @@ -8,3 +8,4 @@ type = "contract" aztec = { path = "../../../aztec-nr/aztec" } value_note = { path = "../../../aztec-nr/value-note" } token = { path = "../token_contract" } +router = { path = "../router_contract" } diff --git a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr index 3c7f9b60389..2d5a06b9e8b 100644 --- a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr @@ -4,16 +4,17 @@ contract Crowdfunding { // docs:start:all-deps use dep::aztec::{ - 
protocol_types::address::AztecAddress, + protocol_types::{address::AztecAddress, constants::ROUTER_ADDRESS}, encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys, - keys::getters::get_current_public_keys, - state_vars::{PrivateSet, PublicImmutable, SharedImmutable} + keys::getters::get_current_public_keys, state_vars::{PrivateSet, SharedImmutable}, + note::note_getter_options::Comparator }; use dep::aztec::unencrypted_logs::unencrypted_event_emission::encode_event; // docs:start:import_valuenote use dep::value_note::value_note::ValueNote; // docs:end:import_valuenote use dep::token::Token; + use router::Router; // docs:end:all-deps #[aztec(event)] @@ -30,12 +31,13 @@ contract Crowdfunding { // Crowdfunding campaign operator operator: SharedImmutable, // End of the crowdfunding campaign after which no more donations are accepted - deadline: PublicImmutable, + deadline: SharedImmutable, // Notes emitted to donors when they donate (can be used as proof to obtain rewards, eg in Claim contracts) donation_receipts: PrivateSet, } // docs:end:storage + // TODO(#8367): Ensure deadline is quantized to improve privacy set. // docs:start:init // docs:start:init-header // docs:start:init-header-error @@ -51,24 +53,14 @@ contract Crowdfunding { } // docs:end:init - // docs:start:deadline - // docs:start:deadline-header - #[aztec(public)] - #[aztec(internal)] - #[aztec(view)] - fn _check_deadline() { - // docs:end:deadline-header - let deadline = storage.deadline.read(); - assert(context.timestamp() < deadline, "Deadline has passed"); - } - // docs:end:deadline - // docs:start:donate - // docs:start:call-check-deadline #[aztec(private)] fn donate(amount: u64) { - // 1) Check that the deadline has not passed - Crowdfunding::at(context.this_address())._check_deadline().enqueue_view(&mut context); + // 1) Check that the deadline has not passed --> we do that via the router contract to conceal which contract + // is performing the check. 
+ // docs:start:call-check-deadline + let deadline = storage.deadline.read_private(); + Router::at(ROUTER_ADDRESS).check_timestamp(deadline, Comparator.GT).call(&mut context); // docs:end:call-check-deadline // docs:start:do-transfer diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr index 2a7eac34721..93d7052e876 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr @@ -11,13 +11,12 @@ mod helpers; // - A way to repay all debt at once // - Liquidations contract Lending { - use dep::aztec::prelude::{FunctionSelector, AztecAddress, PrivateContext, Map, PublicMutable}; - use dep::aztec::context::{PublicContext, gas::GasOpts}; + use dep::aztec::prelude::{AztecAddress, Map, PublicMutable}; use crate::asset::Asset; use crate::position::Position; use crate::interest_math::compute_multiplier; - use crate::helpers::{covered_by_collateral, DebtReturn, debt_updates, debt_value, compute_identifier}; + use crate::helpers::{covered_by_collateral, debt_updates, debt_value, compute_identifier}; use dep::token::Token; use dep::price_feed::PriceFeed; @@ -81,7 +80,7 @@ contract Lending { // accumulator *= multiplier, and multiplier >= 1 asset.interest_accumulator = (asset.interest_accumulator * multiplier) / precision; - asset.last_updated_ts = context.timestamp(); + asset.last_updated_ts = timestamp; asset_loc.write(asset); } diff --git a/noir-projects/noir-contracts/contracts/router_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/router_contract/Nargo.toml new file mode 100644 index 00000000000..f1e292f4cc5 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/router_contract/Nargo.toml @@ -0,0 +1,8 @@ +[package] +name = "router_contract" +authors = [""] +compiler_version = ">=0.25.0" +type = "contract" + +[dependencies] +aztec = { path = "../../../aztec-nr/aztec" } 
diff --git a/noir-projects/noir-contracts/contracts/router_contract/src/main.nr b/noir-projects/noir-contracts/contracts/router_contract/src/main.nr new file mode 100644 index 00000000000..484a701b7bb --- /dev/null +++ b/noir-projects/noir-contracts/contracts/router_contract/src/main.nr @@ -0,0 +1,43 @@ +mod test;
+
+/// The purpose of this contract is to perform a check in public without revealing what contract enqueued the public
+/// call. This is achieved by having a private function on this contract that enqueues the public call and hence
+/// the `msg_sender` in the public call is the address of this contract.
+contract Router {
+    use aztec::utils::comparison::assert_comparison;
+
+    // docs:start:check_timestamp
+    /// Enqueues a public call that asserts that `lhs` (left side of the comparison) timestamp satisfies
+    /// the `operation` with respect to the current timestamp.
+    #[aztec(private)]
+    fn check_timestamp(lhs: u64, operation: u8) {
+        Router::at(context.this_address())._check_timestamp(lhs, operation).enqueue_view(&mut context);
+    }
+
+    #[aztec(public)]
+    #[aztec(internal)]
+    #[aztec(view)]
+    fn _check_timestamp(lhs: u64, operation: u8) {
+        let lhs_field = lhs as Field;
+        let rhs = context.timestamp() as Field;
+        assert_comparison(lhs_field, operation, rhs, "Timestamp mismatch.");
+    }
+    // docs:end:check_timestamp
+
+    /// Enqueues a public call that asserts that `lhs` (left side of the comparison) block number satisfies
+    /// the `operation` with respect to the current block number.
+ #[aztec(private)] + fn check_block_number(lhs: Field, operation: u8) { + // docs:start:enqueueing + Router::at(context.this_address())._check_block_number(lhs, operation).enqueue_view(&mut context); + // docs:end:enqueueing + } + + #[aztec(public)] + #[aztec(internal)] + #[aztec(view)] + fn _check_block_number(lhs: Field, operation: u8) { + let rhs = context.block_number(); + assert_comparison(lhs, operation, rhs, "Block number mismatch."); + } +} diff --git a/noir-projects/noir-contracts/contracts/router_contract/src/test.nr b/noir-projects/noir-contracts/contracts/router_contract/src/test.nr new file mode 100644 index 00000000000..992d719425e --- /dev/null +++ b/noir-projects/noir-contracts/contracts/router_contract/src/test.nr @@ -0,0 +1,27 @@ +use dep::aztec::test::helpers::test_environment::TestEnvironment; +use crate::Router; +use aztec::utils::comparison::Comparator; + +#[test] +unconstrained fn test_check_block_number() { + let mut env = TestEnvironment::new(); + + let router_contract = env.deploy_self("Router").without_initializer(); + let router_contract_address = router_contract.to_address(); + let router = Router::at(router_contract_address); + + env.advance_block_by(9); + + // First we sanity-check that current block number is as expected + let current_block_number = env.block_number(); + assert(current_block_number == 10, "Expected block number to be 10"); + + // We test just one success case and 1 failure case in this test as the rest is tested in the comparator unit tests + let call_1 = router.check_block_number(11, Comparator.GT); + env.call_private_void(call_1); + + let call_2 = router.check_block_number(5, Comparator.GT); + env.assert_private_call_fails(call_2); +} + +// TODO(#8372): Add test for check_timestamp --> setting timestamp currently not supported by TXE \ No newline at end of file diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr 
b/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr index 7e83b3c16fe..45a4ce6a295 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/access_control.nr @@ -1,5 +1,4 @@ use crate::test::utils; -use dep::aztec::test::helpers::cheatcodes; use crate::Token; #[test] diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/burn.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/burn.nr index a0a2c08a492..99d71bf0bd9 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/burn.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/burn.nr @@ -1,5 +1,5 @@ use crate::test::utils; -use dep::aztec::{test::helpers::cheatcodes, oracle::unsafe_rand::unsafe_rand}; +use dep::aztec::oracle::unsafe_rand::unsafe_rand; use dep::authwit::cheatcodes as authwit_cheatcodes; use crate::Token; @@ -8,7 +8,7 @@ unconstrained fn burn_public_success() { let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); let burn_amount = mint_amount / 10; - // Burn less than balance + // Burn less than balance let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, 0); env.call_public(burn_call_interface); utils::check_public_balance(token_contract_address, owner, mint_amount - burn_amount); @@ -33,7 +33,7 @@ unconstrained fn burn_public_on_behalf_of_other() { unconstrained fn burn_public_failure_more_than_balance() { let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); - // Burn more than balance + // Burn more than balance let burn_amount = mint_amount * 10; let burn_call_interface = Token::at(token_contract_address).burn_public(owner, burn_amount, 0); env.assert_public_call_fails(burn_call_interface); @@ -91,7 +91,7 @@ 
unconstrained fn burn_private_on_behalf_of_self() { let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); let burn_amount = mint_amount / 10; - // Burn less than balance + // Burn less than balance let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, 0); env.call_private_void(burn_call_interface); utils::check_private_balance(token_contract_address, owner, mint_amount - burn_amount); @@ -116,7 +116,7 @@ unconstrained fn burn_private_on_behalf_of_other() { unconstrained fn burn_private_failure_more_than_balance() { let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); - // Burn more than balance + // Burn more than balance let burn_amount = mint_amount * 10; let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, 0); env.call_private_void(burn_call_interface); @@ -127,7 +127,7 @@ unconstrained fn burn_private_failure_more_than_balance() { unconstrained fn burn_private_failure_on_behalf_of_self_non_zero_nonce() { let (env, token_contract_address, owner, _, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ false); - // Burn more than balance + // Burn more than balance let burn_amount = mint_amount / 10; let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); env.call_private_void(burn_call_interface); @@ -138,7 +138,7 @@ unconstrained fn burn_private_failure_on_behalf_of_self_non_zero_nonce() { unconstrained fn burn_private_failure_on_behalf_of_other_more_than_balance() { let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); - // Burn more than balance + // Burn more than balance let burn_amount = mint_amount * 10; // Burn on behalf of other let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); @@ -153,7 
+153,7 @@ unconstrained fn burn_private_failure_on_behalf_of_other_more_than_balance() { unconstrained fn burn_private_failure_on_behalf_of_other_without_approval() { let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); - // Burn more than balance + // Burn more than balance let burn_amount = mint_amount / 10; // Burn on behalf of other let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); @@ -167,7 +167,7 @@ unconstrained fn burn_private_failure_on_behalf_of_other_without_approval() { unconstrained fn burn_private_failure_on_behalf_of_other_wrong_designated_caller() { let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint(/* with_account_contracts */ true); - // Burn more than balance + // Burn more than balance let burn_amount = mint_amount / 10; // Burn on behalf of other let burn_call_interface = Token::at(token_contract_address).burn(owner, burn_amount, unsafe_rand()); diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_private.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_private.nr index 79ab994aeb5..0b4c9cb72d6 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_private.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_private.nr @@ -1,5 +1,5 @@ use crate::test::utils; -use dep::aztec::{test::helpers::cheatcodes, oracle::unsafe_rand::unsafe_rand, protocol_types::address::AztecAddress}; +use dep::aztec::test::helpers::cheatcodes; use dep::authwit::cheatcodes as authwit_cheatcodes; use crate::Token; diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr index abbac962303..fde110352eb 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr +++ 
b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr @@ -2,11 +2,10 @@ use dep::aztec::{ hash::compute_secret_hash, prelude::AztecAddress, test::helpers::{cheatcodes, test_environment::TestEnvironment}, protocol_types::storage::map::derive_storage_slot_in_map, - note::{note_getter::{MAX_NOTES_PER_PAGE, view_notes}, note_viewer_options::NoteViewerOptions}, oracle::{execution::{get_block_number, get_contract_address}, unsafe_rand::unsafe_rand, storage::storage_read} }; -use crate::{types::{token_note::TokenNote, transparent_note::TransparentNote}, Token}; +use crate::{types::transparent_note::TransparentNote, Token}; pub fn setup(with_account_contracts: bool) -> (&mut TestEnvironment, AztecAddress, AztecAddress, AztecAddress) { // Setup env, generate keys diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 68f2ed5ea3e..154bc65ecb4 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -196,6 +196,7 @@ global CANONICAL_AUTH_REGISTRY_ADDRESS = AztecAddress::from_field(0x24877c50868f global DEPLOYER_CONTRACT_ADDRESS = AztecAddress::from_field(0x2ab1a2bd6d07d8d61ea56d85861446349e52c6b7c0612b702cb1e6db6ad0b089); global REGISTERER_CONTRACT_ADDRESS = AztecAddress::from_field(0x05d15342d76e46e5be07d3cda0d753158431cdc5e39d29ce4e8fe1f5c070564a); global FEE_JUICE_ADDRESS = AztecAddress::from_field(0x16a83e3395bc921a2441db55dce24f0e0932636901a2e676fa68b9b2b9a644c1); +global ROUTER_ADDRESS = AztecAddress::from_field(0x11fc9d3c438ea027f3d52cb7cf844fa4bb197520205c7366b8887a624f6a7b2c); // LENGTH OF STRUCTS SERIALIZED TO FIELDS global AZTEC_ADDRESS_LENGTH = 1; diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 9a55c2fee31..10056cb822c 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ 
b/yarn-project/circuits.js/src/constants.gen.ts @@ -126,6 +126,7 @@ export const DEPLOYER_CONTRACT_ADDRESS = 193109947607833303683371634801986023939 export const REGISTERER_CONTRACT_ADDRESS = 2631409926445785927331173506476539962589925110142857699603561302478860342858n; export const FEE_JUICE_ADDRESS = 10248142274714515101077825679585135641434041564851038865006795089686437446849n; +export const ROUTER_ADDRESS = 8135649085127523915405560812661632604783066942985338123941332115593181690668n; export const AZTEC_ADDRESS_LENGTH = 1; export const GAS_FEES_LENGTH = 2; export const GAS_LENGTH = 2; diff --git a/yarn-project/circuits.js/src/contract/artifact_hash.ts b/yarn-project/circuits.js/src/contract/artifact_hash.ts index a5d6f7673af..05fb484ca13 100644 --- a/yarn-project/circuits.js/src/contract/artifact_hash.ts +++ b/yarn-project/circuits.js/src/contract/artifact_hash.ts @@ -72,6 +72,7 @@ export function computeArtifactMetadataHash(artifact: ContractArtifact) { 'FeeJuice', 'ContractInstanceDeployer', 'ContractClassRegisterer', + 'Router', ]; // This is a temporary workaround for the canonical contracts to have deterministic deployments. 
diff --git a/yarn-project/circuits.js/src/contract/events/unconstrained_function_broadcasted_event.test.ts b/yarn-project/circuits.js/src/contract/events/unconstrained_function_broadcasted_event.test.ts index d331a41b1cf..33cd679e0c5 100644 --- a/yarn-project/circuits.js/src/contract/events/unconstrained_function_broadcasted_event.test.ts +++ b/yarn-project/circuits.js/src/contract/events/unconstrained_function_broadcasted_event.test.ts @@ -18,7 +18,7 @@ describe('UnconstrainedFunctionBroadcastedEvent', () => { expect(event).toMatchSnapshot(); }); - it('filters out zero-elements at the end of the artifcat tree sibling path', () => { + it('filters out zero-elements at the end of the artifact tree sibling path', () => { const siblingPath: Tuple = [Fr.ZERO, new Fr(1), Fr.ZERO, new Fr(2), Fr.ZERO]; const event = new UnconstrainedFunctionBroadcastedEvent( Fr.random(), diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index dac9a054a04..f462b270171 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -38,7 +38,12 @@ import { MNEMONIC } from './fixtures.js'; import { getACVMConfig } from './get_acvm_config.js'; import { getBBConfig } from './get_bb_config.js'; import { setupL1Contracts } from './setup_l1_contracts.js'; -import { deployCanonicalAuthRegistry, deployCanonicalKeyRegistry, getPrivateKeyFromIndex } from './utils.js'; +import { + deployCanonicalAuthRegistry, + deployCanonicalKeyRegistry, + deployCanonicalRouter, + getPrivateKeyFromIndex, +} from './utils.js'; import { Watcher } from './watcher.js'; export type SubsystemsContext = { @@ -350,6 +355,10 @@ async function setupFromFresh( await deployCanonicalAuthRegistry( new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(aztecNodeConfig.l1ChainId, aztecNodeConfig.version)), ); + logger.verbose('Deploying router...'); + await deployCanonicalRouter( + 
new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(aztecNodeConfig.l1ChainId, aztecNodeConfig.version)), + ); if (statePath) { writeFileSync(`${statePath}/aztec_node_config.json`, JSON.stringify(aztecNodeConfig)); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 17ca30c3300..52322c1d86c 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -35,6 +35,7 @@ import { type EthAddress, GasSettings, MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS, + ROUTER_ADDRESS, computeContractAddressFromInstance, getContractClassFromArtifact, } from '@aztec/circuits.js'; @@ -57,12 +58,13 @@ import { RollupAbi, RollupBytecode, } from '@aztec/l1-artifacts'; -import { AuthRegistryContract, KeyRegistryContract } from '@aztec/noir-contracts.js'; +import { AuthRegistryContract, KeyRegistryContract, RouterContract } from '@aztec/noir-contracts.js'; import { FeeJuiceContract } from '@aztec/noir-contracts.js/FeeJuice'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; import { getCanonicalAuthRegistry } from '@aztec/protocol-contracts/auth-registry'; import { FeeJuiceAddress, getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; +import { getCanonicalRouter } from '@aztec/protocol-contracts/router'; import { PXEService, type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { type SequencerClient } from '@aztec/sequencer-client'; import { createAndStartTelemetryClient, getConfigEnvVars as getTelemetryConfig } from '@aztec/telemetry-client/start'; @@ -448,6 +450,11 @@ export async function setup( new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(config.l1ChainId, config.version)), ); } + + logger.verbose('Deploying router...'); + await deployCanonicalRouter( + new SignerlessWallet(pxe, new 
DefaultMultiCallEntrypoint(config.l1ChainId, config.version)), + ); } const watcher = new Watcher( @@ -789,6 +796,39 @@ export async function deployCanonicalAuthRegistry(deployer: Wallet) { await expect(deployer.getContractInstance(canonicalAuthRegistry.instance.address)).resolves.toBeDefined(); } +export async function deployCanonicalRouter(deployer: Wallet) { + const canonicalRouter = getCanonicalRouter(); + + // We check to see if there exists a contract at the Router address with the same contract class id as we expect. This means that + // the router has already been deployed to the correct address. + if ( + (await deployer.getContractInstance(canonicalRouter.address))?.contractClassId.equals( + canonicalRouter.contractClass.id, + ) && + (await deployer.isContractClassPubliclyRegistered(canonicalRouter.contractClass.id)) + ) { + return; + } + + const router = await RouterContract.deploy(deployer) + .send({ contractAddressSalt: canonicalRouter.instance.salt, universalDeploy: true }) + .deployed(); + + if ( + !router.address.equals(canonicalRouter.address) || + !router.address.equals(AztecAddress.fromBigInt(ROUTER_ADDRESS)) + ) { + throw new Error( + `Deployed Router address ${router.address} does not match expected address ${canonicalRouter.address}, or they both do not equal ROUTER_ADDRESS`, + ); + } + + expect(computeContractAddressFromInstance(router.instance)).toEqual(router.address); + expect(getContractClassFromArtifact(router.artifact).id).toEqual(router.instance.contractClassId); + await expect(deployer.isContractClassPubliclyRegistered(canonicalRouter.contractClass.id)).resolves.toBe(true); + await expect(deployer.getContractInstance(canonicalRouter.instance.address)).resolves.toBeDefined(); +} + export async function waitForProvenChain(node: AztecNode, targetBlock?: number, timeoutSec = 60, intervalSec = 1) { targetBlock ??= await node.getBlockNumber(); diff --git a/yarn-project/protocol-contracts/scripts/copy-contracts.sh 
b/yarn-project/protocol-contracts/scripts/copy-contracts.sh index d9f4a60a25c..3646ec63131 100755 --- a/yarn-project/protocol-contracts/scripts/copy-contracts.sh +++ b/yarn-project/protocol-contracts/scripts/copy-contracts.sh @@ -9,6 +9,7 @@ contracts=( key_registry_contract-KeyRegistry auth_registry_contract-AuthRegistry multi_call_entrypoint_contract-MultiCallEntrypoint + router_contract-Router ) diff --git a/yarn-project/protocol-contracts/src/router/artifact.ts b/yarn-project/protocol-contracts/src/router/artifact.ts new file mode 100644 index 00000000000..ca6a86b860e --- /dev/null +++ b/yarn-project/protocol-contracts/src/router/artifact.ts @@ -0,0 +1,6 @@ +import { loadContractArtifact } from '@aztec/types/abi'; +import { type NoirCompiledContract } from '@aztec/types/noir'; + +import RouterJson from '../../artifacts/Router.json' assert { type: 'json' }; + +export const RouterArtifact = loadContractArtifact(RouterJson as NoirCompiledContract); diff --git a/yarn-project/protocol-contracts/src/router/index.test.ts b/yarn-project/protocol-contracts/src/router/index.test.ts new file mode 100644 index 00000000000..2591ede3f73 --- /dev/null +++ b/yarn-project/protocol-contracts/src/router/index.test.ts @@ -0,0 +1,17 @@ +import { + AztecAddress, + ROUTER_ADDRESS, + computeContractAddressFromInstance, + getContractClassFromArtifact, +} from '@aztec/circuits.js'; + +import { getCanonicalRouter } from './index.js'; + +describe('Router', () => { + it('returns canonical protocol contract', () => { + const contract = getCanonicalRouter(); + expect(computeContractAddressFromInstance(contract.instance)).toEqual(contract.address); + expect(getContractClassFromArtifact(contract.artifact).id).toEqual(contract.contractClass.id); + expect(contract.address).toEqual(AztecAddress.fromBigInt(ROUTER_ADDRESS)); + }); +}); diff --git a/yarn-project/protocol-contracts/src/router/index.ts b/yarn-project/protocol-contracts/src/router/index.ts new file mode 100644 index 
00000000000..46b94433ba9 --- /dev/null +++ b/yarn-project/protocol-contracts/src/router/index.ts @@ -0,0 +1,22 @@ +import { AztecAddress, ROUTER_ADDRESS } from '@aztec/circuits.js'; + +import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol_contract.js'; +import { RouterArtifact } from './artifact.js'; + +/** Returns the canonical deployment of the router. */ +export function getCanonicalRouter(): ProtocolContract { + const contract = getCanonicalProtocolContract(RouterArtifact, 1); + + if (!contract.address.equals(RouterAddress)) { + throw new Error( + `Incorrect address for router (got ${contract.address.toString()} but expected ${RouterAddress.toString()}). Check ROUTER_ADDRESS is set to the correct value in the constants files and run the protocol-contracts package tests.`, + ); + } + return contract; +} + +export function getCanonicalRouterAddress(): AztecAddress { + return getCanonicalRouter().address; +} + +export const RouterAddress = AztecAddress.fromBigInt(ROUTER_ADDRESS); diff --git a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts index e50e14f5339..fd4b1d3a34d 100644 --- a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts @@ -10,6 +10,7 @@ import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getCanonicalInstanceDeployer } from '@aztec/protocol-contracts/instance-deployer'; import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; import { getCanonicalMultiCallEntrypointContract } from '@aztec/protocol-contracts/multi-call-entrypoint'; +import { getCanonicalRouter } from '@aztec/protocol-contracts/router'; import { type PXEServiceConfig } from '../config/index.js'; import { KVPxeDatabase } from '../database/kv_pxe_database.js'; @@ -52,6 +53,7 @@ export async function createPXEService( getCanonicalFeeJuice(), getCanonicalKeyRegistry(), 
getCanonicalAuthRegistry(), + getCanonicalRouter(), ]) { await server.registerContract(contract); } From f1746999ea12cc8117efd5a0c3b2ec5d80196343 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 4 Sep 2024 19:52:03 -0400 Subject: [PATCH 12/18] chore(bb): use std::span for srs (#8371) A bit of safety - will help me catch a bug in polynomial memory PR Fix a breakage in tests due to bad global grumpkin CRS assumptions --- .../commitment_schemes/commitment_key.hpp | 7 ++- .../commitment_schemes/ipa/ipa.hpp | 21 ++++--- .../commitment_schemes/ipa/ipa.test.cpp | 4 +- .../commitment_schemes/verification_key.hpp | 9 +-- .../barretenberg/ecc/curves/bn254/bn254.hpp | 1 + .../ecc/curves/grumpkin/grumpkin.hpp | 1 + .../scalar_multiplication.cpp | 61 ++++++++++--------- .../scalar_multiplication.hpp | 10 ++- .../src/barretenberg/eccvm/eccvm_flavor.hpp | 1 + .../plonk/proof_system/verifier/verifier.cpp | 6 +- .../plonk/work_queue/work_queue.cpp | 2 +- .../polynomials/legacy_polynomials.bench.cpp | 20 +++--- .../srs/factories/crs_factory.hpp | 4 +- .../srs/factories/file_crs_factory.cpp | 19 +++--- .../srs/factories/file_crs_factory.hpp | 7 ++- .../srs/factories/mem_crs_factory.test.cpp | 12 ++-- .../factories/mem_grumpkin_crs_factory.cpp | 5 +- .../srs/factories/mem_prover_crs.hpp | 5 +- .../srs/scalar_multiplication.test.cpp | 34 +++++++---- .../verifier_commitment_key.hpp | 3 +- .../verifier_commitment_key.test.cpp | 2 +- 21 files changed, 129 insertions(+), 105 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/commitment_key.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/commitment_key.hpp index 746ebbe3786..0759472bf83 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/commitment_key.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/commitment_key.hpp @@ -92,7 +92,7 @@ template class CommitmentKey { ASSERT(false); } return scalar_multiplication::pippenger_unsafe_optimized_for_non_dyadic_polys( - 
polynomial, { srs->get_monomial_points(), srs->get_monomial_size() }, pippenger_runtime_state); + polynomial, srs->get_monomial_points(), pippenger_runtime_state); }; /** @@ -113,7 +113,7 @@ template class CommitmentKey { // Extract the precomputed point table (contains raw SRS points at even indices and the corresponding // endomorphism point (\beta*x, -y) at odd indices). - G1* point_table = srs->get_monomial_points(); + std::span point_table = srs->get_monomial_points(); // Define structures needed to multithread the extraction of non-zero inputs const size_t num_threads = degree >= get_num_cpus_pow2() ? get_num_cpus_pow2() : 1; @@ -133,6 +133,7 @@ template class CommitmentKey { if (!scalar.is_zero()) { thread_scalars[thread_idx].emplace_back(scalar); // Save both the raw srs point and the precomputed endomorphism point from the point table + ASSERT(idx * 2 + 1 < point_table.size()); const G1& point = point_table[idx * 2]; const G1& endo_point = point_table[idx * 2 + 1]; thread_points[thread_idx].emplace_back(point); @@ -158,7 +159,7 @@ template class CommitmentKey { } // Call the version of pippenger which assumes all points are distinct - return scalar_multiplication::pippenger_unsafe(scalars, points.data(), pippenger_runtime_state); + return scalar_multiplication::pippenger_unsafe(scalars, points, pippenger_runtime_state); } }; diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp index fa0ed6e5f96..45b1bf209ab 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp @@ -4,6 +4,7 @@ #include "barretenberg/common/assert.hpp" #include "barretenberg/common/container.hpp" #include "barretenberg/common/thread.hpp" +#include "barretenberg/common/throw_or_abort.hpp" #include "barretenberg/ecc/scalar_multiplication/scalar_multiplication.hpp" #include 
"barretenberg/transcript/transcript.hpp" #include @@ -162,9 +163,13 @@ template class IPA { // Step 4. // Set initial vector a to the polynomial monomial coefficients and load vector G auto a_vec = polynomial; - auto* srs_elements = ck->srs->get_monomial_points(); + std::span srs_elements = ck->srs->get_monomial_points(); std::vector G_vec_local(poly_length); + if (poly_length * 2 > srs_elements.size()) { + throw_or_abort("potential bug: Not enough SRS points for IPA!"); + } + // The SRS stored in the commitment key is the result after applying the pippenger point table so the // values at odd indices contain the point {srs[i-1].x * beta, srs[i-1].y}, where beta is the endomorphism // G_vec_local should use only the original SRS thus we extract only the even indices. @@ -215,13 +220,13 @@ template class IPA { // Step 6.a (using letters, because doxygen automaticall converts the sublist counters to letters :( ) // L_i = < a_vec_lo, G_vec_hi > + inner_prod_L * aux_generator L_i = bb::scalar_multiplication::pippenger_without_endomorphism_basis_points( - {&a_vec[0], /*size*/ round_size}, &G_vec_local[round_size], ck->pippenger_runtime_state); + {&a_vec[0], /*size*/ round_size}, {&G_vec_local[round_size], /*size*/ round_size}, ck->pippenger_runtime_state); L_i += aux_generator * inner_prod_L; // Step 6.b // R_i = < a_vec_hi, G_vec_lo > + inner_prod_R * aux_generator R_i = bb::scalar_multiplication::pippenger_without_endomorphism_basis_points( - {&a_vec[round_size], /*size*/ round_size}, &G_vec_local[0], ck->pippenger_runtime_state); + {&a_vec[round_size], /*size*/ round_size}, {&G_vec_local[0], /*size*/ round_size}, ck->pippenger_runtime_state); R_i += aux_generator * inner_prod_R; // Step 6.c @@ -345,7 +350,7 @@ template class IPA { // Step 5. 
// Compute C₀ = C' + ∑_{j ∈ [k]} u_j^{-1}L_j + ∑_{j ∈ [k]} u_jR_j GroupElement LR_sums = bb::scalar_multiplication::pippenger_without_endomorphism_basis_points( - {&msm_scalars[0], /*size*/ pippenger_size}, &msm_elements[0], vk->pippenger_runtime_state); + {&msm_scalars[0], /*size*/ pippenger_size}, {&msm_elements[0], /*size*/ pippenger_size}, vk->pippenger_runtime_state); GroupElement C_zero = C_prime + LR_sums; // Step 6. @@ -377,8 +382,10 @@ template class IPA { } }, thread_heuristics::FF_MULTIPLICATION_COST * log_poly_degree); - auto* srs_elements = vk->get_monomial_points(); - + std::span srs_elements = vk->get_monomial_points(); + if (poly_length * 2 > srs_elements.size()) { + throw_or_abort("potential bug: Not enough SRS points for IPA!"); + } // Copy the G_vector to local memory. std::vector G_vec_local(poly_length); @@ -394,7 +401,7 @@ template class IPA { // Step 8. // Compute G₀ Commitment G_zero = bb::scalar_multiplication::pippenger_without_endomorphism_basis_points( - {&s_vec[0], /*size*/ poly_length}, &G_vec_local[0], vk->pippenger_runtime_state); + {&s_vec[0], /*size*/ poly_length}, {&G_vec_local[0], /*size*/ poly_length}, vk->pippenger_runtime_state); // Step 9. 
// Receive a₀ from the prover diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.test.cpp index d05e7abbc29..e03803a0132 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.test.cpp @@ -37,7 +37,7 @@ TEST_F(IPATest, CommitOnManyZeroCoeffPolyWorks) } p[3] = Fr::one(); GroupElement commitment = this->commit(p); - auto* srs_elements = this->ck()->srs->get_monomial_points(); + auto srs_elements = this->ck()->srs->get_monomial_points(); GroupElement expected = srs_elements[0] * p[0]; // The SRS stored in the commitment key is the result after applying the pippenger point table so the // values at odd indices contain the point {srs[i-1].x * beta, srs[i-1].y}, where beta is the endomorphism @@ -196,7 +196,7 @@ TEST_F(IPATest, Commit) constexpr size_t n = 128; auto poly = this->random_polynomial(n); GroupElement commitment = this->commit(poly); - auto* srs_elements = this->ck()->srs->get_monomial_points(); + auto srs_elements = this->ck()->srs->get_monomial_points(); GroupElement expected = srs_elements[0] * poly[0]; // The SRS stored in the commitment key is the result after applying the pippenger point table so the // values at odd indices contain the point {srs[i-1].x * beta, srs[i-1].y}, where beta is the endomorphism diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp index 953e66b7534..c5ed65abc86 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp @@ -33,11 +33,7 @@ template <> class VerifierCommitmentKey { using GroupElement = typename Curve::Element; using Commitment = typename Curve::AffineElement; - VerifierCommitmentKey() - { - 
srs::init_crs_factory("../srs_db/ignition"); - srs = srs::get_crs_factory()->get_verifier_crs(); - }; + VerifierCommitmentKey() { srs = srs::get_crs_factory()->get_verifier_crs(); }; Commitment get_g1_identity() { return srs->get_g1_identity(); } @@ -88,13 +84,12 @@ template <> class VerifierCommitmentKey { VerifierCommitmentKey(size_t num_points) : pippenger_runtime_state(num_points) { - srs::init_grumpkin_crs_factory("../srs_db/grumpkin"); srs = srs::get_crs_factory()->get_verifier_crs(num_points); } Commitment get_g1_identity() { return srs->get_g1_identity(); } - Commitment* get_monomial_points() { return srs->get_monomial_points(); } + std::span get_monomial_points() { return srs->get_monomial_points(); } bb::scalar_multiplication::pippenger_runtime_state pippenger_runtime_state; diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/bn254.hpp b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/bn254.hpp index 37d42124c7e..8b2308d53b7 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/bn254.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/bn254.hpp @@ -18,6 +18,7 @@ class BN254 { using G2BaseField = typename bb::fq2; using TargetField = bb::fq12; + static constexpr const char* name = "BN254"; // TODO(#673): This flag is temporary. It is needed in the verifier classes (GeminiVerifier, etc.) while these // classes are instantiated with "native" curve types. Eventually, the verifier classes will be instantiated only // with stdlib types, and "native" verification will be acheived via a simulated builder. 
diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/grumpkin.hpp b/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/grumpkin.hpp index fc81216686a..cbba4dc346c 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/grumpkin.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/grumpkin.hpp @@ -49,6 +49,7 @@ class Grumpkin { using Element = typename Group::element; using AffineElement = typename Group::affine_element; + static constexpr const char* name = "Grumpkin"; // TODO(#673): This flag is temporary. It is needed in the verifier classes (GeminiVerifier, etc.) while these // classes are instantiated with "native" curve types. Eventually, the verifier classes will be instantiated only // with stdlib types, and "native" verification will be acheived via a simulated builder. diff --git a/barretenberg/cpp/src/barretenberg/ecc/scalar_multiplication/scalar_multiplication.cpp b/barretenberg/cpp/src/barretenberg/ecc/scalar_multiplication/scalar_multiplication.cpp index 3bd52afd563..2b6e6625630 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/scalar_multiplication/scalar_multiplication.cpp +++ b/barretenberg/cpp/src/barretenberg/ecc/scalar_multiplication/scalar_multiplication.cpp @@ -758,7 +758,7 @@ uint32_t construct_addition_chains(affine_product_runtime_state& state, b template typename Curve::Element evaluate_pippenger_rounds(pippenger_runtime_state& state, - const typename Curve::AffineElement* points, + std::span points, const size_t num_points, bool handle_edge_cases) { @@ -798,7 +798,7 @@ typename Curve::Element evaluate_pippenger_rounds(pippenger_runtime_state affine_product_runtime_state product_state = state.get_affine_product_runtime_state(num_threads, j); product_state.num_points = static_cast(num_round_points_per_thread + leftovers); - product_state.points = points; + product_state.points = points.data(); product_state.point_schedule = thread_point_schedule; product_state.num_buckets = 
static_cast(num_thread_buckets); AffineElement* output_buckets = reduce_buckets(product_state, true, handle_edge_cases); @@ -874,7 +874,7 @@ typename Curve::Element evaluate_pippenger_rounds(pippenger_runtime_state } template -typename Curve::Element pippenger_internal(typename Curve::AffineElement* points, +typename Curve::Element pippenger_internal(std::span points, std::span scalars, const size_t num_initial_points, pippenger_runtime_state& state, @@ -890,7 +890,7 @@ typename Curve::Element pippenger_internal(typename Curve::AffineElement* points template typename Curve::Element pippenger(std::span scalars, - typename Curve::AffineElement* points, + std::span points, pippenger_runtime_state& state, bool handle_edge_cases) { @@ -928,10 +928,9 @@ typename Curve::Element pippenger(std::span s Element result = pippenger_internal(points, scalars, num_slice_points, state, handle_edge_cases); if (num_slice_points != num_initial_points) { - return result + pippenger(scalars.subspan(num_slice_points), - points + static_cast(num_slice_points * 2), - state, - handle_edge_cases); + return result + + pippenger( + scalars.subspan(num_slice_points), points.subspan(num_slice_points * 2), state, handle_edge_cases); } return result; } @@ -944,7 +943,7 @@ Pippenger above can behavely poorly with numbers with many bits set.*/ template typename Curve::Element pippenger_unsafe_optimized_for_non_dyadic_polys( std::span scalars, - std::span points, + std::span points, pippenger_runtime_state& state) { BB_OP_COUNT_TIME(); @@ -953,12 +952,12 @@ typename Curve::Element pippenger_unsafe_optimized_for_non_dyadic_polys( const size_t threshold = get_num_cpus_pow2() * 8; // Delegate edge-cases to normal pippenger_unsafe(). if (scalars.size() <= threshold) { - return pippenger_unsafe(scalars, &points[0], state); + return pippenger_unsafe(scalars, points, state); } // We need a padding of scalars. 
ASSERT(numeric::round_up_power_2(scalars.size()) <= points.size()); // We do not optimize for the small case at all. - return pippenger_internal(&points[0], scalars, numeric::round_up_power_2(scalars.size()), state, false); + return pippenger_internal(points, scalars, numeric::round_up_power_2(scalars.size()), state, false); } /** @@ -978,7 +977,7 @@ typename Curve::Element pippenger_unsafe_optimized_for_non_dyadic_polys( **/ template typename Curve::Element pippenger_unsafe(std::span scalars, - typename Curve::AffineElement* points, + std::span points, pippenger_runtime_state& state) { return pippenger(scalars, points, state, false); @@ -987,12 +986,13 @@ typename Curve::Element pippenger_unsafe(std::span typename Curve::Element pippenger_without_endomorphism_basis_points( std::span scalars, - typename Curve::AffineElement* points, + std::span points, pippenger_runtime_state& state) { std::vector G_mod(scalars.size() * 2); - bb::scalar_multiplication::generate_pippenger_point_table(points, &G_mod[0], scalars.size()); - return pippenger(scalars, &G_mod[0], state, false); + ASSERT(scalars.size() <= points.size()); + bb::scalar_multiplication::generate_pippenger_point_table(points.data(), &G_mod[0], scalars.size()); + return pippenger(scalars, G_mod, state, false); } // Explicit instantiation @@ -1015,38 +1015,39 @@ template void add_affine_points_with_edge_cases(curve::BN254::Affi template void evaluate_addition_chains(affine_product_runtime_state& state, const size_t max_bucket_bits, bool handle_edge_cases); -template curve::BN254::Element pippenger_internal(curve::BN254::AffineElement* points, +template curve::BN254::Element pippenger_internal(std::span points, std::span scalars, const size_t num_initial_points, pippenger_runtime_state& state, bool handle_edge_cases); -template curve::BN254::Element evaluate_pippenger_rounds(pippenger_runtime_state& state, - const curve::BN254::AffineElement* points, - const size_t num_points, - bool handle_edge_cases = false); 
+template curve::BN254::Element evaluate_pippenger_rounds( + pippenger_runtime_state& state, + std::span points, + const size_t num_points, + bool handle_edge_cases = false); template curve::BN254::AffineElement* reduce_buckets(affine_product_runtime_state& state, bool first_round = true, bool handle_edge_cases = false); template curve::BN254::Element pippenger(std::span scalars, - curve::BN254::AffineElement* points, + std::span points, pippenger_runtime_state& state, bool handle_edge_cases = true); template curve::BN254::Element pippenger_unsafe(std::span scalars, - curve::BN254::AffineElement* points, + std::span points, pippenger_runtime_state& state); template curve::BN254::Element pippenger_unsafe_optimized_for_non_dyadic_polys( std::span scalars, - std::span points, + std::span points, pippenger_runtime_state& state); template curve::BN254::Element pippenger_without_endomorphism_basis_points( std::span scalars, - curve::BN254::AffineElement* points, + std::span points, pippenger_runtime_state& state); // Grumpkin @@ -1069,7 +1070,7 @@ template void evaluate_addition_chains(affine_product_runtime_s const size_t max_bucket_bits, bool handle_edge_cases); template curve::Grumpkin::Element pippenger_internal( - curve::Grumpkin::AffineElement* points, + std::span points, std::span scalars, const size_t num_initial_points, pippenger_runtime_state& state, @@ -1077,7 +1078,7 @@ template curve::Grumpkin::Element pippenger_internal( template curve::Grumpkin::Element evaluate_pippenger_rounds( pippenger_runtime_state& state, - const curve::Grumpkin::AffineElement* points, + std::span points, const size_t num_points, bool handle_edge_cases = false); @@ -1085,22 +1086,22 @@ template curve::Grumpkin::AffineElement* reduce_buckets( affine_product_runtime_state& state, bool first_round = true, bool handle_edge_cases = false); template curve::Grumpkin::Element pippenger(std::span scalars, - curve::Grumpkin::AffineElement* points, + std::span points, pippenger_runtime_state& 
state, bool handle_edge_cases = true); template curve::Grumpkin::Element pippenger_unsafe( std::span scalars, - curve::Grumpkin::AffineElement* points, + std::span points, pippenger_runtime_state& state); template curve::Grumpkin::Element pippenger_unsafe_optimized_for_non_dyadic_polys( std::span scalars, - std::span points, + std::span points, pippenger_runtime_state& state); template curve::Grumpkin::Element pippenger_without_endomorphism_basis_points( std::span scalars, - curve::Grumpkin::AffineElement* points, + std::span points, pippenger_runtime_state& state); } // namespace bb::scalar_multiplication diff --git a/barretenberg/cpp/src/barretenberg/ecc/scalar_multiplication/scalar_multiplication.hpp b/barretenberg/cpp/src/barretenberg/ecc/scalar_multiplication/scalar_multiplication.hpp index a4604f652ab..3dffd18474f 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/scalar_multiplication/scalar_multiplication.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/scalar_multiplication/scalar_multiplication.hpp @@ -150,22 +150,20 @@ template typename Curve::AffineElement* reduce_buckets(affine_product_runtime_state& state, bool first_round = true, bool handle_edge_cases = false); - template typename Curve::Element pippenger(std::span scalars, - typename Curve::AffineElement* points, + std::span points, pippenger_runtime_state& state, bool handle_edge_cases = true); - template typename Curve::Element pippenger_unsafe(std::span scalars, - typename Curve::AffineElement* points, + std::span points, pippenger_runtime_state& state); template typename Curve::Element pippenger_without_endomorphism_basis_points( std::span scalars, - typename Curve::AffineElement* points, + std::span points, pippenger_runtime_state& state); // NOTE: pippenger_unsafe_optimized_for_non_dyadic_polys requires SRS to have #scalars @@ -173,7 +171,7 @@ typename Curve::Element pippenger_without_endomorphism_basis_points( template typename Curve::Element pippenger_unsafe_optimized_for_non_dyadic_polys( 
std::span scalars, - std::span points, + std::span points, pippenger_runtime_state& state); // Explicit instantiation diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index 6affa954598..4d70b0c3110 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -685,6 +685,7 @@ class ECCVMFlavor { // IPA verification key requires one more point. // TODO(https://github.com/AztecProtocol/barretenberg/issues/1025): make it so that PCSs inform the crs of // how many points they need + info("eccvmvk: ", proving_key->circuit_size + 1); this->pcs_verification_key = std::make_shared(proving_key->circuit_size + 1); this->circuit_size = proving_key->circuit_size; this->log_circuit_size = numeric::get_msb(this->circuit_size); diff --git a/barretenberg/cpp/src/barretenberg/plonk/proof_system/verifier/verifier.cpp b/barretenberg/cpp/src/barretenberg/plonk/proof_system/verifier/verifier.cpp index cea9ea412d2..a8e2e017f7d 100644 --- a/barretenberg/cpp/src/barretenberg/plonk/proof_system/verifier/verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/plonk/proof_system/verifier/verifier.cpp @@ -177,13 +177,13 @@ template bool VerifierBase::verify size_t num_elements = elements.size(); elements.resize(num_elements * 2); - bb::scalar_multiplication::generate_pippenger_point_table(&elements[0], &elements[0], num_elements); + bb::scalar_multiplication::generate_pippenger_point_table( + elements.data(), elements.data(), num_elements); scalar_multiplication::pippenger_runtime_state state(num_elements); g1::element P[2]; - P[0] = - bb::scalar_multiplication::pippenger({ &scalars[0], /*size*/ num_elements }, &elements[0], state); + P[0] = bb::scalar_multiplication::pippenger({ &scalars[0], /*size*/ num_elements }, elements, state); P[1] = -(g1::element(PI_Z_OMEGA) * separator_challenge + PI_Z); if (key->contains_recursive_proof) { 
diff --git a/barretenberg/cpp/src/barretenberg/plonk/work_queue/work_queue.cpp b/barretenberg/cpp/src/barretenberg/plonk/work_queue/work_queue.cpp index 2b5eef768b5..63a9c7cd089 100644 --- a/barretenberg/cpp/src/barretenberg/plonk/work_queue/work_queue.cpp +++ b/barretenberg/cpp/src/barretenberg/plonk/work_queue/work_queue.cpp @@ -209,7 +209,7 @@ void work_queue::process_queue() ASSERT(msm_size <= key->reference_string->get_monomial_size()); - bb::g1::affine_element* srs_points = key->reference_string->get_monomial_points(); + std::span srs_points = key->reference_string->get_monomial_points(); // Run pippenger multi-scalar multiplication. auto runtime_state = bb::scalar_multiplication::pippenger_runtime_state(msm_size); diff --git a/barretenberg/cpp/src/barretenberg/polynomials/legacy_polynomials.bench.cpp b/barretenberg/cpp/src/barretenberg/polynomials/legacy_polynomials.bench.cpp index abad771e626..7bb4fff0fb2 100644 --- a/barretenberg/cpp/src/barretenberg/polynomials/legacy_polynomials.bench.cpp +++ b/barretenberg/cpp/src/barretenberg/polynomials/legacy_polynomials.bench.cpp @@ -124,7 +124,7 @@ void pippenger_bench(State& state) noexcept state.ResumeTiming(); // uint64_t before = rdtsc(); scalar_multiplication::pippenger( - { &globals.scalars[0], /*size*/ num_points }, &globals.monomials[0], run_state); + { globals.scalars, /*size*/ num_points }, { globals.monomials, /*size*/ num_points * 2 }, run_state); // uint64_t after = rdtsc(); // count += (after - before); // ++i; @@ -169,23 +169,23 @@ void new_plonk_scalar_multiplications_bench(State& state) noexcept uint64_t before = rdtsc(); g1::element a = scalar_multiplication::pippenger( - { &globals.scalars[0], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { &globals.scalars[0], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); g1::element b = scalar_multiplication::pippenger( - { &globals.scalars[1], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { 
&globals.scalars[1], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); g1::element c = scalar_multiplication::pippenger( - { &globals.scalars[2], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { &globals.scalars[2], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); g1::element d = scalar_multiplication::pippenger( - { &globals.scalars[3], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { &globals.scalars[3], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); g1::element e = scalar_multiplication::pippenger( - { &globals.scalars[4], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { &globals.scalars[4], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); g1::element f = scalar_multiplication::pippenger( - { &globals.scalars[5], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { &globals.scalars[5], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); g1::element g = scalar_multiplication::pippenger( - { &globals.scalars[6], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { &globals.scalars[6], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); g1::element h = scalar_multiplication::pippenger( - { &globals.scalars[7], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { &globals.scalars[7], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); g1::element i = scalar_multiplication::pippenger( - { &globals.scalars[8], /*size*/ MAX_GATES }, &globals.monomials[0], run_state); + { &globals.scalars[8], /*size*/ MAX_GATES }, { globals.monomials, /*size*/ MAX_GATES * 2 }, run_state); uint64_t after = rdtsc(); count += (after - before); ++k; diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/crs_factory.hpp b/barretenberg/cpp/src/barretenberg/srs/factories/crs_factory.hpp index 635bc3c0f5f..c56f5b98272 
100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/crs_factory.hpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/crs_factory.hpp @@ -23,7 +23,7 @@ template class ProverCrs { /** * @brief Returns the monomial points in a form to be consumed by scalar_multiplication pippenger algorithm. */ - virtual typename Curve::AffineElement* get_monomial_points() = 0; + virtual std::span get_monomial_points() = 0; virtual size_t get_monomial_size() const = 0; }; @@ -56,7 +56,7 @@ template <> class VerifierCrs { * @brief Returns the G_1 elements in the CRS after the pippenger point table has been applied on them * */ - virtual Curve::AffineElement* get_monomial_points() const = 0; + virtual std::span get_monomial_points() const = 0; virtual size_t get_monomial_size() const = 0; /** * @brief Returns the first G_1 element from the CRS, used by the Shplonk verifier to compute the final diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.cpp b/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.cpp index bb4ddcf5f42..16af14fec8b 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.cpp @@ -36,9 +36,9 @@ FileVerifierCrs::FileVerifierCrs(std::string const& path, const g1_identity = monomials_[0]; }; -curve::Grumpkin::AffineElement* FileVerifierCrs::get_monomial_points() const +std::span FileVerifierCrs::get_monomial_points() const { - return monomials_.get(); + return { monomials_.get(), num_points * 2 }; } size_t FileVerifierCrs::get_monomial_size() const @@ -49,15 +49,17 @@ size_t FileVerifierCrs::get_monomial_size() const template FileCrsFactory::FileCrsFactory(std::string path, size_t initial_degree) : path_(std::move(path)) - , degree_(initial_degree) + , prover_degree_(initial_degree) + , verifier_degree_(initial_degree) {} template std::shared_ptr> FileCrsFactory::get_prover_crs(size_t degree) { - if (degree_ < degree || 
!prover_crs_) { + if (prover_degree_ < degree || !prover_crs_) { prover_crs_ = std::make_shared>(degree, path_); - degree_ = degree; + prover_degree_ = degree; + info("Initializing ", Curve::name, " prover CRS from file of size ", degree); } return prover_crs_; } @@ -65,9 +67,10 @@ std::shared_ptr> FileCrsFactory::get template std::shared_ptr> FileCrsFactory::get_verifier_crs(size_t degree) { - if (degree_ < degree || !verifier_crs_) { + if (verifier_degree_ < degree || !verifier_crs_) { verifier_crs_ = std::make_shared>(path_, degree); - degree_ = degree; + verifier_degree_ = degree; + info("Initializing ", Curve::name, " verifier CRS from file of size ", degree); } return verifier_crs_; } @@ -77,4 +80,4 @@ template class FileProverCrs; template class FileCrsFactory; template class FileCrsFactory; -} // namespace bb::srs::factories +} // namespace bb::srs::factories \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.hpp b/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.hpp index 6e3aa283a27..f3eca37a48f 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.hpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/file_crs_factory.hpp @@ -24,7 +24,8 @@ template class FileCrsFactory : public CrsFactory { private: std::string path_; - size_t degree_; + size_t prover_degree_; + size_t verifier_degree_; std::shared_ptr> prover_crs_; std::shared_ptr> verifier_crs_; }; @@ -50,7 +51,7 @@ template class FileProverCrs : public ProverCrs { scalar_multiplication::generate_pippenger_point_table(monomials_.get(), monomials_.get(), num_points); }; - typename Curve::AffineElement* get_monomial_points() { return monomials_.get(); } + std::span get_monomial_points() { return { monomials_.get(), num_points * 2 }; } [[nodiscard]] size_t get_monomial_size() const { return num_points; } @@ -86,7 +87,7 @@ template <> class FileVerifierCrs : public VerifierCrs get_monomial_points() const 
override; size_t get_monomial_size() const override; Curve::AffineElement get_g1_identity() const override { return g1_identity; }; diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp b/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp index df243d73785..d33872ad1bd 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp @@ -29,8 +29,8 @@ TEST(reference_string, mem_bn254_file_consistency) EXPECT_EQ(mem_prover_crs->get_monomial_size(), file_prover_crs->get_monomial_size()); - EXPECT_EQ(memcmp(mem_prover_crs->get_monomial_points(), - file_prover_crs->get_monomial_points(), + EXPECT_EQ(memcmp(mem_prover_crs->get_monomial_points().data(), + file_prover_crs->get_monomial_points().data(), sizeof(g1::affine_element) * 1024 * 2), 0); @@ -60,8 +60,8 @@ TEST(reference_string, DISABLED_mem_grumpkin_file_consistency) EXPECT_EQ(mem_prover_crs->get_monomial_size(), file_prover_crs->get_monomial_size()); - EXPECT_EQ(memcmp(mem_prover_crs->get_monomial_points(), - file_prover_crs->get_monomial_points(), + EXPECT_EQ(memcmp(mem_prover_crs->get_monomial_points().data(), + file_prover_crs->get_monomial_points().data(), sizeof(Grumpkin::AffineElement) * 1024 * 2), 0); @@ -69,8 +69,8 @@ TEST(reference_string, DISABLED_mem_grumpkin_file_consistency) auto mem_verifier_crs = file_crs.get_verifier_crs(); EXPECT_EQ(mem_verifier_crs->get_g1_identity(), file_verifier_crs->get_g1_identity()); - EXPECT_EQ(memcmp(file_verifier_crs->get_monomial_points(), - mem_verifier_crs->get_monomial_points(), + EXPECT_EQ(memcmp(file_verifier_crs->get_monomial_points().data(), + mem_verifier_crs->get_monomial_points().data(), sizeof(Grumpkin::AffineElement) * 1024 * 2), 0); } diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/mem_grumpkin_crs_factory.cpp b/barretenberg/cpp/src/barretenberg/srs/factories/mem_grumpkin_crs_factory.cpp index 
8d0741cc920..f0c3600c869 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/mem_grumpkin_crs_factory.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/mem_grumpkin_crs_factory.cpp @@ -25,7 +25,10 @@ class MemVerifierCrs : public VerifierCrs { } virtual ~MemVerifierCrs() = default; - Grumpkin::AffineElement* get_monomial_points() const override { return monomials_.get(); } + std::span get_monomial_points() const override + { + return { monomials_.get(), num_points * 2 }; + } size_t get_monomial_size() const override { return num_points; } Grumpkin::AffineElement get_g1_identity() const override { return monomials_[0]; }; diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/mem_prover_crs.hpp b/barretenberg/cpp/src/barretenberg/srs/factories/mem_prover_crs.hpp index 20fbcda9e54..44dbb961f74 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/mem_prover_crs.hpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/mem_prover_crs.hpp @@ -16,7 +16,10 @@ template class MemProverCrs : public ProverCrs { scalar_multiplication::generate_pippenger_point_table(monomials_.get(), monomials_.get(), num_points); } - typename Curve::AffineElement* get_monomial_points() override { return monomials_.get(); } + std::span get_monomial_points() override + { + return { monomials_.get(), num_points * 2 }; + } size_t get_monomial_size() const override { return num_points; } diff --git a/barretenberg/cpp/src/barretenberg/srs/scalar_multiplication.test.cpp b/barretenberg/cpp/src/barretenberg/srs/scalar_multiplication.test.cpp index 247911339ae..e7db4f2411b 100644 --- a/barretenberg/cpp/src/barretenberg/srs/scalar_multiplication.test.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/scalar_multiplication.test.cpp @@ -70,7 +70,7 @@ TYPED_TEST(ScalarMultiplicationTests, ReduceBucketsSimple) TestFixture::read_transcript_g2(TestFixture::SRS_PATH); } auto crs = srs::factories::FileProverCrs(num_points / 2, TestFixture::SRS_PATH); - auto* monomials = 
crs.get_monomial_points(); + std::span monomials = crs.get_monomial_points(); std::vector point_schedule(bb::scalar_multiplication::point_table_size(num_points / 2)); std::array bucket_empty_status; @@ -681,7 +681,8 @@ TYPED_TEST(ScalarMultiplicationTests, OversizedInputs) } scalar_multiplication::pippenger_runtime_state state(target_degree); - Element first = scalar_multiplication::pippenger({ scalars, /*size*/ target_degree }, monomials, state); + Element first = scalar_multiplication::pippenger( + { scalars, /*size*/ target_degree }, { monomials, /*size*/ 2 * target_degree }, state); first = first.normalize(); for (size_t i = 0; i < target_degree; ++i) { @@ -689,7 +690,8 @@ TYPED_TEST(ScalarMultiplicationTests, OversizedInputs) } scalar_multiplication::pippenger_runtime_state state_2(target_degree); - Element second = scalar_multiplication::pippenger({ scalars, /*size*/ target_degree }, monomials, state_2); + Element second = scalar_multiplication::pippenger( + { scalars, /*size*/ target_degree }, { monomials, /*size*/ 2 * target_degree }, state_2); second = second.normalize(); EXPECT_EQ((first.z == second.z), true); @@ -732,7 +734,8 @@ TYPED_TEST(ScalarMultiplicationTests, UndersizedInputs) scalar_multiplication::pippenger_runtime_state state(num_points); - Element result = scalar_multiplication::pippenger({ scalars, /*size*/ num_points }, points, state); + Element result = scalar_multiplication::pippenger( + { scalars, /*size*/ num_points }, { points, /*size*/ num_points * 2 }, state); result = result.normalize(); aligned_free(scalars); @@ -769,7 +772,8 @@ TYPED_TEST(ScalarMultiplicationTests, PippengerSmall) scalar_multiplication::generate_pippenger_point_table(points, points, num_points); scalar_multiplication::pippenger_runtime_state state(num_points); - Element result = scalar_multiplication::pippenger({ scalars, /*size*/ num_points }, points, state); + Element result = scalar_multiplication::pippenger( + { scalars, /*size*/ num_points }, { points, 
/*size*/ num_points * 2 }, state); result = result.normalize(); aligned_free(scalars); @@ -808,7 +812,8 @@ TYPED_TEST(ScalarMultiplicationTests, PippengerEdgeCaseDbl) } scalar_multiplication::generate_pippenger_point_table(points, points, num_points); scalar_multiplication::pippenger_runtime_state state(num_points); - Element result = scalar_multiplication::pippenger({ scalars, /*size*/ num_points }, points, state); + Element result = scalar_multiplication::pippenger( + { scalars, /*size*/ num_points }, { points, /*size*/ num_points * 2 }, state); result = result.normalize(); aligned_free(scalars); @@ -866,7 +871,8 @@ TYPED_TEST(ScalarMultiplicationTests, PippengerShortInputs) scalar_multiplication::generate_pippenger_point_table(points.get(), points.get(), num_points); scalar_multiplication::pippenger_runtime_state state(num_points); - Element result = scalar_multiplication::pippenger({ scalars, /*size*/ num_points }, points.get(), state); + Element result = scalar_multiplication::pippenger( + { scalars, /*size*/ num_points }, { points.get(), /*size*/ num_points * 2 }, state); result = result.normalize(); aligned_free(scalars); @@ -902,8 +908,8 @@ TYPED_TEST(ScalarMultiplicationTests, PippengerUnsafe) scalar_multiplication::generate_pippenger_point_table(points.get(), points.get(), num_points); scalar_multiplication::pippenger_runtime_state state(num_points); - Element result = - scalar_multiplication::pippenger_unsafe({ scalars, /*size*/ num_points }, points.get(), state); + Element result = scalar_multiplication::pippenger_unsafe( + { scalars, /*size*/ num_points }, { points.get(), /* size*/ num_points * 2 }, state); result = result.normalize(); aligned_free(scalars); @@ -960,7 +966,8 @@ TYPED_TEST(ScalarMultiplicationTests, PippengerUnsafeShortInputs) scalar_multiplication::generate_pippenger_point_table(points, points, num_points); scalar_multiplication::pippenger_runtime_state state(num_points); - Element result = scalar_multiplication::pippenger_unsafe({ 
scalars, /*size*/ num_points }, points, state); + Element result = scalar_multiplication::pippenger_unsafe( + { scalars, /*size*/ num_points }, { points, num_points * 2 + 1 }, state); result = result.normalize(); aligned_free(scalars); @@ -997,7 +1004,8 @@ TYPED_TEST(ScalarMultiplicationTests, PippengerOne) scalar_multiplication::generate_pippenger_point_table(points, points, num_points); scalar_multiplication::pippenger_runtime_state state(num_points); - Element result = scalar_multiplication::pippenger({ scalars, /*size*/ num_points }, points, state); + Element result = scalar_multiplication::pippenger( + { scalars, /*size*/ num_points }, { points, /*size*/ num_points * 2 }, state); result = result.normalize(); aligned_free(scalars); @@ -1018,7 +1026,7 @@ TYPED_TEST(ScalarMultiplicationTests, PippengerZeroPoints) AffineElement* points = (AffineElement*)aligned_alloc(32, sizeof(AffineElement) * (2 + 1)); scalar_multiplication::pippenger_runtime_state state(0); - Element result = scalar_multiplication::pippenger({ scalars, /*size*/ 0 }, points, state); + Element result = scalar_multiplication::pippenger({ scalars, /*size*/ 0 }, { points, /*size*/ 0 }, state); aligned_free(scalars); aligned_free(points); @@ -1043,7 +1051,7 @@ TYPED_TEST(ScalarMultiplicationTests, PippengerMulByZero) scalar_multiplication::generate_pippenger_point_table(points, points, 1); scalar_multiplication::pippenger_runtime_state state(1); - Element result = scalar_multiplication::pippenger({ scalars, /*size*/ 1 }, points, state); + Element result = scalar_multiplication::pippenger({ scalars, /*size*/ 1 }, { points, /*size*/ 2 }, state); aligned_free(scalars); aligned_free(points); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/verifier_commitment_key.hpp b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/verifier_commitment_key.hpp index b25e5369adb..f90e7540d36 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/verifier_commitment_key.hpp +++ 
b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/verifier_commitment_key.hpp @@ -29,7 +29,8 @@ template class VerifierCommitmentKey { : g1_identity(Commitment(native_pcs_verification_key->get_g1_identity())) { - auto* native_points = native_pcs_verification_key->get_monomial_points(); + auto native_points = native_pcs_verification_key->get_monomial_points(); + ASSERT(num_points * 2 <= native_points.size()); for (size_t i = 0; i < num_points * 2; i += 2) { monomial_points.emplace_back(Commitment(native_points[i])); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/verifier_commitment_key.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/verifier_commitment_key.test.cpp index a49637a88aa..86523f98b2a 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/verifier_commitment_key.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/verifier_commitment_key.test.cpp @@ -26,7 +26,7 @@ template class RecursiveVeriferCommitmentKeyTest : public testi auto native_vk = std::make_shared(num_points); auto recursive_vk = std::make_shared(&builder, num_points, native_vk); EXPECT_EQ(native_vk->get_g1_identity(), recursive_vk->get_g1_identity().get_value()); - auto* native_monomial_points = native_vk->get_monomial_points(); + auto native_monomial_points = native_vk->get_monomial_points(); auto recursive_monomial_points = recursive_vk->get_monomial_points(); // The recursive verifier commitment key only stores the SRS so we verify against the even indices of the native From 807adeb79332b40280e5f68258460377541dbd74 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 5 Sep 2024 02:20:38 +0000 Subject: [PATCH 13/18] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "5c80b2b96b" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "5c80b2b96b" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index d5e16ebd810..85c9e0a5eb0 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 84191703c27890b755124db6255e6a3f0747bb42 - parent = efad298f60a86094394fd4ac67fbf108fba110f9 + commit = 5c80b2b96beab914a7d443310db06e2fb50c1bcd + parent = f1746999ea12cc8117efd5a0c3b2ec5d80196343 method = merge cmdver = 0.4.6 From feff126409a81148daea517e5d4acc7a6ec458c5 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 5 Sep 2024 02:21:13 +0000 Subject: [PATCH 14/18] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..12c38b63c6c 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.52.0", directory="noir-projects/noir-protocol-circuits/crates/types" } From 05cfa3dc2ac24ce07ade4266e8a7e54e8b0ce3a7 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 5 Sep 2024 02:21:13 +0000 Subject: [PATCH 15/18] git_subrepo.sh: Fix parent in .gitrepo file. 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index f3610b04171..26d82eaa2d9 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ remote = https://github.com/AztecProtocol/aztec-nr commit = 1199c594a7e90c126bd291ee57d86b16b26816c9 method = merge cmdver = 0.4.6 - parent = ffb0f5dcba5efb6f664c621f2e4840cfcfe6cda0 + parent = 2f743e3f68c13216afc30fb572e0681126185554 From 1e352f7380cbd6ecc910e4eff2f7ca0f1bbedc4b Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 5 Sep 2024 02:21:18 +0000 Subject: [PATCH 16/18] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "52c0befb2" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "52c0befb2" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 26d82eaa2d9..bd79ab30bae 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 1199c594a7e90c126bd291ee57d86b16b26816c9 + commit = 52c0befb2bcd0e704c150cd71aa4bae366782136 method = merge cmdver = 0.4.6 - parent = 2f743e3f68c13216afc30fb572e0681126185554 + parent = 5abe573bebf9d2c3e0cd8db1b8c060fff980105f diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 12c38b63c6c..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { 
git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.52.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From 7f029007365b57c06699914f97b93d0891d2a6f1 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Thu, 5 Sep 2024 09:40:57 +0100 Subject: [PATCH 17/18] feat: ultra keccak honk verifier (#8261) Enable the new Keccak Ultra Honk flavor for the BlockRootRollup circuit, deploy a Solidity verifier for it and tell the rollup to use it --------- Co-authored-by: Santiago Palladino --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 9 +- yarn-project/Earthfile | 2 +- yarn-project/bb-prover/package.json | 4 +- yarn-project/bb-prover/src/bb/cli.ts | 36 +----- yarn-project/bb-prover/src/bb/execute.ts | 37 +++--- yarn-project/bb-prover/src/honk.ts | 32 +++++ .../src/prover/bb_private_kernel_prover.ts | 20 ++- .../bb-prover/src/prover/bb_prover.ts | 69 ++++------ .../verification_key/verification_key_data.ts | 8 +- .../bb-prover/src/verifier/bb_verifier.ts | 11 +- yarn-project/bb-prover/tsconfig.json | 3 + yarn-project/circuits.js/src/structs/proof.ts | 28 +++- .../src/structs/verification_key.ts | 10 +- .../integration_proof_verification.test.ts | 120 +++++++++--------- .../src/e2e_prover/e2e_prover_test.ts | 2 +- .../end-to-end/src/e2e_prover/full.test.ts | 14 +- .../src/fixtures/dumps/block_result.json | 23 +--- .../src/type_conversion.ts | 9 +- .../src/orchestrator/orchestrator.ts | 7 +- yarn-project/yarn.lock | 2 + 20 files changed, 230 insertions(+), 216 deletions(-) create mode 100644 yarn-project/bb-prover/src/honk.ts diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index f2dee5b9055..07a62fc99cb 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -1286,7 +1286,7 @@ void prove_honk_output_all(const std::string& bytecodePath, 
using VerificationKey = Flavor::VerificationKey; bool honk_recursion = false; - if constexpr (IsAnyOf) { + if constexpr (IsAnyOf) { honk_recursion = true; } @@ -1478,12 +1478,12 @@ int main(int argc, char* argv[]) } else if (command == "prove_keccak_ultra_honk") { std::string output_path = get_option(args, "-o", "./proofs/proof"); prove_honk(bytecode_path, witness_path, output_path); - } else if (command == "prove_keccak_ultra_honk_output_all") { + } else if (command == "prove_ultra_keccak_honk_output_all") { std::string output_path = get_option(args, "-o", "./proofs/proof"); prove_honk_output_all(bytecode_path, witness_path, output_path); } else if (command == "verify_ultra_honk") { return verify_honk(proof_path, vk_path) ? 0 : 1; - } else if (command == "verify_keccak_ultra_honk") { + } else if (command == "verify_ultra_keccak_honk") { return verify_honk(proof_path, vk_path) ? 0 : 1; } else if (command == "write_vk_ultra_honk") { std::string output_path = get_option(args, "-o", "./target/vk"); @@ -1508,6 +1508,9 @@ int main(int argc, char* argv[]) } else if (command == "vk_as_fields_mega_honk") { std::string output_path = get_option(args, "-o", vk_path + "_fields.json"); vk_as_fields_honk(vk_path, output_path); + } else if (command == "vk_as_fields_ultra_keccak_honk") { + std::string output_path = get_option(args, "-o", vk_path + "_fields.json"); + vk_as_fields_honk(vk_path, output_path); } else { std::cerr << "Unknown command: " << command << "\n"; return 1; diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 0098093957e..07be9672103 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -116,7 +116,7 @@ protocol-verification-keys: rollup-verifier-contract: FROM +bb-cli COPY --dir +protocol-verification-keys/usr/src/bb /usr/src - RUN --entrypoint write-contract -c RootRollupArtifact -n UltraHonkVerifier.sol + RUN --entrypoint write-contract -c BlockRootRollupArtifact -n UltraHonkVerifier.sol SAVE ARTIFACT /usr/src/bb /usr/src/bb txe: 
diff --git a/yarn-project/bb-prover/package.json b/yarn-project/bb-prover/package.json index 37030d36b12..3fc9ae17d27 100644 --- a/yarn-project/bb-prover/package.json +++ b/yarn-project/bb-prover/package.json @@ -73,6 +73,7 @@ "tslib": "^2.4.0" }, "devDependencies": { + "@aztec/ethereum": "workspace:^", "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", "@types/memdown": "^3.0.0", @@ -81,7 +82,8 @@ "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", "ts-node": "^10.9.1", - "typescript": "^5.0.4" + "typescript": "^5.0.4", + "viem": "^2.7.15" }, "files": [ "dest", diff --git a/yarn-project/bb-prover/src/bb/cli.ts b/yarn-project/bb-prover/src/bb/cli.ts index ca26dabb535..676882d8819 100644 --- a/yarn-project/bb-prover/src/bb/cli.ts +++ b/yarn-project/bb-prover/src/bb/cli.ts @@ -25,38 +25,6 @@ export function getProgram(log: LogFn): Command { log(Object.keys(ProtocolCircuitArtifacts).reduce((prev: string, x: string) => prev.concat(`\n${x}`))); }); - program - .command('write-pk') - .description('Generates the proving key for the specified circuit') - .requiredOption( - '-w, --working-directory ', - 'A directory to use for storing input/output files', - BB_WORKING_DIRECTORY, - ) - .requiredOption('-b, --bb-path ', 'The path to the BB binary', BB_BINARY_PATH) - .requiredOption('-c, --circuit ', 'The name of a protocol circuit') - .action(async options => { - const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifact]; - if (!compiledCircuit) { - log(`Failed to find circuit ${options.circuit}`); - return; - } - try { - await fs.access(options.workingDirectory, fs.constants.W_OK); - } catch (error) { - log(`Working directory does not exist`); - return; - } - await generateKeyForNoirCircuit( - options.bbPath, - options.workingDirectory, - options.circuit, - compiledCircuit, - 'pk', - log, - ); - }); - program .command('write-vk') .description('Generates the verification key for the specified circuit') @@ -67,6 +35,7 @@ export function 
getProgram(log: LogFn): Command { ) .requiredOption('-b, --bb-path ', 'The path to the BB binary', BB_BINARY_PATH) .requiredOption('-c, --circuit ', 'The name of a protocol circuit') + .requiredOption('-f, --flavor ', 'The name of the verification key flavor', 'ultra_honk') .action(async options => { const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifact]; if (!compiledCircuit) { @@ -84,7 +53,8 @@ export function getProgram(log: LogFn): Command { options.workingDirectory, options.circuit, compiledCircuit, - 'vk', + options.flavor, + // (options.circuit as ServerProtocolArtifact) === 'RootRollupArtifact' ? 'ultra_keccak_honk' : 'ultra_honk', log, ); }); diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index a26a31ba5df..5d941a5811e 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -8,6 +8,8 @@ import * as proc from 'child_process'; import * as fs from 'fs/promises'; import { basename, dirname, join } from 'path'; +import { type UltraHonkFlavor } from '../honk.js'; + export const VK_FILENAME = 'vk'; export const VK_FIELDS_FILENAME = 'vk_fields.json'; export const PROOF_FILENAME = 'proof'; @@ -113,7 +115,7 @@ export async function generateKeyForNoirCircuit( workingDirectory: string, circuitName: string, compiledCircuit: NoirCompiledCircuit, - key: 'vk' | 'pk', + flavor: UltraHonkFlavor, log: LogFn, force = false, ): Promise { @@ -123,7 +125,7 @@ export async function generateKeyForNoirCircuit( // The bytecode hash file is also written here as /workingDirectory/pk/BaseParityArtifact/bytecode-hash // The bytecode is written to e.g. 
/workingDirectory/pk/BaseParityArtifact/bytecode // The bytecode is removed after the key is generated, leaving just the hash file - const circuitOutputDirectory = `${workingDirectory}/${key}/${circuitName}`; + const circuitOutputDirectory = `${workingDirectory}/vk/${circuitName}`; const outputPath = `${circuitOutputDirectory}`; const bytecodeHash = sha256(bytecode); @@ -148,11 +150,11 @@ export async function generateKeyForNoirCircuit( // args are the output path and the input bytecode path const args = ['-o', `${outputPath}/${VK_FILENAME}`, '-b', bytecodePath]; const timer = new Timer(); - let result = await executeBB(pathToBB, `write_${key}_ultra_honk`, args, log); + let result = await executeBB(pathToBB, `write_vk_${flavor}`, args, log); // If we succeeded and the type of key if verification, have bb write the 'fields' version too - if (result.status == BB_RESULT.SUCCESS && key === 'vk') { + if (result.status == BB_RESULT.SUCCESS) { const asFieldsArgs = ['-k', `${outputPath}/${VK_FILENAME}`, '-o', `${outputPath}/${VK_FIELDS_FILENAME}`, '-v']; - result = await executeBB(pathToBB, `vk_as_fields_ultra_honk`, asFieldsArgs, log); + result = await executeBB(pathToBB, `vk_as_fields_${flavor}`, asFieldsArgs, log); } const duration = timer.ms(); @@ -160,8 +162,8 @@ export async function generateKeyForNoirCircuit( return { status: BB_RESULT.SUCCESS, durationMs: duration, - pkPath: key === 'pk' ? outputPath : undefined, - vkPath: key === 'vk' ? outputPath : undefined, + pkPath: undefined, + vkPath: outputPath, proofPath: undefined, }; } @@ -179,8 +181,8 @@ export async function generateKeyForNoirCircuit( return { status: BB_RESULT.ALREADY_PRESENT, durationMs: 0, - pkPath: key === 'pk' ? outputPath : undefined, - vkPath: key === 'vk' ? 
outputPath : undefined, + pkPath: undefined, + vkPath: outputPath, }; } @@ -261,6 +263,7 @@ export async function computeVerificationKey( workingDirectory: string, circuitName: string, bytecode: Buffer, + flavor: UltraHonkFlavor, log: LogFn, ): Promise { // Check that the working directory exists @@ -293,7 +296,7 @@ export async function computeVerificationKey( }; let result = await executeBB( pathToBB, - 'write_vk_ultra_honk', + `write_vk_${flavor}`, ['-o', outputPath, '-b', bytecodePath, '-v'], logFunction, ); @@ -302,7 +305,7 @@ export async function computeVerificationKey( } result = await executeBB( pathToBB, - 'vk_as_fields_ultra_honk', + `vk_as_fields_${flavor}`, ['-o', outputPath + '_fields.json', '-k', outputPath, '-v'], logFunction, ); @@ -343,6 +346,7 @@ export async function generateProof( circuitName: string, bytecode: Buffer, inputWitnessFile: string, + flavor: UltraHonkFlavor, log: LogFn, ): Promise { // Check that the working directory exists @@ -355,7 +359,7 @@ export async function generateProof( // The bytecode is written to e.g. /workingDirectory/BaseParityArtifact-bytecode const bytecodePath = `${workingDirectory}/${circuitName}-bytecode`; - // The proof is written to e.g. /workingDirectory/proof + // The proof is written to e.g. 
/workingDirectory/ultra_honk/proof const outputPath = `${workingDirectory}`; const binaryPresent = await fs @@ -374,7 +378,7 @@ export async function generateProof( const logFunction = (message: string) => { log(`${circuitName} BB out - ${message}`); }; - const result = await executeBB(pathToBB, 'prove_ultra_honk_output_all', args, logFunction); + const result = await executeBB(pathToBB, `prove_${flavor}_output_all`, args, logFunction); const duration = timer.ms(); if (result.status == BB_RESULT.SUCCESS) { @@ -599,9 +603,10 @@ export async function verifyProof( pathToBB: string, proofFullPath: string, verificationKeyPath: string, + ultraHonkFlavor: UltraHonkFlavor, log: LogFn, ): Promise { - return await verifyProofInternal(pathToBB, proofFullPath, verificationKeyPath, 'verify_ultra_honk', log); + return await verifyProofInternal(pathToBB, proofFullPath, verificationKeyPath, `verify_${ultraHonkFlavor}`, log); } /** @@ -674,7 +679,7 @@ async function verifyProofInternal( pathToBB: string, proofFullPath: string, verificationKeyPath: string, - command: 'verify_ultra_honk' | 'avm_verify', + command: 'verify_ultra_honk' | 'verify_ultra_keccak_honk' | 'avm_verify', log: LogFn, ): Promise { const binaryPresent = await fs @@ -851,7 +856,7 @@ export async function generateContractForCircuit( workingDirectory, circuitName, compiledCircuit, - 'vk', + 'ultra_keccak_honk', log, force, ); diff --git a/yarn-project/bb-prover/src/honk.ts b/yarn-project/bb-prover/src/honk.ts new file mode 100644 index 00000000000..02000438bce --- /dev/null +++ b/yarn-project/bb-prover/src/honk.ts @@ -0,0 +1,32 @@ +import { RECURSIVE_PROOF_LENGTH, VERIFICATION_KEY_LENGTH_IN_FIELDS } from '@aztec/circuits.js'; +import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; + +export type UltraHonkFlavor = 'ultra_honk' | 'ultra_keccak_honk'; + +const UltraKeccakHonkCircuits = ['BlockRootRollupArtifact'] as const; +type UltraKeccakHonkCircuits = (typeof UltraKeccakHonkCircuits)[number]; 
+type UltraHonkCircuits = Exclude; + +export function getUltraHonkFlavorForCircuit(artifact: UltraKeccakHonkCircuits): 'ultra_keccak_honk'; +export function getUltraHonkFlavorForCircuit(artifact: UltraHonkCircuits): 'ultra_honk'; +export function getUltraHonkFlavorForCircuit(artifact: ProtocolArtifact): UltraHonkFlavor; +export function getUltraHonkFlavorForCircuit(artifact: ProtocolArtifact): UltraHonkFlavor { + return isUltraKeccakHonkCircuit(artifact) ? 'ultra_keccak_honk' : 'ultra_honk'; +} + +function isUltraKeccakHonkCircuit(artifact: ProtocolArtifact): artifact is UltraKeccakHonkCircuits { + return UltraKeccakHonkCircuits.includes(artifact as UltraKeccakHonkCircuits); +} + +// TODO (alexg) remove these once UltraKeccakHonk proofs are the same size as regular UltraHonk proofs +// see https://github.com/AztecProtocol/aztec-packages/pull/8243 +export function getExpectedVerificationKeyLength(artifact: ProtocolArtifact): number { + return getUltraHonkFlavorForCircuit(artifact) === 'ultra_keccak_honk' ? 120 : VERIFICATION_KEY_LENGTH_IN_FIELDS; +} + +export function getExpectedProofLength(artifact: UltraKeccakHonkCircuits): 393; +export function getExpectedProofLength(artifact: UltraHonkCircuits): typeof RECURSIVE_PROOF_LENGTH; +export function getExpectedProofLength(artifact: ProtocolArtifact): number; +export function getExpectedProofLength(artifact: ProtocolArtifact): number { + return isUltraKeccakHonkCircuit(artifact) ? 
393 : RECURSIVE_PROOF_LENGTH; +} diff --git a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts index 885eb37007a..52b26ba4535 100644 --- a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts @@ -58,6 +58,7 @@ import { verifyProof, } from '../bb/execute.js'; import { type BBConfig } from '../config.js'; +import { type UltraHonkFlavor, getUltraHonkFlavorForCircuit } from '../honk.js'; import { mapProtocolArtifactNameToCircuitName } from '../stats.js'; import { extractVkData } from '../verification_key/verification_key_data.js'; @@ -213,7 +214,12 @@ export class BBNativePrivateKernelProver implements PrivateKernelProver { this.log.debug(`${circuitType} BB out - ${message}`); }; - const result = await this.verifyProofFromKey(verificationKey.keyAsBytes, proof, logFunction); + const result = await this.verifyProofFromKey( + getUltraHonkFlavorForCircuit(circuitType), + verificationKey.keyAsBytes, + proof, + logFunction, + ); if (result.status === BB_RESULT.FAILURE) { const errorMessage = `Failed to verify ${circuitType} proof!`; @@ -224,6 +230,7 @@ export class BBNativePrivateKernelProver implements PrivateKernelProver { } private async verifyProofFromKey( + flavor: UltraHonkFlavor, verificationKey: Buffer, proof: Proof, logFunction: (message: string) => void = () => {}, @@ -234,7 +241,7 @@ export class BBNativePrivateKernelProver implements PrivateKernelProver { await fs.writeFile(proofFileName, proof.buffer); await fs.writeFile(verificationKeyPath, verificationKey); - return await verifyProof(this.bbBinaryPath, proofFileName, verificationKeyPath!, logFunction); + return await verifyProof(this.bbBinaryPath, proofFileName, verificationKeyPath!, flavor, logFunction); }; return await this.runInDirectory(operation); } @@ -301,7 +308,14 @@ export class BBNativePrivateKernelProver implements PrivateKernelProver { const timer 
= new Timer(); - const vkResult = await computeVerificationKey(this.bbBinaryPath, directory, circuitType, bytecode, this.log.debug); + const vkResult = await computeVerificationKey( + this.bbBinaryPath, + directory, + circuitType, + bytecode, + circuitType === 'App' ? 'ultra_honk' : getUltraHonkFlavorForCircuit(circuitType), + this.log.debug, + ); if (vkResult.status === BB_RESULT.FAILURE) { this.log.error(`Failed to generate proof for ${circuitType}${dbgCircuitName}: ${vkResult.reason}`); diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts index 2c38db4869e..4bbf12607a8 100644 --- a/yarn-project/bb-prover/src/prover/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_prover.ts @@ -77,12 +77,12 @@ import * as fs from 'fs/promises'; import * as path from 'path'; import { + type BBFailure, type BBSuccess, BB_RESULT, PROOF_FIELDS_FILENAME, PROOF_FILENAME, VK_FILENAME, - type VerificationFunction, generateAvmProof, generateKeyForNoirCircuit, generateProof, @@ -92,6 +92,7 @@ import { writeProofAsFields, } from '../bb/execute.js'; import type { ACVMConfig, BBConfig } from '../config.js'; +import { type UltraHonkFlavor, getExpectedProofLength, getUltraHonkFlavorForCircuit } from '../honk.js'; import { ProverInstrumentation } from '../instrumentation.js'; import { PublicKernelArtifactMapping } from '../mappings/mappings.js'; import { mapProtocolArtifactNameToCircuitName } from '../stats.js'; @@ -99,11 +100,6 @@ import { extractAvmVkData, extractVkData } from '../verification_key/verificatio const logger = createDebugLogger('aztec:bb-prover'); -const CIRCUITS_WITHOUT_AGGREGATION: Set = new Set([ - 'BaseParityArtifact', - 'EmptyNestedArtifact', -]); - export interface BBProverConfig extends BBConfig, ACVMConfig { // list of circuits supported by this prover. 
defaults to all circuits if empty circuitFilter?: ServerProtocolArtifact[]; @@ -113,8 +109,8 @@ export interface BBProverConfig extends BBConfig, ACVMConfig { * Prover implementation that uses barretenberg native proving */ export class BBNativeRollupProver implements ServerCircuitProver { - private verificationKeys: Map> = new Map< - ServerProtocolArtifact, + private verificationKeys = new Map< + `ultra${'_keccak_' | '_'}honk_${ServerProtocolArtifact}`, Promise >(); @@ -149,7 +145,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { const { circuitOutput, proof } = await this.createRecursiveProof( inputs, 'BaseParityArtifact', - RECURSIVE_PROOF_LENGTH, + getExpectedProofLength('BaseParityArtifact'), convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, ); @@ -235,6 +231,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { ); await this.verifyWithKey( + getUltraHonkFlavorForCircuit(kernelOps.artifact), kernelRequest.inputs.previousKernel.vk, kernelRequest.inputs.previousKernel.proof.binaryProof, ); @@ -539,6 +536,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { circuitType, Buffer.from(artifact.bytecode, 'base64'), outputWitnessFile, + getUltraHonkFlavorForCircuit(circuitType), logger.debug, ); @@ -682,9 +680,8 @@ export class BBNativeRollupProver implements ServerCircuitProver { this.instrumentation.recordSize('circuitSize', 'tubeCircuit', tubeVK.circuitSize); // Sanity check the tube proof (can be removed later) - await this.verifyWithKey(tubeVK, tubeProof.binaryProof); + await this.verifyWithKey('ultra_honk', tubeVK, tubeProof.binaryProof); - // TODO(#7369): properly time tube construction logger.info( `Generated proof for tubeCircuit in ${Math.ceil(provingResult.durationMs)} ms, size: ${ tubeProof.proof.length @@ -762,22 +759,25 @@ export class BBNativeRollupProver implements ServerCircuitProver { */ public async verifyProof(circuitType: ServerProtocolArtifact, proof: Proof) { 
const verificationKey = await this.getVerificationKeyDataForCircuit(circuitType); - // info(`vkey in: ${verificationKey.keyAsFields.key}`); - return await this.verifyWithKey(verificationKey, proof); + return await this.verifyWithKey(getUltraHonkFlavorForCircuit(circuitType), verificationKey, proof); } public async verifyAvmProof(proof: Proof, verificationKey: AvmVerificationKeyData) { - return await this.verifyWithKeyInternal(proof, verificationKey, verifyAvmProof); + return await this.verifyWithKeyInternal(proof, verificationKey, (proofPath, vkPath) => + verifyAvmProof(this.config.bbBinaryPath, proofPath, vkPath, logger.debug), + ); } - public async verifyWithKey(verificationKey: VerificationKeyData, proof: Proof) { - return await this.verifyWithKeyInternal(proof, verificationKey, verifyProof); + public async verifyWithKey(flavor: UltraHonkFlavor, verificationKey: VerificationKeyData, proof: Proof) { + return await this.verifyWithKeyInternal(proof, verificationKey, (proofPath, vkPath) => + verifyProof(this.config.bbBinaryPath, proofPath, vkPath, flavor, logger.debug), + ); } private async verifyWithKeyInternal( proof: Proof, verificationKey: { keyAsBytes: Buffer }, - verificationFunction: VerificationFunction, + verificationFunction: (proofPath: string, vkPath: string) => Promise, ) { const operation = async (bbWorkingDirectory: string) => { const proofFileName = path.join(bbWorkingDirectory, PROOF_FILENAME); @@ -786,16 +786,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { await fs.writeFile(proofFileName, proof.buffer); await fs.writeFile(verificationKeyPath, verificationKey.keyAsBytes); - const logFunction = (message: string) => { - logger.verbose(`BB out - ${message}`); - }; - - const result = await verificationFunction( - this.config.bbBinaryPath, - proofFileName, - verificationKeyPath!, - logFunction, - ); + const result = await verificationFunction(proofFileName, verificationKeyPath!); if (result.status === BB_RESULT.FAILURE) { const 
errorMessage = `Failed to verify proof from key!`; @@ -886,14 +877,15 @@ export class BBNativeRollupProver implements ServerCircuitProver { * @returns The verification key data */ private async getVerificationKeyDataForCircuit(circuitType: ServerProtocolArtifact): Promise { - let promise = this.verificationKeys.get(circuitType); + const flavor = getUltraHonkFlavorForCircuit(circuitType); + let promise = this.verificationKeys.get(`${flavor}_${circuitType}`); if (!promise) { promise = generateKeyForNoirCircuit( this.config.bbBinaryPath, this.config.bbWorkingDirectory, circuitType, ServerCircuitArtifacts[circuitType], - 'vk', + flavor, logger.debug, ).then(result => { if (result.status === BB_RESULT.FAILURE) { @@ -901,7 +893,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { } return extractVkData(result.vkPath!); }); - this.verificationKeys.set(circuitType, promise); + this.verificationKeys.set(`${flavor}_${circuitType}`, promise); } const vk = await promise; return vk.clone(); @@ -916,10 +908,11 @@ export class BBNativeRollupProver implements ServerCircuitProver { filePath: string, circuitType: ServerProtocolArtifact, ): Promise { - let promise = this.verificationKeys.get(circuitType); + const flavor = getUltraHonkFlavorForCircuit(circuitType); + let promise = this.verificationKeys.get(`${flavor}_${circuitType}`); if (!promise) { promise = extractVkData(filePath); - this.verificationKeys.set(circuitType, promise); + this.verificationKeys.set(`${flavor}_${circuitType}`, promise); } return promise; } @@ -943,20 +936,14 @@ export class BBNativeRollupProver implements ServerCircuitProver { fs.readFile(proofFieldsFilename, { encoding: 'utf-8' }), ]); const json = JSON.parse(proofString); - const vkData = await this.verificationKeys.get(circuitType); - if (!vkData) { - throw new Error(`Invalid verification key for ${circuitType}`); - } + const vkData = await this.getVerificationKeyDataForCircuit(circuitType); + // TODO (alexg) is this needed anymore? 
Shouldn't I just use the vkData.numPublicInputs? const numPublicInputs = vkData.numPublicInputs - AGGREGATION_OBJECT_LENGTH; const fieldsWithoutPublicInputs = json .slice(0, 3) .map(Fr.fromString) .concat(json.slice(3 + numPublicInputs).map(Fr.fromString)); - logger.debug( - `num pub inputs ${vkData.numPublicInputs} and without aggregation ${CIRCUITS_WITHOUT_AGGREGATION.has( - circuitType, - )}`, - ); + logger.debug(`num pub inputs ${vkData.numPublicInputs} circuit=${circuitType}`); const proof = new RecursiveProof( fieldsWithoutPublicInputs, diff --git a/yarn-project/bb-prover/src/verification_key/verification_key_data.ts b/yarn-project/bb-prover/src/verification_key/verification_key_data.ts index b5f4bacb1fa..908e49b5aee 100644 --- a/yarn-project/bb-prover/src/verification_key/verification_key_data.ts +++ b/yarn-project/bb-prover/src/verification_key/verification_key_data.ts @@ -3,11 +3,9 @@ import { AvmVerificationKeyAsFields, AvmVerificationKeyData, Fr, - VERIFICATION_KEY_LENGTH_IN_FIELDS, VerificationKeyAsFields, VerificationKeyData, } from '@aztec/circuits.js'; -import { type Tuple } from '@aztec/foundation/serialize'; import { strict as assert } from 'assert'; import * as fs from 'fs/promises'; @@ -29,10 +27,8 @@ export async function extractVkData(vkDirectoryPath: string): Promise, vkHash); - const vk = new VerificationKeyData(vkAsFields, rawBinary); - return vk; + const vkAsFields = new VerificationKeyAsFields(fields, vkHash); + return new VerificationKeyData(vkAsFields, rawBinary); } // TODO: This was adapted from the above function. A refactor might be needed. 
diff --git a/yarn-project/bb-prover/src/verifier/bb_verifier.ts b/yarn-project/bb-prover/src/verifier/bb_verifier.ts index 8cfc2688de1..55b16f1b846 100644 --- a/yarn-project/bb-prover/src/verifier/bb_verifier.ts +++ b/yarn-project/bb-prover/src/verifier/bb_verifier.ts @@ -22,6 +22,7 @@ import { verifyProof, } from '../bb/execute.js'; import { type BBConfig } from '../config.js'; +import { getUltraHonkFlavorForCircuit } from '../honk.js'; import { mapProtocolArtifactNameToCircuitName } from '../stats.js'; import { extractVkData } from '../verification_key/verification_key_data.js'; @@ -62,7 +63,7 @@ export class BBCircuitVerifier implements ClientProtocolCircuitVerifier { workingDirectory, circuit, ProtocolCircuitArtifacts[circuit], - 'vk', + getUltraHonkFlavorForCircuit(circuit), logFn, ).then(result => { if (result.status === BB_RESULT.FAILURE) { @@ -103,7 +104,13 @@ export class BBCircuitVerifier implements ClientProtocolCircuitVerifier { this.logger.debug(`${circuit} BB out - ${message}`); }; - const result = await verifyProof(this.config.bbBinaryPath, proofFileName, verificationKeyPath!, logFunction); + const result = await verifyProof( + this.config.bbBinaryPath, + proofFileName, + verificationKeyPath!, + getUltraHonkFlavorForCircuit(circuit), + logFunction, + ); if (result.status === BB_RESULT.FAILURE) { const errorMessage = `Failed to verify ${circuit} proof!`; diff --git a/yarn-project/bb-prover/tsconfig.json b/yarn-project/bb-prover/tsconfig.json index e0e59ed584c..77c9c6ff999 100644 --- a/yarn-project/bb-prover/tsconfig.json +++ b/yarn-project/bb-prover/tsconfig.json @@ -23,6 +23,9 @@ }, { "path": "../telemetry-client" + }, + { + "path": "../ethereum" } ], "include": ["src"] diff --git a/yarn-project/circuits.js/src/structs/proof.ts b/yarn-project/circuits.js/src/structs/proof.ts index 1ccfe85ce09..e93af115111 100644 --- a/yarn-project/circuits.js/src/structs/proof.ts +++ b/yarn-project/circuits.js/src/structs/proof.ts @@ -1,6 +1,8 @@ -import { Fr } from 
'@aztec/bb.js'; +import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import { AGGREGATION_OBJECT_LENGTH } from '../constants.gen.js'; + const EMPTY_PROOF_SIZE = 42; /** @@ -12,6 +14,9 @@ const EMPTY_PROOF_SIZE = 42; export class Proof { // Make sure this type is not confused with other buffer wrappers readonly __proofBrand: any; + + readonly publicInputsOffset = 100; + constructor( /** * Holds the serialized proof data in a binary buffer format. @@ -55,11 +60,22 @@ export class Proof { } public withoutPublicInputs(): Buffer { - if (this.numPublicInputs > 0) { - return this.buffer.subarray(Fr.SIZE_IN_BYTES * this.numPublicInputs); - } else { - return this.buffer; - } + return Buffer.concat([ + this.buffer.subarray(4, this.publicInputsOffset), + this.buffer.subarray(this.publicInputsOffset + Fr.SIZE_IN_BYTES * this.numPublicInputs), + ]); + } + + public extractPublicInputs(): Fr[] { + const reader = BufferReader.asReader( + this.buffer.subarray(this.publicInputsOffset, this.publicInputsOffset + Fr.SIZE_IN_BYTES * this.numPublicInputs), + ); + return reader.readArray(this.numPublicInputs, Fr); + } + + public extractAggregationObject(): Fr[] { + const publicInputs = this.extractPublicInputs(); + return publicInputs.slice(-1 * AGGREGATION_OBJECT_LENGTH); } /** diff --git a/yarn-project/circuits.js/src/structs/verification_key.ts b/yarn-project/circuits.js/src/structs/verification_key.ts index d7f5f4706c7..9b927bc834d 100644 --- a/yarn-project/circuits.js/src/structs/verification_key.ts +++ b/yarn-project/circuits.js/src/structs/verification_key.ts @@ -1,7 +1,7 @@ import { makeTuple } from '@aztec/foundation/array'; import { times } from '@aztec/foundation/collection'; import { Fq, Fr } from '@aztec/foundation/fields'; -import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { 
strict as assert } from 'assert'; @@ -85,7 +85,7 @@ export const CIRCUIT_RECURSIVE_INDEX = 3; * Provides a 'fields' representation of a circuit's verification key */ export class VerificationKeyAsFields { - constructor(public key: Tuple, public hash: Fr) {} + constructor(public key: Fr[], public hash: Fr) {} public get numPublicInputs() { return Number(this.key[CIRCUIT_PUBLIC_INPUTS_INDEX]); @@ -104,10 +104,10 @@ export class VerificationKeyAsFields { * @returns The buffer. */ toBuffer() { - return serializeToBuffer(this.key, this.hash); + return serializeToBuffer(...this.toFields()); } toFields() { - return [...this.key, this.hash]; + return [this.key.length, ...this.key, this.hash]; } /** @@ -117,7 +117,7 @@ export class VerificationKeyAsFields { */ static fromBuffer(buffer: Buffer | BufferReader): VerificationKeyAsFields { const reader = BufferReader.asReader(buffer); - return new VerificationKeyAsFields(reader.readArray(VERIFICATION_KEY_LENGTH_IN_FIELDS, Fr), reader.readObject(Fr)); + return new VerificationKeyAsFields(reader.readVector(Fr), reader.readObject(Fr)); } /** diff --git a/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts b/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts index f77e6069050..594272d005b 100644 --- a/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts @@ -1,10 +1,9 @@ import { L2Block, deployL1Contract, fileURLToPath } from '@aztec/aztec.js'; import { BBCircuitVerifier } from '@aztec/bb-prover'; -import { AGGREGATION_OBJECT_LENGTH, Fr, HEADER_LENGTH, Proof } from '@aztec/circuits.js'; +import { Fr, Proof } from '@aztec/circuits.js'; import { type L1ContractAddresses } from '@aztec/ethereum'; import { type Logger } from '@aztec/foundation/log'; -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { AvailabilityOracleAbi, RollupAbi } from 
'@aztec/l1-artifacts'; +import { BufferReader } from '@aztec/foundation/serialize'; import { type Anvil } from '@viem/anvil'; import { readFile } from 'fs/promises'; @@ -34,6 +33,7 @@ import { getLogger, setupL1Contracts, startAnvil } from '../fixtures/utils.js'; describe('proof_verification', () => { let proof: Proof; let proverId: Fr; + let vkTreeRoot: Fr; let block: L2Block; let aggregationObject: Fr[]; let anvil: Anvil | undefined; @@ -53,12 +53,14 @@ describe('proof_verification', () => { if (!rpcUrl) { ({ anvil, rpcUrl } = await startAnvil()); } + logger.info('Anvil started'); ({ l1ContractAddresses, publicClient, walletClient } = await setupL1Contracts( rpcUrl, mnemonicToAccount(MNEMONIC), logger, )); + logger.info('l1 contracts done'); const bb = await getBBConfig(logger); const acvm = await getACVMConfig(logger); @@ -70,12 +72,16 @@ describe('proof_verification', () => { bbTeardown = bb!.cleanup; acvmTeardown = acvm!.cleanup; + logger.info('bb, acvm done'); + + const content = await circuitVerifier.generateSolidityContract('BlockRootRollupArtifact', 'UltraHonkVerifier.sol'); + logger.info('generated contract'); const input = { language: 'Solidity', sources: { 'UltraHonkVerifier.sol': { - content: await circuitVerifier.generateSolidityContract('BlockRootRollupArtifact', 'UltraHonkVerifier.sol'), + content, }, }, settings: { @@ -94,6 +100,7 @@ describe('proof_verification', () => { }; const output = JSON.parse(solc.compile(JSON.stringify(input))); + logger.info('compiled contract'); const abi = output.contracts['UltraHonkVerifier.sol']['HonkVerifier'].abi; const bytecode: string = output.contracts['UltraHonkVerifier.sol']['HonkVerifier'].evm.bytecode.object; @@ -104,6 +111,7 @@ describe('proof_verification', () => { client: publicClient, abi, }) as any; + logger.info('deployed verifier'); }); afterAll(async () => { @@ -121,10 +129,9 @@ describe('proof_verification', () => { ); block = L2Block.fromString(blockResult.block); - // TODO(#6624): Note that with 
honk proofs the below writes incorrect test data to file. - // The serialisation does not account for the prepended fields (circuit size, PI size, PI offset) in new Honk proofs, so the written data is shifted. proof = Proof.fromString(blockResult.proof); - proverId = Fr.ZERO; + proverId = Fr.fromString(blockResult.proverId); + vkTreeRoot = Fr.fromString(blockResult.vkTreeRoot); aggregationObject = blockResult.aggregationObject.map((x: string) => Fr.fromString(x)); }); @@ -133,68 +140,61 @@ describe('proof_verification', () => { await expect(circuitVerifier.verifyProofForCircuit('BlockRootRollupArtifact', proof)).resolves.toBeUndefined(); }); }); - // TODO(#6624) & TODO(#7346): The below PIs do not correspond to BlockRoot/Root circuits. - // They will need to be updated to whichever circuit we are using when switching on this test. + describe('HonkVerifier', () => { it('verifies full proof', async () => { - const reader = BufferReader.asReader(proof.buffer); - // +2 fields for archive - const archive = reader.readArray(2, Fr); - const header = reader.readArray(HEADER_LENGTH, Fr); - const aggObject = reader.readArray(AGGREGATION_OBJECT_LENGTH, Fr); - - const publicInputs = [...archive, ...header, ...aggObject].map(x => x.toString()); - - const proofStr = `0x${proof.buffer - .subarray((HEADER_LENGTH + 2 + AGGREGATION_OBJECT_LENGTH) * Fr.SIZE_IN_BYTES) - .toString('hex')}` as const; + // skip proof size which is an uint32 + const reader = BufferReader.asReader(proof.buffer.subarray(4)); + const [circuitSize, numPublicInputs, publicInputsOffset] = reader.readArray(3, Fr); + const publicInputs = reader.readArray(numPublicInputs.toNumber(), Fr).map(x => x.toString()); + + const proofStr = `0x${Buffer.concat([ + circuitSize.toBuffer(), + numPublicInputs.toBuffer(), + publicInputsOffset.toBuffer(), + reader.readToEnd(), + ]).toString('hex')}` as const; await expect(verifierContract.read.verify([proofStr, publicInputs])).resolves.toBeTruthy(); }); it('verifies proof taking 
public inputs from block', async () => { - const proofStr = `0x${proof.withoutPublicInputs().toString('hex')}`; - const publicInputs = [...block.archive.toFields(), ...block.header.toFields(), ...aggregationObject].map(x => - x.toString(), - ); + const reader = BufferReader.asReader(proof.buffer.subarray(4)); + const [circuitSize, numPublicInputs, publicInputsOffset] = reader.readArray(3, Fr); + const publicInputsFromProof = reader.readArray(numPublicInputs.toNumber(), Fr).map(x => x.toString()); + + const proofStr = `0x${Buffer.concat([ + circuitSize.toBuffer(), + numPublicInputs.toBuffer(), + publicInputsOffset.toBuffer(), + reader.readToEnd(), + ]).toString('hex')}` as const; + + const publicInputs = [ + block.header.lastArchive.root, + block.header.globalVariables.blockNumber, + block.archive.root, + new Fr(block.archive.nextAvailableLeafIndex), + Fr.ZERO, // prev block hash + block.hash(), + ...block.header.globalVariables.toFields(), // start global vars + ...block.header.globalVariables.toFields(), // end global vars + new Fr(block.header.contentCommitment.outHash), + block.header.globalVariables.coinbase.toField(), // the fee taker's address + block.header.totalFees, // how much they got + ...Array(62).fill(Fr.ZERO), // 31 other (fee takers, fee) pairs + vkTreeRoot, + proverId, // 0x51 + ...aggregationObject, + ].map((x: Fr) => x.toString()); + + expect(publicInputs.length).toEqual(publicInputsFromProof.length); + expect(publicInputs.slice(0, 27)).toEqual(publicInputsFromProof.slice(0, 27)); + expect(publicInputs.slice(27, 89)).toEqual(publicInputsFromProof.slice(27, 89)); + expect(publicInputs.slice(89, 91)).toEqual(publicInputsFromProof.slice(89, 91)); + expect(publicInputs.slice(91)).toEqual(publicInputsFromProof.slice(91)); await expect(verifierContract.read.verify([proofStr, publicInputs])).resolves.toBeTruthy(); }); }); - - describe('Rollup', () => { - let availabilityContract: GetContractReturnType; - let rollupContract: GetContractReturnType; - - 
beforeAll(async () => { - rollupContract = getContract({ - address: l1ContractAddresses.rollupAddress.toString(), - abi: RollupAbi, - client: walletClient, - }); - - availabilityContract = getContract({ - address: l1ContractAddresses.availabilityOracleAddress.toString(), - abi: AvailabilityOracleAbi, - client: walletClient, - }); - - await rollupContract.write.setVerifier([verifierContract.address]); - logger.info('Rollup only accepts valid proofs now'); - await availabilityContract.write.publish([`0x${block.body.toBuffer().toString('hex')}`]); - }); - // TODO(#6624) & TODO(#7346): Rollup.submitProof has changed to submitBlockRootProof/submitRootProof - // The inputs below may change depending on which submit fn we are using when we reinstate this test. - it('verifies proof', async () => { - const args = [ - `0x${block.header.toBuffer().toString('hex')}`, - `0x${block.archive.root.toBuffer().toString('hex')}`, - `0x${proverId.toBuffer().toString('hex')}`, - `0x${serializeToBuffer(aggregationObject).toString('hex')}`, - `0x${proof.withoutPublicInputs().toString('hex')}`, - ] as const; - - await expect(rollupContract.write.submitBlockRootProof(args)).resolves.toBeDefined(); - }); - }); }); diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index e53fb312ec5..48d87598c82 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -348,7 +348,7 @@ export class FullProverTest { const { walletClient, publicClient, l1ContractAddresses } = this.context.deployL1ContractsValues; const contract = await this.circuitProofVerifier.generateSolidityContract( - 'RootRollupArtifact', + 'BlockRootRollupArtifact', 'UltraHonkVerifier.sol', ); diff --git a/yarn-project/end-to-end/src/e2e_prover/full.test.ts b/yarn-project/end-to-end/src/e2e_prover/full.test.ts index f5881b11a1d..0430ef690aa 100644 --- 
a/yarn-project/end-to-end/src/e2e_prover/full.test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/full.test.ts @@ -1,4 +1,3 @@ -import { type Fr } from '@aztec/aztec.js'; import { getTestData, isGenerateTestDataEnabled, writeTestData } from '@aztec/foundation/testing'; import { FullProverTest } from './e2e_prover_test.js'; @@ -18,7 +17,7 @@ describe('full_prover', () => { await t.applyBaseSnapshots(); await t.applyMintSnapshot(); await t.setup(); - // await t.deployVerifier(); + await t.deployVerifier(); ({ provenAssets, accounts, tokenSim, logger } = t); }); @@ -79,16 +78,7 @@ describe('full_prover', () => { // fail the test. User asked for fixtures but we don't have any throw new Error('No block result found in test data'); } - // TODO(#6624): Note that with honk proofs the below writes incorrect test data to file. - // The serialisation does not account for the prepended fields (circuit size, PI size, PI offset) in new Honk proofs, so the written data is shifted. - writeTestData( - 'yarn-project/end-to-end/src/fixtures/dumps/block_result.json', - JSON.stringify({ - block: blockResult.block.toString(), - proof: blockResult.proof.toString(), - aggregationObject: blockResult.aggregationObject.map((x: Fr) => x.toString()), - }), - ); + writeTestData('yarn-project/end-to-end/src/fixtures/dumps/block_result.json', JSON.stringify(blockResult)); } }, TIMEOUT, diff --git a/yarn-project/end-to-end/src/fixtures/dumps/block_result.json b/yarn-project/end-to-end/src/fixtures/dumps/block_result.json index 266cf49864e..c72f6d926c8 100644 --- a/yarn-project/end-to-end/src/fixtures/dumps/block_result.json +++ b/yarn-project/end-to-end/src/fixtures/dumps/block_result.json @@ -1,22 +1 @@ -{ - "block": 
"1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000200d09e7feff5a1049661763ded52742f02aac5d9793b27a40d6b9c60a668bdf200747f2ee8836d7dd230b97572463dac0576259dedd98d86c56e2275d6d670d30007638bb56b6dda2b64b8f76841114ac3a87a1820030e2e16772c4d294879c32818d15e97da7dd64b32439cc63b7d03601ccadce81009ab9792a2f487795d30000000100b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000008019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000010023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a90000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dfe666201fc6aee3c5fdcd21deedfa71790c6c7719d2af6919c068ef9b9f4c30000000200000000", - "proof": 
"00003e84000001f40000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000006b00000000000000000000000000000000000000000000000000000000000000011200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000000000000000000000000000000000000000000000000000000000000011dfe666201fc6aee3c5fdcd21deedfa71790c6c7719d2af6919c068ef9b9f4c3000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002ff40e125f72283eb833736e42285701b003d9e4270756ad3c5ba36ee0dbae760000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007638bb56b6dda2b64b8f76841114ac3a87a1820030e2e16772c4d294879c300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001e54491432d6c962973b71fcfd7b5597486f108bc66cd620099a65702fa0181b0000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000f3dac4a356c5fdd79000000000000000000000000000000000000000000000001f8b97ff8c78f3f9f00000000000000000000000000000000000000000000000cf72074e065bc22b30000000000000000000000000000000000000000000000000002dc10ffccda590000000000000000000000000000000000000000000000047bfb4dfb23cc889f00000000000000000000000000000000000000000000000871e52274633f4bf70000000000000000000000000000000000000000000000013c49830a0ce95ff20000000000000000000000000000000000000000000000000002c5ab7c0bb98e00000000000000000000000000000000000000000000000b0f322f57a86900ed000000000000000000000000000000000000000000000002e496ababf56e3cd6000000000000000000000000000000000000000000000005dd1141d5deb2050e000000000000000000000000000000000000000000000000000268dc87f9458f000000000000000000000000000000000000000000000003334a597cd9ec0a0e00000000000000000000000000000000000000000000000645a57625996ab518000000000000000000000000000000000000000000000006a2f7ffb16256c45b00000000000000000000000000000000000000000000000000027ca8c331291b000000000000000000000000000000f4ee11b0bde295507e62592c4338bc1f290000000000000000000000000000000000268abf37ebdc51e432b543b9cd13eb00000000000000000000000000000080fb57196db1edb119c4a43096ca53be4700000000000000000000000000000000000c10cb089cb171173b34c8ab3b952c00000000000000000000000000000064a8f95c06b2f690c389f6e9ffbac2f03d0000000000000000000000000000000000305b974421c33e120c6c35e2d49d7e0000000000000000000000000000006360b1b9dbd90a9b1ccb3cd3bc78cd9ecb00000000000000000000000000000000000cafe05c1184abbb1673bebfbdfd08000000000000000000000000000000e9a8914e09dba59c9d0469eac4258a756000000000000000000000000000000000000aab18264ff95058a2bd32aa92ef6f000000000000000000000000000000430eafce70b21dd200f81e952ae95ccea2000000000000000000000000000000000027f21d866b6071e7d5888a222b23f200000000000000000000000000000035e18690ea3538d27c3eb3e49ff771858a0000000000000000000000000000000000253559923d3ef097c24944960baaca0000000000000000000000000000000409efa67b85eec9d
b156ab6a7caac5f9b00000000000000000000000000000000001379a24c97a4e3f27a72888913923c000000000000000000000000000000968bb1d5b9def16c7ba73eb743ab2082a7000000000000000000000000000000000007ca785c1c0cfd82fbce0d58a4dd19000000000000000000000000000000edd674059047c461b4014800a82e531a550000000000000000000000000000000000227a11996c17a4514c44e17b3e5b92000000000000000000000000000000eac24cd380dd1e269e973ef0e61f1ceacd000000000000000000000000000000000005e248521b8b312e944537ae68ecdb000000000000000000000000000000d2d1bc3109deba532ba528c5732d0a5f640000000000000000000000000000000000015dc7ad1bc5e912a04cb7fa12ff760000000000000000000000000000007c9ccd204792110580933efcdc4e4429b500000000000000000000000000000000000dc9d4003e306af0e5bdf9671ed3b9000000000000000000000000000000a33a9a871c7276da857c0bddcfe44bbf8300000000000000000000000000000000002cb33c7ca066c3c2dcfa000582713e0000000000000000000000000000000283503468afcb144e05a54e030a9a263600000000000000000000000000000000001c444beb50f664ebc2590d48488f1a0000000000000000000000000000004ba5bd89da5d04580ea93ca6e0569750920000000000000000000000000000000000219de77d73911f7d4b40973b92260d20c3f91a2efb28325f9e9c7865a0785c99e3e78307f91517889b623e5248c1920fa05558b23677f758b1a93e1be0e0008e5000c571c05b79bb4693559db73e6f1a5162de6db9f031e6c0d24bbfe35bc0b6efafece270978e59125b4973a997e31c579d28bd16855fb8e7019f0624f0ff8d201cc7331881b9022884717556a7a910df1d8d4f02a36374e38d9ec8cf380ca5c577925a79479d298593db4d0e676a153bcc1799ae8895ffaac643470b7d08e3ed2b39e276ed5620899c30b53ddb180b60f927b3c132347fb728509be0db26cc7461185a6514559b8fa1b648a8fc4a2a8d01adcc80c703889d7085ac6ec52b081316f493ccf91bd5ec3b00c9ca9a392e014410ce4234c9ee9dc4046a89608910a607f85241cb239b4cf7f243af905f1e611e72fbbe893a8994d50241f507c1eef1e806a456b39fc5e823c07bcf897c2f20f1a0e0804767b56b7e14dc82678c3f03b3dd3f22528e6fd5cc4964dd1cc72d5c0d02b0448eadc2f8582fad135db51484d2082a67671b12fc9c19d173fb4119f0b799734b7223bc2056fb84a7798250cb27fa8111bc7f9fc4bf1439ed0b031ec89ed76f1837d4ec12c4b2ea6043510f34d5256445911d477e444338f83d3
7152974ab6c2851a4925bc5821ced7e10b18aca14c98c1c01ed759d09310273bd0cdb0e8928bcae871efd91d81146bce75ffdafd5dc9b636d121237a15aefe4ff2558b4e99f5aa79a83c160f622073577c99dfd85bc90a921468b5def1b5e6a5813a21e2cdd0fba4d28ab9778b18e2e479db80dea0f335ba0c99f48c6133a1cdd07461930985f5865c1f0b366270971ec3f16e116402951a2cd461365ab808a7d2490cec89fd1b8a8f7553efd519e09529e4eb27a5d5a2398a3010473a191ab09277bc2fbf183afef00e98419a93d6c0169633acff37be4ab5f2183e22c0ec2f214e363cbb7bdda19d2953138d091b902e6e067f4b11e07213175ff514ddfd6662f48f9edca6f052323b8751a5243993f497c10a9d2aad798d875e0caee6e2d0f11ef53cfa924be4c88950b2f8945a4c1b1687a76b3bda7c5f7c0c9b55e71d6db1eb47d29d356167a16eea82725dd97766a0f507d88c6e2e46d4b05b23d538b16163d2eafec2e3803ae06a8936145dc5fc5a9d5f5f449a4da772bcd8393f3839c2297b279aab987372a939f6cc3a8a17893b88ced49e650a7435a0207c0b3d3dd1fe89bc2464bbacf010fcf500b666fac9fac31edd1dc466200b9349c75277a4b0a4c0696b9053d260df2dfdfd28e1171a992d0a44ac4a79ee28358e43d712e04202b8e2915cb56aa73518d40b1ca8dbb8ec3db522e6c7cecb6a1e3820b3ec7f3111e26921335ba8970b1c83cea1d5c2f9a01629825b9f082fbe2692c30da51320a275bb5e10a3ba1c39fceebc54ea1773e6df342be5e10b6b92934d943a3891510d8124b20c13df7fc3febaee595d47eeb7d91a06c6938e354e0242b7f800b7515c391766e31ec5cebfae28caadd4f62f8cdaf9715476ebcefea6047ee8d5c1a08be8ce510de1b1ef721232bdfb66b5aa8a2d3936a72f61866182106b6c04fb126da8402d04044fc34320ea7dc25529adad17903aed9b6ba818fd1545d5d9a8e261fca9c2dd9f4e46ada8b6dab7597b138f496b23ac9130b5e0809ff7a7f81022650da504f0cf0e569f01fcb54f987b4077df2ab9ec7475cc2ee32f61766a7a01c71a792b2bbb401fb5bcc06c53bf885ed51eccb40c1d3dcb77738b12bfc1c7c2f001cdc50e33714ae43eec90a111e3a95c6a2f3e562be4ab584223da1eda2c71064e6eafa71b5b83e51ec68e7a2702e0a4b150b9bbc4439b7dbc2edda5c443205eba641ee7ca513f36d73e89e168eae1253038be675a202bf49f8e0d01c3ee027c4c71190b85111a4a993acd70d5d59a41c3745517ee9454884aad102c3a7460294a40fb2aedb6d7e393491c86cf95592f8eb4e341a9383048ac54b95a98fb91d72578a1f41fe8df57999f4fd5803a1d804e7817e8a313216e7fbb20ef4bfae1c7abaf6230db02
f7668ad099b6ac61eb58ddbb6ff081b11494d88e7d08e7b0e1d5151bd5f45ad09bd5d6bd55bc4877298f397c43756f385468b54490f6dc47510a7a4cb94179d0786421ff47903284154fd27b0c855f1afdcfaa89fd255b45008de625de179b91e5b38518c7edcc4ac3839c7c509af9e6612d48fbb6cfb50790d10dc8189a076946bc04e7c8eedd1d9c1c59c3f8cef6084560f127a6c887f4e18a35a3ab369418cfc8ac80d4d1d499be3ffec00c99b1228cebd5b22f3f9e65c2a77f0d8c3d31007f194cde62aac442a0a393cbbba694d7ae12f6d923f2f604426a885e6fa355c5175d4b07d75871c1d63cfd2d41e7fdd9fbf263b46c99bc1a6261200b5c29e0429f41890894a5fa3cb2434c68fabf6d6244b6f60ccb9545e3420fef06a696a636f93caefbcde2b199463402141bf92061a6d5ca1364bbbdbd6144ae133a44eac10081422a15b2566d5cd830da3a631056bc03e7f516bd15ce22c1186cbb384febe13f34df839737c0f1246573f0ccafd319e641e4c41853cb61cb2c6e5f5823a4680fe13e5e91146bb03eb22c7080aa5e1d14c4a7f615455b818e66c7bf833d780dc90aa5bf3c7e58f486c02986f1c551bcb6bbfd8a41f4971050e29cec4cf0ab4ece2b9214c7f2daed4b015cac222fca15feac05f41ebbb90230cd65386bd237067c3164b158e374784734b5d318403001f6836597dea514f2f966959a8b2cddfa0b6d5b3cd3193cb8961157ae0157943c4a8b0423f8c22a222742cc7ce54b1bc8eeb9e3d54396ceae8eeecb03f1cdc61585a4833743bbbf423861708b6672940f39c4d30689675e73238bdb0cd0b6f2eaab9d6daa5bb7cba1be67b411470e13b86af488cf6ef1f7a0748d52fdef40e51a45eb8358579e2001e6e499a9fcdbf253c4e14ae6c75c75c95097e8440edb9a794209d14bf9b096b01417c8a62766e3514850c040dcf1bd57198026b3a0230df831e90cf0a42d3fc29c51e9b0df6c9776190a624cec3c0a174a6dc72e6504d1e2084197ef41a90f71da03690381582c4805ff33db82a6dbc22b43468096b5a8b3135e5a3020f3c610af5c738bafeb2766c800c62db8d4c9e0439adc18f6837f5b1ab9a6f8f54325d0d89314b393a45cac216729563f8f491e637910021b1b6841df2630cf0664401123dcf9f67697dfe3f9dfc320ef75403c7366c86230df14bbc5949683b1f7e101332826dca63e48427b0b4d5d63a40ffb3aa5a218314e8b2192ef7463893ce99018e6f655bb29a298c74052d5cc0f98e6b7f600fa1d7cd3e2851a40e8fdd62b80efd4f22af41916d69c491126aff5b33f3bee2b5c3f7f036abfc5f584a6d39af279b2ba34a897c0732981fe04d212e62cae5cc4f5977b3e0b34068a555c0f4aa0856249aa4710a61d0f964c307828d2
0c33f9b63367cbbe45f6a3e43e1920505042f24355d1a5ae06abb18e1f57f1d83b5c5df10f3eaac777c9c32e79472cbb00de22b3ca4aa28fe8c8e7f445ad92b29198332bc5c83196bb52c8a28e12f92b9038610671c4f8070b088ec0c05cafc506bf658aa408df9f2d67e4654313a23c817771f0bf5fa4c65f3863106ca75926c6b6879542658d5d926f03031abf7614427b547e5c6871ae6ac39b1c7fb6c3a575a53696085a0e2483a7408012da3f4e122027ac76b7c469e3e99c3db7878fc0db08b107be62918ca5285e3d1ab7694700283c066c81606e966e4d491b0697d11602de7514cd41ef647c0731855ec8da5303bdd477bfb8f7072b0e1ed3355718bbe5d696f0e26969810aa60dde2091fff275394c7a0f9b50a8d9371e95181d164eff0da35ecaa736c5c6e74c9e5ef00e20fd2f16c7e0c0cb972a4cf6f31a38818c67f16b74bc20758d24ec212d02bde762d956c1a157c1dc40cd8c1ac7810cd1513a74daf430241f593036c6bb4a22f1f05f2a899e13342493d11092351fab93680ada41df6d9abc2c89dac9e52d9f169177992e868b700a1931eaa60239e9f3ecf4363f2f4e11c1573b101da9013de1f2c0b5306fe527c45cf4c570a8c0381f0d3ec37295f53a4d99e3cc17fe60fc6762181aca245535b2bff6172e615e9a6aaeb7b7ef072de6ac6bb16ee088d8717e7147ed8537f2102c55d9309f4ad82c0eb27434bebee4797bf573199107a9b6e1f0ba7a3d2f4f6f6a5d57b8f4bfb250eae52eadcb7dccf8034f35366da8d1d13a92f9e614c0f590afad82a14b4fbe124b66aa439d17fa90024fe93b58e43ef93ac0af67c378aa5af71700602f4f8b87eba98eff57c660a6122c7d2e40cd5efca992912c0d6b78d88de7920809aa66ba74f245b01504ce8cee4eeefa51faf7a4c4d2cb1f0efab958c0e007f3ea281ede0930b97daa5d3e6b85ec22e2e2384deefb60343bc64efc8b156f54ad97a6f6a1fa91057c332575e56c01913f39d285b71dc06aab65f896e2590e6806ce9332955e0edf6d485c2e51466e1742d17e437a63c2795504e7dd7056e6d9975d60a27fa496c008696486c2b2135a095afe70e725d17193efe90ae26943854b35cee9f9b656b8616c22a955e307439645e4ad2736b1baac9e704d35fd2c502ebe8a1297a525e8b50b0ed2225cec9648dee61e54ae01c43d9de2249cb108d9f30715e2f6d4c1a411e58cde060e538b413ec1505abe718df610004a08f7ec0157efd617086e271381a3413e73327037ba8531a8dbcae0a4b909e3c22453979cbbba7c9e8b338eb7963b6a2897559a35be81479e4265a26ea6198415b70fd4f12c48c72c90cc777dbaa8a0daaaeecd1705aa7f8de1e2e1fcbbbdd9c79310b7618299468be821f95c1de326161ce2
6439c05beb5a0d16909025e276e1de97d3643187c340acae1547e36162bed13320222a7690fcc95d31f05a6ced3f78a27bb4fc737d66ce18025f95b254e29b28ff371c33ea1bd8e132de3f054361005d06033267c285b3d9baaef4f929b1d1182fe0b5c18b50496d4143d760243baf23fdc07d7384e9ae5330dd37f71a87758d5412cf1a139da5c3e0929f3bf6cb32fbd157308fc5f2ba0b8af838e93c7e3e1fee780e8a91b5ba7161fee73232e207cf7f91c65a2ec78d494eab6bf0e3f477dc32b7c0b78c521106e02219a2c26e76d1a1c173ed0bbffc02063019c0741f268fbbfa4f160e7fec1c516300372af2e190ed7f4212af578ca73017382150fec38fe0b5c279e1fb5a30611751f2b2ceb7003e13c353335bdfd021037ac2e164f70b7369b831188a5d3301c9b81bade3b43ca3e32ab4d1e1c3df573fff48435c41afd9e182bcc1c725f4f2416b598d7f90c4d87d2d48e377235155a7f34f0ed9d5960273caefd4868bb600d4c13ea609f29f2b39969780d60cb4d737752b3d7828618d4204a1e1f252c5f2bae870edb31ce84bb1d1fc6be8ecab20a45c184c12d73ccd4d92b64e985f2bb0da0cb84306678bc0f2ec5861f17012024e0eeebe8ca9b71050e8e59ea270aea238716cb6ea62eff41f7b10ead872176959d6ecc6046e3e1bfe600ae368afaaa04261e59bcd2eced7fd1bcadc84ed2788a59e6189e356c5bf9d2872249671a9408616c8b96a79de09262c726bd0a85b2ab758fc30ac52c0807086ddebe44c5d324ddca837c33f01b7c4a190cb1f294f574d6a8a022a858aab49e23020162abaa2b3c74d5bfdf8e677205713ab3524400cab5a78d23d9d47a54d2cc13bee6d4d1002cadcb2c914ff3180d62619508ff811aca4904f4d5c7e34d8cd5427316622d0cfdba13fbb72359562838a0dfe1c07dc96d3f29a3cdcb978c100ac75337f7ed264a617d779ef92efaabf31f8d8ee083ef13951e13b7341a32ea5a206ba0e74807fbdcc9fb35a66425f2468df7e3462a97f90c21c6b6ccf03653dcc4a07a13e4188beb58d73fab9bde45f64462d5df158bc5486e5ffcb61267358818b0a1c531206a02999da5ae0e0cb13cdfcbe448e75e91a085756f91f11204d24b9ac26c9f05b0909517430ce3afc8c0959296f0a1a9b708f468a2503ace0ea90dbec7d2e61293679bc3ca66d5cc8133883cc39c89f1650472d2dbc0addeabac8be5736a0f2927c30423c73636944981a57bf36564e3c454f40a4aeaecbcde88a2045b0d7f22ae98c1431ebb5f8827f43f16e6dc1809df7d40b745507cf40db1ae36a1289c206d0b8015b7cf444af29f7e552aef13d8ee4d0dcbdd0b18b35247663e14548319587bce9e2f949b13c8ca7e7e6a7db53ab00b82e8a0144b89a128f7d513ca7
407319f5b0c2ab6cc53d3cf9e27ccdb7e98439506d9873e62c87989d7c525e5c62e1ebecaaef0968a8348a1afcaddeaa543ab271f868f4ff41ee72a782270dcb6095466996accc8f37d6b5054c8f0d3cd25af16c86cec489e5a97a2687f9f13d72f8ab24af842626811592ac14f00f47ae1d66e720e43050bea628a6b99a5e20b19e249b107819b5d02e4ac952fc11d716b971227bbf3bcdcddee7272a5c17d1517f3ef792dfed212cce5b4d30ae9d7bacf4432673c597e94a1011e6280c3fea201a709a353d1d7806ef92d17360bab66478ff180f0dfa3452b705a770e9c56b411bd16547623412a971554709dc5adb66ff0dc0913c1f173bd27f9f4807e72232edc4abf1c224085c921e5cec13064839780ef3e80ef272fed1e63a49b289fa30882621de391a717467195fcf23c2e7c7b043e6ee69d66b1c420f3416c33dedb251819bdcbef3b69a2f1a1dd4e6f9b638efb8b2a958de59ada940fe569919ce30b9138168402ea07cc57bc407820d261747f65c3a60c5d3c72e8dfb37eeb164429a574406a0e051276136f142695deed52557dc804ae74074585605e8cdfdc321cc161214c24b8930607b959c3b873f8887136b7a533609b0cd6e323637df3e719e28cd4075be0a5fe6afbbcb45103ea744a9b117fc79ff7c8e4ed2eb0f6ae5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000755abab3f7b103f419606830d0d840388082435895411b90d4d8924e338d288225d20cb965acd1e966f30241e086c65dfb43f7a3b3e4a6264efe306397505692f3cae8e2b6e6c66d0ca6d9b1085f3529b78bd6b665110ca038427e6e9d5deaa1bbe84a25e455e3c83c39ca18892a482e47b82b7a89e9b5be7529a62dda6f91d0c7828de87520f74267502a1124d4128b9bf6937bdc4843714ec57484ae3faba0ec3703279721edd0682063125f10f2
2f57f1436b8c837bfad0d09501afb8b1401cc20f14c3fb3adfb07fd941b424725f3961a029488cf77733e63f3a521d47c2b4566e34ecccc6104ee261c33ead96a55f0ae11e1101dcba121d4c9e5eb2ab10034e2c91d4fe984aaefc2a6ca3613348a352b522ed427b004f5cf0d7bac0b481be7bba8fb303fc198a0a0d128592a6383d06df04cab44900c424b19d6e35bbd1d493542c799cd103e957f7396192d236a3c57bb662f3ae75bfc452022f6522f03c29c5a7aaeb266653eea78f42e09900e9e4938dc3a631de28eee2e9488a2762a77d7b4c7dee04743e2fcd5859820a14c81176e9cdc6309f3362ff1ed57777b0b52e5cf478b27a59deabfad2162bd98d9306dab3b22e59486089afc983ed8862b9a7d10611b8f64f05a7ee0de0b47d4bff6f7e19e7f3cbff886461fec1141552263408d7b25e069229b0f58c7300a2f9737c94a9393736c8c11e4c11341890a1d5019f4fdb6e14253909bc1c68466f133fdc96c3bd64700156e6b4568f37ffd1f05340dc04a7ebe8ad4e96c0d0c240ee10abd9b57c5cefc1258a345c56855df0340e35b136571723ca5c477df6c6ce878b90aec1d72dbe229c5be0e3c6fdec5004286e2554287cc7093855ecd4edc7f28fc3de48e040fc4793716e35dbd7d9a26334f4bfa504942cabff7275317ec9dd5c117542ef13c8f0761bef5232a0f42062539b418b01be15071dd250ebface7f45f09cefaea981d594e1fb651131a9c00e29a561cc35341d9bcf8479f03bc621d77840199eec0cd6322278c7c010f6c0a0c3cbff7f1595854424eb82ca3ed94fce1e88d256cd64a071420fe16a6c05e257d6fc9fbbbcf49f89dd2e55579b233386ca18d09a8fc47f3b2b768e60c9d812a664203c1d1e818c9441fbe811ed45d87846a322cb75e41a93cf0dc9f04b8521bb3763468fc29c6203915e8dcb535206d4bef5889a66ad062a0afb84cbc6cac0195bda43c78ef436e0f701f203a2440604c82babfeb07632abfd03011e82011282d27f4bf2b82a075d7580f601099bfeead7156eec207b3126ef0e955f03dca17fa89336ca99efc58b8a64a0e377e369e56db279c031f27d758a6c3b407edc90f38ac48b13d1f63252ff228e3cc9545a9c7bc4b38123dcf50d3f41b148ba10023a1e58f26d71dab1261af500d59f5d189dd43cdaeb9dc266882d8625bf669f911694cb35ab7aa98c1d1b716d150a4eb113f034a4ce7da5a09196ad3ee4e46b40e8aaf0ba5f8fff8ff4d20121441aef29adfc75d4820f78c44198912a93b2072296d7a2582177afbea66ddb25922caa396fe47961c2877e82f774dcc0c3758930e990ee99a8bccb5a50dc90b6721a874091dc762214e56d5b805595475577b2b28d8582ee519ab7a55bbf31cccea5475eb0eba4b6249cb6
94e7887a65dca7b922675f4bb3e5dca94f1a93ee6efb7165bf4d680cc122dfc48951b1c213c072e7a2c154c2bf538ca9b03f05fa292796086dacaf21a080c3d809674930a04b23598120096e180bb341e3da46ba1d6e1972a59fab49ef16c1e41ebbb783c0e96e7ca1e978911febab8b024df68a2e29fee6fa9f48f7fcf30dcca3dd13874fd034d5711faaf385ef0ad1c8d23a2f36e7c6d3bd14114522f95544c6cd1f8da0257758a000000000000000000000000000000b2bb591300a297e0a753862627bf61b14800000000000000000000000000000000001154933493f3bfd5a9c11b142c3717000000000000000000000000000000a0aafc71c55c4b8483d9e7aef3086cfc570000000000000000000000000000000000039357f77a572f20464ef0be7f46160000000000000000000000000000009cc129b2954978cc792b1535abe0e678ba0000000000000000000000000000000000210ca7c06f146386f9911070d2c18a0000000000000000000000000000003ced3aa175bdabd5eb4b50e2b1876f096700000000000000000000000000000000000f60d5d5c2236f2f9b27e95561781e000000000000000000000000000000565b50a254b922477f8a1b2493656891a500000000000000000000000000000000001a9b39d2ea0535d2953aaea59a8720000000000000000000000000000000bd15bd55c77c56b6390273d0054bdf4e1e00000000000000000000000000000000001564812d349548f16e4a041ec9cfdf00000000000000000000000000000076773335bab1d7b3e8bb293b1bed7b714700000000000000000000000000000000001fba381793ac902ccd92c2739c6f5200000000000000000000000000000075a46556945910ca4724c38e2abf6a64c000000000000000000000000000000000002c0f17e99fe87402a07fc23224fb9400000000000000000000000000000088fd65468ed8af24dafb5a1760b46f463b00000000000000000000000000000000000090a3201e6bd5559a8c73753c2fe500000000000000000000000000000008a63096070df14d043b9fc73bd466baec000000000000000000000000000000000021f085a0da7846087d69d27449e7e5000000000000000000000000000000661c396474836dfa9270a44f3645f067a700000000000000000000000000000000000ec1483f6cb6321f8a96fcde4ee618000000000000000000000000000000b81c5866353d91337b0f0eabe62c0d700300000000000000000000000000000000000d379ca8660e86b61c308c6e16518e00000000000000000000000000000053656c0b8d26d5d7c4ffbc84cb8c8dc78000000000000000000000000000000000002aa9ba2af5ea1164b866e13b2c16d
1000000000000000000000000000000922021c4c51e3061eb00b48915e506ba45000000000000000000000000000000000015eca982d68e4abe53d58d4bcf075c00000000000000000000000000000085c04ffd527c9209edf992b974cbeba83900000000000000000000000000000000002ba65d580a93a360025f56b8f592a6000000000000000000000000000000d0f88ff226a3ef57f1f592db98e1d827910000000000000000000000000000000000042735accfa3e630a25d172a09f3f2000000000000000000000000000000847902d41483ffca3c114c8cdb4cedd145000000000000000000000000000000000023efc8d31277e79f56e7613272eded0000000000000000000000000000007a7c50b57185b83ac18ba3261e220d7fe10000000000000000000000000000000000199b7e3b8e089834da02b8b41c1ba50000000000000000000000000000008acbaf87e441a6f1fa602118b02a8793cd00000000000000000000000000000000002ec63fed5f835087f8e1418d0dfc1600000000000000000000000000000020f9c0878fb3277d8d19485475c595827000000000000000000000000000000000000499e7cc31fa2a39831fdf7df8513c000000000000000000000000000000184428a24e481dfeb599723ba6c02288c00000000000000000000000000000000000187c9d4dedd2f97e8a8fc0fd129383000000000000000000000000000000fd6667fb403d52b2991a570668382a43f300000000000000000000000000000000002230ae1eb1441db0886e5443aa7aa90000000000000000000000000000004459f189b482e036feae0da6e37c7fd7590000000000000000000000000000000000182ca7fc790dff23214209ba3d6566000000000000000000000000000000b95f9880fb46bbd90b5117dadf9e67726100000000000000000000000000000000000f15d77545d650bc2a882ed442b8a2000000000000000000000000000000700923f7fd6c51e4941184ab0d23b2f1db00000000000000000000000000000000001e7521d6bad6991d37eb8cf06971320000000000000000000000000000009d81f92a7875c29bd986187c67b812da40000000000000000000000000000000000007c0de4f110d28addf1d83d7749cb0000000000000000000000000000000f38cd83b6ce267d4aa382e6f207591f62000000000000000000000000000000000002f8dbc0ef0e84723c125d13bd0d1e80000000000000000000000000000007d03360528bc17f1fb1181c979746ab2b200000000000000000000000000000000000bde2ae9c687c6f2e4495cb6ceab9700000000000000000000000000000004bfa8f689b7df8174df8c96f57872ceb4000000000000000
00000000000000000000177e08abb1a6fd4f220bcccb07e62000000000000000000000000000000d59961d99890eb911c402b007b9cbe976d000000000000000000000000000000000001e44c4d03753c3c68e2f72c29b0af000000000000000000000000000000e10bcda37f9b020238ce42cafba364ad0b00000000000000000000000000000000000a2700ee4bcd2ee1f9d187fb9a58ee00000000000000000000000000000034e61a08bc6a2102504fff403b37d354a0000000000000000000000000000000000019060a7026ab5315caf29609416bff000000000000000000000000000000a0b9296028acb200c6d9216bf1fd05587c00000000000000000000000000000000002537afeb211ab20661ccd797030db90000000000000000000000000000008d6fd1fa3c9a7902ae33325a7481e6ead70000000000000000000000000000000000105d1d3ad744922019ce40015b731b000000000000000000000000000000653ae5b8b4e3bf838c8dad5f00baab715200000000000000000000000000000000002ca54cf76b2d0eabd4dfd4c2f269b7000000000000000000000000000000ed0fa724e64d0bb3e921d29010b254b52200000000000000000000000000000000002283dd0b6919a9b907efa0d1450c83000000000000000000000000000000247d9ae0da38579e4d8f9edbafee4b0e870000000000000000000000000000000000038619b10948cee4ab41f87863ea55000000000000000000000000000000c86cf7a365af37b59374be4838604fda5400000000000000000000000000000000000949507e05840a340738aa0059fca80000000000000000000000000000005b9a32f04b291a93fced33191d11eee64700000000000000000000000000000000001dbceafb63478c33422a6f7b27b729000000000000000000000000000000949df4a91191d0883796d0c17e02de6cfd00000000000000000000000000000000001617ac2fdecf6a50088e4f9e89e09c000000000000000000000000000000d46babede40775278eb51c34e5354e155f0000000000000000000000000000000000214599b8fdbaafc8ef1b5eda1d1294000000000000000000000000000000770087323d677abefbba29108b9a928c3300000000000000000000000000000000000806db3099dfbcd0b4b60d575268960000000000000000000000000000008cfc1070fca86e46a864c883af230b0f3b00000000000000000000000000000000000bf85e10e77edbfff9bb9c84b76ee700000000000000000000000000000015d43e3775742ef07d4d253ac41af7234a00000000000000000000000000000000000bb11cc6d7c81d0284afccb0bb848c0000000000000000000000000000000
0000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000036a960d6f8e4ebd5c4e7619ef498b47415000000000000000000000000000000000003f8bc6c6f49222e71da7ef4f12f59000000000000000000000000000000da0e9ca5b0bc2a8b9f8dc55583a2f0607000000000000000000000000000000000002b34854ad662b37e0e76506e3eaf170000000000000000000000000000004eece60b91287df5941d8b6b85a1c163d4000000000000000000000000000000000025121f27276f7d6405b64dda2d13bb0000000000000000000000000000006b80ad59b5eaf076a2963328805fa2200f000000000000000000000000000000000018c545a23c08a
38624afdc86a65d280000006b", - "aggregationObject": [ - "0x00000000000000000000000000000000000000000000000f3dac4a356c5fdd79", - "0x000000000000000000000000000000000000000000000001f8b97ff8c78f3f9f", - "0x00000000000000000000000000000000000000000000000cf72074e065bc22b3", - "0x0000000000000000000000000000000000000000000000000002dc10ffccda59", - "0x0000000000000000000000000000000000000000000000047bfb4dfb23cc889f", - "0x00000000000000000000000000000000000000000000000871e52274633f4bf7", - "0x0000000000000000000000000000000000000000000000013c49830a0ce95ff2", - "0x0000000000000000000000000000000000000000000000000002c5ab7c0bb98e", - "0x00000000000000000000000000000000000000000000000b0f322f57a86900ed", - "0x000000000000000000000000000000000000000000000002e496ababf56e3cd6", - "0x000000000000000000000000000000000000000000000005dd1141d5deb2050e", - "0x000000000000000000000000000000000000000000000000000268dc87f9458f", - "0x000000000000000000000000000000000000000000000003334a597cd9ec0a0e", - "0x00000000000000000000000000000000000000000000000645a57625996ab518", - "0x000000000000000000000000000000000000000000000006a2f7ffb16256c45b", - "0x00000000000000000000000000000000000000000000000000027ca8c331291b" - ] -} 
+{"proverId":"0x0000000000000000000000000000000000000000000000000000000000000051","vkTreeRoot":"0x07fb5e90182eefd8cf68a443ae3b87cd3c2d2a17f1fb1f1de931c4d4603b148f","block":"1e812b3df52749854266598c9ac051ad16abd2ccb23eb301552481adb298ef6500000009000000000000000000000000000000000000000000000000000000000000000200b46f4321fb295b47672e53d2df6674826a7dc26d0a8b47cd1f025ea64fc80100089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb30000009003533f0756b096c2128003467ad6694209acf0dddd7c23db4c2563b6482f7929000004800a61b1e1e6a51ae92c76aa05722659a7fb4dc14444cd377106b0b63353c7ace4000005001a0e7078785d1a251a9a18e64224ee1e9914920933b13287314ed9957de39db2000005000000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000900000000000000000000000000000000000000000000000000000000000000130000000000000000000000000000000000000000000000000000000066d6d5fc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000017db9c0a08fb2faa5f28f92d4ec55fabd85eaf856f362831a1d29850a8ef78791a1a44d40000000a0000000200000000000000000000000000000000000000000000000000000000000bec47b002039c7a317df8635cd038ce92519af7011d79bf518726cbabeedc559d38dc85b30fa6e781fa18d34a511f88df5c42237c69f3e0e24206daa534a3b4fd42a01e0a0204652cde1c31bf2d24fd90de0c1e39c9a6e3e2663bd494feb38b8c4223397ad41f8d6b83e24726b16d292e9f36315291f1b3a5a2b73f2ea33fa623ce83a8a60b000000000000000000000000000000000000000000000000000000000000000004080000000000000000000000000000000000000000000000000000000000000204000000000000000000000000000000000000000000000000000000000
00000000000040c000004080000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a6e5ce5a42578d9e7dfbceb8c9c5c9b716544833413702601a8868d800789eb470e2bff676f7605255d020812dc33a17b5d3ea19373c4bca05fee5bd5055cee503df935668b8418510a8139442696d8841641431cf38613c6024214c3fe38c9a6c81eb31d74451c8ac3ddbdb75b8fd9e4b0142ff5fbb1faae7b51e09bfde59406b6ef2785240977ebbcbf5f246343b302685ab28b80d7ee291749f1dda3f3acf42827011eab539dd456d8ff08dce7da737c89a608fbe403aef9b60783be8ef3c07d5ee2a453bff345d193a62e9d9a281b3a852080eebf3438e82a20027073667b9c1e26edf28a3f767d78674a1a4ebd6e52d664710c5e6c5e0fdb8da18f17c9706573b9fcad6364b1bcbc4d1a22ed457062f4c4e2580ae3337de89def5adf646903b02ebea7736f5359785b5744c1eed7d1c980b8c9a0c312244f02ab6ed6ec9f22e4998e0d48ccbeb7bcbe649cb7e798d57870b8ddc40fa16eaee6b2e6f418d1fa817816ddbd560dcd8571074b039442eccc4059696e39ad016dec14782e0624a6b057c159ad46224e9628c4095a0e09178531ea2be02be5777677bd988a32412f90ff4901963b5b9cc6d1b73111770cdab2889d62a1ca2f98520626d3e60af0000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000945878ef5b770cf2caefc1fd14531c5348feb5e4c59d3be7d0f0790a80db4bc00c77e5c5e0eca399cb43bca872036feb14e1f26d11e928313b4f3e6aad3b9484d9a8661d4196dab130afae0e3b4a41e37b3a0aa2c10380449faeca518049678a2303953a74cb9f76bf1edf1807c4f34f755b8f8fc24bd6555bab35f6d3f1b1a3d109c099a2c28da06b59bb2b77bb2a1d2db5134c8e455c05a8ea78d7ea1f3e3722282939fbcac8917eb3d0dc0f264598073c480208f1041874fe31f69abc25b594628b286db5a05b4f670ab9f4c58c99fdfdd3643ceffbf8f91c6b87f910a2b2eee0c4e07b168a4dfc057e4635d2973830ee28239d96646ab35ea24cc69faa08107e0d1b6b4accbc109531607f976cf22bb01f85a0597c87afe9428bda539b62f9191e4aa89cf0c0d032473216d8ba087699774c72a7970407bfe6bab05f9ef54bce3c1917f60c923e87e7f7d0789ad66d4bf82dcc11d53c43b888af1883f4d702485ad81d9d0930ef944f33168ea0c6287b018537239650f05b3ffa83000448f729fa47e00420b4088f18bed5690d4f3771e336e
66d337038ea6980b32d7eaf387a3035b61eeda77cde0276787534e9ee7db5dfbd0d6d7d58b0fec992b389a40000022800000224000002201d0a6498e4f1fd6716f7167e2589451ddd0c03d259fd64e251496141f69fd286000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ecb5c0e9ffa674918a8deee9e0e38f9aedefe08087198e22c77eaaac36c228a7fbfbcb32a36a603485d90f169dc5d213cd76e0e96500e4809c3a037e0709a964ee296022c4ea63174ad7faf26bbed544ab1cbbd48b88b1573207d40286463e74bf8d8dec263466062c0062629a6adddf33716876a8ec4661d26952b9510ef4957600e30dc12f706bc583d67ff75db2a84cb6eadc65bbc3e9e6eb09d9462d3d83ba791deb54c31d4456b0fc18e1622eff3245ccb187e0ef311ab7dec8d856090bb9beecd088b1e3a2dcd1b780f9a3a84ac36fd54d7977fe2effbc21beb1f192c048fb9e4d2b2b2628ec86a2496d3d5b998d86452db745d5f64d68cf7cbe5c2c0552862196341d91a35f8ffadeb57f0d2c463b26c11669a7facb82d2f71b5c8d0b5ee8b8b60b15a5fe55beafdb5000dd2647b38c90759b0bca00cee614a60ad01233eaeff9d08e8afe14ef5d8b5a1da23905c4cf2d4d1cd5a9c16beb377e50ba7bd0b1d9e5bc3319d0aa20d4961aa79a35f2c8c39eb063fe221298951a03815107730d6377e91b8d81d107e28f487ead6b6b8a52564ae9892b2c21ba0ecae53ec7bb4c45db0c0f3715efbf5d485f88a54300fdf576bc99a22beea6c9b790ce7cd000000040000000000000000000000000000000000000000000000000000000000000000000bef545a000114b7af4d07a22682d559ededc223bf2f9a785efa3aa8261a284985f65ebd0d2900021a074b13bbd403b409f06afaaa14bb1ffc718ec250d9f488d6019a42eec9463700000000000000000000000000000000000000000000000000000000000013882a73706978ef632308b80c8f27f8f234bcc9680f84fbe714e0b7bfbfbebdc5b9000000000000000000000000000000000000000000000000000000000000138800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000400000000000000080000000000000000","proof":"00003e84000001f400000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000
00000000006b00000000000000000000000000000000000000000000000000000000000000011e812b3df52749854266598c9ac051ad16abd2ccb23eb301552481adb298ef65000000000000000000000000000000000000000000000000000000000000000908fb2faa5f28f92d4ec55fabd85eaf856f362831a1d29850a8ef78791a1a44d4000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000572c74822c20edab7f32c1eaa6cc5d6fbf35e2642f613fbad1fe675cb65b2490000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000900000000000000000000000000000000000000000000000000000000000000130000000000000000000000000000000000000000000000000000000066d6d5fc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000900000000000000000000000000000000000000000000000000000000000000130000000000000000000000000000000000000000000000000000000066d6d5fc000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017db9c0a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007fb5e90182eefd8cf68a443ae3b87cd3c2d2a17f1fb1f1de931c4d4603b148f000000000000000000000000000000000000000000000000000000000000005100000000000000000000000000000000000000000000000667141da186fa2ee6000000000000000000000000000000000000
00000000000b604b01ef8f0d0cbf000000000000000000000000000000000000000000000004aa7261e91c2591c700000000000000000000000000000000000000000000000000022169bf4283ad00000000000000000000000000000000000000000000000da42aad5197c6fae700000000000000000000000000000000000000000000000df94cfc0977edd5b2000000000000000000000000000000000000000000000007c612055fec21ae22000000000000000000000000000000000000000000000000000230175557a7ad000000000000000000000000000000000000000000000001de0f097127c3548c00000000000000000000000000000000000000000000000c907f4f00a40d52dc000000000000000000000000000000000000000000000008101d8c6bd18462f100000000000000000000000000000000000000000000000000023015745a48e5000000000000000000000000000000000000000000000007a8046de88646c85d000000000000000000000000000000000000000000000007870e5c81c68314f700000000000000000000000000000000000000000000000e0abb05f5ec41457d00000000000000000000000000000000000000000000000000008a1d3a84de8a0000000000000000000000000000001f23fafa063b254a8c938bf73d0c2bb7f500000000000000000000000000000000002160241ec6448ef9b3dbc3490425700000000000000000000000000000003cb7ea1f50b4ab963661fcf722095ca6b500000000000000000000000000000000001d0f698346235ecefa3c5849d39f3800000000000000000000000000000008b2df2fcee85b292d3cc2497162b8e5460000000000000000000000000000000000041ced241611aa8b30b8768e7ee9ef000000000000000000000000000000f15d304041c9c10e532071fe58f28531110000000000000000000000000000000000040f11d5e49bf7151d589fce774d7c000000000000000000000000000000e8a3a26d6d023d9318c786305baa4736cf00000000000000000000000000000000000f65d2f8f9b4ece133c12f49c84b07000000000000000000000000000000cdf18fabfdb01dfe2c2cb4eb389e4b9d3800000000000000000000000000000000001443e085441ae71d27f780b0a43356000000000000000000000000000000c9df6dad369f2129e064b2164736947c4c0000000000000000000000000000000000044487a9cc727150abac573a3facee000000000000000000000000000000ba41d8227d0ec33f990c2e701bb5dab629000000000000000000000000000000000005886f8ffc752fc94a017a75a45fa300000000000000000000000000000049a4526224a61bb696cb36
aae680434960000000000000000000000000000000000019561ec46d7cccb45c2b02aa5404d500000000000000000000000000000067cd0afd4e592538a5c418cfed003c0fe700000000000000000000000000000000001089aa72005765293614998b79cbda000000000000000000000000000000f2c1de9da0c658db79e6d5c56e83f4633f00000000000000000000000000000000001c93bd0bf50602d11738654de93779000000000000000000000000000000a1af1e36535d721316b00bba52ae988d6e00000000000000000000000000000000000b3a26657a58a697c05f01f4cee6a4000000000000000000000000000000151fea819284536d2b1a26c107d5c3a37e0000000000000000000000000000000000272cf9cdfada33e3e1616af9c5c8f40000000000000000000000000000006e0af12b8c79cc228fed340d6e90d5361900000000000000000000000000000000000b12a3c641fc797bab484aa549d6fb00000000000000000000000000000071dd80894db819246eeebcceed241778d8000000000000000000000000000000000024b77cbba26f503a8ab08989fc5972000000000000000000000000000000fd2372b33aaaccca5ada4259ccbde96ab600000000000000000000000000000000002375822e4a0cfe03e2c010aa3235ed10d078928845174878bc4971f90665fbef8cb9c0b7e8a5f9e9cb7615207dbf0d1f93d5e058ec88e13f93fc44887af26138a72e87c1d0ca975a167f7ecf8240f41fd39d410139af76ea93183e79aa803013a3149f0dcfb4615067a472154702332f6b6b9788e53c87b6e355bc71424f690ac4c70991a6f86b4addf727c5d90da21161e2a620a2af2dc7e07958ed6c0feb927b9b8e6d6e613fa98a20c8301f7029156b900b3be715336d257db43790fb19267e13ca675a2be47c0f9bc6275e76e1042c4f6249798278811a0bdd2b1c2294f067c1385040115303354538f9e497302e2968168f9c1e433eed468b5a5f3fe3574bcb65a90f84e2e90e804bc6d97bf40cc360bf406f6d0646eaa5d0ac510ea813147a1ddfdb7cdf65aac20c1228d73519e41abc9f3a9ae6f7a084714d2ed2d8183b43bd31b3d306c044c9dd1d39dc9e23af282f2d5352842738a29ff692411c6fa4f0756a6883c77939eff2a6548d7d10f1e082a3da4cde73fc119effcf5ded0a8f3b049a1f27569704d81e9929e3fd1579d400df48d4f0bea505cb9da1d4b18ed3182e258ee26f49e11c999ae1209c1d7f38654de7fc5f3cded01fa4a0d2f936267ac6ea04575bb13866932ee885390c104ba28ceb69874760886c9fb10b0dcc2055b356816acc672a71b83ec1f7762c4eb3703b2e910473c883c29ff3aaf276b252540dad8be93f828d4d1715aac31946
fadb92b11283dd711be79967ad4f6404353510aebf5878acb2abbc84825528f9d482c445ab8a0c984654c2f3c39e8fb1035903dbd0c37723bb132402aa811bf5fc45985bf9fa715bb4c5f954cc50e78044450c737c0644745a8471a5e9811c4407ab143604db0ee21bf40328ff66ed6ccae51587c93ae03b4b9429c8172313fb7fabd1ee7e91e001e15f0bb25bf31283dc26f7d4615a1fa44c652d2fba3509ed3378489dd162016e6f2f905198b2416d2659d81f85b8864fdac20329f99904a1b00e39dd8bd0279c402f50776c0e5cf53ac6fe3a93dfd54da0bb22735e5313924eaff1067a2f31717cc12d08d351125d65f8d7b484afa715adb441f2125223334502d82e9ceeb0810569b3ad878ee8f618a5e967ebd9e068cf706ea9dcb128112cf143db03f332590540782b76ca2767111aacfadf9b81b1f77daf22728b207c825bd6df289460c5bf8f6203690635e1f2b01011352faf1b0e588489ec02240e33fe6f724d2e0ce3d79188f7a21c59bbab5d8b96c4c1b2beff121132c822283ff4f5e4940ff348ec92f7852e47572674cf532f5e475b2e1eea45505c93482677dfb213ed49d78f2945a77c5142ea7389f4c2cc19a109738d24717352eb3c0ede492407c3c6c41d11d3abc0bc8beda73359de14776c3c377572402389375b28334e116c628b033dfe5f5c85bb38461783d15e9f2110ca46ce6cc8ae7e213f2e325562375730aff4553280ad1c2c07695df662fedf961d52ccc4fae1edfe3a2acee76a358e36ad5b0effc188d069f8c381532cfa3bb3b01e4c494437165f8630106baf1d0d901811be719170898ecd81daaffdd423cbcd794abed6685f534b1d98de26f6ec1e8b639e47345a1b3b61e97c1dbd0cefe716ddc408974a8930cf1f2c00987e87951f8210b6ba84bb3440d7ecd36571c5670164f8060da3c665ed234f87f46164bdbe18fb36b0ddbcbff33ed93411e1e04aad2b2c6f477e91b4982fc446a067ceb109fca883a2b440577033018671b359ec3ca52f43de0ca0063a1eb769869f9f49fcd99e54db0e2154f6f986d25e4261e7e18eafaca02cfd7f242a821cbaedabd0a2de67f47951f85e445aed3a6c547e5cc9fc17f1856a4d3e8a2d673b81a74aa6392d2b34217a903550c6255fa4bc9b1e8d866eb079dc08776f2103f2fcd706df54116b74f66ad52a03188ec932dddc946419d9ff50fb4f18e716987a75c0c2190334ea7a824dfedcc64271d5ad717f06235e407e6499d362e600e02f05917fbb640884d1dde73e5b519830b0f15056f8395f800ffafee2128a03ae1f88e39293df5ec54655ba3e404ea522d81616019394e3bec843eb9a52b61a9c88edd13ae4207ad66d1a38baf46d6f5ddfd004e17d567a28f3a80bfc49b02f865be4360e4e15acd1
e6e2fcb3e19dd8f6d462e2110169e35f4c33915ba42100b6ed0231a5483cb7f3d4f397a1d1af9e02a0d0778fb7eb9a765d7b432515b11b7ade835b451150a8f3976a6fdda92f9f6c1f838456769b9bd5ad33bbc641842153756338aa8ccb8c5a1dbb53a7161a3e2590871fab30aa68b3b8d23bff7ab527432369c72a3f1ee7b6fcb383a4128a7c097d29032e5142769237ece97e9f040b97558610eedb00574122cda08a43f1ba2af4cee462d6774e96ba87abd2744f293a3f6d171b4d1d62eb6855f8b8bcb73327998436bf3b4062e0522e81badad40f51880b43e04fc43488fb59d2c3e1f98c09272c2ef5cc8a35e6fedcad99cab313ef8eaa93afc3aacaf6b055637680a163c8ef3da49c7d2a3804b61690f1381f20d30af776051135f1e1467f87bc6898ee77715a6bd106f506ff2d8717b65f9a00881b5239007af56b833d7372bd2fdb07b7632bc1ea62e68a1dccfb2ea680f62bd5eee1e52c1165c480df83341f06c3683d9a07a4b51c9da8a7226820ad24ae160b6f5cc23749d593175d13ff0a313776ee26f07b301de69297600053f6e83c10e844b2d3981e581f3c808a9d18191087f526e1526bfaec04a5c405c05bd59d143d1da437aa44baf1eb5e8adc9baf39685fdc8a6e9637542a59004837954f5509882f9f56aae4aadddbd0a75407916f078ea3e52289c4498317ea77205946660f822bbd7fb2d903ab7a9be6a8bfb8219dc010cabb58da0ef69356d593e7301120603449292c7f6568bfd52c353e984a47829e1788eed3bace4f608c356bcc9101e0d119993b0f409cdee91e88472193da68809d9759d9c045633e1b1a696d0e29b8c5b666512554c931983d005148a5c0d35db645ed2b94f33c99fbe4e441462846f5eef01b770c6d5cdc484c4f9d6000a4237170982951daddbd998d2d4c25238f4392b6448cc3527e58567126c22a54b0770c0d3e16e925aef4a2dcd8d7e02d27ef72b946edea0fbf09256d4aeced9edfd37a75bb377a2ed8579b82f3a0b90fc92ba55a6abef345a93de418106ab9125f4ccb575e1e6fbac17cd05ca906a907f9778decc92b43dc2b9e02e1785720b825f9eb1bb8ed7aacd782d54ea643890f81a5ef3cfc1dbf61d853b88d957f9c727a2dcef200d434fad50c611697b5250fe55590819d6c0113f1ac0ecf0f06f01f17d509cc382d2aea8b8fc45a0a01f613a8d9bbe5d606bb3eb7903ce708efa3431a1aff7fce556daeb80c31a8d46d510b970198f084b08f4bab8f4e46e4bacc42da01e29e52a02322790773ab221aaa1b695e01d1b872dc0c499ce55d4675ebfd3a74a2611d55a9d3c19ceb4721788a1e0f5a77b4b5d2d2402d01337b270d2955d12f67a35df4b185ebb059540dd3fc0ee0f1756261836a278c1b420d1fae2091f0
aa54562b85366e46952383671af40df80330e73fb1bb83361e1140ef3ad0c7a7f4b0fe7aecfc6301e84f450c65c21aa7983690d8ad510e9af1beb908d746f0013c8077b6bcd8186a4f9095ac4ccc2c7b5274b4f43ed06f8acf8e9ef04e4a8dbeb929b6dcd673526040c5ae58ab9418ba589f9c63410d1ecd39597b87a60b5ba7c722dd912f0c1d3f15f3ec08f2dc19967a9939d87964bf7b538a064c56afb08758b833b02da2c60fdcce9d91b58427616ea25a1102ce23a62c557944dcfeae2e3f63b8e1db13ce6db5ee56d38978301a80a0513a08387c9faef5243bc9f04761491b3b5c76b301f7a4b44224baf60ff2a1eb1af30f7c3f52f4f5316cb5471cd0fa74900c7b5e4c4b7ad61f0d77d208043494a7918b14a322fd401c14b9f0e0d8c5a76d21fbd756b2c096f5b196a20de1cce6847086f7c292236372303508aeaa1234a92a9d716c43f24128bcdfc519a424b9388429bbe75fbee7a9ed91286ea685801494e9a10218f9dd44c089da25e96ab3ba96e6ea6a9b908b8bdd5d9505b58fb14d979e5ee5559da9e38863f71a19c52af38f3a71c2d1b7b0c1e5a19c469bdf00dbe3f804fe39fbc42ab0951d206e0a9eaef931ae904b6b33ca40dba4e7ee23f6ab6ca6a76668e9c82f81a1ff1956d37364d37dc6faf37796304f89356e78f6882e54c11757018c18d4da1fd21a95a00c9fc2247a4303f9f1df4821d8f037308065ec120ea4c284ec9d6159df0d42b61c4b7595c2431fede96f22a5548291179523622c23ba5eaf08f08c6fb12e90859674c1b630f11c559820f8b65a32cd1ad78435f22dd0ae820c87544e790a1eb28e3ae17775fd8e19ad33e73d45a749347da52d1b7985546ab4dab457af250b9a086fcd04e8af09aa729725432a45ea67c39ab12788739aaa50182ee15830046b60fcc1c0c23a6a565fcd0dc7467f335c2f7a041537d5af1b7aab6ae0f821a22fb88a07117842f8d984c6ef6ff8326921794069c6d84a517acbd21b8b752d8331a7b0c060bd7b8f9ae7bf04cbb5cfc025dd1bb510c2e2f66f6451ec066d29973627aa69efe26e27c60bdd2aa5a0203d1daa2f2cb4fc426a031f80641d661231039abe534aa1d5cf64188e24d024a20566fe554e5a7c20197a8e3fe1ba2b129725cec1bdb56e78adde67262dcecb4b2ef7b46b68a64cff90f5aa9275a3272767ca462c15008124c8191c5327337832626d3fb676b58b7b25f0850702099e22290562e8e1d7eb08c0b5a70c9d2d8735c5e6f281af8b13468637116bec694a2daa79bc3fd180aaafbd7a6fbfa68b7978405558768c417481e4e0259117e95311e4c712eaa4f74d0e5ca6b6c3ce907244dba8c475705b12abdb1e8f6c45a9b10e031c7b7afb401e5acc66743bc10a34b59ff3a14baa5d1cdd56
283c85b8be6a18cf3ed8d6888ec791addd0dfe10227d05d14f791ac4f9c04a550da132e39c2e08bdefce980dc6564e81252bb2b82aff55cd4d804c8fd5095c78b57819dc784a03b3b1f4ed79dbdbdfcad18db98779d44f2ec06d8f30de8e4efaa0d658ada7ca0503221e7ce843d6b3ab578075e340ac59db9b9eb148f4f085e838aad90139c104b84bb5a480b9d5af8efa5d6ea0476febeb2670b840608e5d91c3f4a6993767230cca072d23fd79f8d724fa71b878eb1905949c3f8aa6e04a91c1b9450303bd04170662b7a4d3594089a687d0f1e779d103167c1ff8f6443038b6959d8979860c8261f3311e1bf9e7b0c987c399dbed455ac22b5105ab1463caeac74b56b5642ad7241b21a547044079cce998b95e9b9ca56306b15cbf156cf0d294d42bd0fc258b8d45093d0457f467f702b3ff0c9be6aec6cccf5527dc00e466ba3c86687a2a3ebd80a90caed639c19b143170f383c55591ac206e5762ce6ef51ab0562a501f060480cedcfdaf1989fb04fabb4ca541e97c6f6dd4857e39d2ae7dbec100d908b923876c5bd193cc1c8f201da399dec5b5f61d336e81b1170118fbb66bcb160347028725e585e1fdd40e70961fdc9c7fb7cb34ca084aace4372a1f8903cda62c9391463411481770bce5d35b891b57b6657aaf564b55433a2622f51b452efe16860f3225d7ae19e372742fb84659be3e3f37c006305264599f835c0649b2f1052e6a41fd89dfa4726ac74d9244213ed2e0d0fc04be42a8452d75b45b42e42d0677daa913e4acdb6b00aa9cb0bdd698ce6c36259f57c36d5d2795797af649da2ae9f650ee64b782e3aa973a34cd6ce81ea0dcc56d36f53fed6db245e11ca0192d3ef69b4dddb040a25ac3626c2223f06677876e4d1d957c438a82b86ff7e5711bf14138bc4d00dd92f1ec7c66ae191f6cfe7ade089b7c23c82b3e8b80b336b414525731f3a3e0625d5b3d7f74bfcb82aefdbf5342a6b93685ac01e35617287f0f7f75479ba5753ee1484bd7cfe0568053e41f5db512d3c45c0833c12f73df4826797ba3e1c3244e0cd3a6f45f32ce3136088a01b186e33ca7f83cffcbc8f60b154b1e2127f7795ddb056d00b07762a65fe6b4dc70d8b7d7c2e3c87ab81193a3190cc1c42be59e868f650289b8ef15a5f499c9f4b03c4c45c4dd85452992a7e21513a661810eedc3ad332c50a5d9e01b215edaae5067e818f2eb2aa3e1a01f5410d3c4c15834b5ee5695bc3a8494e2180d0f5b2b66cf190ccda60dc0c270a682061b6f5b1c31ede459213b05b52583e59b3b26d2bad6da5f5f01882c56671b07034c9322ea72fb206caf70210d60cc0b8005f9e4e56b9b3bfe8bd48d42e944d11f56ef469176f0b4fa60a6e014c691deec1fdb0db6b76282f87f53dc5c2f5c64301d
16dab5d730bab5de5360513f3591dd971178bd50bbde84eb62f27c9c0623047e36fd1607ddbfca8119d550bef9009a0f0c34e53f06e41c4550b8ed79a73b1bb265f84d3a366be954b8d53aaee05fab22fd6f5ed6faf793f2dc55d207fd94177b86f826190de68845795a7c6d11b88bd0bf2737bfe7542c060fa5d11f4a511f9d9e5374a3bdff6673fc46870414c3ce9b6c1d8b1dd6ebcee36cd0adbae05f10f8095807d8df48aff33485cf6b96a54e1df8f33064f3dd195d917197b65e231ca39d74c4a4d4a0d1646c41c651804ed80352f036cf83377f894642fb8475ec2fff1beb238076c0335237eddc67be6b8770f983a12108ab97eb8c6728424b78153858e8d23e7e458aa39d46da9e3feebf4888b3dec4cb65daf9444a0d889b402b5fd3d647985b05e62c203f8604074b23700be060ca80da87fefa8a389a9e060409771babfcf09e8daa783a185a43d463cf8f6f43d8c9820d5576d9ad2ac56b2293cdc311f1e09df5e9aec08553ec711d60b780e843f4b6e1ac8eaafa5b324b2bcdbc0b76162335e026b6a374aadcc096536e15c9ff2bd4d47cd7a280eb1b1600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000022df678d7df39449a632145842a2c9d2a84b2a8dee09477f98b6b082383d33ae1cb025706e0d92b6c5624f124aa3c7a06e77f6b093aed56ac0af4683e151709126d32131fa471c20acf40b6a0d66b008965e0676766cae62bb3b178438c100f209fc4fad21885046d27d03b566973d955f0265dd4711f58217d4644d6d379597227deff490125e37b303d0aa18d30977d448f89b23e1bf025a1ef4ad8375c454272f4a2b56e9090c13c27a0a6d4cef75a1ef8124545d2613af50604e78cee90216a65b031bd614613a0cd2a02a7621f6b3244e8e7da8b4a57e332a40be66d0db2d8c09e46ab777fc1f9523a657c9e886921c
04c8c19b2d0c31255d36a6c9bf08247e8698791fc09ba3b9881a1745ae436b872eb92afac4a29005b340b1fd9e1217663070115f7c4b913c2e528b05e789930c42ec239731d34df59b3895ec36583030e3b66ff46aa33e4247036fa8065520544a6cfe6e01e0fc67eeadb84b462f19bc70fa4d5722c5f75656059b2c4e970ed7789afddb529f3fd6c185ba3efbb91e635c57d8ffd4c2f025169d426d99a6e0d8fb7550e5d7951b9b32ab15c17b590d88a842987278149be2e3a58d2126690fa9c764f6f0d4ec45f150e213be727817078f06b940e306d937eb03b80d8afec9eb5fad3e130739e8445ab24c8caad607b735f4b8dcb136683d7705507f5d6c12127e7b6c5a47ff1ebeabe8f748443710290695a88dd754ed720e3a9ec51703e85b18884fe7a3adda56421c95daaf6c06215bb0930b2369a3d3864a9eb86f51d7cdac587b916a650c7929f6380885d61cc5119679d4fc7e0465812edf339cf06bc38924c3141778ecbdb4a6aec7144815d347c81da3a5b14aba68f405bdf5a7d22a5e9ca8c877ae89cfde659a2eb85a1a261b3416e065370c87fe5472c133480011d8e69c8202451526e997fbcfeb902774a429176477a5030bbc0b1e1caed5338dde08737dbb9dbf0023dc1999a4f104f5d46cb776b641b2f829dde96c027cf8ec6447b001f517ca76acd240cddbbb19b3664dc72461e9a6e30f74121ea7b1afe2a583c43741474a716da6646e0a482c00a67b14e27ab1c832659d6804e698c227d65dfb6f81befc77cd5afe082ae20bf91fa9b2571261bf95227c60225fc1303dbd2cf6fdfbfcf2316aa22dae4f2701214afecbdb7e211538437ef6b4a352ad80633f89e29ad325e64f5fab8dc1162cfa4c0a55f6604c0752d5561fdaaaecbcad1a686081751b17944bcd6869816e1d45e1ab0bdc33786ccabda820aa0581cf65250c6cdad4844d29267bed208cb61607585280ef38ef99678a3418f9d13d79145aed7c3adbc7bd10321ab62d834a0351ed8637e5df4ae177b9131df7adeb3db98ed14e8ca7efbf1166cf2eb38c682330e1a1161091c2cd0b855467ab81c2e0cede50c25b9e65988708c2657cee082d2e8596ba2b2d8fa2db3a0a7c4cc1cb9526c11b6074b6858ec8adfcef8a41281ab2d5d7e7a5ca198ec9c64e6f0f2bb46baa4f40171b2487a50bcc50d56336940f5761ac402b1011ad1317904c78ed2e5c6ee3ec7d4556c28a136a3cf564d5ff1e92e5b18fb3ad4478d361be5fc85a1b815a30366160929569b5eae1b1d0a5cb1f19cbf922326dcaaa3616c481ab884b5fee576b39ff8ce37c538ae7543c3ff9045f72adbcd628d77c21cb9c8a5c26c3cdd6b4f87330c4bc640200d891cecda008860e8bf30d0ed0f4b99acfa187591a783b32191168afa017c9
24418c94c6b5150d737d8aadf8cb45dc2a0516dd9b95e2db4c06294d8d401e1e25ea80fc64e21f868e8fe90ab941a7ea8b57ffa7089a20d21ec42248d10e97110b4f08db6d1f02107457943693c74c4a7981cec70e624e814346ebff4c6e5d12b25c6d4f00c8000000000000000000000000000000287597af45536cb902ebf2d8d55e98a7e00000000000000000000000000000000000299ed570a926fba9ff15b1f026bf7e000000000000000000000000000000810d513449c91bc102818ebc44c20b43bf000000000000000000000000000000000001567389a8a8b2cfa6964da0e2d2a60000000000000000000000000000007bc08e5ff56bad55e91418536927a70ea50000000000000000000000000000000000002ebfae7ff41bd824e770395c2adf000000000000000000000000000000ccadaa8cac51b2775b4c583f954a3cd6cf000000000000000000000000000000000017f9742f19e461f1d74cd619bdbfb2000000000000000000000000000000e460e8ad60368c2420c0922e9df9ea38780000000000000000000000000000000000239df1eb63f39b379751ff17a84b18000000000000000000000000000000893e6d1649e77556f5ac73a6795f137197000000000000000000000000000000000029623c02d3c9addf874b23dc361b170000000000000000000000000000006a18a62ca37ac001c28c06476587c300790000000000000000000000000000000000236dc0b6e7fb8eb4d34b8db08984a1000000000000000000000000000000ea32e216fb7221a7b6f89d939f1a9d28d10000000000000000000000000000000000225b03864da60029d50701c264e624000000000000000000000000000000dd50e1dd547d444a9a4c9d209996d5e48400000000000000000000000000000000002b7ab5c36299c38c2c253329d8cea00000000000000000000000000000000cb9c800a4030940e66b0601a2510d2e5400000000000000000000000000000000002454af356b574f1933360793d518d30000000000000000000000000000006f93b6cccea3e5eef698b02a98ecc7a40a00000000000000000000000000000000002bac9db36de89e4e23b4dc38860f3800000000000000000000000000000061312e98fe6ff139acdb60888f8fc10b7f00000000000000000000000000000000002e87e2375017424cec063b6c3d36be000000000000000000000000000000e0df7f9e61b2eccea530b0390e9cc8b0a10000000000000000000000000000000000283b0d87bd82729f95c876767ba5210000000000000000000000000000006ccefa24d31abd3401ca53d5869a1a5d740000000000000000000000000000000000285db32d93396988f619dc365fa8e90000
00000000000000000000000000444ba41c9eacb5ca7ecefa5b8b802d932500000000000000000000000000000000001a151b3210f6cf8694178825ffc6c500000000000000000000000000000050ccd2d83b5c62cdbde3a07f279984332800000000000000000000000000000000002dc59d99b1eced92ddddf155f11aff00000000000000000000000000000031de0724d942d75ad3d33313fe7031ae4300000000000000000000000000000000001510846c331f20b19cfaf99f97be9b00000000000000000000000000000079717742da29558faaebf3d15a0993325500000000000000000000000000000000002b82be8c0f0376dbd63daf1cd63158000000000000000000000000000000517a730d243357b21b3abf461cfc30a0ce00000000000000000000000000000000002e0e8bbaa223de23104e943705711a000000000000000000000000000000a98cc409fb1befb1fc984f73e41bf33757000000000000000000000000000000000021c385445c7209e4778ae6121b580c000000000000000000000000000000ab3d39ea2d24c26c1dd8d710aa3660e1f90000000000000000000000000000000000194aa61645beea76fd9f74433536560000000000000000000000000000000b1e0e06dfa8f2e957b294453b35e26de20000000000000000000000000000000000218f8069318cd9e406a90b6a782de7000000000000000000000000000000da837b8c56a0726eba730e17128c41f0b600000000000000000000000000000000000ebfd5fa7e9fe0d206eb8ed0c8aee6000000000000000000000000000000e3256a764f112b566e757b5e922fc433970000000000000000000000000000000000077cc1f0541bde33cd5e045b4d612e000000000000000000000000000000d7e1d0a569e9a3f54d3b9208004573b4480000000000000000000000000000000000107e5d65d9e6401793f384e3303f470000000000000000000000000000005c34c685fa73d0e612feff823019abf0c4000000000000000000000000000000000003af2bc76ff72c42f94baeafffc702000000000000000000000000000000d24a100002ca9d950a6d8d62c97472c6e8000000000000000000000000000000000016ea69f12cc1558e1c49ff610cfb85000000000000000000000000000000a292bdb84780232874310712faa2855a9000000000000000000000000000000000001a60176cc2f626a221609fa58343c500000000000000000000000000000065ab20d36a6edbb3635ae663f3e21e927a000000000000000000000000000000000007ccc956c13881a5ea506632ec290e00000000000000000000000000000072445a3cd121cce158639530e240d8fa3f00000000000000000000
0000000000000022f3f92c232dcf1c9c954f738700b90000000000000000000000000000000435a68a7adedc247edcb52389dfd70f4200000000000000000000000000000000001049a677170d1701ce16d9d6d58d2b0000000000000000000000000000000dccac93e31eaabe5d987dd3b70cf8fc2a00000000000000000000000000000000000a8076a640283be7a35acd9aaf9982000000000000000000000000000000a54bdbe26bec3cdbf46842ce86a4e0060600000000000000000000000000000000000f9d61d2c19166dd8c05f7dde542af000000000000000000000000000000842f827c933b85715050daee11dc7e00d0000000000000000000000000000000000007caa32a2b8d7a29211bf3519a84a1000000000000000000000000000000a77dd6fcdca4bf8e34421a52ed373a055a00000000000000000000000000000000001aa66599fe63272e2e39974ac44cf4000000000000000000000000000000f86dee0dc5993eda73e937732b844fad010000000000000000000000000000000000248046bb4258f3ec8042a1ce81f88a000000000000000000000000000000b8bfa0ffa025c34a880b56c9fcb0dc6d030000000000000000000000000000000000207bc01c1d93533f447b7ea4a631ac0000000000000000000000000000008c9c301ca78956e7398c1d56f0b3af2961000000000000000000000000000000000014bd6760d3d8e76c3471304d9e97130000000000000000000000000000007d2529c096c503b6fb6775eead5b7b63cb00000000000000000000000000000000002b6cd83a5eb3c0f478967410c13b5400000000000000000000000000000066290ec2d468c27172c77a30ae67cbeb460000000000000000000000000000000000192233b74f19e7c9d09774a461949c000000000000000000000000000000bef4315fbdc5c11b64e690590000fd639700000000000000000000000000000000001d2a497bcb2ebcdb792bb5b294dff5000000000000000000000000000000e3e8f17a00e241324f2e67fa86e65899f60000000000000000000000000000000000043d63eff2ab63ba64ef387552d7730000000000000000000000000000001fc99b5f4bbcffa61b42ad8e771bcb105900000000000000000000000000000000002e0b45b0fe21eea53131b8cab8bd9800000000000000000000000000000032374367a08c0354b55b4991571d09a57c000000000000000000000000000000000028fe0ae3d71303390266cf0c27aa7000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005312bea3ca85fc921dca0b5e44614872680000000000000000000000000000000000123dfc33b59282789c9cb090c827a900000000000000000000000000000024a9e8c02433f76ea44b6827b0093ccc9f000000000000000000000000000000000014ea46e331036de9a4caea00daec29000000000000000000000000000000d6e16003d17f7c871ee46a90b9f0c1ab7c00000000000000000000000000000000001067dbc835f593cd06985bed789d2100000000000000000000000000000046a2d0b3d95f5f96aa57be679de0957be6000000000000000000000000000000000002c142dceccd7e356679912a654c3d0000006b","aggregationObject":["0x00000000000000000000000000000000000000000000000667141da186fa2ee6","0x0000000000000
0000000000000000000000000000000000b604b01ef8f0d0cbf","0x000000000000000000000000000000000000000000000004aa7261e91c2591c7","0x00000000000000000000000000000000000000000000000000022169bf4283ad","0x00000000000000000000000000000000000000000000000da42aad5197c6fae7","0x00000000000000000000000000000000000000000000000df94cfc0977edd5b2","0x000000000000000000000000000000000000000000000007c612055fec21ae22","0x000000000000000000000000000000000000000000000000000230175557a7ad","0x000000000000000000000000000000000000000000000001de0f097127c3548c","0x00000000000000000000000000000000000000000000000c907f4f00a40d52dc","0x000000000000000000000000000000000000000000000008101d8c6bd18462f1","0x00000000000000000000000000000000000000000000000000023015745a48e5","0x000000000000000000000000000000000000000000000007a8046de88646c85d","0x000000000000000000000000000000000000000000000007870e5c81c68314f7","0x00000000000000000000000000000000000000000000000e0abb05f5ec41457d","0x00000000000000000000000000000000000000000000000000008a1d3a84de8a"]} \ No newline at end of file diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index 150396a60dd..cb1105b4715 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -129,6 +129,7 @@ import { type TransientDataIndexHint, TxContext, type TxRequest, + VERIFICATION_KEY_LENGTH_IN_FIELDS, type VerificationKeyAsFields, } from '@aztec/circuits.js'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; @@ -246,6 +247,7 @@ import type { TransientDataIndexHint as TransientDataIndexHintNoir, TxContext as TxContextNoir, TxRequest as TxRequestNoir, + VerificationKey as VerificationKeyNoir, } from './types/index.js'; /* eslint-disable camelcase */ @@ -1543,9 +1545,12 @@ export function mapKernelDataToNoir(kernelData: KernelData): KernelDataNoir { }; } -export function 
mapVerificationKeyToNoir(key: VerificationKeyAsFields) { +export function mapVerificationKeyToNoir(key: VerificationKeyAsFields): VerificationKeyNoir { + if (key.key.length !== VERIFICATION_KEY_LENGTH_IN_FIELDS) { + throw new Error(`Expected ${VERIFICATION_KEY_LENGTH_IN_FIELDS} fields, got ${key.key.length}`); + } return { - key: mapTuple(key.key, mapFieldToNoir), + key: mapTuple(key.key as Tuple, mapFieldToNoir), hash: mapFieldToNoir(key.hash), }; } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 45490c6332f..9d1a0e7957c 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -508,6 +508,8 @@ export class ProvingOrchestrator implements BlockProver { }; pushTestData('blockResults', { + proverId: this.proverId.toString(), + vkTreeRoot: getVKTreeRoot().toString(), block: l2Block.toString(), proof: this.provingState.finalProof.toString(), aggregationObject: blockResult.aggregationObject.map(x => x.toString()), @@ -1157,9 +1159,10 @@ export class ProvingOrchestrator implements BlockProver { } function extractAggregationObject(proof: Proof, numPublicInputs: number): Fr[] { + // TODO (alexg) fix this const buffer = proof.buffer.subarray( - Fr.SIZE_IN_BYTES * (numPublicInputs - AGGREGATION_OBJECT_LENGTH), - Fr.SIZE_IN_BYTES * numPublicInputs, + 4 + Fr.SIZE_IN_BYTES * (3 + numPublicInputs - AGGREGATION_OBJECT_LENGTH), + 4 + Fr.SIZE_IN_BYTES * (3 + numPublicInputs), ); // TODO(#7159): Remove the following workaround if (buffer.length === 0) { diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index cc8bf108dd1..469b2e14894 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -285,6 +285,7 @@ __metadata: dependencies: "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" + "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" 
"@aztec/noir-protocol-circuits-types": "workspace:^" "@aztec/simulator": "workspace:^" @@ -304,6 +305,7 @@ __metadata: ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 + viem: ^2.7.15 bin: bb-cli: ./dest/bb/index.js languageName: unknown From 2be14157abe3b277c58780ecc03bb1eff8dec20e Mon Sep 17 00:00:00 2001 From: Leila Wang Date: Thu, 5 Sep 2024 14:13:36 +0100 Subject: [PATCH 18/18] feat: verify public validation requests (#8150) Verify note hash read requests and l1tol2msg read requests in public kernel tail. --------- Co-authored-by: Ilyas Ridhuan --- barretenberg/cpp/pil/avm/kernel.pil | 7 +- .../barretenberg/vm/avm/generated/flavor.cpp | 36 ++++----- .../barretenberg/vm/avm/generated/flavor.hpp | 8 +- .../vm/avm/generated/relations/kernel.hpp | 19 ++--- .../relations/kernel_output_lookup.hpp | 10 +-- .../vm/avm/tests/execution.test.cpp | 13 ++- .../barretenberg/vm/avm/tests/kernel.test.cpp | 9 ++- .../barretenberg/vm/avm/trace/execution.cpp | 6 +- .../vm/avm/trace/execution_hints.hpp | 11 ++- .../vm/avm/trace/kernel_trace.cpp | 22 ++++- .../src/barretenberg/vm/avm/trace/trace.cpp | 64 ++++++++++++--- .../src/barretenberg/vm/avm/trace/trace.hpp | 13 ++- .../src/core/libraries/ConstantsGen.sol | 5 +- .../public_kernel_output_composer.nr | 20 ++++- .../src/public_kernel_tail.nr | 81 +++++++++++++++---- .../crates/reset-kernel-lib/src/lib.nr | 3 +- .../public_validation_request_processor.nr | 52 +++++++++--- .../crates/reset-kernel-lib/src/reset/mod.nr | 1 + .../src/reset/tree_leaf_read_request.nr | 27 +++++++ .../crates/types/src/abis/mod.nr | 1 + .../src/abis/public_circuit_public_inputs.nr | 18 ++--- .../types/src/abis/tree_leaf_read_request.nr | 44 ++++++++++ .../public_validation_requests.nr | 24 +++++- .../public_validation_requests_builder.nr | 14 +++- .../crates/types/src/constants.nr | 5 +- .../crates/types/src/tests/fixture_builder.nr | 28 ++++++- .../bb-prover/src/avm_proving.test.ts | 5 +- .../src/sibling_path/sibling_path.ts | 2 +- 
yarn-project/circuits.js/src/constants.gen.ts | 5 +- yarn-project/circuits.js/src/structs/index.ts | 2 + ...blic_kernel_tail_circuit_private_inputs.ts | 26 +++++- .../structs/public_circuit_public_inputs.ts | 17 ++-- .../src/structs/public_validation_requests.ts | 23 ++++++ .../src/structs/tree_leaf_read_request.ts | 32 ++++++++ .../structs/tree_leaf_read_request_hint.ts | 38 +++++++++ .../circuits.js/src/tests/factories.ts | 53 ++++++++++-- .../src/type_conversion.ts | 49 ++++++++++- .../hints/build_private_kernel_reset_hints.ts | 6 +- .../pxe/src/simulator_oracle/index.ts | 5 ++ .../simulator/src/avm/avm_simulator.test.ts | 10 ++- .../simulator/src/avm/journal/journal.test.ts | 4 +- .../simulator/src/avm/journal/journal.ts | 22 +++-- .../src/avm/opcodes/accrued_substate.test.ts | 10 ++- yarn-project/simulator/src/avm/test_utils.ts | 11 ++- .../src/public/abstract_phase_manager.ts | 5 +- .../simulator/src/public/db_interfaces.ts | 7 ++ .../simulator/src/public/execution.ts | 5 +- .../simulator/src/public/hints_builder.ts | 50 +++++++++++- .../simulator/src/public/public_db_sources.ts | 11 +++ .../src/public/side_effect_trace.test.ts | 14 ++-- .../simulator/src/public/side_effect_trace.ts | 23 +++--- .../src/public/tail_phase_manager.ts | 10 +++ 52 files changed, 790 insertions(+), 196 deletions(-) create mode 100644 noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/tree_leaf_read_request.nr create mode 100644 noir-projects/noir-protocol-circuits/crates/types/src/abis/tree_leaf_read_request.nr create mode 100644 yarn-project/circuits.js/src/structs/tree_leaf_read_request.ts create mode 100644 yarn-project/circuits.js/src/structs/tree_leaf_read_request_hint.ts diff --git a/barretenberg/cpp/pil/avm/kernel.pil b/barretenberg/cpp/pil/avm/kernel.pil index 9763bcaaa42..d04a5fa13ff 100644 --- a/barretenberg/cpp/pil/avm/kernel.pil +++ b/barretenberg/cpp/pil/avm/kernel.pil @@ -164,8 +164,8 @@ namespace main(256); // When we encounter a state writing 
opcode // We increment the side effect counter by 1 - #[SIDE_EFFECT_COUNTER_INCREMENT] - KERNEL_OUTPUT_SELECTORS * (side_effect_counter' - (side_effect_counter + 1)) = 0; + //#[SIDE_EFFECT_COUNTER_INCREMENT] + //KERNEL_OUTPUT_SELECTORS * (side_effect_counter' - (side_effect_counter + 1)) = 0; //===== LOOKUPS INTO THE PUBLIC INPUTS =========================================== pol KERNEL_INPUT_SELECTORS = sel_op_address + sel_op_storage_address + sel_op_sender @@ -182,8 +182,9 @@ namespace main(256); #[KERNEL_OUTPUT_ACTIVE_CHECK] KERNEL_OUTPUT_SELECTORS * (1 - sel_q_kernel_output_lookup) = 0; + // TODO(#8287): Reintroduce constraints #[KERNEL_OUTPUT_LOOKUP] - sel_q_kernel_output_lookup {kernel_out_offset, ia, side_effect_counter, ib} in sel_kernel_out {clk, kernel_value_out, kernel_side_effect_out, kernel_metadata_out}; + sel_q_kernel_output_lookup {kernel_out_offset, /*ia,*/ /*side_effect_counter,*/ ib } in sel_kernel_out {clk, /*kernel_value_out,*/ /*kernel_side_effect_out,*/ kernel_metadata_out}; #[LOOKUP_INTO_KERNEL] sel_q_kernel_lookup { main.ia, kernel_in_offset } in sel_kernel_inputs { kernel_inputs, clk }; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp index fca4ae3c126..f54c2b41054 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp @@ -730,24 +730,23 @@ AvmFlavor::AllConstRefValues::AllConstRefValues( , main_nullifier_non_exists_write_offset_shift(il[722]) , main_pc_shift(il[723]) , main_sel_execution_row_shift(il[724]) - , main_side_effect_counter_shift(il[725]) - , main_sload_write_offset_shift(il[726]) - , main_sstore_write_offset_shift(il[727]) - , mem_glob_addr_shift(il[728]) - , mem_rw_shift(il[729]) - , mem_sel_mem_shift(il[730]) - , mem_tag_shift(il[731]) - , mem_tsp_shift(il[732]) - , mem_val_shift(il[733]) - , slice_addr_shift(il[734]) - , slice_clk_shift(il[735]) - , 
slice_cnt_shift(il[736]) - , slice_col_offset_shift(il[737]) - , slice_sel_cd_cpy_shift(il[738]) - , slice_sel_mem_active_shift(il[739]) - , slice_sel_return_shift(il[740]) - , slice_sel_start_shift(il[741]) - , slice_space_id_shift(il[742]) + , main_sload_write_offset_shift(il[725]) + , main_sstore_write_offset_shift(il[726]) + , mem_glob_addr_shift(il[727]) + , mem_rw_shift(il[728]) + , mem_sel_mem_shift(il[729]) + , mem_tag_shift(il[730]) + , mem_tsp_shift(il[731]) + , mem_val_shift(il[732]) + , slice_addr_shift(il[733]) + , slice_clk_shift(il[734]) + , slice_cnt_shift(il[735]) + , slice_col_offset_shift(il[736]) + , slice_sel_cd_cpy_shift(il[737]) + , slice_sel_mem_active_shift(il[738]) + , slice_sel_return_shift(il[739]) + , slice_sel_start_shift(il[740]) + , slice_space_id_shift(il[741]) {} AvmFlavor::ProverPolynomials::ProverPolynomials(ProvingKey& proving_key) @@ -1489,7 +1488,6 @@ AvmFlavor::AllConstRefValues AvmFlavor::ProverPolynomials::get_row(size_t row_id main_nullifier_non_exists_write_offset_shift[row_idx], main_pc_shift[row_idx], main_sel_execution_row_shift[row_idx], - main_side_effect_counter_shift[row_idx], main_sload_write_offset_shift[row_idx], main_sstore_write_offset_shift[row_idx], mem_glob_addr_shift[row_idx], diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp index 963e592b3df..47228fbf667 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp @@ -91,8 +91,8 @@ template using tuple_cat_t = decltype(std::tuple_cat(std:: #define PRECOMPUTED_ENTITIES byte_lookup_sel_bin, byte_lookup_table_byte_lengths, byte_lookup_table_in_tags, byte_lookup_table_input_a, byte_lookup_table_input_b, byte_lookup_table_op_id, byte_lookup_table_output, gas_base_da_gas_fixed_table, gas_base_l2_gas_fixed_table, gas_dyn_da_gas_fixed_table, gas_dyn_l2_gas_fixed_table, gas_sel_gas_cost, 
main_clk, main_sel_first, main_zeroes, powers_power_of_2 #define WIRE_ENTITIES main_kernel_inputs, main_kernel_value_out, main_kernel_side_effect_out, main_kernel_metadata_out, main_calldata, main_returndata, alu_a_hi, alu_a_lo, alu_b_hi, alu_b_lo, alu_b_pow, alu_c_hi, alu_c_lo, alu_cf, alu_clk, alu_cmp_gadget_gt, alu_cmp_gadget_input_a, alu_cmp_gadget_input_b, alu_cmp_gadget_result, alu_cmp_gadget_sel, alu_ff_tag, alu_ia, alu_ib, alu_ic, alu_in_tag, alu_max_bits_sub_b_bits, alu_max_bits_sub_b_pow, alu_op_add, alu_op_cast, alu_op_div, alu_op_eq, alu_op_lt, alu_op_lte, alu_op_mul, alu_op_not, alu_op_shl, alu_op_shr, alu_op_sub, alu_partial_prod_hi, alu_partial_prod_lo, alu_range_check_input_value, alu_range_check_num_bits, alu_range_check_sel, alu_remainder, alu_sel_alu, alu_sel_cmp, alu_sel_shift_which, alu_u128_tag, alu_u16_tag, alu_u32_tag, alu_u64_tag, alu_u8_tag, alu_zero_shift, binary_acc_ia, binary_acc_ib, binary_acc_ic, binary_clk, binary_ia_bytes, binary_ib_bytes, binary_ic_bytes, binary_in_tag, binary_mem_tag_ctr, binary_mem_tag_ctr_inv, binary_op_id, binary_sel_bin, binary_start, cmp_a_hi, cmp_a_lo, cmp_b_hi, cmp_b_lo, cmp_borrow, cmp_clk, cmp_cmp_rng_ctr, cmp_input_a, cmp_input_b, cmp_op_eq, cmp_op_eq_diff_inv, cmp_op_gt, cmp_p_a_borrow, cmp_p_b_borrow, cmp_p_sub_a_hi, cmp_p_sub_a_lo, cmp_p_sub_b_hi, cmp_p_sub_b_lo, cmp_range_chk_clk, cmp_res_hi, cmp_res_lo, cmp_result, cmp_sel_cmp, cmp_sel_rng_chk, cmp_shift_sel, conversion_clk, conversion_input, conversion_num_limbs, conversion_radix, conversion_sel_to_radix_le, keccakf1600_clk, keccakf1600_input, keccakf1600_output, keccakf1600_sel_keccakf1600, main_abs_da_rem_gas, main_abs_l2_rem_gas, main_alu_in_tag, main_base_da_gas_op_cost, main_base_l2_gas_op_cost, main_bin_op_id, main_call_ptr, main_da_gas_remaining, main_da_out_of_gas, main_dyn_da_gas_op_cost, main_dyn_gas_multiplier, main_dyn_l2_gas_op_cost, main_emit_l2_to_l1_msg_write_offset, main_emit_note_hash_write_offset, 
main_emit_nullifier_write_offset, main_emit_unencrypted_log_write_offset, main_ia, main_ib, main_ic, main_id, main_id_zero, main_ind_addr_a, main_ind_addr_b, main_ind_addr_c, main_ind_addr_d, main_internal_return_ptr, main_inv, main_kernel_in_offset, main_kernel_out_offset, main_l1_to_l2_msg_exists_write_offset, main_l2_gas_remaining, main_l2_out_of_gas, main_mem_addr_a, main_mem_addr_b, main_mem_addr_c, main_mem_addr_d, main_note_hash_exist_write_offset, main_nullifier_exists_write_offset, main_nullifier_non_exists_write_offset, main_op_err, main_opcode_val, main_pc, main_r_in_tag, main_rwa, main_rwb, main_rwc, main_rwd, main_sel_alu, main_sel_bin, main_sel_calldata, main_sel_execution_row, main_sel_kernel_inputs, main_sel_kernel_out, main_sel_last, main_sel_mem_op_a, main_sel_mem_op_b, main_sel_mem_op_c, main_sel_mem_op_d, main_sel_mov_ia_to_ic, main_sel_mov_ib_to_ic, main_sel_op_add, main_sel_op_address, main_sel_op_and, main_sel_op_block_number, main_sel_op_calldata_copy, main_sel_op_cast, main_sel_op_chain_id, main_sel_op_cmov, main_sel_op_coinbase, main_sel_op_dagasleft, main_sel_op_div, main_sel_op_ecadd, main_sel_op_emit_l2_to_l1_msg, main_sel_op_emit_note_hash, main_sel_op_emit_nullifier, main_sel_op_emit_unencrypted_log, main_sel_op_eq, main_sel_op_external_call, main_sel_op_external_return, main_sel_op_external_revert, main_sel_op_fdiv, main_sel_op_fee_per_da_gas, main_sel_op_fee_per_l2_gas, main_sel_op_function_selector, main_sel_op_get_contract_instance, main_sel_op_internal_call, main_sel_op_internal_return, main_sel_op_jump, main_sel_op_jumpi, main_sel_op_keccak, main_sel_op_l1_to_l2_msg_exists, main_sel_op_l2gasleft, main_sel_op_lt, main_sel_op_lte, main_sel_op_mov, main_sel_op_msm, main_sel_op_mul, main_sel_op_not, main_sel_op_note_hash_exists, main_sel_op_nullifier_exists, main_sel_op_or, main_sel_op_pedersen, main_sel_op_pedersen_commit, main_sel_op_poseidon2, main_sel_op_radix_le, main_sel_op_sender, main_sel_op_set, main_sel_op_sha256, 
main_sel_op_shl, main_sel_op_shr, main_sel_op_sload, main_sel_op_sstore, main_sel_op_storage_address, main_sel_op_sub, main_sel_op_timestamp, main_sel_op_transaction_fee, main_sel_op_version, main_sel_op_xor, main_sel_q_kernel_lookup, main_sel_q_kernel_output_lookup, main_sel_resolve_ind_addr_a, main_sel_resolve_ind_addr_b, main_sel_resolve_ind_addr_c, main_sel_resolve_ind_addr_d, main_sel_returndata, main_sel_rng_16, main_sel_rng_8, main_sel_slice_gadget, main_side_effect_counter, main_sload_write_offset, main_space_id, main_sstore_write_offset, main_tag_err, main_w_in_tag, mem_addr, mem_clk, mem_diff, mem_glob_addr, mem_last, mem_lastAccess, mem_one_min_inv, mem_r_in_tag, mem_rw, mem_sel_mem, mem_sel_mov_ia_to_ic, mem_sel_mov_ib_to_ic, mem_sel_op_a, mem_sel_op_b, mem_sel_op_c, mem_sel_op_cmov, mem_sel_op_d, mem_sel_op_poseidon_read_a, mem_sel_op_poseidon_read_b, mem_sel_op_poseidon_read_c, mem_sel_op_poseidon_read_d, mem_sel_op_poseidon_write_a, mem_sel_op_poseidon_write_b, mem_sel_op_poseidon_write_c, mem_sel_op_poseidon_write_d, mem_sel_op_slice, mem_sel_resolve_ind_addr_a, mem_sel_resolve_ind_addr_b, mem_sel_resolve_ind_addr_c, mem_sel_resolve_ind_addr_d, mem_sel_rng_chk, mem_skip_check_tag, mem_space_id, mem_tag, mem_tag_err, mem_tsp, mem_val, mem_w_in_tag, pedersen_clk, pedersen_input, pedersen_output, pedersen_sel_pedersen, poseidon2_B_10_0, poseidon2_B_10_1, poseidon2_B_10_2, poseidon2_B_10_3, poseidon2_B_11_0, poseidon2_B_11_1, poseidon2_B_11_2, poseidon2_B_11_3, poseidon2_B_12_0, poseidon2_B_12_1, poseidon2_B_12_2, poseidon2_B_12_3, poseidon2_B_13_0, poseidon2_B_13_1, poseidon2_B_13_2, poseidon2_B_13_3, poseidon2_B_14_0, poseidon2_B_14_1, poseidon2_B_14_2, poseidon2_B_14_3, poseidon2_B_15_0, poseidon2_B_15_1, poseidon2_B_15_2, poseidon2_B_15_3, poseidon2_B_16_0, poseidon2_B_16_1, poseidon2_B_16_2, poseidon2_B_16_3, poseidon2_B_17_0, poseidon2_B_17_1, poseidon2_B_17_2, poseidon2_B_17_3, poseidon2_B_18_0, poseidon2_B_18_1, poseidon2_B_18_2, 
poseidon2_B_18_3, poseidon2_B_19_0, poseidon2_B_19_1, poseidon2_B_19_2, poseidon2_B_19_3, poseidon2_B_20_0, poseidon2_B_20_1, poseidon2_B_20_2, poseidon2_B_20_3, poseidon2_B_21_0, poseidon2_B_21_1, poseidon2_B_21_2, poseidon2_B_21_3, poseidon2_B_22_0, poseidon2_B_22_1, poseidon2_B_22_2, poseidon2_B_22_3, poseidon2_B_23_0, poseidon2_B_23_1, poseidon2_B_23_2, poseidon2_B_23_3, poseidon2_B_24_0, poseidon2_B_24_1, poseidon2_B_24_2, poseidon2_B_24_3, poseidon2_B_25_0, poseidon2_B_25_1, poseidon2_B_25_2, poseidon2_B_25_3, poseidon2_B_26_0, poseidon2_B_26_1, poseidon2_B_26_2, poseidon2_B_26_3, poseidon2_B_27_0, poseidon2_B_27_1, poseidon2_B_27_2, poseidon2_B_27_3, poseidon2_B_28_0, poseidon2_B_28_1, poseidon2_B_28_2, poseidon2_B_28_3, poseidon2_B_29_0, poseidon2_B_29_1, poseidon2_B_29_2, poseidon2_B_29_3, poseidon2_B_30_0, poseidon2_B_30_1, poseidon2_B_30_2, poseidon2_B_30_3, poseidon2_B_31_0, poseidon2_B_31_1, poseidon2_B_31_2, poseidon2_B_31_3, poseidon2_B_32_0, poseidon2_B_32_1, poseidon2_B_32_2, poseidon2_B_32_3, poseidon2_B_33_0, poseidon2_B_33_1, poseidon2_B_33_2, poseidon2_B_33_3, poseidon2_B_34_0, poseidon2_B_34_1, poseidon2_B_34_2, poseidon2_B_34_3, poseidon2_B_35_0, poseidon2_B_35_1, poseidon2_B_35_2, poseidon2_B_35_3, poseidon2_B_36_0, poseidon2_B_36_1, poseidon2_B_36_2, poseidon2_B_36_3, poseidon2_B_37_0, poseidon2_B_37_1, poseidon2_B_37_2, poseidon2_B_37_3, poseidon2_B_38_0, poseidon2_B_38_1, poseidon2_B_38_2, poseidon2_B_38_3, poseidon2_B_39_0, poseidon2_B_39_1, poseidon2_B_39_2, poseidon2_B_39_3, poseidon2_B_40_0, poseidon2_B_40_1, poseidon2_B_40_2, poseidon2_B_40_3, poseidon2_B_41_0, poseidon2_B_41_1, poseidon2_B_41_2, poseidon2_B_41_3, poseidon2_B_42_0, poseidon2_B_42_1, poseidon2_B_42_2, poseidon2_B_42_3, poseidon2_B_43_0, poseidon2_B_43_1, poseidon2_B_43_2, poseidon2_B_43_3, poseidon2_B_44_0, poseidon2_B_44_1, poseidon2_B_44_2, poseidon2_B_44_3, poseidon2_B_45_0, poseidon2_B_45_1, poseidon2_B_45_2, poseidon2_B_45_3, poseidon2_B_46_0, poseidon2_B_46_1, 
poseidon2_B_46_2, poseidon2_B_46_3, poseidon2_B_47_0, poseidon2_B_47_1, poseidon2_B_47_2, poseidon2_B_47_3, poseidon2_B_48_0, poseidon2_B_48_1, poseidon2_B_48_2, poseidon2_B_48_3, poseidon2_B_49_0, poseidon2_B_49_1, poseidon2_B_49_2, poseidon2_B_49_3, poseidon2_B_4_0, poseidon2_B_4_1, poseidon2_B_4_2, poseidon2_B_4_3, poseidon2_B_50_0, poseidon2_B_50_1, poseidon2_B_50_2, poseidon2_B_50_3, poseidon2_B_51_0, poseidon2_B_51_1, poseidon2_B_51_2, poseidon2_B_51_3, poseidon2_B_52_0, poseidon2_B_52_1, poseidon2_B_52_2, poseidon2_B_52_3, poseidon2_B_53_0, poseidon2_B_53_1, poseidon2_B_53_2, poseidon2_B_53_3, poseidon2_B_54_0, poseidon2_B_54_1, poseidon2_B_54_2, poseidon2_B_54_3, poseidon2_B_55_0, poseidon2_B_55_1, poseidon2_B_55_2, poseidon2_B_55_3, poseidon2_B_56_0, poseidon2_B_56_1, poseidon2_B_56_2, poseidon2_B_56_3, poseidon2_B_57_0, poseidon2_B_57_1, poseidon2_B_57_2, poseidon2_B_57_3, poseidon2_B_58_0, poseidon2_B_58_1, poseidon2_B_58_2, poseidon2_B_58_3, poseidon2_B_59_0, poseidon2_B_59_1, poseidon2_B_59_2, poseidon2_B_59_3, poseidon2_B_5_0, poseidon2_B_5_1, poseidon2_B_5_2, poseidon2_B_5_3, poseidon2_B_6_0, poseidon2_B_6_1, poseidon2_B_6_2, poseidon2_B_6_3, poseidon2_B_7_0, poseidon2_B_7_1, poseidon2_B_7_2, poseidon2_B_7_3, poseidon2_B_8_0, poseidon2_B_8_1, poseidon2_B_8_2, poseidon2_B_8_3, poseidon2_B_9_0, poseidon2_B_9_1, poseidon2_B_9_2, poseidon2_B_9_3, poseidon2_EXT_LAYER_4, poseidon2_EXT_LAYER_5, poseidon2_EXT_LAYER_6, poseidon2_EXT_LAYER_7, poseidon2_T_0_4, poseidon2_T_0_5, poseidon2_T_0_6, poseidon2_T_0_7, poseidon2_T_1_4, poseidon2_T_1_5, poseidon2_T_1_6, poseidon2_T_1_7, poseidon2_T_2_4, poseidon2_T_2_5, poseidon2_T_2_6, poseidon2_T_2_7, poseidon2_T_3_4, poseidon2_T_3_5, poseidon2_T_3_6, poseidon2_T_3_7, poseidon2_T_60_4, poseidon2_T_60_5, poseidon2_T_60_6, poseidon2_T_60_7, poseidon2_T_61_4, poseidon2_T_61_5, poseidon2_T_61_6, poseidon2_T_61_7, poseidon2_T_62_4, poseidon2_T_62_5, poseidon2_T_62_6, poseidon2_T_62_7, poseidon2_T_63_4, poseidon2_T_63_5, 
poseidon2_T_63_6, poseidon2_T_63_7, poseidon2_a_0, poseidon2_a_1, poseidon2_a_2, poseidon2_a_3, poseidon2_b_0, poseidon2_b_1, poseidon2_b_2, poseidon2_b_3, poseidon2_clk, poseidon2_input_addr, poseidon2_mem_addr_read_a, poseidon2_mem_addr_read_b, poseidon2_mem_addr_read_c, poseidon2_mem_addr_read_d, poseidon2_mem_addr_write_a, poseidon2_mem_addr_write_b, poseidon2_mem_addr_write_c, poseidon2_mem_addr_write_d, poseidon2_output_addr, poseidon2_sel_poseidon_perm, range_check_alu_rng_chk, range_check_clk, range_check_cmp_hi_bits_rng_chk, range_check_cmp_lo_bits_rng_chk, range_check_dyn_diff, range_check_dyn_rng_chk_bits, range_check_dyn_rng_chk_pow_2, range_check_gas_da_rng_chk, range_check_gas_l2_rng_chk, range_check_is_lte_u112, range_check_is_lte_u128, range_check_is_lte_u16, range_check_is_lte_u32, range_check_is_lte_u48, range_check_is_lte_u64, range_check_is_lte_u80, range_check_is_lte_u96, range_check_mem_rng_chk, range_check_rng_chk_bits, range_check_sel_lookup_0, range_check_sel_lookup_1, range_check_sel_lookup_2, range_check_sel_lookup_3, range_check_sel_lookup_4, range_check_sel_lookup_5, range_check_sel_lookup_6, range_check_sel_rng_chk, range_check_u16_r0, range_check_u16_r1, range_check_u16_r2, range_check_u16_r3, range_check_u16_r4, range_check_u16_r5, range_check_u16_r6, range_check_u16_r7, range_check_value, sha256_clk, sha256_input, sha256_output, sha256_sel_sha256_compression, sha256_state, slice_addr, slice_clk, slice_cnt, slice_col_offset, slice_one_min_inv, slice_sel_cd_cpy, slice_sel_mem_active, slice_sel_return, slice_sel_start, slice_space_id, slice_val, lookup_rng_chk_pow_2_counts, lookup_rng_chk_diff_counts, lookup_rng_chk_0_counts, lookup_rng_chk_1_counts, lookup_rng_chk_2_counts, lookup_rng_chk_3_counts, lookup_rng_chk_4_counts, lookup_rng_chk_5_counts, lookup_rng_chk_6_counts, lookup_rng_chk_7_counts, lookup_pow_2_0_counts, lookup_pow_2_1_counts, lookup_byte_lengths_counts, lookup_byte_operations_counts, lookup_opcode_gas_counts, 
kernel_output_lookup_counts, lookup_into_kernel_counts, lookup_cd_value_counts, lookup_ret_value_counts, incl_main_tag_err_counts, incl_mem_tag_err_counts #define DERIVED_WITNESS_ENTITIES perm_rng_mem_inv, perm_rng_cmp_lo_inv, perm_rng_cmp_hi_inv, perm_rng_alu_inv, perm_cmp_alu_inv, perm_rng_gas_l2_inv, perm_rng_gas_da_inv, perm_pos_mem_read_a_inv, perm_pos_mem_read_b_inv, perm_pos_mem_read_c_inv, perm_pos_mem_read_d_inv, perm_pos_mem_write_a_inv, perm_pos_mem_write_b_inv, perm_pos_mem_write_c_inv, perm_pos_mem_write_d_inv, perm_slice_mem_inv, perm_main_alu_inv, perm_main_bin_inv, perm_main_conv_inv, perm_main_pos2_perm_inv, perm_main_pedersen_inv, perm_main_slice_inv, perm_main_mem_a_inv, perm_main_mem_b_inv, perm_main_mem_c_inv, perm_main_mem_d_inv, perm_main_mem_ind_addr_a_inv, perm_main_mem_ind_addr_b_inv, perm_main_mem_ind_addr_c_inv, perm_main_mem_ind_addr_d_inv, lookup_rng_chk_pow_2_inv, lookup_rng_chk_diff_inv, lookup_rng_chk_0_inv, lookup_rng_chk_1_inv, lookup_rng_chk_2_inv, lookup_rng_chk_3_inv, lookup_rng_chk_4_inv, lookup_rng_chk_5_inv, lookup_rng_chk_6_inv, lookup_rng_chk_7_inv, lookup_pow_2_0_inv, lookup_pow_2_1_inv, lookup_byte_lengths_inv, lookup_byte_operations_inv, lookup_opcode_gas_inv, kernel_output_lookup_inv, lookup_into_kernel_inv, lookup_cd_value_inv, lookup_ret_value_inv, incl_main_tag_err_inv, incl_mem_tag_err_inv -#define SHIFTED_ENTITIES binary_acc_ia_shift, binary_acc_ib_shift, binary_acc_ic_shift, binary_mem_tag_ctr_shift, binary_op_id_shift, cmp_a_hi_shift, cmp_a_lo_shift, cmp_b_hi_shift, cmp_b_lo_shift, cmp_cmp_rng_ctr_shift, cmp_op_gt_shift, cmp_p_sub_a_hi_shift, cmp_p_sub_a_lo_shift, cmp_p_sub_b_hi_shift, cmp_p_sub_b_lo_shift, cmp_sel_rng_chk_shift, main_da_gas_remaining_shift, main_emit_l2_to_l1_msg_write_offset_shift, main_emit_note_hash_write_offset_shift, main_emit_nullifier_write_offset_shift, main_emit_unencrypted_log_write_offset_shift, main_internal_return_ptr_shift, main_l1_to_l2_msg_exists_write_offset_shift, 
main_l2_gas_remaining_shift, main_note_hash_exist_write_offset_shift, main_nullifier_exists_write_offset_shift, main_nullifier_non_exists_write_offset_shift, main_pc_shift, main_sel_execution_row_shift, main_side_effect_counter_shift, main_sload_write_offset_shift, main_sstore_write_offset_shift, mem_glob_addr_shift, mem_rw_shift, mem_sel_mem_shift, mem_tag_shift, mem_tsp_shift, mem_val_shift, slice_addr_shift, slice_clk_shift, slice_cnt_shift, slice_col_offset_shift, slice_sel_cd_cpy_shift, slice_sel_mem_active_shift, slice_sel_return_shift, slice_sel_start_shift, slice_space_id_shift -#define TO_BE_SHIFTED(e) e.binary_acc_ia, e.binary_acc_ib, e.binary_acc_ic, e.binary_mem_tag_ctr, e.binary_op_id, e.cmp_a_hi, e.cmp_a_lo, e.cmp_b_hi, e.cmp_b_lo, e.cmp_cmp_rng_ctr, e.cmp_op_gt, e.cmp_p_sub_a_hi, e.cmp_p_sub_a_lo, e.cmp_p_sub_b_hi, e.cmp_p_sub_b_lo, e.cmp_sel_rng_chk, e.main_da_gas_remaining, e.main_emit_l2_to_l1_msg_write_offset, e.main_emit_note_hash_write_offset, e.main_emit_nullifier_write_offset, e.main_emit_unencrypted_log_write_offset, e.main_internal_return_ptr, e.main_l1_to_l2_msg_exists_write_offset, e.main_l2_gas_remaining, e.main_note_hash_exist_write_offset, e.main_nullifier_exists_write_offset, e.main_nullifier_non_exists_write_offset, e.main_pc, e.main_sel_execution_row, e.main_side_effect_counter, e.main_sload_write_offset, e.main_sstore_write_offset, e.mem_glob_addr, e.mem_rw, e.mem_sel_mem, e.mem_tag, e.mem_tsp, e.mem_val, e.slice_addr, e.slice_clk, e.slice_cnt, e.slice_col_offset, e.slice_sel_cd_cpy, e.slice_sel_mem_active, e.slice_sel_return, e.slice_sel_start, e.slice_space_id +#define SHIFTED_ENTITIES binary_acc_ia_shift, binary_acc_ib_shift, binary_acc_ic_shift, binary_mem_tag_ctr_shift, binary_op_id_shift, cmp_a_hi_shift, cmp_a_lo_shift, cmp_b_hi_shift, cmp_b_lo_shift, cmp_cmp_rng_ctr_shift, cmp_op_gt_shift, cmp_p_sub_a_hi_shift, cmp_p_sub_a_lo_shift, cmp_p_sub_b_hi_shift, cmp_p_sub_b_lo_shift, cmp_sel_rng_chk_shift, 
main_da_gas_remaining_shift, main_emit_l2_to_l1_msg_write_offset_shift, main_emit_note_hash_write_offset_shift, main_emit_nullifier_write_offset_shift, main_emit_unencrypted_log_write_offset_shift, main_internal_return_ptr_shift, main_l1_to_l2_msg_exists_write_offset_shift, main_l2_gas_remaining_shift, main_note_hash_exist_write_offset_shift, main_nullifier_exists_write_offset_shift, main_nullifier_non_exists_write_offset_shift, main_pc_shift, main_sel_execution_row_shift, main_sload_write_offset_shift, main_sstore_write_offset_shift, mem_glob_addr_shift, mem_rw_shift, mem_sel_mem_shift, mem_tag_shift, mem_tsp_shift, mem_val_shift, slice_addr_shift, slice_clk_shift, slice_cnt_shift, slice_col_offset_shift, slice_sel_cd_cpy_shift, slice_sel_mem_active_shift, slice_sel_return_shift, slice_sel_start_shift, slice_space_id_shift +#define TO_BE_SHIFTED(e) e.binary_acc_ia, e.binary_acc_ib, e.binary_acc_ic, e.binary_mem_tag_ctr, e.binary_op_id, e.cmp_a_hi, e.cmp_a_lo, e.cmp_b_hi, e.cmp_b_lo, e.cmp_cmp_rng_ctr, e.cmp_op_gt, e.cmp_p_sub_a_hi, e.cmp_p_sub_a_lo, e.cmp_p_sub_b_hi, e.cmp_p_sub_b_lo, e.cmp_sel_rng_chk, e.main_da_gas_remaining, e.main_emit_l2_to_l1_msg_write_offset, e.main_emit_note_hash_write_offset, e.main_emit_nullifier_write_offset, e.main_emit_unencrypted_log_write_offset, e.main_internal_return_ptr, e.main_l1_to_l2_msg_exists_write_offset, e.main_l2_gas_remaining, e.main_note_hash_exist_write_offset, e.main_nullifier_exists_write_offset, e.main_nullifier_non_exists_write_offset, e.main_pc, e.main_sel_execution_row, e.main_sload_write_offset, e.main_sstore_write_offset, e.mem_glob_addr, e.mem_rw, e.mem_sel_mem, e.mem_tag, e.mem_tsp, e.mem_val, e.slice_addr, e.slice_clk, e.slice_cnt, e.slice_col_offset, e.slice_sel_cd_cpy, e.slice_sel_mem_active, e.slice_sel_return, e.slice_sel_start, e.slice_space_id #define ALL_ENTITIES PRECOMPUTED_ENTITIES, WIRE_ENTITIES, DERIVED_WITNESS_ENTITIES, SHIFTED_ENTITIES // clang-format on @@ -119,11 +119,11 @@ class AvmFlavor { 
static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 16; static constexpr size_t NUM_WITNESS_ENTITIES = 680; - static constexpr size_t NUM_SHIFTED_ENTITIES = 47; + static constexpr size_t NUM_SHIFTED_ENTITIES = 46; static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for // the unshifted and one for the shifted - static constexpr size_t NUM_ALL_ENTITIES = 743; + static constexpr size_t NUM_ALL_ENTITIES = 742; // The total number of witnesses including shifts and derived entities. static constexpr size_t NUM_ALL_WITNESS_ENTITIES = NUM_WITNESS_ENTITIES + NUM_SHIFTED_ENTITIES; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/kernel.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/kernel.hpp index 6fa15bbad85..4ac010e7171 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/kernel.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/kernel.hpp @@ -10,9 +10,9 @@ template class kernelImpl { public: using FF = FF_; - static constexpr std::array SUBRELATION_PARTIAL_LENGTHS = { 3, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS = { 3, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 }; + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 }; template void static accumulate(ContainerOverSubrelations& evals, @@ -363,22 +363,15 @@ template class kernelImpl { } { using Accumulator = typename std::tuple_element_t<41, ContainerOverSubrelations>; - auto tmp = (main_KERNEL_OUTPUT_SELECTORS * - (new_term.main_side_effect_counter_shift - (new_term.main_side_effect_counter + FF(1)))); + auto tmp = (main_KERNEL_INPUT_SELECTORS * (FF(1) - new_term.main_sel_q_kernel_lookup)); tmp *= scaling_factor; std::get<41>(evals) += typename 
Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<42, ContainerOverSubrelations>; - auto tmp = (main_KERNEL_INPUT_SELECTORS * (FF(1) - new_term.main_sel_q_kernel_lookup)); - tmp *= scaling_factor; - std::get<42>(evals) += typename Accumulator::View(tmp); - } - { - using Accumulator = typename std::tuple_element_t<43, ContainerOverSubrelations>; auto tmp = (main_KERNEL_OUTPUT_SELECTORS * (FF(1) - new_term.main_sel_q_kernel_output_lookup)); tmp *= scaling_factor; - std::get<43>(evals) += typename Accumulator::View(tmp); + std::get<42>(evals) += typename Accumulator::View(tmp); } } }; @@ -453,10 +446,8 @@ template class kernel : public Relation> { case 39: return "SSTORE_KERNEL_OUTPUT"; case 41: - return "SIDE_EFFECT_COUNTER_INCREMENT"; - case 42: return "KERNEL_INPUT_ACTIVE_CHECK"; - case 43: + case 42: return "KERNEL_OUTPUT_ACTIVE_CHECK"; } return std::to_string(index); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/kernel_output_lookup.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/kernel_output_lookup.hpp index 27ab2deb718..72a11d0e50f 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/kernel_output_lookup.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/kernel_output_lookup.hpp @@ -14,7 +14,7 @@ class kernel_output_lookup_lookup_settings { static constexpr size_t WRITE_TERMS = 1; static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; - static constexpr size_t LOOKUP_TUPLE_SIZE = 4; + static constexpr size_t LOOKUP_TUPLE_SIZE = 2; static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; static constexpr size_t READ_TERM_DEGREE = 0; static constexpr size_t WRITE_TERM_DEGREE = 0; @@ -40,12 +40,8 @@ class kernel_output_lookup_lookup_settings { in.main_sel_q_kernel_output_lookup, in.main_sel_kernel_out, in.main_kernel_out_offset, - in.main_ia, - in.main_side_effect_counter, 
in.main_ib, in.main_clk, - in.main_kernel_value_out, - in.main_kernel_side_effect_out, in.main_kernel_metadata_out); } @@ -56,12 +52,8 @@ class kernel_output_lookup_lookup_settings { in.main_sel_q_kernel_output_lookup, in.main_sel_kernel_out, in.main_kernel_out_offset, - in.main_ia, - in.main_side_effect_counter, in.main_ib, in.main_clk, - in.main_kernel_value_out, - in.main_kernel_side_effect_out, in.main_kernel_metadata_out); } }; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp index 5cd4535a260..8a776a123e9 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp @@ -1771,7 +1771,8 @@ TEST_F(AvmExecutionTests, kernelOutputEmitOpcodes) auto emit_note_hash_kernel_out_row = std::ranges::find_if( trace.begin(), trace.end(), [&](Row r) { return r.main_clk == emit_note_hash_out_offset; }); EXPECT_EQ(emit_note_hash_kernel_out_row->main_kernel_value_out, 1); - EXPECT_EQ(emit_note_hash_kernel_out_row->main_kernel_side_effect_out, 0); + // TODO(#8287) + // EXPECT_EQ(emit_note_hash_kernel_out_row->main_kernel_side_effect_out, 0); feed_output(emit_note_hash_out_offset, 1, 0, 0); // CHECK EMIT NULLIFIER @@ -2040,7 +2041,9 @@ TEST_F(AvmExecutionTests, kernelOutputHashExistsOpcodes) std::vector returndata = {}; // Generate Hint for hash exists operation - auto execution_hints = ExecutionHints().with_storage_value_hints({ { 0, 1 }, { 1, 1 }, { 2, 1 } }); + auto execution_hints = ExecutionHints() + .with_storage_value_hints({ { 0, 1 }, { 1, 1 }, { 2, 1 } }) + .with_note_hash_exists_hints({ { 0, 1 }, { 1, 1 }, { 2, 1 } }); auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec, execution_hints); @@ -2068,7 +2071,8 @@ TEST_F(AvmExecutionTests, kernelOutputHashExistsOpcodes) auto nullifier_out_row = std::ranges::find_if( trace.begin(), trace.end(), [&](Row r) 
{ return r.main_clk == START_NULLIFIER_EXISTS_OFFSET; }); EXPECT_EQ(nullifier_out_row->main_kernel_value_out, 1); // value - EXPECT_EQ(nullifier_out_row->main_kernel_side_effect_out, 1); + // TODO(#8287) + // EXPECT_EQ(nullifier_out_row->main_kernel_side_effect_out, 1); EXPECT_EQ(nullifier_out_row->main_kernel_metadata_out, 1); // exists feed_output(START_NULLIFIER_EXISTS_OFFSET, 1, 1, 1); @@ -2082,7 +2086,8 @@ TEST_F(AvmExecutionTests, kernelOutputHashExistsOpcodes) auto msg_out_row = std::ranges::find_if( trace.begin(), trace.end(), [&](Row r) { return r.main_clk == START_L1_TO_L2_MSG_EXISTS_WRITE_OFFSET; }); EXPECT_EQ(msg_out_row->main_kernel_value_out, 1); // value - EXPECT_EQ(msg_out_row->main_kernel_side_effect_out, 2); + // TODO(#8287) + // EXPECT_EQ(msg_out_row->main_kernel_side_effect_out, 2); EXPECT_EQ(msg_out_row->main_kernel_metadata_out, 1); // exists feed_output(START_L1_TO_L2_MSG_EXISTS_WRITE_OFFSET, 1, 2, 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/tests/kernel.test.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/tests/kernel.test.cpp index 87e503fe105..cc80b967fcd 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/tests/kernel.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/tests/kernel.test.cpp @@ -1226,14 +1226,16 @@ TEST_F(AvmKernelOutputPositiveTests, kernelNoteHashExists) auto direct_apply_opcodes = [=](AvmTraceBuilder& trace_builder) { trace_builder.op_set(0, static_cast(value), value_offset, AvmMemoryTag::FF); - trace_builder.op_note_hash_exists(/*indirect*/ false, value_offset, metadata_offset); + // TODO(#8287): Leaf index isnt constrained properly so we just set it to 0 + trace_builder.op_note_hash_exists(/*indirect*/ false, value_offset, 0, metadata_offset); }; // TODO: fix auto indirect_apply_opcodes = [=](AvmTraceBuilder& trace_builder) { trace_builder.op_set(0, static_cast(value), value_offset, AvmMemoryTag::FF); trace_builder.op_set(0, value_offset, indirect_value_offset, AvmMemoryTag::U32); 
trace_builder.op_set(0, metadata_offset, indirect_metadata_offset, AvmMemoryTag::U32); - trace_builder.op_note_hash_exists(/*indirect*/ 3, indirect_value_offset, indirect_metadata_offset); + // TODO(#8287): Leaf index isnt constrained properly so we just set it to 0 + trace_builder.op_note_hash_exists(/*indirect*/ 3, indirect_value_offset, 0, indirect_metadata_offset); }; auto checks = [=](bool indirect, const std::vector& trace) { auto row = std::ranges::find_if( @@ -1352,7 +1354,8 @@ TEST_F(AvmKernelOutputPositiveTests, kernelL1ToL2MsgExists) auto apply_opcodes = [=](AvmTraceBuilder& trace_builder) { trace_builder.op_set(0, static_cast(value), value_offset, AvmMemoryTag::FF); - trace_builder.op_l1_to_l2_msg_exists(/*indirect*/ false, value_offset, metadata_offset); + // TODO(#8287): Leaf index isnt constrained properly so we just set it to 0 + trace_builder.op_l1_to_l2_msg_exists(/*indirect*/ false, value_offset, 0, metadata_offset); }; auto checks = [=]([[maybe_unused]] bool indirect, const std::vector& trace) { auto row = std::ranges::find_if( diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp index fc53843c31d..8ae88df7bc9 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp @@ -650,8 +650,7 @@ std::vector Execution::gen_trace(std::vector const& instructio case OpCode::NOTEHASHEXISTS: trace_builder.op_note_hash_exists(std::get(inst.operands.at(0)), std::get(inst.operands.at(1)), - // TODO: leaf offset exists - // std::get(inst.operands.at(2)) + std::get(inst.operands.at(2)), std::get(inst.operands.at(3))); break; case OpCode::EMITNOTEHASH: @@ -673,8 +672,7 @@ std::vector Execution::gen_trace(std::vector const& instructio case OpCode::L1TOL2MSGEXISTS: trace_builder.op_l1_to_l2_msg_exists(std::get(inst.operands.at(0)), std::get(inst.operands.at(1)), - // TODO: leaf offset exists - // 
std::get(inst.operands.at(2)) + std::get(inst.operands.at(2)), std::get(inst.operands.at(3))); break; case OpCode::GETCONTRACTINSTANCE: diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp index df54b770f05..5bda7055d85 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp @@ -105,8 +105,15 @@ struct ExecutionHints { { std::unordered_map hints_map; push_vec_into_map(hints_map, storage_value_hints); - push_vec_into_map(hints_map, note_hash_exists_hints); push_vec_into_map(hints_map, nullifier_exists_hints); + return hints_map; + } + + // Leaf index -> exists + std::unordered_map get_leaf_index_hints() const + { + std::unordered_map hints_map; + push_vec_into_map(hints_map, note_hash_exists_hints); push_vec_into_map(hints_map, l1_to_l2_message_exists_hints); return hints_map; } @@ -161,4 +168,4 @@ struct ExecutionHints { {} }; -} // namespace bb::avm_trace \ No newline at end of file +} // namespace bb::avm_trace diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.cpp index 8564a7eed2b..f953f7b8489 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.cpp @@ -173,7 +173,16 @@ void AvmKernelTraceBuilder::op_note_hash_exists(uint32_t clk, { uint32_t offset = START_NOTE_HASH_EXISTS_WRITE_OFFSET + note_hash_exists_offset; - perform_kernel_output_lookup(offset, side_effect_counter, note_hash, FF(result)); + // TODO(#8287)Lookups are heavily underconstrained atm + if (result == 1) { + perform_kernel_output_lookup(offset, side_effect_counter, note_hash, FF(result)); + } else { + // if the note_hash does NOT exist, the public inputs already contains the correct output value (i.e. 
the + // actual value at the index), so we don't try to overwrite the value + std::get(public_inputs)[offset] = side_effect_counter; + std::get(public_inputs)[offset] = FF(result); + kernel_output_selector_counter[offset]++; + } note_hash_exists_offset++; KernelTraceEntry entry = { @@ -245,7 +254,16 @@ void AvmKernelTraceBuilder::op_l1_to_l2_msg_exists(uint32_t clk, uint32_t result) { uint32_t offset = START_L1_TO_L2_MSG_EXISTS_WRITE_OFFSET + l1_to_l2_msg_exists_offset; - perform_kernel_output_lookup(offset, side_effect_counter, message, FF(result)); + // TODO(#8287)Lookups are heavily underconstrained atm + if (result == 1) { + perform_kernel_output_lookup(offset, side_effect_counter, message, FF(result)); + } else { + // if the l1_to_l2_msg_exists is false, the public inputs already contains the correct output value (i.e. the + // actual value at the index), so we don't try to overwrite the value + std::get(public_inputs)[offset] = side_effect_counter; + std::get(public_inputs)[offset] = FF(result); + kernel_output_selector_counter[offset]++; + } l1_to_l2_msg_exists_offset++; KernelTraceEntry entry = { diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp index 899d4dd2575..3dc1d434db8 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp @@ -2095,6 +2095,44 @@ Row AvmTraceBuilder::create_kernel_output_opcode_with_set_metadata_output_from_h }; } +// Specifically for handling the L1TOL2MSGEXISTS and NOTEHASHEXISTS opcodes +Row AvmTraceBuilder::create_kernel_output_opcode_for_leaf_index( + uint8_t indirect, uint32_t clk, uint32_t data_offset, uint32_t metadata_offset, uint32_t leaf_index) +{ + // If doesnt exist, should not read_a, but instead get from public inputs + FF exists = execution_hints.get_leaf_index_hints().at(leaf_index); + + auto [resolved_data, resolved_metadata] = unpack_indirects<2>(indirect, { 
data_offset, metadata_offset }); + auto read_a = constrained_read_from_memory( + call_ptr, clk, resolved_data, AvmMemoryTag::FF, AvmMemoryTag::U8, IntermRegister::IA); + + auto write_b = constrained_write_to_memory( + call_ptr, clk, resolved_metadata, exists, AvmMemoryTag::FF, AvmMemoryTag::U8, IntermRegister::IB); + bool tag_match = read_a.tag_match && write_b.tag_match; + + return Row{ + .main_clk = clk, + .main_ia = read_a.val, + .main_ib = write_b.val, + .main_ind_addr_a = FF(read_a.indirect_address), + .main_ind_addr_b = FF(write_b.indirect_address), + .main_internal_return_ptr = internal_return_ptr, + .main_mem_addr_a = FF(read_a.direct_address), + .main_mem_addr_b = FF(write_b.direct_address), + .main_pc = pc++, + .main_r_in_tag = static_cast(AvmMemoryTag::FF), + .main_rwa = 0, + .main_rwb = 1, + .main_sel_mem_op_a = 1, + .main_sel_mem_op_b = 1, + .main_sel_q_kernel_output_lookup = 1, + .main_sel_resolve_ind_addr_a = FF(static_cast(read_a.is_indirect)), + .main_sel_resolve_ind_addr_b = FF(static_cast(write_b.is_indirect)), + .main_tag_err = static_cast(!tag_match), + .main_w_in_tag = static_cast(AvmMemoryTag::U8), + }; +} + /** * @brief Create a kernel output opcode with set metadata output object * @@ -2303,14 +2341,20 @@ void AvmTraceBuilder::op_sstore(uint8_t indirect, uint32_t src_offset, uint32_t op_jump(old_pc + 1); } -void AvmTraceBuilder::op_note_hash_exists(uint8_t indirect, uint32_t note_hash_offset, uint32_t dest_offset) +void AvmTraceBuilder::op_note_hash_exists(uint8_t indirect, + uint32_t note_hash_offset, + uint32_t leaf_index_offset, + uint32_t dest_offset) { auto const clk = static_cast(main_trace.size()) + 1; + auto leaf_index = unconstrained_read_from_memory(leaf_index_offset); Row row = - create_kernel_output_opcode_with_set_metadata_output_from_hint(indirect, clk, note_hash_offset, dest_offset); - kernel_trace_builder.op_note_hash_exists( - clk, side_effect_counter, row.main_ia, /*safe*/ static_cast(row.main_ib)); + 
create_kernel_output_opcode_for_leaf_index(indirect, clk, note_hash_offset, dest_offset, uint32_t(leaf_index)); + kernel_trace_builder.op_note_hash_exists(clk, + /*side_effect_counter*/ uint32_t(leaf_index), + row.main_ia, + /*safe*/ static_cast(row.main_ib)); row.main_sel_op_note_hash_exists = FF(1); // Constrain gas cost @@ -2319,7 +2363,6 @@ void AvmTraceBuilder::op_note_hash_exists(uint8_t indirect, uint32_t note_hash_o main_trace.push_back(row); debug("note_hash_exists side-effect cnt: ", side_effect_counter); - side_effect_counter++; } void AvmTraceBuilder::op_emit_note_hash(uint8_t indirect, uint32_t note_hash_offset) @@ -2375,13 +2418,17 @@ void AvmTraceBuilder::op_emit_nullifier(uint8_t indirect, uint32_t nullifier_off side_effect_counter++; } -void AvmTraceBuilder::op_l1_to_l2_msg_exists(uint8_t indirect, uint32_t log_offset, uint32_t dest_offset) +void AvmTraceBuilder::op_l1_to_l2_msg_exists(uint8_t indirect, + uint32_t log_offset, + uint32_t leaf_index_offset, + uint32_t dest_offset) { auto const clk = static_cast(main_trace.size()) + 1; - Row row = create_kernel_output_opcode_with_set_metadata_output_from_hint(indirect, clk, log_offset, dest_offset); + auto leaf_index = unconstrained_read_from_memory(leaf_index_offset); + Row row = create_kernel_output_opcode_for_leaf_index(indirect, clk, log_offset, dest_offset, uint32_t(leaf_index)); kernel_trace_builder.op_l1_to_l2_msg_exists( - clk, side_effect_counter, row.main_ia, /*safe*/ static_cast(row.main_ib)); + clk, uint32_t(leaf_index) /*side_effect_counter*/, row.main_ia, /*safe*/ static_cast(row.main_ib)); row.main_sel_op_l1_to_l2_msg_exists = FF(1); // Constrain gas cost @@ -2390,7 +2437,6 @@ void AvmTraceBuilder::op_l1_to_l2_msg_exists(uint8_t indirect, uint32_t log_offs main_trace.push_back(row); debug("l1_to_l2_msg_exists side-effect cnt: ", side_effect_counter); - side_effect_counter++; } void AvmTraceBuilder::op_get_contract_instance(uint8_t indirect, uint32_t address_offset, uint32_t dst_offset) 
diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp index 5140d76bd37..a301b637b73 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp @@ -122,11 +122,17 @@ class AvmTraceBuilder { // World State void op_sload(uint8_t indirect, uint32_t slot_offset, uint32_t size, uint32_t dest_offset); void op_sstore(uint8_t indirect, uint32_t src_offset, uint32_t size, uint32_t slot_offset); - void op_note_hash_exists(uint8_t indirect, uint32_t note_hash_offset, uint32_t dest_offset); + void op_note_hash_exists(uint8_t indirect, + uint32_t note_hash_offset, + uint32_t leaf_index_offset, + uint32_t dest_offset); void op_emit_note_hash(uint8_t indirect, uint32_t note_hash_offset); void op_nullifier_exists(uint8_t indirect, uint32_t nullifier_offset, uint32_t dest_offset); void op_emit_nullifier(uint8_t indirect, uint32_t nullifier_offset); - void op_l1_to_l2_msg_exists(uint8_t indirect, uint32_t log_offset, uint32_t dest_offset); + void op_l1_to_l2_msg_exists(uint8_t indirect, + uint32_t log_offset, + uint32_t leaf_index_offset, + uint32_t dest_offset); void op_get_contract_instance(uint8_t indirect, uint32_t address_offset, uint32_t dst_offset); // Accrued Substate @@ -233,6 +239,9 @@ class AvmTraceBuilder { uint32_t data_offset, uint32_t metadata_offset); + Row create_kernel_output_opcode_for_leaf_index( + uint8_t indirect, uint32_t clk, uint32_t data_offset, uint32_t metadata_offset, uint32_t leaf_index); + Row create_kernel_output_opcode_with_set_value_from_hint(uint8_t indirect, uint32_t clk, uint32_t data_offset, diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 769a77c970c..49f48e82f06 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -168,6 +168,7 @@ library Constants { uint256 
internal constant SCOPED_KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH = 6; uint256 internal constant PARTIAL_STATE_REFERENCE_LENGTH = 6; uint256 internal constant READ_REQUEST_LENGTH = 2; + uint256 internal constant TREE_LEAF_READ_REQUEST_LENGTH = 2; uint256 internal constant LOG_HASH_LENGTH = 3; uint256 internal constant SCOPED_LOG_HASH_LENGTH = 4; uint256 internal constant ENCRYPTED_LOG_HASH_LENGTH = 4; @@ -194,14 +195,14 @@ library Constants { uint256 internal constant SCOPED_READ_REQUEST_LEN = 3; uint256 internal constant PUBLIC_DATA_READ_LENGTH = 2; uint256 internal constant PRIVATE_VALIDATION_REQUESTS_LENGTH = 772; - uint256 internal constant PUBLIC_VALIDATION_REQUESTS_LENGTH = 514; + uint256 internal constant PUBLIC_VALIDATION_REQUESTS_LENGTH = 770; uint256 internal constant PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 3; uint256 internal constant COMBINED_ACCUMULATED_DATA_LENGTH = 610; uint256 internal constant COMBINED_CONSTANT_DATA_LENGTH = 43; uint256 internal constant PRIVATE_ACCUMULATED_DATA_LENGTH = 1336; uint256 internal constant PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 2167; uint256 internal constant PUBLIC_ACCUMULATED_DATA_LENGTH = 1311; - uint256 internal constant PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3629; + uint256 internal constant PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3885; uint256 internal constant KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 663; uint256 internal constant CONSTANT_ROLLUP_DATA_LENGTH = 12; uint256 internal constant BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = 29; diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/components/public_kernel_output_composer.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/components/public_kernel_output_composer.nr index a690bc819d9..6f45d5b3c3d 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/components/public_kernel_output_composer.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/components/public_kernel_output_composer.nr @@ -81,6 +81,14 @@ impl PublicKernelOutputComposer { let storage_contract_address = public_call.call_context.storage_contract_address; + let note_hash_read_requests = public_call.note_hash_read_requests; + for i in 0..note_hash_read_requests.len() { + let request = note_hash_read_requests[i]; + if !is_empty(request) { + self.output_builder.validation_requests.note_hash_read_requests.push(request); + } + } + let nullifier_read_requests = public_call.nullifier_read_requests; for i in 0..nullifier_read_requests.len() { let request = nullifier_read_requests[i]; @@ -97,6 +105,14 @@ impl PublicKernelOutputComposer { } } + let l1_to_l2_msg_read_requests = public_call.l1_to_l2_msg_read_requests; + for i in 0..l1_to_l2_msg_read_requests.len() { + let request = l1_to_l2_msg_read_requests[i]; + if !is_empty(request) { + self.output_builder.validation_requests.l1_to_l2_msg_read_requests.push(request); + } + } + let read_requests = public_call.contract_storage_reads; for i in 0..read_requests.len() { let read_request = read_requests[i]; @@ -117,8 +133,8 @@ impl PublicKernelOutputComposer { self.output_builder.end = propagate_accumulated_data(&mut self.output_builder.end, public_call); } - // TODO: Should keep the data even when reverts. - // The data is required for verifying validation requests in the tail circuit, which will then discard the + // TODO: Should keep the data even when reverts. + // The data is required for verifying validation requests in the tail circuit, which will then discard the // revertible data. 
if revert_in_phase { self.output_builder.end = PublicAccumulatedDataBuilder::empty(); diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr index 71542abe6a6..68c39dbf448 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr @@ -8,13 +8,14 @@ use crate::{ }; use dep::reset_kernel_lib::{ NullifierReadRequestHints, NullifierNonExistentReadRequestHints, PublicDataReadRequestHints, - PublicValidationRequestProcessor + PublicValidationRequestProcessor, TreeLeafReadRequestHint }; use dep::types::{ abis::{kernel_circuit_public_inputs::KernelCircuitPublicInputs, public_kernel_data::PublicKernelData}, constants::{ - MAX_PUBLIC_DATA_HINTS, MAX_NULLIFIER_READ_REQUESTS_PER_TX, PUBLIC_KERNEL_SETUP_INDEX, - PUBLIC_KERNEL_APP_LOGIC_INDEX, PUBLIC_KERNEL_TEARDOWN_INDEX + L1_TO_L2_MSG_TREE_HEIGHT, MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_HINTS, + MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, NOTE_HASH_TREE_HEIGHT, + PUBLIC_KERNEL_SETUP_INDEX, PUBLIC_KERNEL_APP_LOGIC_INDEX, PUBLIC_KERNEL_TEARDOWN_INDEX }, data::public_data_hint::PublicDataHint, partial_state_reference::PartialStateReference }; @@ -27,8 +28,10 @@ global ALLOWED_PREVIOUS_CIRCUITS = [ struct PublicKernelTailCircuitPrivateInputs { previous_kernel: PublicKernelData, + note_hash_read_request_hints: [TreeLeafReadRequestHint; MAX_NOTE_HASH_READ_REQUESTS_PER_TX], nullifier_read_request_hints: NullifierReadRequestHints, nullifier_non_existent_read_request_hints: NullifierNonExistentReadRequestHints, + l1_to_l2_msg_read_request_hints: [TreeLeafReadRequestHint; MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX], public_data_hints: [PublicDataHint; MAX_PUBLIC_DATA_HINTS], public_data_read_request_hints: 
PublicDataReadRequestHints, start_state: PartialStateReference, @@ -47,12 +50,13 @@ impl PublicKernelTailCircuitPrivateInputs { let previous_public_inputs = self.previous_kernel.public_inputs; PublicValidationRequestProcessor::new( previous_public_inputs, + self.start_state, + self.note_hash_read_request_hints, self.nullifier_read_request_hints, self.nullifier_non_existent_read_request_hints, - self.start_state.nullifier_tree.root, + self.l1_to_l2_msg_read_request_hints, self.public_data_read_request_hints, - self.public_data_hints, - self.start_state.public_data_tree.root + self.public_data_hints ).validate(); let output = self.generate_output(); @@ -71,7 +75,8 @@ mod tests { nullifier_read_request_hints_builder::NullifierReadRequestHintsBuilder, public_data_read_request_hints_builder::PublicDataReadRequestHintsBuilder }, - PublicDataHint, reset::read_request::{PendingReadHint, ReadRequestState, ReadRequestStatus} + PublicDataHint, reset::read_request::{PendingReadHint, ReadRequestState, ReadRequestStatus}, + TreeLeafReadRequestHint }; use dep::types::{ abis::{ @@ -80,24 +85,35 @@ mod tests { }, address::AztecAddress, constants::{ - MAX_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_HINTS, - MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NULLIFIER_TREE_HEIGHT, + L1_TO_L2_MSG_TREE_HEIGHT, MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, MAX_NOTE_HASHES_PER_TX, + MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, + MAX_PUBLIC_DATA_HINTS, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + NOTE_HASH_SUBTREE_HEIGHT, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_TREE_HEIGHT, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_TREE_HEIGHT, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - PUBLIC_KERNEL_APP_LOGIC_INDEX, 
BASE_ROLLUP_INDEX, PUBLIC_KERNEL_SETUP_INDEX, - PUBLIC_KERNEL_TEARDOWN_INDEX + NOTE_HASH_TREE_HEIGHT, PUBLIC_KERNEL_APP_LOGIC_INDEX, BASE_ROLLUP_INDEX, + PUBLIC_KERNEL_SETUP_INDEX, PUBLIC_KERNEL_TEARDOWN_INDEX }, hash::{compute_siloed_nullifier, silo_note_hash}, public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage, tests::{ fixture_builder::FixtureBuilder, merkle_tree_utils::NonEmptyMerkleTree, - utils::{assert_array_eq, swap_items} + utils::{assert_array_eq, pad_end, swap_items} }, traits::is_empty, partial_state_reference::PartialStateReference, utils::arrays::{array_length, array_merge}, merkle_tree::MembershipWitness }; + fn build_note_hash_tree(pre_existing_note_hashes: [Field; N]) -> NonEmptyMerkleTree { + NonEmptyMerkleTree::new( + pad_end(pre_existing_note_hashes, 0), + [0; NOTE_HASH_TREE_HEIGHT], + [0; NOTE_HASH_TREE_HEIGHT - NOTE_HASH_SUBTREE_HEIGHT], + [0; NOTE_HASH_SUBTREE_HEIGHT] + ) + } + fn build_nullifier_tree() -> NonEmptyMerkleTree { let mut pre_existing_nullifiers = [NullifierLeafPreimage::empty(); MAX_NULLIFIERS_PER_TX]; pre_existing_nullifiers[0] = NullifierLeafPreimage { nullifier: 0, next_nullifier: 100, next_index: 1 }; @@ -131,12 +147,16 @@ mod tests { struct PublicKernelTailCircuitPrivateInputsBuilder { previous_kernel: FixtureBuilder, previous_revertible: FixtureBuilder, + note_hash_read_request_hints: BoundedVec, MAX_NOTE_HASH_READ_REQUESTS_PER_TX>, nullifier_read_request_hints_builder: NullifierReadRequestHintsBuilder, nullifier_non_existent_read_request_hints_builder: NullifierNonExistentReadRequestHintsBuilder, public_data_read_request_hints_builder: PublicDataReadRequestHintsBuilder, public_data_hints: BoundedVec, public_data_tree: NonEmptyMerkleTree, + l1_to_l2_msg_read_request_hints: BoundedVec, MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX>, start_state: PartialStateReference, + note_hash_tree: NonEmptyMerkleTree, + pre_existing_note_hashes: [Field; 2], } impl PublicKernelTailCircuitPrivateInputsBuilder { @@ -148,21 +168,31 @@ mod 
tests { PublicKernelTailCircuitPrivateInputsBuilder { previous_kernel, previous_revertible, + note_hash_read_request_hints: BoundedVec::new(), nullifier_read_request_hints_builder: NullifierReadRequestHintsBuilder::new(), nullifier_non_existent_read_request_hints_builder, public_data_read_request_hints_builder: PublicDataReadRequestHintsBuilder::new(MAX_PUBLIC_DATA_READS_PER_TX), public_data_hints: BoundedVec::new(), public_data_tree: NonEmptyMerkleTree::empty(), - start_state: PartialStateReference::empty() + l1_to_l2_msg_read_request_hints: BoundedVec::new(), + start_state: PartialStateReference::empty(), + note_hash_tree: NonEmptyMerkleTree::empty(), + pre_existing_note_hashes: [598589, 714714] } } + pub fn with_note_hash_tree(&mut self) -> Self { + self.note_hash_tree = build_note_hash_tree(self.pre_existing_note_hashes); + self.start_state.note_hash_tree.root = self.note_hash_tree.get_root(); + self.previous_kernel.historical_header.state.partial.note_hash_tree.root = 11111; + *self + } + pub fn with_nullifier_tree(&mut self) -> Self { let nullifier_tree = build_nullifier_tree(); self.nullifier_non_existent_read_request_hints_builder.set_nullifier_tree(nullifier_tree); - let tree_root = nullifier_tree.get_root(); - self.start_state.nullifier_tree.root = tree_root; - self.previous_kernel.historical_header.state.partial.nullifier_tree.root = tree_root; + self.start_state.nullifier_tree.root = nullifier_tree.get_root(); + self.previous_kernel.historical_header.state.partial.nullifier_tree.root = 22222; *self } @@ -173,6 +203,15 @@ mod tests { *self } + pub fn add_note_hash_read_request(&mut self, pre_existing_note_hash_index: u32) { + self.previous_kernel.add_note_hash_tree_leaf_read_requests( + self.pre_existing_note_hashes[pre_existing_note_hash_index], + pre_existing_note_hash_index as Field + ); + let sibling_path = self.note_hash_tree.get_sibling_path(pre_existing_note_hash_index); + self.note_hash_read_request_hints.push(TreeLeafReadRequestHint { 
sibling_path }); + } + pub fn add_nullifier(&mut self, unsiloed_nullifier: Field) { self.previous_kernel.add_siloed_nullifier(unsiloed_nullifier); self.sync_counters(); @@ -281,6 +320,8 @@ mod tests { nullifier_non_existent_read_request_hints: self.nullifier_non_existent_read_request_hints_builder.to_hints(), public_data_hints: self.public_data_hints.storage, public_data_read_request_hints: self.public_data_read_request_hints_builder.to_hints(), + note_hash_read_request_hints: self.note_hash_read_request_hints.storage, + l1_to_l2_msg_read_request_hints: self.l1_to_l2_msg_read_request_hints.storage, start_state: self.start_state }; @@ -334,6 +375,14 @@ mod tests { } #[test] + unconstrained fn verify_note_hash_read_requests_succeeds() { + let mut builder = PublicKernelTailCircuitPrivateInputsBuilder::new().with_note_hash_tree(); + + builder.add_note_hash_read_request(1); + + builder.succeeded(); + } + unconstrained fn one_pending_nullifier_read_request() { let mut builder = PublicKernelTailCircuitPrivateInputsBuilder::new(); diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/lib.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/lib.nr index c67fa479d22..319f2100ddd 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/lib.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/lib.nr @@ -6,7 +6,8 @@ use public_data_read_request_reset::PublicDataReadRequestHints; use public_validation_request_processor::PublicValidationRequestProcessor; use reset::{ key_validation_hint::KeyValidationHint, - transient_data::{TransientDataIndexHint, verify_squashed_transient_data} + transient_data::{TransientDataIndexHint, verify_squashed_transient_data}, + tree_leaf_read_request::TreeLeafReadRequestHint }; use dep::types::data::public_data_hint::PublicDataHint; diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr 
b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr index 0e069fdd86f..01fb966217d 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr @@ -2,7 +2,8 @@ use crate::{ reset::{ non_existent_read_request::reset_non_existent_read_requests, mutable_data_read_request::reset_mutable_data_read_requests, - read_request::verify_reset_read_requests + read_request::verify_reset_read_requests, + tree_leaf_read_request::{TreeLeafReadRequestHint, validate_tree_leaf_read_requests} }, nullifier_read_request_reset::NullifierReadRequestHints, nullifier_non_existent_read_request_reset::NullifierNonExistentReadRequestHints, @@ -15,18 +16,26 @@ use dep::types::{ validation_requests::PublicValidationRequests }, data::public_data_hint::PublicDataHint, - constants::{MAX_NULLIFIERS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX}, - hash::compute_siloed_nullifier, traits::is_empty, + constants::{ + L1_TO_L2_MSG_TREE_HEIGHT, MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, + MAX_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + NOTE_HASH_TREE_HEIGHT +}, + hash::compute_siloed_nullifier, partial_state_reference::PartialStateReference, traits::is_empty, utils::arrays::{array_merge, array_to_bounded_vec, assert_sorted_array} }; struct PublicValidationRequestProcessor { validation_requests: PublicValidationRequests, + note_hash_read_request_hints: [TreeLeafReadRequestHint; MAX_NOTE_HASH_READ_REQUESTS_PER_TX], + note_hash_tree_root: Field, pending_nullifiers: [Nullifier; MAX_NULLIFIERS_PER_TX], - pending_public_data_writes: [PublicDataUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], nullifier_read_request_hints: NullifierReadRequestHints, 
nullifier_non_existent_read_request_hints: NullifierNonExistentReadRequestHints, nullifier_tree_root: Field, + l1_to_l2_msg_read_request_hints: [TreeLeafReadRequestHint; MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX], + l1_to_l2_msg_tree_root: Field, + pending_public_data_writes: [PublicDataUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], public_data_read_request_hints: PublicDataReadRequestHints, public_data_hints: [PublicDataHint; NUM_PUBLIC_DATA_HINTS], public_data_tree_root: Field, @@ -35,12 +44,13 @@ struct PublicValidationRequestProcessor { impl PublicValidationRequestProcessor { pub fn new( public_inputs: PublicKernelCircuitPublicInputs, + start_state: PartialStateReference, + note_hash_read_request_hints: [TreeLeafReadRequestHint; MAX_NOTE_HASH_READ_REQUESTS_PER_TX], nullifier_read_request_hints: NullifierReadRequestHints, nullifier_non_existent_read_request_hints: NullifierNonExistentReadRequestHints, - nullifier_tree_root: Field, + l1_to_l2_msg_read_request_hints: [TreeLeafReadRequestHint; MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX], public_data_read_request_hints: PublicDataReadRequestHints, - public_data_hints: [PublicDataHint; NUM_PUBLIC_DATA_HINTS], - public_data_tree_root: Field + public_data_hints: [PublicDataHint; NUM_PUBLIC_DATA_HINTS] ) -> Self { let end_non_revertible = public_inputs.end_non_revertible; let end = public_inputs.end; @@ -55,22 +65,36 @@ impl PublicValidationRequestProcessor PublicValidationRequestProcessor { + sibling_path: [Field; N] +} + +pub fn validate_tree_leaf_read_requests( + read_requests: [TreeLeafReadRequest; READ_REQUEST_LEN], + hints: [TreeLeafReadRequestHint; READ_REQUEST_LEN], + tree_root: Field +) { + for i in 0..READ_REQUEST_LEN { + let read_request = read_requests[i]; + if !is_empty(read_request) { + assert_check_membership( + read_request.value, + read_request.leaf_index, + hints[i].sibling_path, + tree_root + ); + } + } +} + diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/mod.nr 
b/noir-projects/noir-protocol-circuits/crates/types/src/abis/mod.nr index 3c2e588deff..72738cbdf81 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/mod.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/mod.nr @@ -14,6 +14,7 @@ mod combined_constant_data; mod side_effect; mod read_request; +mod tree_leaf_read_request; mod log_hash; mod note_hash; mod nullifier; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr index 09673e94af0..b70557d9cda 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr @@ -1,8 +1,8 @@ use crate::{ abis::{ call_context::CallContext, note_hash::NoteHash, nullifier::Nullifier, read_request::ReadRequest, - gas::Gas, global_variables::GlobalVariables, log_hash::LogHash, - public_call_request::PublicCallRequest + tree_leaf_read_request::TreeLeafReadRequest, gas::Gas, global_variables::GlobalVariables, + log_hash::LogHash, public_call_request::PublicCallRequest }, address::AztecAddress, constants::{ @@ -24,10 +24,10 @@ struct PublicCircuitPublicInputs { args_hash: Field, returns_hash: Field, - note_hash_read_requests: [ReadRequest; MAX_NOTE_HASH_READ_REQUESTS_PER_CALL], + note_hash_read_requests: [TreeLeafReadRequest; MAX_NOTE_HASH_READ_REQUESTS_PER_CALL], nullifier_read_requests: [ReadRequest; MAX_NULLIFIER_READ_REQUESTS_PER_CALL], nullifier_non_existent_read_requests: [ReadRequest; MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL], - l1_to_l2_msg_read_requests: [ReadRequest; MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL], + l1_to_l2_msg_read_requests: [TreeLeafReadRequest; MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL], contract_storage_update_requests: [StorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL], 
contract_storage_reads: [StorageRead; MAX_PUBLIC_DATA_READS_PER_CALL], @@ -52,7 +52,7 @@ struct PublicCircuitPublicInputs { prover_address: AztecAddress, revert_code: u8, - + start_gas_left: Gas, end_gas_left: Gas, transaction_fee: Field, @@ -126,10 +126,10 @@ impl Deserialize for PublicCircuitPublicInp call_context: reader.read_struct(CallContext::deserialize), args_hash: reader.read(), returns_hash: reader.read(), - note_hash_read_requests: reader.read_struct_array(ReadRequest::deserialize, [ReadRequest::empty(); MAX_NOTE_HASH_READ_REQUESTS_PER_CALL]), + note_hash_read_requests: reader.read_struct_array(TreeLeafReadRequest::deserialize, [TreeLeafReadRequest::empty(); MAX_NOTE_HASH_READ_REQUESTS_PER_CALL]), nullifier_read_requests: reader.read_struct_array(ReadRequest::deserialize, [ReadRequest::empty(); MAX_NULLIFIER_READ_REQUESTS_PER_CALL]), nullifier_non_existent_read_requests: reader.read_struct_array(ReadRequest::deserialize, [ReadRequest::empty(); MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL]), - l1_to_l2_msg_read_requests: reader.read_struct_array(ReadRequest::deserialize, [ReadRequest::empty(); MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL]), + l1_to_l2_msg_read_requests: reader.read_struct_array(TreeLeafReadRequest::deserialize, [TreeLeafReadRequest::empty(); MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL]), contract_storage_update_requests: reader.read_struct_array(StorageUpdateRequest::deserialize, [StorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL]), contract_storage_reads: reader.read_struct_array(StorageRead::deserialize, [StorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL]), public_call_requests: reader.read_struct_array(PublicCallRequest::deserialize, [PublicCallRequest::empty(); MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL]), @@ -159,10 +159,10 @@ impl Empty for PublicCircuitPublicInputs { call_context: CallContext::empty(), args_hash: 0, returns_hash: 0, - note_hash_read_requests: [ReadRequest::empty(); 
MAX_NOTE_HASH_READ_REQUESTS_PER_CALL], + note_hash_read_requests: [TreeLeafReadRequest::empty(); MAX_NOTE_HASH_READ_REQUESTS_PER_CALL], nullifier_read_requests: [ReadRequest::empty(); MAX_NULLIFIER_READ_REQUESTS_PER_CALL], nullifier_non_existent_read_requests: [ReadRequest::empty(); MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL], - l1_to_l2_msg_read_requests: [ReadRequest::empty(); MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL], + l1_to_l2_msg_read_requests: [TreeLeafReadRequest::empty(); MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL], contract_storage_update_requests: [StorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL], contract_storage_reads: [StorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL], public_call_requests: [PublicCallRequest::empty(); MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/tree_leaf_read_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/tree_leaf_read_request.nr new file mode 100644 index 00000000000..69dee4db52d --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/tree_leaf_read_request.nr @@ -0,0 +1,44 @@ +use crate::{traits::{Empty, Serialize, Deserialize}, constants::TREE_LEAF_READ_REQUEST_LENGTH}; + +struct TreeLeafReadRequest { + value: Field, + leaf_index: Field, +} + +impl Eq for TreeLeafReadRequest { + fn eq(self, other: TreeLeafReadRequest) -> bool { + (self.value == other.value) & (self.leaf_index == other.leaf_index) + } +} + +impl Empty for TreeLeafReadRequest { + fn empty() -> Self { + TreeLeafReadRequest { + value: 0, + leaf_index: 0, + } + } +} + +impl Serialize for TreeLeafReadRequest { + fn serialize(self) -> [Field; TREE_LEAF_READ_REQUEST_LENGTH] { + [self.value, self.leaf_index] + } +} + +impl Deserialize for TreeLeafReadRequest { + fn deserialize(values: [Field; TREE_LEAF_READ_REQUEST_LENGTH]) -> Self { + Self { + value: values[0], + leaf_index: values[1], + } + } +} + +#[test] +fn 
serialization_of_empty_read() { + let item = TreeLeafReadRequest::empty(); + let serialized = item.serialize(); + let deserialized = TreeLeafReadRequest::deserialize(serialized); + assert(item.eq(deserialized)); +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/public_validation_requests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/public_validation_requests.nr index 4def172e8ec..e9853592622 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/public_validation_requests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/public_validation_requests.nr @@ -1,9 +1,11 @@ use crate::{ abis::{ public_data_read::PublicDataRead, read_request::ScopedReadRequest, + tree_leaf_read_request::TreeLeafReadRequest, validation_requests::{rollup_validation_requests::RollupValidationRequests} }, constants::{ + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, PUBLIC_VALIDATION_REQUESTS_LENGTH }, @@ -12,8 +14,10 @@ use crate::{ struct PublicValidationRequests { for_rollup: RollupValidationRequests, + note_hash_read_requests: [TreeLeafReadRequest; MAX_NOTE_HASH_READ_REQUESTS_PER_TX], nullifier_read_requests: [ScopedReadRequest; MAX_NULLIFIER_READ_REQUESTS_PER_TX], nullifier_non_existent_read_requests: [ScopedReadRequest; MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX], + l1_to_l2_msg_read_requests: [TreeLeafReadRequest; MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX], public_data_reads: [PublicDataRead; MAX_PUBLIC_DATA_READS_PER_TX], } @@ -23,15 +27,23 @@ impl Serialize for PublicValidationRequests { fields.extend_from_array(self.for_rollup.serialize()); - for i in 0..MAX_NULLIFIER_READ_REQUESTS_PER_TX { + for i in 0..self.note_hash_read_requests.len() { + 
fields.extend_from_array(self.note_hash_read_requests[i].serialize()); + } + + for i in 0..self.nullifier_read_requests.len() { fields.extend_from_array(self.nullifier_read_requests[i].serialize()); } - for i in 0..MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX { + for i in 0..self.nullifier_non_existent_read_requests.len() { fields.extend_from_array(self.nullifier_non_existent_read_requests[i].serialize()); } - for i in 0..MAX_PUBLIC_DATA_READS_PER_TX { + for i in 0..self.l1_to_l2_msg_read_requests.len() { + fields.extend_from_array(self.l1_to_l2_msg_read_requests[i].serialize()); + } + + for i in 0..self.public_data_reads.len() { fields.extend_from_array(self.public_data_reads[i].serialize()); } @@ -47,8 +59,10 @@ impl Deserialize for PublicValidationRequests let mut reader = Reader::new(serialized); let item = Self { for_rollup: reader.read_struct(RollupValidationRequests::deserialize), + note_hash_read_requests: reader.read_struct_array(TreeLeafReadRequest::deserialize, [TreeLeafReadRequest::empty(); MAX_NOTE_HASH_READ_REQUESTS_PER_TX]), nullifier_read_requests: reader.read_struct_array(ScopedReadRequest::deserialize, [ScopedReadRequest::empty(); MAX_NULLIFIER_READ_REQUESTS_PER_TX]), nullifier_non_existent_read_requests: reader.read_struct_array(ScopedReadRequest::deserialize, [ScopedReadRequest::empty(); MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX]), + l1_to_l2_msg_read_requests: reader.read_struct_array(TreeLeafReadRequest::deserialize, [TreeLeafReadRequest::empty(); MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX]), public_data_reads: reader.read_struct_array(PublicDataRead::deserialize, [PublicDataRead::empty(); MAX_PUBLIC_DATA_READS_PER_TX]), }; @@ -61,8 +75,10 @@ impl Empty for PublicValidationRequests { fn empty() -> Self { PublicValidationRequests { for_rollup: RollupValidationRequests::empty(), + note_hash_read_requests: [TreeLeafReadRequest::empty(); MAX_NOTE_HASH_READ_REQUESTS_PER_TX], nullifier_read_requests: [ScopedReadRequest::empty(); 
MAX_NULLIFIER_READ_REQUESTS_PER_TX], nullifier_non_existent_read_requests: [ScopedReadRequest::empty(); MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX], + l1_to_l2_msg_read_requests: [TreeLeafReadRequest::empty(); MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX], public_data_reads: [PublicDataRead::empty(); MAX_PUBLIC_DATA_READS_PER_TX], } } @@ -71,8 +87,10 @@ impl Empty for PublicValidationRequests { impl Eq for PublicValidationRequests { fn eq(self, other: Self) -> bool { (self.for_rollup.eq(other.for_rollup)) & + (self.note_hash_read_requests == other.note_hash_read_requests) & (self.nullifier_read_requests == other.nullifier_read_requests) & (self.nullifier_non_existent_read_requests == other.nullifier_non_existent_read_requests) & + (self.l1_to_l2_msg_read_requests == other.l1_to_l2_msg_read_requests) & (self.public_data_reads == other.public_data_reads) } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/public_validation_requests_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/public_validation_requests_builder.nr index cde568a6152..4a14f852f0c 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/public_validation_requests_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/public_validation_requests_builder.nr @@ -1,12 +1,14 @@ use crate::{ abis::{ max_block_number::MaxBlockNumber, public_data_read::PublicDataRead, read_request::ScopedReadRequest, + tree_leaf_read_request::TreeLeafReadRequest, validation_requests::{ public_validation_requests::PublicValidationRequests, rollup_validation_requests::RollupValidationRequests } }, constants::{ + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX }, @@ -15,27 +17,33 @@ use crate::{ struct 
PublicValidationRequestsBuilder { max_block_number: MaxBlockNumber, + note_hash_read_requests: BoundedVec, nullifier_read_requests: BoundedVec, nullifier_non_existent_read_requests: BoundedVec, public_data_reads: BoundedVec, + l1_to_l2_msg_read_requests: BoundedVec, } impl PublicValidationRequestsBuilder { pub fn new(requests: PublicValidationRequests) -> Self { PublicValidationRequestsBuilder { max_block_number: requests.for_rollup.max_block_number, + note_hash_read_requests: array_to_bounded_vec(requests.note_hash_read_requests), nullifier_read_requests: array_to_bounded_vec(requests.nullifier_read_requests), nullifier_non_existent_read_requests: array_to_bounded_vec(requests.nullifier_non_existent_read_requests), - public_data_reads: array_to_bounded_vec(requests.public_data_reads) + public_data_reads: array_to_bounded_vec(requests.public_data_reads), + l1_to_l2_msg_read_requests: array_to_bounded_vec(requests.l1_to_l2_msg_read_requests) } } pub fn finish(self) -> PublicValidationRequests { PublicValidationRequests { for_rollup: self.for_rollup(), + note_hash_read_requests: self.note_hash_read_requests.storage, nullifier_read_requests: self.nullifier_read_requests.storage, nullifier_non_existent_read_requests: self.nullifier_non_existent_read_requests.storage, - public_data_reads: self.public_data_reads.storage + public_data_reads: self.public_data_reads.storage, + l1_to_l2_msg_read_requests: self.l1_to_l2_msg_read_requests.storage } } @@ -48,9 +56,11 @@ impl Empty for PublicValidationRequestsBuilder { fn empty() -> Self { PublicValidationRequestsBuilder { max_block_number: MaxBlockNumber::empty(), + note_hash_read_requests: BoundedVec::new(), nullifier_read_requests: BoundedVec::new(), nullifier_non_existent_read_requests: BoundedVec::new(), public_data_reads: BoundedVec::new(), + l1_to_l2_msg_read_requests: BoundedVec::new(), } } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr 
b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 154bc65ecb4..3077964c389 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -222,6 +222,7 @@ global KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH = KEY_VALIDATION_REQUEST_LENG global SCOPED_KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH = KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH + 1; global PARTIAL_STATE_REFERENCE_LENGTH: u32 = 6; global READ_REQUEST_LENGTH = 2; +global TREE_LEAF_READ_REQUEST_LENGTH = 2; global LOG_HASH_LENGTH = 3; global SCOPED_LOG_HASH_LENGTH = LOG_HASH_LENGTH + 1; global ENCRYPTED_LOG_HASH_LENGTH = 4; @@ -241,7 +242,7 @@ global TX_REQUEST_LENGTH: u32 = 2 + TX_CONTEXT_LENGTH + FUNCTION_DATA_LENGTH; global TOTAL_FEES_LENGTH = 1; global HEADER_LENGTH: u32 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + CONTENT_COMMITMENT_LENGTH + STATE_REFERENCE_LENGTH + GLOBAL_VARIABLES_LENGTH + TOTAL_FEES_LENGTH; global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u32 = CALL_CONTEXT_LENGTH + 4 + MAX_BLOCK_NUMBER_LENGTH + (READ_REQUEST_LENGTH * MAX_NOTE_HASH_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH * MAX_KEY_VALIDATION_REQUESTS_PER_CALL) + (NOTE_HASH_LENGTH * MAX_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NULLIFIERS_PER_CALL) + (PRIVATE_CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL) + (PUBLIC_CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL) + PUBLIC_CALL_REQUEST_LENGTH + (L2_TO_L1_MESSAGE_LENGTH * MAX_L2_TO_L1_MSGS_PER_CALL) + 2 + (NOTE_LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_CALL) + (ENCRYPTED_LOG_HASH_LENGTH * MAX_ENCRYPTED_LOGS_PER_CALL) + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + HEADER_LENGTH + TX_CONTEXT_LENGTH; -global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u32 = CALL_CONTEXT_LENGTH + /*argsHash + returnsHash*/ 2 + (READ_REQUEST_LENGTH * 
MAX_NOTE_HASH_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_CALL) + (PUBLIC_CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL) + (NOTE_HASH_LENGTH * MAX_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NULLIFIERS_PER_CALL) + (L2_TO_L1_MESSAGE_LENGTH * MAX_L2_TO_L1_MSGS_PER_CALL) + 2 + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + /* transaction_fee */ 1; +global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u32 = CALL_CONTEXT_LENGTH + /*argsHash + returnsHash*/ 2 + (TREE_LEAF_READ_REQUEST_LENGTH * MAX_NOTE_HASH_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL) + (TREE_LEAF_READ_REQUEST_LENGTH * MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_CALL) + (PUBLIC_CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL) + (NOTE_HASH_LENGTH * MAX_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NULLIFIERS_PER_CALL) + (L2_TO_L1_MESSAGE_LENGTH * MAX_L2_TO_L1_MSGS_PER_CALL) + 2 + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + /* transaction_fee */ 1; global PRIVATE_CALL_STACK_ITEM_LENGTH: u32 = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; global PUBLIC_CONTEXT_INPUTS_LENGTH: u32 = CALL_CONTEXT_LENGTH + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + 
GAS_LENGTH + 2; @@ -250,7 +251,7 @@ global AGGREGATION_OBJECT_LENGTH: u32 = 16; global SCOPED_READ_REQUEST_LEN = READ_REQUEST_LENGTH + 1; global PUBLIC_DATA_READ_LENGTH = 2; global PRIVATE_VALIDATION_REQUESTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + (SCOPED_READ_REQUEST_LEN * MAX_NOTE_HASH_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX) + (SCOPED_KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH * MAX_KEY_VALIDATION_REQUESTS_PER_TX) + 2; -global PUBLIC_VALIDATION_REQUESTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX) + (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX); +global PUBLIC_VALIDATION_REQUESTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + (TREE_LEAF_READ_REQUEST_LENGTH * MAX_NOTE_HASH_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX) + (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX) + (TREE_LEAF_READ_REQUEST_LENGTH * MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX); global PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 3; global COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NOTE_HASHES_PER_TX + MAX_NULLIFIERS_PER_TX + (MAX_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + (LOG_HASH_LENGTH * MAX_NOTE_ENCRYPTED_LOGS_PER_TX) + (SCOPED_LOG_HASH_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + 3 + (MAX_UNENCRYPTED_LOGS_PER_TX * SCOPED_LOG_HASH_LENGTH) + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index 0427e00154a..9a2c9100fa5 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -18,6 +18,7 @@ use crate::{ public_call_stack_item_compressed::PublicCallStackItemCompressed, public_circuit_public_inputs::PublicCircuitPublicInputs, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, read_request::{ReadRequest, ScopedReadRequest}, + tree_leaf_read_request::TreeLeafReadRequest, log_hash::{LogHash, NoteLogHash, ScopedLogHash, EncryptedLogHash, ScopedEncryptedLogHash}, validation_requests::{ KeyValidationRequest, KeyValidationRequestAndGenerator, PrivateValidationRequests, @@ -112,9 +113,10 @@ struct FixtureBuilder { // Validation requests. max_block_number: MaxBlockNumber, note_hash_read_requests: BoundedVec, + note_hash_tree_leaf_read_requests: BoundedVec, nullifier_read_requests: BoundedVec, nullifier_non_existent_read_requests: BoundedVec, - l1_to_l2_msg_read_requests: BoundedVec, + l1_to_l2_msg_read_requests: BoundedVec, scoped_key_validation_requests_and_generators: BoundedVec, public_data_reads: BoundedVec, contract_storage_reads: BoundedVec, @@ -433,12 +435,12 @@ impl FixtureBuilder { call_context: self.build_call_context(), args_hash: self.args_hash, returns_hash: self.returns_hash, - note_hash_read_requests: subarray(self.note_hash_read_requests.storage.map(|r: ScopedReadRequest| r.read_request)), + note_hash_read_requests: subarray(self.note_hash_tree_leaf_read_requests.storage), nullifier_read_requests: subarray(self.nullifier_read_requests.storage.map(|r: ScopedReadRequest| r.read_request)), nullifier_non_existent_read_requests: subarray( self.nullifier_non_existent_read_requests.storage.map(|r: ScopedReadRequest| r.read_request) ), - l1_to_l2_msg_read_requests: subarray(self.l1_to_l2_msg_read_requests.storage.map(|r: ScopedReadRequest| r.read_request)), + l1_to_l2_msg_read_requests: subarray(self.l1_to_l2_msg_read_requests.storage), contract_storage_update_requests: 
subarray(self.contract_storage_update_requests.storage), contract_storage_reads: self.contract_storage_reads.storage, public_call_requests: subarray(self.public_call_requests.storage), @@ -474,9 +476,11 @@ impl FixtureBuilder { pub fn to_public_validation_requests(self) -> PublicValidationRequests { PublicValidationRequests { for_rollup: self.to_rollup_validation_requests(), + note_hash_read_requests: self.note_hash_tree_leaf_read_requests.storage, nullifier_read_requests: self.nullifier_read_requests.storage, nullifier_non_existent_read_requests: self.nullifier_non_existent_read_requests.storage, - public_data_reads: self.public_data_reads.storage + public_data_reads: self.public_data_reads.storage, + l1_to_l2_msg_read_requests: self.l1_to_l2_msg_read_requests.storage } } @@ -755,6 +759,21 @@ impl FixtureBuilder { } } + pub fn add_note_hash_tree_leaf_read_requests(&mut self, value: Field, leaf_index: Field) { + let read_request = TreeLeafReadRequest { value, leaf_index }; + self.note_hash_tree_leaf_read_requests.push(read_request); + } + + pub fn append_note_hash_tree_leaf_read_requests(&mut self, num_reads: u32) { + let index_offset = self.note_hash_tree_leaf_read_requests.len(); + for i in 0..self.note_hash_tree_leaf_read_requests.max_len() { + if i < num_reads { + let value = self.mock_note_hash_read_value(index_offset + i); + self.add_note_hash_tree_leaf_read_requests(value, (index_offset + i) as Field); + } + } + } + pub fn add_read_request_for_pending_nullifier(&mut self, nullifier_index: u32) -> u32 { let read_request_index = self.nullifier_read_requests.len(); let nullifier = self.mock_nullifier_value(nullifier_index); @@ -1170,6 +1189,7 @@ impl Empty for FixtureBuilder { public_call_requests: BoundedVec::new(), max_block_number: MaxBlockNumber::empty(), note_hash_read_requests: BoundedVec::new(), + note_hash_tree_leaf_read_requests: BoundedVec::new(), nullifier_read_requests: BoundedVec::new(), nullifier_non_existent_read_requests: BoundedVec::new(), 
l1_to_l2_msg_read_requests: BoundedVec::new(), diff --git a/yarn-project/bb-prover/src/avm_proving.test.ts b/yarn-project/bb-prover/src/avm_proving.test.ts index d48c1eb4859..ebf5af81ea5 100644 --- a/yarn-project/bb-prover/src/avm_proving.test.ts +++ b/yarn-project/bb-prover/src/avm_proving.test.ts @@ -27,6 +27,7 @@ import { PublicCircuitPublicInputs, ReadRequest, RevertCode, + TreeLeafReadRequest, } from '@aztec/circuits.js'; import { computeVarArgsHash } from '@aztec/circuits.js/hash'; import { padArrayEnd } from '@aztec/foundation/collection'; @@ -307,7 +308,7 @@ const getPublicInputs = (result: PublicExecutionResult): PublicCircuitPublicInpu returnsHash: computeVarArgsHash(result.returnValues), noteHashReadRequests: padArrayEnd( result.noteHashReadRequests, - ReadRequest.empty(), + TreeLeafReadRequest.empty(), MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, ), nullifierReadRequests: padArrayEnd( @@ -322,7 +323,7 @@ const getPublicInputs = (result: PublicExecutionResult): PublicCircuitPublicInpu ), l1ToL2MsgReadRequests: padArrayEnd( result.l1ToL2MsgReadRequests, - ReadRequest.empty(), + TreeLeafReadRequest.empty(), MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, ), contractStorageReads: padArrayEnd( diff --git a/yarn-project/circuit-types/src/sibling_path/sibling_path.ts b/yarn-project/circuit-types/src/sibling_path/sibling_path.ts index 086c57baf80..df22de9a56c 100644 --- a/yarn-project/circuit-types/src/sibling_path/sibling_path.ts +++ b/yarn-project/circuit-types/src/sibling_path/sibling_path.ts @@ -84,7 +84,7 @@ export class SiblingPath { * Convert Sibling Path object into a tuple of field elements. * @returns A tuple representation of the sibling path. 
*/ - public toTuple(): Tuple { + public toTuple(): Tuple { const array = this.toFields(); return makeTuple(array.length as N, i => array[i], 0); } diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 10056cb822c..df0e0d0f9a1 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -150,6 +150,7 @@ export const KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH = 5; export const SCOPED_KEY_VALIDATION_REQUEST_AND_GENERATOR_LENGTH = 6; export const PARTIAL_STATE_REFERENCE_LENGTH = 6; export const READ_REQUEST_LENGTH = 2; +export const TREE_LEAF_READ_REQUEST_LENGTH = 2; export const LOG_HASH_LENGTH = 3; export const SCOPED_LOG_HASH_LENGTH = 4; export const ENCRYPTED_LOG_HASH_LENGTH = 4; @@ -176,14 +177,14 @@ export const AGGREGATION_OBJECT_LENGTH = 16; export const SCOPED_READ_REQUEST_LEN = 3; export const PUBLIC_DATA_READ_LENGTH = 2; export const PRIVATE_VALIDATION_REQUESTS_LENGTH = 772; -export const PUBLIC_VALIDATION_REQUESTS_LENGTH = 514; +export const PUBLIC_VALIDATION_REQUESTS_LENGTH = 770; export const PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 3; export const COMBINED_ACCUMULATED_DATA_LENGTH = 610; export const COMBINED_CONSTANT_DATA_LENGTH = 43; export const PRIVATE_ACCUMULATED_DATA_LENGTH = 1336; export const PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 2167; export const PUBLIC_ACCUMULATED_DATA_LENGTH = 1311; -export const PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3629; +export const PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3885; export const KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 663; export const CONSTANT_ROLLUP_DATA_LENGTH = 12; export const BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = 29; diff --git a/yarn-project/circuits.js/src/structs/index.ts b/yarn-project/circuits.js/src/structs/index.ts index e58f824e9f8..19cb59bdeec 100644 --- a/yarn-project/circuits.js/src/structs/index.ts +++ b/yarn-project/circuits.js/src/structs/index.ts @@ -84,6 +84,8 @@ 
export * from './scoped_key_validation_request_and_generator.js'; export * from './shared.js'; export * from './side_effects.js'; export * from './state_reference.js'; +export * from './tree_leaf_read_request.js'; +export * from './tree_leaf_read_request_hint.js'; export * from './trees/index.js'; export * from './tx_context.js'; export * from './tx_request.js'; diff --git a/yarn-project/circuits.js/src/structs/kernel/public_kernel_tail_circuit_private_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/public_kernel_tail_circuit_private_inputs.ts index c6f22d4b536..7c2be1c3da5 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_kernel_tail_circuit_private_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_kernel_tail_circuit_private_inputs.ts @@ -1,6 +1,13 @@ import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; -import { MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_HINTS } from '../../constants.gen.js'; +import { + L1_TO_L2_MSG_TREE_HEIGHT, + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, + MAX_NOTE_HASH_READ_REQUESTS_PER_TX, + MAX_NULLIFIER_READ_REQUESTS_PER_TX, + MAX_PUBLIC_DATA_HINTS, + NOTE_HASH_TREE_HEIGHT, +} from '../../constants.gen.js'; import { type NullifierNonExistentReadRequestHints, nullifierNonExistentReadRequestHintsFromBuffer, @@ -9,6 +16,7 @@ import { PartialStateReference } from '../partial_state_reference.js'; import { PublicDataHint } from '../public_data_hint.js'; import { PublicDataReadRequestHints } from '../public_data_read_request_hints.js'; import { type NullifierReadRequestHints, nullifierReadRequestHintsFromBuffer } from '../read_request_hints/index.js'; +import { TreeLeafReadRequestHint } from '../tree_leaf_read_request_hint.js'; import { PublicKernelData } from './public_kernel_data.js'; export class PublicKernelTailCircuitPrivateInputs { @@ -17,6 +25,10 @@ export class PublicKernelTailCircuitPrivateInputs { * Kernels are recursive and this is the data from the 
previous kernel. */ public readonly previousKernel: PublicKernelData, + public readonly noteHashReadRequestHints: Tuple< + TreeLeafReadRequestHint, + typeof MAX_NOTE_HASH_READ_REQUESTS_PER_TX + >, /** * Contains hints for the nullifier read requests to locate corresponding pending or settled nullifiers. */ @@ -28,6 +40,10 @@ export class PublicKernelTailCircuitPrivateInputs { * Contains hints for the nullifier non existent read requests. */ public readonly nullifierNonExistentReadRequestHints: NullifierNonExistentReadRequestHints, + public readonly l1ToL2MsgReadRequestHints: Tuple< + TreeLeafReadRequestHint, + typeof MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX + >, public readonly publicDataHints: Tuple, public readonly publicDataReadRequestHints: PublicDataReadRequestHints, public readonly startState: PartialStateReference, @@ -36,8 +52,10 @@ export class PublicKernelTailCircuitPrivateInputs { toBuffer() { return serializeToBuffer( this.previousKernel, + this.noteHashReadRequestHints, this.nullifierReadRequestHints, this.nullifierNonExistentReadRequestHints, + this.l1ToL2MsgReadRequestHints, this.publicDataHints, this.publicDataReadRequestHints, this.startState, @@ -56,12 +74,18 @@ export class PublicKernelTailCircuitPrivateInputs { const reader = BufferReader.asReader(buffer); return new PublicKernelTailCircuitPrivateInputs( reader.readObject(PublicKernelData), + reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, { + fromBuffer: buf => TreeLeafReadRequestHint.fromBuffer(buf, NOTE_HASH_TREE_HEIGHT), + }), nullifierReadRequestHintsFromBuffer( reader, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, ), nullifierNonExistentReadRequestHintsFromBuffer(reader), + reader.readArray(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, { + fromBuffer: buf => TreeLeafReadRequestHint.fromBuffer(buf, L1_TO_L2_MSG_TREE_HEIGHT), + }), reader.readArray(MAX_PUBLIC_DATA_HINTS, PublicDataHint), reader.readObject(PublicDataReadRequestHints), 
reader.readObject(PartialStateReference), diff --git a/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts index 44e0ee9bce0..bf4e7fe8259 100644 --- a/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts @@ -38,6 +38,7 @@ import { Nullifier } from './nullifier.js'; import { PublicCallRequest } from './public_call_request.js'; import { ReadRequest } from './read_request.js'; import { RevertCode } from './revert_code.js'; +import { TreeLeafReadRequest } from './tree_leaf_read_request.js'; /** * Public inputs to a public circuit. @@ -59,7 +60,7 @@ export class PublicCircuitPublicInputs { /** * Note Hash tree read requests executed during the call. */ - public noteHashReadRequests: Tuple, + public noteHashReadRequests: Tuple, /** * Nullifier read requests executed during the call. */ @@ -74,7 +75,7 @@ export class PublicCircuitPublicInputs { /** * L1 to L2 Message Read Requests per call. */ - public l1ToL2MsgReadRequests: Tuple, + public l1ToL2MsgReadRequests: Tuple, /** * Contract storage update requests executed during the call. 
*/ @@ -160,10 +161,10 @@ export class PublicCircuitPublicInputs { CallContext.empty(), Fr.ZERO, Fr.ZERO, - makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, ReadRequest.empty), + makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, TreeLeafReadRequest.empty), makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_CALL, ReadRequest.empty), makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL, ReadRequest.empty), - makeTuple(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, ReadRequest.empty), + makeTuple(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, TreeLeafReadRequest.empty), makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, ContractStorageUpdateRequest.empty), makeTuple(MAX_PUBLIC_DATA_READS_PER_CALL, ContractStorageRead.empty), makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, PublicCallRequest.empty), @@ -272,10 +273,10 @@ export class PublicCircuitPublicInputs { reader.readObject(CallContext), reader.readObject(Fr), reader.readObject(Fr), - reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, ReadRequest), + reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, TreeLeafReadRequest), reader.readArray(MAX_NULLIFIER_READ_REQUESTS_PER_CALL, ReadRequest), reader.readArray(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL, ReadRequest), - reader.readArray(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, ReadRequest), + reader.readArray(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, TreeLeafReadRequest), reader.readArray(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, ContractStorageUpdateRequest), reader.readArray(MAX_PUBLIC_DATA_READS_PER_CALL, ContractStorageRead), reader.readArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, PublicCallRequest), @@ -302,10 +303,10 @@ export class PublicCircuitPublicInputs { CallContext.fromFields(reader), reader.readField(), reader.readField(), - reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, ReadRequest), + reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, TreeLeafReadRequest), reader.readArray(MAX_NULLIFIER_READ_REQUESTS_PER_CALL, ReadRequest), 
reader.readArray(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL, ReadRequest), - reader.readArray(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, ReadRequest), + reader.readArray(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, TreeLeafReadRequest), reader.readArray(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, ContractStorageUpdateRequest), reader.readArray(MAX_PUBLIC_DATA_READS_PER_CALL, ContractStorageRead), reader.readArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, PublicCallRequest), diff --git a/yarn-project/circuits.js/src/structs/public_validation_requests.ts b/yarn-project/circuits.js/src/structs/public_validation_requests.ts index af29d553736..17e988c2f74 100644 --- a/yarn-project/circuits.js/src/structs/public_validation_requests.ts +++ b/yarn-project/circuits.js/src/structs/public_validation_requests.ts @@ -6,6 +6,8 @@ import { BufferReader, FieldReader, type Tuple, serializeToBuffer } from '@aztec import { inspect } from 'util'; import { + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, + MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, @@ -13,6 +15,7 @@ import { import { PublicDataRead } from './public_data_read_request.js'; import { ScopedReadRequest } from './read_request.js'; import { RollupValidationRequests } from './rollup_validation_requests.js'; +import { TreeLeafReadRequest } from './tree_leaf_read_request.js'; /** * Validation requests accumulated during the execution of the transaction. @@ -24,6 +27,7 @@ export class PublicValidationRequests { * forwarded to the rollup for it to take care of them. */ public forRollup: RollupValidationRequests, + public noteHashReadRequests: Tuple, /** * All the nullifier read requests made in this transaction. 
*/ @@ -35,6 +39,7 @@ export class PublicValidationRequests { ScopedReadRequest, typeof MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX >, + public l1ToL2MsgReadRequests: Tuple, /** * All the public data reads made in this transaction. */ @@ -44,8 +49,10 @@ export class PublicValidationRequests { getSize() { return ( this.forRollup.getSize() + + arraySerializedSizeOfNonEmpty(this.noteHashReadRequests) + arraySerializedSizeOfNonEmpty(this.nullifierReadRequests) + arraySerializedSizeOfNonEmpty(this.nullifierNonExistentReadRequests) + + arraySerializedSizeOfNonEmpty(this.l1ToL2MsgReadRequests) + arraySerializedSizeOfNonEmpty(this.publicDataReads) ); } @@ -53,8 +60,10 @@ export class PublicValidationRequests { toBuffer() { return serializeToBuffer( this.forRollup, + this.noteHashReadRequests, this.nullifierReadRequests, this.nullifierNonExistentReadRequests, + this.l1ToL2MsgReadRequests, this.publicDataReads, ); } @@ -67,8 +76,10 @@ export class PublicValidationRequests { const reader = FieldReader.asReader(fields); return new PublicValidationRequests( reader.readObject(RollupValidationRequests), + reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, TreeLeafReadRequest), reader.readArray(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest), reader.readArray(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ScopedReadRequest), + reader.readArray(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, TreeLeafReadRequest), reader.readArray(MAX_PUBLIC_DATA_READS_PER_TX, PublicDataRead), ); } @@ -82,8 +93,10 @@ export class PublicValidationRequests { const reader = BufferReader.asReader(buffer); return new PublicValidationRequests( reader.readObject(RollupValidationRequests), + reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, TreeLeafReadRequest), reader.readArray(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest), reader.readArray(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ScopedReadRequest), + reader.readArray(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, TreeLeafReadRequest), 
reader.readArray(MAX_PUBLIC_DATA_READS_PER_TX, PublicDataRead), ); } @@ -100,8 +113,10 @@ export class PublicValidationRequests { static empty() { return new PublicValidationRequests( RollupValidationRequests.empty(), + makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, TreeLeafReadRequest.empty), makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest.empty), makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ScopedReadRequest.empty), + makeTuple(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, TreeLeafReadRequest.empty), makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, PublicDataRead.empty), ); } @@ -109,6 +124,10 @@ export class PublicValidationRequests { [inspect.custom]() { return `PublicValidationRequests { forRollup: ${inspect(this.forRollup)}, + noteHashReadRequests: [${this.noteHashReadRequests + .filter(x => !x.isEmpty()) + .map(h => inspect(h)) + .join(', ')}], nullifierReadRequests: [${this.nullifierReadRequests .filter(x => !x.isEmpty()) .map(h => inspect(h)) @@ -117,6 +136,10 @@ export class PublicValidationRequests { .filter(x => !x.isEmpty()) .map(h => inspect(h)) .join(', ')}], + l1ToL2MsgReadRequests: [${this.l1ToL2MsgReadRequests + .filter(x => !x.isEmpty()) + .map(h => inspect(h)) + .join(', ')}], publicDataReads: [${this.publicDataReads .filter(x => !x.isEmpty()) .map(h => inspect(h)) diff --git a/yarn-project/circuits.js/src/structs/tree_leaf_read_request.ts b/yarn-project/circuits.js/src/structs/tree_leaf_read_request.ts new file mode 100644 index 00000000000..e32a0068b40 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/tree_leaf_read_request.ts @@ -0,0 +1,32 @@ +import { Fr } from '@aztec/foundation/fields'; +import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +export class TreeLeafReadRequest { + constructor(public value: Fr, public leafIndex: Fr) {} + + toBuffer(): Buffer { + return serializeToBuffer(this.value, this.leafIndex); + } + + static fromBuffer(buffer: Buffer | BufferReader) { + const 
reader = BufferReader.asReader(buffer); + return new TreeLeafReadRequest(Fr.fromBuffer(reader), Fr.fromBuffer(reader)); + } + + toFields(): Fr[] { + return [this.value, this.leafIndex]; + } + + static fromFields(fields: Fr[] | FieldReader) { + const reader = FieldReader.asReader(fields); + return new TreeLeafReadRequest(reader.readField(), reader.readField()); + } + + isEmpty() { + return this.value.isZero() && this.leafIndex.isZero(); + } + + static empty() { + return new TreeLeafReadRequest(Fr.zero(), Fr.zero()); + } +} diff --git a/yarn-project/circuits.js/src/structs/tree_leaf_read_request_hint.ts b/yarn-project/circuits.js/src/structs/tree_leaf_read_request_hint.ts new file mode 100644 index 00000000000..e6b64deb6cb --- /dev/null +++ b/yarn-project/circuits.js/src/structs/tree_leaf_read_request_hint.ts @@ -0,0 +1,38 @@ +import { assertMemberLength } from '@aztec/foundation/array'; +import { Fr } from '@aztec/foundation/fields'; +import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; + +/** + * Contains information which can be used to prove that a leaf is a member of a Merkle tree. + */ +export class TreeLeafReadRequestHint { + constructor( + /** + * Size of the sibling path (number of fields it contains). + */ + pathSize: N, + /** + * Sibling path of the leaf in the Merkle tree. 
+ */ + public siblingPath: Tuple, + ) { + assertMemberLength(this, 'siblingPath', pathSize); + } + + toBuffer() { + return serializeToBuffer(this.siblingPath); + } + + public static empty(pathSize: N): TreeLeafReadRequestHint { + const arr = Array(pathSize) + .fill(0) + .map(() => Fr.ZERO) as Tuple; + return new TreeLeafReadRequestHint(pathSize, arr); + } + + static fromBuffer(buffer: Buffer | BufferReader, size: N): TreeLeafReadRequestHint { + const reader = BufferReader.asReader(buffer); + const siblingPath = reader.readArray(size, Fr); + return new TreeLeafReadRequestHint(size, siblingPath); + } +} diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index a9e06ab980e..192869a48cb 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -37,12 +37,14 @@ import { KeyValidationRequest, KeyValidationRequestAndGenerator, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + L1_TO_L2_MSG_TREE_HEIGHT, L2ToL1Message, LogHash, MAX_ENCRYPTED_LOGS_PER_CALL, MAX_ENCRYPTED_LOGS_PER_TX, MAX_KEY_VALIDATION_REQUESTS_PER_CALL, MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, MAX_L2_TO_L1_MSGS_PER_CALL, MAX_L2_TO_L1_MSGS_PER_TX, MAX_NOTE_ENCRYPTED_LOGS_PER_CALL, @@ -73,6 +75,7 @@ import { MergeRollupInputs, NESTED_RECURSIVE_PROOF_LENGTH, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, + NOTE_HASH_TREE_HEIGHT, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_TREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, @@ -144,7 +147,13 @@ import { GasFees } from '../structs/gas_fees.js'; import { GasSettings } from '../structs/gas_settings.js'; import { GlobalVariables } from '../structs/global_variables.js'; import { Header } from '../structs/header.js'; -import { PublicValidationRequests, ScopedL2ToL1Message, ScopedNoteHash } from '../structs/index.js'; +import { + PublicValidationRequests, + ScopedL2ToL1Message, + ScopedNoteHash, + TreeLeafReadRequest, + 
TreeLeafReadRequestHint, +} from '../structs/index.js'; import { KernelCircuitPublicInputs } from '../structs/kernel/kernel_circuit_public_inputs.js'; import { KernelData } from '../structs/kernel/kernel_data.js'; import { BlockMergeRollupInputs } from '../structs/rollup/block_merge_rollup.js'; @@ -237,6 +246,14 @@ function makeScopedReadRequest(n: number): ScopedReadRequest { return new ScopedReadRequest(makeReadRequest(n), AztecAddress.fromBigInt(BigInt(n + 2))); } +function makeTreeLeafReadRequest(seed: number) { + return new TreeLeafReadRequest(new Fr(seed), new Fr(seed + 1)); +} + +function makeTreeLeafReadRequestHint(seed: number, size: N) { + return new TreeLeafReadRequestHint(size, makeSiblingPath(seed, size)); +} + /** * Creates arbitrary KeyValidationRequest from the given seed. * @param seed - The seed to use for generating the KeyValidationRequest. @@ -310,8 +327,10 @@ export function makeContractStorageRead(seed = 1): ContractStorageRead { function makePublicValidationRequests(seed = 1) { return new PublicValidationRequests( makeRollupValidationRequests(seed), - makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x80), + makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, makeTreeLeafReadRequest, seed + 0x10), + makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x80), makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x95), + makeTuple(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, makeTreeLeafReadRequest, seed + 0x100), makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, makePublicDataRead, seed + 0xe00), ); } @@ -421,10 +440,20 @@ export function makePublicCircuitPublicInputs( makeCallContext(seed, { storageContractAddress: storageContractAddress ?? 
makeAztecAddress(seed) }), fr(seed + 0x100), fr(seed + 0x200), - tupleGenerator(MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, makeReadRequest, seed + 0x300, ReadRequest.empty), + tupleGenerator( + MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, + makeTreeLeafReadRequest, + seed + 0x300, + TreeLeafReadRequest.empty, + ), tupleGenerator(MAX_NULLIFIER_READ_REQUESTS_PER_CALL, makeReadRequest, seed + 0x400, ReadRequest.empty), tupleGenerator(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL, makeReadRequest, seed + 0x420, ReadRequest.empty), - tupleGenerator(MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, makeReadRequest, seed + 0x440, ReadRequest.empty), + tupleGenerator( + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, + makeTreeLeafReadRequest, + seed + 0x440, + TreeLeafReadRequest.empty, + ), tupleGenerator( MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, makeContractStorageUpdateRequest, @@ -518,6 +547,10 @@ export function makeKernelCircuitPublicInputs(seed = 1, fullAccumulatedData = tr ); } +function makeSiblingPath(seed: number, size: N) { + return makeTuple(size, fr, seed); +} + /** * Creates arbitrary/mocked membership witness where the sibling paths is an array of fields in an ascending order starting from `start`. * @param size - The size of the membership witness. @@ -525,7 +558,7 @@ export function makeKernelCircuitPublicInputs(seed = 1, fullAccumulatedData = tr * @returns A membership witness. 
*/ export function makeMembershipWitness(size: N, start: number): MembershipWitness { - return new MembershipWitness(size, BigInt(start), makeTuple(size, fr, start)); + return new MembershipWitness(size, BigInt(start), makeSiblingPath(start, size)); } /** @@ -676,8 +709,18 @@ export function makePublicKernelCircuitPrivateInputs(seed = 1): PublicKernelCirc export function makePublicKernelTailCircuitPrivateInputs(seed = 1): PublicKernelTailCircuitPrivateInputs { return new PublicKernelTailCircuitPrivateInputs( makePublicKernelData(seed), + makeTuple( + MAX_NOTE_HASH_READ_REQUESTS_PER_TX, + s => makeTreeLeafReadRequestHint(s, NOTE_HASH_TREE_HEIGHT), + seed + 0x20, + ), NullifierReadRequestHintsBuilder.empty(MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX), NullifierNonExistentReadRequestHintsBuilder.empty(), + makeTuple( + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, + s => makeTreeLeafReadRequestHint(s, L1_TO_L2_MSG_TREE_HEIGHT), + seed + 0x80, + ), makeTuple(MAX_PUBLIC_DATA_HINTS, PublicDataHint.empty, seed + 0x100), PublicDataReadRequestHintsBuilder.empty(), makePartialStateReference(seed + 0x200), diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index cb1105b4715..b95d9da6c9e 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -32,11 +32,13 @@ import { type KeyValidationHint, KeyValidationRequest, KeyValidationRequestAndGenerator, + type L1_TO_L2_MSG_TREE_HEIGHT, L2ToL1Message, type LeafDataReadHint, LogHash, MAX_ENCRYPTED_LOGS_PER_TX, MAX_KEY_VALIDATION_REQUESTS_PER_TX, + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, MAX_L2_TO_L1_MSGS_PER_TX, MAX_NOTE_ENCRYPTED_LOGS_PER_TX, MAX_NOTE_HASHES_PER_TX, @@ -127,6 +129,8 @@ import { type StateDiffHints, StateReference, type TransientDataIndexHint, + TreeLeafReadRequest, + type TreeLeafReadRequestHint, TxContext, type 
TxRequest, VERIFICATION_KEY_LENGTH_IN_FIELDS, @@ -245,6 +249,8 @@ import type { StorageRead as StorageReadNoir, StorageUpdateRequest as StorageUpdateRequestNoir, TransientDataIndexHint as TransientDataIndexHintNoir, + TreeLeafReadRequestHint as TreeLeafReadRequestHintNoir, + TreeLeafReadRequest as TreeLeafReadRequestNoir, TxContext as TxContextNoir, TxRequest as TxRequestNoir, VerificationKey as VerificationKeyNoir, @@ -814,6 +820,17 @@ export function mapScopedReadRequestFromNoir(scoped: ScopedReadRequestNoir): Sco ); } +function mapTreeLeafReadRequestToNoir(readRequest: TreeLeafReadRequest): TreeLeafReadRequestNoir { + return { + value: mapFieldToNoir(readRequest.value), + leaf_index: mapFieldToNoir(readRequest.leafIndex), + }; +} + +function mapTreeLeafReadRequestFromNoir(readRequest: TreeLeafReadRequestNoir) { + return new TreeLeafReadRequest(mapFieldFromNoir(readRequest.value), mapFieldFromNoir(readRequest.leaf_index)); +} + /** * Maps a KeyValidationRequest to a noir KeyValidationRequest. * @param request - The KeyValidationRequest. 
@@ -1090,6 +1107,14 @@ function mapLeafDataReadHintToNoir(hint: LeafDataReadHint): LeafDataReadHintNoir }; } +function mapTreeLeafReadRequestHintToNoir( + hint: TreeLeafReadRequestHint, +): TreeLeafReadRequestHintNoir { + return { + sibling_path: mapTuple(hint.siblingPath, mapFieldToNoir) as FixedLengthArray, + }; +} + function mapNoteHashSettledReadHintToNoir( hint: SettledReadHint, ): NoteHashSettledReadHintNoir { @@ -1230,11 +1255,13 @@ function mapPrivateValidationRequestsFromNoir(requests: PrivateValidationRequest function mapPublicValidationRequestsToNoir(requests: PublicValidationRequests): PublicValidationRequestsNoir { return { for_rollup: mapRollupValidationRequestsToNoir(requests.forRollup), + note_hash_read_requests: mapTuple(requests.noteHashReadRequests, mapTreeLeafReadRequestToNoir), nullifier_read_requests: mapTuple(requests.nullifierReadRequests, mapScopedReadRequestToNoir), nullifier_non_existent_read_requests: mapTuple( requests.nullifierNonExistentReadRequests, mapScopedReadRequestToNoir, ), + l1_to_l2_msg_read_requests: mapTuple(requests.l1ToL2MsgReadRequests, mapTreeLeafReadRequestToNoir), public_data_reads: mapTuple(requests.publicDataReads, mapPublicDataReadToNoir), }; } @@ -1242,6 +1269,11 @@ function mapPublicValidationRequestsToNoir(requests: PublicValidationRequests): function mapPublicValidationRequestsFromNoir(requests: PublicValidationRequestsNoir): PublicValidationRequests { return new PublicValidationRequests( mapRollupValidationRequestsFromNoir(requests.for_rollup), + mapTupleFromNoir( + requests.note_hash_read_requests, + MAX_NOTE_HASH_READ_REQUESTS_PER_TX, + mapTreeLeafReadRequestFromNoir, + ), mapTupleFromNoir( requests.nullifier_read_requests, MAX_NULLIFIER_READ_REQUESTS_PER_TX, @@ -1252,6 +1284,11 @@ function mapPublicValidationRequestsFromNoir(requests: PublicValidationRequestsN MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, mapScopedReadRequestFromNoir, ), + mapTupleFromNoir( + requests.l1_to_l2_msg_read_requests, + 
MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, + mapTreeLeafReadRequestFromNoir, + ), mapTupleFromNoir(requests.public_data_reads, MAX_PUBLIC_DATA_READS_PER_TX, mapPublicDataReadFromNoir), ); } @@ -1755,10 +1792,18 @@ export function mapPublicKernelTailCircuitPrivateInputsToNoir( ): PublicKernelTailCircuitPrivateInputsNoir { return { previous_kernel: mapPublicKernelDataToNoir(inputs.previousKernel), + note_hash_read_request_hints: mapTuple( + inputs.noteHashReadRequestHints, + (hint: TreeLeafReadRequestHint) => mapTreeLeafReadRequestHintToNoir(hint), + ), nullifier_read_request_hints: mapNullifierReadRequestHintsToNoir(inputs.nullifierReadRequestHints), nullifier_non_existent_read_request_hints: mapNullifierNonExistentReadRequestHintsToNoir( inputs.nullifierNonExistentReadRequestHints, ), + l1_to_l2_msg_read_request_hints: mapTuple( + inputs.l1ToL2MsgReadRequestHints, + (hint: TreeLeafReadRequestHint) => mapTreeLeafReadRequestHintToNoir(hint), + ), public_data_hints: mapTuple(inputs.publicDataHints, mapPublicDataHintToNoir), public_data_read_request_hints: mapPublicDataReadRequestHintsToNoir(inputs.publicDataReadRequestHints), start_state: mapPartialStateReferenceToNoir(inputs.startState), @@ -1892,10 +1937,10 @@ export function mapPublicCircuitPublicInputsToNoir( call_context: mapCallContextToNoir(publicInputs.callContext), args_hash: mapFieldToNoir(publicInputs.argsHash), returns_hash: mapFieldToNoir(publicInputs.returnsHash), - note_hash_read_requests: mapTuple(publicInputs.noteHashReadRequests, mapReadRequestToNoir), + note_hash_read_requests: mapTuple(publicInputs.noteHashReadRequests, mapTreeLeafReadRequestToNoir), nullifier_read_requests: mapTuple(publicInputs.nullifierReadRequests, mapReadRequestToNoir), nullifier_non_existent_read_requests: mapTuple(publicInputs.nullifierNonExistentReadRequests, mapReadRequestToNoir), - l1_to_l2_msg_read_requests: mapTuple(publicInputs.l1ToL2MsgReadRequests, mapReadRequestToNoir), + l1_to_l2_msg_read_requests: 
mapTuple(publicInputs.l1ToL2MsgReadRequests, mapTreeLeafReadRequestToNoir), contract_storage_update_requests: mapTuple( publicInputs.contractStorageUpdateRequests, mapStorageUpdateRequestToNoir, diff --git a/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_hints.ts b/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_hints.ts index 69d382edbfa..03123a74abf 100644 --- a/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_hints.ts +++ b/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_hints.ts @@ -46,11 +46,7 @@ function getNullifierReadRequestHints(), - ), + membershipWitness: new MembershipWitness(NULLIFIER_TREE_HEIGHT, index, siblingPath.toTuple()), leafPreimage, }; }; diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 7e7dc09ab63..3e906aff1b6 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -151,6 +151,11 @@ export class SimulatorOracle implements DBOracle { return await this.aztecNode.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, commitment); } + // We need this in public as part of the EXISTS calls - but isn't used in private + public getCommitmentValue(_leafIndex: bigint): Promise { + throw new Error('Unimplemented in private!'); + } + async getNullifierIndex(nullifier: Fr) { return await this.aztecNode.findLeafIndex('latest', MerkleTreeId.NULLIFIER_TREE, nullifier); } diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 3eb40e1b731..c4c179ccbe6 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -406,11 +406,11 @@ describe('AVM simulator: transpiled Noir contracts', () => { const results = await new AvmSimulator(context).executeBytecode(bytecode); expect(results.reverted).toBe(false); 
expect(results.output).toEqual([expectFound ? Fr.ONE : Fr.ZERO]); - + const expectedValue = results.output[0].toNumber() === 1 ? value0 : Fr.ZERO; expect(trace.traceNoteHashCheck).toHaveBeenCalledTimes(1); expect(trace.traceNoteHashCheck).toHaveBeenCalledWith( storageAddress, - /*noteHash=*/ value0, + /*noteHash=*/ expectedValue, leafIndex, /*exists=*/ expectFound, ); @@ -472,9 +472,13 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(results.output).toEqual([expectFound ? Fr.ONE : Fr.ZERO]); expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledTimes(1); + let expectedValue = results.output[0].toNumber() === 1 ? value0 : value1; + if (mockAtLeafIndex === undefined) { + expectedValue = Fr.ZERO; + } expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledWith( address, - /*msgHash=*/ value0, + /*msgHash=*/ expectedValue, leafIndex, /*exists=*/ expectFound, ); diff --git a/yarn-project/simulator/src/avm/journal/journal.test.ts b/yarn-project/simulator/src/avm/journal/journal.test.ts index 9818c42be4b..f3c18188d12 100644 --- a/yarn-project/simulator/src/avm/journal/journal.test.ts +++ b/yarn-project/simulator/src/avm/journal/journal.test.ts @@ -79,7 +79,7 @@ describe('journal', () => { const exists = await persistableState.checkNoteHashExists(address, utxo, leafIndex); expect(exists).toEqual(false); expect(trace.traceNoteHashCheck).toHaveBeenCalledTimes(1); - expect(trace.traceNoteHashCheck).toHaveBeenCalledWith(address, utxo, leafIndex, exists); + expect(trace.traceNoteHashCheck).toHaveBeenCalledWith(address, Fr.ZERO, leafIndex, exists); }); it('checkNoteHashExists works for existing note hashes', async () => { @@ -126,7 +126,7 @@ describe('journal', () => { const exists = await persistableState.checkL1ToL2MessageExists(address, utxo, leafIndex); expect(exists).toEqual(false); expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledTimes(1); - expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledWith(address, utxo, leafIndex, exists); + 
expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledWith(address, Fr.ZERO, leafIndex, exists); }); it('checkL1ToL2MessageExists works for existing message', async () => { diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index f34a2832edd..2f869c76a7b 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ -1,5 +1,5 @@ import { AztecAddress, type FunctionSelector, type Gas } from '@aztec/circuits.js'; -import { type Fr } from '@aztec/foundation/fields'; +import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { SerializableContractInstance } from '@aztec/types/contracts'; @@ -122,10 +122,14 @@ export class AvmPersistableStateManager { * @returns true if the note hash exists at the given leaf index, false otherwise */ public async checkNoteHashExists(storageAddress: Fr, noteHash: Fr, leafIndex: Fr): Promise { - const gotLeafIndex = await this.hostStorage.commitmentsDb.getCommitmentIndex(noteHash); - const exists = gotLeafIndex === leafIndex.toBigInt(); - this.log.debug(`noteHashes(${storageAddress})@${noteHash} ?? leafIndex: ${leafIndex}, exists: ${exists}.`); - this.trace.traceNoteHashCheck(storageAddress, noteHash, leafIndex, exists); + const gotLeafValue = (await this.hostStorage.commitmentsDb.getCommitmentValue(leafIndex.toBigInt())) ?? Fr.ZERO; + const exists = gotLeafValue.equals(noteHash); + this.log.debug( + `noteHashes(${storageAddress})@${noteHash} ?? leafIndex: ${leafIndex} | gotLeafValue: ${gotLeafValue}, exists: ${exists}.`, + ); + // TODO(8287): We still return exists here, but we need to transmit both the requested noteHash and the gotLeafValue + // such that the VM can constrain the equality and decide on exists based on that. 
+ this.trace.traceNoteHashCheck(storageAddress, gotLeafValue, leafIndex, exists); return Promise.resolve(exists); } @@ -173,12 +177,14 @@ export class AvmPersistableStateManager { * @returns exists - whether the message exists in the L1 to L2 Messages tree */ public async checkL1ToL2MessageExists(contractAddress: Fr, msgHash: Fr, msgLeafIndex: Fr): Promise { - const valueAtIndex = await this.hostStorage.commitmentsDb.getL1ToL2LeafValue(msgLeafIndex.toBigInt()); - const exists = valueAtIndex?.equals(msgHash) ?? false; + const valueAtIndex = (await this.hostStorage.commitmentsDb.getL1ToL2LeafValue(msgLeafIndex.toBigInt())) ?? Fr.ZERO; + const exists = valueAtIndex.equals(msgHash); this.log.debug( `l1ToL2Messages(@${msgLeafIndex}) ?? exists: ${exists}, expected: ${msgHash}, found: ${valueAtIndex}.`, ); - this.trace.traceL1ToL2MessageCheck(contractAddress, msgHash, msgLeafIndex, exists); + // TODO(8287): We still return exists here, but we need to transmit both the requested msgHash and the value + // such that the VM can constrain the equality and decide on exists based on that. + this.trace.traceL1ToL2MessageCheck(contractAddress, valueAtIndex, msgLeafIndex, exists); return Promise.resolve(exists); } diff --git a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts index dd1e638d6a4..aab2ae6efa6 100644 --- a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts @@ -98,10 +98,11 @@ describe('Accrued Substate', () => { const gotExists = context.machineState.memory.getAs(existsOffset); expect(gotExists).toEqual(new Uint8(expectFound ? 1 : 0)); + const expectedValue = gotExists.toNumber() === 1 ? 
value0 : Fr.ZERO; expect(trace.traceNoteHashCheck).toHaveBeenCalledTimes(1); expect(trace.traceNoteHashCheck).toHaveBeenCalledWith( storageAddress, - /*noteHash=*/ value0, + /*noteHash=*/ expectedValue, leafIndex, /*exists=*/ expectFound, ); @@ -290,9 +291,14 @@ describe('Accrued Substate', () => { expect(gotExists).toEqual(new Uint8(expectFound ? 1 : 0)); expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledTimes(1); + // The expected value to trace depends on a) if we found it and b) if it is undefined + let expectedValue = gotExists.toNumber() === 1 ? value0 : value1; + if (mockAtLeafIndex === undefined) { + expectedValue = Fr.ZERO; + } expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledWith( address, - /*msgHash=*/ value0, + /*msgHash=*/ expectedValue, leafIndex, /*exists=*/ expectFound, ); diff --git a/yarn-project/simulator/src/avm/test_utils.ts b/yarn-project/simulator/src/avm/test_utils.ts index ce65116d5b8..f7dda9f6593 100644 --- a/yarn-project/simulator/src/avm/test_utils.ts +++ b/yarn-project/simulator/src/avm/test_utils.ts @@ -28,8 +28,15 @@ export function mockStorageReadWithMap(hs: HostStorage, mockedStorage: Map).getCommitmentIndex.mockResolvedValue(leafIndex.toBigInt()); +export function mockNoteHashExists(hs: HostStorage, _leafIndex: Fr, value?: Fr) { + (hs.commitmentsDb as jest.Mocked).getCommitmentValue.mockImplementation((index: bigint) => { + if (index == _leafIndex.toBigInt()) { + return Promise.resolve(value); + } else { + // This is ok for now since the tracing functions handle it + return Promise.resolve(undefined); + } + }); } export function mockNullifierExists(hs: HostStorage, leafIndex: Fr, _value?: Fr) { diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index 965caab647e..054218ae5a2 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -47,6 +47,7 @@ import {
PublicKernelData, ReadRequest, RevertCode, + TreeLeafReadRequest, makeEmptyProof, makeEmptyRecursiveProof, } from '@aztec/circuits.js'; @@ -412,7 +413,7 @@ export abstract class AbstractPhaseManager { returnsHash: computeVarArgsHash(result.returnValues), noteHashReadRequests: padArrayEnd( result.noteHashReadRequests, - ReadRequest.empty(), + TreeLeafReadRequest.empty(), MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, ), nullifierReadRequests: padArrayEnd( @@ -427,7 +428,7 @@ export abstract class AbstractPhaseManager { ), l1ToL2MsgReadRequests: padArrayEnd( result.l1ToL2MsgReadRequests, - ReadRequest.empty(), + TreeLeafReadRequest.empty(), MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, ), contractStorageReads: padArrayEnd( diff --git a/yarn-project/simulator/src/public/db_interfaces.ts b/yarn-project/simulator/src/public/db_interfaces.ts index ca44044de82..dfa168b66bb 100644 --- a/yarn-project/simulator/src/public/db_interfaces.ts +++ b/yarn-project/simulator/src/public/db_interfaces.ts @@ -100,6 +100,13 @@ export interface CommitmentsDB { */ getCommitmentIndex(commitment: Fr): Promise; + /** + * Gets commitment in the note hash tree given a leaf index. + * @param leafIndex - the leaf to look up. + * @returns - The commitment at that index. Undefined if leaf index is not found. + */ + getCommitmentValue(leafIndex: bigint): Promise; + /** * Gets the index of a nullifier in the nullifier tree. * @param nullifier - The nullifier. 
diff --git a/yarn-project/simulator/src/public/execution.ts b/yarn-project/simulator/src/public/execution.ts index be5b4a51605..109fef0a3ea 100644 --- a/yarn-project/simulator/src/public/execution.ts +++ b/yarn-project/simulator/src/public/execution.ts @@ -17,6 +17,7 @@ import { PublicCallStackItemCompressed, type ReadRequest, RevertCode, + type TreeLeafReadRequest, } from '@aztec/circuits.js'; import { computeVarArgsHash } from '@aztec/circuits.js/hash'; @@ -62,13 +63,13 @@ export interface PublicExecutionResult { /** The new nullifiers to be inserted into the nullifier tree. */ nullifiers: Nullifier[]; /** The note hash read requests emitted in this call. */ - noteHashReadRequests: ReadRequest[]; + noteHashReadRequests: TreeLeafReadRequest[]; /** The nullifier read requests emitted in this call. */ nullifierReadRequests: ReadRequest[]; /** The nullifier non existent read requests emitted in this call. */ nullifierNonExistentReadRequests: ReadRequest[]; /** L1 to L2 message read requests emitted in this call. */ - l1ToL2MsgReadRequests: ReadRequest[]; + l1ToL2MsgReadRequests: TreeLeafReadRequest[]; /** * The hashed logs with side effect counter. * Note: required as we don't track the counter anywhere else. 
diff --git a/yarn-project/simulator/src/public/hints_builder.ts b/yarn-project/simulator/src/public/hints_builder.ts index 5b0ad19713e..439aa7a5fa3 100644 --- a/yarn-project/simulator/src/public/hints_builder.ts +++ b/yarn-project/simulator/src/public/hints_builder.ts @@ -1,6 +1,9 @@ import { type IndexedTreeId, MerkleTreeId } from '@aztec/circuit-types'; import { type Fr, + L1_TO_L2_MSG_TREE_HEIGHT, + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, + MAX_NOTE_HASH_READ_REQUESTS_PER_TX, type MAX_NULLIFIERS_PER_TX, type MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, @@ -8,6 +11,7 @@ import { type MAX_PUBLIC_DATA_READS_PER_TX, type MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MembershipWitness, + NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, type Nullifier, PUBLIC_DATA_TREE_HEIGHT, @@ -16,18 +20,32 @@ import { type PublicDataTreeLeafPreimage, type PublicDataUpdateRequest, type ScopedReadRequest, + type TreeLeafReadRequest, + TreeLeafReadRequestHint, buildNullifierNonExistentReadRequestHints, buildPublicDataHint, buildPublicDataHints, buildPublicDataReadRequestHints, buildSiloedNullifierReadRequestHints, } from '@aztec/circuits.js'; +import { makeTuple } from '@aztec/foundation/array'; import { type Tuple } from '@aztec/foundation/serialize'; import { type MerkleTreeOperations } from '@aztec/world-state'; export class HintsBuilder { constructor(private db: MerkleTreeOperations) {} + async getNoteHashReadRequestsHints( + readRequests: Tuple, + ) { + return await this.getTreeLeafReadRequestsHints( + readRequests, + MAX_NOTE_HASH_READ_REQUESTS_PER_TX, + NOTE_HASH_TREE_HEIGHT, + MerkleTreeId.NOTE_HASH_TREE, + ); + } + async getNullifierReadRequestHints( nullifierReadRequests: Tuple, pendingNullifiers: Tuple, @@ -50,6 +68,17 @@ export class HintsBuilder { return buildNullifierNonExistentReadRequestHints(this, nullifierNonExistentReadRequests, pendingNullifiers); } + async getL1ToL2MsgReadRequestsHints( + readRequests: Tuple, + ) { + return await 
this.getTreeLeafReadRequestsHints( + readRequests, + MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_TX, + L1_TO_L2_MSG_TREE_HEIGHT, + MerkleTreeId.L1_TO_L2_MESSAGE_TREE, + ); + } + getPublicDataHints( publicDataReads: Tuple, publicDataUpdateRequests: Tuple, @@ -120,8 +149,8 @@ export class HintsBuilder { treeHeight: TREE_HEIGHT, index: bigint, ) { - const siblingPath = await this.db.getSiblingPath(treeId, index); - const membershipWitness = new MembershipWitness(treeHeight, index, siblingPath.toTuple()); + const siblingPath = await this.db.getSiblingPath(treeId, index); + const membershipWitness = new MembershipWitness(treeHeight, index, siblingPath.toTuple()); const leafPreimage = await this.db.getLeafPreimage(treeId, index); if (!leafPreimage) { @@ -130,4 +159,21 @@ export class HintsBuilder { return { membershipWitness, leafPreimage }; } + + private async getTreeLeafReadRequestsHints( + readRequests: Tuple, + size: N, + treeHeight: TREE_HEIGHT, + treeId: MerkleTreeId, + ): Promise, N>> { + const hints = makeTuple(size, () => TreeLeafReadRequestHint.empty(treeHeight)); + for (let i = 0; i < readRequests.length; i++) { + const request = readRequests[i]; + if (!request.isEmpty()) { + const siblingPath = await this.db.getSiblingPath(treeId, request.leafIndex.toBigInt()); + hints[i] = new TreeLeafReadRequestHint(treeHeight, siblingPath.toTuple()); + } + } + return hints; + } } diff --git a/yarn-project/simulator/src/public/public_db_sources.ts b/yarn-project/simulator/src/public/public_db_sources.ts index da99bb02d4e..662ccae10c3 100644 --- a/yarn-project/simulator/src/public/public_db_sources.ts +++ b/yarn-project/simulator/src/public/public_db_sources.ts @@ -308,6 +308,17 @@ export class WorldStateDB implements CommitmentsDB { return index; } + public async getCommitmentValue(leafIndex: bigint): Promise { + const timer = new Timer(); + const leafValue = await this.db.getLeafValue(MerkleTreeId.NOTE_HASH_TREE, leafIndex); + this.log.debug(`[DB] Fetched commitment leaf value`, { 
+ eventName: 'public-db-access', + duration: timer.ms(), + operation: 'get-commitment-leaf-value', + } satisfies PublicDBAccessStats); + return leafValue; + } + public async getNullifierIndex(nullifier: Fr): Promise { const timer = new Timer(); const index = await this.db.findLeafIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBuffer()); diff --git a/yarn-project/simulator/src/public/side_effect_trace.test.ts b/yarn-project/simulator/src/public/side_effect_trace.test.ts index 63224eafddf..5ee1469f161 100644 --- a/yarn-project/simulator/src/public/side_effect_trace.test.ts +++ b/yarn-project/simulator/src/public/side_effect_trace.test.ts @@ -94,7 +94,6 @@ describe('Side Effect Trace', () => { it('Should trace note hash checks', () => { const exists = true; trace.traceNoteHashCheck(address, utxo, leafIndex, exists); - expect(trace.getCounter()).toBe(startCounterPlus1); const pxResult = toPxResult(trace); expect(pxResult.noteHashReadRequests).toEqual([ @@ -102,11 +101,11 @@ describe('Side Effect Trace', () => { //storageAddress: contractAddress, value: utxo, //exists: exists, - counter: startCounter, - //leafIndex: leafIndex, + // counter: startCounter, + leafIndex, }, ]); - expect(pxResult.avmCircuitHints.noteHashExists.items).toEqual([{ key: startCounterFr, value: new Fr(exists) }]); + expect(pxResult.avmCircuitHints.noteHashExists.items).toEqual([{ key: leafIndex, value: new Fr(exists) }]); }); it('Should trace note hashes', () => { @@ -174,18 +173,17 @@ describe('Side Effect Trace', () => { it('Should trace L1ToL2 Message checks', () => { const exists = true; trace.traceL1ToL2MessageCheck(address, utxo, leafIndex, exists); - expect(trace.getCounter()).toBe(startCounterPlus1); const pxResult = toPxResult(trace); expect(pxResult.l1ToL2MsgReadRequests).toEqual([ { value: utxo, - counter: startCounter, + leafIndex, }, ]); expect(pxResult.avmCircuitHints.l1ToL2MessageExists.items).toEqual([ { - key: startCounterFr, + key: leafIndex, value: new Fr(exists), }, ]); @@ 
-246,7 +244,6 @@ describe('Side Effect Trace', () => { nestedTrace.tracePublicStorageWrite(address, slot, value); testCounter++; nestedTrace.traceNoteHashCheck(address, utxo, leafIndex, existsDefault); - testCounter++; nestedTrace.traceNewNoteHash(address, utxo); testCounter++; nestedTrace.traceNullifierCheck(address, utxo, leafIndex, /*exists=*/ true, isPending); @@ -256,7 +253,6 @@ describe('Side Effect Trace', () => { nestedTrace.traceNewNullifier(address, utxo); testCounter++; nestedTrace.traceL1ToL2MessageCheck(address, utxo, leafIndex, existsDefault); - testCounter++; nestedTrace.traceNewL2ToL1Message(recipient, content); testCounter++; nestedTrace.traceUnencryptedLog(address, log); diff --git a/yarn-project/simulator/src/public/side_effect_trace.ts b/yarn-project/simulator/src/public/side_effect_trace.ts index dbbe1c8ac37..5d7b0a33a70 100644 --- a/yarn-project/simulator/src/public/side_effect_trace.ts +++ b/yarn-project/simulator/src/public/side_effect_trace.ts @@ -16,6 +16,7 @@ import { Nullifier, type PublicCallRequest, ReadRequest, + TreeLeafReadRequest, } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -38,14 +39,14 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { private contractStorageReads: ContractStorageRead[] = []; private contractStorageUpdateRequests: ContractStorageUpdateRequest[] = []; - private noteHashReadRequests: ReadRequest[] = []; + private noteHashReadRequests: TreeLeafReadRequest[] = []; private noteHashes: NoteHash[] = []; private nullifierReadRequests: ReadRequest[] = []; private nullifierNonExistentReadRequests: ReadRequest[] = []; private nullifiers: Nullifier[] = []; - private l1ToL2MsgReadRequests: ReadRequest[] = []; + private l1ToL2MsgReadRequests: TreeLeafReadRequest[] = []; private newL2ToL1Messages: L2ToL1Message[] = []; private unencryptedLogs: UnencryptedL2Log[] = []; @@ -104,17 +105,16 @@ export class 
PublicSideEffectTrace implements PublicSideEffectTraceInterface { this.incrementSideEffectCounter(); } - public traceNoteHashCheck(_storageAddress: Fr, noteHash: Fr, _leafIndex: Fr, exists: boolean) { + // TODO(8287): _exists can be removed once we have the vm properly handling the equality check + public traceNoteHashCheck(_storageAddress: Fr, noteHash: Fr, leafIndex: Fr, exists: boolean) { // TODO(4805): check if some threshold is reached for max note hash checks // NOTE: storageAddress is unused but will be important when an AVM circuit processes an entire enqueued call // TODO(dbanks12): leafIndex is unused for now but later must be used by kernel to constrain that the kernel // is in fact checking the leaf indicated by the user - this.noteHashReadRequests.push(new ReadRequest(noteHash, this.sideEffectCounter)); + this.noteHashReadRequests.push(new TreeLeafReadRequest(noteHash, leafIndex)); this.avmCircuitHints.noteHashExists.items.push( - new AvmKeyValueHint(/*key=*/ new Fr(this.sideEffectCounter), /*value=*/ new Fr(exists ? 1 : 0)), + new AvmKeyValueHint(/*key=*/ new Fr(leafIndex), /*value=*/ exists ? 
Fr.ONE : Fr.ZERO), ); - this.logger.debug(`NOTE_HASH_CHECK cnt: ${this.sideEffectCounter}`); - this.incrementSideEffectCounter(); } public traceNewNoteHash(_storageAddress: Fr, noteHash: Fr) { @@ -154,17 +154,16 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { this.incrementSideEffectCounter(); } - public traceL1ToL2MessageCheck(_contractAddress: Fr, msgHash: Fr, _msgLeafIndex: Fr, exists: boolean) { + // TODO(8287): _exists can be removed once we have the vm properly handling the equality check + public traceL1ToL2MessageCheck(_contractAddress: Fr, msgHash: Fr, msgLeafIndex: Fr, exists: boolean) { // TODO(4805): check if some threshold is reached for max message reads // NOTE: contractAddress is unused but will be important when an AVM circuit processes an entire enqueued call // TODO(dbanks12): leafIndex is unused for now but later must be used by kernel to constrain that the kernel // is in fact checking the leaf indicated by the user - this.l1ToL2MsgReadRequests.push(new ReadRequest(msgHash, this.sideEffectCounter)); + this.l1ToL2MsgReadRequests.push(new TreeLeafReadRequest(msgHash, msgLeafIndex)); this.avmCircuitHints.l1ToL2MessageExists.items.push( - new AvmKeyValueHint(/*key=*/ new Fr(this.sideEffectCounter), /*value=*/ new Fr(exists ? 1 : 0)), + new AvmKeyValueHint(/*key=*/ new Fr(msgLeafIndex), /*value=*/ exists ? 
Fr.ONE : Fr.ZERO), ); - this.logger.debug(`L1_TO_L2_MSG_CHECK cnt: ${this.sideEffectCounter}`); - this.incrementSideEffectCounter(); } public traceNewL2ToL1Message(recipient: Fr, content: Fr) { diff --git a/yarn-project/simulator/src/public/tail_phase_manager.ts b/yarn-project/simulator/src/public/tail_phase_manager.ts index 5b55af3e250..155a8fec14a 100644 --- a/yarn-project/simulator/src/public/tail_phase_manager.ts +++ b/yarn-project/simulator/src/public/tail_phase_manager.ts @@ -77,6 +77,10 @@ export class TailPhaseManager extends AbstractPhaseManager { const { validationRequests, endNonRevertibleData: nonRevertibleData, end: revertibleData } = previousOutput; + const noteHashReadRequestHints = await this.hintsBuilder.getNoteHashReadRequestsHints( + validationRequests.noteHashReadRequests, + ); + const pendingNullifiers = mergeAccumulatedData( nonRevertibleData.nullifiers, revertibleData.nullifiers, @@ -93,6 +97,10 @@ export class TailPhaseManager extends AbstractPhaseManager { pendingNullifiers, ); + const l1ToL2MsgReadRequestHints = await this.hintsBuilder.getL1ToL2MsgReadRequestsHints( + validationRequests.l1ToL2MsgReadRequests, + ); + const pendingPublicDataWrites = mergeAccumulatedData( nonRevertibleData.publicDataUpdateRequests, revertibleData.publicDataUpdateRequests, @@ -114,8 +122,10 @@ export class TailPhaseManager extends AbstractPhaseManager { return new PublicKernelTailCircuitPrivateInputs( previousKernel, + noteHashReadRequestHints, nullifierReadRequestHints, nullifierNonExistentReadRequestHints, + l1ToL2MsgReadRequestHints, publicDataHints, publicDataReadRequestHints, currentState.partial,