diff --git a/.github/ensure-builder/action.yml b/.github/ensure-builder/action.yml index 6ff89d784713..32925ace8442 100644 --- a/.github/ensure-builder/action.yml +++ b/.github/ensure-builder/action.yml @@ -70,7 +70,7 @@ runs: ec2_subnet_id: subnet-4cfabd25 ec2_security_group_id: sg-0ccd4e5df0dcca0c9 ec2_key_name: "build-instance" - ec2_instance_tags: '[{"Key": "Keep-Alive", "Value": "true"},{"Key": "Builder", "Value": "true"}]' + ec2_instance_tags: '[{"Key": "Builder", "Value": "true"}]' # This disambiguates from 'tester' - name: Set BUILDER_SPOT_IP and BUILDER_SPOT_KEY shell: bash @@ -100,4 +100,4 @@ runs: - name: Report Exit Code shell: bash if: steps.test.outputs.exit_code != '155' || inputs.spot_strategy == 'None' - run: exit ${{ steps.test.outputs.exit_code }} \ No newline at end of file + run: exit ${{ steps.test.outputs.exit_code }} diff --git a/.github/ensure-tester/action.yml b/.github/ensure-tester/action.yml index fcc896ff596d..d96e117e3c04 100644 --- a/.github/ensure-tester/action.yml +++ b/.github/ensure-tester/action.yml @@ -78,7 +78,7 @@ runs: ec2_subnet_id: subnet-4cfabd25 ec2_security_group_id: sg-0ccd4e5df0dcca0c9 ec2_key_name: "build-instance" - ec2_instance_tags: '[{"Key": "Keep-Alive", "Value": "true"}]' + ec2_instance_tags: '[]' - name: Ensure Tester Cleanup uses: gacts/run-and-post-run@v1 @@ -107,4 +107,4 @@ runs: - name: Report Exit Code shell: bash if: steps.test.outputs.exit_code != '155' || inputs.spot_strategy == 'None' - run: exit ${{ steps.test.outputs.exit_code }} \ No newline at end of file + run: exit ${{ steps.test.outputs.exit_code }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2afd63f633d4..5b44fa96c76b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -562,7 +562,7 @@ jobs: protocol-circuits-gates-report: needs: [ci-rest, configure] - if: needs.configure.outputs.non-docs == 'true' && needs.configure.outputs.non-bb == 'true' + if: github.ref_name == 'master' || (needs.configure.outputs.non-docs == 'true' && needs.configure.outputs.non-bb == 'true') runs-on: ubuntu-latest permissions: pull-requests: write @@ -582,7 +582,7 @@ jobs: scripts/copy_from_builder ./noir-projects/gates_report.json protocol_circuits_report.json - name: Compare gates reports id: gates_diff - uses: noir-lang/noir-gates-diff@84ada11295b9a1e1da7325af4e45e2db9f775175 + uses: noir-lang/noir-gates-diff@7e4ddaa91c69380f15ccba514eac17bc7432a8cc continue-on-error: true timeout-minutes: 1 with: @@ -600,7 +600,7 @@ jobs: public-functions-size-report: needs: [ci-rest, configure] - if: needs.configure.outputs.non-docs == 'true' + if: github.ref_name == 'master' || needs.configure.outputs.non-docs == 'true' runs-on: ubuntu-latest permissions: pull-requests: write @@ -620,7 +620,7 @@ jobs: scripts/copy_from_builder noir-projects/public_functions_report.json public_functions_report.json - name: Compare public functions bytecode size reports id: public_functions_sizes_diff - uses: noir-lang/noir-gates-diff@84ada11295b9a1e1da7325af4e45e2db9f775175 + uses: noir-lang/noir-gates-diff@7e4ddaa91c69380f15ccba514eac17bc7432a8cc continue-on-error: true timeout-minutes: 1 with: diff --git a/.github/workflows/publish-aztec-packages.yml b/.github/workflows/publish-aztec-packages.yml index 0f1549677aad..1cee8c76a1b3 100644 --- a/.github/workflows/publish-aztec-packages.yml +++ b/.github/workflows/publish-aztec-packages.yml @@ -120,7 +120,7 @@ jobs: timeout-minutes: 80 run: | sudo shutdown -P 80 - ./bootstrap.sh image-aztec + ./bootstrap.sh image-aztec 
--check-arch docker tag aztecprotocol/aztec:${{ env.GIT_COMMIT }} aztecprotocol/aztec:${{ env.GIT_COMMIT }}-arm64 docker push aztecprotocol/aztec:${{ env.GIT_COMMIT }}-arm64 build-nargo-x86: diff --git a/Earthfile b/Earthfile index f37f1415dd23..ca980143f3fc 100644 --- a/Earthfile +++ b/Earthfile @@ -205,7 +205,7 @@ docs-with-cache: FROM +bootstrap ENV CI=1 ENV USE_CACHE=1 - LET artifact=docs-ci-deploy-$(./boxes/bootstrap.sh hash) + LET artifact=docs-ci-deploy-$(./docs/bootstrap.sh hash) IF ci3/test_should_run $artifact WAIT BUILD --pass-args ./docs/+deploy-preview diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index 11767391c576..4997bffb9e3f 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -305,12 +305,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - [[package]] name = "base64" version = "0.21.7" @@ -863,18 +857,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jsonrpc" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34efde8d2422fb79ed56db1d3aea8fa5b583351d15a26770cdee2f88813dd702" -dependencies = [ - "base64 0.13.1", - "minreq", - "serde", - "serde_json", -] - [[package]] name = "k256" version = "0.11.6" @@ -929,17 +911,6 @@ dependencies = [ "adler", ] -[[package]] -name = "minreq" -version = "2.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "763d142cdff44aaadd9268bebddb156ef6c65a0e13486bb81673cf2d8739f9b0" -dependencies = [ - "log", - "serde", - "serde_json", -] - [[package]] name = "noirc_errors" version = "1.0.0-beta.1" @@ -962,11 +933,7 @@ name = "noirc_printable_type" version = "1.0.0-beta.1" dependencies = [ "acvm", - "iter-extended", - "jsonrpc", "serde", - "serde_json", - "thiserror", ] [[package]] diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 6a44a1aa0c79..27b8c829517d 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = e50b5c03a005fc294414b3b9c103bb17e42598fa - parent = 9eaa10983b26616876099896accb0e3093ae8d20 + commit = bfda2c9914f49f917bae7d62f27555e7ff3e6b4f + parent = b73f7f9442f9a51e82925dea3c32bc64173d81e5 method = merge cmdver = 0.4.6 diff --git a/barretenberg/acir_tests/bootstrap.sh b/barretenberg/acir_tests/bootstrap.sh index 1b1a7771d9b1..ad6ced2c2d14 100755 --- a/barretenberg/acir_tests/bootstrap.sh +++ b/barretenberg/acir_tests/bootstrap.sh @@ -16,6 +16,15 @@ function prepare_tests { # TODO(https://github.com/AztecProtocol/barretenberg/issues/1108): problem regardless the proof system used rm -rf acir_tests/regression_5045 + # Regenerate verify_honk_proof and verify_rollup_honk_proof recursive input. + echo "Regenerating verify_honk_proof and verify_rollup_honk_proof recursive inputs." 
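+  # COMPILE=2 builds assert_statement without running it (see the note on COMPILE=2 below);
+  # bb's write_recursion_inputs_* commands then derive the verifier programs' TOML inputs from the compiled artifact.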
+ COMPILE=2 ./run_test.sh assert_statement + local bb=$(realpath ../cpp/build/bin/bb) + (cd ./acir_tests/assert_statement && \ + $bb write_recursion_inputs_ultra_honk -b ./target/program.json -o ../../../../noir/noir-repo/test_programs/execution_success/verify_honk_proof --recursive && \ + $bb write_recursion_inputs_rollup_honk -b ./target/program.json -o ../../../../noir/noir-repo/test_programs/execution_success/verify_rollup_honk_proof --recursive && \ + cp -R ../../../../noir/noir-repo/test_programs/execution_success/verify_honk_proof .. && cp -R ../../../../noir/noir-repo/test_programs/execution_success/verify_rollup_honk_proof ..) + # COMPILE=2 only compiles the test. denoise "parallel --joblog joblog.txt --line-buffered 'COMPILE=2 ./run_test.sh \$(basename {})' ::: ./acir_tests/*" @@ -29,13 +38,6 @@ function build_tests { prepare_tests - - # TODO: This actually breaks things, but shouldn't. We want to do it here and not maintain manually. - # Regenerate verify_honk_proof recursive input. - # local bb=$(realpath ../cpp/build/bin/bb) - # (cd ./acir_tests/assert_statement && \ - # $bb write_recursion_inputs_honk -b ./target/program.json -o ../verify_honk_proof --recursive) - # Update yarn.lock so it can be committed. # Be lenient about bb.js hash changing, even if we try to minimize the occurrences. denoise "cd browser-test-app && yarn add --dev @aztec/bb.js@../../ts && yarn" diff --git a/barretenberg/acir_tests/regenerate_verify_honk_proof_inputs.sh b/barretenberg/acir_tests/regenerate_verify_honk_proof_inputs.sh deleted file mode 100755 index 03a46ad70cc6..000000000000 --- a/barretenberg/acir_tests/regenerate_verify_honk_proof_inputs.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash -# Env var overrides: -# BIN: to specify a different binary to test with (e.g. bb.js or bb.js-dev). -set -eu - -BIN=${BIN:-../cpp/build/bin/bb} -CRS_PATH=~/.bb-crs -BRANCH=master -VERBOSE=${VERBOSE:+-v} - -if [ -f "$BIN" ]; then - BIN=$(realpath "$BIN") -else - BIN=$(realpath "$(which "$BIN")") -fi - -export BRANCH - -# The program for which a proof will be recursively verified -PROGRAM=assert_statement -# The programs containing the recursive verifier -RECURSIVE_PROGRAMS=(verify_honk_proof verify_rollup_honk_proof) - -./reset_acir_tests.sh --no-rebuild-nargo --programs "$PROGRAM" -cd "acir_tests/$PROGRAM" - -# Base directory for TOML outputs -BASE_TOML_DIR=../../../../noir/noir-repo/test_programs/execution_success - -for RECURSIVE_PROGRAM in "${RECURSIVE_PROGRAMS[@]}"; do - TOML_DIR="$BASE_TOML_DIR/$RECURSIVE_PROGRAM" - - if [ ! -d "$TOML_DIR" ]; then - echo "Error: Directory $TOML_DIR does not exist." - exit 1 - fi - - echo "Generating recursion inputs for $RECURSIVE_PROGRAM and writing to directory $TOML_DIR" - - # Decide the command based on the recursive program - if [[ "$RECURSIVE_PROGRAM" == "verify_rollup_honk_proof" ]]; then - COMMAND="write_recursion_inputs_rollup_honk" - else - COMMAND="write_recursion_inputs_honk" - fi - - $BIN "$COMMAND" --recursive $VERBOSE -c "$CRS_PATH" -b ./target/program.json -o "$TOML_DIR" -done - -cd ../.. 
-./reset_acir_tests.sh --no-rebuild-nargo --programs "${RECURSIVE_PROGRAMS[@]}" diff --git a/barretenberg/cpp/pil/avm/constants_gen.pil b/barretenberg/cpp/pil/avm/constants_gen.pil index 942924d0d9e9..cb8f8952d8c4 100644 --- a/barretenberg/cpp/pil/avm/constants_gen.pil +++ b/barretenberg/cpp/pil/avm/constants_gen.pil @@ -4,7 +4,7 @@ namespace constants(256); pol MAX_NULLIFIERS_PER_CALL = 16; pol MAX_ENQUEUED_CALLS_PER_CALL = 16; pol MAX_L2_TO_L1_MSGS_PER_CALL = 2; - pol MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 64; + pol MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 63; pol MAX_PUBLIC_DATA_READS_PER_CALL = 64; pol MAX_NOTE_HASH_READ_REQUESTS_PER_CALL = 16; pol MAX_NULLIFIER_READ_REQUESTS_PER_CALL = 16; @@ -38,9 +38,9 @@ namespace constants(256); pol START_NULLIFIER_NON_EXISTS_OFFSET = 32; pol START_L1_TO_L2_MSG_EXISTS_WRITE_OFFSET = 48; pol START_SSTORE_WRITE_OFFSET = 64; - pol START_SLOAD_WRITE_OFFSET = 128; - pol START_EMIT_NOTE_HASH_WRITE_OFFSET = 192; - pol START_EMIT_NULLIFIER_WRITE_OFFSET = 208; - pol START_EMIT_L2_TO_L1_MSG_WRITE_OFFSET = 224; - pol START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET = 226; + pol START_SLOAD_WRITE_OFFSET = 127; + pol START_EMIT_NOTE_HASH_WRITE_OFFSET = 191; + pol START_EMIT_NULLIFIER_WRITE_OFFSET = 207; + pol START_EMIT_L2_TO_L1_MSG_WRITE_OFFSET = 223; + pol START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET = 225; diff --git a/barretenberg/cpp/src/CMakeLists.txt b/barretenberg/cpp/src/CMakeLists.txt index 1a9590e4d673..fd9925660826 100644 --- a/barretenberg/cpp/src/CMakeLists.txt +++ b/barretenberg/cpp/src/CMakeLists.txt @@ -102,6 +102,10 @@ if(SMT) add_subdirectory(barretenberg/smt_verification) endif() +if(SMT AND ACIR_FORMAL_PROOFS) + add_subdirectory(barretenberg/acir_formal_proofs) +endif() + add_subdirectory(barretenberg/benchmark) include(GNUInstallDirs) diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/CMakeLists.txt new file mode 100644 index 000000000000..fcd338d4072c --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/CMakeLists.txt @@ -0,0 +1 @@ +barretenberg_module(acir_formal_proofs dsl circuit_checker smt_verification common) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/README.md b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/README.md new file mode 100644 index 000000000000..5dd3a5a29182 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/README.md @@ -0,0 +1,47 @@ +# Formal Verification of ACIR Instructions + +This module provides formal verification capabilities for ACIR (Arithmetic Circuit Intermediate Representation) instructions generated from Noir SSA code. + +## Overview + +The verifier uses SMT (Satisfiability Modulo Theories) solving to formally verify the correctness of ACIR instructions. It supports verification of: + +- Arithmetic operations (add, subtract, multiply, divide) +- Bitwise operations (AND, OR, XOR, NOT) +- Shifts (left shift, right shift) +- Comparisons (equality, less than, greater than) +- Field arithmetic + +## Tests + +⚠️ **WARNING**: Do not run these tests on a local machine without sufficient memory (>32GB RAM). The tests can consume large amounts of memory and CPU resources. Some tests like integer division can run for multiple days. It is recommended to run these tests in a controlled environment with adequate resources. 
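+
+Each verification query follows the same counterexample-search pattern: load a compiled ACIR
+artifact, translate it to an SMT circuit, assert the negation of the operation's specification,
+and ask the solver for a model. A minimal sketch (with a hypothetical artifact path; see
+`acir_loader.test.cpp` for the real tests):
+
+```cpp
+AcirToSmtLoader loader("artifacts/Binary::Add_Unsigned_127_Unsigned_127.acir");
+smt_solver::Solver solver = loader.get_smt_solver();
+smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver);
+// Asserts c != a + b internally; a SAT result is a counterexample, UNSAT means correct.
+bool counterexample_found = verify_add(&solver, circuit);
+```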
+
+### Results
+
+| Opcode | Lhs type/size | Rhs type/size | Time/seconds | Memory/GB | Success | SMT Term Type | Reason |
+| ----------- | ------------- | ------------- | ------------ | --------- | ------- | ---------------- | ------------------------------ |
+| Binary::Add | Field | Field | 0.024 | - | ✓ | TermType::FFTerm | |
+| Binary::Add | Unsigned_127 | Unsigned_127 | 2.8 | - | ✓ | TermType::BVTerm | |
+| Binary::And | Unsigned_32 | Unsigned_32 | 6.7 | - | ✓ | TermType::BVTerm | |
+| Binary::And | Unsigned_127 | Unsigned_127 | 7.5 | - | ✗ | TermType::BVTerm | Probably a bug in the SMT solver |
+| Binary::Div | Field | Field | 0.024 | - | ✓ | TermType::FFTerm | |
+| Binary::Div | Unsigned_126 | Unsigned_126 | 402.7 | 3.5 | ✗ | TermType::BVTerm | Analysis in progress |
+| Binary::Div | Signed_126 | Signed_126 | >17 days | 5.1 | ✗ | TermType::ITerm | Test takes too long |
+| Binary::Eq | Field | Field | 19.2 | - | ✓ | TermType::FFTerm | |
+| Binary::Eq | Unsigned_127 | Unsigned_127 | 22.8 | - | ✓ | TermType::BVTerm | |
+| Binary::Lt | Unsigned_127 | Unsigned_127 | 56.7 | - | ✓ | TermType::BVTerm | |
+| Binary::Mod | Unsigned_127 | Unsigned_127 | - | 3.2 | ✗ | TermType::BVTerm | Analysis in progress |
+| Binary::Mul | Field | Field | 0.024 | - | ✓ | TermType::FFTerm | |
+| Binary::Mul | Unsigned_127 | Unsigned_127 | 10.0 | - | ✓ | TermType::BVTerm | |
+| Binary::Or | Unsigned_32 | Unsigned_32 | 18.0 | - | ✓ | TermType::BVTerm | |
+| Binary::Or | Unsigned_127 | Unsigned_127 | 7.5 | - | ✗ | TermType::BVTerm | Probably a bug in the SMT solver |
+| Binary::Shl | Unsigned_64 | Unsigned_8 | 42331.61 | 63.2 | ✓ | TermType::BVTerm | |
+| Binary::Shl | Unsigned_32 | Unsigned_8 | 4574.0 | 30 | ✓ | TermType::BVTerm | |
+| Binary::Shr | Unsigned_64 | Unsigned_8 | 3927.88 | 10 | ✓ | TermType::BVTerm | |
+| Binary::Sub | Unsigned_127 | Unsigned_127 | 3.3 | - | ✓ | TermType::BVTerm | |
+| Binary::Xor | Unsigned_32 | Unsigned_32 | 14.7 | - | ✓ | TermType::BVTerm | |
+| Binary::Xor | Unsigned_127 | Unsigned_127 | 7.5 | - | ✗ | TermType::BVTerm | Probably a bug in the SMT solver |
+| Not | Unsigned_127 | - | 0.2 | - | ✓ | TermType::BVTerm | |
+
+
+Each test attempts to find counterexamples that violate the expected behavior. A passing test indicates the operation is correctly implemented, while a failing test reveals potential issues.
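+
+Note: these targets are only configured when both the `SMT` and `ACIR_FORMAL_PROOFS` CMake
+options are enabled (see `barretenberg/cpp/src/CMakeLists.txt`).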
\ No newline at end of file
diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.cpp b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.cpp
new file mode 100644
index 000000000000..f45f9e7aba82
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.cpp
@@ -0,0 +1,70 @@
+#include "acir_loader.hpp"
+#include "barretenberg/dsl/acir_format/acir_format.hpp"
+#include "barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp"
+#include "barretenberg/smt_verification/circuit/ultra_circuit.hpp"
+#include "barretenberg/smt_verification/terms/term.hpp"
+#include "msgpack/v3/sbuffer_decl.hpp"
+#include <fstream>
+#include <iterator>
+#include <vector>
+
+std::vector<uint8_t> readFile(std::string filename)
+{
+    std::ifstream file(filename, std::ios::binary);
+    file.unsetf(std::ios::skipws);
+
+    std::streampos fileSize;
+
+    file.seekg(0, std::ios::end);
+    fileSize = file.tellg();
+    file.seekg(0, std::ios::beg);
+
+    std::vector<uint8_t> vec;
+
+    vec.insert(vec.begin(), std::istream_iterator<uint8_t>(file), std::istream_iterator<uint8_t>());
+    file.close();
+    return vec;
+}
+
+AcirToSmtLoader::AcirToSmtLoader(std::string filename)
+{
+    this->acir_program_buf = readFile(filename);
+    this->instruction_name = filename;
+    this->constraint_system = acir_format::program_buf_to_acir_format(this->acir_program_buf, false).at(0);
+    this->circuit_buf = this->get_circuit_builder().export_circuit();
+}
+
+bb::UltraCircuitBuilder AcirToSmtLoader::get_circuit_builder()
+{
+    bb::UltraCircuitBuilder builder = acir_format::create_circuit(this->constraint_system, false);
+    builder.set_variable_name(0, "a");
+    builder.set_variable_name(1, "b");
+    builder.set_variable_name(2, "c");
+    return builder;
+}
+
+smt_solver::Solver AcirToSmtLoader::get_smt_solver()
+{
+    smt_circuit::CircuitSchema circuit_info = smt_circuit_schema::unpack_from_buffer(this->circuit_buf);
+    // In circuits generated by the shift left (shl) opcode, there is a variable with bit length 197.
+    // This is likely because the shl operation internally calls the truncate opcode to handle overflow.
+    return smt_solver::Solver(circuit_info.modulus, smt_circuit::default_solver_config, 16, 240);
+}
+
+smt_circuit::UltraCircuit AcirToSmtLoader::get_bitvec_smt_circuit(smt_solver::Solver* solver)
+{
+    smt_circuit::CircuitSchema circuit_info = smt_circuit_schema::unpack_from_buffer(this->circuit_buf);
+    return smt_circuit::UltraCircuit(circuit_info, solver, smt_terms::TermType::BVTerm);
+}
+
+smt_circuit::UltraCircuit AcirToSmtLoader::get_field_smt_circuit(smt_solver::Solver* solver)
+{
+    smt_circuit::CircuitSchema circuit_info = smt_circuit_schema::unpack_from_buffer(this->circuit_buf);
+    return smt_circuit::UltraCircuit(circuit_info, solver, smt_terms::TermType::FFTerm);
+}
+
+smt_circuit::UltraCircuit AcirToSmtLoader::get_integer_smt_circuit(smt_solver::Solver* solver)
+{
+    smt_circuit::CircuitSchema circuit_info = smt_circuit_schema::unpack_from_buffer(this->circuit_buf);
+    return smt_circuit::UltraCircuit(circuit_info, solver, smt_terms::TermType::ITerm);
+}
\ No newline at end of file
diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.hpp b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.hpp
new file mode 100644
index 000000000000..a201aa8bb1d2
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.hpp
@@ -0,0 +1,96 @@
+#pragma once
+#include "barretenberg/dsl/acir_format/acir_format.hpp"
+#include "barretenberg/smt_verification/circuit/ultra_circuit.hpp"
+#include "msgpack/v3/sbuffer_decl.hpp"
+#include <cstdint>
+#include <string>
+#include <vector>
+
+/**
+ * @brief Class for loading ACIR (Arithmetic Circuit Intermediate Representation) programs and converting them to SMT
+ * format
+ *
+ * This class handles loading ACIR programs from files and provides functionality to:
+ * - Convert the ACIR program to various SMT circuit representations
+ * - Access the underlying constraint systems
+ * - Build circuits for verification
+ *
+ * The loader reads an ACIR program file, creates constraint systems, and allows conversion
+ * to different SMT circuit types (bitvector, field, integer) for formal verification.
+ */
+class AcirToSmtLoader {
+  public:
+    // Deleted constructors/operators to prevent copying/moving
+    AcirToSmtLoader() = delete;
+    AcirToSmtLoader(const AcirToSmtLoader& other) = delete;
+    AcirToSmtLoader(AcirToSmtLoader&& other) = delete;
+    AcirToSmtLoader& operator=(const AcirToSmtLoader& other) = delete;
+    AcirToSmtLoader& operator=(AcirToSmtLoader&& other) = delete;
+
+    ~AcirToSmtLoader() = default;
+
+    /**
+     * @brief Constructs loader from an ACIR program file
+     * @param filename Path to the ACIR program file to load
+     *
+     * Reads the ACIR program from file, initializes the constraint system,
+     * and prepares the circuit buffer for later use.
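+     * Note: only the first program in the loaded buffer is used
+     * (program_buf_to_acir_format(...).at(0)).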
+     */
+    AcirToSmtLoader(std::string filename);
+
+    /**
+     * @brief Gets the constraint systems from the loaded ACIR program
+     * @return Reference to the ACIR format constraint systems
+     */
+    acir_format::AcirFormat& get_constraint_systems() { return this->constraint_system; }
+
+    /**
+     * @brief Creates a circuit builder for the loaded program
+     * @return UltraCircuitBuilder instance
+     *
+     * Creates and returns a circuit builder with predefined variable names:
+     * - Variable 0 named "a"
+     * - Variable 1 named "b"
+     * - Variable 2 named "c"
+     */
+    bb::UltraCircuitBuilder get_circuit_builder();
+
+    /**
+     * @brief Gets an SMT solver instance
+     * @return Solver instance for SMT solving
+     *
+     * Creates a solver configured with:
+     * - Circuit modulus from schema
+     * - Default solver configuration
+     * - Minimum bit width of 16
+     * - Maximum bit width of 240
+     */
+    smt_solver::Solver get_smt_solver();
+
+    /**
+     * @brief Creates an SMT circuit for bitvector operations
+     * @param solver Pointer to SMT solver to use
+     * @return UltraCircuit configured for bitvector operations
+     */
+    smt_circuit::UltraCircuit get_bitvec_smt_circuit(smt_solver::Solver* solver);
+
+    /**
+     * @brief Creates an SMT circuit for field operations
+     * @param solver Pointer to SMT solver to use
+     * @return UltraCircuit configured for field operations
+     */
+    smt_circuit::UltraCircuit get_field_smt_circuit(smt_solver::Solver* solver);
+
+    /**
+     * @brief Creates an SMT circuit for integer operations
+     * @param solver Pointer to SMT solver to use
+     * @return UltraCircuit configured for integer operations
+     */
+    smt_circuit::UltraCircuit get_integer_smt_circuit(smt_solver::Solver* solver);
+
+  private:
+    std::string instruction_name;              ///< Name of the instruction/filename being processed
+    std::vector<uint8_t> acir_program_buf;     ///< Buffer containing the raw ACIR program data read from file
+    acir_format::AcirFormat constraint_system; ///< The parsed constraint system from the ACIR program
+    msgpack::sbuffer circuit_buf;              ///< Buffer for circuit serialization using MessagePack
+};
\ No newline at end of file
diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.test.cpp b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.test.cpp
new file mode 100644
index 000000000000..67cf4ac64576
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/acir_loader.test.cpp
@@ -0,0 +1,490 @@
+/**
+ * @file acir_loader.test.cpp
+ * @brief Tests for verifying ACIR (Arithmetic Circuit Intermediate Representation) operations
+ *
+ * This test suite verifies the correctness of various arithmetic, logical, and bitwise operations
+ * implemented in ACIR format. It uses SMT solvers to formally verify the operations.
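+ *
+ * Each test loads a precompiled ACIR artifact from ARTIFACTS_PATH, builds an SMT circuit over it,
+ * asserts the negation of the expected relation, and expects the solver to find no counterexample.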
+ */
+
+#include "acir_loader.hpp"
+#include "barretenberg/circuit_checker/circuit_checker.hpp"
+#include "barretenberg/common/test.hpp"
+#include "barretenberg/dsl/acir_format/acir_format.hpp"
+#include "barretenberg/smt_verification/circuit/ultra_circuit.hpp"
+#include "barretenberg/smt_verification/solver/solver.hpp"
+#include "barretenberg/smt_verification/util/smt_util.hpp"
+#include "barretenberg/stdlib/client_ivc_verifier/client_ivc_recursive_verifier.hpp"
+#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp"
+#include "formal_proofs.hpp"
+#include "helpers.hpp"
+#include <string>
+
+// Path to test artifacts containing ACIR programs and witness files
+const std::string ARTIFACTS_PATH = "../src/barretenberg/acir_formal_proofs/artifacts/";
+
+/**
+ * @brief Saves witness data when a bug is found during verification
+ * @param instruction_name Name of the instruction being tested
+ * @param circuit The circuit containing the bug
+ *
+ * Saves witness data to a file named {instruction_name}.witness in the artifacts directory
+ */
+void save_buggy_witness(std::string instruction_name, smt_circuit::UltraCircuit circuit)
+{
+    std::vector<std::string> special_names;
+    info("Saving bug for op ", instruction_name);
+    default_model_single(special_names, circuit, ARTIFACTS_PATH + instruction_name + ".witness");
+}
+
+/**
+ * @brief Verifies a previously saved witness file for correctness
+ * @param instruction_name Name of the instruction to verify
+ * @return true if witness is valid, false otherwise
+ *
+ * Loads a witness file and verifies it against the corresponding ACIR program
+ */
+bool verify_buggy_witness(std::string instruction_name)
+{
+    std::vector<bb::fr> witness = import_witness_single(ARTIFACTS_PATH + instruction_name + ".witness.pack");
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + instruction_name + ".acir");
+    bb::UltraCircuitBuilder builder = loader.get_circuit_builder();
+    for (size_t i = 0; i < witness.size(); i++) {
+        builder.variables[i] = witness[i];
+        if (i < 100) {
+            info(witness[i]);
+        }
+    }
+    return bb::CircuitChecker::check(builder);
+}
+
+/**
+ * @brief Tests 127-bit unsigned addition
+ * Verifies that the ACIR implementation of addition is correct
+ * Execution time: ~2.8 seconds on SMTBOX
+ */
+TEST(acir_formal_proofs, uint_terms_add)
+{
+    std::string TESTNAME = "Binary::Add_Unsigned_127_Unsigned_127";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver);
+    bool res = verify_add(&solver, circuit);
+    EXPECT_FALSE(res);
+
+    if (res) {
+        save_buggy_witness(TESTNAME, circuit);
+    }
+}
+
+/**
+ * @brief Tests 127-bit unsigned bitwise AND
+ * Verifies that the ACIR implementation of AND is correct
+ */
+TEST(acir_formal_proofs, uint_terms_and)
+{
+    std::string TESTNAME = "Binary::And_Unsigned_127_Unsigned_127";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver);
+    bool res = verify_and(&solver, circuit);
+    EXPECT_FALSE(res);
+    if (res) {
+        save_buggy_witness(TESTNAME, circuit);
+    }
+}
+
+/**
+ * @brief Tests 32-bit unsigned bitwise AND
+ * Verifies that the ACIR implementation of AND is correct
+ * Execution time: ~6.7 seconds on SMTBOX
+ */
+TEST(acir_formal_proofs, uint_terms_and32)
+{
+    std::string TESTNAME = "Binary::And_Unsigned_32_Unsigned_32";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver);
+    bool res = verify_and(&solver, circuit);
+    EXPECT_FALSE(res);
+    if (res) {
+        save_buggy_witness(TESTNAME, circuit);
+    }
+}
+
+/**
+ * @brief Tests 126-bit unsigned division
+ * Verifies that the ACIR implementation of division is correct
+ */
+TEST(acir_formal_proofs, uint_terms_div)
+{
+    std::string TESTNAME = "Binary::Div_Unsigned_126_Unsigned_126";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver);
+    bool res = verify_div(&solver, circuit);
+    EXPECT_FALSE(res);
+    if (res) {
+        save_buggy_witness(TESTNAME, circuit);
+    }
+}
+
+/**
+ * @brief Tests 127-bit unsigned equality comparison
+ * Verifies two cases:
+ * 1. When operands are equal, the result must be 1
+ * 2. When operands are not equal, the result must be 0
+ * Execution time: ~22.8 seconds on SMTBOX
+ */
+TEST(acir_formal_proofs, uint_terms_eq)
+{
+    std::string TESTNAME = "Binary::Eq_Unsigned_127_Unsigned_127";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver1 = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit1 = loader.get_bitvec_smt_circuit(&solver1);
+
+    bool res1 = verify_eq_on_equlaity(&solver1, circuit1);
+    EXPECT_FALSE(res1);
+    if (res1) {
+        save_buggy_witness(TESTNAME, circuit1);
+    }
+
+    smt_solver::Solver solver2 = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit2 = loader.get_bitvec_smt_circuit(&solver2);
+
+    bool res2 = verify_eq_on_inequlaity(&solver2, circuit2);
+    EXPECT_FALSE(res2);
+    if (res2) {
+        save_buggy_witness(TESTNAME, circuit2);
+    }
+}
+
+/**
+ * @brief Tests 127-bit unsigned less than comparison
+ * Verifies two cases:
+ * 1. When a < b, the result must be 1
+ * 2. When a > b, the result must be 0
+ * Execution time: ~81.7 seconds on SMTBOX
+ */
+TEST(acir_formal_proofs, uint_terms_lt)
+{
+    std::string TESTNAME = "Binary::Lt_Unsigned_127_Unsigned_127";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver1 = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit1 = loader.get_bitvec_smt_circuit(&solver1);
+
+    bool res1 = verify_lt(&solver1, circuit1);
+    EXPECT_FALSE(res1);
+    if (res1) {
+        save_buggy_witness(TESTNAME, circuit1);
+    }
+
+    smt_solver::Solver solver2 = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit2 = loader.get_bitvec_smt_circuit(&solver2);
+
+    bool res2 = verify_gt(&solver2, circuit2);
+    EXPECT_FALSE(res2);
+    if (res2) {
+        save_buggy_witness(TESTNAME, circuit2);
+    }
+}
+
+/**
+ * @brief Tests 126-bit unsigned modulo
+ * Verifies that the ACIR implementation of modulo is correct
+ * Execution time: ??? 
seconds on SMTBOX + */ +TEST(acir_formal_proofs, uint_terms_mod) +{ + std::string TESTNAME = "Binary::Mod_Unsigned_126_Unsigned_126"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_mod(&solver, circuit); + solver.print_assertions(); + EXPECT_FALSE(res); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } +} + +/** + * @brief Tests 127-bit unsigned multiplication + * Verifies that the ACIR implementation of multiplication is correct + * Execution time: ~10.0 seconds on SMTBOX + */ +TEST(acir_formal_proofs, uint_terms_mul) +{ + std::string TESTNAME = "Binary::Mul_Unsigned_127_Unsigned_127"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_mul(&solver, circuit); + EXPECT_FALSE(res); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } +} + +/** + * @brief Tests 127-bit unsigned bitwise OR + * Verifies that the ACIR implementation of OR is correct + */ +TEST(acir_formal_proofs, uint_terms_or) +{ + std::string TESTNAME = "Binary::Or_Unsigned_127_Unsigned_127"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_or(&solver, circuit); + EXPECT_FALSE(res); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } +} + +/** + * @brief Tests 32-bit unsigned bitwise OR + * Verifies that the ACIR implementation of OR is correct + * Execution time: ~20.3 seconds on SMTBOX + */ +TEST(acir_formal_proofs, uint_terms_or32) +{ + std::string TESTNAME = "Binary::Or_Unsigned_32_Unsigned_32"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_or(&solver, circuit); + EXPECT_FALSE(res); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } +} + +/** + * @brief Tests 64-bit left shift + * Verifies that the ACIR implementation of left shift is correct + * Execution time: ~4588 seconds on SMTBOX + * Memory usage: ~30GB RAM + */ +TEST(acir_formal_proofs, uint_terms_shl64) +{ + std::string TESTNAME = "Binary::Shl_Unsigned_64_Unsigned_8"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_shl64(&solver, circuit); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } + EXPECT_FALSE(res); +} + +/** + * @brief Tests 32-bit left shift + * Verifies that the ACIR implementation of left shift is correct + * Execution time: ~4574 seconds on SMTBOX + * Memory usage: ~30GB RAM + */ +TEST(acir_formal_proofs, uint_terms_shl32) +{ + std::string TESTNAME = "Binary::Shl_Unsigned_32_Unsigned_8"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_shl32(&solver, circuit); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } + 
EXPECT_FALSE(res); +} + +/** + * @brief Tests right shift operation + * Verifies that the ACIR implementation of right shift is correct + * Execution time: ~3927.88 seconds on SMTBOX + * Memory usage: ~10GB RAM + */ +TEST(acir_formal_proofs, uint_terms_shr) +{ + std::string TESTNAME = "Binary::Shr_Unsigned_64_Unsigned_8"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_shr(&solver, circuit); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } + EXPECT_FALSE(res); +} + +/** + * @brief Tests 127-bit unsigned subtraction + * Verifies that the ACIR implementation of subtraction is correct + * Execution time: ~2.6 seconds on SMTBOX + */ +TEST(acir_formal_proofs, uint_terms_sub) +{ + std::string TESTNAME = "Binary::Sub_Unsigned_127_Unsigned_127"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_sub(&solver, circuit); + EXPECT_FALSE(res); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } +} + +/** + * @brief Tests 127-bit unsigned bitwise XOR + * Verifies that the ACIR implementation of XOR is correct + */ +TEST(acir_formal_proofs, uint_terms_xor) +{ + std::string TESTNAME = "Binary::Xor_Unsigned_127_Unsigned_127"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_xor(&solver, circuit); + EXPECT_FALSE(res); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } +} + +/** + * @brief Tests 32-bit unsigned bitwise XOR + * Verifies that the ACIR implementation of XOR is correct + */ +TEST(acir_formal_proofs, uint_terms_xor32) +{ + std::string TESTNAME = "Binary::Xor_Unsigned_32_Unsigned_32"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_xor(&solver, circuit); + EXPECT_FALSE(res); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } +} + +/** + * @brief Tests 127-bit unsigned bitwise NOT + * Verifies that the ACIR implementation of NOT is correct + * Execution time: ~21.3 seconds on SMTBOX + */ +TEST(acir_formal_proofs, uint_terms_not) +{ + std::string TESTNAME = "Not_Unsigned_127"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_bitvec_smt_circuit(&solver); + bool res = verify_not_127(&solver, circuit); + EXPECT_FALSE(res); + if (res) { + save_buggy_witness(TESTNAME, circuit); + } +} + +/** + * @brief Tests field addition + * Verifies that the ACIR implementation of field addition is correct + * Execution time: ~0.22 seconds on SMTBOX + */ +TEST(acir_formal_proofs, field_terms_add) +{ + std::string TESTNAME = "Binary::Add_Field_0_Field_0"; + AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir"); + smt_solver::Solver solver = loader.get_smt_solver(); + smt_circuit::UltraCircuit circuit = loader.get_field_smt_circuit(&solver); + bool res = verify_add(&solver, circuit); + EXPECT_FALSE(res); + if 
(res) {
+        save_buggy_witness(TESTNAME, circuit);
+    }
+}
+
+/**
+ * @brief Tests field division
+ * Verifies that the ACIR implementation of field division is correct
+ * Execution time: ~0.22 seconds on SMTBOX
+ */
+TEST(acir_formal_proofs, field_terms_div)
+{
+    std::string TESTNAME = "Binary::Div_Field_0_Field_0";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit = loader.get_field_smt_circuit(&solver);
+    bool res = verify_div_field(&solver, circuit);
+    EXPECT_FALSE(res);
+    if (res) {
+        save_buggy_witness(TESTNAME, circuit);
+    }
+}
+
+/**
+ * @brief Tests field equality comparison
+ * Verifies two cases:
+ * 1. When operands are equal, the result must be 1
+ * 2. When operands are not equal, the result must be 0
+ * Execution time: ~19.2 seconds on SMTBOX
+ */
+TEST(acir_formal_proofs, field_terms_eq)
+{
+    std::string TESTNAME = "Binary::Eq_Field_0_Field_0";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver1 = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit1 = loader.get_field_smt_circuit(&solver1);
+
+    bool res1 = verify_eq_on_equlaity(&solver1, circuit1);
+    EXPECT_FALSE(res1);
+    if (res1) {
+        save_buggy_witness(TESTNAME, circuit1);
+    }
+
+    smt_solver::Solver solver2 = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit2 = loader.get_field_smt_circuit(&solver2);
+
+    bool res2 = verify_eq_on_inequlaity(&solver2, circuit2);
+    EXPECT_FALSE(res2);
+    if (res2) {
+        save_buggy_witness(TESTNAME, circuit2);
+    }
+}
+
+/**
+ * @brief Tests field multiplication
+ * Verifies that the ACIR implementation of field multiplication is correct
+ * Execution time: ~0.22 seconds on SMTBOX
+ */
+TEST(acir_formal_proofs, field_terms_mul)
+{
+    std::string TESTNAME = "Binary::Mul_Field_0_Field_0";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit = loader.get_field_smt_circuit(&solver);
+    bool res = verify_mul(&solver, circuit);
+    EXPECT_FALSE(res);
+    if (res) {
+        save_buggy_witness(TESTNAME, circuit);
+    }
+}
+
+/**
+ * @brief Tests 126-bit signed division
+ * Verifies that the ACIR implementation of signed division is correct
+ * Execution time: >17 DAYS on SMTBOX
+ */
+TEST(acir_formal_proofs, integer_terms_div)
+{
+    std::string TESTNAME = "Binary::Div_Signed_126_Signed_126";
+    AcirToSmtLoader loader = AcirToSmtLoader(ARTIFACTS_PATH + TESTNAME + ".acir");
+    smt_solver::Solver solver = loader.get_smt_solver();
+    smt_circuit::UltraCircuit circuit = loader.get_integer_smt_circuit(&solver);
+    bool res = verify_div(&solver, circuit);
+    EXPECT_FALSE(res);
+    if (res) {
+        save_buggy_witness(TESTNAME, circuit);
+    }
+}
\ No newline at end of file
diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/formal_proofs.cpp b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/formal_proofs.cpp
new file mode 100644
index 000000000000..4c8bc12ee6b9
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/formal_proofs.cpp
@@ -0,0 +1,272 @@
+#include "barretenberg/smt_verification/circuit/ultra_circuit.hpp"
+#include "barretenberg/smt_verification/solver/solver.hpp"
+#include "barretenberg/smt_verification/util/smt_util.hpp"
+#include "helpers.hpp"
+
+void debug_solution(smt_solver::Solver* solver, std::unordered_map<std::string, cvc5::Term> terms)
+{
+    solver->print_assertions();
+    std::unordered_map<std::string, std::string> vals = solver->model(terms);
+    for (auto const& i : vals) {
+        info(i.first, " = ", i.second);
+    }
+}
+
+bool verify_add(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = a + b;
+    // Assert the negation of the specification (c != a + b); any model is a counterexample.
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_sub(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = a - b;
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_mul(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = a * b;
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_div(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = a / b;
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_div_field(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    // c = a / b
+    // c * b = a
+    auto cr = c * b;
+    a != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_mod(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    smt_circuit::STerm c1 = a % b;
+    c != c1;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "c1", c1 } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_or(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = a | b;
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_and(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = a & b;
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_xor(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = a ^ b;
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+// takes 0.346 seconds on SMTBOX
+bool verify_not_127(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    // 2**127 - 1
+    auto mask = smt_terms::BVConst("170141183460469231731687303715884105727", solver, 10);
+    auto br = a ^ mask;
+    b != br;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "br", br } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_shl32(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = shl32(a, b, solver);
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_shl64(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = shl64(a, b, solver);
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_shr(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    auto cr = shr(a, b, solver);
+    c != cr;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c }, { "cr", cr } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_eq_on_equlaity(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    a == b;
+    c != 1;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_eq_on_inequlaity(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    a != b;
+    c != 0;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_lt(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    a < b;
+    c != 1;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
+
+bool verify_gt(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit)
+{
+    auto a = circuit["a"];
+    auto b = circuit["b"];
+    auto c = circuit["c"];
+    a > b;
+    c != 0;
+    bool res = solver->check();
+    if (res) {
+        std::unordered_map<std::string, cvc5::Term> terms({ { "a", a }, { "b", b }, { "c", c } });
+        debug_solution(solver, terms);
+    }
+    return res;
+}
\ No newline at end of file
diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/formal_proofs.hpp b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/formal_proofs.hpp
new file mode 100644
index 000000000000..6992c1d0386d
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/formal_proofs.hpp
@@ -0,0 +1,149 @@
+#pragma once
+#include "barretenberg/smt_verification/circuit/ultra_circuit.hpp"
+#include "barretenberg/smt_verification/solver/solver.hpp"
+#include "cvc5/cvc5.h"
+#include <string>
+#include <unordered_map>
+
+/**
+ * @brief Debug helper to print solver assertions and model values
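+ *
+ * Intended to be called after check() returns true: prints every assertion known to the solver,
+ * then evaluates and prints the model value of each supplied term.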
+ * @param solver SMT solver instance
+ * @param terms Map of term names to CVC5 terms to evaluate
+ */
+void debug_solution(smt_solver::Solver* solver, std::unordered_map<std::string, cvc5::Term> terms);
+
+/**
+ * @brief Verify addition operation: c = a + b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_add(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify subtraction operation: c = a - b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_sub(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify multiplication operation: c = a * b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_mul(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify integer division operation: c = a / b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_div(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify field division operation: c = a / b (in field)
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_div_field(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify modulo operation: c = a mod b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_mod(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify bitwise OR operation: c = a | b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_or(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify bitwise AND operation: c = a & b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_and(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify bitwise XOR operation: c = a ^ b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_xor(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify NOT operation on 127 bits: b = ~a
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_not_127(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify 32-bit left shift operation: c = a << b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_shl32(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify 64-bit left shift operation: c = a << b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_shl64(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify right shift operation: c = a >> b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_shr(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify equality comparison when values are equal
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_eq_on_equlaity(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify equality comparison when values are not equal
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_eq_on_inequlaity(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify less than comparison: a < b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_lt(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
+
+/**
+ * @brief Verify greater than comparison: a > b
+ * @param solver SMT solver instance
+ * @param circuit Circuit containing variables a, b, c
+ * @return true if a counterexample is found (verification fails)
+ */
+bool verify_gt(smt_solver::Solver* solver, smt_circuit::UltraCircuit circuit);
diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.cpp b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.cpp
new file mode 100644
index 000000000000..a3ab02b5f584
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.cpp
@@ -0,0 +1,63 @@
+#include "barretenberg/smt_verification/circuit/ultra_circuit.hpp"
+#include "barretenberg/smt_verification/solver/solver.hpp"
+#include "barretenberg/smt_verification/util/smt_util.hpp"
+
+// Used only for base = 2 and exp <= 8, so overflow is not a concern.
+uint32_t pow_num(uint32_t base, uint32_t exp)
+{
+    uint32_t res = 1;
+    for (uint32_t i = 0; i < exp; i++) {
+        res *= base;
+    }
+    return res;
+}
+
+// returns 2^v0, computed by square-and-multiply over the 8 exponent bits (MSB first)
+smt_circuit::STerm pow2_8(smt_circuit::STerm v0, smt_solver::Solver* solver)
+{
+    uint32_t BIT_SIZE = 8;
+    auto one = smt_terms::BVConst("1", solver, 10);
+    auto two = smt_terms::BVConst("2", solver, 10);
+    smt_circuit::STerm res = smt_circuit::BVVar("res", solver);
+    res = one;
+    auto exp = v0;
+    for (uint32_t i = 1; i < BIT_SIZE + 1; i++) {
+        auto r2 = res * res;
+        auto mask = pow_num(2, BIT_SIZE - i);
+        // b is the i-th most significant bit of v0: mask out bit (BIT_SIZE - i) and shift it down
+        auto b = (v0 & mask) >> (BIT_SIZE - i);
+        // square, then multiply by 2 only when the bit is set
+        res = (r2 * two * b) + (1 - b) * r2;
+    }
+    return res;
+}
+
+smt_circuit::STerm shl(smt_circuit::STerm v0, smt_circuit::STerm v1, smt_solver::Solver* solver)
+{
+    auto pow2_v1 = pow2_8(v1, solver);
+    return v0 * pow2_v1;
+}
+
+smt_circuit::STerm shr(smt_circuit::STerm v0, smt_circuit::STerm v1, smt_solver::Solver* solver)
+{
+    auto pow2_v1 = pow2_8(v1, solver);
+    auto res = v0 / pow2_v1;
+    return res;
+}
+
+smt_circuit::STerm shl64(smt_circuit::STerm v0, smt_circuit::STerm v1, smt_solver::Solver* solver)
smt_circuit::STerm v1, smt_solver::Solver* solver) +{ + auto shifted = shl(v0, v1, solver); + // 2^64 - 1 + auto mask = smt_terms::BVConst("18446744073709551615", solver, 10); + auto res = shifted & mask; + return res; +} + +smt_circuit::STerm shl32(smt_circuit::STerm v0, smt_circuit::STerm v1, smt_solver::Solver* solver) +{ + auto shifted = shl(v0, v1, solver); + // 2^32 - 1 + auto mask = smt_terms::BVConst("4294967295", solver, 10); + auto res = shifted & mask; + return res; +} diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.hpp b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.hpp new file mode 100644 index 000000000000..d1e6fe4ab45b --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.hpp @@ -0,0 +1,47 @@ +#include "barretenberg/smt_verification/circuit/ultra_circuit.hpp" +#include "barretenberg/smt_verification/solver/solver.hpp" +#include "barretenberg/smt_verification/terms/term.hpp" + +/** + * @brief Left shift operation with 64-bit truncation + * @param v0 Value to shift + * @param v1 Number of bits to shift (8-bit value) + * @param solver SMT solver instance + * @return Result of (v0 << v1) truncated to 64 bits + */ +smt_circuit::STerm shl64(smt_circuit::STerm v0, smt_circuit::STerm v1, smt_solver::Solver* solver); + +/** + * @brief Left shift operation with 32-bit truncation + * @param v0 Value to shift + * @param v1 Number of bits to shift (8-bit value) + * @param solver SMT solver instance + * @return Result of (v0 << v1) truncated to 32 bits + */ +smt_circuit::STerm shl32(smt_circuit::STerm v0, smt_circuit::STerm v1, smt_solver::Solver* solver); + +/** + * @brief Calculates power of 2 + * @param v0 Exponent (8-bit value) + * @param solver SMT solver instance + * @return 2^v0 + */ +smt_circuit::STerm pow2_8(smt_circuit::STerm v0, smt_solver::Solver* solver); + +/** + * @brief Right shift operation + * @param v0 Value to shift + * @param v1 Number of bits to shift (8-bit value) + * @param solver SMT solver instance + * @return Result of (v0 >> v1) + */ +smt_circuit::STerm shr(smt_circuit::STerm v0, smt_circuit::STerm v1, smt_solver::Solver* solver); + +/** + * @brief Left shift operation without truncation + * @param v0 Value to shift + * @param v1 Number of bits to shift (8-bit value) + * @param solver SMT solver instance + * @return Result of (v0 << v1) without truncation + */ +smt_circuit::STerm shl(smt_circuit::STerm v0, smt_circuit::STerm v1, smt_solver::Solver* solver); \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.test.cpp b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.test.cpp new file mode 100644 index 000000000000..9918dcb6947e --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/acir_formal_proofs/helpers.test.cpp @@ -0,0 +1,92 @@ +#include "helpers.hpp" +#include "barretenberg/common/test.hpp" +#include "barretenberg/smt_verification/circuit/ultra_circuit.hpp" +#include "barretenberg/smt_verification/solver/solver.hpp" +#include "barretenberg/smt_verification/util/smt_util.hpp" +#include "barretenberg/stdlib/primitives/uint/uint.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" + +using namespace bb; +using witness_ct = stdlib::witness_t; +using uint_ct = stdlib::uint32; + +using namespace smt_terms; + +TEST(helpers, shl) +{ + Solver s("30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001", default_solver_config, 16, 32); + + STerm x = BVVar("x", &s); + STerm y = BVVar("y", &s); + STerm 
z = shl(x, y, &s); + x == 5; + y == 1; + // z should be 10 + s.check(); + std::unordered_map terms({ { "x", x }, { "y", y }, { "z", z } }); + std::unordered_map vals = s.model(terms); + info("x = ", vals["x"]); + info("y = ", vals["y"]); + info("z = ", vals["z"]); + // z == 1010 in binary + EXPECT_TRUE(vals["z"] == "00000000000000000000000000001010"); +} + +TEST(helpers, shr) +{ + Solver s("30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001", default_solver_config, 16, 32); + + STerm x = BVVar("x", &s); + STerm y = BVVar("y", &s); + STerm z = shr(x, y, &s); + x == 5; + y == 1; + // z should be 2 + s.check(); + std::unordered_map terms({ { "x", x }, { "y", y }, { "z", z } }); + std::unordered_map vals = s.model(terms); + info("x = ", vals["x"]); + info("y = ", vals["y"]); + info("z = ", vals["z"]); + // z == 10 in binary + EXPECT_TRUE(vals["z"] == "00000000000000000000000000000010"); +} + +TEST(helpers, buggy_shr) +{ + // Regression test: using the SMT solver, we found a case where this shift circuit evaluated + // 1879048194 >> 16 to 0 instead of the expected 28672. + Solver s("30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001", default_solver_config, 16, 32); + + STerm x = BVVar("x", &s); + STerm y = BVVar("y", &s); + STerm z = shr(x, y, &s); + x == 1879048194; + y == 16; + // z should be 28672 + s.check(); + std::unordered_map terms({ { "x", x }, { "y", y }, { "z", z } }); + std::unordered_map vals = s.model(terms); + info("x = ", vals["x"]); + info("y = ", vals["y"]); + info("z = ", vals["z"]); + // z == 28672 in binary + EXPECT_TRUE(vals["z"] == "00000000000000000111000000000000"); +} + +TEST(helpers, pow2) +{ + Solver s("30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001", default_solver_config, 16, 32); + + STerm x = BVVar("x", &s); + STerm z = pow2_8(x, &s); + x == 11; + // z should be 2048 + s.check(); + std::unordered_map terms({ { "x", x }, { "z", z } }); + std::unordered_map vals = s.model(terms); + info("x = ", vals["x"]); + info("z = ", vals["z"]); + // z == 2048 in binary + EXPECT_TRUE(vals["z"] == "00000000000000000000100000000000"); +} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index e4b65f266eca..cf6f7fefba12 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -228,10 +228,10 @@ void prove_tube(const std::string& output_path) // circuit // TODO(https://github.com/AztecProtocol/barretenberg/issues/1048): INSECURE - make this tube proof actually use // these public inputs by turning proof into witnesses and calling set_public on each witness - auto num_public_inputs = static_cast(static_cast(proof.mega_proof[1])); - num_public_inputs -= bb::PAIRING_POINT_ACCUMULATOR_SIZE; // don't add the agg object + auto num_inner_public_inputs = static_cast(static_cast(proof.mega_proof[1])); + num_inner_public_inputs -= bb::PAIRING_POINT_ACCUMULATOR_SIZE; // don't add the agg object - for (size_t i = 0; i < num_public_inputs; i++) { + for (size_t i = 0; i < num_inner_public_inputs; i++) { auto offset = bb::HONK_PROOF_PUBLIC_INPUT_OFFSET; builder->add_public_variable(proof.mega_proof[i + offset]); } @@ -278,13 +278,13 @@ void prove_tube(const std::string& output_path) Verifier tube_verifier(tube_verification_key, ipa_verification_key); // Break up the tube proof into the honk portion and the ipa portion - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1168): Add formula to flavor - const size_t HONK_PROOF_LENGTH = 
469; + const size_t HONK_PROOF_LENGTH_WITHOUT_INNER_PUB_INPUTS = + UltraRollupFlavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS + PAIRING_POINT_ACCUMULATOR_SIZE + IPA_CLAIM_SIZE; // The extra calculation is for the IPA proof length. - ASSERT(tube_proof.size() == HONK_PROOF_LENGTH + 1 + 4 * (CONST_ECCVM_LOG_N) + 2 + 2 + num_public_inputs); + ASSERT(tube_proof.size() == HONK_PROOF_LENGTH_WITHOUT_INNER_PUB_INPUTS + num_inner_public_inputs); // split out the ipa proof - const std::ptrdiff_t honk_proof_with_pub_inputs_length = - static_cast(HONK_PROOF_LENGTH + num_public_inputs); + const std::ptrdiff_t honk_proof_with_pub_inputs_length = static_cast( + HONK_PROOF_LENGTH_WITHOUT_INNER_PUB_INPUTS - IPA_PROOF_LENGTH + num_inner_public_inputs); auto ipa_proof = HonkProof(tube_proof.begin() + honk_proof_with_pub_inputs_length, tube_proof.end()); auto tube_honk_proof = HonkProof(tube_proof.begin(), tube_proof.begin() + honk_proof_with_pub_inputs_length); bool verified = tube_verifier.verify_proof(tube_honk_proof, ipa_proof); @@ -895,15 +895,13 @@ template bool verify_honk(const std::string& proof_path, bool verified; if constexpr (HasIPAAccumulator) { // Break up the tube proof into the honk portion and the ipa portion - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1168): Add formula to flavor - const size_t HONK_PROOF_LENGTH = 469; - const size_t num_public_inputs = - static_cast(uint64_t(proof[1])) - PAIRING_POINT_ACCUMULATOR_SIZE - IPA_CLAIM_SIZE; + const size_t HONK_PROOF_LENGTH = Flavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS - IPA_PROOF_LENGTH; + const size_t num_public_inputs = static_cast(uint64_t(proof[1])); // The extra calculation is for the IPA proof length. debug("proof size: ", proof.size()); debug("num public inputs: ", num_public_inputs); // TODO(https://github.com/AztecProtocol/barretenberg/issues/1182): Move to ProofSurgeon. - ASSERT(proof.size() == HONK_PROOF_LENGTH + 1 + 4 * (CONST_ECCVM_LOG_N) + 2 + 2 + num_public_inputs); + ASSERT(proof.size() == HONK_PROOF_LENGTH + IPA_PROOF_LENGTH + num_public_inputs); // split out the ipa proof const std::ptrdiff_t honk_proof_with_pub_inputs_length = static_cast(HONK_PROOF_LENGTH + num_public_inputs); diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp index 68d5958a2b85..7e83b477ef7e 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/ipa/ipa.hpp @@ -6,9 +6,11 @@ #include "barretenberg/common/container.hpp" #include "barretenberg/common/thread.hpp" #include "barretenberg/common/throw_or_abort.hpp" +#include "barretenberg/constants.hpp" #include "barretenberg/ecc/scalar_multiplication/scalar_multiplication.hpp" #include "barretenberg/stdlib/hash/poseidon2/poseidon2.hpp" #include "barretenberg/stdlib/honk_verifier/ipa_accumulator.hpp" +#include "barretenberg/stdlib/primitives/circuit_builders/circuit_builders_fwd.hpp" #include "barretenberg/stdlib/transcript/transcript.hpp" #include "barretenberg/transcript/transcript.hpp" #include @@ -20,6 +22,8 @@ namespace bb { // clang-format off +constexpr size_t IPA_PROOF_LENGTH = 1 + 4 * CONST_ECCVM_LOG_N + 2 + 2; + /** * @brief IPA (inner product argument) commitment scheme class. 
* @@ -949,6 +953,32 @@ template class IPA { output_claim.opening_pair.evaluation.self_reduce(); return {output_claim, prover_transcript->proof_data}; } + + static std::pair, HonkProof> create_fake_ipa_claim_and_proof(UltraCircuitBuilder& builder) + requires Curve::is_stdlib_type { + using NativeCurve = curve::Grumpkin; + using Builder = typename Curve::Builder; + using Curve = stdlib::grumpkin; + auto ipa_transcript = std::make_shared(); + auto ipa_commitment_key = std::make_shared>(1 << CONST_ECCVM_LOG_N); + size_t n = 4; + auto poly = Polynomial(n); + for (size_t i = 0; i < n; i++) { + poly.at(i) = fq::random_element(); + } + fq x = fq::random_element(); + fq eval = poly.evaluate(x); + auto commitment = ipa_commitment_key->commit(poly); + const OpeningPair opening_pair = { x, eval }; + IPA::compute_opening_proof(ipa_commitment_key, { poly, opening_pair }, ipa_transcript); + + auto stdlib_comm = Curve::Group::from_witness(&builder, commitment); + auto stdlib_x = Curve::ScalarField::from_witness(&builder, x); + auto stdlib_eval = Curve::ScalarField::from_witness(&builder, eval); + OpeningClaim stdlib_opening_claim{ { stdlib_x, stdlib_eval }, stdlib_comm }; + + return {stdlib_opening_claim, ipa_transcript->export_proof()}; + } }; } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.test.cpp index a50c0a8919af..dcf3b4fe3155 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.test.cpp @@ -309,6 +309,7 @@ TYPED_TEST(KZGTest, ShpleminiKzgWithShiftAndConcatenation) // Gemini verifier output: // - claim: d+1 commitments to Fold_{r}^(0), Fold_{-r}^(0), Fold^(l), d+1 evaluations a_0_pos, a_l, l = 0:d-1 + bool consistency_checked = true; const auto batch_opening_claim = ShpleminiVerifier::compute_batch_opening_claim(n, RefVector(unshifted_commitments), @@ -318,7 +319,9 @@ TYPED_TEST(KZGTest, ShpleminiKzgWithShiftAndConcatenation) mle_opening_point, this->vk()->get_g1_identity(), verifier_transcript, - {}, + /* repeated commitments= */ {}, + /* has zk = */ {}, + &consistency_checked, /* libra commitments = */ {}, /* libra evaluations = */ {}, to_vector_of_ref_vectors(concatenation_groups_commitments), diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.hpp index 1f594bbd3ebd..330759436618 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.hpp @@ -3,8 +3,10 @@ #include "barretenberg/commitment_schemes/commitment_key.hpp" #include "barretenberg/commitment_schemes/gemini/gemini_impl.hpp" #include "barretenberg/commitment_schemes/shplonk/shplonk.hpp" +#include "barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp" #include "barretenberg/commitment_schemes/verification_key.hpp" #include "barretenberg/flavor/repeated_commitments_data.hpp" +#include "barretenberg/sumcheck/zk_sumcheck_data.hpp" #include "barretenberg/transcript/transcript.hpp" namespace bb { @@ -28,13 +30,12 @@ template class ShpleminiProver_ { std::span multilinear_challenge, const std::shared_ptr>& commitment_key, const std::shared_ptr& transcript, - const std::vector>& libra_univariates = {}, - const std::vector& libra_evaluations = {}, + const std::array& 
libra_polynomials = {}, RefSpan concatenated_polynomials = {}, const std::vector>& groups_to_be_concatenated = {}) { // While Shplemini is not templated on Flavor, we derive ZK flag this way - const bool has_zk = !libra_evaluations.empty(); + const bool has_zk = (libra_polynomials[0].size() > 0); std::vector opening_claims = GeminiProver::prove(circuit_size, f_polynomials, g_polynomials, @@ -46,15 +47,25 @@ template class ShpleminiProver_ { has_zk); // Create opening claims for Libra masking univariates std::vector libra_opening_claims; - size_t idx = 0; - for (auto [libra_univariate, libra_evaluation] : zip_view(libra_univariates, libra_evaluations)) { - OpeningClaim new_claim; - new_claim.polynomial = Polynomial(libra_univariate); - new_claim.opening_pair.challenge = multilinear_challenge[idx]; - new_claim.opening_pair.evaluation = libra_evaluation; - libra_opening_claims.push_back(new_claim); - idx++; + OpeningClaim new_claim; + + if (has_zk) { + static constexpr FF subgroup_generator = Curve::subgroup_generator; + const auto gemini_r = opening_claims[0].opening_pair.challenge; + + std::array libra_eval_labels = { + "Libra:concatenation_eval", "Libra:shifted_big_sum_eval", "Libra:big_sum_eval", "Libra:quotient_eval" + }; + const std::array evaluation_points = { gemini_r, gemini_r * subgroup_generator, gemini_r, gemini_r }; + for (size_t idx = 0; idx < 4; idx++) { + new_claim.polynomial = std::move(libra_polynomials[idx]); + new_claim.opening_pair.challenge = evaluation_points[idx]; + new_claim.opening_pair.evaluation = new_claim.polynomial.evaluate(evaluation_points[idx]); + transcript->send_to_verifier(libra_eval_labels[idx], new_claim.opening_pair.evaluation); + libra_opening_claims.push_back(new_claim); + } } + const OpeningClaim batched_claim = ShplonkProver::prove(commitment_key, opening_claims, transcript, libra_opening_claims); return batched_claim; @@ -134,13 +145,15 @@ template class ShpleminiVerifier_ { const Commitment& g1_identity, const std::shared_ptr& transcript, const RepeatedCommitmentsData& repeated_commitments = {}, - RefSpan libra_univariate_commitments = {}, - const std::vector& libra_univariate_evaluations = {}, + const bool has_zk = false, + bool* consistency_checked = nullptr, // TODO(https://github.com/AztecProtocol/barretenberg/issues/1191). 
+ // Shplemini Refactoring: Remove bool pointer + const std::array& libra_commitments = {}, + const Fr& libra_univariate_evaluation = Fr{ 0 }, const std::vector>& concatenation_group_commitments = {}, RefSpan concatenated_evaluations = {}) { - // Extract log_circuit_size size_t log_circuit_size{ 0 }; if constexpr (Curve::is_stdlib_type) { @@ -152,7 +165,6 @@ template class ShpleminiVerifier_ { Fr batched_evaluation = Fr{ 0 }; // While Shplemini is not templated on Flavor, we derive ZK flag this way - const bool has_zk = !libra_univariate_evaluations.empty(); Commitment hiding_polynomial_commitment; if (has_zk) { hiding_polynomial_commitment = @@ -176,6 +188,14 @@ template class ShpleminiVerifier_ { const std::vector gemini_eval_challenge_powers = gemini::powers_of_evaluation_challenge(gemini_evaluation_challenge, CONST_PROOF_SIZE_LOG_N); + std::array libra_evaluations; + if (has_zk) { + libra_evaluations[0] = transcript->template receive_from_prover("Libra:concatenation_eval"); + libra_evaluations[1] = transcript->template receive_from_prover("Libra:shifted_big_sum_eval"); + libra_evaluations[2] = transcript->template receive_from_prover("Libra:big_sum_eval"); + libra_evaluations[3] = transcript->template receive_from_prover("Libra:quotient_eval"); + } + // Process Shplonk transcript data: // - Get Shplonk batching challenge const Fr shplonk_batching_challenge = transcript->template get_challenge("Shplonk:nu"); @@ -297,11 +317,14 @@ template class ShpleminiVerifier_ { if (has_zk) { add_zk_data(commitments, scalars, - libra_univariate_commitments, - libra_univariate_evaluations, - multivariate_challenge, + libra_commitments, + libra_evaluations, + gemini_evaluation_challenge, shplonk_batching_challenge, shplonk_evaluation_challenge); + + *consistency_checked = SmallSubgroupIPAVerifier::check_evaluations_consistency( + libra_evaluations, gemini_evaluation_challenge, multivariate_challenge, libra_univariate_evaluation); } return { commitments, scalars, shplonk_evaluation_challenge }; @@ -588,7 +611,7 @@ template class ShpleminiVerifier_ { * * @param commitments * @param scalars - * @param libra_univariate_commitments - * @param libra_univariate_evaluations - * @param multivariate_challenge + * @param libra_commitments + * @param libra_evaluations + * @param gemini_evaluation_challenge * @param shplonk_batching_challenge * @param shplonk_evaluation_challenge */ static void add_zk_data(std::vector& commitments, std::vector& scalars, - RefSpan libra_univariate_commitments, - const std::vector& libra_univariate_evaluations, - const std::vector& multivariate_challenge, + const std::array& libra_commitments, + const std::array& libra_evaluations, + const Fr& gemini_evaluation_challenge, const Fr& shplonk_batching_challenge, const Fr& shplonk_evaluation_challenge) @@ -611,32 +634,35 @@ template class ShpleminiVerifier_ { // need to keep track of the contribution to the constant term Fr& constant_term = scalars.back(); - // compute shplonk denominators and batch invert them - std::vector denominators; - size_t num_libra_univariates = libra_univariate_commitments.size(); - // compute Shplonk denominators and invert them - for (size_t idx = 0; idx < num_libra_univariates; idx++) { - if constexpr (Curve::is_stdlib_type) { - denominators.push_back(Fr(1) / (shplonk_evaluation_challenge - multivariate_challenge[idx])); - } else { - denominators.push_back(shplonk_evaluation_challenge - multivariate_challenge[idx]); - } - }; - if constexpr (!Curve::is_stdlib_type) { - Fr::batch_invert(denominators); - } // add Libra commitments to the vector of commitments; 
compute corresponding scalars and the correction to // the constant term - for (const auto [libra_univariate_commitment, denominator, libra_univariate_evaluation] : - zip_view(libra_univariate_commitments, denominators, libra_univariate_evaluations)) { - commitments.push_back(std::move(libra_univariate_commitment)); - Fr scaling_factor = denominator * shplonk_challenge_power; - scalars.push_back((-scaling_factor)); + for (size_t idx = 0; idx < libra_commitments.size(); idx++) { + commitments.push_back(libra_commitments[idx]); + } + + std::array denominators; + std::array batching_scalars; + // compute Shplonk denominators and invert them + denominators[0] = Fr(1) / (shplonk_evaluation_challenge - gemini_evaluation_challenge); + denominators[1] = + Fr(1) / (shplonk_evaluation_challenge - Fr(Curve::subgroup_generator) * gemini_evaluation_challenge); + denominators[2] = denominators[0]; + denominators[3] = denominators[0]; + + // compute the scalars to be multiplied against the commitments [libra_concatenated], [big_sum], [big_sum], and + // [libra_quotient] + for (size_t idx = 0; idx < libra_evaluations.size(); idx++) { + Fr scaling_factor = denominators[idx] * shplonk_challenge_power; + batching_scalars[idx] = -scaling_factor; shplonk_challenge_power *= shplonk_batching_challenge; - // update the constant term of the Shplonk batched claim - constant_term += scaling_factor * libra_univariate_evaluation; + constant_term += scaling_factor * libra_evaluations[idx]; } + + // to save a scalar mul, add the sum of the batching scalars corresponding to the big sum evaluations + scalars.push_back(batching_scalars[0]); + scalars.push_back(batching_scalars[1] + batching_scalars[2]); + scalars.push_back(batching_scalars[3]); } }; } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.test.cpp index e3537c00a756..bfb9fd931694 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.test.cpp @@ -223,118 +223,4 @@ TYPED_TEST(ShpleminiTest, CorrectnessOfGeminiClaimBatching) EXPECT_EQ(shplemini_result, expected_result); } -/** - * @brief Libra masking univariates are used in sumcheck to prevent the leakage of witness data through the evaluations - * of round univariates. Here we test the opening of log_n Libra masking univariates batched with the opening of several - * prover polynomials and their shifts. - * - */ -TYPED_TEST(ShpleminiTest, ShpleminiWithMaskingLibraUnivariates) -{ - using ShpleminiProver = ShpleminiProver_; - using ShpleminiVerifier = ShpleminiVerifier_; - using KZG = KZG; - using IPA = IPA; - using Fr = typename TypeParam::ScalarField; - using Commitment = typename TypeParam::AffineElement; - using Polynomial = typename bb::Polynomial; - - const size_t n = 16; - const size_t log_n = 4; - // In practice, the length of Libra univariates is equal to FLAVOR::BATCHED_RELATION_PARTIAL_LENGTH - const size_t LIBRA_UNIVARIATE_LENGTH = 12; - - std::array interpolation_domain; - for (size_t idx = 0; idx < LIBRA_UNIVARIATE_LENGTH; idx++) { - interpolation_domain[idx] = Fr(idx); - } - // Generate multilinear polynomials, their commitments (genuine and mocked) and evaluations (genuine) at a - // random point. 
- auto mle_opening_point = this->random_evaluation_point(log_n); // sometimes denoted 'u' - auto poly1 = Polynomial::random(n); - auto poly2 = Polynomial::random(n, 1); - auto poly3 = Polynomial::random(n, 1); - auto poly4 = Polynomial::random(n); - - std::vector> libra_univariates; - std::vector libra_commitments; - std::vector libra_evaluations; - for (size_t idx = 0; idx < log_n; idx++) { - // generate random polynomial - Polynomial libra_polynomial = Polynomial::random(LIBRA_UNIVARIATE_LENGTH); - // create a univariate with the same coefficients (to store an array instead of a vector) - bb::Univariate libra_univariate; - for (size_t i = 0; i < LIBRA_UNIVARIATE_LENGTH; i++) { - libra_univariate.value_at(i) = libra_polynomial[i]; - } - libra_univariates.push_back(libra_univariate); - - // commit to libra polynomial and populate the vector of libra commitments - Commitment libra_commitment = this->commit(libra_polynomial); - libra_commitments.push_back(libra_commitment); - - // evaluate current libra univariate at the corresponding challenge and store the value in libra evaluations - libra_evaluations.push_back(libra_polynomial.evaluate(mle_opening_point[idx])); - } - - Commitment commitment1 = this->commit(poly1); - Commitment commitment2 = this->commit(poly2); - Commitment commitment3 = this->commit(poly3); - Commitment commitment4 = this->commit(poly4); - std::vector unshifted_commitments = { commitment1, commitment2, commitment3, commitment4 }; - std::vector shifted_commitments = { commitment2, commitment3 }; - auto eval1 = poly1.evaluate_mle(mle_opening_point); - auto eval2 = poly2.evaluate_mle(mle_opening_point); - auto eval3 = poly3.evaluate_mle(mle_opening_point); - auto eval4 = poly4.evaluate_mle(mle_opening_point); - auto eval2_shift = poly2.evaluate_mle(mle_opening_point, true); - auto eval3_shift = poly3.evaluate_mle(mle_opening_point, true); - - // Collect multilinear evaluations for input to prover - // std::vector multilinear_evaluations = { eval1, eval2, eval3, eval4, eval2_shift, eval3_shift }; - - auto prover_transcript = NativeTranscript::prover_init_empty(); - - // Run the full prover PCS protocol: - auto opening_claim = ShpleminiProver::prove(Fr{ n }, - RefArray{ poly1, poly2, poly3, poly4 }, - RefArray{ poly2, poly3 }, - mle_opening_point, - this->ck(), - prover_transcript, - libra_univariates, - libra_evaluations); - if constexpr (std::is_same_v) { - IPA::compute_opening_proof(this->ck(), opening_claim, prover_transcript); - } else { - KZG::compute_opening_proof(this->ck(), opening_claim, prover_transcript); - } - - // Run the full verifier PCS protocol with genuine opening claims (genuine commitment, genuine evaluation) - - auto verifier_transcript = NativeTranscript::verifier_init_empty(prover_transcript); - - // Gemini verifier output: - // - claim: d+1 commitments to Fold_{r}^(0), Fold_{-r}^(0), Fold^(l), d+1 evaluations a_0_pos, a_l, l = 0:d-1 - auto batch_opening_claim = ShpleminiVerifier::compute_batch_opening_claim(n, - RefVector(unshifted_commitments), - RefVector(shifted_commitments), - RefArray{ eval1, eval2, eval3, eval4 }, - RefArray{ eval2_shift, eval3_shift }, - mle_opening_point, - this->vk()->get_g1_identity(), - verifier_transcript, - {}, - RefVector(libra_commitments), - libra_evaluations); - - if constexpr (std::is_same_v) { - auto result = IPA::reduce_verify_batch_opening_claim(batch_opening_claim, this->vk(), verifier_transcript); - EXPECT_EQ(result, true); - } else { - const auto pairing_points = 
KZG::reduce_verify_batch_opening_claim(batch_opening_claim, verifier_transcript); - // Final pairing check: e([Q] - [Q_z] + z[W], [1]_2) = e([W], [x]_2) - EXPECT_EQ(this->vk()->pairing_check(pairing_points[0], pairing_points[1]), true); - } -} } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.hpp index 5e90d4a00bb7..65b0fca780d8 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplonk.hpp @@ -45,6 +45,13 @@ template class ShplonkProver_ { { // Find n, the maximum size of all polynomials fⱼ(X) size_t max_poly_size{ 0 }; + + if (!libra_opening_claims.empty()) { + // Max size of the polynomials in Libra opening claims is Curve::SUBGROUP_SIZE*2 + 2; we round it up to the + // next power of 2 + const size_t log_subgroup_size = static_cast(numeric::get_msb(Curve::SUBGROUP_SIZE)); + max_poly_size = 1 << (log_subgroup_size + 1); + }; for (const auto& claim : opening_claims) { max_poly_size = std::max(max_poly_size, claim.polynomial.size()); } diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp new file mode 100644 index 000000000000..39f9b27b6253 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp @@ -0,0 +1,545 @@ +#pragma once + +#include "barretenberg/constants.hpp" +#include "barretenberg/ecc/curves/bn254/bn254.hpp" +#include "barretenberg/polynomials/polynomial.hpp" +#include "barretenberg/polynomials/univariate.hpp" +#include "barretenberg/sumcheck/zk_sumcheck_data.hpp" + +#include +#include + +namespace bb { + +/** + * @brief Small Subgroup IPA Prover for Zero-Knowledge Opening of Libra Polynomials. + * + * @details Implements a less general version of the protocol described in + * [Ariel's HackMD](https://hackmd.io/xYHn1qqvQjey1yJutcuXdg). This version is specialized for making + * commitments and openings of Libra polynomials zero-knowledge. + * + * ### Overview + * + * Let \f$ G \f$ be the masked concatenated Libra polynomial. Without masking, it is defined by concatenating the Libra + * constant term and the monomial coefficients of the Libra univariates \f$ g_i \f$ in the Lagrange basis over \f$ H + * \f$. More explicitly, the unmasked concatenated Libra polynomial is given by the following vector of coefficients: + * \f[ \big( \text{libra_constant_term}, g_{0,0}, \ldots, g_{0, + * \text{LIBRA_UNIVARIATES_LENGTH} - 1}, \ldots, g_{d-1, 0}, g_{d-1, \text{LIBRA_UNIVARIATES_LENGTH} - 1} \big) \f], + * where \f$ d = \text{log_circuit_size}\f$. + * It is masked by adding \f$ (r_0 + r_1 X) Z_{H}(X)\f$, where \f$ Z_H(X) \f$ is the vanishing polynomial for \f$ H \f$. + * + * This class enables the prover to: + * + * - Open the commitment to the concatenated Libra polynomial with zero-knowledge while proving correctness of the claimed + * inner product. The concatenated polynomial is committed to during the construction of the ZKSumcheckData structure. + * + * ### Inputs + * The prover receives: + * - **ZKSumcheckData:** Contains: + * - Monomial coefficients of the masked concatenated Libra polynomial \f$ G \f$. + * - Interpolation domain for a small subgroup \( H \subset \mathbb{F}^\ast \), where \(\mathbb{F} \) is the + * ScalarField of a given curve. + * - **Sumcheck challenges:** \( u_0, \ldots, u_{D-1} \), where \( D = \text{CONST_PROOF_SIZE_LOG_N} \). + * - **Claimed inner product:** \( s = \text{claimed\_ipa\_eval} \), defined as: + * \f[ + * s = \sum_{i=1}^{|H|} F(g^i) G(g^i), + * \f] + * where \( F(X) \) is the ``challenge`` polynomial constructed from the Sumcheck round challenges (see the formula + * below) and \( G(X) \) is the concatenated Libra polynomial. + * + * ### Prover's Construction + * 1. Define a polynomial \( A(X) \), called the **big sum polynomial**, which is analogous to the big product + * polynomial used to prove claims about \f$ \prod_{h\in H} f(h) \cdot g(h) \f$. It is uniquely defined by the + * following: + * - \( A(1) = 0 \), + * - \( A(g^i) = A(g^{i-1}) + F(g^{i-1}) G(g^{i-1}) \) for \( i = 1, \ldots, |H|-1 \). + * 2. Mask \( A(X) \) by adding \( Z_H(X) R(X) \), where \( R(X) \) is a random polynomial of degree 2. + * 3. Commit to \( A(X) \) and send the commitment to the verifier. + * + * ### Key Identity + * \( A(X) \) is honestly constructed, i.e. + * - \f$ A_0 = 0\f$, + * - \f$ A_{i} = A_{i-1} + F_{i-1} * G_{i-1}\f$ (Lagrange coefficients over \f$ H \f$) for \f$ i = 1,\ldots, |H|\f$ + * - \f$ A_{|H|} \f$ is equal to the claimed inner product \f$s\f$. + * if and only if the following identity holds: + * \f[ L_1(X) A(X) + (X - g^{-1}) (A(g \cdot X) - A(X) - + * F(X) G(X)) + L_{|H|}(X) (A(X) - s) = Z_H(X) Q(X), \f] where \( Q(X) \) is the quotient of the left-hand side by \( + * Z_H(X) \). The second summand is the translation of the second condition using the fact that the coefficients of \f$ + * A(gX) \f$ are given by a cyclic shift of the coefficients of \f$ A(X) \f$. + * + * The methods of this class allow the prover to compute \( A(X) \) and \( Q(X) \). + * + * After receiving a random evaluation challenge \f$ r \f$, the prover sends \f$ G(r), A(g\cdot r), A(r), Q(r) \f$ to + * the verifier. In our case, \f$ r \f$ is the Gemini evaluation challenge, and this part is taken care of by Shplemini.
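+ *
+ * ### Toy Example (illustrative)
+ * For \f$ |H| = 4 \f$ with generator \f$ g \f$, write \f$ F_i = F(g^i) \f$ and \f$ G_i = G(g^i) \f$ for the
+ * Lagrange coefficients of \f$ F \f$ and \f$ G \f$ over \f$ H \f$. An honestly constructed big sum polynomial
+ * satisfies \f$ A(1) = 0 \f$, \f$ A(g) = F_0 G_0 \f$, \f$ A(g^2) = F_0 G_0 + F_1 G_1 \f$, and
+ * \f$ A(g^3) = F_0 G_0 + F_1 G_1 + F_2 G_2 \f$, while the \f$ L_{|H|} \f$ term of the key identity above forces
+ * \f$ A(g^3) = s \f$. In the instantiations in this file, the challenge polynomial leaves the last Lagrange
+ * coefficient of \f$ F \f$ at zero, so the pinned value is the full inner product.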
+ */ +template class SmallSubgroupIPAProver { + using Curve = typename Flavor::Curve; + using FF = typename Curve::ScalarField; + // The size of a multiplicative subgroup in the ScalarField of a curve + static constexpr size_t SUBGROUP_SIZE = Curve::SUBGROUP_SIZE; + // Size of the polynomial to be divided by Z_H + static constexpr size_t BATCHED_POLYNOMIAL_LENGTH = 2 * SUBGROUP_SIZE + 2; + // Size of Q(X) + static constexpr size_t QUOTIENT_LENGTH = SUBGROUP_SIZE + 2; + // The length of a random polynomial to mask Prover's Sumcheck Univariates. In the case of BN254-based Flavors, we + // send the coefficients of the univariates, hence we choose this value to be the max sumcheck univariate length + // over Translator, Ultra, and Mega. In ECCVM, the Sumcheck prover will commit to its univariates, which reduces the + // required length from 23 to 3. + static constexpr size_t LIBRA_UNIVARIATES_LENGTH = (std::is_same_v) ? 9 : 3; + // Fixed generator of H + static constexpr FF subgroup_generator = Curve::subgroup_generator; + + // Interpolation domain {1, g, \ldots, g^{SUBGROUP_SIZE - 1}} used by ECCVM + std::array interpolation_domain; + // We use IFFT over BN254 scalar field + EvaluationDomain bn_evaluation_domain; + + // Monomial coefficients of the concatenated Libra masking polynomial extracted from ZKSumcheckData + Polynomial concatenated_polynomial; + // Lagrange coefficients of the concatenated Libra masking polynomial = constant_term || g_0 || ... 
|| g_{d-1} + Polynomial libra_concatenated_lagrange_form; + + // Claimed evaluation s = constant_term + g_0(u_0) + ... + g_{d-1}(u_{d-1}), where g_i is the i'th Libra masking + // univariate + FF claimed_evaluation; + + // The polynomial obtained by concatenated powers of sumcheck challenges + Polynomial challenge_polynomial; + Polynomial challenge_polynomial_lagrange; + + // Big sum polynomial A(X) + Polynomial big_sum_polynomial_unmasked; + Polynomial big_sum_polynomial; + std::array big_sum_lagrange_coeffs; + + // The RHS of the key identity, denoted C(X) in the HackMD + Polynomial batched_polynomial; + + // Quotient of the batched polynomial C(X) by the subgroup vanishing polynomial X^{|H|} - 1 + Polynomial batched_quotient; + + public: + SmallSubgroupIPAProver(ZKSumcheckData& zk_sumcheck_data, + const std::vector& multivariate_challenge, + const FF claimed_ipa_eval, + std::shared_ptr transcript, + std::shared_ptr commitment_key) + : interpolation_domain(zk_sumcheck_data.interpolation_domain) + , concatenated_polynomial(zk_sumcheck_data.libra_concatenated_monomial_form) + , libra_concatenated_lagrange_form(zk_sumcheck_data.libra_concatenated_lagrange_form) + , challenge_polynomial(SUBGROUP_SIZE) + , challenge_polynomial_lagrange(SUBGROUP_SIZE) + , big_sum_polynomial_unmasked(SUBGROUP_SIZE) + , big_sum_polynomial(SUBGROUP_SIZE + 3) // + 3 to account for masking + , batched_polynomial(BATCHED_POLYNOMIAL_LENGTH) + , batched_quotient(QUOTIENT_LENGTH) + + { + // Extract the evaluation domain computed by ZKSumcheckData + if constexpr (std::is_same_v) { + bn_evaluation_domain = std::move(zk_sumcheck_data.bn_evaluation_domain); + } + + // Construct the challenge polynomial in Lagrange basis, compute its monomial coefficients + compute_challenge_polynomial(multivariate_challenge); + + // Construct unmasked big sum polynomial in Lagrange basis, compute its monomial coefficients and mask it + compute_big_sum_polynomial(); + + // Send masked commitment [A + Z_H * R] to the verifier, where R is of degree 2 + transcript->template send_to_verifier("Libra:big_sum_commitment", commitment_key->commit(big_sum_polynomial)); + + // Compute C(X) + compute_batched_polynomial(claimed_ipa_eval); + + // Compute Q(X) + compute_batched_quotient(); + + // Send commitment [Q] to the verifier + if (commitment_key) { + transcript->template send_to_verifier("Libra:quotient_commitment", + commitment_key->commit(batched_quotient)); + } + } + + // Getter to pass the witnesses to ShpleminiProver. Big sum polynomial is evaluated at 2 points (and is small) + std::array, NUM_LIBRA_EVALUATIONS> get_witness_polynomials() const + { + return { concatenated_polynomial, big_sum_polynomial, big_sum_polynomial, batched_quotient }; + } + // Getters for test purposes + const Polynomial& get_batched_polynomial() const { return batched_polynomial; } + const Polynomial& get_challenge_polynomial() const { return challenge_polynomial; } + + /** + * @brief Computes the challenge polynomial F(X) based on the provided multivariate challenges. + * + * This method generates a polynomial in both Lagrange basis and monomial basis from Sumcheck's + * multivariate_challenge vector. The result is stored in `challenge_polynomial_lagrange` and + * `challenge_polynomial`. The former is re-used in the computation of the big sum polynomial A(X) + * + * ### Lagrange Basis + * The Lagrange basis polynomial is constructed as follows: + * - Initialize the first coefficient as `1`. 
+ * - For each challenge index `idx_poly` in the `CONST_PROOF_SIZE_LOG_N` range, compute a sequence of coefficients + * recursively as powers of the corresponding multivariate challenge. + * - Store these coefficients in `coeffs_lagrange_basis`. + * More explicitly, + * \f$ F = (1 , 1 , u_0, \ldots, u_0^{LIBRA_UNIVARIATES_LENGTH-1}, \ldots, 1, u_{D-1}, \ldots, + * u_{D-1}^{LIBRA_UNIVARIATES_LENGTH-1} ) \f$ in the Lagrange basis over \f$ H \f$. + * + * ### Monomial Basis + * If the curve is not `BN254`, the monomial polynomial is constructed directly using un-optimized Lagrange + * interpolation. Otherwise, an IFFT is used to convert the Lagrange basis coefficients into monomial basis + * coefficients. + * + * @param multivariate_challenge A vector of field elements used to compute the challenge polynomial. + */ + void compute_challenge_polynomial(const std::vector& multivariate_challenge) + { + std::vector coeffs_lagrange_basis(SUBGROUP_SIZE); + coeffs_lagrange_basis[0] = FF(1); + + for (size_t challenge_idx = 0; challenge_idx < CONST_PROOF_SIZE_LOG_N; challenge_idx++) { + // We concatenate 1 with CONST_PROOF_SIZE_LOG_N Libra Univariates of length LIBRA_UNIVARIATES_LENGTH + const size_t poly_to_concatenate_start = 1 + LIBRA_UNIVARIATES_LENGTH * challenge_idx; + coeffs_lagrange_basis[poly_to_concatenate_start] = FF(1); + for (size_t idx = poly_to_concatenate_start + 1; idx < poly_to_concatenate_start + LIBRA_UNIVARIATES_LENGTH; + idx++) { + // Recursively compute the powers of the challenge + coeffs_lagrange_basis[idx] = coeffs_lagrange_basis[idx - 1] * multivariate_challenge[challenge_idx]; + } + } + + challenge_polynomial_lagrange = Polynomial(coeffs_lagrange_basis); + + // Compute monomial coefficients + if constexpr (!std::is_same_v) { + challenge_polynomial = Polynomial(interpolation_domain, coeffs_lagrange_basis, SUBGROUP_SIZE); + } else { + std::vector challenge_polynomial_ifft(SUBGROUP_SIZE); + polynomial_arithmetic::ifft( + coeffs_lagrange_basis.data(), challenge_polynomial_ifft.data(), bn_evaluation_domain); + challenge_polynomial = Polynomial(challenge_polynomial_ifft); + } + } + + /** + * @brief Computes the big sum polynomial A(X) + * + * #### Lagrange Basis + * - First, we recursively compute the coefficients of the unmasked big sum polynomial, i.e. we set the first + * coefficient to `0`. + * - For each i, the coefficient is updated as: + * \f$ \texttt{big_sum_lagrange_coeffs}(g^{i}) = + * \texttt{big_sum_lagrange_coeffs}(g^{i-1}) + + * \texttt{challenge_polynomial_lagrange}(g^{i-1}) \cdot + * \texttt{libra_concatenated_lagrange_form}(g^{i-1}) \f$ + * #### Masking Term + * - A random polynomial of degree 2 is generated and added to the Big Sum Polynomial. + * - The masking term is applied as \f$ Z_H(X) \cdot \texttt{masking_term} \f$, where \f$ Z_H(X) \f$ is the + * vanishing polynomial. 
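+ *
+ * Note: since \f$ Z_H(X) = X^{|H|} - 1 \f$, adding \f$ Z_H(X) \cdot \texttt{masking_term} \f$ amounts to
+ * subtracting the \f$ i \f$-th coefficient of the masking term at index \f$ i \f$ and adding it back at index
+ * \f$ i + |H| \f$; this is exactly how the final loop of this method applies the mask.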
+ * + */ + void compute_big_sum_polynomial() + { + big_sum_lagrange_coeffs[0] = 0; + + // Compute the big sum coefficients recursively + for (size_t idx = 1; idx < SUBGROUP_SIZE; idx++) { + size_t prev_idx = idx - 1; + big_sum_lagrange_coeffs[idx] = + big_sum_lagrange_coeffs[prev_idx] + + challenge_polynomial_lagrange.at(prev_idx) * libra_concatenated_lagrange_form.at(prev_idx); + }; + + // Get the coefficients in the monomial basis + if constexpr (!std::is_same_v) { + big_sum_polynomial_unmasked = Polynomial(interpolation_domain, big_sum_lagrange_coeffs, SUBGROUP_SIZE); + } else { + std::vector big_sum_ifft(SUBGROUP_SIZE); + polynomial_arithmetic::ifft(big_sum_lagrange_coeffs.data(), big_sum_ifft.data(), bn_evaluation_domain); + big_sum_polynomial_unmasked = Polynomial(big_sum_ifft); + } + // Generate random masking_term of degree 2, add Z_H(X) * masking_term + bb::Univariate masking_term = bb::Univariate::get_random(); + big_sum_polynomial += big_sum_polynomial_unmasked; + + for (size_t idx = 0; idx < masking_term.size(); idx++) { + big_sum_polynomial.at(idx) -= masking_term.value_at(idx); + big_sum_polynomial.at(idx + SUBGROUP_SIZE) += masking_term.value_at(idx); + } + }; + + /** + * @brief Compute \f$ L_1(X) * A(X) + (X - 1/g) (A(gX) - A(X) - F(X) G(X)) + L_{|H|}(X)(A(X) - s) \f$, where \f$ g + * \f$ is the fixed generator of \f$ H \f$. + * + */ + void compute_batched_polynomial(const FF& claimed_evaluation) + { + // Compute shifted big sum polynomial A(gX) + Polynomial shifted_big_sum(SUBGROUP_SIZE + 3); + + for (size_t idx = 0; idx < SUBGROUP_SIZE + 3; idx++) { + shifted_big_sum.at(idx) = big_sum_polynomial.at(idx) * interpolation_domain[idx % SUBGROUP_SIZE]; + } + + const auto& [lagrange_first, lagrange_last] = + compute_lagrange_polynomials(interpolation_domain, bn_evaluation_domain); + + // Compute -F(X)*G(X), the negated product of challenge_polynomial and libra_concatenated_monomial_form + for (size_t i = 0; i < concatenated_polynomial.size(); ++i) { + for (size_t j = 0; j < challenge_polynomial.size(); ++j) { + batched_polynomial.at(i + j) -= concatenated_polynomial.at(i) * challenge_polynomial.at(j); + } + } + + // Compute - F(X) * G(X) + A(gX) - A(X) + for (size_t idx = 0; idx < shifted_big_sum.size(); idx++) { + batched_polynomial.at(idx) += shifted_big_sum.at(idx) - big_sum_polynomial.at(idx); + } + + // Multiply - F(X) * G(X) + A(gX) - A(X) by (X - 1/g): + // 1. Multiply by X + for (size_t idx = batched_polynomial.size() - 1; idx > 0; idx--) { + batched_polynomial.at(idx) = batched_polynomial.at(idx - 1); + } + batched_polynomial.at(0) = FF(0);
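+        // Note: after the shift above, batched_polynomial holds X * P(X), where P(X) denotes the accumulated
+        // polynomial A(gX) - A(X) - F(X) * G(X). The pass below subtracts g^{-1} * P(X), using
+        // interpolation_domain[SUBGROUP_SIZE - 1] = g^{|H| - 1} = g^{-1}, so the result is (X - 1/g) * P(X).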
+ // 2. Subtract (1/g) * (A(gX) - A(X) - F(X) * G(X)) + for (size_t idx = 0; idx < batched_polynomial.size() - 1; idx++) { + batched_polynomial.at(idx) -= batched_polynomial.at(idx + 1) * interpolation_domain[SUBGROUP_SIZE - 1]; + } + + // Add (L_1 + L_{|H|}) * A(X) to the result + for (size_t i = 0; i < big_sum_polynomial.size(); ++i) { + for (size_t j = 0; j < SUBGROUP_SIZE; ++j) { + batched_polynomial.at(i + j) += big_sum_polynomial.at(i) * (lagrange_first.at(j) + lagrange_last.at(j)); + } + } + // Subtract L_{|H|} * s + for (size_t idx = 0; idx < SUBGROUP_SIZE; idx++) { + batched_polynomial.at(idx) -= lagrange_last.at(idx) * claimed_evaluation; + } + } + /** + * @brief Compute monomial coefficients of the first and last Lagrange polynomials + * + * @param interpolation_domain + * @param bn_evaluation_domain + * @return std::array, 2> + */ + std::array, 2> static compute_lagrange_polynomials( + const std::array& interpolation_domain, const EvaluationDomain& bn_evaluation_domain) + { + // Compute the monomial coefficients of L_1 + std::array lagrange_coeffs; + lagrange_coeffs[0] = FF(1); + for (size_t idx = 1; idx < SUBGROUP_SIZE; idx++) { + lagrange_coeffs[idx] = FF(0); + } + + Polynomial lagrange_first_monomial(SUBGROUP_SIZE); + if constexpr (!std::is_same_v) { + lagrange_first_monomial = Polynomial(interpolation_domain, lagrange_coeffs, SUBGROUP_SIZE); + } else { + std::vector lagrange_first_ifft(SUBGROUP_SIZE); + polynomial_arithmetic::ifft(lagrange_coeffs.data(), lagrange_first_ifft.data(), bn_evaluation_domain); + lagrange_first_monomial = Polynomial(lagrange_first_ifft); + } + + // Compute the monomial coefficients of L_{|H|}, the last Lagrange polynomial + lagrange_coeffs[0] = FF(0); + lagrange_coeffs[SUBGROUP_SIZE - 1] = FF(1); + + Polynomial lagrange_last_monomial; + if constexpr (!std::is_same_v) { + lagrange_last_monomial = Polynomial(interpolation_domain, lagrange_coeffs, SUBGROUP_SIZE); + } else { + std::vector lagrange_last_ifft(SUBGROUP_SIZE); + polynomial_arithmetic::ifft(lagrange_coeffs.data(), lagrange_last_ifft.data(), bn_evaluation_domain); + lagrange_last_monomial = Polynomial(lagrange_last_ifft); + } + + return { lagrange_first_monomial, lagrange_last_monomial }; + } + /** @brief Efficiently compute the quotient of batched_polynomial by Z_H = X^{|H|} - 1 + */ + void compute_batched_quotient() + { + + auto remainder = batched_polynomial; + // Long division by X^{|H|} - 1: the coefficient at index idx >= |H| becomes a quotient coefficient and + // folds into the remainder at index idx - |H|; for an honest prover the remainder is zero. + for (size_t idx = BATCHED_POLYNOMIAL_LENGTH - 1; idx >= SUBGROUP_SIZE; idx--) { + batched_quotient.at(idx - SUBGROUP_SIZE) = remainder.at(idx); + remainder.at(idx - SUBGROUP_SIZE) += remainder.at(idx); + } + } +};
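+
+// Typical flow (an illustrative sketch of how the pieces introduced in this PR fit together): after sumcheck,
+// the prover constructs a SmallSubgroupIPAProver from its ZKSumcheckData, which commits to the big sum
+// polynomial A(X) and the quotient Q(X); the witnesses returned by get_witness_polynomials() are then opened
+// through ShpleminiProver. On the verifier side, ShpleminiVerifier_ receives the four Libra evaluations and
+// calls SmallSubgroupIPAVerifier::check_evaluations_consistency (see shplemini.hpp in this diff).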
+ +/** + * @brief Verifier class for Small Subgroup IPA Prover. + * + * @details Checks the consistency of polynomial evaluations provided by the prover against + * the values derived from the sumcheck challenge and a random evaluation challenge. + */ +template class SmallSubgroupIPAVerifier { + using FF = typename Curve::ScalarField; + + static constexpr size_t SUBGROUP_SIZE = Curve::SUBGROUP_SIZE; + + static constexpr size_t LIBRA_UNIVARIATES_LENGTH = (std::is_same_v) ? 9 : 3; + + public: + /*! + * @brief Verifies the consistency of polynomial evaluations provided by the prover. + * + * @details + * Given a subgroup of \f$ \mathbb{F}^\ast \f$ and its generator \f$ g \f$, this function checks whether the following + * equation holds: + * \f[ L_1(r) A(r) + (r - g^{-1}) \left( A(g*r) - A(r) - F(r) G(r) \right) + L_{|H|}(r) \left( A(r) - s + * \right) = T(r) Z_H(r) \f] where the following are sent by the prover: + * - \f$ A(r), A(g\cdot r) \f$ are the evaluations of the "big sum polynomial" + * - \f$ G(r) \f$ is the evaluation of the concatenation of the coefficients of the masking Libra polynomials + * + * - \f$ T(r) \f$ is the evaluation of the quotient of the left hand side above by the vanishing polynomial for + * \f$H\f$ + * and the following evaluations are computed by the verifier: + * - \f$ L_1 \f$ and \f$ L_{|H|} \f$ are the Lagrange polynomials corresponding to \f$ 1 \f$ and \f$ g^{-1} \f$. + * - \f$ F(r) \f$ is the evaluation of the polynomial obtained by concatenating powers of sumcheck round challenges + * - \f$ Z_H(r) \f$ is the vanishing polynomial \f$ X^{|H|} - 1\f$ evaluated at the challenge point. + * + * @param libra_evaluations An array of polynomial evaluations containing: + * - \f$ G(r), A(g\cdot r), A(r), T(r) \f$. + * @param gemini_evaluation_challenge The challenge point \f$ r \f$ at which evaluations are verified. + * @param multilinear_challenge A vector of sumcheck round challenges. + * @param inner_product_eval_claim The claimed inner product of the coefficients of \f$G\f$ and \f$F\f$. + * @return True if the consistency check passes, false otherwise. + */ + static bool check_evaluations_consistency(const std::array& libra_evaluations, + const FF& gemini_evaluation_challenge, + const std::vector& multilinear_challenge, + const FF& inner_product_eval_claim) + { + + const FF subgroup_generator_inverse = Curve::subgroup_generator_inverse; + + // Compute the evaluation of the vanishing polynomial Z_H(X) at X = gemini_evaluation_challenge + const FF vanishing_poly_eval = gemini_evaluation_challenge.pow(SUBGROUP_SIZE) - FF(1); + + // Construct the challenge polynomial from the sumcheck challenges; the verifier has to evaluate it on its own + const std::vector challenge_polynomial_lagrange = compute_challenge_polynomial(multilinear_challenge); + + // Compute the evaluations of the challenge polynomial, Lagrange first, and Lagrange last for the fixed small + // subgroup + auto [challenge_poly, lagrange_first, lagrange_last] = + compute_batched_barycentric_evaluations(challenge_polynomial_lagrange, + gemini_evaluation_challenge, + subgroup_generator_inverse, + vanishing_poly_eval); + + const FF& concatenated_at_r = libra_evaluations[0]; + const FF& big_sum_shifted_eval = libra_evaluations[1]; + const FF& big_sum_eval = libra_evaluations[2]; + const FF& quotient_eval = libra_evaluations[3]; + + // Compute the evaluation of + // L_1(X) * A(X) + (X - 1/g) (A(gX) - A(X) - F(X) G(X)) + L_{|H|}(X)(A(X) - s) - Z_H(X) * Q(X) + FF diff = lagrange_first * big_sum_eval; + diff += (gemini_evaluation_challenge - subgroup_generator_inverse) * + (big_sum_shifted_eval - big_sum_eval - concatenated_at_r * challenge_poly); + diff += lagrange_last * (big_sum_eval - inner_product_eval_claim) - vanishing_poly_eval * quotient_eval; + + if constexpr (Curve::is_stdlib_type) { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1186). Insecure pattern. 
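+            // Note: get_value() compares native witness values only; no constraint forcing diff == 0 is added
+            // to the recursive circuit, which is what the TODO above refers to.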
+ return (diff.get_value() == FF(0).get_value()); + } else { + return (diff == FF(0)); + }; + } + + /** + * @brief Given the sumcheck multivariate challenge \f$ (u_0,\ldots, u_{D-1})\f$, where \f$ D = + * \text{CONST_PROOF_SIZE_LOG_N}\f$, the verifier has to construct and evaluate the polynomial whose + * coefficients are given by \f$ (1, u_0, u_0^2, u_1,\ldots, 1, u_{D-1}, u_{D-1}^2) \f$. We spend \f$ D \f$ + * multiplications to construct the coefficients. + * + * @param multivariate_challenge + * @return Polynomial + */ + static std::vector compute_challenge_polynomial(const std::vector& multivariate_challenge) + { + std::vector challenge_polynomial_lagrange(SUBGROUP_SIZE); + + challenge_polynomial_lagrange[0] = FF{ 1 }; + + // Populate the vector with the powers of the challenges + for (size_t idx_poly = 0; idx_poly < CONST_PROOF_SIZE_LOG_N; idx_poly++) { + size_t current_idx = 1 + LIBRA_UNIVARIATES_LENGTH * idx_poly; + challenge_polynomial_lagrange[current_idx] = FF(1); + for (size_t idx = 1; idx < LIBRA_UNIVARIATES_LENGTH; idx++) { + // Recursively compute the powers of the challenge + challenge_polynomial_lagrange[current_idx + idx] = + challenge_polynomial_lagrange[current_idx + idx - 1] * multivariate_challenge[idx_poly]; + } + } + return challenge_polynomial_lagrange; + } + + /** + * @brief Efficient batch evaluation of the challenge polynomial, Lagrange first, and Lagrange last + * + * @details It is a modification of \ref bb::polynomial_arithmetic::compute_barycentric_evaluation + * "compute_barycentric_evaluation" method that does not require an EvaluationDomain object and outputs the barycentric + * evaluation of a polynomial along with the evaluations of the first and last Lagrange polynomials. The + * interpolation domain is given by \f$ (1, g, g^2, \ldots, g^{|H| -1 } )\f$ + * + * @param coeffs Coefficients of the polynomial to be evaluated, in our case it is the challenge polynomial + * @param r Evaluation point; we use the Gemini evaluation challenge + * @param inverse_root_of_unity Inverse of the generator of the subgroup H + * @param vanishing_poly_eval The evaluation of \f$ Z_H \f$ at the evaluation point + * @return std::array + */ + static std::array compute_batched_barycentric_evaluations(const std::vector& coeffs, + const FF& r, + const FF& inverse_root_of_unity, + const FF& vanishing_poly_eval) + { + std::array denominators; + FF one = FF{ 1 }; + FF numerator = vanishing_poly_eval; + + numerator *= one / FF(SUBGROUP_SIZE); // (r^n - 1) / n + + denominators[0] = r - one; + FF work_root = inverse_root_of_unity; // g^{-1} + // + // Compute the denominators of the Lagrange polynomials evaluated at r + for (size_t i = 1; i < SUBGROUP_SIZE; ++i) { + denominators[i] = work_root * r; + denominators[i] -= one; // r * g^{-i} - 1 + work_root *= inverse_root_of_unity; + } + + // Invert/Batch invert denominators + if constexpr (Curve::is_stdlib_type) { + for (FF& denominator : denominators) { + denominator = one / denominator; + } + } else { + FF::batch_invert(&denominators[0], SUBGROUP_SIZE); + } + std::array result; + + // Accumulate the evaluation of the polynomials given by `coeffs` vector + result[0] = FF{ 0 }; + for (const auto& [coeff, denominator] : zip_view(coeffs, denominators)) { + result[0] += coeff * denominator; // + coeffs_i * 1/(r * g^{-i} - 1) + } + + result[0] = result[0] * numerator; // The evaluation of the polynomials given by its evaluations over H + result[1] = denominators[0] * numerator; // Lagrange first evaluated at r + result[2] = denominators[SUBGROUP_SIZE - 1] * numerator; // Lagrange last evaluated at r + + return result; + } 
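+
+    // For reference (illustrative): with interpolation nodes g^0, ..., g^{|H|-1}, the barycentric identity used
+    // above is L_i(r) = (r^{|H|} - 1) / (|H| * (r * g^{-i} - 1)), i.e. numerator * denominators[i] after
+    // inversion. Hence result[0] batches coeffs against these values, result[1] is the evaluation at node 1
+    // (L_1 in the docs), and result[2] is the evaluation at node g^{|H|-1} = g^{-1} (L_{|H|}).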
+}; +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/constants.hpp b/barretenberg/cpp/src/barretenberg/constants.hpp index 218761ae5506..a1cfae900adb 100644 --- a/barretenberg/cpp/src/barretenberg/constants.hpp +++ b/barretenberg/cpp/src/barretenberg/constants.hpp @@ -20,4 +20,7 @@ static constexpr uint32_t MAX_DATABUS_SIZE = 10000; // The number of entries in ProverPolynomials reserved for randomness intended to mask witness commitments, witness // evaluation at the sumcheck challenge, and, if necessary, the evaluation of the corresponding shift static constexpr uint32_t MASKING_OFFSET = 4; +// For ZK Flavors: the number of commitments and the number of evaluations required by Libra and SmallSubgroupIPA. +static constexpr uint32_t NUM_LIBRA_COMMITMENTS = 3; +static constexpr uint32_t NUM_LIBRA_EVALUATIONS = 4; } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp index 077ffd9de8c7..1789562498d8 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp @@ -439,31 +439,13 @@ HonkRecursionConstraintsOutput process_honk_recursion_constraints( } else if (nested_ipa_claims.size() > 2) { throw_or_abort("Too many nested IPA claims to accumulate"); } else { - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1184): Move to IPA class. if (honk_recursion == 2) { info("Proving with UltraRollupHonk but no IPA claims exist."); - // just create some fake IPA claim and proof - using NativeCurve = curve::Grumpkin; - using Curve = stdlib::grumpkin; - auto ipa_transcript = std::make_shared(); - auto ipa_commitment_key = std::make_shared>(1 << CONST_ECCVM_LOG_N); - size_t n = 4; - auto poly = Polynomial(n); - for (size_t i = 0; i < n; i++) { - poly.at(i) = fq::random_element(); - } - fq x = fq::random_element(); - fq eval = poly.evaluate(x); - auto commitment = ipa_commitment_key->commit(poly); - const OpeningPair opening_pair = { x, eval }; - IPA::compute_opening_proof(ipa_commitment_key, { poly, opening_pair }, ipa_transcript); - - auto stdlib_comm = Curve::Group::from_witness(&builder, commitment); - auto stdlib_x = Curve::ScalarField::from_witness(&builder, x); - auto stdlib_eval = Curve::ScalarField::from_witness(&builder, eval); - OpeningClaim stdlib_opening_claim{ { stdlib_x, stdlib_eval }, stdlib_comm }; + auto [stdlib_opening_claim, ipa_proof] = + IPA>::create_fake_ipa_claim_and_proof(builder); + output.ipa_claim = stdlib_opening_claim; - output.ipa_proof = ipa_transcript->export_proof(); + output.ipa_proof = ipa_proof; } } output.agg_obj_indices = current_aggregation_object; diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp index e44428be1a4b..310f2a57531d 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp @@ -42,18 +42,9 @@ void create_dummy_vkey_and_proof(Builder& builder, const std::vector& proof_fields) { // Set vkey->circuit_size correctly based on the proof size - size_t num_frs_comm = bb::field_conversion::calc_num_bn254_frs(); - size_t num_frs_fr = bb::field_conversion::calc_num_bn254_frs(); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1168): Add formula to flavor - 
assert((proof_size - bb::HONK_PROOF_PUBLIC_INPUT_OFFSET - Flavor::NUM_WITNESS_ENTITIES * num_frs_comm - - Flavor::NUM_ALL_ENTITIES * num_frs_fr - num_frs_comm) % - (num_frs_comm + num_frs_fr * (Flavor::BATCHED_RELATION_PARTIAL_LENGTH + 1)) == - 0); + ASSERT(proof_size == Flavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS); // Note: this computation should always result in log_circuit_size = CONST_PROOF_SIZE_LOG_N - auto log_circuit_size = - (proof_size - bb::HONK_PROOF_PUBLIC_INPUT_OFFSET - Flavor::NUM_WITNESS_ENTITIES * num_frs_comm - - Flavor::NUM_ALL_ENTITIES * num_frs_fr - num_frs_comm) / - (num_frs_comm + num_frs_fr * (Flavor::BATCHED_RELATION_PARTIAL_LENGTH + 1)); + auto log_circuit_size = CONST_PROOF_SIZE_LOG_N; // First key field is circuit size builder.assert_equal(builder.add_variable(1 << log_circuit_size), key_fields[0].witness_index); // Second key field is number of public inputs diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/honk_contract.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/honk_contract.hpp index c55919b4edc1..98c352bcb32b 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/honk_contract.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/honk_contract.hpp @@ -4,7 +4,7 @@ // Source code for the Ultrahonk Solidity verifier. // It's expected that the AcirComposer will inject a library which will load the verification key into memory. -const std::string HONK_CONTRACT_SOURCE = R"( +static const char HONK_CONTRACT_SOURCE[] = R"( pragma solidity ^0.8.27; type Fr is uint256; @@ -1398,20 +1398,31 @@ library RelationsLib { } } -// Errors -error PublicInputsLengthWrong(); -error SumcheckFailed(); -error ShpleminiFailed(); interface IVerifier { function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool); } -// Smart contract verifier of honk proofs -contract HonkVerifier is IVerifier -{ + +abstract contract BaseHonkVerifier is IVerifier { using FrLib for Fr; + uint256 immutable N; + uint256 immutable logN; + uint256 immutable numPublicInputs; + + constructor(uint256 _N, uint256 _logN, uint256 _numPublicInputs) { + N = _N; + logN = _logN; + numPublicInputs = _numPublicInputs; + } + + error PublicInputsLengthWrong(); + error SumcheckFailed(); + error ShpleminiFailed(); + + function loadVerificationKey() internal pure virtual returns (Honk.VerificationKey memory); + function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) { Honk.VerificationKey memory vk = loadVerificationKey(); Honk.Proof memory p = TranscriptLib.loadProof(proof); @@ -1437,10 +1448,6 @@ contract HonkVerifier is IVerifier return sumcheckVerified && shpleminiVerified; // Boolean condition not required - nice for vanity :) } - function loadVerificationKey() internal pure returns (Honk.VerificationKey memory) { - return HonkVerificationKey.loadVerificationKey(); - } - function computePublicInputDelta( bytes32[] memory publicInputs, Fr beta, @@ -1455,7 +1462,7 @@ contract HonkVerifier is IVerifier Fr denominatorAcc = gamma - (beta * FrLib.from(offset + 1)); { - for (uint256 i = 0; i < NUMBER_OF_PUBLIC_INPUTS; i++) { + for (uint256 i = 0; i < numPublicInputs; i++) { Fr pubInput = FrLib.fromBytes32(publicInputs[i]); numerator = numerator * (numeratorAcc + pubInput); @@ -1477,7 +1484,7 @@ contract HonkVerifier is IVerifier Fr powPartialEvaluation = Fr.wrap(1); // We perform sumcheck reductions over log n rounds ( the multivariate degree ) - for (uint256 round; round < LOG_N; ++round) { + for 
(uint256 round; round < logN; ++round) { Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate = proof.sumcheckUnivariates[round]; bool valid = checkSum(roundUnivariate, roundTarget); if (!valid) revert SumcheckFailed(); @@ -1509,6 +1516,7 @@ contract HonkVerifier is IVerifier view returns (Fr targetSum) { + // TODO: inline Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_LAGRANGE_DENOMINATORS = [ Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffec51), Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000002d0), @@ -1589,7 +1597,7 @@ contract HonkVerifier is IVerifier { ShpleminiIntermediates memory mem; // stack - // - Compute vector (r, r², ... , r²⁽ⁿ⁻¹⁾), where n = log_circuit_size, I think this should be CONST_PROOF_SIZE + // - Compute vector (r, r², ... , r²⁽ⁿ⁻¹⁾), where n = log_circuit_size Fr[CONST_PROOF_SIZE_LOG_N] memory powers_of_evaluation_challenge = computeSquares(tp.geminiR); // Arrays hold values that will be linearly combined for the gemini and shplonk batch openings @@ -1719,7 +1727,7 @@ contract HonkVerifier is IVerifier mem.batchingChallenge = tp.shplonkNu.sqr(); for (uint256 i; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { - bool dummy_round = i >= (LOG_N - 1); + bool dummy_round = i >= (logN - 1); Fr scalingFactor = Fr.wrap(0); if (!dummy_round) { @@ -1775,7 +1783,7 @@ contract HonkVerifier is IVerifier for (uint256 i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { Fr round_inverted_denominator = Fr.wrap(0); - if (i <= LOG_N + 1) { + if (i <= logN + 1) { round_inverted_denominator = (eval_challenge + eval_challenge_powers[i]).invert(); } inverse_vanishing_evals[i + 1] = round_inverted_denominator; @@ -1800,7 +1808,7 @@ contract HonkVerifier is IVerifier // Divide by the denominator batchedEvalRoundAcc = batchedEvalRoundAcc * (challengePower * (Fr.wrap(1) - u) + u).invert(); - bool is_dummy_round = (i > LOG_N); + bool is_dummy_round = (i > logN); if (!is_dummy_round) { batchedEvalAccumulator = batchedEvalRoundAcc; } @@ -1874,16 +1882,11 @@ contract HonkVerifier is IVerifier } } -// Conversion util - Duplicated as we cannot template LOG_N -function convertPoints(Honk.G1ProofPoint[LOG_N + 1] memory commitments) - pure - returns (Honk.G1Point[LOG_N + 1] memory converted) -{ - for (uint256 i; i < LOG_N + 1; ++i) { - converted[i] = convertProofPoint(commitments[i]); +contract HonkVerifier is BaseHonkVerifier(N, LOG_N, NUMBER_OF_PUBLIC_INPUTS) { + function loadVerificationKey() internal pure override returns (Honk.VerificationKey memory) { + return HonkVerificationKey.loadVerificationKey(); } } - )"; inline std::string get_honk_solidity_verifier(auto const& verification_key) diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/bn254.hpp b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/bn254.hpp index 8b2308d53b78..b8ea6f839c81 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/bn254.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/bn254.hpp @@ -23,5 +23,17 @@ class BN254 { // classes are instantiated with "native" curve types. Eventually, the verifier classes will be instantiated only // with stdlib types, and "native" verification will be acheived via a simulated builder. static constexpr bool is_stdlib_type = false; + + // Required by SmallSubgroupIPA argument. 
This constant needs to divide the size of the multiplicative subgroup of + the ScalarField and satisfy SUBGROUP_SIZE > CONST_PROOF_SIZE_LOG_N * Flavor::BATCHED_RELATION_PARTIAL_LENGTH, for + each BN254-Flavor, since in every round of Sumcheck, the prover sends Flavor::BATCHED_RELATION_PARTIAL_LENGTH + elements to the verifier. + static constexpr size_t SUBGROUP_SIZE = 256; + // BN254's scalar field has a multiplicative subgroup of order 2^28. It is generated by 5. The generator below is + 5^{2^{20}}. To avoid inversion in the recursive verifier, we also store the inverse of the chosen generator. + static constexpr ScalarField subgroup_generator = + ScalarField(uint256_t("0x07b0c561a6148404f086204a9f36ffb0617942546750f230c893619174a57a76")); + static constexpr ScalarField subgroup_generator_inverse = + ScalarField(uint256_t("0x204bd3277422fad364751ad938e2b5e6a54cf8c68712848a692c553d0329f5d6")); }; } // namespace bb::curve \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/grumpkin.hpp b/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/grumpkin.hpp index cbba4dc346c0..f195fbd7e53f 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/grumpkin.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/grumpkin/grumpkin.hpp @@ -54,5 +54,18 @@ class Grumpkin { // classes are instantiated with "native" curve types. Eventually, the verifier classes will be instantiated only // with stdlib types, and "native" verification will be acheived via a simulated builder. static constexpr bool is_stdlib_type = false; + + // Required by SmallSubgroupIPA argument. This constant needs to divide the size of the multiplicative subgroup of + the ScalarField and satisfy SUBGROUP_SIZE > CONST_PROOF_SIZE_LOG_N * 3, since in every round of Sumcheck, the + prover sends 3 elements to the verifier. + static constexpr size_t SUBGROUP_SIZE = 87; + // The generator below was derived by factoring r - 1 into primes, where r is the modulus of the Grumpkin scalar + field. A random field element was sampled and raised to the power (r - 1) / (3 * 29). We verified that the + resulting element does not generate a smaller subgroup by further raising it to the powers of 3 and 29. To + optimize the recursive verifier and avoid costly inversions, we also precompute and store its inverse.
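Aside: the order-87 claim for the generator declared just below (together with its stored inverse) can be rechecked outside the library. A minimal standalone sketch in Python, relying only on facts quoted in the comments above (r is the Grumpkin scalar field modulus, i.e. the BN254 base field modulus, and 3 and 29 divide r - 1):

    # Recheck of the Grumpkin SmallSubgroupIPA constants (87 = 3 * 29).
    r = 21888242871839275222246405745257275088696311157297823662689037894645226208583
    g = 0x147c647c09fb639514909e9f0513f31ec1a523bf8a0880bc7c24fbc962a9586b
    g_inv = 0x0c68e27477b5e78cfab790bd3b59806fa871771f71ec7452cde5384f6e3a1988
    assert (r - 1) % 87 == 0                          # 3 and 29 both divide r - 1
    assert pow(g, 87, r) == 1                         # g lies in the order-87 subgroup
    assert pow(g, 3, r) != 1 and pow(g, 29, r) != 1   # g generates no proper subgroup
    assert g * g_inv % r == 1                         # stored inverse is consistent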
+ static constexpr ScalarField subgroup_generator = + ScalarField(uint256_t("0x147c647c09fb639514909e9f0513f31ec1a523bf8a0880bc7c24fbc962a9586b")); + static constexpr ScalarField subgroup_generator_inverse = + ScalarField("0x0c68e27477b5e78cfab790bd3b59806fa871771f71ec7452cde5384f6e3a1988"); }; } // namespace bb::curve \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index 9648f2aab23e..7afbebf92d63 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -955,11 +955,17 @@ class ECCVMFlavor { Commitment transcript_msm_count_at_transition_inverse_comm; Commitment z_perm_comm; Commitment lookup_inverses_comm; - std::vector libra_commitments; + Commitment libra_concatenation_commitment; FF libra_sum; std::vector> sumcheck_univariates; - std::vector libra_evaluations; + FF libra_claimed_evaluation; + Commitment libra_big_sum_commitment; + Commitment libra_quotient_commitment; std::array sumcheck_evaluations; + FF libra_concatenation_eval; + FF libra_shifted_big_sum_eval; + FF libra_big_sum_eval; + FF libra_quotient_eval; Commitment hiding_polynomial_commitment; FF hiding_polynomial_eval; std::vector gemini_fold_comms; @@ -1160,11 +1166,8 @@ class ECCVMFlavor { NativeTranscript::proof_data, num_frs_read); z_perm_comm = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); - size_t log_circuit_size = static_cast(numeric::get_msb(circuit_size)); - for (size_t i = 0; i < log_circuit_size; i++) { - libra_commitments.emplace_back(NativeTranscript::template deserialize_from_buffer( - NativeTranscript::proof_data, num_frs_read)); - }; + libra_concatenation_commitment = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); libra_sum = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { @@ -1173,12 +1176,13 @@ class ECCVMFlavor { NativeTranscript::proof_data, num_frs_read)); } - for (size_t i = 0; i < log_circuit_size; i++) { - libra_evaluations.emplace_back( - NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read)); - } + libra_claimed_evaluation = NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); sumcheck_evaluations = NativeTranscript::template deserialize_from_buffer>( NativeTranscript::proof_data, num_frs_read); + libra_big_sum_commitment = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); + libra_quotient_commitment = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); hiding_polynomial_commitment = deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); hiding_polynomial_eval = deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); @@ -1189,6 +1193,10 @@ class ECCVMFlavor { for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { gemini_fold_evals.push_back(deserialize_from_buffer(proof_data, num_frs_read)); } + libra_concatenation_eval = deserialize_from_buffer(proof_data, num_frs_read); + libra_shifted_big_sum_eval = deserialize_from_buffer(proof_data, num_frs_read); + libra_big_sum_eval = deserialize_from_buffer(proof_data, num_frs_read); + libra_quotient_eval = deserialize_from_buffer(proof_data, num_frs_read); shplonk_q_comm = deserialize_from_buffer(proof_data, num_frs_read); translation_eval_op 
= @@ -1208,7 +1216,6 @@ class ECCVMFlavor { void serialize_full_transcript() { size_t old_proof_length = NativeTranscript::proof_data.size(); - size_t log_circuit_size = static_cast(numeric::get_msb(circuit_size)); NativeTranscript::proof_data.clear(); @@ -1313,9 +1320,7 @@ class ECCVMFlavor { NativeTranscript::template serialize_to_buffer(lookup_inverses_comm, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(z_perm_comm, NativeTranscript::proof_data); - for (size_t i = 0; i < log_circuit_size; ++i) { - NativeTranscript::template serialize_to_buffer(libra_commitments[i], NativeTranscript::proof_data); - } + NativeTranscript::template serialize_to_buffer(libra_concatenation_commitment, proof_data); NativeTranscript::template serialize_to_buffer(libra_sum, NativeTranscript::proof_data); @@ -1323,10 +1328,11 @@ class ECCVMFlavor { NativeTranscript::template serialize_to_buffer(sumcheck_univariates[i], NativeTranscript::proof_data); } - for (size_t i = 0; i < log_circuit_size; ++i) { - NativeTranscript::template serialize_to_buffer(libra_evaluations[i], NativeTranscript::proof_data); - } + NativeTranscript::template serialize_to_buffer(libra_claimed_evaluation, proof_data); + NativeTranscript::template serialize_to_buffer(sumcheck_evaluations, NativeTranscript::proof_data); + NativeTranscript::template serialize_to_buffer(libra_big_sum_commitment, proof_data); + NativeTranscript::template serialize_to_buffer(libra_quotient_commitment, proof_data); NativeTranscript::template serialize_to_buffer(hiding_polynomial_commitment, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(hiding_polynomial_eval, NativeTranscript::proof_data); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { @@ -1335,6 +1341,10 @@ class ECCVMFlavor { for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { NativeTranscript::template serialize_to_buffer(gemini_fold_evals[i], proof_data); } + NativeTranscript::template serialize_to_buffer(libra_concatenation_eval, proof_data); + NativeTranscript::template serialize_to_buffer(libra_shifted_big_sum_eval, proof_data); + NativeTranscript::template serialize_to_buffer(libra_big_sum_eval, proof_data); + NativeTranscript::template serialize_to_buffer(libra_quotient_eval, proof_data); NativeTranscript::template serialize_to_buffer(shplonk_q_comm, proof_data); NativeTranscript::template serialize_to_buffer(translation_eval_op, NativeTranscript::proof_data); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp index 4a4cfe0a6d23..35f2e3191751 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp @@ -3,6 +3,7 @@ #include "barretenberg/commitment_schemes/commitment_key.hpp" #include "barretenberg/commitment_schemes/shplonk/shplemini.hpp" #include "barretenberg/commitment_schemes/shplonk/shplonk.hpp" +#include "barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp" #include "barretenberg/common/ref_array.hpp" #include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/plonk_honk_shared/library/grand_product_library.hpp" @@ -103,7 +104,7 @@ void ECCVMProver::execute_relation_check_rounds() gate_challenges[idx] = transcript->template get_challenge("Sumcheck:gate_challenge_" + std::to_string(idx)); } - zk_sumcheck_data = ZKSumcheckData(key->log_circuit_size, transcript, key->commitment_key); + zk_sumcheck_data = 
ZKData(key->log_circuit_size, transcript, key->commitment_key); sumcheck_output = sumcheck.prove(key->polynomials, relation_parameters, alpha, gate_challenges, zk_sumcheck_data); } @@ -121,6 +122,11 @@ void ECCVMProver::execute_pcs_rounds() using Shplonk = ShplonkProver_; using OpeningClaim = ProverOpeningClaim; + SmallSubgroupIPA small_subgroup_ipa_prover(zk_sumcheck_data, + sumcheck_output.challenge, + sumcheck_output.claimed_libra_evaluation, + transcript, + key->commitment_key); // Execute the Shplemini (Gemini + Shplonk) protocol to produce a univariate opening claim for the multilinear // evaluations produced by Sumcheck const OpeningClaim multivariate_to_univariate_opening_claim = @@ -130,8 +136,7 @@ void ECCVMProver::execute_pcs_rounds() sumcheck_output.challenge, key->commitment_key, transcript, - zk_sumcheck_data.libra_univariates_monomial, - sumcheck_output.claimed_libra_evaluations); + small_subgroup_ipa_prover.get_witness_polynomials()); // Get the challenge at which we evaluate all transcript polynomials as univariates evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp index dab9d015ab2b..4217b3818622 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp @@ -1,4 +1,5 @@ #pragma once +#include "barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp" #include "barretenberg/eccvm/eccvm_flavor.hpp" #include "barretenberg/goblin/translation_evaluations.hpp" #include "barretenberg/honk/proof_system/types/proof.hpp" @@ -25,6 +26,8 @@ class ECCVMProver { using Transcript = typename Flavor::Transcript; using TranslationEvaluations = bb::TranslationEvaluations_; using CircuitBuilder = typename Flavor::CircuitBuilder; + using ZKData = ZKSumcheckData; + using SmallSubgroupIPA = SmallSubgroupIPAProver; explicit ECCVMProver(CircuitBuilder& builder, const std::shared_ptr& transcript = std::make_shared(), @@ -53,7 +56,7 @@ class ECCVMProver { std::shared_ptr key; CommitmentLabels commitment_labels; - ZKSumcheckData zk_sumcheck_data; + ZKData zk_sumcheck_data; Polynomial batched_quotient_Q; // batched quotient poly computed by Shplonk FF nu_challenge; // needed in both Shplonk rounds diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp index c285c8112e89..8166d607aca2 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp @@ -26,10 +26,9 @@ class ECCVMTranscriptTests : public ::testing::Test { * * @return TranscriptManifest */ - TranscriptManifest construct_eccvm_honk_manifest(size_t circuit_size) + TranscriptManifest construct_eccvm_honk_manifest() { TranscriptManifest manifest_expected; - auto log_n = numeric::get_msb(circuit_size); size_t MAX_PARTIAL_RELATION_LENGTH = Flavor::BATCHED_RELATION_PARTIAL_LENGTH; // Size of types is number of bb::frs needed to represent the type size_t frs_per_Fr = bb::field_conversion::calc_num_bn254_frs(); @@ -139,10 +138,7 @@ class ECCVMTranscriptTests : public ::testing::Test { } round++; - for (size_t i = 0; i < log_n; i++) { - std::string idx = std::to_string(i); - manifest_expected.add_entry(round, "Libra:commitment_" + idx, frs_per_G); - } + manifest_expected.add_entry(round, 
"Libra:concatenation_commitment", frs_per_G); manifest_expected.add_entry(round, "Libra:Sum", frs_per_Fr); // get the challenge for the ZK Sumcheck claim manifest_expected.add_challenge(round, "Libra:Challenge"); @@ -157,13 +153,10 @@ class ECCVMTranscriptTests : public ::testing::Test { round++; - for (size_t i = 0; i < log_n; i++) { - std::string idx = std::to_string(i); - manifest_expected.add_entry(round, "Libra:evaluation_" + idx, frs_per_Fr); - } - // manifest_expected.add_entry(round, "Libra:evaluation", log_n * frs_per_Fr); - + manifest_expected.add_entry(round, "Libra:claimed_evaluation", frs_per_Fr); manifest_expected.add_entry(round, "Sumcheck:evaluations", frs_per_evals); + manifest_expected.add_entry(round, "Libra:big_sum_commitment", frs_per_G); + manifest_expected.add_entry(round, "Libra:quotient_commitment", frs_per_G); manifest_expected.add_entry(round, "Gemini:masking_poly_comm", frs_per_G); manifest_expected.add_entry(round, "Gemini:masking_poly_eval", frs_per_Fr); @@ -180,7 +173,10 @@ class ECCVMTranscriptTests : public ::testing::Test { std::string idx = std::to_string(i); manifest_expected.add_entry(round, "Gemini:a_" + idx, frs_per_Fr); } - + manifest_expected.add_entry(round, "Libra:concatenation_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:shifted_big_sum_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:big_sum_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:quotient_eval", frs_per_Fr); manifest_expected.add_challenge(round, "Shplonk:nu"); round++; manifest_expected.add_entry(round, "Shplonk:Q", frs_per_G); @@ -285,7 +281,7 @@ TEST_F(ECCVMTranscriptTests, ProverManifestConsistency) ECCVMProof proof = prover.construct_proof(); // Check that the prover generated manifest agrees with the manifest hard coded in this suite - auto manifest_expected = this->construct_eccvm_honk_manifest(prover.key->circuit_size); + auto manifest_expected = this->construct_eccvm_honk_manifest(); auto prover_manifest = prover.transcript->get_manifest(); // Note: a manifest can be printed using manifest.print() diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp index f4a106833357..b2a260a8be7f 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp @@ -55,15 +55,16 @@ bool ECCVMVerifier::verify_proof(const ECCVMProof& proof) } // Receive commitments to Libra masking polynomials - std::vector libra_commitments; - for (size_t idx = 0; idx < log_circuit_size; idx++) { - Commitment libra_commitment = - transcript->receive_from_prover("Libra:commitment_" + std::to_string(idx)); - libra_commitments.push_back(libra_commitment); - } + std::array libra_commitments = {}; + + libra_commitments[0] = transcript->template receive_from_prover("Libra:concatenation_commitment"); - auto [multivariate_challenge, claimed_evaluations, libra_evaluations, sumcheck_verified] = + auto [multivariate_challenge, claimed_evaluations, libra_evaluation, sumcheck_verified] = sumcheck.verify(relation_parameters, alpha, gate_challenges); + + libra_commitments[1] = transcript->template receive_from_prover("Libra:big_sum_commitment"); + libra_commitments[2] = transcript->template receive_from_prover("Libra:quotient_commitment"); + // If Sumcheck did not verify, return false if (sumcheck_verified.has_value() && !sumcheck_verified.value()) { vinfo("eccvm sumcheck failed"); @@ -71,6 +72,7 @@ bool 
ECCVMVerifier::verify_proof(const ECCVMProof& proof) } // Compute the Shplemini accumulator consisting of the Shplonk evaluation and the commitments and scalars vector // produced by the unified protocol + bool consistency_checked = true; BatchOpeningClaim sumcheck_batch_opening_claims = Shplemini::compute_batch_opening_claim(circuit_size, commitments.get_unshifted(), @@ -81,8 +83,10 @@ bool ECCVMVerifier::verify_proof(const ECCVMProof& proof) key->pcs_verification_key->get_g1_identity(), transcript, Flavor::REPEATED_COMMITMENTS, - RefVector(libra_commitments), - libra_evaluations); + Flavor::HasZK, + &consistency_checked, + libra_commitments, + libra_evaluation); // Reduce the accumulator to a single opening claim const OpeningClaim multivariate_to_univariate_opening_claim = @@ -132,6 +136,6 @@ bool ECCVMVerifier::verify_proof(const ECCVMProof& proof) PCS::reduce_verify(key->pcs_verification_key, batch_opening_claim, ipa_transcript); vinfo("eccvm sumcheck verified?: ", sumcheck_verified.value()); vinfo("batch opening verified?: ", batched_opening_verified); - return sumcheck_verified.value() && batched_opening_verified; + return sumcheck_verified.value() && batched_opening_verified && consistency_checked; } } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/examples/join_split/join_split.test.cpp b/barretenberg/cpp/src/barretenberg/examples/join_split/join_split.test.cpp index 1c516b7ae9d5..b02d82664d2b 100644 --- a/barretenberg/cpp/src/barretenberg/examples/join_split/join_split.test.cpp +++ b/barretenberg/cpp/src/barretenberg/examples/join_split/join_split.test.cpp @@ -703,7 +703,7 @@ TEST_F(join_split_tests, test_0_input_notes_and_detect_circuit_change) // The below part detects any changes in the join-split circuit constexpr size_t DYADIC_CIRCUIT_SIZE = 1 << 16; - constexpr uint256_t CIRCUIT_HASH("0x9ffbbd2c3ebd45cba861d3da6f75e2f73c448cc5747c9e34b44d6bc8a90b4a9c"); + constexpr uint256_t CIRCUIT_HASH("0x48687216f00a81d2a0f64f0a10cce056fce2ad13c47f8329229eb3712d3f7566"); const uint256_t circuit_hash = circuit.hash_circuit(); // circuit is finalized now diff --git a/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp b/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp index eb3fa6a59948..9e6b652d0dc0 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp @@ -322,10 +322,11 @@ template constexpr auto create_tuple_of_arrays_of_values() // Forward declare honk flavors namespace bb { class UltraFlavor; -class UltraFlavorWithZK; +class UltraZKFlavor; class UltraRollupFlavor; class ECCVMFlavor; class UltraKeccakFlavor; +class UltraKeccakZKFlavor; class MegaFlavor; class MegaZKFlavor; class TranslatorFlavor; @@ -361,10 +362,10 @@ template concept IsPlonkFlavor = IsAnyOf; template -concept IsUltraPlonkOrHonk = IsAnyOf; +concept IsUltraPlonkOrHonk = IsAnyOf; template -concept IsUltraFlavor = IsAnyOf; +concept IsUltraFlavor = IsAnyOf; template concept IsMegaFlavor = IsAnyOf concept IsECCVMRecursiveFlavor = IsAnyOf concept IsFoldingFlavor = IsAnyOf, diff --git a/barretenberg/cpp/src/barretenberg/smt_verification/circuit/ultra_circuit.cpp b/barretenberg/cpp/src/barretenberg/smt_verification/circuit/ultra_circuit.cpp index 01cb37285241..155a17c8c63b 100644 --- a/barretenberg/cpp/src/barretenberg/smt_verification/circuit/ultra_circuit.cpp +++ b/barretenberg/cpp/src/barretenberg/smt_verification/circuit/ultra_circuit.cpp @@ -95,7 +95,7 @@ size_t UltraCircuit::handle_arithmetic_relation(size_t cursor, size_t idx) return 
cursor + 1; } - STerm res = this->symbolic_vars[0]; + STerm res = this->symbolic_vars[this->variable_names_inverse["zero"]]; static const bb::fr neg_half = bb::fr(-2).invert(); if (!q_arith.is_zero()) { diff --git a/barretenberg/cpp/src/barretenberg/smt_verification/solver/solver.cpp b/barretenberg/cpp/src/barretenberg/smt_verification/solver/solver.cpp index 2219a05b0d44..943291bbf63c 100644 --- a/barretenberg/cpp/src/barretenberg/smt_verification/solver/solver.cpp +++ b/barretenberg/cpp/src/barretenberg/smt_verification/solver/solver.cpp @@ -172,6 +172,9 @@ std::string Solver::stringify_term(const cvc5::Term& term, bool parenthesis) child_parenthesis = false; break; + case cvc5::Kind::BITVECTOR_UDIV: + op = " / "; + break; case cvc5::Kind::LT: case cvc5::Kind::BITVECTOR_ULT: op = " < "; break; @@ -187,6 +190,9 @@ std::string Solver::stringify_term(const cvc5::Term& term, bool parenthesis) case cvc5::Kind::BITVECTOR_UGE: op = " >= "; break; + case cvc5::Kind::BITVECTOR_UREM: + op = " % "; + break; case cvc5::Kind::XOR: case cvc5::Kind::BITVECTOR_XOR: op = " ^ "; diff --git a/barretenberg/cpp/src/barretenberg/smt_verification/terms/term.cpp b/barretenberg/cpp/src/barretenberg/smt_verification/terms/term.cpp index 8d2bc5967037..cb55e0eed65c 100644 --- a/barretenberg/cpp/src/barretenberg/smt_verification/terms/term.cpp +++ b/barretenberg/cpp/src/barretenberg/smt_verification/terms/term.cpp @@ -1,4 +1,5 @@ #include "barretenberg/smt_verification/terms/term.hpp" +#include "term.hpp" namespace smt_terms { @@ -265,6 +266,16 @@ void STerm::operator>=(const bb::fr& other) const this->solver->assertFormula(ge); } +STerm STerm::operator%(const STerm& other) const +{ + if (!this->operations.contains(OpType::MOD)) { + info("MOD is not compatible with ", this->type); + return *this; + } + cvc5::Term res = solver->term_manager.mkTerm(this->operations.at(OpType::MOD), { this->term, other.term }); + return { res, this->solver, this->type }; +} + STerm STerm::operator^(const STerm& other) const { if (!this->operations.contains(OpType::XOR)) { @@ -313,6 +324,38 @@ STerm STerm::operator|(const STerm& other) const return { res, this->solver, this->type }; } +void STerm::operator<(const STerm& other) const +{ + STerm left = *this; + STerm right = other; + left = this->type == TermType::FFITerm && left.term.getNumChildren() > 1 ? left.mod() : left; + right = this->type == TermType::FFITerm && right.term.getNumChildren() > 1 ? right.mod() : right; + + cvc5::Term eq = this->solver->term_manager.mkTerm(this->operations.at(OpType::LT), { left.term, right.term }); + this->solver->assertFormula(eq); +} + +void STerm::operator>(const STerm& other) const +{ + STerm left = *this; + STerm right = other; + left = this->type == TermType::FFITerm && left.term.getNumChildren() > 1 ? left.mod() : left; + right = this->type == TermType::FFITerm && right.term.getNumChildren() > 1 ?
right.mod() : right; + + cvc5::Term eq = this->solver->term_manager.mkTerm(this->operations.at(OpType::GT), { left.term, right.term }); + this->solver->assertFormula(eq); +} + +STerm STerm::operator~() const +{ + if (!this->operations.contains(OpType::NOT)) { + info("NOT is not compatible with ", this->type); + return *this; + } + cvc5::Term res = solver->term_manager.mkTerm(this->operations.at(OpType::NOT), { this->term }); + return { res, this->solver, this->type }; +} + void STerm::operator|=(const STerm& other) { if (!this->operations.contains(OpType::OR)) { diff --git a/barretenberg/cpp/src/barretenberg/smt_verification/terms/term.hpp b/barretenberg/cpp/src/barretenberg/smt_verification/terms/term.hpp index 35aff409795f..a9ac3b56a474 100644 --- a/barretenberg/cpp/src/barretenberg/smt_verification/terms/term.hpp +++ b/barretenberg/cpp/src/barretenberg/smt_verification/terms/term.hpp @@ -15,7 +15,7 @@ using namespace smt_solver; enum class TermType { FFTerm, FFITerm, BVTerm, ITerm }; std::ostream& operator<<(std::ostream& os, TermType type); -enum class OpType : int32_t { ADD, SUB, MUL, DIV, NEG, XOR, AND, OR, GT, GE, LT, LE, MOD, RSH, LSH, ROTR, ROTL }; +enum class OpType : int32_t { ADD, SUB, MUL, DIV, NEG, XOR, AND, OR, GT, GE, LT, LE, MOD, RSH, LSH, ROTR, ROTL, NOT }; /** * @brief precomputed map that contains allowed @@ -73,7 +73,9 @@ const std::unordered_map> typed { OpType::ROTL, cvc5::Kind::BITVECTOR_ROTATE_LEFT }, { OpType::ROTR, cvc5::Kind::BITVECTOR_ROTATE_RIGHT }, { OpType::MOD, cvc5::Kind::BITVECTOR_UREM }, - { OpType::DIV, cvc5::Kind::BITVECTOR_UDIV } } } + { OpType::DIV, cvc5::Kind::BITVECTOR_UDIV }, + { OpType::NOT, cvc5::Kind::BITVECTOR_NOT }, + } } }; /** @@ -160,6 +162,10 @@ class STerm { void operator&=(const STerm& other); STerm operator|(const STerm& other) const; void operator|=(const STerm& other); + void operator<(const STerm& other) const; + void operator>(const STerm& other) const; + STerm operator%(const STerm& other) const; + STerm operator~() const; STerm operator<<(const uint32_t& n) const; void operator<<=(const uint32_t& n); STerm operator>>(const uint32_t& n) const; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.cpp index a94224828541..248fa30346e1 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.cpp @@ -76,18 +76,19 @@ ECCVMRecursiveVerifier_::verify_proof(const ECCVMProof& proof) } // Receive commitments to Libra masking polynomials - std::vector libra_commitments; - for (size_t idx = 0; idx < log_circuit_size; idx++) { - Commitment libra_commitment = - transcript->template receive_from_prover("Libra:commitment_" + std::to_string(idx)); - libra_commitments.push_back(libra_commitment); - } + std::array libra_commitments = {}; + + libra_commitments[0] = transcript->template receive_from_prover("Libra:concatenation_commitment"); - auto [multivariate_challenge, claimed_evaluations, libra_evaluations, sumcheck_verified] = + auto [multivariate_challenge, claimed_evaluations, claimed_libra_evaluation, sumcheck_verified] = sumcheck.verify(relation_parameters, alpha, gate_challenges); + libra_commitments[1] = transcript->template receive_from_prover("Libra:big_sum_commitment"); + libra_commitments[2] = transcript->template receive_from_prover("Libra:quotient_commitment"); + // Compute the 
Shplemini accumulator consisting of the Shplonk evaluation and the commitments and scalars vector // produced by the unified protocol + bool consistency_checked = true; BatchOpeningClaim sumcheck_batch_opening_claims = Shplemini::compute_batch_opening_claim(circuit_size, commitments.get_unshifted(), @@ -98,8 +99,10 @@ ECCVMRecursiveVerifier_::verify_proof(const ECCVMProof& proof) key->pcs_verification_key->get_g1_identity(), transcript, Flavor::REPEATED_COMMITMENTS, - RefVector(libra_commitments), - libra_evaluations); + Flavor::HasZK, + &consistency_checked, + libra_commitments, + claimed_libra_evaluation); // Reduce the accumulator to a single opening claim const OpeningClaim multivariate_to_univariate_opening_claim = diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/decider_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/decider_recursive_verifier.cpp index 162a890e5ff4..3be2eb346a55 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/decider_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/decider_recursive_verifier.cpp @@ -39,7 +39,8 @@ std::array DeciderRecursiveVerifier_:: multivariate_challenge, Commitment::one(builder), transcript, - Flavor::REPEATED_COMMITMENTS); + Flavor::REPEATED_COMMITMENTS, + Flavor::HasZK); auto pairing_points = PCS::reduce_verify_batch_opening_claim(opening_claim, transcript); return pairing_points; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.cpp index 3caa326d5dc1..058a12342909 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.cpp @@ -49,16 +49,14 @@ UltraRecursiveVerifier_::Output UltraRecursiveVerifier_::verify_ Output output; StdlibProof honk_proof; if constexpr (HasIPAAccumulator) { - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1168): Add formula to flavor - const size_t HONK_PROOF_LENGTH = 469; + const size_t HONK_PROOF_LENGTH = Flavor::NativeFlavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS - IPA_PROOF_LENGTH; const size_t num_public_inputs = static_cast(proof[1].get_value()); // The extra calculation is for the IPA proof length. // TODO(https://github.com/AztecProtocol/barretenberg/issues/1182): Handle in ProofSurgeon. 
- ASSERT(proof.size() == HONK_PROOF_LENGTH + (1 + 4 * (CONST_ECCVM_LOG_N) + 2 + 2) + num_public_inputs - - (PAIRING_POINT_ACCUMULATOR_SIZE + IPA_CLAIM_SIZE)); + ASSERT(proof.size() == HONK_PROOF_LENGTH + IPA_PROOF_LENGTH + num_public_inputs); // split out the ipa proof - const std::ptrdiff_t honk_proof_with_pub_inputs_length = static_cast( - HONK_PROOF_LENGTH + num_public_inputs - (PAIRING_POINT_ACCUMULATOR_SIZE + IPA_CLAIM_SIZE)); + const std::ptrdiff_t honk_proof_with_pub_inputs_length = + static_cast(HONK_PROOF_LENGTH + num_public_inputs); output.ipa_proof = StdlibProof(proof.begin() + honk_proof_with_pub_inputs_length, proof.end()); honk_proof = StdlibProof(proof.begin(), proof.begin() + honk_proof_with_pub_inputs_length); } else { @@ -108,24 +106,22 @@ UltraRecursiveVerifier_::Output UltraRecursiveVerifier_::verify_ auto sumcheck = Sumcheck(log_circuit_size, transcript); // Receive commitments to Libra masking polynomials - std::vector libra_commitments = {}; + std::array libra_commitments = {}; + FF libra_evaluation{ 0 }; if constexpr (Flavor::HasZK) { - for (size_t idx = 0; idx < log_circuit_size; idx++) { - Commitment libra_commitment = - transcript->template receive_from_prover("Libra:commitment_" + std::to_string(idx)); - libra_commitments.push_back(libra_commitment); - }; + libra_commitments[0] = transcript->template receive_from_prover("Libra:concatenation_commitment"); } SumcheckOutput sumcheck_output = sumcheck.verify(verification_key->relation_parameters, verification_key->alphas, gate_challenges); // For MegaZKFlavor: the sumcheck output contains claimed evaluations of the Libra polynomials - std::vector libra_evaluations = {}; if constexpr (Flavor::HasZK) { - libra_evaluations = std::move(sumcheck_output.claimed_libra_evaluations); + libra_evaluation = std::move(sumcheck_output.claimed_libra_evaluation); + libra_commitments[1] = transcript->template receive_from_prover("Libra:big_sum_commitment"); + libra_commitments[2] = transcript->template receive_from_prover("Libra:quotient_commitment"); } - // Execute Shplemini to produce a batch opening claim subsequently verified by a univariate PCS + bool consistency_checked = true; const BatchOpeningClaim opening_claim = Shplemini::compute_batch_opening_claim(key->circuit_size, commitments.get_unshifted(), @@ -136,8 +132,10 @@ UltraRecursiveVerifier_::Output UltraRecursiveVerifier_::verify_ Commitment::one(builder), transcript, Flavor::REPEATED_COMMITMENTS, - RefVector(libra_commitments), - libra_evaluations); + Flavor::HasZK, + &consistency_checked, + libra_commitments, + libra_evaluation); auto pairing_points = PCS::reduce_verify_batch_opening_claim(opening_claim, transcript); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.test.cpp index bb7c633f120c..5e854a677a1a 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.test.cpp @@ -77,29 +77,11 @@ template class RecursiveVerifierTest : public testing PairingPointAccumulatorIndices agg_obj_indices = stdlib::recursion::init_default_agg_obj_indices(builder); builder.add_pairing_point_accumulator(agg_obj_indices); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1184): Move to IPA class.
if constexpr (HasIPAAccumulator) { - using NativeCurve = curve::Grumpkin; - using Curve = stdlib::grumpkin; - auto ipa_transcript = std::make_shared(); - auto ipa_commitment_key = std::make_shared>(1 << CONST_ECCVM_LOG_N); - size_t n = 4; - auto poly = Polynomial(n); - for (size_t i = 0; i < n; i++) { - poly.at(i) = fq::random_element(); - } - fq x = fq::random_element(); - fq eval = poly.evaluate(x); - auto commitment = ipa_commitment_key->commit(poly); - const OpeningPair opening_pair = { x, eval }; - IPA::compute_opening_proof(ipa_commitment_key, { poly, opening_pair }, ipa_transcript); - - auto stdlib_comm = Curve::Group::from_witness(&builder, commitment); - auto stdlib_x = Curve::ScalarField::from_witness(&builder, x); - auto stdlib_eval = Curve::ScalarField::from_witness(&builder, eval); - OpeningClaim stdlib_opening_claim{ { stdlib_x, stdlib_eval }, stdlib_comm }; + auto [stdlib_opening_claim, ipa_proof] = + IPA>::create_fake_ipa_claim_and_proof(builder); builder.add_ipa_claim(stdlib_opening_claim.get_witness_indices()); - builder.ipa_proof = ipa_transcript->export_proof(); + builder.ipa_proof = ipa_proof; } return builder; }; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/bn254.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/bn254.hpp index c15d39d586d3..5feb3b6e872a 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/bn254.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/bn254.hpp @@ -41,5 +41,15 @@ template struct bn254 { using bigfr_ct = bigfield; using g1_bigfr_ct = element; + // Required by SmallSubgroupIPA argument + static constexpr size_t SUBGROUP_SIZE = 256; + // BN254's scalar field has a multiplicative subgroup of order 2^28. It is generated by 5. The generator below is + 5^{2^{20}}. To avoid inversion in the recursive verifier, we also store its inverse. + static constexpr bb::fr subgroup_generator = + bb::fr(uint256_t("0x07b0c561a6148404f086204a9f36ffb0617942546750f230c893619174a57a76")); + static constexpr bb::fr subgroup_generator_inverse = + bb::fr(uint256_t("0x204bd3277422fad364751ad938e2b5e6a54cf8c68712848a692c553d0329f5d6")); + }; // namespace bn254 + } // namespace bb::stdlib diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/grumpkin.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/grumpkin.hpp index 8f8555886e66..c8630ac735ea 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/grumpkin.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/grumpkin.hpp @@ -32,5 +32,16 @@ template struct grumpkin { using byte_array_ct = byte_array; using bool_ct = bool_t; using uint32_ct = stdlib::uint32; + + // Required by SmallSubgroupIPA argument + static constexpr size_t SUBGROUP_SIZE = 87; + // To find the generator below, we factored r - 1 into primes, where r is the modulus of the Grumpkin scalar field, + sampled a random field element, raised it to (r-1)/(3*29), and ensured that the resulting element is + not generating a smaller subgroup. To avoid inversion in the recursive verifier, we also store its inverse.
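The BN254 constants duplicated above admit the same kind of standalone recheck; note that "5^{2^{20}}" is presumably shorthand for the order-2^28 subgroup generator 5^{(r-1)/2^{28}} raised to the power 2^{20} (5 itself generates the full multiplicative group, so a literal 5^{2^{20}} would have a far larger order). Independent of that reading, the order-256 and inverse claims are directly checkable in Python, and the Grumpkin duplicates just below can be rechecked exactly as shown earlier:

    # Recheck of the BN254 SmallSubgroupIPA constants (SUBGROUP_SIZE = 256 = 2^8).
    r = 21888242871839275222246405745257275088548364400416034343698204186575808495617
    g = 0x07b0c561a6148404f086204a9f36ffb0617942546750f230c893619174a57a76
    g_inv = 0x204bd3277422fad364751ad938e2b5e6a54cf8c68712848a692c553d0329f5d6
    assert (r - 1) % 2**28 == 0                         # the quoted 2-adicity of r - 1
    assert pow(g, 256, r) == 1 and pow(g, 128, r) != 1  # order exactly 2^8
    assert g * g_inv % r == 1                           # stored inverse is consistent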
+ static constexpr bb::fq subgroup_generator = + bb::fq("0x147c647c09fb639514909e9f0513f31ec1a523bf8a0880bc7c24fbc962a9586b"); + static constexpr bb::fq subgroup_generator_inverse = + bb::fq("0x0c68e27477b5e78cfab790bd3b59806fa871771f71ec7452cde5384f6e3a1988"); }; + } // namespace bb::stdlib \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/group/cycle_group.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/group/cycle_group.cpp index ab196d6d1c22..080e5a200877 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/group/cycle_group.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/group/cycle_group.cpp @@ -202,10 +202,11 @@ template cycle_group cycle_group::get_stand * @brief Evaluates a doubling. Does not use Ultra double gate * * @tparam Builder + * @param unused param is due to interface-compatibility with the UltraArithmetic version of `dbl` * @return cycle_group */ template -cycle_group cycle_group::dbl() const +cycle_group cycle_group::dbl([[maybe_unused]] const std::optional /*unused*/) const requires IsNotUltraArithmetic { auto modified_y = field_t::conditional_assign(is_point_at_infinity(), 1, y); @@ -219,35 +220,54 @@ cycle_group cycle_group::dbl() const * @brief Evaluates a doubling. Uses Ultra double gate * * @tparam Builder + * @param hint : value of output point witness, if known ahead of time (used to avoid modular inversions during witgen) * @return cycle_group */ template -cycle_group cycle_group::dbl() const +cycle_group cycle_group::dbl(const std::optional hint) const requires IsUltraArithmetic { // ensure we use a value of y that is not zero. (only happens if point at infinity) // this costs 0 gates if `is_infinity` is a circuit constant auto modified_y = field_t::conditional_assign(is_point_at_infinity(), 1, y).normalize(); - auto x1 = x.get_value(); - auto y1 = modified_y.get_value(); - - // N.B. the formula to derive the witness value for x3 mirrors the formula in elliptic_relation.hpp - // Specifically, we derive x^4 via the Short Weierstrass curve formula `y^2 = x^3 + b` - // i.e. x^4 = x * (y^2 - b) - // We must follow this pattern exactly to support the edge-case where the input is the point at infinity. - auto y_pow_2 = y1.sqr(); - auto x_pow_4 = x1 * (y_pow_2 - Group::curve_b); - auto lambda_squared = (x_pow_4 * 9) / (y_pow_2 * 4); - auto lambda = (x1 * x1 * 3) / (y1 + y1); - auto x3 = lambda_squared - x1 - x1; - auto y3 = lambda * (x1 - x3) - y1; - if (is_constant()) { - auto result = cycle_group(x3, y3, is_point_at_infinity().get_value()); - // We need to manually propagate the origin tag - result.set_origin_tag(get_origin_tag()); - return result; + + cycle_group result; + if (hint.has_value()) { + auto x3 = hint.value().x; + auto y3 = hint.value().y; + if (is_constant()) { + result = cycle_group(x3, y3, is_point_at_infinity()); + // We need to manually propagate the origin tag + result.set_origin_tag(get_origin_tag()); + + return result; + } + + result = cycle_group(witness_t(context, x3), witness_t(context, y3), is_point_at_infinity()); + } else { + auto x1 = x.get_value(); + auto y1 = modified_y.get_value(); + + // N.B. the formula to derive the witness value for x3 mirrors the formula in elliptic_relation.hpp + // Specifically, we derive x^4 via the Short Weierstrass curve formula `y^2 = x^3 + b` + // i.e. x^4 = x * (y^2 - b) + // We must follow this pattern exactly to support the edge-case where the input is the point at infinity. 
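The doubling-witness code that follows implements the textbook affine doubling lambda = 3*x1^2 / (2*y1), with lambda^2 recomputed through the x^4 = x * (y^2 - b) substitution described in the comment above. A toy-scale Python sketch of the same computation (illustrative only, over a small curve y^2 = x^3 + b, not library code):

    # Doubling witness computation on y^2 = x^3 + b over F_p, mirroring the C++.
    def dbl(x1, y1, b, p):
        y_sq = y1 * y1 % p
        x_pow_4 = x1 * (y_sq - b) % p                    # x^4 = x * (y^2 - b)
        lambda_sq = 9 * x_pow_4 * pow(4 * y_sq, -1, p) % p
        lam = 3 * x1 * x1 * pow(2 * y1, -1, p) % p
        assert lam * lam % p == lambda_sq                # both derivations agree
        x3 = (lambda_sq - 2 * x1) % p
        y3 = (lam * (x1 - x3) - y1) % p
        return x3, y3

    p, b = 101, 3
    x3, y3 = dbl(1, 2, b, p)                   # (1, 2) lies on y^2 = x^3 + 3 mod 101
    assert (y3 * y3 - (x3 ** 3 + b)) % p == 0  # the double is on the curve too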
+ auto y_pow_2 = y1.sqr(); + auto x_pow_4 = x1 * (y_pow_2 - Group::curve_b); + auto lambda_squared = (x_pow_4 * 9) / (y_pow_2 * 4); + auto lambda = (x1 * x1 * 3) / (y1 + y1); + auto x3 = lambda_squared - x1 - x1; + auto y3 = lambda * (x1 - x3) - y1; + if (is_constant()) { + auto result = cycle_group(x3, y3, is_point_at_infinity().get_value()); + // We need to manually propagate the origin tag + result.set_origin_tag(get_origin_tag()); + return result; + } + + result = cycle_group(witness_t(context, x3), witness_t(context, y3), is_point_at_infinity()); } - cycle_group result(witness_t(context, x3), witness_t(context, y3), is_point_at_infinity()); + context->create_ecc_dbl_gate(bb::ecc_dbl_gate_{ .x1 = x.get_witness_index(), .y1 = modified_y.normalize().get_witness_index(), @@ -272,7 +292,8 @@ cycle_group cycle_group::dbl() const * @return cycle_group */ template -cycle_group cycle_group::unconditional_add(const cycle_group& other) const +cycle_group cycle_group::unconditional_add( + const cycle_group& other, [[maybe_unused]] const std::optional /*unused*/) const requires IsNotUltraArithmetic { auto x_diff = other.x - x; @@ -294,10 +315,12 @@ cycle_group cycle_group::unconditional_add(const cycle_group& * * @tparam Builder * @param other + * @param hint : value of output point witness, if known ahead of time (used to avoid modular inversions during witgen) * @return cycle_group */ template -cycle_group cycle_group::unconditional_add(const cycle_group& other) const +cycle_group cycle_group::unconditional_add(const cycle_group& other, + const std::optional hint) const requires IsUltraArithmetic { auto context = get_context(other); @@ -308,28 +331,36 @@ cycle_group cycle_group::unconditional_add(const cycle_group& auto lhs = cycle_group::from_constant_witness(context, get_value()); // We need to manually propagate the origin tag lhs.set_origin_tag(get_origin_tag()); - return lhs.unconditional_add(other); + return lhs.unconditional_add(other, hint); } if (!lhs_constant && rhs_constant) { auto rhs = cycle_group::from_constant_witness(context, other.get_value()); // We need to manually propagate the origin tag rhs.set_origin_tag(other.get_origin_tag()); - return unconditional_add(rhs); + return unconditional_add(rhs, hint); } - - const auto p1 = get_value(); - const auto p2 = other.get_value(); - AffineElement p3(Element(p1) + Element(p2)); - if (lhs_constant && rhs_constant) { - auto result = cycle_group(p3); - // We need to manually propagate the origin tag - result.set_origin_tag(OriginTag(get_origin_tag(), other.get_origin_tag())); - return result; + cycle_group result; + if (hint.has_value()) { + auto x3 = hint.value().x; + auto y3 = hint.value().y; + if (lhs_constant && rhs_constant) { + return cycle_group(x3, y3, false); + } + result = cycle_group(witness_t(context, x3), witness_t(context, y3), false); + } else { + const auto p1 = get_value(); + const auto p2 = other.get_value(); + AffineElement p3(Element(p1) + Element(p2)); + if (lhs_constant && rhs_constant) { + auto result = cycle_group(p3); + // We need to manually propagate the origin tag + result.set_origin_tag(OriginTag(get_origin_tag(), other.get_origin_tag())); + return result; + } + field_t r_x(witness_t(context, p3.x)); + field_t r_y(witness_t(context, p3.y)); + result = cycle_group(r_x, r_y, false); } - field_t r_x(witness_t(context, p3.x)); - field_t r_y(witness_t(context, p3.y)); - cycle_group result(r_x, r_y, false); - bb::ecc_add_gate_ add_gate{ .x1 = x.get_witness_index(), .y1 = y.get_witness_index(), @@ -353,13 +384,15 @@ 
cycle_group cycle_group::unconditional_add(const cycle_group& * * @tparam Builder * @param other + * @param hint : value of output point witness, if known ahead of time (used to avoid modular inversions during witgen) * @return cycle_group */ template -cycle_group cycle_group::unconditional_subtract(const cycle_group& other) const +cycle_group cycle_group::unconditional_subtract(const cycle_group& other, + const std::optional hint) const { if constexpr (!IS_ULTRA) { - return unconditional_add(-other); + return unconditional_add(-other, hint); } else { auto context = get_context(other); @@ -370,7 +403,7 @@ cycle_group cycle_group::unconditional_subtract(const cycle_gr auto lhs = cycle_group::from_constant_witness(context, get_value()); // We need to manually propagate the origin tag lhs.set_origin_tag(get_origin_tag()); - return lhs.unconditional_subtract(other); + return lhs.unconditional_subtract(other, hint); } if (!lhs_constant && rhs_constant) { auto rhs = cycle_group::from_constant_witness(context, other.get_value()); @@ -378,19 +411,28 @@ cycle_group cycle_group::unconditional_subtract(const cycle_gr rhs.set_origin_tag(other.get_origin_tag()); return unconditional_subtract(rhs); } - auto p1 = get_value(); - auto p2 = other.get_value(); - AffineElement p3(Element(p1) - Element(p2)); - if (lhs_constant && rhs_constant) { - auto result = cycle_group(p3); - // We need to manually propagate the origin tag - result.set_origin_tag(OriginTag(get_origin_tag(), other.get_origin_tag())); - return result; + cycle_group result; + if (hint.has_value()) { + auto x3 = hint.value().x; + auto y3 = hint.value().y; + if (lhs_constant && rhs_constant) { + return cycle_group(x3, y3, false); + } + result = cycle_group(witness_t(context, x3), witness_t(context, y3), is_point_at_infinity()); + } else { + auto p1 = get_value(); + auto p2 = other.get_value(); + AffineElement p3(Element(p1) - Element(p2)); + if (lhs_constant && rhs_constant) { + auto result = cycle_group(p3); + // We need to manually propagate the origin tag + result.set_origin_tag(OriginTag(get_origin_tag(), other.get_origin_tag())); + return result; + } + field_t r_x(witness_t(context, p3.x)); + field_t r_y(witness_t(context, p3.y)); + result = cycle_group(r_x, r_y, false); } - field_t r_x(witness_t(context, p3.x)); - field_t r_y(witness_t(context, p3.y)); - cycle_group result(r_x, r_y, false); - bb::ecc_add_gate_ add_gate{ .x1 = x.get_witness_index(), .y1 = y.get_witness_index(), @@ -418,14 +460,16 @@ cycle_group cycle_group::unconditional_subtract(const cycle_gr * * @tparam Builder * @param other + * @param hint : value of output point witness, if known ahead of time (used to avoid modular inversions during witgen) * @return cycle_group */ template -cycle_group cycle_group::checked_unconditional_add(const cycle_group& other) const +cycle_group cycle_group::checked_unconditional_add(const cycle_group& other, + const std::optional hint) const { field_t x_delta = x - other.x; x_delta.assert_is_not_zero("cycle_group::checked_unconditional_add, x-coordinate collision"); - return unconditional_add(other); + return unconditional_add(other, hint); } /** @@ -438,14 +482,16 @@ cycle_group cycle_group::checked_unconditional_add(const cycle * * @tparam Builder * @param other + * @param hint : value of output point witness, if known ahead of time (used to avoid modular inversions during witgen) * @return cycle_group */ template -cycle_group cycle_group::checked_unconditional_subtract(const cycle_group& other) const +cycle_group 
cycle_group::checked_unconditional_subtract(const cycle_group& other, + const std::optional hint) const { field_t x_delta = x - other.x; x_delta.assert_is_not_zero("cycle_group::checked_unconditional_subtract, x-coordinate collision"); - return unconditional_subtract(other); + return unconditional_subtract(other, hint); } /** @@ -901,7 +947,13 @@ cycle_group::straus_scalar_slice::straus_scalar_slice(Builder* context, // convert an input cycle_scalar object into a vector of slices, each containing `table_bits` bits. // this also performs an implicit range check on the input slices const auto slice_scalar = [&](const field_t& scalar, const size_t num_bits) { - std::vector result; + // we record the scalar slices both as field_t circuit elements and u64 values + // (u64 values are used to index arrays and we don't want to repeatedly cast a stdlib value to a numeric + // primitive as this gets expensive when repeated enough times) + std::pair, std::vector> result; + result.first.reserve(static_cast(1ULL) << table_bits); + result.second.reserve(static_cast(1ULL) << table_bits); + if (num_bits == 0) { return result; } @@ -911,12 +963,22 @@ cycle_group::straus_scalar_slice::straus_scalar_slice(Builder* context, uint256_t raw_value = scalar.get_value(); for (size_t i = 0; i < num_slices; ++i) { uint64_t slice_v = static_cast(raw_value.data[0]) & table_mask; - result.push_back(field_t(slice_v)); + result.first.push_back(field_t(slice_v)); + result.second.push_back(slice_v); raw_value = raw_value >> table_bits; } return result; } + uint256_t raw_value = scalar.get_value(); + const uint64_t table_mask = (1ULL << table_bits) - 1ULL; + const size_t num_slices = (num_bits + table_bits - 1) / table_bits; + for (size_t i = 0; i < num_slices; ++i) { + uint64_t slice_v = static_cast(raw_value.data[0]) & table_mask; + result.second.push_back(slice_v); + raw_value = raw_value >> table_bits; + } + if constexpr (IS_ULTRA) { const auto slice_indices = context->decompose_into_default_range(scalar.normalize().get_witness_index(), @@ -924,26 +986,22 @@ cycle_group::straus_scalar_slice::straus_scalar_slice(Builder* context, table_bits, "straus_scalar_slice decompose_into_default_range"); for (auto& idx : slice_indices) { - result.emplace_back(field_t::from_witness_index(context, idx)); + result.first.emplace_back(field_t::from_witness_index(context, idx)); } } else { - uint256_t raw_value = scalar.get_value(); - const uint64_t table_mask = (1ULL << table_bits) - 1ULL; - const size_t num_slices = (num_bits + table_bits - 1) / table_bits; for (size_t i = 0; i < num_slices; ++i) { - uint64_t slice_v = static_cast(raw_value.data[0]) & table_mask; + uint64_t slice_v = result.second[i]; field_t slice(witness_t(context, slice_v)); context->create_range_constraint( slice.get_witness_index(), table_bits, "straus_scalar_slice create_range_constraint"); - result.emplace_back(slice); - raw_value = raw_value >> table_bits; + result.first.push_back(slice); } std::vector linear_elements; FF scaling_factor = 1; for (size_t i = 0; i < num_slices; ++i) { - linear_elements.emplace_back(result[i] * scaling_factor); + linear_elements.emplace_back(result.first[i] * scaling_factor); scaling_factor += scaling_factor; } field_t::accumulate(linear_elements).assert_equal(scalar); @@ -956,8 +1014,10 @@ cycle_group::straus_scalar_slice::straus_scalar_slice(Builder* context, auto hi_slices = slice_scalar(scalar.hi, hi_bits); auto lo_slices = slice_scalar(scalar.lo, lo_bits); - std::copy(lo_slices.begin(), lo_slices.end(), 
std::back_inserter(slices)); - std::copy(hi_slices.begin(), hi_slices.end(), std::back_inserter(slices)); + std::copy(lo_slices.first.begin(), lo_slices.first.end(), std::back_inserter(slices)); + std::copy(hi_slices.first.begin(), hi_slices.first.end(), std::back_inserter(slices)); + std::copy(lo_slices.second.begin(), lo_slices.second.end(), std::back_inserter(slices_native)); + std::copy(hi_slices.second.begin(), hi_slices.second.end(), std::back_inserter(slices_native)); const auto tag = scalar.get_origin_tag(); for (auto& element : slices) { // All slices need to have the same origin tag @@ -983,6 +1043,35 @@ std::optional> cycle_group::straus_scalar_slice::read( return slices[index]; } +/** + * @brief Compute the output points generated when computing the Straus lookup table + * @details When performing an MSM, we first compute all the witness values as Element types (with a Z-coordinate), + * and then we batch-convert the points into affine representation `AffineElement` + * This avoids the need to compute a modular inversion for every group operation, + * which dramatically cuts witness generation times + * + * @tparam Builder + * @param base_point + * @param offset_generator + * @param table_bits + * @return std::vector::Element> + */ +template +std::vector::Element> cycle_group< + Builder>::straus_lookup_table::compute_straus_lookup_table_hints(const Element& base_point, + const Element& offset_generator, + size_t table_bits) +{ + const size_t table_size = 1UL << table_bits; + Element base = base_point.is_point_at_infinity() ? Group::one : base_point; + std::vector hints; + hints.emplace_back(offset_generator); + for (size_t i = 1; i < table_size; ++i) { + hints.emplace_back(hints[i - 1] + base); + } + return hints; +} + /** * @brief Construct a new cycle group::straus lookup table::straus lookup table object * @@ -1001,7 +1090,8 @@ template cycle_group::straus_lookup_table::straus_lookup_table(Builder* context, const cycle_group& base_point, const cycle_group& offset_generator, - size_t table_bits) + size_t table_bits, + std::optional> hints) : _table_bits(table_bits) , _context(context) , tag(OriginTag(base_point.get_origin_tag(), offset_generator.get_origin_tag())) @@ -1022,11 +1112,41 @@ cycle_group::straus_lookup_table::straus_lookup_table(Builder* context, field_t modded_x = field_t::conditional_assign(base_point.is_point_at_infinity(), fallback_point.x, base_point.x); field_t modded_y = field_t::conditional_assign(base_point.is_point_at_infinity(), fallback_point.y, base_point.y); cycle_group modded_base_point(modded_x, modded_y, false); - for (size_t i = 1; i < table_size; ++i) { - auto add_output = point_table[i - 1].checked_unconditional_add(modded_base_point); - field_t x = field_t::conditional_assign(base_point.is_point_at_infinity(), offset_generator.x, add_output.x); - field_t y = field_t::conditional_assign(base_point.is_point_at_infinity(), offset_generator.y, add_output.y); - point_table[i] = cycle_group(x, y, false); + + // if the input point is constant, it is cheaper to fix the point as a witness and then derive the table, than it is + // to derive the table and fix its witnesses to be constant! 
(due to group additions = 1 gate, and fixing x/y coords + // to be constant = 2 gates) + if (modded_base_point.is_constant() && !base_point.is_point_at_infinity().get_value()) { + modded_base_point = cycle_group::from_constant_witness(_context, modded_base_point.get_value()); + point_table[0] = cycle_group::from_constant_witness(_context, offset_generator.get_value()); + for (size_t i = 1; i < table_size; ++i) { + std::optional hint = + hints.has_value() ? std::optional(hints.value()[i - 1]) : std::nullopt; + point_table[i] = point_table[i - 1].unconditional_add(modded_base_point, hint); + } + } else { + std::vector> x_coordinate_checks; + // ensure all of the ecc add gates are lined up so that we can pay 1 gate per add and not 2 + for (size_t i = 1; i < table_size; ++i) { + std::optional hint = + hints.has_value() ? std::optional(hints.value()[i - 1]) : std::nullopt; + x_coordinate_checks.emplace_back(point_table[i - 1].x, modded_base_point.x); + point_table[i] = point_table[i - 1].unconditional_add(modded_base_point, hint); + } + + // batch the x-coordinate checks together + // because `assert_is_not_zero` witness generation needs a modular inversion (expensive) + field_t coordinate_check_product = 1; + for (auto& [x1, x2] : x_coordinate_checks) { + auto x_diff = x2 - x1; + coordinate_check_product *= x_diff; + } + coordinate_check_product.assert_is_not_zero("straus_lookup_table x-coordinate collision"); + + for (size_t i = 1; i < table_size; ++i) { + point_table[i] = + cycle_group::conditional_assign(base_point.is_point_at_infinity(), offset_generator, point_table[i]); + } } if constexpr (IS_ULTRA) { rom_id = context->create_ROM_array(table_size); @@ -1137,16 +1257,78 @@ typename cycle_group::batch_mul_internal_output cycle_group::_ const size_t num_points = scalars.size(); std::vector scalar_slices; + + /** + * Compute the witness values of the batch_mul algorithm natively, as Element types with a Z-coordinate. + * We then batch-convert to AffineElement types, and feed these points as "hints" into the cycle_group methods. 
+ * This avoids the need to compute modular inversions for every group operation, which dramatically reduces witness + generation times + */ + std::vector operation_transcript; + std::vector> native_straus_tables; + Element offset_generator_accumulator = offset_generators[0]; + { + for (size_t i = 0; i < num_points; ++i) { + std::vector native_straus_table; + native_straus_table.emplace_back(offset_generators[i + 1]); + size_t table_size = 1ULL << TABLE_BITS; + for (size_t j = 1; j < table_size; ++j) { + native_straus_table.emplace_back(native_straus_table[j - 1] + base_points[i].get_value()); + } + native_straus_tables.emplace_back(native_straus_table); + } + for (size_t i = 0; i < num_points; ++i) { + scalar_slices.emplace_back(straus_scalar_slice(context, scalars[i], TABLE_BITS)); + + auto table_transcript = straus_lookup_table::compute_straus_lookup_table_hints( + base_points[i].get_value(), offset_generators[i + 1], TABLE_BITS); + std::copy(table_transcript.begin() + 1, table_transcript.end(), std::back_inserter(operation_transcript)); + } + Element accumulator = offset_generators[0]; + + for (size_t i = 0; i < num_rounds; ++i) { + if (i != 0) { + for (size_t j = 0; j < TABLE_BITS; ++j) { + // offset_generator_accumulator is a regular Element, so dbl() won't add constraints + accumulator = accumulator.dbl(); + operation_transcript.emplace_back(accumulator); + offset_generator_accumulator = offset_generator_accumulator.dbl(); + } + } + for (size_t j = 0; j < num_points; ++j) { + + const Element point = + native_straus_tables[j][static_cast(scalar_slices[j].slices_native[num_rounds - i - 1])]; + + accumulator += point; + + operation_transcript.emplace_back(accumulator); + offset_generator_accumulator = offset_generator_accumulator + Element(offset_generators[j + 1]); + } + } + } + + // Normalize the computed witness points and convert into AffineElement type + Element::batch_normalize(&operation_transcript[0], operation_transcript.size()); + + std::vector operation_hints; + operation_hints.reserve(operation_transcript.size()); + for (auto& element : operation_transcript) { + operation_hints.emplace_back(AffineElement(element.x, element.y)); + } + std::vector point_tables; + const size_t hints_per_table = (1ULL << TABLE_BITS) - 1; OriginTag tag{}; for (size_t i = 0; i < num_points; ++i) { + std::span table_hints(&operation_hints[i * hints_per_table], hints_per_table); // Merge tags tag = OriginTag(tag, scalars[i].get_origin_tag(), base_points[i].get_origin_tag()); - scalar_slices.emplace_back(straus_scalar_slice(context, scalars[i], TABLE_BITS)); - point_tables.emplace_back(straus_lookup_table(context, base_points[i], offset_generators[i + 1], TABLE_BITS)); + point_tables.emplace_back(straus_lookup_table(context, base_points[i], offset_generators[i + 1], TABLE_BITS, table_hints)); } - Element offset_generator_accumulator = offset_generators[0]; + AffineElement* hint_ptr = &operation_hints[num_points * hints_per_table]; cycle_group accumulator = offset_generators[0]; // populate the set of points we are going to add into our accumulator, *before* we do any ECC operations @@ -1165,36 +1347,42 @@ typename cycle_group::batch_mul_internal_output cycle_group::_ } } } + std::vector> x_coordinate_checks; size_t point_counter = 0; for (size_t i = 0; i < num_rounds; ++i) { if (i != 0) { for (size_t j = 0; j < TABLE_BITS; ++j) { - // offset_generator_accuulator is a regular Element, so dbl() won't add constraints - accumulator = accumulator.dbl(); - offset_generator_accumulator = offset_generator_accumulator.dbl(); + accumulator = accumulator.dbl(*hint_ptr); + hint_ptr++; } } for (size_t j = 0; j < num_points; ++j) { const
std::optional<field_t> scalar_slice = scalar_slices[j].read(num_rounds - i - 1); - // if we are doing a batch mul over scalars of different bit-lengths, we may not have a bit slice for a - // given round and a given scalar + // if we are doing a batch mul over scalars of different bit-lengths, we may not have a bit slice + // for a given round and a given scalar if (scalar_slice.has_value()) { + ASSERT(scalar_slice.value().get_value() == scalar_slices[j].slices_native[num_rounds - i - 1]); const auto& point = points_to_add[point_counter++]; if (!unconditional_add) { x_coordinate_checks.push_back({ accumulator.x, point.x }); } - accumulator = accumulator.unconditional_add(point); - offset_generator_accumulator = offset_generator_accumulator + Element(offset_generators[j + 1]); + accumulator = accumulator.unconditional_add(point, *hint_ptr); + hint_ptr++; } } } + // validate that none of the x-coordinate differences are zero + // we batch the x-coordinate checks together + // because `assert_is_not_zero` witness generation needs a modular inversion (expensive) + field_t coordinate_check_product = 1; for (auto& [x1, x2] : x_coordinate_checks) { auto x_diff = x2 - x1; - x_diff.assert_is_not_zero("_variable_base_batch_mul_internal x-coordinate collision"); + coordinate_check_product *= x_diff; } + coordinate_check_product.assert_is_not_zero("_variable_base_batch_mul_internal x-coordinate collision"); // Set the final accumulator's tag to the union of all points' and scalars' tags accumulator.set_origin_tag(tag); @@ -1268,12 +1456,33 @@ typename cycle_group<Builder>::batch_mul_internal_output cycle_group<Builder>::_ ASSERT(offset_1.has_value()); offset_generator_accumulator += offset_1.value(); } + /** + * Compute the witness values of the batch_mul algorithm natively, as Element types with a Z-coordinate. + * We then batch-convert to AffineElement types, and feed these points as "hints" into the cycle_group methods. + * This avoids the need to compute modular inversions for every group operation, which dramatically reduces witness + * generation times. + */ + std::vector<Element> operation_transcript; + { + Element accumulator = lookup_points[0].get_value(); + for (size_t i = 1; i < lookup_points.size(); ++i) { + accumulator = accumulator + (lookup_points[i].get_value()); + operation_transcript.emplace_back(accumulator); + } + } + Element::batch_normalize(&operation_transcript[0], operation_transcript.size()); + std::vector<AffineElement> operation_hints; + operation_hints.reserve(operation_transcript.size()); + for (auto& element : operation_transcript) { + operation_hints.emplace_back(AffineElement(element.x, element.y)); + } + cycle_group accumulator = lookup_points[0]; // Perform all point additions sequentially. The Ultra ecc_addition relation costs 1 gate iff additions are chained // and output point of previous addition = input point of current addition. // If this condition is not met, the addition relation costs 2 gates. So it's good to do these sequentially! for (size_t i = 1; i < lookup_points.size(); ++i) { - accumulator = accumulator.unconditional_add(lookup_points[i]); + accumulator = accumulator.unconditional_add(lookup_points[i], operation_hints[i - 1]); } /** * offset_generator_accumulator represents the sum of all the offset generator terms present in `accumulator`.
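The pattern above deserves a closer look, since it appears twice in this hunk: `Element::batch_normalize` amortizes the Jacobian-to-affine conversions behind one shared inversion, and `coordinate_check_product` collapses many `assert_is_not_zero` inversions into one. Both are instances of Montgomery batch inversion: N field inversions become one inversion plus O(N) multiplications. Below is a minimal, self-contained sketch of that trick over a toy 64-bit field (`Fq` and `batch_invert` are illustrative names, not barretenberg types):

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy prime field for illustration only (barretenberg's real Fq differs).
struct Fq {
    static constexpr uint64_t P = 0xffffffff00000001ULL; // Goldilocks prime
    uint64_t v;
    Fq operator*(Fq o) const { return { static_cast<uint64_t>((__uint128_t)v * o.v % P) }; }
    Fq inverse() const // Fermat's little theorem: v^(P-2) mod P
    {
        Fq r{ 1 }, b = *this;
        for (uint64_t e = P - 2; e != 0; e >>= 1, b = b * b) {
            if (e & 1) { r = r * b; }
        }
        return r;
    }
};

// Montgomery batch inversion: one expensive inversion for the whole vector.
// Inputs must be nonzero, exactly as the x-coordinate checks above require.
std::vector<Fq> batch_invert(const std::vector<Fq>& in)
{
    std::vector<Fq> prefix(in.size());
    Fq acc{ 1 };
    for (size_t i = 0; i < in.size(); ++i) {
        prefix[i] = acc; // product of in[0..i-1]
        acc = acc * in[i];
    }
    Fq inv = acc.inverse(); // the only inversion performed
    std::vector<Fq> out(in.size());
    for (size_t i = in.size(); i-- > 0;) {
        out[i] = inv * prefix[i]; // equals 1 / in[i]
        inv = inv * in[i];        // peel off in[i] for the next step
    }
    return out;
}
```

Note also that the hints only affect witness generation: the ecc gates still constrain each doubling and addition, so a malformed hint should surface as an unsatisfiable circuit rather than a soundness gap.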
diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/group/cycle_group.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/group/cycle_group.hpp index cee097dfa4a9..ab008de22a36 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/group/cycle_group.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/group/cycle_group.hpp @@ -136,6 +136,7 @@ template <typename Builder> class cycle_group { std::optional<field_t> read(size_t index); size_t _table_bits; std::vector<field_t> slices; + std::vector<uint64_t> slices_native; }; /** @@ -165,11 +166,16 @@ template <typename Builder> class cycle_group { */ struct straus_lookup_table { public: + static std::vector<AffineElement> compute_straus_lookup_table_hints(const Element& base_point, + const Element& offset_generator, + size_t table_bits); + straus_lookup_table() = default; straus_lookup_table(Builder* context, const cycle_group& base_point, const cycle_group& offset_generator, - size_t table_bits); + size_t table_bits, + std::optional<std::span<AffineElement>> hints = std::nullopt); cycle_group read(const field_t& index); size_t _table_bits; Builder* _context; @@ -204,17 +210,22 @@ template <typename Builder> class cycle_group { void set_point_at_infinity(const bool_t& is_infinity) { _is_infinity = is_infinity; } cycle_group get_standard_form() const; void validate_is_on_curve() const; - cycle_group dbl() const requires IsUltraArithmetic; - cycle_group dbl() const + cycle_group dbl(const std::optional<AffineElement> hint = std::nullopt) const requires IsUltraArithmetic; + cycle_group dbl(const std::optional<AffineElement> hint = std::nullopt) const requires IsNotUltraArithmetic; - cycle_group unconditional_add(const cycle_group& other) const + cycle_group unconditional_add(const cycle_group& other, + const std::optional<AffineElement> hint = std::nullopt) const requires IsUltraArithmetic; - cycle_group unconditional_add(const cycle_group& other) const + cycle_group unconditional_add(const cycle_group& other, + const std::optional<AffineElement> hint = std::nullopt) const requires IsNotUltraArithmetic; - cycle_group unconditional_subtract(const cycle_group& other) const; - cycle_group checked_unconditional_add(const cycle_group& other) const; - cycle_group checked_unconditional_subtract(const cycle_group& other) const; + cycle_group unconditional_subtract(const cycle_group& other, + const std::optional<AffineElement> hint = std::nullopt) const; + cycle_group checked_unconditional_add(const cycle_group& other, + const std::optional<AffineElement> hint = std::nullopt) const; + cycle_group checked_unconditional_subtract(const cycle_group& other, + const std::optional<AffineElement> hint = std::nullopt) const; cycle_group operator+(const cycle_group& other) const; cycle_group operator-(const cycle_group& other) const; cycle_group operator-() const; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp index ce321b18ea89..c0cbacf2e925 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp @@ -111,18 +111,17 @@ std::array TranslatorRecursiveVerifier_template get_challenge("Sumcheck:gate_challenge_" + std::to_string(idx)); } - std::vector<Commitment> libra_commitments; - for (size_t idx = 0; idx < log_circuit_size; idx++) { - Commitment libra_commitment = - transcript->template receive_from_prover<Commitment>("Libra:commitment_" + std::to_string(idx)); - libra_commitments.push_back(libra_commitment); - } - auto [multivariate_challenge, claimed_evaluations, libra_evaluations,
sumcheck_verified] = + std::array libra_commitments = {}; + libra_commitments[0] = transcript->template receive_from_prover("Libra:concatenation_commitment"); + + auto [multivariate_challenge, claimed_evaluations, libra_evaluation, sumcheck_verified] = sumcheck.verify(relation_parameters, alpha, gate_challenges); - // Execute ZeroMorph rounds followed by the univariate PCS. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a - // complete description of the unrolled protocol. + libra_commitments[1] = transcript->template receive_from_prover("Libra:big_sum_commitment"); + libra_commitments[2] = transcript->template receive_from_prover("Libra:quotient_commitment"); + // Execute Shplemini + bool consistency_checked = true; const BatchOpeningClaim opening_claim = Shplemini::compute_batch_opening_claim(circuit_size, commitments.get_unshifted_without_concatenated(), @@ -133,8 +132,10 @@ std::array TranslatorRecursiveVerifier_ class Transcript_ : public NativeTranscript { + class Transcript : public NativeTranscript { public: uint32_t circuit_size; uint32_t public_input_size; @@ -887,23 +887,23 @@ class MegaFlavor { Commitment shplonk_q_comm; Commitment kzg_w_comm; - Transcript_() = default; + Transcript() = default; - Transcript_(const HonkProof& proof) + Transcript(const HonkProof& proof) : NativeTranscript(proof) {} - static std::shared_ptr prover_init_empty() + static std::shared_ptr prover_init_empty() { - auto transcript = std::make_shared(); + auto transcript = std::make_shared(); constexpr uint32_t init{ 42 }; // arbitrary transcript->send_to_verifier("Init", init); return transcript; }; - static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) + static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) { - auto verifier_transcript = std::make_shared(transcript->proof_data); + auto verifier_transcript = std::make_shared(transcript->proof_data); [[maybe_unused]] auto _ = verifier_transcript->template receive_from_prover("Init"); return verifier_transcript; }; @@ -1010,8 +1010,6 @@ class MegaFlavor { ASSERT(proof_data.size() == old_proof_length); } }; - // Specialize for Mega (general case used in MegaRecursive). - using Transcript = Transcript_; }; } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_zk_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_zk_flavor.hpp index dbaac3d0164a..7ed3964002d9 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_zk_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_zk_flavor.hpp @@ -19,66 +19,40 @@ class MegaZKFlavor : public bb::MegaFlavor { * Note: Made generic for use in MegaRecursive. 
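A subtlety worth noting in the verifier change above: the Libra data is no longer received as one block before sumcheck; `Libra:concatenation_commitment` is absorbed first, while the big-sum and quotient commitments arrive only after `sumcheck.verify(...)`. Under Fiat-Shamir this ordering is load-bearing: prover and verifier must absorb identical labeled elements in identical order, or every later challenge diverges. A toy sketch of that invariant, assuming a stand-in hash (the real NativeTranscript/KeccakTranscript hash with Poseidon2 or Keccak; `ToyTranscript` is not a barretenberg class):

```cpp
#include <cstdint>
#include <string>
#include <vector>

// Toy Fiat-Shamir transcript: the absorb order fully determines the challenges.
class ToyTranscript {
    std::vector<uint64_t> state;

    static uint64_t mix(uint64_t acc, uint64_t v) { return (acc ^ v) * 0x9e3779b97f4a7c15ULL; }
    static uint64_t label_hash(const std::string& label)
    {
        uint64_t h = 14695981039346656037ULL; // FNV-1a offset basis
        for (char c : label) {
            h = (h ^ static_cast<uint8_t>(c)) * 1099511628211ULL;
        }
        return h;
    }

  public:
    // The prover calls this when sending an element, the verifier when receiving
    // it; both sides must do so with the same labels, in the same order.
    void absorb(const std::string& label, uint64_t value)
    {
        state.push_back(label_hash(label));
        state.push_back(value);
    }

    uint64_t get_challenge(const std::string& label)
    {
        uint64_t acc = label_hash(label);
        for (uint64_t s : state) {
            acc = mix(acc, s);
        }
        state.push_back(acc); // the challenge itself extends the state
        return acc;
    }
};
```

The `Init` handshake in `prover_init_empty`/`verifier_init_empty` serves the same purpose: both transcripts absorb the arbitrary value 42 first, so they start from identical states before any proof data flows.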
* TODO(https://github.com/AztecProtocol/barretenberg/issues/877): Remove this Commitment template parameter */ - template class Transcript_ : public NativeTranscript { + class Transcript : public MegaFlavor::Transcript { public: - uint32_t circuit_size; - uint32_t public_input_size; - uint32_t pub_inputs_offset; - std::vector public_inputs; - Commitment w_l_comm; - Commitment w_r_comm; - Commitment w_o_comm; - Commitment ecc_op_wire_1_comm; - Commitment ecc_op_wire_2_comm; - Commitment ecc_op_wire_3_comm; - Commitment ecc_op_wire_4_comm; - Commitment calldata_comm; - Commitment calldata_read_counts_comm; - Commitment calldata_read_tags_comm; - Commitment calldata_inverses_comm; - Commitment secondary_calldata_comm; - Commitment secondary_calldata_read_counts_comm; - Commitment secondary_calldata_read_tags_comm; - Commitment secondary_calldata_inverses_comm; - Commitment return_data_comm; - Commitment return_data_read_counts_comm; - Commitment return_data_read_tags_comm; - Commitment return_data_inverses_comm; - Commitment w_4_comm; - Commitment z_perm_comm; - Commitment lookup_inverses_comm; - Commitment lookup_read_counts_comm; - Commitment lookup_read_tags_comm; - std::vector libra_commitments; + // Note: we have a different vector of univariates because the degree for ZK flavors differs + std::vector> zk_sumcheck_univariates; + Commitment libra_concatenation_commitment; FF libra_sum; - std::vector> sumcheck_univariates; - std::vector libra_evaluations; - std::array sumcheck_evaluations; + FF libra_claimed_evaluation; + Commitment libra_big_sum_commitment; + Commitment libra_quotient_commitment; + FF libra_concatenation_eval; + FF libra_shifted_big_sum_eval; + FF libra_big_sum_eval; + FF libra_quotient_eval; Commitment hiding_polynomial_commitment; FF hiding_polynomial_eval; - std::vector gemini_fold_comms; - std::vector gemini_fold_evals; - Commitment shplonk_q_comm; - Commitment kzg_w_comm; - Transcript_() = default; + Transcript() = default; - Transcript_(const HonkProof& proof) - : NativeTranscript(proof) + Transcript(const HonkProof& proof) + : MegaFlavor::Transcript(proof) {} - static std::shared_ptr prover_init_empty() + static std::shared_ptr prover_init_empty() { - auto transcript = std::make_shared(); + auto transcript = std::make_shared(); constexpr uint32_t init{ 42 }; // arbitrary transcript->send_to_verifier("Init", init); return transcript; }; - static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) + static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) { - auto verifier_transcript = std::make_shared(transcript->proof_data); - [[maybe_unused]] auto _ = verifier_transcript->template receive_from_prover("Init"); + auto verifier_transcript = std::make_shared(transcript->proof_data); + verifier_transcript->template receive_from_prover("Init"); return verifier_transcript; }; @@ -86,133 +60,131 @@ class MegaZKFlavor : public bb::MegaFlavor { { // take current proof and put them into the struct size_t num_frs_read = 0; - circuit_size = deserialize_from_buffer(proof_data, num_frs_read); - size_t log_circuit_size = static_cast(numeric::get_msb(circuit_size)); + this->circuit_size = deserialize_from_buffer(proof_data, num_frs_read); - public_input_size = deserialize_from_buffer(proof_data, num_frs_read); - pub_inputs_offset = deserialize_from_buffer(proof_data, num_frs_read); + this->public_input_size = deserialize_from_buffer(proof_data, num_frs_read); + this->pub_inputs_offset = deserialize_from_buffer(proof_data, 
num_frs_read); for (size_t i = 0; i < public_input_size; ++i) { - public_inputs.push_back(deserialize_from_buffer(proof_data, num_frs_read)); + this->public_inputs.push_back(deserialize_from_buffer(proof_data, num_frs_read)); } - w_l_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_r_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_o_comm = deserialize_from_buffer(proof_data, num_frs_read); - ecc_op_wire_1_comm = deserialize_from_buffer(proof_data, num_frs_read); - ecc_op_wire_2_comm = deserialize_from_buffer(proof_data, num_frs_read); - ecc_op_wire_3_comm = deserialize_from_buffer(proof_data, num_frs_read); - ecc_op_wire_4_comm = deserialize_from_buffer(proof_data, num_frs_read); - calldata_comm = deserialize_from_buffer(proof_data, num_frs_read); - calldata_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); - calldata_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); - calldata_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); - secondary_calldata_comm = deserialize_from_buffer(proof_data, num_frs_read); - secondary_calldata_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); - secondary_calldata_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); - secondary_calldata_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); - return_data_comm = deserialize_from_buffer(proof_data, num_frs_read); - return_data_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); - return_data_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); - return_data_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_4_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); - z_perm_comm = deserialize_from_buffer(proof_data, num_frs_read); - for (size_t i = 0; i < log_circuit_size; i++) { - libra_commitments.emplace_back(NativeTranscript::template deserialize_from_buffer( - NativeTranscript::proof_data, num_frs_read)); - }; - libra_sum = - NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); + this->w_l_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->w_r_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->w_o_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->ecc_op_wire_1_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->ecc_op_wire_2_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->ecc_op_wire_3_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->ecc_op_wire_4_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->calldata_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->calldata_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->calldata_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->calldata_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->secondary_calldata_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->secondary_calldata_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->secondary_calldata_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->secondary_calldata_inverses_comm = 
deserialize_from_buffer(proof_data, num_frs_read); + this->return_data_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->return_data_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->return_data_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->return_data_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->w_4_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->z_perm_comm = deserialize_from_buffer(proof_data, num_frs_read); + libra_concatenation_commitment = deserialize_from_buffer(proof_data, num_frs_read); + libra_sum = deserialize_from_buffer(proof_data, num_frs_read); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - sumcheck_univariates.push_back( + zk_sumcheck_univariates.push_back( deserialize_from_buffer>(proof_data, num_frs_read)); } - for (size_t i = 0; i < log_circuit_size; i++) { - libra_evaluations.emplace_back( - NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read)); - } - sumcheck_evaluations = deserialize_from_buffer>(proof_data, num_frs_read); + libra_claimed_evaluation = deserialize_from_buffer(proof_data, num_frs_read); + this->sumcheck_evaluations = + deserialize_from_buffer>(proof_data, num_frs_read); + libra_big_sum_commitment = deserialize_from_buffer(proof_data, num_frs_read); + libra_quotient_commitment = deserialize_from_buffer(proof_data, num_frs_read); hiding_polynomial_commitment = deserialize_from_buffer(proof_data, num_frs_read); - hiding_polynomial_eval = deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); + hiding_polynomial_eval = deserialize_from_buffer(proof_data, num_frs_read); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { - gemini_fold_comms.push_back(deserialize_from_buffer(proof_data, num_frs_read)); + this->gemini_fold_comms.push_back(deserialize_from_buffer(proof_data, num_frs_read)); } for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - gemini_fold_evals.push_back(deserialize_from_buffer(proof_data, num_frs_read)); + this->gemini_fold_evals.push_back(deserialize_from_buffer(proof_data, num_frs_read)); } - shplonk_q_comm = deserialize_from_buffer(proof_data, num_frs_read); + libra_concatenation_eval = deserialize_from_buffer(proof_data, num_frs_read); + libra_shifted_big_sum_eval = deserialize_from_buffer(proof_data, num_frs_read); + libra_big_sum_eval = deserialize_from_buffer(proof_data, num_frs_read); + libra_quotient_eval = deserialize_from_buffer(proof_data, num_frs_read); + this->shplonk_q_comm = deserialize_from_buffer(proof_data, num_frs_read); - kzg_w_comm = deserialize_from_buffer(proof_data, num_frs_read); + this->kzg_w_comm = deserialize_from_buffer(proof_data, num_frs_read); } void serialize_full_transcript() { size_t old_proof_length = proof_data.size(); - size_t log_circuit_size = static_cast(numeric::get_msb(circuit_size)); proof_data.clear(); - serialize_to_buffer(circuit_size, proof_data); - serialize_to_buffer(public_input_size, proof_data); - serialize_to_buffer(pub_inputs_offset, proof_data); + serialize_to_buffer(this->circuit_size, proof_data); + serialize_to_buffer(this->public_input_size, proof_data); + serialize_to_buffer(this->pub_inputs_offset, proof_data); for (size_t i = 0; i < 
public_input_size; ++i) { - serialize_to_buffer(public_inputs[i], proof_data); - } - serialize_to_buffer(w_l_comm, proof_data); - serialize_to_buffer(w_r_comm, proof_data); - serialize_to_buffer(w_o_comm, proof_data); - serialize_to_buffer(ecc_op_wire_1_comm, proof_data); - serialize_to_buffer(ecc_op_wire_2_comm, proof_data); - serialize_to_buffer(ecc_op_wire_3_comm, proof_data); - serialize_to_buffer(ecc_op_wire_4_comm, proof_data); - serialize_to_buffer(calldata_comm, proof_data); - serialize_to_buffer(calldata_read_counts_comm, proof_data); - serialize_to_buffer(calldata_read_tags_comm, proof_data); - serialize_to_buffer(calldata_inverses_comm, proof_data); - serialize_to_buffer(secondary_calldata_comm, proof_data); - serialize_to_buffer(secondary_calldata_read_counts_comm, proof_data); - serialize_to_buffer(secondary_calldata_read_tags_comm, proof_data); - serialize_to_buffer(secondary_calldata_inverses_comm, proof_data); - serialize_to_buffer(return_data_comm, proof_data); - serialize_to_buffer(return_data_read_counts_comm, proof_data); - serialize_to_buffer(return_data_read_tags_comm, proof_data); - serialize_to_buffer(return_data_inverses_comm, proof_data); - serialize_to_buffer(lookup_read_counts_comm, proof_data); - serialize_to_buffer(lookup_read_tags_comm, proof_data); - serialize_to_buffer(w_4_comm, proof_data); - serialize_to_buffer(lookup_inverses_comm, proof_data); - serialize_to_buffer(z_perm_comm, proof_data); - - for (size_t i = 0; i < log_circuit_size; ++i) { - NativeTranscript::template serialize_to_buffer(libra_commitments[i], NativeTranscript::proof_data); + serialize_to_buffer(this->public_inputs[i], proof_data); } - NativeTranscript::template serialize_to_buffer(libra_sum, NativeTranscript::proof_data); + serialize_to_buffer(this->w_l_comm, proof_data); + serialize_to_buffer(this->w_r_comm, proof_data); + serialize_to_buffer(this->w_o_comm, proof_data); + serialize_to_buffer(this->ecc_op_wire_1_comm, proof_data); + serialize_to_buffer(this->ecc_op_wire_2_comm, proof_data); + serialize_to_buffer(this->ecc_op_wire_3_comm, proof_data); + serialize_to_buffer(this->ecc_op_wire_4_comm, proof_data); + serialize_to_buffer(this->calldata_comm, proof_data); + serialize_to_buffer(this->calldata_read_counts_comm, proof_data); + serialize_to_buffer(this->calldata_read_tags_comm, proof_data); + serialize_to_buffer(this->calldata_inverses_comm, proof_data); + serialize_to_buffer(this->secondary_calldata_comm, proof_data); + serialize_to_buffer(this->secondary_calldata_read_counts_comm, proof_data); + serialize_to_buffer(this->secondary_calldata_read_tags_comm, proof_data); + serialize_to_buffer(this->secondary_calldata_inverses_comm, proof_data); + serialize_to_buffer(this->return_data_comm, proof_data); + serialize_to_buffer(this->return_data_read_counts_comm, proof_data); + serialize_to_buffer(this->return_data_read_tags_comm, proof_data); + serialize_to_buffer(this->return_data_inverses_comm, proof_data); + serialize_to_buffer(this->lookup_read_counts_comm, proof_data); + serialize_to_buffer(this->lookup_read_tags_comm, proof_data); + serialize_to_buffer(this->w_4_comm, proof_data); + serialize_to_buffer(this->lookup_inverses_comm, proof_data); + serialize_to_buffer(this->z_perm_comm, proof_data); + + serialize_to_buffer(libra_concatenation_commitment, proof_data); + serialize_to_buffer(libra_sum, proof_data); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - serialize_to_buffer(sumcheck_univariates[i], proof_data); - } - for (size_t i = 0; i < log_circuit_size; ++i) { - 
NativeTranscript::template serialize_to_buffer(libra_evaluations[i], NativeTranscript::proof_data); + serialize_to_buffer(zk_sumcheck_univariates[i], proof_data); } + serialize_to_buffer(libra_claimed_evaluation, proof_data); - serialize_to_buffer(sumcheck_evaluations, proof_data); - serialize_to_buffer(hiding_polynomial_commitment, NativeTranscript::proof_data); - serialize_to_buffer(hiding_polynomial_eval, NativeTranscript::proof_data); + serialize_to_buffer(this->sumcheck_evaluations, proof_data); + serialize_to_buffer(libra_big_sum_commitment, proof_data); + serialize_to_buffer(libra_quotient_commitment, proof_data); + serialize_to_buffer(hiding_polynomial_commitment, proof_data); + serialize_to_buffer(hiding_polynomial_eval, proof_data); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { - serialize_to_buffer(gemini_fold_comms[i], proof_data); + serialize_to_buffer(this->gemini_fold_comms[i], proof_data); } for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - serialize_to_buffer(gemini_fold_evals[i], proof_data); + serialize_to_buffer(this->gemini_fold_evals[i], proof_data); } - serialize_to_buffer(shplonk_q_comm, proof_data); - serialize_to_buffer(kzg_w_comm, proof_data); + serialize_to_buffer(libra_concatenation_eval, proof_data); + serialize_to_buffer(libra_shifted_big_sum_eval, proof_data); + serialize_to_buffer(libra_big_sum_eval, proof_data); + serialize_to_buffer(libra_quotient_eval, proof_data); + serialize_to_buffer(this->shplonk_q_comm, proof_data); + serialize_to_buffer(this->kzg_w_comm, proof_data); ASSERT(proof_data.size() == old_proof_length); } }; - // Specialize for Mega (general case used in MegaRecursive). - using Transcript = Transcript_; }; } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp index 62bc329ba6d9..204dc177391f 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp @@ -67,9 +67,9 @@ class UltraFlavor { // Note: made generic for use in MegaRecursive. template - // List of relations reflecting the Ultra arithmetisation. WARNING: As UltraKeccak flavor inherits from Ultra flavor - // any change of ordering in this tuple needs to be reflected in the smart contract, otherwise relation accumulation - // will not match. + // List of relations reflecting the Ultra arithmetisation. WARNING: As UltraKeccak flavor inherits from + // Ultra flavor any change of ordering in this tuple needs to be reflected in the smart contract, otherwise + // relation accumulation will not match. using Relations_ = std::tuple, bb::UltraPermutationRelation, bb::LogDerivLookupRelation, @@ -97,6 +97,22 @@ class UltraFlavor { static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = MAX_PARTIAL_RELATION_LENGTH + 1; static constexpr size_t NUM_RELATIONS = std::tuple_size_v; + // Proof length formula: + // 1. HONK_PROOF_PUBLIC_INPUT_OFFSET are the circuit_size, num_public_inputs, pub_inputs_offset + // 2. PAIRING_POINT_ACCUMULATOR_SIZE public inputs for pairing point accumulator + // 3. NUM_WITNESS_ENTITIES commitments + // 4. CONST_PROOF_SIZE_LOG_N sumcheck univariates + // 5. NUM_ALL_ENTITIES sumcheck evaluations + // 6. CONST_PROOF_SIZE_LOG_N Gemini Fold commitments + // 7. CONST_PROOF_SIZE_LOG_N Gemini a evaluations + // 8. 
KZG W commitment + static constexpr size_t num_frs_comm = bb::field_conversion::calc_num_bn254_frs(); + static constexpr size_t num_frs_fr = bb::field_conversion::calc_num_bn254_frs(); + static constexpr size_t PROOF_LENGTH_WITHOUT_PUB_INPUTS = + HONK_PROOF_PUBLIC_INPUT_OFFSET + NUM_WITNESS_ENTITIES * num_frs_comm + + CONST_PROOF_SIZE_LOG_N * BATCHED_RELATION_PARTIAL_LENGTH * num_frs_fr + NUM_ALL_ENTITIES * num_frs_fr + + CONST_PROOF_SIZE_LOG_N * num_frs_comm + CONST_PROOF_SIZE_LOG_N * num_frs_fr + num_frs_comm; + template using ProtogalaxyTupleOfTuplesOfUnivariatesNoOptimisticSkipping = decltype(create_protogalaxy_tuple_of_tuples_of_univariates()); @@ -537,7 +553,6 @@ class UltraFlavor { * @brief A container for storing the partially evaluated multivariates produced by sumcheck. */ class PartiallyEvaluatedMultivariates : public AllEntities { - public: PartiallyEvaluatedMultivariates() = default; PartiallyEvaluatedMultivariates(const size_t circuit_size) @@ -675,7 +690,7 @@ class UltraFlavor { this->z_perm = commitments.z_perm; } } - }; + }; // namespace bb // Specialize for Ultra (general case used in UltraRecursive). using VerifierCommitments = VerifierCommitments_; @@ -683,8 +698,10 @@ class UltraFlavor { * @brief Derived class that defines proof structure for Ultra proofs, as well as supporting functions. * */ - class Transcript : public NativeTranscript { + template class Transcript_ : public BaseTranscript { public: + using Base = BaseTranscript; + // Transcript objects defined as public member variables for easy access and modification uint32_t circuit_size; uint32_t public_input_size; @@ -704,24 +721,24 @@ class UltraFlavor { std::vector gemini_fold_evals; Commitment shplonk_q_comm; Commitment kzg_w_comm; - Transcript() = default; + Transcript_() = default; // Used by verifier to initialize the transcript - Transcript(const std::vector& proof) - : NativeTranscript(proof) + Transcript_(const std::vector& proof) + : Base(proof) {} - static std::shared_ptr prover_init_empty() + static std::shared_ptr prover_init_empty() { - auto transcript = std::make_shared(); + auto transcript = std::make_shared(); constexpr uint32_t init{ 42 }; // arbitrary transcript->send_to_verifier("Init", init); return transcript; }; - static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) + static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) { - auto verifier_transcript = std::make_shared(transcript->proof_data); + auto verifier_transcript = std::make_shared(transcript->proof_data); [[maybe_unused]] auto _ = verifier_transcript->template receive_from_prover("Init"); return verifier_transcript; }; @@ -735,37 +752,40 @@ class UltraFlavor { void deserialize_full_transcript() { // take current proof and put them into the struct + auto& proof_data = this->proof_data; size_t num_frs_read = 0; - circuit_size = deserialize_from_buffer(proof_data, num_frs_read); + circuit_size = Base::template deserialize_from_buffer(proof_data, num_frs_read); - public_input_size = deserialize_from_buffer(proof_data, num_frs_read); - pub_inputs_offset = deserialize_from_buffer(proof_data, num_frs_read); + public_input_size = Base::template deserialize_from_buffer(proof_data, num_frs_read); + pub_inputs_offset = Base::template deserialize_from_buffer(proof_data, num_frs_read); for (size_t i = 0; i < public_input_size; ++i) { - public_inputs.push_back(deserialize_from_buffer(proof_data, num_frs_read)); + public_inputs.push_back(Base::template deserialize_from_buffer(proof_data, 
num_frs_read)); } - w_l_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_r_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_o_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_4_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); - z_perm_comm = deserialize_from_buffer(proof_data, num_frs_read); + w_l_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + w_r_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + w_o_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + lookup_read_counts_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + lookup_read_tags_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + w_4_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + lookup_inverses_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + z_perm_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { sumcheck_univariates.push_back( - deserialize_from_buffer>(proof_data, - num_frs_read)); + Base::template deserialize_from_buffer>( + proof_data, num_frs_read)); } - sumcheck_evaluations = deserialize_from_buffer>(proof_data, num_frs_read); + sumcheck_evaluations = + Base::template deserialize_from_buffer>(proof_data, num_frs_read); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { - gemini_fold_comms.push_back(deserialize_from_buffer(proof_data, num_frs_read)); + gemini_fold_comms.push_back( + Base::template deserialize_from_buffer(proof_data, num_frs_read)); } for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - gemini_fold_evals.push_back(deserialize_from_buffer(proof_data, num_frs_read)); + gemini_fold_evals.push_back(Base::template deserialize_from_buffer(proof_data, num_frs_read)); } - shplonk_q_comm = deserialize_from_buffer(proof_data, num_frs_read); + shplonk_q_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); - kzg_w_comm = deserialize_from_buffer(proof_data, num_frs_read); + kzg_w_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); } /** @@ -776,39 +796,42 @@ class UltraFlavor { */ void serialize_full_transcript() { + auto& proof_data = this->proof_data; size_t old_proof_length = proof_data.size(); proof_data.clear(); // clear proof_data so the rest of the function can replace it - serialize_to_buffer(circuit_size, proof_data); - serialize_to_buffer(public_input_size, proof_data); - serialize_to_buffer(pub_inputs_offset, proof_data); + Base::template serialize_to_buffer(circuit_size, proof_data); + Base::template serialize_to_buffer(public_input_size, proof_data); + Base::template serialize_to_buffer(pub_inputs_offset, proof_data); for (size_t i = 0; i < public_input_size; ++i) { - serialize_to_buffer(public_inputs[i], proof_data); + Base::template serialize_to_buffer(public_inputs[i], proof_data); } - serialize_to_buffer(w_l_comm, proof_data); - serialize_to_buffer(w_r_comm, proof_data); - serialize_to_buffer(w_o_comm, proof_data); - serialize_to_buffer(lookup_read_counts_comm, proof_data); - serialize_to_buffer(lookup_read_tags_comm, proof_data); - serialize_to_buffer(w_4_comm, proof_data); - serialize_to_buffer(lookup_inverses_comm, proof_data); 
- serialize_to_buffer(z_perm_comm, proof_data); + Base::template serialize_to_buffer(w_l_comm, proof_data); + Base::template serialize_to_buffer(w_r_comm, proof_data); + Base::template serialize_to_buffer(w_o_comm, proof_data); + Base::template serialize_to_buffer(lookup_read_counts_comm, proof_data); + Base::template serialize_to_buffer(lookup_read_tags_comm, proof_data); + Base::template serialize_to_buffer(w_4_comm, proof_data); + Base::template serialize_to_buffer(lookup_inverses_comm, proof_data); + Base::template serialize_to_buffer(z_perm_comm, proof_data); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - serialize_to_buffer(sumcheck_univariates[i], proof_data); + Base::template serialize_to_buffer(sumcheck_univariates[i], proof_data); } - serialize_to_buffer(sumcheck_evaluations, proof_data); + Base::template serialize_to_buffer(sumcheck_evaluations, proof_data); for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { - serialize_to_buffer(gemini_fold_comms[i], proof_data); + Base::template serialize_to_buffer(gemini_fold_comms[i], proof_data); } for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - serialize_to_buffer(gemini_fold_evals[i], proof_data); + Base::template serialize_to_buffer(gemini_fold_evals[i], proof_data); } - serialize_to_buffer(shplonk_q_comm, proof_data); - serialize_to_buffer(kzg_w_comm, proof_data); + Base::template serialize_to_buffer(shplonk_q_comm, proof_data); + Base::template serialize_to_buffer(kzg_w_comm, proof_data); // sanity check to make sure we generate the same length of proof as before. ASSERT(proof_data.size() == old_proof_length); } }; + + using Transcript = Transcript_; }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp index e7b47f9235af..65f9b029e84d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp @@ -34,7 +34,8 @@ class UltraKeccakFlavor : public bb::UltraFlavor { * that, and split out separate PrecomputedPolynomials/Commitments data for clarity but also for portability of our * circuits. */ - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1094): Add aggregation + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1094): Add aggregation to the verifier contract so the + // VerificationKey from UltraFlavor can be used class VerificationKey : public VerificationKey_, VerifierCommitmentKey> { public: VerificationKey() = default; @@ -159,129 +160,7 @@ class UltraKeccakFlavor : public bb::UltraFlavor { // Specialize for Ultra (general case used in UltraRecursive). using VerifierCommitments = VerifierCommitments_; - /** - * @brief Derived class that defines proof structure for Ultra proofs, as well as supporting functions. 
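The mechanical rewrites above, from `serialize_to_buffer(...)` to `Base::template serialize_to_buffer<T>(...)`, are forced by C++ name lookup rather than style: once `Transcript_` is parameterized by its base transcript, members of that base become dependent names, and an explicit template argument list needs the `template` keyword to parse at all. A stripped-down illustration (toy types, not the real transcript classes):

```cpp
#include <cstdint>
#include <vector>

template <typename Params> struct BaseTranscript {
    std::vector<uint64_t> proof_data;
    template <typename T> void serialize_to_buffer(const T& value, std::vector<uint64_t>& buf)
    {
        buf.push_back(static_cast<uint64_t>(value));
    }
};

template <typename Params> struct Transcript_ : public BaseTranscript<Params> {
    using Base = BaseTranscript<Params>;
    uint32_t circuit_size = 0;

    void serialize_header()
    {
        // Without `template`, `Base::serialize_to_buffer<uint32_t>(...)` fails to
        // parse: the `<` is read as a less-than comparison on a dependent name.
        Base::template serialize_to_buffer<uint32_t>(circuit_size, this->proof_data);
    }
};
```

The same rule explains the `this->circuit_size` and `this->proof_data` prefixes throughout this diff: unqualified names are not looked up in a dependent base class.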
- * - */ - class Transcript : public KeccakTranscript { - public: - // Transcript objects defined as public member variables for easy access and modification - uint32_t circuit_size; - uint32_t public_input_size; - uint32_t pub_inputs_offset; - std::vector public_inputs; - Commitment w_l_comm; - Commitment w_r_comm; - Commitment w_o_comm; - Commitment lookup_read_counts_comm; - Commitment lookup_read_tags_comm; - Commitment w_4_comm; - Commitment z_perm_comm; - Commitment lookup_inverses_comm; - std::vector> sumcheck_univariates; - std::array sumcheck_evaluations; - std::vector zm_cq_comms; - Commitment zm_cq_comm; - Commitment kzg_w_comm; - - Transcript() = default; - - // Used by verifier to initialize the transcript - Transcript(const std::vector& proof) - : KeccakTranscript(proof) - {} - - static std::shared_ptr prover_init_empty() - { - auto transcript = std::make_shared(); - constexpr uint32_t init{ 42 }; // arbitrary - transcript->send_to_verifier("Init", init); - return transcript; - }; - - static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) - { - auto verifier_transcript = std::make_shared(transcript->proof_data); - [[maybe_unused]] auto _ = verifier_transcript->template receive_from_prover("Init"); - return verifier_transcript; - }; - - /** - * @brief Takes a FULL Ultra proof and deserializes it into the public member variables - * that compose the structure. Must be called in order to access the structure of the - * proof. - * - */ - void deserialize_full_transcript() - { - // take current proof and put them into the struct - size_t num_frs_read = 0; - circuit_size = deserialize_from_buffer(proof_data, num_frs_read); - - public_input_size = deserialize_from_buffer(proof_data, num_frs_read); - pub_inputs_offset = deserialize_from_buffer(proof_data, num_frs_read); - for (size_t i = 0; i < public_input_size; ++i) { - public_inputs.push_back(deserialize_from_buffer(proof_data, num_frs_read)); - } - w_l_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_r_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_o_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_read_counts_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_read_tags_comm = deserialize_from_buffer(proof_data, num_frs_read); - w_4_comm = deserialize_from_buffer(proof_data, num_frs_read); - lookup_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); - z_perm_comm = deserialize_from_buffer(proof_data, num_frs_read); - for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - sumcheck_univariates.push_back( - deserialize_from_buffer>(proof_data, - num_frs_read)); - } - sumcheck_evaluations = deserialize_from_buffer>(proof_data, num_frs_read); - for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - zm_cq_comms.push_back(deserialize_from_buffer(proof_data, num_frs_read)); - } - zm_cq_comm = deserialize_from_buffer(proof_data, num_frs_read); - kzg_w_comm = deserialize_from_buffer(proof_data, num_frs_read); - } - - /** - * @brief Serializes the structure variables into a FULL Ultra proof. Should be called - * only if deserialize_full_transcript() was called and some transcript variable was - * modified. 
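All of the flavor transcripts in this diff, including the one removed above, follow the same deserialize/modify/reserialize discipline, guarded by `ASSERT(proof_data.size() == old_proof_length)`. A compact sketch of the pattern (plain `uint64_t` values stand in for field elements and commitments; `ToyTranscript` is hypothetical):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

struct ToyTranscript {
    std::vector<uint64_t> proof_data;
    uint64_t circuit_size = 0;
    std::vector<uint64_t> commitments;

    // Split the flat proof buffer into named fields.
    void deserialize_full_transcript()
    {
        size_t num_read = 0;
        circuit_size = proof_data[num_read++];
        commitments.assign(proof_data.begin() + 1, proof_data.end());
    }

    // Rebuild the buffer from the named fields, in the same fixed order.
    void serialize_full_transcript()
    {
        size_t old_proof_length = proof_data.size();
        proof_data.clear();
        proof_data.push_back(circuit_size);
        proof_data.insert(proof_data.end(), commitments.begin(), commitments.end());
        // Mirrors ASSERT(proof_data.size() == old_proof_length) in the real code:
        // a round trip must not change the proof's length.
        assert(proof_data.size() == old_proof_length);
    }
};
```

Because the field order is fixed, round-tripping an untouched proof reproduces it exactly; the length assertion is a cheap proxy for that invariant and catches missed or duplicated fields when a flavor's proof layout changes.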
- * - */ - void serialize_full_transcript() - { - size_t old_proof_length = proof_data.size(); - proof_data.clear(); // clear proof_data so the rest of the function can replace it - serialize_to_buffer(circuit_size, proof_data); - serialize_to_buffer(public_input_size, proof_data); - serialize_to_buffer(pub_inputs_offset, proof_data); - for (size_t i = 0; i < public_input_size; ++i) { - serialize_to_buffer(public_inputs[i], proof_data); - } - serialize_to_buffer(w_l_comm, proof_data); - serialize_to_buffer(w_r_comm, proof_data); - serialize_to_buffer(w_o_comm, proof_data); - serialize_to_buffer(lookup_read_counts_comm, proof_data); - serialize_to_buffer(lookup_read_tags_comm, proof_data); - serialize_to_buffer(w_4_comm, proof_data); - serialize_to_buffer(lookup_inverses_comm, proof_data); - serialize_to_buffer(z_perm_comm, proof_data); - for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - serialize_to_buffer(sumcheck_univariates[i], proof_data); - } - serialize_to_buffer(sumcheck_evaluations, proof_data); - for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - serialize_to_buffer(zm_cq_comms[i], proof_data); - } - serialize_to_buffer(zm_cq_comm, proof_data); - serialize_to_buffer(kzg_w_comm, proof_data); - - // sanity check to make sure we generate the same length of proof as before. - ASSERT(proof_data.size() == old_proof_length); - } - }; + using Transcript = UltraKeccakFlavor::Transcript_; }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak_zk_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak_zk_flavor.hpp new file mode 100644 index 000000000000..b461e64698c9 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak_zk_flavor.hpp @@ -0,0 +1,184 @@ +#pragma once + +#include "barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp" + +namespace bb { + +/*! +\brief Child class of UltraKeccakFlavor that runs with ZK Sumcheck. +\details +Most of the properties of UltraKeccakFlavor are +inherited without any changes, except for the MAX_PARTIAL_RELATION_LENGTH which is now computed as a maximum of +SUBRELATION_PARTIAL_LENGTHS incremented by the corresponding SUBRELATION_WITNESS_DEGREES over all relations included in +UltraFlavor, which also affects the size of ExtendedEdges univariate containers. +Moreover, the container SumcheckTupleOfTuplesOfUnivariates is resized to reflect that masked +witness polynomials are of degree at most \f$2\f$ in each variable, and hence, for any subrelation, the corresponding +univariate accumulator size has to be increased by the subrelation's witness degree. See more in +\ref docs/src/sumcheck-outline.md "Sumcheck Outline". +*/ +class UltraKeccakZKFlavor : public UltraKeccakFlavor { + public: + // This flavor runs with ZK Sumcheck + static constexpr bool HasZK = true; + // Determine the number of evaluations of Prover and Libra Polynomials that the Prover sends to the Verifier in + // the rounds of ZK Sumcheck. + static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = UltraKeccakFlavor::BATCHED_RELATION_PARTIAL_LENGTH + 1; + /** + * @brief Derived class that defines proof structure for Ultra zero knowledge proofs, as well as supporting + * functions.
+ * + */ + class Transcript : public UltraKeccakFlavor::Transcript { + public: + using Base = UltraKeccakFlavor::Transcript::Base; + // Note: we have a different vector of univariates because the degree for ZK flavors differs + std::vector> zk_sumcheck_univariates; + Commitment libra_concatenation_commitment; + FF libra_sum; + FF libra_claimed_evaluation; + Commitment libra_big_sum_commitment; + Commitment libra_quotient_commitment; + FF libra_concatenation_eval; + FF libra_shifted_big_sum_eval; + FF libra_big_sum_eval; + FF libra_quotient_eval; + Commitment hiding_polynomial_commitment; + FF hiding_polynomial_eval; + + Transcript() = default; + + // Used by verifier to initialize the transcript + Transcript(const std::vector& proof) + : UltraKeccakFlavor::Transcript(proof) + {} + + static std::shared_ptr prover_init_empty() + { + auto transcript = std::make_shared(); + constexpr uint32_t init{ 42 }; // arbitrary + transcript->send_to_verifier("Init", init); + return transcript; + }; + + static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) + { + auto verifier_transcript = std::make_shared(transcript->proof_data); + verifier_transcript->template receive_from_prover("Init"); + return verifier_transcript; + }; + + /** + * @brief Takes a FULL Ultra proof and deserializes it into the public member variables + * that compose the structure. Must be called in order to access the structure of the + * proof. + * + */ + void deserialize_full_transcript() + { + // take current proof and put them into the struct + size_t num_frs_read = 0; + auto& proof_data = this->proof_data; + this->circuit_size = Base::template deserialize_from_buffer(proof_data, num_frs_read); + + this->public_input_size = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->pub_inputs_offset = Base::template deserialize_from_buffer(proof_data, num_frs_read); + for (size_t i = 0; i < this->public_input_size; ++i) { + this->public_inputs.push_back(Base::template deserialize_from_buffer(proof_data, num_frs_read)); + } + this->w_l_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->w_r_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->w_o_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_read_counts_comm = + Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_read_tags_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->w_4_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_inverses_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->z_perm_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_concatenation_commitment = + Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_sum = Base::template deserialize_from_buffer(proof_data, num_frs_read); + + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { + zk_sumcheck_univariates.push_back( + Base::template deserialize_from_buffer>( + proof_data, num_frs_read)); + } + libra_claimed_evaluation = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->sumcheck_evaluations = + Base::template deserialize_from_buffer>(proof_data, num_frs_read); + libra_big_sum_commitment = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_quotient_commitment = Base::template deserialize_from_buffer(proof_data, num_frs_read); + hiding_polynomial_commitment = 
Base::template deserialize_from_buffer(proof_data, num_frs_read); + hiding_polynomial_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { + this->gemini_fold_comms.push_back( + Base::template deserialize_from_buffer(proof_data, num_frs_read)); + } + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { + this->gemini_fold_evals.push_back(Base::template deserialize_from_buffer(proof_data, num_frs_read)); + } + libra_concatenation_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_shifted_big_sum_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_big_sum_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_quotient_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->shplonk_q_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + + this->kzg_w_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + } + + /** + * @brief Serializes the structure variables into a FULL Ultra proof. Should be called + * only if deserialize_full_transcript() was called and some transcript variable was + * modified. + * + */ + void serialize_full_transcript() + { + auto& proof_data = this->proof_data; + size_t old_proof_length = proof_data.size(); + proof_data.clear(); // clear proof_data so the rest of the function can replace it + Base::template serialize_to_buffer(this->circuit_size, proof_data); + Base::template serialize_to_buffer(this->public_input_size, proof_data); + Base::template serialize_to_buffer(this->pub_inputs_offset, proof_data); + for (size_t i = 0; i < this->public_input_size; ++i) { + Base::template serialize_to_buffer(this->public_inputs[i], proof_data); + } + Base::template serialize_to_buffer(this->w_l_comm, proof_data); + Base::template serialize_to_buffer(this->w_r_comm, proof_data); + Base::template serialize_to_buffer(this->w_o_comm, proof_data); + Base::template serialize_to_buffer(this->lookup_read_counts_comm, proof_data); + Base::template serialize_to_buffer(this->lookup_read_tags_comm, proof_data); + Base::template serialize_to_buffer(this->w_4_comm, proof_data); + Base::template serialize_to_buffer(this->lookup_inverses_comm, proof_data); + Base::template serialize_to_buffer(this->z_perm_comm, proof_data); + Base::template serialize_to_buffer(libra_concatenation_commitment, proof_data); + Base::template serialize_to_buffer(libra_sum, proof_data); + + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { + Base::template serialize_to_buffer(zk_sumcheck_univariates[i], proof_data); + } + Base::template serialize_to_buffer(libra_claimed_evaluation, proof_data); + + Base::template serialize_to_buffer(this->sumcheck_evaluations, proof_data); + Base::template serialize_to_buffer(libra_big_sum_commitment, proof_data); + Base::template serialize_to_buffer(libra_quotient_commitment, proof_data); + Base::template serialize_to_buffer(hiding_polynomial_commitment, proof_data); + Base::template serialize_to_buffer(hiding_polynomial_eval, proof_data); + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { + Base::template serialize_to_buffer(this->gemini_fold_comms[i], proof_data); + } + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { + Base::template serialize_to_buffer(this->gemini_fold_evals[i], proof_data); + } + Base::template serialize_to_buffer(libra_concatenation_eval, proof_data); + Base::template serialize_to_buffer(libra_shifted_big_sum_eval, 
proof_data); + Base::template serialize_to_buffer(libra_big_sum_eval, proof_data); + Base::template serialize_to_buffer(libra_quotient_eval, proof_data); + Base::template serialize_to_buffer(this->shplonk_q_comm, proof_data); + Base::template serialize_to_buffer(this->kzg_w_comm, proof_data); + + ASSERT(proof_data.size() == old_proof_length); + } + }; +}; +} // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_rollup_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_rollup_flavor.hpp index ff3d495236b0..6376f1d3f682 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_rollup_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_rollup_flavor.hpp @@ -1,10 +1,26 @@ #pragma once +#include "barretenberg/commitment_schemes/ipa/ipa.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_flavor.hpp" namespace bb { class UltraRollupFlavor : public bb::UltraFlavor { public: + // Proof length formula: + // 1. HONK_PROOF_PUBLIC_INPUT_OFFSET are the circuit_size, num_public_inputs, pub_inputs_offset + // 2. PAIRING_POINT_ACCUMULATOR_SIZE public inputs for pairing point accumulator + // 3. IPA_CLAIM_SIZE public inputs for IPA claim + // 4. NUM_WITNESS_ENTITIES commitments + // 5. CONST_PROOF_SIZE_LOG_N sumcheck univariates + // 6. NUM_ALL_ENTITIES sumcheck evaluations + // 7. CONST_PROOF_SIZE_LOG_N Gemini Fold commitments + // 8. CONST_PROOF_SIZE_LOG_N Gemini a evaluations + // 9. KZG W commitment + static constexpr size_t num_frs_comm = bb::field_conversion::calc_num_bn254_frs(); + static constexpr size_t num_frs_fr = bb::field_conversion::calc_num_bn254_frs(); + static constexpr size_t PROOF_LENGTH_WITHOUT_PUB_INPUTS = + UltraFlavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS + IPA_PROOF_LENGTH; + using UltraFlavor::UltraFlavor; class ProvingKey : public UltraFlavor::ProvingKey { public: diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_zk_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_zk_flavor.hpp index c6ddab34dbe7..31e3c8305780 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_zk_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_zk_flavor.hpp @@ -16,15 +16,170 @@ witness polynomials are of degree at most \f$2\f$ in each variable, and hence, f univariate accumuluator size has to be increased by the subrelation's witness degree. See more in \ref docs/src/sumcheck-outline.md "Sumcheck Outline". */ -class UltraFlavorWithZK : public bb::UltraFlavor { +class UltraZKFlavor : public UltraFlavor { public: // This flavor runs with ZK Sumcheck static constexpr bool HasZK = true; // Determine the number of evaluations of Prover and Libra Polynomials that the Prover sends to the Verifier in // the rounds of ZK Sumcheck. static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = UltraFlavor::BATCHED_RELATION_PARTIAL_LENGTH + 1; - // Construct the container for the subrelations' contributions - using SumcheckTupleOfTuplesOfUnivariates = decltype(create_sumcheck_tuple_of_tuples_of_univariates()); -}; + /** + * @brief Derived class that defines proof structure for Ultra zero knowledge proofs, as well as supporting + * functions. 
+ * + */ + template class Transcript_ : public UltraFlavor::Transcript_ { + public: + using Base = UltraFlavor::Transcript_::Base; + // Note: we have a different vector of univariates because the degree for ZK flavors differs + std::vector> zk_sumcheck_univariates; + Commitment libra_concatenation_commitment; + FF libra_sum; + FF libra_claimed_evaluation; + Commitment libra_big_sum_commitment; + Commitment libra_quotient_commitment; + FF libra_concatenation_eval; + FF libra_shifted_big_sum_eval; + FF libra_big_sum_eval; + FF libra_quotient_eval; + Commitment hiding_polynomial_commitment; + FF hiding_polynomial_eval; + + Transcript_() = default; + + // Used by verifier to initialize the transcript + Transcript_(const std::vector& proof) + : UltraFlavor::Transcript_(proof) + {} + + static std::shared_ptr prover_init_empty() + { + auto transcript = std::make_shared(); + constexpr uint32_t init{ 42 }; // arbitrary + transcript->send_to_verifier("Init", init); + return transcript; + }; + + static std::shared_ptr verifier_init_empty(const std::shared_ptr& transcript) + { + auto verifier_transcript = std::make_shared(transcript->proof_data); + verifier_transcript->template receive_from_prover("Init"); + return verifier_transcript; + }; + + /** + * @brief Takes a FULL Ultra proof and deserializes it into the public member variables + * that compose the structure. Must be called in order to access the structure of the + * proof. + * + */ + void deserialize_full_transcript() + { + // take current proof and put them into the struct + size_t num_frs_read = 0; + auto& proof_data = this->proof_data; + this->circuit_size = Base::template deserialize_from_buffer(proof_data, num_frs_read); + + this->public_input_size = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->pub_inputs_offset = Base::template deserialize_from_buffer(proof_data, num_frs_read); + for (size_t i = 0; i < this->public_input_size; ++i) { + this->public_inputs.push_back(Base::template deserialize_from_buffer(proof_data, num_frs_read)); + } + this->w_l_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->w_r_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->w_o_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_read_counts_comm = + Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_read_tags_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->w_4_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->lookup_inverses_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->z_perm_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_concatenation_commitment = + Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_sum = Base::template deserialize_from_buffer(proof_data, num_frs_read); + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { + zk_sumcheck_univariates.push_back( + Base::template deserialize_from_buffer>( + proof_data, num_frs_read)); + } + libra_claimed_evaluation = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->sumcheck_evaluations = + Base::template deserialize_from_buffer>(proof_data, num_frs_read); + libra_big_sum_commitment = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_quotient_commitment = Base::template deserialize_from_buffer(proof_data, num_frs_read); + hiding_polynomial_commitment = 
Base::template deserialize_from_buffer(proof_data, num_frs_read); + hiding_polynomial_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { + this->gemini_fold_comms.push_back( + Base::template deserialize_from_buffer(proof_data, num_frs_read)); + } + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { + this->gemini_fold_evals.push_back(Base::template deserialize_from_buffer(proof_data, num_frs_read)); + } + libra_concatenation_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_shifted_big_sum_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_big_sum_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + libra_quotient_eval = Base::template deserialize_from_buffer(proof_data, num_frs_read); + this->shplonk_q_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + + this->kzg_w_comm = Base::template deserialize_from_buffer(proof_data, num_frs_read); + } + + /** + * @brief Serializes the structure variables into a FULL Ultra proof. Should be called + * only if deserialize_full_transcript() was called and some transcript variable was + * modified. + * + */ + void serialize_full_transcript() + { + auto& proof_data = this->proof_data; + size_t old_proof_length = proof_data.size(); + proof_data.clear(); // clear proof_data so the rest of the function can replace it + Base::template serialize_to_buffer(this->circuit_size, proof_data); + Base::template serialize_to_buffer(this->public_input_size, proof_data); + Base::template serialize_to_buffer(this->pub_inputs_offset, proof_data); + for (size_t i = 0; i < this->public_input_size; ++i) { + Base::template serialize_to_buffer(this->public_inputs[i], proof_data); + } + Base::template serialize_to_buffer(this->w_l_comm, proof_data); + Base::template serialize_to_buffer(this->w_r_comm, proof_data); + Base::template serialize_to_buffer(this->w_o_comm, proof_data); + Base::template serialize_to_buffer(this->lookup_read_counts_comm, proof_data); + Base::template serialize_to_buffer(this->lookup_read_tags_comm, proof_data); + Base::template serialize_to_buffer(this->w_4_comm, proof_data); + Base::template serialize_to_buffer(this->lookup_inverses_comm, proof_data); + Base::template serialize_to_buffer(this->z_perm_comm, proof_data); + Base::template serialize_to_buffer(libra_concatenation_commitment, proof_data); + Base::template serialize_to_buffer(libra_sum, proof_data); + + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { + Base::template serialize_to_buffer(zk_sumcheck_univariates[i], proof_data); + } + Base::template serialize_to_buffer(libra_claimed_evaluation, proof_data); + + Base::template serialize_to_buffer(this->sumcheck_evaluations, proof_data); + Base::template serialize_to_buffer(libra_big_sum_commitment, proof_data); + Base::template serialize_to_buffer(libra_quotient_commitment, proof_data); + Base::template serialize_to_buffer(hiding_polynomial_commitment, proof_data); + Base::template serialize_to_buffer(hiding_polynomial_eval, proof_data); + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { + Base::template serialize_to_buffer(this->gemini_fold_comms[i], proof_data); + } + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { + Base::template serialize_to_buffer(this->gemini_fold_evals[i], proof_data); + } + Base::template serialize_to_buffer(libra_concatenation_eval, proof_data); + Base::template serialize_to_buffer(libra_shifted_big_sum_eval, 
proof_data); + Base::template serialize_to_buffer(libra_big_sum_eval, proof_data); + Base::template serialize_to_buffer(libra_quotient_eval, proof_data); + Base::template serialize_to_buffer(this->shplonk_q_comm, proof_data); + Base::template serialize_to_buffer(this->kzg_w_comm, proof_data); + + ASSERT(proof_data.size() == old_proof_length); + } + }; + using Transcript = Transcript_; +}; } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.hpp index b4645bf0d0a6..afd1cb3a6630 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.hpp @@ -154,6 +154,7 @@ template class SumcheckProver { // Define the length of Libra Univariates. For non-ZK Flavors: set to 0. static constexpr size_t LIBRA_UNIVARIATES_LENGTH = Flavor::HasZK ? Flavor::BATCHED_RELATION_PARTIAL_LENGTH : 0; using LibraUnivariates = std::vector>; + using ZKData = ZKSumcheckData; std::shared_ptr transcript; SumcheckProverRound round; @@ -177,8 +178,8 @@ template class SumcheckProver { , partially_evaluated_polynomials(multivariate_n){}; /** - * @brief Compute round univariate, place it in transcript, compute challenge, partially evaluate. Repeat - * until final round, then get full evaluations of prover polynomials, and place them in transcript. + * @brief Non-ZK version: Compute round univariate, place it in transcript, compute challenge, partially evaluate. + * Repeat until final round, then get full evaluations of prover polynomials, and place them in transcript. * @details See Detailed description of \ref bb::SumcheckProver< Flavor > "Sumcheck Prover . * @param full_polynomials Container for ProverPolynomials * @param relation_parameters @@ -186,11 +187,89 @@ template class SumcheckProver { * @param gate_challenges * @return SumcheckOutput */ + SumcheckOutput prove(ProverPolynomials& full_polynomials, + const bb::RelationParameters& relation_parameters, + const RelationSeparator alpha, + const std::vector& gate_challenges) + { + + bb::GateSeparatorPolynomial gate_separators(gate_challenges, multivariate_d); + + std::vector multivariate_challenge; + multivariate_challenge.reserve(multivariate_d); + // In the first round, we compute the first univariate polynomial and populate the book-keeping table of + // #partially_evaluated_polynomials, which has \f$ n/2 \f$ rows and \f$ N \f$ columns. When the Flavor has ZK, + // compute_univariate also takes into account the zk_sumcheck_data. + auto round_univariate = round.compute_univariate(full_polynomials, relation_parameters, gate_separators, alpha); + vinfo("starting sumcheck rounds..."); + { + + PROFILE_THIS_NAME("rest of sumcheck round 1"); + + // Place the evaluations of the round univariate into transcript. + transcript->send_to_verifier("Sumcheck:univariate_0", round_univariate); + FF round_challenge = transcript->template get_challenge("Sumcheck:u_0"); + multivariate_challenge.emplace_back(round_challenge); + // Prepare sumcheck book-keeping table for the next round + partially_evaluate(full_polynomials, multivariate_n, round_challenge); + gate_separators.partially_evaluate(round_challenge); + round.round_size = round.round_size >> 1; // TODO(#224)(Cody): Maybe partially_evaluate should do this and + // release memory? // All but final round + // We operate on partially_evaluated_polynomials in place. 
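+ // For orientation, a sketch of the fold that partially_evaluate performs (standard sumcheck
+ // book-keeping): for each polynomial P and each pair of adjacent rows,
+ //     P'(x_1, ..., x_{d-1}) = P(0, x_1, ..., x_{d-1}) + u_0 * (P(1, x_1, ...) - P(0, x_1, ...)),
+ // which halves the table; this is why round.round_size is shifted right by one below.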
+ } + for (size_t round_idx = 1; round_idx < multivariate_d; round_idx++) { + + PROFILE_THIS_NAME("sumcheck loop"); + + // Write the round univariate to the transcript + round_univariate = + round.compute_univariate(partially_evaluated_polynomials, relation_parameters, gate_separators, alpha); + // Place evaluations of Sumcheck Round Univariate in the transcript + transcript->send_to_verifier("Sumcheck:univariate_" + std::to_string(round_idx), round_univariate); + FF round_challenge = transcript->template get_challenge("Sumcheck:u_" + std::to_string(round_idx)); + multivariate_challenge.emplace_back(round_challenge); + // Prepare sumcheck book-keeping table for the next round + partially_evaluate(partially_evaluated_polynomials, round.round_size, round_challenge); + gate_separators.partially_evaluate(round_challenge); + round.round_size = round.round_size >> 1; + } + vinfo("completed ", multivariate_d, " rounds of sumcheck"); + + // Zero univariates are used to pad the proof to the fixed size CONST_PROOF_SIZE_LOG_N. + auto zero_univariate = bb::Univariate::zero(); + for (size_t idx = multivariate_d; idx < CONST_PROOF_SIZE_LOG_N; idx++) { + transcript->send_to_verifier("Sumcheck:univariate_" + std::to_string(idx), zero_univariate); + FF round_challenge = transcript->template get_challenge("Sumcheck:u_" + std::to_string(idx)); + multivariate_challenge.emplace_back(round_challenge); + } + // Claimed evaluations of Prover polynomials are extracted and added to the transcript. When Flavor has ZK, the + // evaluations of all witnesses are masked. + ClaimedEvaluations multivariate_evaluations; + multivariate_evaluations = extract_claimed_evaluations(partially_evaluated_polynomials); + transcript->send_to_verifier("Sumcheck:evaluations", multivariate_evaluations.get_all()); + + vinfo("finished sumcheck"); + return SumcheckOutput{ multivariate_challenge, multivariate_evaluations }; + }; + + /** + * @brief ZK-version of `prove` that runs Sumcheck with disabled rows and masking of Round Univariates. + * The masking is ensured by adding random Libra univariates to the Sumcheck round univariates. + * + * @param full_polynomials + * @param relation_parameters + * @param alpha + * @param gate_challenges + * @param zk_sumcheck_data + * @return SumcheckOutput + */ SumcheckOutput prove(ProverPolynomials& full_polynomials, const bb::RelationParameters& relation_parameters, const RelationSeparator alpha, const std::vector& gate_challenges, - ZKSumcheckData zk_sumcheck_data = ZKSumcheckData()) + ZKData& zk_sumcheck_data) + requires FlavorHasZK { bb::GateSeparatorPolynomial gate_separators(gate_challenges, multivariate_d); @@ -221,10 +300,8 @@ template class SumcheckProver { // Prepare sumcheck book-keeping table for the next round partially_evaluate(full_polynomials, multivariate_n, round_challenge); // Prepare ZK Sumcheck data for the next round - if constexpr (Flavor::HasZK) { - update_zk_sumcheck_data(zk_sumcheck_data, round_challenge, round_idx); - row_disabling_polynomial.update_evaluations(round_challenge, round_idx); - }; + update_zk_sumcheck_data(zk_sumcheck_data, round_challenge, round_idx); + row_disabling_polynomial.update_evaluations(round_challenge, round_idx); gate_separators.partially_evaluate(round_challenge); round.round_size = round.round_size >> 1; // TODO(#224)(Cody): Maybe partially_evaluate should do this and // release memory? 
// All but final round @@ -249,10 +326,8 @@ template class SumcheckProver { // Prepare sumcheck book-keeping table for the next round partially_evaluate(partially_evaluated_polynomials, round.round_size, round_challenge); // Prepare evaluation masking and libra structures for the next round (for ZK Flavors) - if constexpr (Flavor::HasZK) { - update_zk_sumcheck_data(zk_sumcheck_data, round_challenge, round_idx); - row_disabling_polynomial.update_evaluations(round_challenge, round_idx); - }; + update_zk_sumcheck_data(zk_sumcheck_data, round_challenge, round_idx); + row_disabling_polynomial.update_evaluations(round_challenge, round_idx); gate_separators.partially_evaluate(round_challenge); round.round_size = round.round_size >> 1; @@ -268,27 +343,23 @@ template class SumcheckProver { } // The evaluations of the Libra univariates, \f$ g_0(u_0), \ldots, g_{d-1} (u_{d-1}) \f$, are added to the // transcript. - if constexpr (Flavor::HasZK) { - for (size_t idx = 0; idx < multivariate_d; idx++) { - const FF& libra_evaluation = zk_sumcheck_data.libra_evaluations[idx]; - std::string libra_evaluation_label = "Libra:evaluation_" + std::to_string(idx); - transcript->send_to_verifier(libra_evaluation_label, libra_evaluation); - } - }; + FF libra_evaluation{ 0 }; + + for (auto& libra_eval : zk_sumcheck_data.libra_evaluations) { + libra_evaluation += libra_eval; + } + libra_evaluation += zk_sumcheck_data.constant_term; + std::string libra_evaluation_label = "Libra:claimed_evaluation"; + transcript->send_to_verifier(libra_evaluation_label, libra_evaluation); // Claimed evaluations of Prover polynomials are extracted and added to the transcript. When Flavor has ZK, the // evaluations of all witnesses are masked. ClaimedEvaluations multivariate_evaluations; multivariate_evaluations = extract_claimed_evaluations(partially_evaluated_polynomials); transcript->send_to_verifier("Sumcheck:evaluations", multivariate_evaluations.get_all()); - // For ZK Flavors: the evaluations of Libra univariates are included in the Sumcheck Output - if constexpr (!Flavor::HasZK) { - return SumcheckOutput{ multivariate_challenge, multivariate_evaluations }; - } else { - return SumcheckOutput{ multivariate_challenge, - multivariate_evaluations, - zk_sumcheck_data.libra_evaluations }; - } + // The sum of the Libra constant term and the evaluations of Libra univariates at corresponding sumcheck + // challenges is included in the Sumcheck Output + vinfo("finished sumcheck"); + return SumcheckOutput{ multivariate_challenge, multivariate_evaluations, libra_evaluation }; }; @@ -400,23 +471,24 @@ polynomials that are sent in clear. 
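 * For orientation, up to the per-round halving of the stored univariates, each call performs
 *     libra_running_sum <- (libra_running_sum - g_{i+1}(0) - g_{i+1}(1)) / 2 + g_i(u_i),
 * i.e. it removes the next univariate's contribution over the hypercube and folds in the evaluation
 * at the fresh challenge u_i (a reading of the body below).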
* @param libra_running_sum * @param libra_evaluations */ - void update_zk_sumcheck_data(ZKSumcheckData& zk_sumcheck_data, const FF round_challenge, size_t round_idx) + void update_zk_sumcheck_data(ZKData& zk_sumcheck_data, const FF round_challenge, size_t round_idx) { + static constexpr FF two_inv = FF(1) / FF(2); // when round_idx = d - 1, the update is not needed if (round_idx < zk_sumcheck_data.libra_univariates.size() - 1) { for (auto& univariate : zk_sumcheck_data.libra_univariates) { - univariate *= FF(1) / FF(2); + univariate *= two_inv; }; // compute the evaluation \f$ \rho \cdot 2^{d-2-i} \cdot g_i(u_i) \f$ auto libra_evaluation = zk_sumcheck_data.libra_univariates[round_idx].evaluate(round_challenge); auto next_libra_univariate = zk_sumcheck_data.libra_univariates[round_idx + 1]; // update the running sum by adding g_i(u_i) and subtracting (g_i(0) + g_i(1)) zk_sumcheck_data.libra_running_sum += - -next_libra_univariate.value_at(0) - next_libra_univariate.value_at(1); - zk_sumcheck_data.libra_running_sum *= FF(1) / FF(2); + -next_libra_univariate.evaluate(FF(0)) - next_libra_univariate.evaluate(FF(1)); + zk_sumcheck_data.libra_running_sum *= two_inv; zk_sumcheck_data.libra_running_sum += libra_evaluation; - zk_sumcheck_data.libra_scaling_factor *= FF(1) / FF(2); + zk_sumcheck_data.libra_scaling_factor *= two_inv; zk_sumcheck_data.libra_evaluations.emplace_back(libra_evaluation / zk_sumcheck_data.libra_scaling_factor); } else { @@ -583,14 +655,11 @@ template class SumcheckVerifier { } } // Extract claimed evaluations of Libra univariates and compute their sum multiplied by the Libra challenge - ClaimedLibraEvaluations libra_evaluations(multivariate_d); + FF libra_evaluation{ 0 }; FF full_libra_purported_value = FF(0); if constexpr (Flavor::HasZK) { - for (size_t idx = 0; idx < multivariate_d; idx++) { - libra_evaluations[idx] = - transcript->template receive_from_prover("Libra:evaluation_" + std::to_string(idx)); - full_libra_purported_value += libra_evaluations[idx]; - }; + libra_evaluation = transcript->template receive_from_prover("Libra:claimed_evaluation"); + full_libra_purported_value += libra_evaluation; full_libra_purported_value *= libra_challenge; }; // Final round @@ -627,7 +696,7 @@ template class SumcheckVerifier { if constexpr (!Flavor::HasZK) { return SumcheckOutput{ multivariate_challenge, purported_evaluations, verified }; } else { - return SumcheckOutput{ multivariate_challenge, purported_evaluations, libra_evaluations, verified }; + return SumcheckOutput{ multivariate_challenge, purported_evaluations, libra_evaluation, verified }; } }; }; diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.test.cpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.test.cpp index 2ceab9efb918..0cf586f47eb6 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.test.cpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.test.cpp @@ -21,6 +21,8 @@ template class SumcheckTests : public ::testing::Test { using FF = typename Flavor::FF; using ProverPolynomials = typename Flavor::ProverPolynomials; using RelationSeparator = Flavor::RelationSeparator; + using ZKData = ZKSumcheckData; + const size_t NUM_POLYNOMIALS = Flavor::NUM_ALL_ENTITIES; static void SetUpTestSuite() { bb::srs::init_crs_factory(bb::srs::get_ignition_crs_path()); } @@ -118,6 +120,7 @@ template class SumcheckTests : public ::testing::Test { void test_prover() { + const size_t multivariate_d(2); const size_t multivariate_n(1 << multivariate_d); @@ -146,7 +149,7 @@ template class 
SumcheckTests : public ::testing::Test { SumcheckOutput output; if constexpr (Flavor::HasZK) { - ZKSumcheckData zk_sumcheck_data(multivariate_d, transcript); + ZKData zk_sumcheck_data = ZKData(multivariate_d, transcript); output = sumcheck.prove(full_polynomials, {}, alpha, gate_challenges, zk_sumcheck_data); } else { output = sumcheck.prove(full_polynomials, {}, alpha, gate_challenges); @@ -252,7 +255,7 @@ template class SumcheckTests : public ::testing::Test { } SumcheckOutput output; if constexpr (Flavor::HasZK) { - ZKSumcheckData zk_sumcheck_data(multivariate_d, prover_transcript); + ZKData zk_sumcheck_data = ZKData(multivariate_d, prover_transcript); output = sumcheck_prover.prove( full_polynomials, relation_parameters, prover_alpha, prover_gate_challenges, zk_sumcheck_data); } else { @@ -342,7 +345,7 @@ template class SumcheckTests : public ::testing::Test { SumcheckOutput output; if constexpr (Flavor::HasZK) { // construct libra masking polynomials and compute auxiliary data - ZKSumcheckData zk_sumcheck_data(multivariate_d, prover_transcript); + ZKData zk_sumcheck_data = ZKData(multivariate_d, prover_transcript); output = sumcheck_prover.prove( full_polynomials, relation_parameters, prover_alpha, prover_gate_challenges, zk_sumcheck_data); } else { @@ -371,19 +374,18 @@ template class SumcheckTests : public ::testing::Test { }; // Define the FlavorTypes -using FlavorTypes = testing::Types; +using FlavorTypes = + testing::Types; TYPED_TEST_SUITE(SumcheckTests, FlavorTypes); -#define SKIP_IF_ZK() \ - if (std::is_same::value || std::is_same::value) { \ - GTEST_SKIP() << "Skipping test for ZK-enabled flavors"; \ - } - TYPED_TEST(SumcheckTests, PolynomialNormalization) { - SKIP_IF_ZK(); - this->test_polynomial_normalization(); + if constexpr (std::is_same_v) { + this->test_polynomial_normalization(); + } else { + GTEST_SKIP() << "Skipping test for ZK-enabled flavors"; + } } // Test the prover TYPED_TEST(SumcheckTests, Prover) @@ -400,4 +402,4 @@ TYPED_TEST(SumcheckTests, ProverAndVerifierSimpleFailure) { this->test_failure_prover_verifier_flow(); } -} // namespace +} // namespace \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_output.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_output.hpp index a18353446a24..ea0c014a6153 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_output.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_output.hpp @@ -35,8 +35,8 @@ template struct SumcheckOutput challenge; // Evaluations at \f$ \vec u \f$ of the polynomials used in Sumcheck ClaimedEvaluations claimed_evaluations; - // Include ClaimedLibraEvaluations conditioned on FlavorHasZK concept - std::vector claimed_libra_evaluations; + // For ZK Flavors: the sum of the Libra constant term and Libra univariates evaluated at Sumcheck challenges + FF claimed_libra_evaluation; // Whether or not the evaluations of multilinear polynomials \f$ P_1, \ldots, P_N \f$ and final Sumcheck evaluation // have been confirmed std::optional verified = false; // Optional b/c this struct is shared by the Prover/Verifier diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp index dc42333a19a3..092ba3ca9415 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp @@ -43,6 +43,7 @@ template class SumcheckProverRound { using ExtendedEdges = std::conditional_t, typename 
Flavor::ExtendedEdges>; + using ZKData = ZKSumcheckData; /** * @brief In Round \f$i = 0,\ldots, d-1\f$, equals \f$2^{d-i}\f$. */ @@ -65,6 +66,9 @@ template class SumcheckProverRound { static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = Flavor::BATCHED_RELATION_PARTIAL_LENGTH; using SumcheckRoundUnivariate = bb::Univariate; SumcheckTupleOfTuplesOfUnivariates univariate_accumulators; + + static constexpr size_t LIBRA_UNIVARIATES_LENGTH = + (std::is_same_v) ? BATCHED_RELATION_PARTIAL_LENGTH : 3; // Prover constructor SumcheckProverRound(size_t initial_round_size) : round_size(initial_round_size) @@ -120,8 +124,8 @@ template class SumcheckProverRound { } /** - * @brief Return the evaluations of the univariate round polynomials \f$ \tilde{S}_{i} (X_{i}) \f$ at \f$ X_{i } = - 0,\ldots, D \f$. Most likely, \f$ D \f$ is around \f$ 12 \f$. At the + * @brief Non-ZK version: Return the evaluations of the univariate round polynomials \f$ \tilde{S}_{i} (X_{i}) \f$ + at \f$ X_{i } = 0,\ldots, D \f$. Most likely, \f$ D \f$ is around \f$ 12 \f$. At the * end, reset all * univariate accumulators to be zero. * @details First, the vector of \ref pow_challenges "pow challenges" is computed. @@ -142,14 +146,68 @@ template class SumcheckProverRound { method \ref extend_and_batch_univariates "extend and batch univariates". */ template - SumcheckRoundUnivariate compute_univariate( - const size_t round_idx, - ProverPolynomialsOrPartiallyEvaluatedMultivariates& polynomials, - const bb::RelationParameters& relation_parameters, - const bb::GateSeparatorPolynomial& gate_sparators, - const RelationSeparator alpha, - ZKSumcheckData zk_sumcheck_data, // only populated when Flavor HasZK - RowDisablingPolynomial row_disabling_poly) + SumcheckRoundUnivariate compute_univariate(ProverPolynomialsOrPartiallyEvaluatedMultivariates& polynomials, + const bb::RelationParameters& relation_parameters, + const bb::GateSeparatorPolynomial& gate_sparators, + const RelationSeparator alpha) + { + PROFILE_THIS_NAME("compute_univariate"); + + // Determine number of threads for multithreading. + // Note: Multithreading is "on" for every round but we reduce the number of threads from the max available based + // on a specified minimum number of iterations per thread. This eventually leads to the use of a single thread. + // For now we use a power of 2 number of threads simply to ensure the round size is evenly divided. 
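+ // Worked example (illustrative; assuming calculate_num_threads_pow2 picks the largest power-of-two
+ // thread count t with round_size / t >= min_iterations_per_thread): round_size = 1 << 10 with the
+ // 1 << 6 minimum below gives at most t = 16 threads, i.e. 64 edge iterations per thread.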
+ size_t min_iterations_per_thread = 1 << 6; // min number of iterations for which we'll spin up a unique thread + size_t num_threads = bb::calculate_num_threads_pow2(round_size, min_iterations_per_thread); + size_t iterations_per_thread = round_size / num_threads; // actual iterations per thread + + // Construct univariate accumulator containers; one per thread + std::vector thread_univariate_accumulators(num_threads); + + // Accumulate the contribution from each sub-relation across each edge of the hyper-cube + parallel_for(num_threads, [&](size_t thread_idx) { + // Initialize the thread accumulator to 0 + Utils::zero_univariates(thread_univariate_accumulators[thread_idx]); + // Construct extended univariates containers; one per thread + ExtendedEdges extended_edges; + size_t start = thread_idx * iterations_per_thread; + size_t end = (thread_idx + 1) * iterations_per_thread; + for (size_t edge_idx = start; edge_idx < end; edge_idx += 2) { + extend_edges(extended_edges, polynomials, edge_idx); + // Compute the \f$ \ell \f$-th edge's univariate contribution, + // scale it by the corresponding \f$ pow_{\beta} \f$ contribution and add it to the accumulators for \f$ + // \tilde{S}^i(X_i) \f$. If \f$ \ell \f$'s binary representation is given by \f$ (\ell_{i+1},\ldots, + // \ell_{d-1})\f$, the \f$ pow_{\beta}\f$-contribution is \f$\beta_{i+1}^{\ell_{i+1}} \cdot \ldots \cdot + // \beta_{d-1}^{\ell_{d-1}}\f$. + accumulate_relation_univariates(thread_univariate_accumulators[thread_idx], + extended_edges, + relation_parameters, + gate_sparators[(edge_idx >> 1) * gate_sparators.periodicity]); + } + }); + + // Accumulate the per-thread univariate accumulators into a single set of accumulators + for (auto& accumulators : thread_univariate_accumulators) { + Utils::add_nested_tuples(univariate_accumulators, accumulators); + } + + // Batch the univariate contributions from each sub-relation to obtain the round univariate + return batch_over_relations(univariate_accumulators, alpha, gate_sparators); + } + + /** + * @brief ZK-version of `compute_univariate` that runs Sumcheck with disabled rows and masking of Round Univariates. + * The masking is ensured by adding random Libra univariates to the Sumcheck round univariates. 
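+ * In symbols, matching the body below, the returned univariate is
+ *     S_i(X) + L_i(X) - D_i(X),
+ * where S_i is the batched relation univariate, L_i is the Libra round univariate, and D_i is the
+ * contribution of the disabled rows.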
+ * + */ + template + SumcheckRoundUnivariate compute_univariate(const size_t round_idx, + ProverPolynomialsOrPartiallyEvaluatedMultivariates& polynomials, + const bb::RelationParameters& relation_parameters, + const bb::GateSeparatorPolynomial& gate_sparators, + const RelationSeparator alpha, + const ZKData& zk_sumcheck_data, // only populated when Flavor HasZK + RowDisablingPolynomial row_disabling_poly) { PROFILE_THIS_NAME("compute_univariate"); @@ -192,21 +250,15 @@ template class SumcheckProverRound { } // For ZK Flavors: The evaluations of the round univariates are masked by the evaluations of Libra univariates // and corrected by subtracting the contribution from the disabled rows - if constexpr (Flavor::HasZK) { - const auto contribution_from_disabled_rows = compute_disabled_contribution( - polynomials, relation_parameters, gate_sparators, alpha, round_idx, row_disabling_poly); - const auto libra_round_univariate = compute_libra_round_univariate(zk_sumcheck_data, round_idx); - // Batch the univariate contributions from each sub-relation to obtain the round univariate - const auto round_univariate = - batch_over_relations(univariate_accumulators, alpha, gate_sparators); - // Mask the round univariate - return round_univariate + libra_round_univariate - contribution_from_disabled_rows; - } + const auto contribution_from_disabled_rows = compute_disabled_contribution( + polynomials, relation_parameters, gate_sparators, alpha, round_idx, row_disabling_poly); + const auto libra_round_univariate = compute_libra_round_univariate(zk_sumcheck_data, round_idx); // Batch the univariate contributions from each sub-relation to obtain the round univariate - else { - return batch_over_relations(univariate_accumulators, alpha, gate_sparators); - } - } + const auto round_univariate = + batch_over_relations(univariate_accumulators, alpha, gate_sparators); + // Mask the round univariate + return round_univariate + libra_round_univariate - contribution_from_disabled_rows; + }; /*! 
* @brief For ZK Flavors: A method disabling the last 4 rows of the ProverPolynomials @@ -336,18 +388,22 @@ template class SumcheckProverRound { * @param zk_sumcheck_data * @param round_idx */ - static SumcheckRoundUnivariate compute_libra_round_univariate(ZKSumcheckData zk_sumcheck_data, - size_t round_idx) + static SumcheckRoundUnivariate compute_libra_round_univariate(const ZKData& zk_sumcheck_data, size_t round_idx) { - SumcheckRoundUnivariate libra_round_univariate; + bb::Univariate libra_round_univariate; // select the i'th column of Libra book-keeping table const auto& current_column = zk_sumcheck_data.libra_univariates[round_idx]; // the evaluations of the Libra round univariate at k = 0, ..., D are equal to \f$\texttt{libra_univariates}_{i}(k)\f$ // corrected by the Libra running sum - for (size_t idx = 0; idx < BATCHED_RELATION_PARTIAL_LENGTH; ++idx) { - libra_round_univariate.value_at(idx) = current_column.value_at(idx) + zk_sumcheck_data.libra_running_sum; + for (size_t idx = 0; idx < LIBRA_UNIVARIATES_LENGTH; ++idx) { + libra_round_univariate.value_at(idx) = + current_column.evaluate(FF(idx)) + zk_sumcheck_data.libra_running_sum; }; - return libra_round_univariate; + if constexpr (BATCHED_RELATION_PARTIAL_LENGTH == LIBRA_UNIVARIATES_LENGTH) { + return libra_round_univariate; + } else { + return libra_round_univariate.template extend_to(); + } } private: diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/zk_sumcheck_data.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/zk_sumcheck_data.hpp index efe7f8d84be8..d35f188d1a7b 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/zk_sumcheck_data.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/zk_sumcheck_data.hpp @@ -1,9 +1,10 @@ #pragma once +#include "barretenberg/constants.hpp" +#include "barretenberg/ecc/curves/bn254/bn254.hpp" #include "barretenberg/polynomials/polynomial.hpp" #include "barretenberg/polynomials/univariate.hpp" #include -#include #include namespace bb { @@ -13,31 +14,34 @@ namespace bb { * */ template struct ZKSumcheckData { - using FF = typename Flavor::FF; - /** - * @brief The total algebraic degree of the Sumcheck relation \f$ F \f$ as a polynomial in Prover Polynomials - * \f$P_1,\ldots, P_N\f$. - */ - static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = Flavor::MAX_PARTIAL_RELATION_LENGTH; + using Curve = typename Flavor::Curve; + using FF = typename Curve::ScalarField; + + static constexpr size_t SUBGROUP_SIZE = Curve::SUBGROUP_SIZE; + + static constexpr FF subgroup_generator = Curve::subgroup_generator; - /** - * @brief The total algebraic degree of the Sumcheck relation \f$ F \f$ as a polynomial in Prover Polynomials - * \f$P_1,\ldots, P_N\f$ incremented by 1, i.e. it is equal \ref MAX_PARTIAL_RELATION_LENGTH - * "MAX_PARTIAL_RELATION_LENGTH + 1". - */ - static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = Flavor::BATCHED_RELATION_PARTIAL_LENGTH; - // The size of the LibraUnivariates. We ensure that they do not take extra space when Flavor runs non-ZK Sumcheck. // The size of the LibraUnivariates. We ensure that they do not take extra space when Flavor runs non-ZK Sumcheck. - static constexpr size_t LIBRA_UNIVARIATES_LENGTH = Flavor::HasZK ? Flavor::BATCHED_RELATION_PARTIAL_LENGTH : 0; - // Container for the Libra Univariates. Their number depends on the size of the circuit. - using LibraUnivariates = std::vector>; + static constexpr size_t LIBRA_UNIVARIATES_LENGTH = (std::is_same_v) ? 
9 : 3; + + static constexpr FF one_half = FF(1) / FF(2); + // Container for the evaluations of Libra Univariates that have to be proven. using ClaimedLibraEvaluations = std::vector; - LibraUnivariates libra_univariates; - LibraUnivariates libra_univariates_monomial; + FF constant_term; + + EvaluationDomain bn_evaluation_domain = EvaluationDomain(); + std::array interpolation_domain; + // to compute product in lagrange basis + Polynomial libra_concatenated_lagrange_form; + Polynomial libra_concatenated_monomial_form; + + std::vector> libra_univariates{}; + size_t log_circuit_size{ 0 }; FF libra_scaling_factor{ 1 }; FF libra_challenge; + FF libra_total_sum; FF libra_running_sum; ClaimedLibraEvaluations libra_evaluations; @@ -48,23 +52,24 @@ template struct ZKSumcheckData { ZKSumcheckData(const size_t multivariate_d, std::shared_ptr transcript, std::shared_ptr commitment_key = nullptr) - : libra_univariates(generate_libra_univariates(multivariate_d)) // Created in Lagrange basis for Sumcheck - , libra_univariates_monomial(transform_to_monomial(libra_univariates)) // Required for commiting and by Shplonk + : constant_term(FF::random_element()) + , libra_concatenated_monomial_form(SUBGROUP_SIZE + 2) // includes masking + , libra_univariates(generate_libra_univariates(multivariate_d)) // random univariates of degree 2 + , log_circuit_size(multivariate_d) { + create_interpolation_domain(); + + compute_concatenated_libra_polynomial(); - // If proving_key is provided, commit to libra_univariates + // If a commitment_key is provided, commit to the concatenated and masked libra polynomial if (commitment_key != nullptr) { - size_t idx = 0; - for (auto& libra_univariate_monomial : libra_univariates_monomial) { - auto libra_commitment = commitment_key->commit(Polynomial(libra_univariate_monomial)); - transcript->template send_to_verifier("Libra:commitment_" + std::to_string(idx), libra_commitment); - idx++; - } + auto libra_commitment = commitment_key->commit(libra_concatenated_monomial_form); + transcript->template send_to_verifier("Libra:concatenation_commitment", libra_commitment); } // Compute the total sum of the Libra polynomials libra_scaling_factor = FF(1); - FF libra_total_sum = compute_libra_total_sum(libra_univariates, libra_scaling_factor); + libra_total_sum = compute_libra_total_sum(libra_univariates, libra_scaling_factor, constant_term); // Send the Libra total sum to the transcript transcript->send_to_verifier("Libra:Sum", libra_total_sum); @@ -85,50 +90,16 @@ template struct ZKSumcheckData { * independent uniformly random coefficients. 
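 * Concretely, the masking multivariate is G(X_0, ..., X_{d-1}) = constant_term + \sum_{i=0}^{d-1} g_i(X_i),
 * where each g_i is one of the generated univariates with LIBRA_UNIVARIATES_LENGTH independent uniformly
 * random coefficients.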
* */ - static LibraUnivariates generate_libra_univariates(const size_t number_of_polynomials) + static std::vector> generate_libra_univariates(const size_t number_of_polynomials) { - LibraUnivariates libra_full_polynomials(number_of_polynomials); + std::vector> libra_full_polynomials(number_of_polynomials); for (auto& libra_polynomial : libra_full_polynomials) { - libra_polynomial = bb::Univariate::get_random(); + libra_polynomial = Polynomial::random(LIBRA_UNIVARIATES_LENGTH); }; return libra_full_polynomials; }; - /** - * @brief Transform Libra univariates from Lagrange to monomial form - * - * @param libra_full_polynomials - * @return LibraUnivariates - */ - static LibraUnivariates transform_to_monomial(LibraUnivariates& libra_full_polynomials) - { - std::array interpolation_domain; - LibraUnivariates libra_univariates_monomial; - libra_univariates_monomial.reserve(libra_full_polynomials.size()); - - for (size_t idx = 0; idx < LIBRA_UNIVARIATES_LENGTH; idx++) { - interpolation_domain[idx] = FF(idx); - } - - for (auto& libra_polynomial : libra_full_polynomials) { - - // Use the efficient Lagrange interpolation - Polynomial libra_polynomial_monomial(std::span(interpolation_domain), - std::span(libra_polynomial.evaluations), - LIBRA_UNIVARIATES_LENGTH); - - // To avoid storing Polynomials (coefficients are vectors), we define a univariate with the coefficients - // interpolated above - bb::Univariate libra_univariate; - for (size_t idx = 0; idx < LIBRA_UNIVARIATES_LENGTH; idx++) { - libra_univariate.value_at(idx) = libra_polynomial_monomial[idx]; - } - libra_univariates_monomial.push_back(libra_univariate); - }; - return libra_univariates_monomial; - }; - /** * @brief Compute the sum of the randomly sampled multivariate polynomial \f$ G = \sum_{i=0}^{n-1} g_i(X_i) \f$ over * the Boolean hypercube. 
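 * For d univariates this sum works out to 2^{d-1} * \sum_i (g_i(0) + g_i(1)) + 2^d * constant_term,
 * since each g_i(X_i) is summed over the 2^{d-1} assignments of the remaining variables and the
 * constant term over all 2^d points (cf. compute_libra_total_sum below).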
@@ -137,18 +108,20 @@ template struct ZKSumcheckData { * @param scaling_factor * @return FF */ - static FF compute_libra_total_sum(const LibraUnivariates& libra_univariates, FF& scaling_factor) + static FF compute_libra_total_sum(const std::vector>& libra_univariates, + FF& scaling_factor, + const FF& constant_term) { FF total_sum = 0; - scaling_factor = scaling_factor / 2; + scaling_factor *= one_half; for (auto& univariate : libra_univariates) { - total_sum += univariate.value_at(0) + univariate.value_at(1); + total_sum += univariate.evaluate(FF(0)) + univariate.evaluate(FF(1)); scaling_factor *= 2; } total_sum *= scaling_factor; - return total_sum; + return total_sum + constant_term * (1 << libra_univariates.size()); } /** @@ -173,8 +146,74 @@ template struct ZKSumcheckData { univariate *= libra_scaling_factor; }; // subtract the contribution of the first libra univariate from libra total sum - libra_running_sum += -libra_univariates[0].value_at(0) - libra_univariates[0].value_at(1); - libra_running_sum *= FF(1) / FF(2); + libra_running_sum += -libra_univariates[0].evaluate(FF(0)) - libra_univariates[0].evaluate(FF(1)); + libra_running_sum *= one_half; + } + + /** + * @brief Create an interpolation domain object and initialize the evaluation domain in the case of BN254 scalar + * field + * + */ + void create_interpolation_domain() + { + if constexpr (std::is_same_v) { + bn_evaluation_domain = EvaluationDomain(SUBGROUP_SIZE, SUBGROUP_SIZE); + if (bn_evaluation_domain.size > 0) { + bn_evaluation_domain.compute_lookup_table(); + } + } + + interpolation_domain[0] = FF{ 1 }; + for (size_t idx = 1; idx < SUBGROUP_SIZE; idx++) { + interpolation_domain[idx] = interpolation_domain[idx - 1] * subgroup_generator; + } + } + + /** @brief Compute the concatenated libra polynomial in Lagrange basis, transform it to monomial form, and add the + * masking term Z_H(m_0 + m_1 X). + * + */ + void compute_concatenated_libra_polynomial() + { + std::array coeffs_lagrange_subgroup; + coeffs_lagrange_subgroup[0] = constant_term; + + for (size_t idx = 1; idx < SUBGROUP_SIZE; idx++) { + coeffs_lagrange_subgroup[idx] = FF{ 0 }; + } + + for (size_t poly_idx = 0; poly_idx < log_circuit_size; poly_idx++) { + for (size_t idx = 0; idx < LIBRA_UNIVARIATES_LENGTH; idx++) { + size_t idx_to_populate = 1 + poly_idx * LIBRA_UNIVARIATES_LENGTH + idx; + coeffs_lagrange_subgroup[idx_to_populate] = libra_univariates[poly_idx].at(idx); + } + } + + libra_concatenated_lagrange_form = Polynomial(coeffs_lagrange_subgroup); + + bb::Univariate masking_scalars = bb::Univariate::get_random(); + + Polynomial libra_concatenated_monomial_form_unmasked(SUBGROUP_SIZE); + if constexpr (!std::is_same_v) { + libra_concatenated_monomial_form_unmasked = + Polynomial(interpolation_domain, coeffs_lagrange_subgroup, SUBGROUP_SIZE); + } else { + std::vector coeffs_lagrange_subgroup_ifft(SUBGROUP_SIZE); + polynomial_arithmetic::ifft( + coeffs_lagrange_subgroup.data(), coeffs_lagrange_subgroup_ifft.data(), bn_evaluation_domain); + libra_concatenated_monomial_form_unmasked = Polynomial(coeffs_lagrange_subgroup_ifft); + } + + for (size_t idx = 0; idx < SUBGROUP_SIZE; idx++) { + libra_concatenated_monomial_form.at(idx) = libra_concatenated_monomial_form_unmasked.at(idx); + } + + for (size_t idx = 0; idx < masking_scalars.size(); idx++) { + libra_concatenated_monomial_form.at(idx) -= masking_scalars.value_at(idx); + libra_concatenated_monomial_form.at(SUBGROUP_SIZE + idx) += masking_scalars.value_at(idx); + } } }; + } // namespace bb diff --git 
a/barretenberg/cpp/src/barretenberg/trace_to_polynomials/trace_to_polynomials.cpp b/barretenberg/cpp/src/barretenberg/trace_to_polynomials/trace_to_polynomials.cpp index f9d3405d2f8f..b1aaccb49d1c 100644 --- a/barretenberg/cpp/src/barretenberg/trace_to_polynomials/trace_to_polynomials.cpp +++ b/barretenberg/cpp/src/barretenberg/trace_to_polynomials/trace_to_polynomials.cpp @@ -3,6 +3,7 @@ #include "barretenberg/plonk/proof_system/proving_key/proving_key.hpp" #include "barretenberg/stdlib_circuit_builders/mega_zk_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_keccak_zk_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_rollup_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_zk_flavor.hpp" namespace bb { @@ -175,8 +176,9 @@ void TraceToPolynomials::add_ecc_op_wires_to_proving_key(Builder& builde } template class TraceToPolynomials; -template class TraceToPolynomials; +template class TraceToPolynomials; template class TraceToPolynomials; +template class TraceToPolynomials; template class TraceToPolynomials; template class TraceToPolynomials; template class TraceToPolynomials; diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp index 3e0485b7ede7..1774a98f6f42 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp @@ -2,6 +2,7 @@ #include "barretenberg/commitment_schemes/claim.hpp" #include "barretenberg/commitment_schemes/commitment_key.hpp" #include "barretenberg/commitment_schemes/shplonk/shplemini.hpp" +#include "barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp" #include "barretenberg/plonk_honk_shared/library/grand_product_library.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" @@ -119,8 +120,7 @@ void TranslatorProver::execute_relation_check_rounds() } // // create masking polynomials for sumcheck round univariates and auxiliary data - zk_sumcheck_data = - ZKSumcheckData(key->proving_key->log_circuit_size, transcript, key->proving_key->commitment_key); + zk_sumcheck_data = ZKData(key->proving_key->log_circuit_size, transcript, key->proving_key->commitment_key); sumcheck_output = sumcheck.prove(key->proving_key->polynomials, relation_parameters, alpha, gate_challenges, zk_sumcheck_data); @@ -138,6 +138,14 @@ void TranslatorProver::execute_pcs_rounds() using OpeningClaim = ProverOpeningClaim; + using SmallSubgroupIPA = SmallSubgroupIPAProver; + + SmallSubgroupIPA small_subgroup_ipa_prover(zk_sumcheck_data, + sumcheck_output.challenge, + sumcheck_output.claimed_libra_evaluation, + transcript, + key->proving_key->commitment_key); + const OpeningClaim prover_opening_claim = ShpleminiProver_::prove(key->proving_key->circuit_size, key->proving_key->polynomials.get_unshifted_without_concatenated(), @@ -145,8 +153,7 @@ void TranslatorProver::execute_pcs_rounds() sumcheck_output.challenge, key->proving_key->commitment_key, transcript, - zk_sumcheck_data.libra_univariates_monomial, - sumcheck_output.claimed_libra_evaluations, + small_subgroup_ipa_prover.get_witness_polynomials(), key->proving_key->polynomials.get_concatenated(), key->proving_key->polynomials.get_groups_to_be_concatenated()); diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp index 
357c7aa6a908..2acd99027cfd 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp @@ -23,6 +23,7 @@ class TranslatorProver { using CommitmentLabels = typename Flavor::CommitmentLabels; using PCS = typename Flavor::PCS; using Transcript = typename Flavor::Transcript; + using ZKData = ZKSumcheckData; static constexpr size_t MINIMUM_MINI_CIRCUIT_SIZE = 2048; size_t total_num_gates = 0; // num_gates (already include zero row offset) (used to compute dyadic size) size_t dyadic_circuit_size = 0; // final power-of-2 circuit size @@ -47,7 +48,7 @@ class TranslatorProver { CommitmentLabels commitment_labels; - ZKSumcheckData zk_sumcheck_data; + ZKData zk_sumcheck_data; SumcheckOutput sumcheck_output; diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp index 8acaa89f7369..4cff080cf23b 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp @@ -99,22 +99,22 @@ bool TranslatorVerifier::verify_proof(const HonkProof& proof) } // Receive commitments to Libra masking polynomials - std::vector libra_commitments; - for (size_t idx = 0; idx < log_circuit_size; idx++) { - Commitment libra_commitment = - transcript->receive_from_prover("Libra:commitment_" + std::to_string(idx)); - libra_commitments.push_back(libra_commitment); - } + std::array libra_commitments = {}; + libra_commitments[0] = transcript->template receive_from_prover("Libra:concatenation_commitment"); - auto [multivariate_challenge, claimed_evaluations, libra_evaluations, sumcheck_verified] = + auto [multivariate_challenge, claimed_evaluations, libra_evaluation, sumcheck_verified] = sumcheck.verify(relation_parameters, alpha, gate_challenges); // If Sumcheck did not verify, return false if (sumcheck_verified.has_value() && !sumcheck_verified.value()) { return false; } - // Execute Shplemini + libra_commitments[1] = transcript->template receive_from_prover("Libra:big_sum_commitment"); + libra_commitments[2] = transcript->template receive_from_prover("Libra:quotient_commitment"); + + // Execute Shplemini + bool consistency_checked = false; const BatchOpeningClaim opening_claim = Shplemini::compute_batch_opening_claim(circuit_size, commitments.get_unshifted_without_concatenated(), @@ -125,15 +125,17 @@ bool TranslatorVerifier::verify_proof(const HonkProof& proof) Commitment::one(), transcript, Flavor::REPEATED_COMMITMENTS, - RefVector(libra_commitments), - libra_evaluations, + Flavor::HasZK, + &consistency_checked, + libra_commitments, + libra_evaluation, commitments.get_groups_to_be_concatenated(), claimed_evaluations.get_concatenated()); const auto pairing_points = PCS::reduce_verify_batch_opening_claim(opening_claim, transcript); auto verified = key->pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); - return verified; + return verified && consistency_checked; } bool TranslatorVerifier::verify_translation(const TranslationEvaluations& translation_evaluations) diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/ultra_honk/CMakeLists.txt index 7c55050a1380..0ae823699ce7 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(ultra_honk 
sumcheck) \ No newline at end of file +barretenberg_module(ultra_honk sumcheck stdlib_primitives) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.cpp index 2a27bd4b2f01..531009c69898 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.cpp @@ -1,4 +1,5 @@ #include "decider_prover.hpp" +#include "barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp" #include "barretenberg/common/op_count.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" @@ -32,15 +33,18 @@ template void DeciderProver_::execute_relation_ch { PROFILE_THIS_NAME("sumcheck.prove"); + if constexpr (Flavor::HasZK) { - auto commitment_key = std::make_shared(Flavor::BATCHED_RELATION_PARTIAL_LENGTH); - zk_sumcheck_data = ZKSumcheckData(numeric::get_msb(polynomial_size), transcript, commitment_key); + const size_t log_subgroup_size = static_cast(numeric::get_msb(Curve::SUBGROUP_SIZE)); + auto commitment_key = std::make_shared(1 << (log_subgroup_size + 1)); + zk_sumcheck_data = ZKData(numeric::get_msb(polynomial_size), transcript, commitment_key); sumcheck_output = sumcheck.prove(proving_key->proving_key.polynomials, proving_key->relation_parameters, proving_key->alphas, proving_key->gate_challenges, zk_sumcheck_data); } else { + sumcheck_output = sumcheck.prove(proving_key->proving_key.polynomials, proving_key->relation_parameters, proving_key->alphas, @@ -71,14 +75,17 @@ template void DeciderProver_::execute_pcs_rounds( ck, transcript); } else { + + SmallSubgroupIPA small_subgroup_ipa_prover( + zk_sumcheck_data, sumcheck_output.challenge, sumcheck_output.claimed_libra_evaluation, transcript, ck); + prover_opening_claim = ShpleminiProver_::prove(proving_key->proving_key.circuit_size, proving_key->proving_key.polynomials.get_unshifted(), proving_key->proving_key.polynomials.get_to_be_shifted(), sumcheck_output.challenge, ck, transcript, - zk_sumcheck_data.libra_univariates_monomial, - sumcheck_output.claimed_libra_evaluations); + small_subgroup_ipa_prover.get_witness_polynomials()); } vinfo("executed multivariate-to-univariate reduction"); PCS::compute_opening_proof(ck, prover_opening_claim, transcript); @@ -108,8 +115,10 @@ template HonkProof DeciderProver_::construct_proo } template class DeciderProver_; +template class DeciderProver_; template class DeciderProver_; template class DeciderProver_; +template class DeciderProver_; template class DeciderProver_; template class DeciderProver_; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.hpp index 1d8a7a854248..47baefd297b5 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.hpp @@ -25,6 +25,8 @@ template class DeciderProver_ { using DeciderPK = DeciderProvingKey_; using Transcript = typename Flavor::Transcript; using RelationSeparator = typename Flavor::RelationSeparator; + using ZKData = ZKSumcheckData; + using SmallSubgroupIPA = SmallSubgroupIPAProver; public: explicit DeciderProver_(const std::shared_ptr&, @@ -46,7 +48,7 @@ template class DeciderProver_ { Polynomial quotient_W; - ZKSumcheckData zk_sumcheck_data; + ZKData zk_sumcheck_data; SumcheckOutput sumcheck_output; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_proving_key.cpp 
b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_proving_key.cpp index bcef1fe9343b..0b9bf8a7d842 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_proving_key.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_proving_key.cpp @@ -328,8 +328,9 @@ void DeciderProvingKey_::move_structured_trace_overflow_to_overflow_bloc } template class DeciderProvingKey_; -template class DeciderProvingKey_; +template class DeciderProvingKey_; template class DeciderProvingKey_; +template class DeciderProvingKey_; template class DeciderProvingKey_; template class DeciderProvingKey_; template class DeciderProvingKey_; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_proving_key.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_proving_key.hpp index 5da7c7917088..8bd0c1cc1b01 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_proving_key.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_proving_key.hpp @@ -7,6 +7,7 @@ #include "barretenberg/relations/relation_parameters.hpp" #include "barretenberg/stdlib_circuit_builders/mega_zk_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_keccak_zk_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_rollup_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_zk_flavor.hpp" #include "barretenberg/trace_to_polynomials/trace_to_polynomials.hpp" diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_verifier.cpp index 22e93f763600..87891d71e9d7 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_verifier.cpp @@ -46,22 +46,19 @@ template bool DeciderVerifier_::verify() auto sumcheck = SumcheckVerifier( static_cast(accumulator->verification_key->log_circuit_size), transcript, accumulator->target_sum); // For MegaZKFlavor: receive commitments to Libra masking polynomials - std::vector libra_commitments = {}; + std::array libra_commitments = {}; if constexpr (Flavor::HasZK) { - for (size_t idx = 0; idx < static_cast(accumulator->verification_key->log_circuit_size); idx++) { - Commitment libra_commitment = - transcript->template receive_from_prover("Libra:commitment_" + std::to_string(idx)); - libra_commitments.push_back(libra_commitment); - }; + libra_commitments[0] = transcript->template receive_from_prover("Libra:concatenation_commitment"); } - SumcheckOutput sumcheck_output = sumcheck.verify(accumulator->relation_parameters, accumulator->alphas, accumulator->gate_challenges); // For MegaZKFlavor: the sumcheck output contains claimed evaluations of the Libra polynomials - std::vector libra_evaluations = {}; + FF libra_evaluation{ 0 }; if constexpr (Flavor::HasZK) { - libra_evaluations = std::move(sumcheck_output.claimed_libra_evaluations); + libra_evaluation = std::move(sumcheck_output.claimed_libra_evaluation); + libra_commitments[1] = transcript->template receive_from_prover("Libra:big_sum_commitment"); + libra_commitments[2] = transcript->template receive_from_prover("Libra:quotient_commitment"); } // If Sumcheck did not verify, return false @@ -69,7 +66,7 @@ template bool DeciderVerifier_::verify() info("Sumcheck verification failed."); return false; } - + bool consistency_checked = true; const BatchOpeningClaim opening_claim = Shplemini::compute_batch_opening_claim(accumulator->verification_key->circuit_size, 
commitments.get_unshifted(), @@ -80,15 +77,19 @@ template bool DeciderVerifier_::verify() Commitment::one(), transcript, Flavor::REPEATED_COMMITMENTS, - RefVector(libra_commitments), - libra_evaluations); + Flavor::HasZK, + &consistency_checked, + libra_commitments, + libra_evaluation); const auto pairing_points = PCS::reduce_verify_batch_opening_claim(opening_claim, transcript); bool verified = pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); - return sumcheck_output.verified.value() && verified; + return sumcheck_output.verified.value() && verified && consistency_checked; } template class DeciderVerifier_; +template class DeciderVerifier_; template class DeciderVerifier_; +template class DeciderVerifier_; template class DeciderVerifier_; template class DeciderVerifier_; template class DeciderVerifier_; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp index 4808bb2c2a88..e47333d43687 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp @@ -30,11 +30,10 @@ template class MegaTranscriptTests : public ::testing::Test { * * @return TranscriptManifest */ - static TranscriptManifest construct_mega_honk_manifest(size_t circuit_size = 0) + static TranscriptManifest construct_mega_honk_manifest() { using Commitment = typename Flavor::Commitment; TranscriptManifest manifest_expected; - auto log_n = numeric::get_msb(circuit_size); size_t MAX_PARTIAL_RELATION_LENGTH = Flavor::BATCHED_RELATION_PARTIAL_LENGTH; size_t NUM_SUBRELATIONS = Flavor::NUM_SUBRELATIONS; @@ -97,10 +96,7 @@ template class MegaTranscriptTests : public ::testing::Test { } if constexpr (Flavor::HasZK) { - for (size_t i = 0; i < log_n; i++) { - std::string label = "Libra:commitment_" + std::to_string(i); - manifest_expected.add_entry(round, label, frs_per_G); - } + manifest_expected.add_entry(round, "Libra:concatenation_commitment", frs_per_G); manifest_expected.add_entry(round, "Libra:Sum", frs_per_Fr); manifest_expected.add_challenge(round, "Libra:Challenge"); round++; @@ -115,15 +111,14 @@ template class MegaTranscriptTests : public ::testing::Test { } if constexpr (Flavor::HasZK) { - for (size_t i = 0; i < log_n; i++) { - std::string idx = std::to_string(i); - manifest_expected.add_entry(round, "Libra:evaluation_" + idx, frs_per_Fr); - } + manifest_expected.add_entry(round, "Libra:claimed_evaluation", frs_per_Fr); } manifest_expected.add_entry(round, "Sumcheck:evaluations", frs_per_evals); if constexpr (Flavor::HasZK) { + manifest_expected.add_entry(round, "Libra:big_sum_commitment", frs_per_G); + manifest_expected.add_entry(round, "Libra:quotient_commitment", frs_per_G); manifest_expected.add_entry(round, "Gemini:masking_poly_comm", frs_per_G); manifest_expected.add_entry(round, "Gemini:masking_poly_eval", frs_per_Fr); } @@ -141,6 +136,12 @@ template class MegaTranscriptTests : public ::testing::Test { std::string idx = std::to_string(i); manifest_expected.add_entry(round, "Gemini:a_" + idx, frs_per_Fr); } + if constexpr (Flavor::HasZK) { + manifest_expected.add_entry(round, "Libra:concatenation_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:shifted_big_sum_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:big_sum_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:quotient_eval", frs_per_Fr); + } manifest_expected.add_challenge(round, "Shplonk:nu"); 
round++; @@ -198,7 +199,7 @@ TYPED_TEST(MegaTranscriptTests, ProverManifestConsistency) auto proof = prover.construct_proof(); // Check that the prover generated manifest agrees with the manifest hard coded in this suite - auto manifest_expected = TestFixture::construct_mega_honk_manifest(proving_key->proving_key.circuit_size); + auto manifest_expected = TestFixture::construct_mega_honk_manifest(); auto prover_manifest = prover.transcript->get_manifest(); // Note: a manifest can be printed using manifest.print() for (size_t round = 0; round < manifest_expected.size(); ++round) { diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp index 7377c7b31c72..6012c9551f09 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp @@ -268,7 +268,9 @@ template typename Flavor::RelationSeparator OinkProver; +template class OinkProver; template class OinkProver; +template class OinkProver; template class OinkProver; template class OinkProver; template class OinkProver; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp index 8196f7f77e7a..1ec1d01cbc4a 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp @@ -3,7 +3,9 @@ #include "barretenberg/stdlib_circuit_builders/mega_zk_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_keccak_zk_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_rollup_flavor.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_zk_flavor.hpp" namespace bb { @@ -160,7 +162,9 @@ template typename Flavor::RelationSeparator OinkVerifier< } template class OinkVerifier; +template class OinkVerifier; template class OinkVerifier; +template class OinkVerifier; template class OinkVerifier; template class OinkVerifier; template class OinkVerifier; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp index 3f7ffe9defe9..82c868b1a416 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp @@ -47,7 +47,7 @@ template class UltraHonkTests : public ::testing::Test { static void SetUpTestSuite() { bb::srs::init_crs_factory(bb::srs::get_ignition_crs_path()); } }; -using FlavorTypes = testing::Types; +using FlavorTypes = testing::Types; TYPED_TEST_SUITE(UltraHonkTests, FlavorTypes); /** diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.cpp index a211564ae5d4..3597b3797a7a 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.cpp @@ -47,7 +47,7 @@ template HonkProof UltraProver_::export_proof() // Add the IPA proof if constexpr (HasIPAAccumulator) { // The extra calculation is for the IPA proof length. 
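 // For reference, the replaced magic number decomposes as 1 + 4 * CONST_ECCVM_LOG_N + 2 + 2 field
 // elements (see the removed assertion just below); IPA_PROOF_LENGTH now names that same total.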
- ASSERT(proving_key->proving_key.ipa_proof.size() == 1 + 4 * (CONST_ECCVM_LOG_N) + 2 + 2); + ASSERT(proving_key->proving_key.ipa_proof.size() == IPA_PROOF_LENGTH); proof.insert(proof.end(), proving_key->proving_key.ipa_proof.begin(), proving_key->proving_key.ipa_proof.end()); } return proof; @@ -77,7 +77,9 @@ template HonkProof UltraProver_::construct_proof( } template class UltraProver_; +template class UltraProver_; template class UltraProver_; +template class UltraProver_; template class UltraProver_; template class UltraProver_; template class UltraProver_; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp index 43a8e0cbb410..f313ea074b7e 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp @@ -1,25 +1,34 @@ +#include "barretenberg/commitment_schemes/ipa/ipa.hpp" #include "barretenberg/ecc/curves/bn254/g1.hpp" #include "barretenberg/flavor/flavor.hpp" #include "barretenberg/numeric/bitop/get_msb.hpp" #include "barretenberg/polynomials/univariate.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_flavor.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_rollup_flavor.hpp" #include "barretenberg/transcript/transcript.hpp" #include "barretenberg/ultra_honk/decider_proving_key.hpp" #include "barretenberg/ultra_honk/ultra_prover.hpp" #include "barretenberg/ultra_honk/ultra_verifier.hpp" -#include +#include "gtest/gtest.h" using namespace bb; -class UltraTranscriptTests : public ::testing::Test { +template class UltraTranscriptTests : public ::testing::Test { public: - static void SetUpTestSuite() { bb::srs::init_crs_factory(bb::srs::get_ignition_crs_path()); } + static void SetUpTestSuite() + { + bb::srs::init_crs_factory(bb::srs::get_ignition_crs_path()); + bb::srs::init_grumpkin_crs_factory("../srs_db/grumpkin"); + } - using Flavor = UltraFlavor; using VerificationKey = Flavor::VerificationKey; using FF = Flavor::FF; + using Commitment = Flavor::Commitment; using DeciderProvingKey = DeciderProvingKey_; + using Builder = Flavor::CircuitBuilder; + using Prover = UltraProver_; + using Verifier = UltraVerifier_; /** * @brief Construct a manifest for a Ultra Honk proof @@ -40,7 +49,7 @@ class UltraTranscriptTests : public ::testing::Test { size_t NUM_SUBRELATIONS = Flavor::NUM_SUBRELATIONS; // Size of types is number of bb::frs needed to represent the types size_t frs_per_Fr = bb::field_conversion::calc_num_bn254_frs(); - size_t frs_per_G = bb::field_conversion::calc_num_bn254_frs(); + size_t frs_per_G = bb::field_conversion::calc_num_bn254_frs(); size_t frs_per_uni = MAX_PARTIAL_RELATION_LENGTH * frs_per_Fr; size_t frs_per_evals = (Flavor::NUM_ALL_ENTITIES)*frs_per_Fr; size_t frs_per_uint32 = bb::field_conversion::calc_num_bn254_frs(); @@ -50,6 +59,11 @@ class UltraTranscriptTests : public ::testing::Test { manifest_expected.add_entry(round, "public_input_size", frs_per_uint32); manifest_expected.add_entry(round, "pub_inputs_offset", frs_per_uint32); manifest_expected.add_entry(round, "public_input_0", frs_per_Fr); + if constexpr (HasIPAAccumulator) { + for (size_t i = 0; i < IPA_CLAIM_SIZE; i++) { + manifest_expected.add_entry(round, "public_input_" + std::to_string(i + 1), frs_per_Fr); + } + } manifest_expected.add_entry(round, "W_L", frs_per_G); manifest_expected.add_entry(round, "W_R", frs_per_G); manifest_expected.add_entry(round, "W_O", frs_per_G); @@ -80,6 
+94,13 @@ class UltraTranscriptTests : public ::testing::Test { round++; } + if constexpr (Flavor::HasZK) { + manifest_expected.add_entry(round, "Libra:concatenation_commitment", frs_per_G); + manifest_expected.add_entry(round, "Libra:Sum", frs_per_Fr); + manifest_expected.add_challenge(round, "Libra:Challenge"); + round++; + } + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { std::string idx = std::to_string(i); manifest_expected.add_entry(round, "Sumcheck:univariate_" + idx, frs_per_uni); @@ -88,7 +109,19 @@ class UltraTranscriptTests : public ::testing::Test { round++; } + if constexpr (Flavor::HasZK) { + manifest_expected.add_entry(round, "Libra:claimed_evaluation", frs_per_Fr); + } + manifest_expected.add_entry(round, "Sumcheck:evaluations", frs_per_evals); + + if constexpr (Flavor::HasZK) { + manifest_expected.add_entry(round, "Libra:big_sum_commitment", frs_per_G); + manifest_expected.add_entry(round, "Libra:quotient_commitment", frs_per_G); + manifest_expected.add_entry(round, "Gemini:masking_poly_comm", frs_per_G); + manifest_expected.add_entry(round, "Gemini:masking_poly_eval", frs_per_Fr); + } + manifest_expected.add_challenge(round, "rho"); round++; @@ -103,6 +136,13 @@ class UltraTranscriptTests : public ::testing::Test { manifest_expected.add_entry(round, "Gemini:a_" + idx, frs_per_Fr); } + if constexpr (Flavor::HasZK) { + manifest_expected.add_entry(round, "Libra:concatenation_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:shifted_big_sum_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:big_sum_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Libra:quotient_eval", frs_per_Fr); + } + manifest_expected.add_challenge(round, "Shplonk:nu"); round++; manifest_expected.add_entry(round, "Shplonk:Q", frs_per_G); @@ -115,42 +155,62 @@ class UltraTranscriptTests : public ::testing::Test { return manifest_expected; } - void generate_test_circuit(auto& builder) + void generate_test_circuit(typename Flavor::CircuitBuilder& builder) { FF a = 1; builder.add_variable(a); builder.add_public_variable(a); + + if constexpr (HasIPAAccumulator) { + auto [stdlib_opening_claim, ipa_proof] = + IPA>::create_fake_ipa_claim_and_proof(builder); + builder.add_ipa_claim(stdlib_opening_claim.get_witness_indices()); + builder.ipa_proof = ipa_proof; + } } - void generate_random_test_circuit(auto& builder) + void generate_random_test_circuit(typename Flavor::CircuitBuilder& builder) { auto a = FF::random_element(); auto b = FF::random_element(); builder.add_variable(a); builder.add_public_variable(a); builder.add_public_variable(b); + + if constexpr (HasIPAAccumulator) { + auto [stdlib_opening_claim, ipa_proof] = + IPA>::create_fake_ipa_claim_and_proof(builder); + builder.add_ipa_claim(stdlib_opening_claim.get_witness_indices()); + builder.ipa_proof = ipa_proof; + } } }; +using FlavorTypes = + ::testing::Types; +TYPED_TEST_SUITE(UltraTranscriptTests, FlavorTypes); + /** * @brief Ensure consistency between the manifest hard coded in this testing suite and the one generated by the * standard honk prover over the course of proof construction. */ -TEST_F(UltraTranscriptTests, ProverManifestConsistency) +TYPED_TEST(UltraTranscriptTests, ProverManifestConsistency) { // Construct a simple circuit of size n = 8 (i.e. 
the minimum circuit size) - auto builder = typename Flavor::CircuitBuilder(); - generate_test_circuit(builder); + auto builder = typename TestFixture::Builder(); + TestFixture::generate_test_circuit(builder); // Automatically generate a transcript manifest by constructing a proof - auto proving_key = std::make_shared(builder); - UltraProver prover(proving_key); + auto proving_key = std::make_shared(builder); + typename TestFixture::Prover prover(proving_key); auto proof = prover.construct_proof(); // Check that the prover generated manifest agrees with the manifest hard coded in this suite - auto manifest_expected = construct_ultra_honk_manifest(); + auto manifest_expected = TestFixture::construct_ultra_honk_manifest(); auto prover_manifest = prover.transcript->get_manifest(); // Note: a manifest can be printed using manifest.print() + manifest_expected.print(); + prover_manifest.print(); for (size_t round = 0; round < manifest_expected.size(); ++round) { ASSERT_EQ(prover_manifest[round], manifest_expected[round]) << "Prover manifest discrepency in round " << round; } @@ -161,22 +221,40 @@ TEST_F(UltraTranscriptTests, ProverManifestConsistency) * construction and the one generated by the verifier over the course of proof verification. * */ -TEST_F(UltraTranscriptTests, VerifierManifestConsistency) +TYPED_TEST(UltraTranscriptTests, VerifierManifestConsistency) { // Construct a simple circuit of size n = 8 (i.e. the minimum circuit size) - auto builder = Flavor::CircuitBuilder(); - generate_test_circuit(builder); + auto builder = typename TestFixture::Builder(); + TestFixture::generate_test_circuit(builder); // Automatically generate a transcript manifest in the prover by constructing a proof - auto proving_key = std::make_shared(builder); - UltraProver prover(proving_key); + auto proving_key = std::make_shared(builder); + typename TestFixture::Prover prover(proving_key); auto proof = prover.construct_proof(); // Automatically generate a transcript manifest in the verifier by verifying a proof - auto verification_key = std::make_shared(proving_key->proving_key); - UltraVerifier verifier(verification_key); - verifier.verify_proof(proof); + auto verification_key = std::make_shared(proving_key->proving_key); + typename TestFixture::Verifier verifier(verification_key); + HonkProof honk_proof; + HonkProof ipa_proof; + if constexpr (HasIPAAccumulator) { + verifier.ipa_verification_key = + std::make_shared>(1 << CONST_ECCVM_LOG_N); + const size_t HONK_PROOF_LENGTH = TypeParam::PROOF_LENGTH_WITHOUT_PUB_INPUTS - IPA_PROOF_LENGTH; + const size_t num_public_inputs = static_cast(proof[1]); + // The extra calculation is for the IPA proof length. + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1182): Handle in ProofSurgeon. 
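Note: the TODO above and the hunk that follows perform proof surgery by hand: the flat proof is laid out as [honk fields | public inputs | ipa fields], and the verifier slices off the IPA tail before calling verify_proof. A self-contained C++ sketch of that slicing with stand-in types:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

using Proof = std::vector<uint64_t>; // field elements stood in by uint64_t

// Split `proof` into (honk proof with public inputs, ipa proof).
std::pair<Proof, Proof> split_proof(const Proof& proof, size_t honk_length, size_t num_public_inputs)
{
    const auto split = static_cast<std::ptrdiff_t>(honk_length + num_public_inputs);
    assert(static_cast<std::ptrdiff_t>(proof.size()) >= split);
    Proof honk(proof.begin(), proof.begin() + split);
    Proof ipa(proof.begin() + split, proof.end());
    return { honk, ipa };
}

int main()
{
    Proof proof = { 1, 2, 3, 4, 5, 6, 7 }; // 4 honk fields, 1 public input, 2 ipa fields
    auto [honk, ipa] = split_proof(proof, 4, 1);
    assert(honk.size() == 5 && ipa.size() == 2);
}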
+            ASSERT(proof.size() == HONK_PROOF_LENGTH + IPA_PROOF_LENGTH + num_public_inputs);
+            // Split out the IPA proof.
+            const std::ptrdiff_t honk_proof_with_pub_inputs_length =
+                static_cast<std::ptrdiff_t>(HONK_PROOF_LENGTH + num_public_inputs);
+            ipa_proof = HonkProof(proof.begin() + honk_proof_with_pub_inputs_length, proof.end());
+            honk_proof = HonkProof(proof.begin(), proof.begin() + honk_proof_with_pub_inputs_length);
+        } else {
+            honk_proof = proof;
+        }
+        verifier.verify_proof(honk_proof, ipa_proof);

     // Check consistency between the manifests generated by the prover and verifier
     auto prover_manifest = prover.transcript->get_manifest();
@@ -194,10 +272,12 @@ TEST_F(UltraTranscriptTests, VerifierManifestConsistency)
  * @details We generate 6 challenges that are each 128 bits, and check that they are not 0.
  *
  */
-TEST_F(UltraTranscriptTests, ChallengeGenerationTest)
+TYPED_TEST(UltraTranscriptTests, ChallengeGenerationTest)
 {
+    using Flavor = TypeParam;
+    using FF = Flavor::FF;
     // initialized with random value sent to verifier
-    auto transcript = Flavor::Transcript::prover_init_empty();
+    auto transcript = TypeParam::Transcript::prover_init_empty();
     // test a bunch of challenges
     auto challenges = transcript->template get_challenges<FF>("a", "b", "c", "d", "e", "f");
     // check they are not 0
@@ -213,18 +293,25 @@ TEST_F(UltraTranscriptTests, ChallengeGenerationTest)
     ASSERT_NE(c, 0) << "Challenge c is 0";
 }

-TEST_F(UltraTranscriptTests, StructureTest)
+TYPED_TEST(UltraTranscriptTests, StructureTest)
 {
+    using Flavor = TypeParam;
+    using FF = Flavor::FF;
+    using Commitment = Flavor::Commitment;
     // Construct a simple circuit of size n = 8 (i.e. the minimum circuit size)
-    auto builder = typename Flavor::CircuitBuilder();
-    generate_test_circuit(builder);
+    auto builder = typename TestFixture::Builder();
+    if constexpr (IsAnyOf) {
+        GTEST_SKIP() << "Not built for this parameter";
+    }
+    // Construct a simple circuit of size n = 8 (i.e.
the minimum circuit size) + TestFixture::generate_test_circuit(builder); // Automatically generate a transcript manifest by constructing a proof - auto proving_key = std::make_shared(builder); - UltraProver prover(proving_key); + auto proving_key = std::make_shared(builder); + typename TestFixture::Prover prover(proving_key); auto proof = prover.construct_proof(); - auto verification_key = std::make_shared(proving_key->proving_key); - UltraVerifier verifier(verification_key); + auto verification_key = std::make_shared(proving_key->proving_key); + typename TestFixture::Verifier verifier(verification_key); EXPECT_TRUE(verifier.verify_proof(proof)); // try deserializing and serializing with no changes and check proof is still valid @@ -232,7 +319,7 @@ TEST_F(UltraTranscriptTests, StructureTest) prover.transcript->serialize_full_transcript(); EXPECT_TRUE(verifier.verify_proof(prover.export_proof())); // we have changed nothing so proof is still valid - Flavor::Commitment one_group_val = Flavor::Commitment::one(); + Commitment one_group_val = Commitment::one(); FF rand_val = FF::random_element(); prover.transcript->z_perm_comm = one_group_val * rand_val; // choose random object to modify EXPECT_TRUE(verifier.verify_proof( @@ -242,5 +329,22 @@ TEST_F(UltraTranscriptTests, StructureTest) EXPECT_FALSE(verifier.verify_proof(prover.export_proof())); // the proof is now wrong after serializing it prover.transcript->deserialize_full_transcript(); - EXPECT_EQ(static_cast(prover.transcript->z_perm_comm), one_group_val * rand_val); + EXPECT_EQ(static_cast(prover.transcript->z_perm_comm), one_group_val * rand_val); +} + +TYPED_TEST(UltraTranscriptTests, ProofLengthTest) +{ + if constexpr (!IsAnyOf) { + GTEST_SKIP() << "Not built for this parameter"; + } else { + // Construct a simple circuit of size n = 8 (i.e. 
the minimum circuit size) + auto builder = typename TypeParam::CircuitBuilder(); + TestFixture::generate_test_circuit(builder); + + // Automatically generate a transcript manifest by constructing a proof + auto proving_key = std::make_shared(builder); + typename TestFixture::Prover prover(proving_key); + auto proof = prover.construct_proof(); + EXPECT_EQ(proof.size(), TypeParam::PROOF_LENGTH_WITHOUT_PUB_INPUTS + builder.public_inputs.size()); + } } diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp index 3c4278c7439c..692ac54397f2 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp @@ -72,7 +72,9 @@ template bool UltraVerifier_::verify_proof(const HonkP } template class UltraVerifier_; +template class UltraVerifier_; template class UltraVerifier_; +template class UltraVerifier_; template class UltraVerifier_; template class UltraVerifier_; template class UltraVerifier_; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_cmp_alu.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_cmp_alu.hpp index 1c45010f6991..2830de59c49d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_cmp_alu.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_cmp_alu.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_cmp_alu_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 7; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::cmp_sel_cmp; + static constexpr Column DST_SELECTOR = Column::alu_cmp_gadget_sel; + static constexpr Column INVERSES = Column::perm_cmp_alu_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.cmp_sel_cmp == 1 || in.alu_cmp_gadget_sel == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_alu.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_alu.hpp index 98b4b50c1029..363330c0e32b 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_alu.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_alu.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_alu_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 16; + // Columns using the Column enum. 
+ static constexpr Column SRC_SELECTOR = Column::main_sel_alu; + static constexpr Column DST_SELECTOR = Column::alu_sel_alu; + static constexpr Column INVERSES = Column::perm_main_alu_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_alu == 1 || in.alu_sel_alu == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_bin.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_bin.hpp index d6aa985dead2..7f55dabfe6b4 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_bin.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_bin.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_bin_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 6; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::main_sel_bin; + static constexpr Column DST_SELECTOR = Column::binary_start; + static constexpr Column INVERSES = Column::perm_main_bin_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_bin == 1 || in.binary_start == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_conv.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_conv.hpp index 73f75460e0d6..52b955b09fdb 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_conv.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_conv.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_conv_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::main_sel_op_radix_be; + static constexpr Column DST_SELECTOR = Column::conversion_sel_to_radix_be; + static constexpr Column INVERSES = Column::perm_main_conv_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_op_radix_be == 1 || in.conversion_sel_to_radix_be == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_a.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_a.hpp index 5b67cd4c5c4b..00dcc32980f9 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_a.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_a.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_mem_a_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 8; + // Columns using the Column enum. 
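Note: every autogenerated permutation settings header in this diff (continuing below) gains the same three constants: the source and destination selector columns and the inverses column, exposed through the Column enum so generic code can locate them without string lookups. A condensed C++ sketch of the pattern; the settings class and consumer here are illustrative, not the real generic_permutation_relation API.

#include <cstddef>
#include <iostream>

enum class Column { src_sel, dst_sel, inv /* ... one entry per trace column ... */ };

struct example_permutation_settings {
    constexpr static size_t COLUMNS_PER_SET = 3;
    // The new constants: which columns this permutation reads and writes.
    static constexpr Column SRC_SELECTOR = Column::src_sel;
    static constexpr Column DST_SELECTOR = Column::dst_sel;
    static constexpr Column INVERSES = Column::inv;
};

// Hypothetical consumer: any code templated on a settings class can now ask,
// at compile time, which columns the permutation touches.
template <typename Settings> void describe()
{
    std::cout << "src=" << static_cast<int>(Settings::SRC_SELECTOR)
              << " dst=" << static_cast<int>(Settings::DST_SELECTOR)
              << " inv=" << static_cast<int>(Settings::INVERSES) << "\n";
}

int main() { describe<example_permutation_settings>(); }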
+ static constexpr Column SRC_SELECTOR = Column::main_sel_mem_op_a; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_a; + static constexpr Column INVERSES = Column::perm_main_mem_a_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_mem_op_a == 1 || in.mem_sel_op_a == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_b.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_b.hpp index ec4db06c5782..5281245386ff 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_b.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_b.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_mem_b_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 8; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::main_sel_mem_op_b; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_b; + static constexpr Column INVERSES = Column::perm_main_mem_b_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_mem_op_b == 1 || in.mem_sel_op_b == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_c.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_c.hpp index acbc0b7e40a5..9d9512688fd9 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_c.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_c.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_mem_c_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 7; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::main_sel_mem_op_c; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_c; + static constexpr Column INVERSES = Column::perm_main_mem_c_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_mem_op_c == 1 || in.mem_sel_op_c == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_d.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_d.hpp index d54bc468d8ed..4c787ee09605 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_d.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_d.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_mem_d_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 7; + // Columns using the Column enum. 
+ static constexpr Column SRC_SELECTOR = Column::main_sel_mem_op_d; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_d; + static constexpr Column INVERSES = Column::perm_main_mem_d_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_mem_op_d == 1 || in.mem_sel_op_d == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_a.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_a.hpp index 91aed07085b8..9f0a19fbf4b1 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_a.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_a.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_mem_ind_addr_a_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 4; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::main_sel_resolve_ind_addr_a; + static constexpr Column DST_SELECTOR = Column::mem_sel_resolve_ind_addr_a; + static constexpr Column INVERSES = Column::perm_main_mem_ind_addr_a_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_resolve_ind_addr_a == 1 || in.mem_sel_resolve_ind_addr_a == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_b.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_b.hpp index e4e0f5c647f9..366498465e25 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_b.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_b.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_mem_ind_addr_b_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 4; + // Columns using the Column enum. 
+ static constexpr Column SRC_SELECTOR = Column::main_sel_resolve_ind_addr_b; + static constexpr Column DST_SELECTOR = Column::mem_sel_resolve_ind_addr_b; + static constexpr Column INVERSES = Column::perm_main_mem_ind_addr_b_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_resolve_ind_addr_b == 1 || in.mem_sel_resolve_ind_addr_b == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_c.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_c.hpp index 23b8cc3ed804..4b318b84b591 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_c.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_c.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_mem_ind_addr_c_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 4; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::main_sel_resolve_ind_addr_c; + static constexpr Column DST_SELECTOR = Column::mem_sel_resolve_ind_addr_c; + static constexpr Column INVERSES = Column::perm_main_mem_ind_addr_c_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_resolve_ind_addr_c == 1 || in.mem_sel_resolve_ind_addr_c == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_d.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_d.hpp index 9cd180cddbf8..824a6616b3dd 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_d.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_mem_ind_addr_d.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_mem_ind_addr_d_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 4; + // Columns using the Column enum. 
+ static constexpr Column SRC_SELECTOR = Column::main_sel_resolve_ind_addr_d; + static constexpr Column DST_SELECTOR = Column::mem_sel_resolve_ind_addr_d; + static constexpr Column INVERSES = Column::perm_main_mem_ind_addr_d_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_resolve_ind_addr_d == 1 || in.mem_sel_resolve_ind_addr_d == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_pos2_perm.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_pos2_perm.hpp index 4bb38fb01044..f96735310ff0 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_pos2_perm.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_pos2_perm.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_pos2_perm_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 4; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::main_sel_op_poseidon2; + static constexpr Column DST_SELECTOR = Column::poseidon2_sel_poseidon_perm; + static constexpr Column INVERSES = Column::perm_main_pos2_perm_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_op_poseidon2 == 1 || in.poseidon2_sel_poseidon_perm == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_sha256.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_sha256.hpp index 5840520b2e2c..0e5068bd5ac3 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_sha256.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_main_sha256.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_main_sha256_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 4; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::main_sel_op_sha256; + static constexpr Column DST_SELECTOR = Column::sha256_sel_sha256_compression; + static constexpr Column INVERSES = Column::perm_main_sha256_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.main_sel_op_sha256 == 1 || in.sha256_sel_sha256_compression == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_merkle_poseidon2.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_merkle_poseidon2.hpp index b834a9d8b47f..7932af800ff7 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_merkle_poseidon2.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_merkle_poseidon2.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_merkle_poseidon2_permutation_settings { // This constant defines how many columns are bundled together to form each set. 
constexpr static size_t COLUMNS_PER_SET = 4; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::merkle_tree_sel_merkle_tree; + static constexpr Column DST_SELECTOR = Column::poseidon2_full_sel_merkle_tree; + static constexpr Column INVERSES = Column::perm_merkle_poseidon2_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.merkle_tree_sel_merkle_tree == 1 || in.poseidon2_full_sel_merkle_tree == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos2_fixed_pos2_perm.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos2_fixed_pos2_perm.hpp index 09652cc48a95..8e65cd790752 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos2_fixed_pos2_perm.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos2_fixed_pos2_perm.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos2_fixed_pos2_perm_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 9; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::poseidon2_full_sel_poseidon; + static constexpr Column DST_SELECTOR = Column::poseidon2_sel_poseidon_perm_immediate; + static constexpr Column INVERSES = Column::perm_pos2_fixed_pos2_perm_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_full_sel_poseidon == 1 || in.poseidon2_sel_poseidon_perm_immediate == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_a.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_a.hpp index fd33b39f2a54..c9d6d37a8a4c 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_a.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_a.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos_mem_read_a_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. 
+ static constexpr Column SRC_SELECTOR = Column::poseidon2_sel_poseidon_perm_mem_op; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_poseidon_read_a; + static constexpr Column INVERSES = Column::perm_pos_mem_read_a_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_sel_poseidon_perm_mem_op == 1 || in.mem_sel_op_poseidon_read_a == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_b.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_b.hpp index 660cd83d22a8..0c0615079911 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_b.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_b.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos_mem_read_b_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::poseidon2_sel_poseidon_perm_mem_op; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_poseidon_read_b; + static constexpr Column INVERSES = Column::perm_pos_mem_read_b_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_sel_poseidon_perm_mem_op == 1 || in.mem_sel_op_poseidon_read_b == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_c.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_c.hpp index a3322399a69f..3dbf72feb8b8 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_c.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_c.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos_mem_read_c_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. 
+ static constexpr Column SRC_SELECTOR = Column::poseidon2_sel_poseidon_perm_mem_op; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_poseidon_read_c; + static constexpr Column INVERSES = Column::perm_pos_mem_read_c_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_sel_poseidon_perm_mem_op == 1 || in.mem_sel_op_poseidon_read_c == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_d.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_d.hpp index 8ef21a4a9c93..f1131b0ef648 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_d.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_read_d.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos_mem_read_d_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::poseidon2_sel_poseidon_perm_mem_op; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_poseidon_read_d; + static constexpr Column INVERSES = Column::perm_pos_mem_read_d_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_sel_poseidon_perm_mem_op == 1 || in.mem_sel_op_poseidon_read_d == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_a.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_a.hpp index ca574d1b1fbf..84a3748dea6d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_a.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_a.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos_mem_write_a_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. 
+ static constexpr Column SRC_SELECTOR = Column::poseidon2_sel_poseidon_perm_mem_op; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_poseidon_write_a; + static constexpr Column INVERSES = Column::perm_pos_mem_write_a_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_sel_poseidon_perm_mem_op == 1 || in.mem_sel_op_poseidon_write_a == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_b.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_b.hpp index 7abc1c339623..d665cb6220bc 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_b.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_b.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos_mem_write_b_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::poseidon2_sel_poseidon_perm_mem_op; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_poseidon_write_b; + static constexpr Column INVERSES = Column::perm_pos_mem_write_b_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_sel_poseidon_perm_mem_op == 1 || in.mem_sel_op_poseidon_write_b == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_c.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_c.hpp index 213deb7728eb..730946d69f1d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_c.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_c.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos_mem_write_c_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. 
+ static constexpr Column SRC_SELECTOR = Column::poseidon2_sel_poseidon_perm_mem_op; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_poseidon_write_c; + static constexpr Column INVERSES = Column::perm_pos_mem_write_c_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_sel_poseidon_perm_mem_op == 1 || in.mem_sel_op_poseidon_write_c == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_d.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_d.hpp index d45486385bc1..570581548dd6 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_d.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_pos_mem_write_d.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_pos_mem_write_d_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::poseidon2_sel_poseidon_perm_mem_op; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_poseidon_write_d; + static constexpr Column INVERSES = Column::perm_pos_mem_write_d_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.poseidon2_sel_poseidon_perm_mem_op == 1 || in.mem_sel_op_poseidon_write_d == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_alu.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_alu.hpp index 5c935533df99..508be9f2804c 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_alu.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_alu.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_rng_alu_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 3; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::range_check_alu_rng_chk; + static constexpr Column DST_SELECTOR = Column::alu_range_check_sel; + static constexpr Column INVERSES = Column::perm_rng_alu_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.range_check_alu_rng_chk == 1 || in.alu_range_check_sel == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_cmp_hi.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_cmp_hi.hpp index 01eddfee8645..f60dd58f6e78 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_cmp_hi.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_cmp_hi.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_rng_cmp_hi_permutation_settings { // This constant defines how many columns are bundled together to form each set. 
constexpr static size_t COLUMNS_PER_SET = 2; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::range_check_cmp_hi_bits_rng_chk; + static constexpr Column DST_SELECTOR = Column::cmp_sel_rng_chk; + static constexpr Column INVERSES = Column::perm_rng_cmp_hi_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.range_check_cmp_hi_bits_rng_chk == 1 || in.cmp_sel_rng_chk == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_cmp_lo.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_cmp_lo.hpp index 4315efb8b4b7..fe8426fc5369 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_cmp_lo.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_cmp_lo.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_rng_cmp_lo_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 2; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::range_check_cmp_lo_bits_rng_chk; + static constexpr Column DST_SELECTOR = Column::cmp_sel_rng_chk; + static constexpr Column INVERSES = Column::perm_rng_cmp_lo_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.range_check_cmp_lo_bits_rng_chk == 1 || in.cmp_sel_rng_chk == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_non_ff_cmp.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_non_ff_cmp.hpp index 6170cae1b334..b07bb1e4b5b3 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_non_ff_cmp.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_rng_non_ff_cmp.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_rng_non_ff_cmp_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = 2; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::range_check_cmp_non_ff_rng_chk; + static constexpr Column DST_SELECTOR = Column::cmp_op_non_ff_gt; + static constexpr Column INVERSES = Column::perm_rng_non_ff_cmp_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.range_check_cmp_non_ff_rng_chk == 1 || in.cmp_op_non_ff_gt == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_slice_mem.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_slice_mem.hpp index 182b931fdfee..1ee3faa5c498 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_slice_mem.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/perm_slice_mem.hpp @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class perm_slice_mem_permutation_settings { // This constant defines how many columns are bundled together to form each set. 
constexpr static size_t COLUMNS_PER_SET = 5; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::slice_sel_mem_active; + static constexpr Column DST_SELECTOR = Column::mem_sel_op_slice; + static constexpr Column INVERSES = Column::perm_slice_mem_inv; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.slice_sel_mem_active == 1 || in.mem_sel_op_slice == 1); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/errors.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/errors.hpp index 0549c73423c9..e6613bd7c589 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/errors.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/errors.hpp @@ -24,6 +24,8 @@ enum class AvmError : uint32_t { OUT_OF_GAS, STATIC_CALL_ALTERATION, FAILED_BYTECODE_RETRIEVAL, + MSM_POINTS_LEN_INVALID, + MSM_POINT_NOT_ON_CURVE, }; } // namespace bb::avm_trace diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp index a56f7f9008f2..d177f4ef4558 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp @@ -472,6 +472,7 @@ AvmError Execution::execute_enqueued_call(AvmTraceBuilder& trace_builder, std::stack debug_counter_stack; uint32_t counter = 0; // FIXME: this cast means that we can have duplicate call ptrs since clk will end up way bigger than 256 + trace_builder.set_pc(pc); trace_builder.set_call_ptr(static_cast(context_id)); while (is_ok(error) && (pc = trace_builder.get_pc()) < bytecode.size()) { auto [inst, parse_error] = Deserialization::parse(bytecode, pc); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp index be57dc4da6d4..dc554ebd2b5c 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp @@ -275,7 +275,7 @@ struct ExecutionHints { std::vector contract_instance_hints_vec; read(it, contract_instance_hints_vec); std::map contract_instance_hints; - for (const auto& instance : contract_instance_hints_vec) { + for (const auto& instance : std::views::reverse(contract_instance_hints_vec)) { contract_instance_hints[instance.address] = instance; } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp index 23f30d12807f..c36904e66873 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp @@ -117,6 +117,8 @@ std::string to_name(AvmError error) return "TAG CHECKING ERROR"; case AvmError::ADDR_RES_TAG_ERROR: return "ADDRESS RESOLUTION TAG ERROR"; + case AvmError::MEM_SLICE_OUT_OF_RANGE: + return "MEMORY SLICE OUT OF RANGE"; case AvmError::REL_ADDR_OUT_OF_RANGE: return "RELATIVE ADDRESS IS OUT OF RANGE"; case AvmError::DIV_ZERO: @@ -135,8 +137,14 @@ std::string to_name(AvmError error) return "SIDE EFFECT LIMIT REACHED"; case AvmError::OUT_OF_GAS: return "OUT OF GAS"; + case AvmError::STATIC_CALL_ALTERATION: + return "STATIC CALL ALTERATION"; case AvmError::FAILED_BYTECODE_RETRIEVAL: return "FAILED BYTECODE RETRIEVAL"; + case AvmError::MSM_POINTS_LEN_INVALID: + return "MSM POINTS LEN INVALID"; + case AvmError::MSM_POINT_NOT_ON_CURVE: + return "MSM POINT NOT ON CURVE"; default: throw std::runtime_error("Invalid 
error type"); break; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp index c554799d18ac..cd7e26908bd5 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp @@ -4780,6 +4780,11 @@ AvmError AvmTraceBuilder::op_variable_msm(uint8_t indirect, const FF points_length = is_ok(error) ? unconstrained_read_from_memory(resolved_point_length_offset) : 0; + // Unconstrained check that points_length must be a multiple of 3. + if (is_ok(error) && static_cast(points_length) % 3 != 0) { + error = AvmError::MSM_POINTS_LEN_INVALID; + } + if (is_ok(error) && !check_slice_mem_range(resolved_points_offset, static_cast(points_length))) { error = AvmError::MEM_SLICE_OUT_OF_RANGE; } @@ -4863,6 +4868,10 @@ AvmError AvmTraceBuilder::op_variable_msm(uint8_t indirect, points.emplace_back(grumpkin::g1::affine_element::infinity()); } else { points.emplace_back(x, y); + // Unconstrained check that this point lies on the Grumpkin curve. + if (!points.back().on_curve()) { + return AvmError::MSM_POINT_NOT_ON_CURVE; + } } } // Reconstruct Grumpkin scalars diff --git a/barretenberg/cpp/src/barretenberg/vm/aztec_constants.hpp b/barretenberg/cpp/src/barretenberg/vm/aztec_constants.hpp index 518730ac15ad..67f0e9255c98 100644 --- a/barretenberg/cpp/src/barretenberg/vm/aztec_constants.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/aztec_constants.hpp @@ -5,7 +5,7 @@ #define MAX_NULLIFIERS_PER_CALL 16 #define MAX_ENQUEUED_CALLS_PER_CALL 16 #define MAX_L2_TO_L1_MSGS_PER_CALL 2 -#define MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL 64 +#define MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL 63 #define MAX_PUBLIC_DATA_READS_PER_CALL 64 #define MAX_NOTE_HASH_READ_REQUESTS_PER_CALL 16 #define MAX_NULLIFIER_READ_REQUESTS_PER_CALL 16 @@ -44,14 +44,14 @@ #define PUBLIC_INNER_CALL_REQUEST_LENGTH 13 #define STATE_REFERENCE_LENGTH 8 #define TOTAL_FEES_LENGTH 1 -#define PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH 867 +#define PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH 864 #define AVM_ACCUMULATED_DATA_LENGTH 320 #define AVM_CIRCUIT_PUBLIC_INPUTS_LENGTH 1011 #define AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS 86 #define MAX_PUBLIC_CALLS_TO_UNIQUE_CONTRACT_CLASS_IDS 21 #define AVM_PROOF_LENGTH_IN_FIELDS 4155 #define AVM_PUBLIC_COLUMN_MAX_SIZE 1024 -#define AVM_PUBLIC_INPUTS_FLATTENED_SIZE 2915 +#define AVM_PUBLIC_INPUTS_FLATTENED_SIZE 2912 #define MEM_TAG_FF 0 #define MEM_TAG_U1 1 #define MEM_TAG_U8 2 @@ -78,11 +78,11 @@ #define START_NULLIFIER_NON_EXISTS_OFFSET 32 #define START_L1_TO_L2_MSG_EXISTS_WRITE_OFFSET 48 #define START_SSTORE_WRITE_OFFSET 64 -#define START_SLOAD_WRITE_OFFSET 128 -#define START_EMIT_NOTE_HASH_WRITE_OFFSET 192 -#define START_EMIT_NULLIFIER_WRITE_OFFSET 208 -#define START_EMIT_L2_TO_L1_MSG_WRITE_OFFSET 224 -#define START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET 226 +#define START_SLOAD_WRITE_OFFSET 127 +#define START_EMIT_NOTE_HASH_WRITE_OFFSET 191 +#define START_EMIT_NULLIFIER_WRITE_OFFSET 207 +#define START_EMIT_L2_TO_L1_MSG_WRITE_OFFSET 223 +#define START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET 225 #define MAX_L2_GAS_PER_TX_PUBLIC_PORTION 6000000 #define AVM_POSEIDON2_BASE_L2_GAS 36 #define AVM_ADD_BASE_L2_GAS 27 diff --git a/barretenberg/sol/src/honk/BaseHonkVerifier.sol b/barretenberg/sol/src/honk/BaseHonkVerifier.sol new file mode 100644 index 000000000000..5c38218a041d --- /dev/null +++ b/barretenberg/sol/src/honk/BaseHonkVerifier.sol @@ -0,0 +1,499 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 
2024 Aztec Labs
+pragma solidity >=0.8.21;
+
+import {IVerifier} from "../interfaces/IVerifier.sol";
+import {
+    Honk,
+    WIRE,
+    NUMBER_OF_ENTITIES,
+    NUMBER_OF_SUBRELATIONS,
+    NUMBER_OF_ALPHAS,
+    NUMBER_UNSHIFTED,
+    BATCHED_RELATION_PARTIAL_LENGTH,
+    CONST_PROOF_SIZE_LOG_N
+} from "./HonkTypes.sol";
+
+import {ecMul, ecAdd, ecSub, negateInplace, convertProofPoint} from "./utils.sol";
+
+// Field arithmetic libraries - prevent littering the code with modmul / addmul
+import {MODULUS as P, MINUS_ONE, Fr, FrLib} from "./Fr.sol";
+
+import {Transcript, TranscriptLib} from "./Transcript.sol";
+
+import {RelationsLib} from "./Relations.sol";
+
+abstract contract BaseHonkVerifier is IVerifier {
+    using FrLib for Fr;
+
+    uint256 immutable N;
+    uint256 immutable logN;
+    uint256 immutable numPublicInputs;
+
+    constructor(uint256 _N, uint256 _logN, uint256 _numPublicInputs) {
+        N = _N;
+        logN = _logN;
+        numPublicInputs = _numPublicInputs;
+    }
+
+    // Errors
+    error PublicInputsLengthWrong();
+    error SumcheckFailed();
+    error ShpleminiFailed();
+
+    function loadVerificationKey() internal pure virtual returns (Honk.VerificationKey memory);
+
+    function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) {
+        Honk.VerificationKey memory vk = loadVerificationKey();
+        Honk.Proof memory p = TranscriptLib.loadProof(proof);
+
+        if (publicInputs.length != vk.publicInputsSize) {
+            revert PublicInputsLengthWrong();
+        }
+
+        // Generate the Fiat-Shamir challenges for the whole protocol
+        Transcript memory t = TranscriptLib.generateTranscript(p, publicInputs, numPublicInputs);
+
+        // Compute the public input delta
+        t.publicInputsDelta = computePublicInputDelta(publicInputs, t.beta, t.gamma, p.publicInputsOffset);
+
+        // Sumcheck
+        bool sumcheckVerified = verifySumcheck(p, t);
+        if (!sumcheckVerified) revert SumcheckFailed();
+
+        bool shpleminiVerified = verifyShplemini(p, vk, t);
+        if (!shpleminiVerified) revert ShpleminiFailed();
+
+        return sumcheckVerified && shpleminiVerified; // Boolean condition not required - nice for vanity :)
+    }
+
+    function computePublicInputDelta(bytes32[] memory publicInputs, Fr beta, Fr gamma, uint256 offset)
+        internal
+        view
+        returns (Fr publicInputDelta)
+    {
+        Fr numerator = Fr.wrap(1);
+        Fr denominator = Fr.wrap(1);
+
+        Fr numeratorAcc = gamma + (beta * FrLib.from(N + offset));
+        Fr denominatorAcc = gamma - (beta * FrLib.from(offset + 1));
+
+        {
+            for (uint256 i = 0; i < numPublicInputs; i++) {
+                Fr pubInput = FrLib.fromBytes32(publicInputs[i]);
+
+                numerator = numerator * (numeratorAcc + pubInput);
+                denominator = denominator * (denominatorAcc + pubInput);
+
+                numeratorAcc = numeratorAcc + beta;
+                denominatorAcc = denominatorAcc - beta;
+            }
+        }
+
+        // Fr delta = numerator / denominator; // TODO: batch invert later?
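Note: computePublicInputDelta evaluates delta = numerator / denominator with numerator = prod_i (gamma + beta * (N + offset + i) + w_i) and denominator = prod_i (gamma - beta * (offset + 1 + i) + w_i), maintaining both linear terms as running accumulators. A C++ sketch of the same computation over a toy prime field; 1e9+7 stands in for the BN254 scalar field modulus.

#include <cstdint>
#include <iostream>
#include <vector>

constexpr uint64_t P = 1'000'000'007ULL; // toy modulus
uint64_t addm(uint64_t a, uint64_t b) { return (a + b) % P; }
uint64_t subm(uint64_t a, uint64_t b) { return (a + P - b % P) % P; }
uint64_t mulm(uint64_t a, uint64_t b) { return static_cast<uint64_t>(static_cast<__uint128_t>(a) * b % P); }
uint64_t powm(uint64_t a, uint64_t e) { uint64_t r = 1; for (; e != 0; e >>= 1, a = mulm(a, a)) if (e & 1) r = mulm(r, a); return r; }
uint64_t invm(uint64_t a) { return powm(a, P - 2); } // Fermat inverse; fine for a prime toy modulus

uint64_t public_input_delta(const std::vector<uint64_t>& pub, uint64_t beta, uint64_t gamma, uint64_t n, uint64_t offset)
{
    uint64_t num = 1;
    uint64_t den = 1;
    uint64_t num_acc = addm(gamma, mulm(beta, (n + offset) % P));
    uint64_t den_acc = subm(gamma, mulm(beta, (offset + 1) % P));
    for (uint64_t w : pub) {
        num = mulm(num, addm(num_acc, w));
        den = mulm(den, addm(den_acc, w));
        num_acc = addm(num_acc, beta); // next numerator linear term
        den_acc = subm(den_acc, beta); // next denominator linear term
    }
    return mulm(num, invm(den));
}

int main() { std::cout << public_input_delta({ 42 }, 3, 7, 8, 1) << "\n"; }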
+        publicInputDelta = FrLib.div(numerator, denominator);
+    }
+
+    uint256 constant ROUND_TARGET = 0;
+
+    function verifySumcheck(Honk.Proof memory proof, Transcript memory tp) internal view returns (bool verified) {
+        Fr roundTarget;
+        Fr powPartialEvaluation = Fr.wrap(1);
+
+        // We perform sumcheck reductions over log n rounds (the multivariate degree)
+        for (uint256 round; round < logN; ++round) {
+            Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate = proof.sumcheckUnivariates[round];
+            bool valid = checkSum(roundUnivariate, roundTarget);
+            if (!valid) revert SumcheckFailed();
+
+            Fr roundChallenge = tp.sumCheckUChallenges[round];
+
+            // Update the round target for the next round
+            roundTarget = computeNextTargetSum(roundUnivariate, roundChallenge);
+            powPartialEvaluation = partiallyEvaluatePOW(tp, powPartialEvaluation, roundChallenge, round);
+        }
+
+        // Last round
+        Fr grandHonkRelationSum = RelationsLib.accumulateRelationEvaluations(proof, tp, powPartialEvaluation);
+        verified = (grandHonkRelationSum == roundTarget);
+    }
+
+    function checkSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate, Fr roundTarget)
+        internal
+        pure
+        returns (bool checked)
+    {
+        Fr totalSum = roundUnivariate[0] + roundUnivariate[1];
+        checked = totalSum == roundTarget;
+    }
+
+    // Return the new target sum for the next sumcheck round
+    function computeNextTargetSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariates, Fr roundChallenge)
+        internal
+        view
+        returns (Fr targetSum)
+    {
+        // TODO: inline
+        Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_LAGRANGE_DENOMINATORS = [
+            Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffec51),
+            Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000002d0),
+            Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff11),
+            Fr.wrap(0x0000000000000000000000000000000000000000000000000000000000000090),
+            Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff71),
+            Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000000f0),
+            Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffd31),
+            Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000013b0)
+        ];
+
+        Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_DOMAIN = [
+            Fr.wrap(0x00),
+            Fr.wrap(0x01),
+            Fr.wrap(0x02),
+            Fr.wrap(0x03),
+            Fr.wrap(0x04),
+            Fr.wrap(0x05),
+            Fr.wrap(0x06),
+            Fr.wrap(0x07)
+        ];
+        // To compute the next target sum, we evaluate the given univariate at a point u (challenge).
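+        // Barycentric form over the domain {0,...,7}: with B(u) = prod_j (u - j) and d_j the
+        // precomputed Lagrange denominators above, the evaluation is (sketch)
+        //   S(u) = B(u) * sum_j S_j / (d_j * (u - j))
+        // which is exactly what the three loops below accumulate.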
+
+        // TODO: opt: use same array mem for each iteration
+        // Performing Barycentric evaluations
+        // Compute B(x)
+        Fr numeratorValue = Fr.wrap(1);
+        for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) {
+            numeratorValue = numeratorValue * (roundChallenge - Fr.wrap(i));
+        }
+
+        // Compute the inverted denominators -- TODO: batch with Montgomery's trick
+        Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory denominatorInverses;
+        for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) {
+            Fr inv = BARYCENTRIC_LAGRANGE_DENOMINATORS[i];
+            inv = inv * (roundChallenge - BARYCENTRIC_DOMAIN[i]);
+            inv = FrLib.invert(inv);
+            denominatorInverses[i] = inv;
+        }
+
+        for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) {
+            Fr term = roundUnivariates[i];
+            term = term * denominatorInverses[i];
+            targetSum = targetSum + term;
+        }
+
+        // Scale the sum by the value of B(x)
+        targetSum = targetSum * numeratorValue;
+    }
+
+    // Univariate evaluation of the monomial ((1-X_l) + X_l.B_l) at the challenge point X_l=u_l
+    function partiallyEvaluatePOW(Transcript memory tp, Fr currentEvaluation, Fr roundChallenge, uint256 round)
+        internal
+        pure
+        returns (Fr newEvaluation)
+    {
+        Fr univariateEval = Fr.wrap(1) + (roundChallenge * (tp.gateChallenges[round] - Fr.wrap(1)));
+        newEvaluation = currentEvaluation * univariateEval;
+    }
+
+    // Avoid stack too deep
+    struct ShpleminiIntermediates {
+        Fr unshiftedScalar;
+        Fr shiftedScalar;
+        // Scalar to be multiplied by [1]₁
+        Fr constantTermAccumulator;
+        // Accumulator for powers of rho
+        Fr batchingChallenge;
+        // Linear combination of multilinear (sumcheck) evaluations and powers of rho
+        Fr batchedEvaluation;
+    }
+
+    function verifyShplemini(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp)
+        internal
+        view
+        returns (bool verified)
+    {
+        ShpleminiIntermediates memory mem; // stack
+
+        // - Compute vector (r, r², ..., r²⁽ⁿ⁻¹⁾), where n = log_circuit_size
+        Fr[CONST_PROOF_SIZE_LOG_N] memory powers_of_evaluation_challenge = computeSquares(tp.geminiR);
+
+        // Arrays hold values that will be linearly combined for the gemini and shplonk batch openings
+        Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory scalars;
+        Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory commitments;
+
+        Fr[CONST_PROOF_SIZE_LOG_N + 1] memory inverse_vanishing_evals =
+            computeInvertedGeminiDenominators(tp, powers_of_evaluation_challenge);
+
+        mem.unshiftedScalar = inverse_vanishing_evals[0] + (tp.shplonkNu * inverse_vanishing_evals[1]);
+        mem.shiftedScalar =
+            tp.geminiR.invert() * (inverse_vanishing_evals[0] - (tp.shplonkNu * inverse_vanishing_evals[1]));
+
+        scalars[0] = Fr.wrap(1);
+        commitments[0] = convertProofPoint(proof.shplonkQ);
+
+        /* Batch multivariate opening claims, shifted and unshifted
+         * The vector of scalars is populated as follows:
+         * \f[
+         * \left(
+         * - \left(\frac{1}{z-r} + \nu \times \frac{1}{z+r}\right),
+         * \ldots,
+         * - \rho^{i+k-1} \times \left(\frac{1}{z-r} + \nu \times \frac{1}{z+r}\right),
+         * - \rho^{i+k} \times \frac{1}{r} \times \left(\frac{1}{z-r} - \nu \times \frac{1}{z+r}\right),
+         * \ldots,
+         * - \rho^{k+m-1} \times \frac{1}{r} \times \left(\frac{1}{z-r} - \nu \times \frac{1}{z+r}\right)
+         * \right)
+         * \f]
+         *
+         * The following vector is concatenated to the vector of commitments:
+         * \f[
+         * f_0, \ldots, f_{m-1}, f_{\text{shift}, 0}, \ldots, f_{\text{shift}, k-1}
+         * \f]
+         *
+         * Simultaneously, the evaluation of the multilinear polynomial
+         * \f[
+         * \sum \rho^i \cdot f_i + \sum \rho^{i+k} \cdot f_{\text{shift}, i}
+         * \f]
+         * at the challenge point \f$ (u_0,\ldots, u_{n-1}) \f$ is computed.
+         *
+         * This approach minimizes the number of iterations over the commitments to multilinear polynomials
+         * and eliminates the need to store the powers of \f$ \rho \f$.
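+         *
+         * Reading the two loops below concretely: on the evaluation side the verifier
+         * accumulates sum_i rho^i * f_i(u), while against commitment i it stages the
+         * scalar -rho^i * (unshifted or shifted scalar), so a single multi-scalar
+         * multiplication later settles every opening claim at once.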
+         */
+        mem.batchingChallenge = Fr.wrap(1);
+        mem.batchedEvaluation = Fr.wrap(0);
+
+        for (uint256 i = 1; i <= NUMBER_UNSHIFTED; ++i) {
+            scalars[i] = mem.unshiftedScalar.neg() * mem.batchingChallenge;
+            mem.batchedEvaluation = mem.batchedEvaluation + (proof.sumcheckEvaluations[i - 1] * mem.batchingChallenge);
+            mem.batchingChallenge = mem.batchingChallenge * tp.rho;
+        }
+        // g (to-be-shifted) commitments are accumulated with the shifted scalar
+        for (uint256 i = NUMBER_UNSHIFTED + 1; i <= NUMBER_OF_ENTITIES; ++i) {
+            scalars[i] = mem.shiftedScalar.neg() * mem.batchingChallenge;
+            mem.batchedEvaluation = mem.batchedEvaluation + (proof.sumcheckEvaluations[i - 1] * mem.batchingChallenge);
+            mem.batchingChallenge = mem.batchingChallenge * tp.rho;
+        }
+
+        commitments[1] = vk.qm;
+        commitments[2] = vk.qc;
+        commitments[3] = vk.ql;
+        commitments[4] = vk.qr;
+        commitments[5] = vk.qo;
+        commitments[6] = vk.q4;
+        commitments[7] = vk.qLookup;
+        commitments[8] = vk.qArith;
+        commitments[9] = vk.qDeltaRange;
+        commitments[10] = vk.qElliptic;
+        commitments[11] = vk.qAux;
+        commitments[12] = vk.qPoseidon2External;
+        commitments[13] = vk.qPoseidon2Internal;
+        commitments[14] = vk.s1;
+        commitments[15] = vk.s2;
+        commitments[16] = vk.s3;
+        commitments[17] = vk.s4;
+        commitments[18] = vk.id1;
+        commitments[19] = vk.id2;
+        commitments[20] = vk.id3;
+        commitments[21] = vk.id4;
+        commitments[22] = vk.t1;
+        commitments[23] = vk.t2;
+        commitments[24] = vk.t3;
+        commitments[25] = vk.t4;
+        commitments[26] = vk.lagrangeFirst;
+        commitments[27] = vk.lagrangeLast;
+
+        // Accumulate proof points
+        commitments[28] = convertProofPoint(proof.w1);
+        commitments[29] = convertProofPoint(proof.w2);
+        commitments[30] = convertProofPoint(proof.w3);
+        commitments[31] = convertProofPoint(proof.w4);
+        commitments[32] = convertProofPoint(proof.zPerm);
+        commitments[33] = convertProofPoint(proof.lookupInverses);
+        commitments[34] = convertProofPoint(proof.lookupReadCounts);
+        commitments[35] = convertProofPoint(proof.lookupReadTags);
+
+        // To be shifted
+        commitments[36] = convertProofPoint(proof.w1);
+        commitments[37] = convertProofPoint(proof.w2);
+        commitments[38] = convertProofPoint(proof.w3);
+        commitments[39] = convertProofPoint(proof.w4);
+        commitments[40] = convertProofPoint(proof.zPerm);
+
+        /* Batch gemini claims from the prover
+         * Place the commitments to the gemini fold polynomials aᵢ into the vector of commitments,
+         * compute the contributions from aᵢ(−r²ⁱ) for i=1, …, n−1 to the constant term
+         * accumulator, and add the corresponding scalars.
+         *
+         * 1. Moves the vector
+         * \f[
+         * \left( \text{com}(A_1), \text{com}(A_2), \ldots, \text{com}(A_{n-1}) \right)
+         * \f]
+         * to the 'commitments' vector.
+         *
+         * 2. Computes the scalars:
+         * \f[
+         * \frac{\nu^{2}}{z + r^2}, \frac{\nu^3}{z + r^4}, \ldots, \frac{\nu^{n-1}}{z + r^{2^{n-1}}}
+         * \f]
+         * and places them into the 'scalars' vector.
+         *
+         * 3. Accumulates the summands of the constant term:
+         * \f[
+         * \sum_{i=2}^{n-1} \frac{\nu^{i} \cdot A_i(-r^{2^i})}{z + r^{2^i}}
+         * \f]
+         * and adds them to the 'constant_term_accumulator'.
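+         *
+         * Rounds with index i >= logN - 1 are padding up to CONST_PROOF_SIZE_LOG_N: the
+         * dummy_round flag below leaves their scalars at zero, so the padded commitments
+         * contribute nothing to the batch.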
+         */
+        mem.constantTermAccumulator = Fr.wrap(0);
+        mem.batchingChallenge = tp.shplonkNu.sqr();
+
+        for (uint256 i; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) {
+            bool dummy_round = i >= (logN - 1);
+
+            Fr scalingFactor = Fr.wrap(0);
+            if (!dummy_round) {
+                scalingFactor = mem.batchingChallenge * inverse_vanishing_evals[i + 2];
+                scalars[NUMBER_OF_ENTITIES + 1 + i] = scalingFactor.neg();
+            }
+
+            mem.constantTermAccumulator =
+                mem.constantTermAccumulator + (scalingFactor * proof.geminiAEvaluations[i + 1]);
+            mem.batchingChallenge = mem.batchingChallenge * tp.shplonkNu;
+
+            commitments[NUMBER_OF_ENTITIES + 1 + i] = convertProofPoint(proof.geminiFoldComms[i]);
+        }
+
+        // Add contributions from A₀(r) and A₀(-r) to constant_term_accumulator:
+        // Compute evaluation A₀(r)
+        Fr a_0_pos = computeGeminiBatchedUnivariateEvaluation(
+            tp, mem.batchedEvaluation, proof.geminiAEvaluations, powers_of_evaluation_challenge
+        );
+
+        mem.constantTermAccumulator = mem.constantTermAccumulator + (a_0_pos * inverse_vanishing_evals[0]);
+        mem.constantTermAccumulator =
+            mem.constantTermAccumulator + (proof.geminiAEvaluations[0] * tp.shplonkNu * inverse_vanishing_evals[1]);
+
+        // Finalise the batch opening claim
+        commitments[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N] = Honk.G1Point({x: 1, y: 2});
+        scalars[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N] = mem.constantTermAccumulator;
+
+        Honk.G1Point memory quotient_commitment = convertProofPoint(proof.kzgQuotient);
+
+        commitments[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] = quotient_commitment;
+        scalars[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] = tp.shplonkZ; // evaluation challenge
+
+        Honk.G1Point memory P_0 = batchMul(commitments, scalars);
+        Honk.G1Point memory P_1 = negateInplace(quotient_commitment);
+
+        return pairing(P_0, P_1);
+    }
+
+    function computeSquares(Fr r) internal pure returns (Fr[CONST_PROOF_SIZE_LOG_N] memory squares) {
+        squares[0] = r;
+        for (uint256 i = 1; i < CONST_PROOF_SIZE_LOG_N; ++i) {
+            squares[i] = squares[i - 1].sqr();
+        }
+    }
+
+    function computeInvertedGeminiDenominators(
+        Transcript memory tp,
+        Fr[CONST_PROOF_SIZE_LOG_N] memory eval_challenge_powers
+    ) internal view returns (Fr[CONST_PROOF_SIZE_LOG_N + 1] memory inverse_vanishing_evals) {
+        Fr eval_challenge = tp.shplonkZ;
+        inverse_vanishing_evals[0] = (eval_challenge - eval_challenge_powers[0]).invert();
+
+        for (uint256 i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) {
+            Fr round_inverted_denominator = Fr.wrap(0);
+            if (i <= logN + 1) {
+                round_inverted_denominator = (eval_challenge + eval_challenge_powers[i]).invert();
+            }
+            inverse_vanishing_evals[i + 1] = round_inverted_denominator;
+        }
+    }
+
+    function computeGeminiBatchedUnivariateEvaluation(
+        Transcript memory tp,
+        Fr batchedEvalAccumulator,
+        Fr[CONST_PROOF_SIZE_LOG_N] memory geminiEvaluations,
+        Fr[CONST_PROOF_SIZE_LOG_N] memory geminiEvalChallengePowers
+    ) internal view returns (Fr a_0_pos) {
+        for (uint256 i = CONST_PROOF_SIZE_LOG_N; i > 0; --i) {
+            Fr challengePower = geminiEvalChallengePowers[i - 1];
+            Fr u = tp.sumCheckUChallenges[i - 1];
+            Fr evalNeg = geminiEvaluations[i - 1];
+
+            Fr batchedEvalRoundAcc = (
+                (challengePower * batchedEvalAccumulator * Fr.wrap(2))
+                    - evalNeg * (challengePower * (Fr.wrap(1) - u) - u)
+            );
+            // Divide by the denominator
+            batchedEvalRoundAcc = batchedEvalRoundAcc * (challengePower * (Fr.wrap(1) - u) + u).invert();
+
+            bool is_dummy_round = (i > logN);
+            if (!is_dummy_round) {
+                batchedEvalAccumulator = batchedEvalRoundAcc;
+            }
+        }
+
+        a_0_pos = batchedEvalAccumulator;
+    }
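+
+    // For readability, the assembly in batchMul below is equivalent to this sketch
+    // (assuming the imported ecMul/ecAdd wrappers take a G1 point and a scalar,
+    // respectively two G1 points; signatures are an assumption, not the precompile ABI):
+    //
+    //   Honk.G1Point memory acc = ecMul(base[0], scalars[0]);
+    //   for (uint256 i = 1; i < limit; ++i) {
+    //       acc = ecAdd(acc, ecMul(base[i], scalars[i]));
+    //   }
+    //   return acc;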
+
+    // Compute the linear combination sum_i scalars[i] * base[i] using the ecMul (0x07)
+    // and ecAdd (0x06) precompiles
+    function batchMul(
+        Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory base,
+        Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory scalars
+    ) internal view returns (Honk.G1Point memory result) {
+        uint256 limit = NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2;
+        assembly {
+            let success := 0x01
+            let free := mload(0x40)
+
+            // Write the original into the accumulator
+            // Load into memory for ecMul, leave offset for ecAdd result
+            // base is an array of pointers, so we have to dereference them
+            mstore(add(free, 0x40), mload(mload(base)))
+            mstore(add(free, 0x60), mload(add(0x20, mload(base))))
+            // Add scalar
+            mstore(add(free, 0x80), mload(scalars))
+            success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, free, 0x40))
+
+            let count := 0x01
+            for {} lt(count, limit) { count := add(count, 1) } {
+                // Get loop offsets
+                let base_base := add(base, mul(count, 0x20))
+                let scalar_base := add(scalars, mul(count, 0x20))
+
+                mstore(add(free, 0x40), mload(mload(base_base)))
+                mstore(add(free, 0x60), mload(add(0x20, mload(base_base))))
+                // Add scalar
+                mstore(add(free, 0x80), mload(scalar_base))
+
+                success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, add(free, 0x40), 0x40))
+                // accumulator = accumulator + accumulator_2
+                success := and(success, staticcall(gas(), 6, free, 0x80, free, 0x40))
+            }
+
+            // Copy the accumulated point into the return struct
+            mstore(result, mload(free))
+            mstore(add(result, 0x20), mload(add(free, 0x20)))
+        }
+    }
+
+    function pairing(Honk.G1Point memory rhs, Honk.G1Point memory lhs) internal view returns (bool) {
+        bytes memory input = abi.encodePacked(
+            rhs.x,
+            rhs.y,
+            // Fixed G2 point (the G2 group generator)
+            uint256(0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2),
+            uint256(0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed),
+            uint256(0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b),
+            uint256(0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa),
+            lhs.x,
+            lhs.y,
+            // G2 point from VK
+            uint256(0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1),
+            uint256(0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0),
+            uint256(0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4),
+            uint256(0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55)
+        );
+
+        (bool success, bytes memory result) = address(0x08).staticcall(input);
+        bool decodedResult = abi.decode(result, (bool));
+        return success && decodedResult;
+    }
+}
diff --git a/barretenberg/sol/src/honk/HonkTypes.sol b/barretenberg/sol/src/honk/HonkTypes.sol
index 7832c767fc4f..bedb2a28a444 100644
--- a/barretenberg/sol/src/honk/HonkTypes.sol
+++ b/barretenberg/sol/src/honk/HonkTypes.sol
@@ -1,9 +1,6 @@
 // SPDX-License-Identifier: MIT
 pragma solidity >=0.8.21;
 
-// Temp only set here for testing, logn will be templated
-import {LOG_N} from "./keys/Add2HonkVerificationKey.sol";
-
 import {Fr} from "./Fr.sol";
 
 uint256 constant CONST_PROOF_SIZE_LOG_N = 28;
diff --git a/barretenberg/sol/src/honk/instance/Add2Honk.sol b/barretenberg/sol/src/honk/instance/Add2Honk.sol
index 9107e9d1e2a8..d70c26ce1b03 100644
--- a/barretenberg/sol/src/honk/instance/Add2Honk.sol
+++ b/barretenberg/sol/src/honk/instance/Add2Honk.sol
@@ -5,503 +5,12 @@ pragma solidity >=0.8.21;
 
 import {IVerifier} from "../../interfaces/IVerifier.sol";
 import {Add2HonkVerificationKey as VK, N, LOG_N, NUMBER_OF_PUBLIC_INPUTS} from
"../keys/Add2HonkVerificationKey.sol"; -import { - Honk, - WIRE, - NUMBER_OF_ENTITIES, - NUMBER_OF_SUBRELATIONS, - NUMBER_OF_ALPHAS, - NUMBER_UNSHIFTED, - BATCHED_RELATION_PARTIAL_LENGTH, - CONST_PROOF_SIZE_LOG_N -} from "../HonkTypes.sol"; - -import {ecMul, ecAdd, ecSub, negateInplace, convertProofPoint} from "../utils.sol"; - -// Field arithmetic libraries - prevent littering the code with modmul / addmul -import {MODULUS as P, MINUS_ONE, Fr, FrLib} from "../Fr.sol"; - -import {Transcript, TranscriptLib} from "../Transcript.sol"; - -import {RelationsLib} from "../Relations.sol"; - -// Errors -error PublicInputsLengthWrong(); -error SumcheckFailed(); -error ShpleminiFailed(); +import {Honk} from "../HonkTypes.sol"; +import {BaseHonkVerifier as BASE} from "../BaseHonkVerifier.sol"; /// Smart contract verifier of honk proofs -contract Add2HonkVerifier is IVerifier { - using FrLib for Fr; - - function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) { - Honk.VerificationKey memory vk = loadVerificationKey(); - Honk.Proof memory p = TranscriptLib.loadProof(proof); - - if (publicInputs.length != vk.publicInputsSize) { - revert PublicInputsLengthWrong(); - } - - // Generate the fiat shamir challenges for the whole protocol - Transcript memory t = TranscriptLib.generateTranscript(p, publicInputs, vk.publicInputsSize); - - // Compute the public input delta - t.publicInputsDelta = - computePublicInputDelta(publicInputs, t.beta, t.gamma, vk.circuitSize, p.publicInputsOffset); - - // Sumcheck - bool sumcheckVerified = verifySumcheck(p, t); - if (!sumcheckVerified) revert SumcheckFailed(); - - bool shpleminiVerified = verifyShplemini(p, vk, t); - if (!shpleminiVerified) revert ShpleminiFailed(); - - return sumcheckVerified && shpleminiVerified; // Boolean condition not required - nice for vanity :) - } - - function loadVerificationKey() internal pure returns (Honk.VerificationKey memory) { +contract Add2HonkVerifier is BASE(N, LOG_N, NUMBER_OF_PUBLIC_INPUTS) { + function loadVerificationKey() internal pure override returns (Honk.VerificationKey memory) { return VK.loadVerificationKey(); } - - function computePublicInputDelta( - bytes32[] memory publicInputs, - Fr beta, - Fr gamma, - uint256 domainSize, - uint256 offset - ) internal view returns (Fr publicInputDelta) { - Fr numerator = Fr.wrap(1); - Fr denominator = Fr.wrap(1); - - Fr numeratorAcc = gamma + (beta * FrLib.from(domainSize + offset)); - Fr denominatorAcc = gamma - (beta * FrLib.from(offset + 1)); - - { - for (uint256 i = 0; i < NUMBER_OF_PUBLIC_INPUTS; i++) { - Fr pubInput = FrLib.fromBytes32(publicInputs[i]); - - numerator = numerator * (numeratorAcc + pubInput); - denominator = denominator * (denominatorAcc + pubInput); - - numeratorAcc = numeratorAcc + beta; - denominatorAcc = denominatorAcc - beta; - } - } - - // Fr delta = numerator / denominator; // TOOO: batch invert later? 
- publicInputDelta = FrLib.div(numerator, denominator); - } - - uint256 constant ROUND_TARGET = 0; - - function verifySumcheck(Honk.Proof memory proof, Transcript memory tp) internal view returns (bool verified) { - Fr roundTarget; - Fr powPartialEvaluation = Fr.wrap(1); - - // We perform sumcheck reductions over log n rounds ( the multivariate degree ) - for (uint256 round; round < LOG_N; ++round) { - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate = proof.sumcheckUnivariates[round]; - bool valid = checkSum(roundUnivariate, roundTarget); - if (!valid) revert SumcheckFailed(); - - Fr roundChallenge = tp.sumCheckUChallenges[round]; - - // Update the round target for the next rounf - roundTarget = computeNextTargetSum(roundUnivariate, roundChallenge); - powPartialEvaluation = partiallyEvaluatePOW(tp, powPartialEvaluation, roundChallenge, round); - } - - // Last round - Fr grandHonkRelationSum = RelationsLib.accumulateRelationEvaluations(proof, tp, powPartialEvaluation); - verified = (grandHonkRelationSum == roundTarget); - } - - function checkSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate, Fr roundTarget) - internal - pure - returns (bool checked) - { - Fr totalSum = roundUnivariate[0] + roundUnivariate[1]; - checked = totalSum == roundTarget; - } - - // Return the new target sum for the next sumcheck round - function computeNextTargetSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariates, Fr roundChallenge) - internal - view - returns (Fr targetSum) - { - // TODO: inline - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_LAGRANGE_DENOMINATORS = [ - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffec51), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000002d0), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff11), - Fr.wrap(0x0000000000000000000000000000000000000000000000000000000000000090), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff71), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000000f0), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffd31), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000013b0) - ]; - - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_DOMAIN = [ - Fr.wrap(0x00), - Fr.wrap(0x01), - Fr.wrap(0x02), - Fr.wrap(0x03), - Fr.wrap(0x04), - Fr.wrap(0x05), - Fr.wrap(0x06), - Fr.wrap(0x07) - ]; - // To compute the next target sum, we evaluate the given univariate at a point u (challenge). 
- - // TODO: opt: use same array mem for each iteratioon - // Performing Barycentric evaluations - // Compute B(x) - Fr numeratorValue = Fr.wrap(1); - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - numeratorValue = numeratorValue * (roundChallenge - Fr.wrap(i)); - } - - // Calculate domain size N of inverses -- TODO: montgomery's trick - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory denominatorInverses; - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - Fr inv = BARYCENTRIC_LAGRANGE_DENOMINATORS[i]; - inv = inv * (roundChallenge - BARYCENTRIC_DOMAIN[i]); - inv = FrLib.invert(inv); - denominatorInverses[i] = inv; - } - - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - Fr term = roundUnivariates[i]; - term = term * denominatorInverses[i]; - targetSum = targetSum + term; - } - - // Scale the sum by the value of B(x) - targetSum = targetSum * numeratorValue; - } - - // Univariate evaluation of the monomial ((1-X_l) + X_l.B_l) at the challenge point X_l=u_l - function partiallyEvaluatePOW(Transcript memory tp, Fr currentEvaluation, Fr roundChallenge, uint256 round) - internal - pure - returns (Fr newEvaluation) - { - Fr univariateEval = Fr.wrap(1) + (roundChallenge * (tp.gateChallenges[round] - Fr.wrap(1))); - newEvaluation = currentEvaluation * univariateEval; - } - - // Avoid stack too deep - struct ShpleminiIntermediates { - Fr unshiftedScalar; - Fr shiftedScalar; - // Scalar to be multiplied by [1]₁ - Fr constantTermAccumulator; - // Accumulator for powers of rho - Fr batchingChallenge; - // Linear combination of multilinear (sumcheck) evaluations and powers of rho - Fr batchedEvaluation; - } - - function verifyShplemini(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) - internal - view - returns (bool verified) - { - ShpleminiIntermediates memory mem; // stack - - // - Compute vector (r, r², ... 
, r²⁽ⁿ⁻¹⁾), where n = log_circuit_size - Fr[CONST_PROOF_SIZE_LOG_N] memory powers_of_evaluation_challenge = computeSquares(tp.geminiR); - - // Arrays hold values that will be linearly combined for the gemini and shplonk batch openings - Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory scalars; - Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory commitments; - - Fr[CONST_PROOF_SIZE_LOG_N + 1] memory inverse_vanishing_evals = - computeInvertedGeminiDenominators(tp, powers_of_evaluation_challenge); - - mem.unshiftedScalar = inverse_vanishing_evals[0] + (tp.shplonkNu * inverse_vanishing_evals[1]); - mem.shiftedScalar = - tp.geminiR.invert() * (inverse_vanishing_evals[0] - (tp.shplonkNu * inverse_vanishing_evals[1])); - - scalars[0] = Fr.wrap(1); - commitments[0] = convertProofPoint(proof.shplonkQ); - - /* Batch multivariate opening claims, shifted and unshifted - * The vector of scalars is populated as follows: - * \f[ - * \left( - * - \left(\frac{1}{z-r} + \nu \times \frac{1}{z+r}\right), - * \ldots, - * - \rho^{i+k-1} \times \left(\frac{1}{z-r} + \nu \times \frac{1}{z+r}\right), - * - \rho^{i+k} \times \frac{1}{r} \times \left(\frac{1}{z-r} - \nu \times \frac{1}{z+r}\right), - * \ldots, - * - \rho^{k+m-1} \times \frac{1}{r} \times \left(\frac{1}{z-r} - \nu \times \frac{1}{z+r}\right) - * \right) - * \f] - * - * The following vector is concatenated to the vector of commitments: - * \f[ - * f_0, \ldots, f_{m-1}, f_{\text{shift}, 0}, \ldots, f_{\text{shift}, k-1} - * \f] - * - * Simultaneously, the evaluation of the multilinear polynomial - * \f[ - * \sum \rho^i \cdot f_i + \sum \rho^{i+k} \cdot f_{\text{shift}, i} - * \f] - * at the challenge point \f$ (u_0,\ldots, u_{n-1}) \f$ is computed. - * - * This approach minimizes the number of iterations over the commitments to multilinear polynomials - * and eliminates the need to store the powers of \f$ \rho \f$. 
- */ - mem.batchingChallenge = Fr.wrap(1); - mem.batchedEvaluation = Fr.wrap(0); - - for (uint256 i = 1; i <= NUMBER_UNSHIFTED; ++i) { - scalars[i] = mem.unshiftedScalar.neg() * mem.batchingChallenge; - mem.batchedEvaluation = mem.batchedEvaluation + (proof.sumcheckEvaluations[i - 1] * mem.batchingChallenge); - mem.batchingChallenge = mem.batchingChallenge * tp.rho; - } - // g commitments are accumulated at r - for (uint256 i = NUMBER_UNSHIFTED + 1; i <= NUMBER_OF_ENTITIES; ++i) { - scalars[i] = mem.shiftedScalar.neg() * mem.batchingChallenge; - mem.batchedEvaluation = mem.batchedEvaluation + (proof.sumcheckEvaluations[i - 1] * mem.batchingChallenge); - mem.batchingChallenge = mem.batchingChallenge * tp.rho; - } - - commitments[1] = vk.qm; - commitments[2] = vk.qc; - commitments[3] = vk.ql; - commitments[4] = vk.qr; - commitments[5] = vk.qo; - commitments[6] = vk.q4; - commitments[7] = vk.qLookup; - commitments[8] = vk.qArith; - commitments[9] = vk.qDeltaRange; - commitments[10] = vk.qElliptic; - commitments[11] = vk.qAux; - commitments[12] = vk.qPoseidon2External; - commitments[13] = vk.qPoseidon2Internal; - commitments[14] = vk.s1; - commitments[15] = vk.s2; - commitments[16] = vk.s3; - commitments[17] = vk.s4; - commitments[18] = vk.id1; - commitments[19] = vk.id2; - commitments[20] = vk.id3; - commitments[21] = vk.id4; - commitments[22] = vk.t1; - commitments[23] = vk.t2; - commitments[24] = vk.t3; - commitments[25] = vk.t4; - commitments[26] = vk.lagrangeFirst; - commitments[27] = vk.lagrangeLast; - - // Accumulate proof points - commitments[28] = convertProofPoint(proof.w1); - commitments[29] = convertProofPoint(proof.w2); - commitments[30] = convertProofPoint(proof.w3); - commitments[31] = convertProofPoint(proof.w4); - commitments[32] = convertProofPoint(proof.zPerm); - commitments[33] = convertProofPoint(proof.lookupInverses); - commitments[34] = convertProofPoint(proof.lookupReadCounts); - commitments[35] = convertProofPoint(proof.lookupReadTags); - - // to be Shifted - commitments[36] = convertProofPoint(proof.w1); - commitments[37] = convertProofPoint(proof.w2); - commitments[38] = convertProofPoint(proof.w3); - commitments[39] = convertProofPoint(proof.w4); - commitments[40] = convertProofPoint(proof.zPerm); - - /* Batch gemini claims from the prover - * place the commitments to gemini aᵢ to the vector of commitments, compute the contributions from - * aᵢ(−r²ⁱ) for i=1, … , n−1 to the constant term accumulator, add corresponding scalars - * - * 1. Moves the vector - * \f[ - * \left( \text{com}(A_1), \text{com}(A_2), \ldots, \text{com}(A_{n-1}) \right) - * \f] - * to the 'commitments' vector. - * - * 2. Computes the scalars: - * \f[ - * \frac{\nu^{2}}{z + r^2}, \frac{\nu^3}{z + r^4}, \ldots, \frac{\nu^{n-1}}{z + r^{2^{n-1}}} - * \f] - * and places them into the 'scalars' vector. - * - * 3. Accumulates the summands of the constant term: - * \f[ - * \sum_{i=2}^{n-1} \frac{\nu^{i} \cdot A_i(-r^{2^i})}{z + r^{2^i}} - * \f] - * and adds them to the 'constant_term_accumulator'. 
- */ - mem.constantTermAccumulator = Fr.wrap(0); - mem.batchingChallenge = tp.shplonkNu.sqr(); - - for (uint256 i; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { - bool dummy_round = i >= (LOG_N - 1); - - Fr scalingFactor = Fr.wrap(0); - if (!dummy_round) { - scalingFactor = mem.batchingChallenge * inverse_vanishing_evals[i + 2]; - scalars[NUMBER_OF_ENTITIES + 1 + i] = scalingFactor.neg(); - } - - mem.constantTermAccumulator = - mem.constantTermAccumulator + (scalingFactor * proof.geminiAEvaluations[i + 1]); - mem.batchingChallenge = mem.batchingChallenge * tp.shplonkNu; - - commitments[NUMBER_OF_ENTITIES + 1 + i] = convertProofPoint(proof.geminiFoldComms[i]); - } - - // Add contributions from A₀(r) and A₀(-r) to constant_term_accumulator: - // Compute evaluation A₀(r) - Fr a_0_pos = computeGeminiBatchedUnivariateEvaluation( - tp, mem.batchedEvaluation, proof.geminiAEvaluations, powers_of_evaluation_challenge - ); - - mem.constantTermAccumulator = mem.constantTermAccumulator + (a_0_pos * inverse_vanishing_evals[0]); - mem.constantTermAccumulator = - mem.constantTermAccumulator + (proof.geminiAEvaluations[0] * tp.shplonkNu * inverse_vanishing_evals[1]); - - // Finalise the batch opening claim - commitments[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N] = Honk.G1Point({x: 1, y: 2}); - scalars[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N] = mem.constantTermAccumulator; - - Honk.G1Point memory quotient_commitment = convertProofPoint(proof.kzgQuotient); - - commitments[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] = quotient_commitment; - scalars[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] = tp.shplonkZ; // evaluation challenge - - Honk.G1Point memory P_0 = batchMul(commitments, scalars); - Honk.G1Point memory P_1 = negateInplace(quotient_commitment); - - return pairing(P_0, P_1); - } - - function computeSquares(Fr r) internal pure returns (Fr[CONST_PROOF_SIZE_LOG_N] memory squares) { - squares[0] = r; - for (uint256 i = 1; i < CONST_PROOF_SIZE_LOG_N; ++i) { - squares[i] = squares[i - 1].sqr(); - } - } - - function computeInvertedGeminiDenominators( - Transcript memory tp, - Fr[CONST_PROOF_SIZE_LOG_N] memory eval_challenge_powers - ) internal view returns (Fr[CONST_PROOF_SIZE_LOG_N + 1] memory inverse_vanishing_evals) { - Fr eval_challenge = tp.shplonkZ; - inverse_vanishing_evals[0] = (eval_challenge - eval_challenge_powers[0]).invert(); - - for (uint256 i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - Fr round_inverted_denominator = Fr.wrap(0); - if (i <= LOG_N + 1) { - round_inverted_denominator = (eval_challenge + eval_challenge_powers[i]).invert(); - } - inverse_vanishing_evals[i + 1] = round_inverted_denominator; - } - } - - function computeGeminiBatchedUnivariateEvaluation( - Transcript memory tp, - Fr batchedEvalAccumulator, - Fr[CONST_PROOF_SIZE_LOG_N] memory geminiEvaluations, - Fr[CONST_PROOF_SIZE_LOG_N] memory geminiEvalChallengePowers - ) internal view returns (Fr a_0_pos) { - for (uint256 i = CONST_PROOF_SIZE_LOG_N; i > 0; --i) { - Fr challengePower = geminiEvalChallengePowers[i - 1]; - Fr u = tp.sumCheckUChallenges[i - 1]; - Fr evalNeg = geminiEvaluations[i - 1]; - - Fr batchedEvalRoundAcc = ( - (challengePower * batchedEvalAccumulator * Fr.wrap(2)) - - evalNeg * (challengePower * (Fr.wrap(1) - u) - u) - ); - // Divide by the denominator - batchedEvalRoundAcc = batchedEvalRoundAcc * (challengePower * (Fr.wrap(1) - u) + u).invert(); - - bool is_dummy_round = (i > LOG_N); - if (!is_dummy_round) { - batchedEvalAccumulator = batchedEvalRoundAcc; - } - } - - a_0_pos = batchedEvalAccumulator; - 
} - - // This implementation is the same as above with different constants - function batchMul( - Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory base, - Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory scalars - ) internal view returns (Honk.G1Point memory result) { - uint256 limit = NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2; - assembly { - let success := 0x01 - let free := mload(0x40) - - // Write the original into the accumulator - // Load into memory for ecMUL, leave offset for eccAdd result - // base is an array of pointers, so we have to dereference them - mstore(add(free, 0x40), mload(mload(base))) - mstore(add(free, 0x60), mload(add(0x20, mload(base)))) - // Add scalar - mstore(add(free, 0x80), mload(scalars)) - success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, free, 0x40)) - - let count := 0x01 - for {} lt(count, limit) { count := add(count, 1) } { - // Get loop offsets - let base_base := add(base, mul(count, 0x20)) - let scalar_base := add(scalars, mul(count, 0x20)) - - mstore(add(free, 0x40), mload(mload(base_base))) - mstore(add(free, 0x60), mload(add(0x20, mload(base_base)))) - // Add scalar - mstore(add(free, 0x80), mload(scalar_base)) - - success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, add(free, 0x40), 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, free, 0x80, free, 0x40)) - } - - // Return the result - i hate this - mstore(result, mload(free)) - mstore(add(result, 0x20), mload(add(free, 0x20))) - } - } - - function pairing(Honk.G1Point memory rhs, Honk.G1Point memory lhs) internal view returns (bool) { - bytes memory input = abi.encodePacked( - rhs.x, - rhs.y, - // Fixed G1 point - uint256(0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2), - uint256(0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed), - uint256(0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b), - uint256(0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa), - lhs.x, - lhs.y, - // G1 point from VK - uint256(0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1), - uint256(0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0), - uint256(0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4), - uint256(0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55) - ); - - (bool success, bytes memory result) = address(0x08).staticcall(input); - bool decodedResult = abi.decode(result, (bool)); - return success && decodedResult; - } -} - -// Conversion util - Duplicated as we cannot template LOG_N -function convertPoints(Honk.G1ProofPoint[LOG_N + 1] memory commitments) - pure - returns (Honk.G1Point[LOG_N + 1] memory converted) -{ - for (uint256 i; i < LOG_N + 1; ++i) { - converted[i] = convertProofPoint(commitments[i]); - } } diff --git a/barretenberg/sol/src/honk/instance/BlakeHonk.sol b/barretenberg/sol/src/honk/instance/BlakeHonk.sol index b49ee5743734..fd42b9e094e7 100644 --- a/barretenberg/sol/src/honk/instance/BlakeHonk.sol +++ b/barretenberg/sol/src/honk/instance/BlakeHonk.sol @@ -5,504 +5,13 @@ pragma solidity >=0.8.21; import {IVerifier} from "../../interfaces/IVerifier.sol"; import {BlakeHonkVerificationKey as VK, N, LOG_N, NUMBER_OF_PUBLIC_INPUTS} from "../keys/BlakeHonkVerificationKey.sol"; -import { - Honk, - WIRE, - NUMBER_OF_ENTITIES, - NUMBER_OF_SUBRELATIONS, - NUMBER_OF_ALPHAS, - NUMBER_UNSHIFTED, - NUMBER_TO_BE_SHIFTED, - BATCHED_RELATION_PARTIAL_LENGTH, - 
CONST_PROOF_SIZE_LOG_N -} from "../HonkTypes.sol"; +import {Honk} from "../HonkTypes.sol"; -import {ecMul, ecAdd, ecSub, negateInplace, convertProofPoint} from "../utils.sol"; - -// Field arithmetic libraries - prevent littering the code with modmul / addmul -import {MODULUS as P, MINUS_ONE, Fr, FrLib} from "../Fr.sol"; - -import {Transcript, TranscriptLib} from "../Transcript.sol"; - -import {RelationsLib} from "../Relations.sol"; - -// Errors -error PublicInputsLengthWrong(); -error SumcheckFailed(); -error ShpleminiFailed(); +import {BaseHonkVerifier as BASE} from "../BaseHonkVerifier.sol"; /// Smart contract verifier of honk proofs -contract BlakeHonkVerifier is IVerifier { - using FrLib for Fr; - - function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) { - Honk.VerificationKey memory vk = loadVerificationKey(); - Honk.Proof memory p = TranscriptLib.loadProof(proof); - - if (publicInputs.length != vk.publicInputsSize) { - revert PublicInputsLengthWrong(); - } - - // Generate the fiat shamir challenges for the whole protocol - Transcript memory t = TranscriptLib.generateTranscript(p, publicInputs, vk.publicInputsSize); - - // Compute the public input delta - t.publicInputsDelta = - computePublicInputDelta(publicInputs, t.beta, t.gamma, vk.circuitSize, p.publicInputsOffset); - - // Sumcheck - bool sumcheckVerified = verifySumcheck(p, t); - if (!sumcheckVerified) revert SumcheckFailed(); - - bool shpleminiVerified = verifyShplemini(p, vk, t); - if (!shpleminiVerified) revert ShpleminiFailed(); - - return sumcheckVerified && shpleminiVerified; // Boolean condition not required - nice for vanity :) - } - - function loadVerificationKey() internal pure returns (Honk.VerificationKey memory) { +contract BlakeHonkVerifier is BASE(N, LOG_N, NUMBER_OF_PUBLIC_INPUTS) { + function loadVerificationKey() internal pure override returns (Honk.VerificationKey memory) { return VK.loadVerificationKey(); } - - function computePublicInputDelta( - bytes32[] memory publicInputs, - Fr beta, - Fr gamma, - uint256 domainSize, - uint256 offset - ) internal view returns (Fr publicInputDelta) { - Fr numerator = Fr.wrap(1); - Fr denominator = Fr.wrap(1); - - Fr numeratorAcc = gamma + (beta * FrLib.from(domainSize + offset)); - Fr denominatorAcc = gamma - (beta * FrLib.from(offset + 1)); - - { - for (uint256 i = 0; i < NUMBER_OF_PUBLIC_INPUTS; i++) { - Fr pubInput = FrLib.fromBytes32(publicInputs[i]); - - numerator = numerator * (numeratorAcc + pubInput); - denominator = denominator * (denominatorAcc + pubInput); - - numeratorAcc = numeratorAcc + beta; - denominatorAcc = denominatorAcc - beta; - } - } - - // Fr delta = numerator / denominator; // TOOO: batch invert later? 
- publicInputDelta = FrLib.div(numerator, denominator); - } - - uint256 constant ROUND_TARGET = 0; - - function verifySumcheck(Honk.Proof memory proof, Transcript memory tp) internal view returns (bool verified) { - Fr roundTarget; - Fr powPartialEvaluation = Fr.wrap(1); - - // We perform sumcheck reductions over log n rounds ( the multivariate degree ) - for (uint256 round; round < LOG_N; ++round) { - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate = proof.sumcheckUnivariates[round]; - bool valid = checkSum(roundUnivariate, roundTarget); - if (!valid) revert SumcheckFailed(); - - Fr roundChallenge = tp.sumCheckUChallenges[round]; - - // Update the round target for the next rounf - roundTarget = computeNextTargetSum(roundUnivariate, roundChallenge); - powPartialEvaluation = partiallyEvaluatePOW(tp, powPartialEvaluation, roundChallenge, round); - } - - // Last round - Fr grandHonkRelationSum = RelationsLib.accumulateRelationEvaluations(proof, tp, powPartialEvaluation); - verified = (grandHonkRelationSum == roundTarget); - } - - function checkSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate, Fr roundTarget) - internal - pure - returns (bool checked) - { - Fr totalSum = roundUnivariate[0] + roundUnivariate[1]; - checked = totalSum == roundTarget; - } - - // Return the new target sum for the next sumcheck round - function computeNextTargetSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariates, Fr roundChallenge) - internal - view - returns (Fr targetSum) - { - // TODO: inline - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_LAGRANGE_DENOMINATORS = [ - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffec51), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000002d0), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff11), - Fr.wrap(0x0000000000000000000000000000000000000000000000000000000000000090), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff71), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000000f0), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffd31), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000013b0) - ]; - - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_DOMAIN = [ - Fr.wrap(0x00), - Fr.wrap(0x01), - Fr.wrap(0x02), - Fr.wrap(0x03), - Fr.wrap(0x04), - Fr.wrap(0x05), - Fr.wrap(0x06), - Fr.wrap(0x07) - ]; - // To compute the next target sum, we evaluate the given univariate at a point u (challenge). 
- - // TODO: opt: use same array mem for each iteratioon - // Performing Barycentric evaluations - // Compute B(x) - Fr numeratorValue = Fr.wrap(1); - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - numeratorValue = numeratorValue * (roundChallenge - Fr.wrap(i)); - } - - // Calculate domain size N of inverses -- TODO: montgomery's trick - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory denominatorInverses; - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - Fr inv = BARYCENTRIC_LAGRANGE_DENOMINATORS[i]; - inv = inv * (roundChallenge - BARYCENTRIC_DOMAIN[i]); - inv = FrLib.invert(inv); - denominatorInverses[i] = inv; - } - - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - Fr term = roundUnivariates[i]; - term = term * denominatorInverses[i]; - targetSum = targetSum + term; - } - - // Scale the sum by the value of B(x) - targetSum = targetSum * numeratorValue; - } - - // Univariate evaluation of the monomial ((1-X_l) + X_l.B_l) at the challenge point X_l=u_l - function partiallyEvaluatePOW(Transcript memory tp, Fr currentEvaluation, Fr roundChallenge, uint256 round) - internal - pure - returns (Fr newEvaluation) - { - Fr univariateEval = Fr.wrap(1) + (roundChallenge * (tp.gateChallenges[round] - Fr.wrap(1))); - newEvaluation = currentEvaluation * univariateEval; - } - - // Avoid stack too deep - struct ShpleminiIntermediates { - Fr unshiftedScalar; - Fr shiftedScalar; - // Scalar to be multiplied by [1]₁ - Fr constantTermAccumulator; - // Accumulator for powers of rho - Fr batchingChallenge; - // Linear combination of multilinear (sumcheck) evaluations and powers of rho - Fr batchedEvaluation; - } - - function verifyShplemini(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) - internal - view - returns (bool verified) - { - ShpleminiIntermediates memory mem; // stack - - // - Compute vector (r, r², ... 
, r²⁽ⁿ⁻¹⁾), where n = log_circuit_size - Fr[CONST_PROOF_SIZE_LOG_N] memory powers_of_evaluation_challenge = computeSquares(tp.geminiR); - - // Arrays hold values that will be linearly combined for the gemini and shplonk batch openings - Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory scalars; - Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory commitments; - - Fr[CONST_PROOF_SIZE_LOG_N + 1] memory inverse_vanishing_evals = - computeInvertedGeminiDenominators(tp, powers_of_evaluation_challenge); - - mem.unshiftedScalar = inverse_vanishing_evals[0] + (tp.shplonkNu * inverse_vanishing_evals[1]); - mem.shiftedScalar = - tp.geminiR.invert() * (inverse_vanishing_evals[0] - (tp.shplonkNu * inverse_vanishing_evals[1])); - - scalars[0] = Fr.wrap(1); - commitments[0] = convertProofPoint(proof.shplonkQ); - - /* Batch multivariate opening claims, shifted and unshifted - * The vector of scalars is populated as follows: - * \f[ - * \left( - * - \left(\frac{1}{z-r} + \nu \times \frac{1}{z+r}\right), - * \ldots, - * - \rho^{i+k-1} \times \left(\frac{1}{z-r} + \nu \times \frac{1}{z+r}\right), - * - \rho^{i+k} \times \frac{1}{r} \times \left(\frac{1}{z-r} - \nu \times \frac{1}{z+r}\right), - * \ldots, - * - \rho^{k+m-1} \times \frac{1}{r} \times \left(\frac{1}{z-r} - \nu \times \frac{1}{z+r}\right) - * \right) - * \f] - * - * The following vector is concatenated to the vector of commitments: - * \f[ - * f_0, \ldots, f_{m-1}, f_{\text{shift}, 0}, \ldots, f_{\text{shift}, k-1} - * \f] - * - * Simultaneously, the evaluation of the multilinear polynomial - * \f[ - * \sum \rho^i \cdot f_i + \sum \rho^{i+k} \cdot f_{\text{shift}, i} - * \f] - * at the challenge point \f$ (u_0,\ldots, u_{n-1}) \f$ is computed. - * - * This approach minimizes the number of iterations over the commitments to multilinear polynomials - * and eliminates the need to store the powers of \f$ \rho \f$. 
- */ - mem.batchingChallenge = Fr.wrap(1); - mem.batchedEvaluation = Fr.wrap(0); - - for (uint256 i = 1; i <= NUMBER_UNSHIFTED; ++i) { - scalars[i] = mem.unshiftedScalar.neg() * mem.batchingChallenge; - mem.batchedEvaluation = mem.batchedEvaluation + (proof.sumcheckEvaluations[i - 1] * mem.batchingChallenge); - mem.batchingChallenge = mem.batchingChallenge * tp.rho; - } - // g commitments are accumulated at r - for (uint256 i = NUMBER_UNSHIFTED + 1; i <= NUMBER_OF_ENTITIES; ++i) { - scalars[i] = mem.shiftedScalar.neg() * mem.batchingChallenge; - mem.batchedEvaluation = mem.batchedEvaluation + (proof.sumcheckEvaluations[i - 1] * mem.batchingChallenge); - mem.batchingChallenge = mem.batchingChallenge * tp.rho; - } - - commitments[1] = vk.qm; - commitments[2] = vk.qc; - commitments[3] = vk.ql; - commitments[4] = vk.qr; - commitments[5] = vk.qo; - commitments[6] = vk.q4; - commitments[7] = vk.qLookup; - commitments[8] = vk.qArith; - commitments[9] = vk.qDeltaRange; - commitments[10] = vk.qElliptic; - commitments[11] = vk.qAux; - commitments[12] = vk.qPoseidon2External; - commitments[13] = vk.qPoseidon2Internal; - commitments[14] = vk.s1; - commitments[15] = vk.s2; - commitments[16] = vk.s3; - commitments[17] = vk.s4; - commitments[18] = vk.id1; - commitments[19] = vk.id2; - commitments[20] = vk.id3; - commitments[21] = vk.id4; - commitments[22] = vk.t1; - commitments[23] = vk.t2; - commitments[24] = vk.t3; - commitments[25] = vk.t4; - commitments[26] = vk.lagrangeFirst; - commitments[27] = vk.lagrangeLast; - - // Accumulate proof points - commitments[28] = convertProofPoint(proof.w1); - commitments[29] = convertProofPoint(proof.w2); - commitments[30] = convertProofPoint(proof.w3); - commitments[31] = convertProofPoint(proof.w4); - commitments[32] = convertProofPoint(proof.zPerm); - commitments[33] = convertProofPoint(proof.lookupInverses); - commitments[34] = convertProofPoint(proof.lookupReadCounts); - commitments[35] = convertProofPoint(proof.lookupReadTags); - - // to be Shifted - commitments[36] = convertProofPoint(proof.w1); - commitments[37] = convertProofPoint(proof.w2); - commitments[38] = convertProofPoint(proof.w3); - commitments[39] = convertProofPoint(proof.w4); - commitments[40] = convertProofPoint(proof.zPerm); - - /* Batch gemini claims from the prover - * place the commitments to gemini aᵢ to the vector of commitments, compute the contributions from - * aᵢ(−r²ⁱ) for i=1, … , n−1 to the constant term accumulator, add corresponding scalars - * - * 1. Moves the vector - * \f[ - * \left( \text{com}(A_1), \text{com}(A_2), \ldots, \text{com}(A_{n-1}) \right) - * \f] - * to the 'commitments' vector. - * - * 2. Computes the scalars: - * \f[ - * \frac{\nu^{2}}{z + r^2}, \frac{\nu^3}{z + r^4}, \ldots, \frac{\nu^{n-1}}{z + r^{2^{n-1}}} - * \f] - * and places them into the 'scalars' vector. - * - * 3. Accumulates the summands of the constant term: - * \f[ - * \sum_{i=2}^{n-1} \frac{\nu^{i} \cdot A_i(-r^{2^i})}{z + r^{2^i}} - * \f] - * and adds them to the 'constant_term_accumulator'. 
- */ - mem.constantTermAccumulator = Fr.wrap(0); - mem.batchingChallenge = tp.shplonkNu.sqr(); - - for (uint256 i; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { - bool dummy_round = i >= (LOG_N - 1); - - Fr scalingFactor = Fr.wrap(0); - if (!dummy_round) { - scalingFactor = mem.batchingChallenge * inverse_vanishing_evals[i + 2]; - scalars[NUMBER_OF_ENTITIES + 1 + i] = scalingFactor.neg(); - } - - mem.constantTermAccumulator = - mem.constantTermAccumulator + (scalingFactor * proof.geminiAEvaluations[i + 1]); - mem.batchingChallenge = mem.batchingChallenge * tp.shplonkNu; - - commitments[NUMBER_OF_ENTITIES + 1 + i] = convertProofPoint(proof.geminiFoldComms[i]); - } - - // Add contributions from A₀(r) and A₀(-r) to constant_term_accumulator: - // Compute evaluation A₀(r) - Fr a_0_pos = computeGeminiBatchedUnivariateEvaluation( - tp, mem.batchedEvaluation, proof.geminiAEvaluations, powers_of_evaluation_challenge - ); - - mem.constantTermAccumulator = mem.constantTermAccumulator + (a_0_pos * inverse_vanishing_evals[0]); - mem.constantTermAccumulator = - mem.constantTermAccumulator + (proof.geminiAEvaluations[0] * tp.shplonkNu * inverse_vanishing_evals[1]); - - // Finalise the batch opening claim - commitments[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N] = Honk.G1Point({x: 1, y: 2}); - scalars[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N] = mem.constantTermAccumulator; - - Honk.G1Point memory quotient_commitment = convertProofPoint(proof.kzgQuotient); - - commitments[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] = quotient_commitment; - scalars[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] = tp.shplonkZ; // evaluation challenge - - Honk.G1Point memory P_0 = batchMul(commitments, scalars); - Honk.G1Point memory P_1 = negateInplace(quotient_commitment); - - return pairing(P_0, P_1); - } - - function computeSquares(Fr r) internal pure returns (Fr[CONST_PROOF_SIZE_LOG_N] memory squares) { - squares[0] = r; - for (uint256 i = 1; i < CONST_PROOF_SIZE_LOG_N; ++i) { - squares[i] = squares[i - 1].sqr(); - } - } - - function computeInvertedGeminiDenominators( - Transcript memory tp, - Fr[CONST_PROOF_SIZE_LOG_N] memory eval_challenge_powers - ) internal view returns (Fr[CONST_PROOF_SIZE_LOG_N + 1] memory inverse_vanishing_evals) { - Fr eval_challenge = tp.shplonkZ; - inverse_vanishing_evals[0] = (eval_challenge - eval_challenge_powers[0]).invert(); - - for (uint256 i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - Fr round_inverted_denominator = Fr.wrap(0); - if (i <= LOG_N + 1) { - round_inverted_denominator = (eval_challenge + eval_challenge_powers[i]).invert(); - } - inverse_vanishing_evals[i + 1] = round_inverted_denominator; - } - } - - function computeGeminiBatchedUnivariateEvaluation( - Transcript memory tp, - Fr batchedEvalAccumulator, - Fr[CONST_PROOF_SIZE_LOG_N] memory geminiEvaluations, - Fr[CONST_PROOF_SIZE_LOG_N] memory geminiEvalChallengePowers - ) internal view returns (Fr a_0_pos) { - for (uint256 i = CONST_PROOF_SIZE_LOG_N; i > 0; --i) { - Fr challengePower = geminiEvalChallengePowers[i - 1]; - Fr u = tp.sumCheckUChallenges[i - 1]; - Fr evalNeg = geminiEvaluations[i - 1]; - - Fr batchedEvalRoundAcc = ( - (challengePower * batchedEvalAccumulator * Fr.wrap(2)) - - evalNeg * (challengePower * (Fr.wrap(1) - u) - u) - ); - // Divide by the denominator - batchedEvalRoundAcc = batchedEvalRoundAcc * (challengePower * (Fr.wrap(1) - u) + u).invert(); - - bool is_dummy_round = (i > LOG_N); - if (!is_dummy_round) { - batchedEvalAccumulator = batchedEvalRoundAcc; - } - } - - a_0_pos = batchedEvalAccumulator; - 
} - - // This implementation is the same as above with different constants - function batchMul( - Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory base, - Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory scalars - ) internal view returns (Honk.G1Point memory result) { - uint256 limit = NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2; - assembly { - let success := 0x01 - let free := mload(0x40) - - // Write the original into the accumulator - // Load into memory for ecMUL, leave offset for eccAdd result - // base is an array of pointers, so we have to dereference them - mstore(add(free, 0x40), mload(mload(base))) - mstore(add(free, 0x60), mload(add(0x20, mload(base)))) - // Add scalar - mstore(add(free, 0x80), mload(scalars)) - success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, free, 0x40)) - - let count := 0x01 - for {} lt(count, limit) { count := add(count, 1) } { - // Get loop offsets - let base_base := add(base, mul(count, 0x20)) - let scalar_base := add(scalars, mul(count, 0x20)) - - mstore(add(free, 0x40), mload(mload(base_base))) - mstore(add(free, 0x60), mload(add(0x20, mload(base_base)))) - // Add scalar - mstore(add(free, 0x80), mload(scalar_base)) - - success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, add(free, 0x40), 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, free, 0x80, free, 0x40)) - } - - // Return the result - i hate this - mstore(result, mload(free)) - mstore(add(result, 0x20), mload(add(free, 0x20))) - } - } - - function pairing(Honk.G1Point memory rhs, Honk.G1Point memory lhs) internal view returns (bool) { - bytes memory input = abi.encodePacked( - rhs.x, - rhs.y, - // Fixed G1 point - uint256(0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2), - uint256(0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed), - uint256(0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b), - uint256(0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa), - lhs.x, - lhs.y, - // G1 point from VK - uint256(0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1), - uint256(0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0), - uint256(0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4), - uint256(0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55) - ); - - (bool success, bytes memory result) = address(0x08).staticcall(input); - bool decodedResult = abi.decode(result, (bool)); - return success && decodedResult; - } -} - -// Conversion util - Duplicated as we cannot template LOG_N -function convertPoints(Honk.G1ProofPoint[LOG_N + 1] memory commitments) - pure - returns (Honk.G1Point[LOG_N + 1] memory converted) -{ - for (uint256 i; i < LOG_N + 1; ++i) { - converted[i] = convertProofPoint(commitments[i]); - } } diff --git a/barretenberg/sol/src/honk/instance/EcdsaHonk.sol b/barretenberg/sol/src/honk/instance/EcdsaHonk.sol index 5afb9bd1b288..f7d599ff01e9 100644 --- a/barretenberg/sol/src/honk/instance/EcdsaHonk.sol +++ b/barretenberg/sol/src/honk/instance/EcdsaHonk.sol @@ -5,506 +5,13 @@ pragma solidity >=0.8.21; import {IVerifier} from "../../interfaces/IVerifier.sol"; import {EcdsaHonkVerificationKey as VK, N, LOG_N, NUMBER_OF_PUBLIC_INPUTS} from "../keys/EcdsaHonkVerificationKey.sol"; -import { - Honk, - WIRE, - NUMBER_OF_ENTITIES, - NUMBER_OF_SUBRELATIONS, - NUMBER_OF_ALPHAS, - NUMBER_UNSHIFTED, - BATCHED_RELATION_PARTIAL_LENGTH, - CONST_PROOF_SIZE_LOG_N 
-} from "../HonkTypes.sol"; +import {Honk} from "../HonkTypes.sol"; -import {ecMul, ecAdd, ecSub, negateInplace, convertProofPoint} from "../utils.sol"; - -// Field arithmetic libraries - prevent littering the code with modmul / addmul -import {MODULUS as P, MINUS_ONE, Fr, FrLib} from "../Fr.sol"; - -import {Transcript, TranscriptLib} from "../Transcript.sol"; - -import {RelationsLib} from "../Relations.sol"; - -// Errors -error PublicInputsLengthWrong(); -error SumcheckFailed(); -error ShpleminiFailed(); +import {BaseHonkVerifier as BASE} from "../BaseHonkVerifier.sol"; /// Smart contract verifier of honk proofs -contract EcdsaHonkVerifier is IVerifier { - using FrLib for Fr; - - function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) { - Honk.VerificationKey memory vk = loadVerificationKey(); - Honk.Proof memory p = TranscriptLib.loadProof(proof); - - if (publicInputs.length != vk.publicInputsSize) { - revert PublicInputsLengthWrong(); - } - - // Generate the fiat shamir challenges for the whole protocol - Transcript memory t = TranscriptLib.generateTranscript(p, publicInputs, vk.publicInputsSize); - - // Compute the public input delta - t.publicInputsDelta = - computePublicInputDelta(publicInputs, t.beta, t.gamma, vk.circuitSize, p.publicInputsOffset); - - // Sumcheck - bool sumcheckVerified = verifySumcheck(p, t); - if (!sumcheckVerified) revert SumcheckFailed(); - - bool shpleminiVerified = verifyShplemini(p, vk, t); - if (!shpleminiVerified) revert ShpleminiFailed(); - - return sumcheckVerified && shpleminiVerified; // Boolean condition not required - nice for vanity :) - } - - function loadVerificationKey() internal pure returns (Honk.VerificationKey memory) { +contract EcdsaHonkVerifier is BASE(N, LOG_N, NUMBER_OF_PUBLIC_INPUTS) { + function loadVerificationKey() internal pure override returns (Honk.VerificationKey memory) { return VK.loadVerificationKey(); } - - function computePublicInputDelta( - bytes32[] memory publicInputs, - Fr beta, - Fr gamma, - uint256 domainSize, - uint256 offset - ) internal view returns (Fr publicInputDelta) { - Fr numerator = Fr.wrap(1); - Fr denominator = Fr.wrap(1); - - Fr numeratorAcc = gamma + (beta * FrLib.from(domainSize + offset)); - Fr denominatorAcc = gamma - (beta * FrLib.from(offset + 1)); - - { - for (uint256 i = 0; i < NUMBER_OF_PUBLIC_INPUTS; i++) { - Fr pubInput = FrLib.fromBytes32(publicInputs[i]); - - numerator = numerator * (numeratorAcc + pubInput); - denominator = denominator * (denominatorAcc + pubInput); - - numeratorAcc = numeratorAcc + beta; - denominatorAcc = denominatorAcc - beta; - } - } - - // Fr delta = numerator / denominator; // TOOO: batch invert later? 
- publicInputDelta = FrLib.div(numerator, denominator); - } - - uint256 constant ROUND_TARGET = 0; - - function verifySumcheck(Honk.Proof memory proof, Transcript memory tp) internal view returns (bool verified) { - Fr roundTarget; - Fr powPartialEvaluation = Fr.wrap(1); - - // We perform sumcheck reductions over log n rounds (the multivariate degree) - for (uint256 round; round < LOG_N; ++round) { - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate = proof.sumcheckUnivariates[round]; - bool valid = checkSum(roundUnivariate, roundTarget); - if (!valid) revert SumcheckFailed(); - - Fr roundChallenge = tp.sumCheckUChallenges[round]; - - // Update the round target for the next round - roundTarget = computeNextTargetSum(roundUnivariate, roundChallenge); - powPartialEvaluation = partiallyEvaluatePOW(tp, powPartialEvaluation, roundChallenge, round); - } - - // Last round - Fr grandHonkRelationSum = RelationsLib.accumulateRelationEvaluations(proof, tp, powPartialEvaluation); - verified = (grandHonkRelationSum == roundTarget); - } - - function checkSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate, Fr roundTarget) - internal - pure - returns (bool checked) - { - Fr totalSum = roundUnivariate[0] + roundUnivariate[1]; - checked = totalSum == roundTarget; - } - - // Return the new target sum for the next sumcheck round - function computeNextTargetSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariates, Fr roundChallenge) - internal - view - returns (Fr targetSum) - { - // TODO: inline - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_LAGRANGE_DENOMINATORS = [ - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffec51), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000002d0), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff11), - Fr.wrap(0x0000000000000000000000000000000000000000000000000000000000000090), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff71), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000000f0), - Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffd31), - Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000013b0) - ]; - - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory BARYCENTRIC_DOMAIN = [ - Fr.wrap(0x00), - Fr.wrap(0x01), - Fr.wrap(0x02), - Fr.wrap(0x03), - Fr.wrap(0x04), - Fr.wrap(0x05), - Fr.wrap(0x06), - Fr.wrap(0x07) - ]; - // To compute the next target sum, we evaluate the given univariate at a point u (challenge).
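For context on the implementation below: evaluating a univariate from its values on {0, ..., d-1} at a challenge u is standard barycentric interpolation. A minimal sketch in Rust, reusing P and pow_mod from the sketch above (the toy field is an assumption; the contract works over BN254 via its Fr library):

    fn inv(x: u128) -> u128 { pow_mod(x, P - 2) } // Fermat's little theorem

    // Evaluate the polynomial with values `evals[i]` at points i = 0..d-1, at `u`.
    // Assumes u lies outside {0, ..., d-1} (otherwise a denominator vanishes);
    // for a random field challenge this holds with overwhelming probability.
    fn barycentric_eval(evals: &[u128], u: u128) -> u128 {
        let d = evals.len();
        // B(u) = prod_i (u - i)
        let mut b = 1u128;
        for i in 0..d {
            b = b * ((u + P - i as u128) % P) % P;
        }
        // sum_i evals[i] / (d_i * (u - i)), with d_i = prod_{j != i} (i - j)
        let mut sum = 0u128;
        for i in 0..d {
            let mut denom = 1u128;
            for j in 0..d {
                if i != j {
                    denom = denom * ((i as u128 + P - j as u128) % P) % P;
                }
            }
            denom = denom * ((u + P - i as u128) % P) % P;
            sum = (sum + evals[i] * inv(denom)) % P;
        }
        sum * b % P
    }

The d_i factors are exactly what the hard-coded BARYCENTRIC_LAGRANGE_DENOMINATORS constants above precompute for BN254 with d = 8.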
- - // TODO: opt: use same array mem for each iteration - // Performing Barycentric evaluations - // Compute B(x) - Fr numeratorValue = Fr.wrap(1); - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - numeratorValue = numeratorValue * (roundChallenge - Fr.wrap(i)); - } - - // Calculate domain size N of inverses -- TODO: Montgomery's trick - Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory denominatorInverses; - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - Fr inv = BARYCENTRIC_LAGRANGE_DENOMINATORS[i]; - inv = inv * (roundChallenge - BARYCENTRIC_DOMAIN[i]); - inv = FrLib.invert(inv); - denominatorInverses[i] = inv; - } - - for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { - Fr term = roundUnivariates[i]; - term = term * denominatorInverses[i]; - targetSum = targetSum + term; - } - - // Scale the sum by the value of B(x) - targetSum = targetSum * numeratorValue; - } - - // Univariate evaluation of the monomial ((1-X_l) + X_l.B_l) at the challenge point X_l=u_l - function partiallyEvaluatePOW(Transcript memory tp, Fr currentEvaluation, Fr roundChallenge, uint256 round) - internal - pure - returns (Fr newEvaluation) - { - Fr univariateEval = Fr.wrap(1) + (roundChallenge * (tp.gateChallenges[round] - Fr.wrap(1))); - newEvaluation = currentEvaluation * univariateEval; - } - - // Avoid stack too deep - struct ShpleminiIntermediates { - Fr unshiftedScalar; - Fr shiftedScalar; - // Scalar to be multiplied by [1]₁ - Fr constantTermAccumulator; - // Accumulator for powers of rho - Fr batchingChallenge; - // Linear combination of multilinear (sumcheck) evaluations and powers of rho - Fr batchedEvaluation; - } - - /** - * Shplemini combines the Gemini commitment scheme with Shplonk's batch opening claim. - */ - function verifyShplemini(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) - internal - view - returns (bool verified) - { - ShpleminiIntermediates memory mem; // stack - - // - Compute vector (r, r², ...
, r²⁽ⁿ⁻¹⁾), where n = log_circuit_size - Fr[CONST_PROOF_SIZE_LOG_N] memory powers_of_evaluation_challenge = computeSquares(tp.geminiR); - - // Arrays hold values that will be linearly combined for the gemini and shplonk batch openings - Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory scalars; - Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory commitments; - - Fr[CONST_PROOF_SIZE_LOG_N + 1] memory inverse_vanishing_evals = - computeInvertedGeminiDenominators(tp, powers_of_evaluation_challenge); - - mem.unshiftedScalar = inverse_vanishing_evals[0] + (tp.shplonkNu * inverse_vanishing_evals[1]); - mem.shiftedScalar = - tp.geminiR.invert() * (inverse_vanishing_evals[0] - (tp.shplonkNu * inverse_vanishing_evals[1])); - - scalars[0] = Fr.wrap(1); - commitments[0] = convertProofPoint(proof.shplonkQ); - - /* Batch multivariate opening claims, shifted and unshifted - * The vector of scalars is populated as follows: - * \f[ - * \left( - * - \left(\frac{1}{z-r} + \nu \times \frac{1}{z+r}\right), - * \ldots, - * - \rho^{i+k-1} \times \left(\frac{1}{z-r} + \nu \times \frac{1}{z+r}\right), - * - \rho^{i+k} \times \frac{1}{r} \times \left(\frac{1}{z-r} - \nu \times \frac{1}{z+r}\right), - * \ldots, - * - \rho^{k+m-1} \times \frac{1}{r} \times \left(\frac{1}{z-r} - \nu \times \frac{1}{z+r}\right) - * \right) - * \f] - * - * The following vector is concatenated to the vector of commitments: - * \f[ - * f_0, \ldots, f_{m-1}, f_{\text{shift}, 0}, \ldots, f_{\text{shift}, k-1} - * \f] - * - * Simultaneously, the evaluation of the multilinear polynomial - * \f[ - * \sum \rho^i \cdot f_i + \sum \rho^{i+k} \cdot f_{\text{shift}, i} - * \f] - * at the challenge point \f$ (u_0,\ldots, u_{n-1}) \f$ is computed. - * - * This approach minimizes the number of iterations over the commitments to multilinear polynomials - * and eliminates the need to store the powers of \f$ \rho \f$. 
- */ - mem.batchingChallenge = Fr.wrap(1); - mem.batchedEvaluation = Fr.wrap(0); - - for (uint256 i = 1; i <= NUMBER_UNSHIFTED; ++i) { - scalars[i] = mem.unshiftedScalar.neg() * mem.batchingChallenge; - mem.batchedEvaluation = mem.batchedEvaluation + (proof.sumcheckEvaluations[i - 1] * mem.batchingChallenge); - mem.batchingChallenge = mem.batchingChallenge * tp.rho; - } - // g commitments are accumulated at r - for (uint256 i = NUMBER_UNSHIFTED + 1; i <= NUMBER_OF_ENTITIES; ++i) { - scalars[i] = mem.shiftedScalar.neg() * mem.batchingChallenge; - mem.batchedEvaluation = mem.batchedEvaluation + (proof.sumcheckEvaluations[i - 1] * mem.batchingChallenge); - mem.batchingChallenge = mem.batchingChallenge * tp.rho; - } - - commitments[1] = vk.qm; - commitments[2] = vk.qc; - commitments[3] = vk.ql; - commitments[4] = vk.qr; - commitments[5] = vk.qo; - commitments[6] = vk.q4; - commitments[7] = vk.qLookup; - commitments[8] = vk.qArith; - commitments[9] = vk.qDeltaRange; - commitments[10] = vk.qElliptic; - commitments[11] = vk.qAux; - commitments[12] = vk.qPoseidon2External; - commitments[13] = vk.qPoseidon2Internal; - commitments[14] = vk.s1; - commitments[15] = vk.s2; - commitments[16] = vk.s3; - commitments[17] = vk.s4; - commitments[18] = vk.id1; - commitments[19] = vk.id2; - commitments[20] = vk.id3; - commitments[21] = vk.id4; - commitments[22] = vk.t1; - commitments[23] = vk.t2; - commitments[24] = vk.t3; - commitments[25] = vk.t4; - commitments[26] = vk.lagrangeFirst; - commitments[27] = vk.lagrangeLast; - - // Accumulate proof points - commitments[28] = convertProofPoint(proof.w1); - commitments[29] = convertProofPoint(proof.w2); - commitments[30] = convertProofPoint(proof.w3); - commitments[31] = convertProofPoint(proof.w4); - commitments[32] = convertProofPoint(proof.zPerm); - commitments[33] = convertProofPoint(proof.lookupInverses); - commitments[34] = convertProofPoint(proof.lookupReadCounts); - commitments[35] = convertProofPoint(proof.lookupReadTags); - - // to be Shifted - commitments[36] = convertProofPoint(proof.w1); - commitments[37] = convertProofPoint(proof.w2); - commitments[38] = convertProofPoint(proof.w3); - commitments[39] = convertProofPoint(proof.w4); - commitments[40] = convertProofPoint(proof.zPerm); - - /* Batch gemini claims from the prover - * place the commitments to gemini aᵢ to the vector of commitments, compute the contributions from - * aᵢ(−r²ⁱ) for i=1, … , n−1 to the constant term accumulator, add corresponding scalars - * - * 1. Moves the vector - * \f[ - * \left( \text{com}(A_1), \text{com}(A_2), \ldots, \text{com}(A_{n-1}) \right) - * \f] - * to the 'commitments' vector. - * - * 2. Computes the scalars: - * \f[ - * \frac{\nu^{2}}{z + r^2}, \frac{\nu^3}{z + r^4}, \ldots, \frac{\nu^{n-1}}{z + r^{2^{n-1}}} - * \f] - * and places them into the 'scalars' vector. - * - * 3. Accumulates the summands of the constant term: - * \f[ - * \sum_{i=2}^{n-1} \frac{\nu^{i} \cdot A_i(-r^{2^i})}{z + r^{2^i}} - * \f] - * and adds them to the 'constant_term_accumulator'. 
- */ - mem.constantTermAccumulator = Fr.wrap(0); - mem.batchingChallenge = tp.shplonkNu.sqr(); - - for (uint256 i; i < CONST_PROOF_SIZE_LOG_N - 1; ++i) { - bool dummy_round = i >= (LOG_N - 1); - - Fr scalingFactor = Fr.wrap(0); - if (!dummy_round) { - scalingFactor = mem.batchingChallenge * inverse_vanishing_evals[i + 2]; - scalars[NUMBER_OF_ENTITIES + 1 + i] = scalingFactor.neg(); - } - - mem.constantTermAccumulator = - mem.constantTermAccumulator + (scalingFactor * proof.geminiAEvaluations[i + 1]); - mem.batchingChallenge = mem.batchingChallenge * tp.shplonkNu; - - commitments[NUMBER_OF_ENTITIES + 1 + i] = convertProofPoint(proof.geminiFoldComms[i]); - } - - // Add contributions from A₀(r) and A₀(-r) to constant_term_accumulator: - // Compute evaluation A₀(r) - Fr a_0_pos = computeGeminiBatchedUnivariateEvaluation( - tp, mem.batchedEvaluation, proof.geminiAEvaluations, powers_of_evaluation_challenge - ); - - mem.constantTermAccumulator = mem.constantTermAccumulator + (a_0_pos * inverse_vanishing_evals[0]); - mem.constantTermAccumulator = - mem.constantTermAccumulator + (proof.geminiAEvaluations[0] * tp.shplonkNu * inverse_vanishing_evals[1]); - - // Finalise the batch opening claim - commitments[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N] = Honk.G1Point({x: 1, y: 2}); - scalars[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N] = mem.constantTermAccumulator; - - Honk.G1Point memory quotient_commitment = convertProofPoint(proof.kzgQuotient); - - commitments[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] = quotient_commitment; - scalars[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] = tp.shplonkZ; // evaluation challenge - - Honk.G1Point memory P_0 = batchMul(commitments, scalars); - Honk.G1Point memory P_1 = negateInplace(quotient_commitment); - - return pairing(P_0, P_1); - } - - function computeSquares(Fr r) internal pure returns (Fr[CONST_PROOF_SIZE_LOG_N] memory squares) { - squares[0] = r; - for (uint256 i = 1; i < CONST_PROOF_SIZE_LOG_N; ++i) { - squares[i] = squares[i - 1].sqr(); - } - } - - function computeInvertedGeminiDenominators( - Transcript memory tp, - Fr[CONST_PROOF_SIZE_LOG_N] memory eval_challenge_powers - ) internal view returns (Fr[CONST_PROOF_SIZE_LOG_N + 1] memory inverse_vanishing_evals) { - Fr eval_challenge = tp.shplonkZ; - inverse_vanishing_evals[0] = (eval_challenge - eval_challenge_powers[0]).invert(); - - for (uint256 i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { - Fr round_inverted_denominator = Fr.wrap(0); - if (i <= LOG_N + 1) { - round_inverted_denominator = (eval_challenge + eval_challenge_powers[i]).invert(); - } - inverse_vanishing_evals[i + 1] = round_inverted_denominator; - } - } - - function computeGeminiBatchedUnivariateEvaluation( - Transcript memory tp, - Fr batchedEvalAccumulator, - Fr[CONST_PROOF_SIZE_LOG_N] memory geminiEvaluations, - Fr[CONST_PROOF_SIZE_LOG_N] memory geminiEvalChallengePowers - ) internal view returns (Fr a_0_pos) { - for (uint256 i = CONST_PROOF_SIZE_LOG_N; i > 0; --i) { - Fr challengePower = geminiEvalChallengePowers[i - 1]; - Fr u = tp.sumCheckUChallenges[i - 1]; - Fr evalNeg = geminiEvaluations[i - 1]; - - Fr batchedEvalRoundAcc = ( - (challengePower * batchedEvalAccumulator * Fr.wrap(2)) - - evalNeg * (challengePower * (Fr.wrap(1) - u) - u) - ); - // Divide by the denominator - batchedEvalRoundAcc = batchedEvalRoundAcc * (challengePower * (Fr.wrap(1) - u) + u).invert(); - - bool is_dummy_round = (i > LOG_N); - if (!is_dummy_round) { - batchedEvalAccumulator = batchedEvalRoundAcc; - } - } - - a_0_pos = batchedEvalAccumulator; - 
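Restated in formula form (a LaTeX paraphrase of the loop body above, in the loop's own names: rho for challengePower, u for the round's sumcheck challenge, a^- for evalNeg, acc for batchedEvalAccumulator):

    \[
    \mathrm{acc}' \;=\; \frac{2\rho\cdot \mathrm{acc} \;-\; a^{-}\bigl(\rho(1-u)-u\bigr)}{\rho(1-u)+u}
    \]

Starting from the batched multilinear evaluation and applying this over the non-dummy rounds, top down, leaves A₀(r) in the accumulator, which is the positive Gemini evaluation needed for the constant term.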
} - - // This implementation is the same as above with different constants - function batchMul( - Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory base, - Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2] memory scalars - ) internal view returns (Honk.G1Point memory result) { - uint256 limit = NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 2; - assembly { - let success := 0x01 - let free := mload(0x40) - - // Write the original into the accumulator - // Load into memory for ecMUL, leave offset for eccAdd result - // base is an array of pointers, so we have to dereference them - mstore(add(free, 0x40), mload(mload(base))) - mstore(add(free, 0x60), mload(add(0x20, mload(base)))) - // Add scalar - mstore(add(free, 0x80), mload(scalars)) - success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, free, 0x40)) - - let count := 0x01 - for {} lt(count, limit) { count := add(count, 1) } { - // Get loop offsets - let base_base := add(base, mul(count, 0x20)) - let scalar_base := add(scalars, mul(count, 0x20)) - - mstore(add(free, 0x40), mload(mload(base_base))) - mstore(add(free, 0x60), mload(add(0x20, mload(base_base)))) - // Add scalar - mstore(add(free, 0x80), mload(scalar_base)) - - success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, add(free, 0x40), 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, free, 0x80, free, 0x40)) - } - - // Return the result - i hate this - mstore(result, mload(free)) - mstore(add(result, 0x20), mload(add(free, 0x20))) - } - } - - function pairing(Honk.G1Point memory rhs, Honk.G1Point memory lhs) internal view returns (bool) { - bytes memory input = abi.encodePacked( - rhs.x, - rhs.y, - // Fixed G1 point - uint256(0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2), - uint256(0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed), - uint256(0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b), - uint256(0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa), - lhs.x, - lhs.y, - // G1 point from VK - uint256(0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1), - uint256(0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0), - uint256(0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4), - uint256(0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55) - ); - - (bool success, bytes memory result) = address(0x08).staticcall(input); - bool decodedResult = abi.decode(result, (bool)); - return success && decodedResult; - } -} - -// Conversion util - Duplicated as we cannot template LOG_N -function convertPoints(Honk.G1ProofPoint[LOG_N + 1] memory commitments) - pure - returns (Honk.G1Point[LOG_N + 1] memory converted) -{ - for (uint256 i; i < LOG_N + 1; ++i) { - converted[i] = convertProofPoint(commitments[i]); - } } diff --git a/bb-pilcom/bb-pil-backend/src/permutation_builder.rs b/bb-pilcom/bb-pil-backend/src/permutation_builder.rs index 3c7a978472ca..b7cdaa735d06 100644 --- a/bb-pilcom/bb-pil-backend/src/permutation_builder.rs +++ b/bb-pilcom/bb-pil-backend/src/permutation_builder.rs @@ -144,6 +144,7 @@ fn create_permutation_settings_data(permutation: &Permutation, vm_name: &str) -> "lhs_selector": lhs_selector, "rhs_selector": rhs_selector, "perm_entities": perm_entities, + "inverses_col": permutation.inverse.clone(), }) } diff --git a/bb-pilcom/bb-pil-backend/templates/permutation.hpp.hbs b/bb-pilcom/bb-pil-backend/templates/permutation.hpp.hbs index 
29ee9a18e0c9..502bbb886a46 100644 --- a/bb-pilcom/bb-pil-backend/templates/permutation.hpp.hbs +++ b/bb-pilcom/bb-pil-backend/templates/permutation.hpp.hbs @@ -1,6 +1,7 @@ // AUTOGENERATED FILE #pragma once +#include "../columns.hpp" #include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" #include @@ -13,6 +14,11 @@ class {{perm_name}}_permutation_settings { // This constant defines how many columns are bundled together to form each set. constexpr static size_t COLUMNS_PER_SET = {{columns_per_set}}; + // Columns using the Column enum. + static constexpr Column SRC_SELECTOR = Column::{{lhs_selector}}; + static constexpr Column DST_SELECTOR = Column::{{rhs_selector}}; + static constexpr Column INVERSES = Column::{{inverses_col}}; + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) { return (in.{{lhs_selector}} == 1 || in.{{rhs_selector}} == 1); diff --git a/bootstrap.sh b/bootstrap.sh index e0e4a070cd21..9e59b1de6f99 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -201,9 +201,34 @@ case "$cmd" in ;; "image-aztec") image=aztecprotocol/aztec:$(git rev-parse HEAD) + check_arch=false + + # Check for --check-arch flag in args + for arg in "$@"; do + if [ "$arg" = "--check-arch" ]; then + check_arch=true + break + fi + done + docker pull $image &>/dev/null || true if docker_has_image $image; then - exit + if [ "$check_arch" = true ]; then + # Check we're on the correct architecture + image_arch=$(docker inspect $image --format '{{.Architecture}}') + host_arch=$(uname -m | sed 's/x86_64/amd64/' | sed 's/aarch64/arm64/') + + if [ "$image_arch" != "$host_arch" ]; then + echo "Warning: Image architecture ($image_arch) doesn't match host architecture ($host_arch)" + echo "Rebuilding image for correct architecture..." + else + echo "Image $image already exists and has been downloaded with correct architecture." && exit + fi + else + echo "Image $image already exists and has been downloaded." && exit + fi + else + echo "Image $image does not exist, building..." 
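One subtlety the check above handles: `uname -m` and Docker's `.Architecture` use different names for the same CPU architectures, hence the sed normalization. A small sketch of the mapping in Rust (illustrative only; values other than the two rewritten ones pass through unchanged, matching the sed pipeline):

    // Map `uname -m` output to Docker's architecture naming.
    fn docker_arch(uname_m: &str) -> &str {
        match uname_m {
            "x86_64" => "amd64",  // Intel/AMD 64-bit
            "aarch64" => "arm64", // ARM 64-bit
            other => other,       // already-normalized or unknown values
        }
    }

    fn main() {
        assert_eq!(docker_arch("x86_64"), "amd64");
        assert_eq!(docker_arch("aarch64"), "arm64");
    }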
fi github_group "image-aztec" source $ci3/source_tmp diff --git a/cspell.json b/cspell.json index 3ed9bc44cf44..9acd495666a6 100644 --- a/cspell.json +++ b/cspell.json @@ -12,6 +12,7 @@ "auditability", "authwit", "authwits", + "authwitness", "Automine", "autonat", "autorun", diff --git a/docs/bootstrap.sh b/docs/bootstrap.sh old mode 100644 new mode 100755 diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 765546c6c081..8d2325e2b20b 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -21,7 +21,7 @@ library Constants { uint256 internal constant MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL = 5; uint256 internal constant MAX_ENQUEUED_CALLS_PER_CALL = 16; uint256 internal constant MAX_L2_TO_L1_MSGS_PER_CALL = 2; - uint256 internal constant MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 64; + uint256 internal constant MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 63; uint256 internal constant MAX_PUBLIC_DATA_READS_PER_CALL = 64; uint256 internal constant MAX_NOTE_HASH_READ_REQUESTS_PER_CALL = 16; uint256 internal constant MAX_NULLIFIER_READ_REQUESTS_PER_CALL = 16; @@ -187,7 +187,7 @@ library Constants { uint256 internal constant BLOCK_HEADER_LENGTH = 25; uint256 internal constant BLOCK_HEADER_LENGTH_BYTES = 648; uint256 internal constant PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 741; - uint256 internal constant PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = 867; + uint256 internal constant PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = 864; uint256 internal constant PRIVATE_CONTEXT_INPUTS_LENGTH = 40; uint256 internal constant FEE_RECIPIENT_LENGTH = 2; uint256 internal constant AGGREGATION_OBJECT_LENGTH = 16; @@ -258,11 +258,11 @@ library Constants { uint256 internal constant START_NULLIFIER_NON_EXISTS_OFFSET = 32; uint256 internal constant START_L1_TO_L2_MSG_EXISTS_WRITE_OFFSET = 48; uint256 internal constant START_SSTORE_WRITE_OFFSET = 64; - uint256 internal constant START_SLOAD_WRITE_OFFSET = 128; - uint256 internal constant START_EMIT_NOTE_HASH_WRITE_OFFSET = 192; - uint256 internal constant START_EMIT_NULLIFIER_WRITE_OFFSET = 208; - uint256 internal constant START_EMIT_L2_TO_L1_MSG_WRITE_OFFSET = 224; - uint256 internal constant START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET = 226; + uint256 internal constant START_SLOAD_WRITE_OFFSET = 127; + uint256 internal constant START_EMIT_NOTE_HASH_WRITE_OFFSET = 191; + uint256 internal constant START_EMIT_NULLIFIER_WRITE_OFFSET = 207; + uint256 internal constant START_EMIT_L2_TO_L1_MSG_WRITE_OFFSET = 223; + uint256 internal constant START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET = 225; uint256 internal constant DEFAULT_GAS_LIMIT = 1000000000; uint256 internal constant MAX_L2_GAS_PER_TX_PUBLIC_PORTION = 6000000; uint256 internal constant DEFAULT_TEARDOWN_GAS_LIMIT = 6000000; diff --git a/l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol index 59b4e10a7a69..5dbc8b27dae1 100644 --- a/l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol @@ -15,6 +15,6 @@ struct ProposeArgs { library ProposeLib { function digest(ProposeArgs memory _args) internal pure returns (bytes32) { - return keccak256(abi.encode(SignatureLib.SignatureDomainSeperator.blockAttestation, _args)); + return keccak256(abi.encode(SignatureLib.SignatureDomainSeparator.blockAttestation, _args)); } } diff --git a/l1-contracts/src/core/libraries/crypto/SignatureLib.sol 
b/l1-contracts/src/core/libraries/crypto/SignatureLib.sol index 4223f5ddafe5..1265adb305f9 100644 --- a/l1-contracts/src/core/libraries/crypto/SignatureLib.sol +++ b/l1-contracts/src/core/libraries/crypto/SignatureLib.sol @@ -13,9 +13,9 @@ struct Signature { library SignatureLib { /** - * @notice The domain seperator for the signatures + * @notice The domain separator for the signatures */ - enum SignatureDomainSeperator { + enum SignatureDomainSeparator { blockProposal, blockAttestation } diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 28f80a81d36d..625c3bf3ddaf 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = e852135a829883f177a69ccbd1df29fd445fbfc1 + commit = 8f332207062c425762a983cde8eb1b8eabe06743 method = merge cmdver = 0.4.6 - parent = 33475e46f8265f8ff7e6d8980a46b240c1af8656 + parent = f0d0c47d2cffe8017e8c0bc92e43f7bfb2ef47ce diff --git a/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr b/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr index 4fdbdfce2735..6c15183e91d5 100644 --- a/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr +++ b/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr @@ -8,25 +8,11 @@ use crate::hash::hash_args; use crate::oracle::arguments::pack_arguments; pub trait CallInterface { - fn get_args(self) -> [Field] { - self.args - } - - fn get_selector(self) -> FunctionSelector { - self.selector - } - - fn get_name(self) -> str { - self.name - } - - fn get_contract_address(self) -> AztecAddress { - self.target_contract - } - - fn get_is_static(self) -> bool { - self.is_static - } + fn get_args(self) -> [Field]; + fn get_selector(self) -> FunctionSelector; + fn get_name(self) -> str; + fn get_contract_address(self) -> AztecAddress; + fn get_is_static(self) -> bool; } pub struct PrivateCallInterface { @@ -70,7 +56,27 @@ impl PrivateCallInterface { } } -impl CallInterface for PrivateVoidCallInterface {} +impl CallInterface for PrivateVoidCallInterface { + fn get_args(self) -> [Field] { + self.args + } + + fn get_selector(self) -> FunctionSelector { + self.selector + } + + fn get_name(self) -> str { + self.name + } + + fn get_contract_address(self) -> AztecAddress { + self.target_contract + } + + fn get_is_static(self) -> bool { + self.is_static + } +} pub struct PrivateVoidCallInterface { pub target_contract: AztecAddress, @@ -108,7 +114,27 @@ impl PrivateVoidCallInterface { } } -impl CallInterface for PrivateStaticCallInterface {} +impl CallInterface for PrivateStaticCallInterface { + fn get_args(self) -> [Field] { + self.args + } + + fn get_selector(self) -> FunctionSelector { + self.selector + } + + fn get_name(self) -> str { + self.name + } + + fn get_contract_address(self) -> AztecAddress { + self.target_contract + } + + fn get_is_static(self) -> bool { + self.is_static + } +} pub struct PrivateStaticCallInterface { pub target_contract: AztecAddress, @@ -136,7 +162,27 @@ impl PrivateStaticCallInterface { } } -impl CallInterface for PrivateStaticVoidCallInterface {} +impl CallInterface for PrivateStaticVoidCallInterface { + fn get_args(self) -> [Field] { + self.args + } + + fn get_selector(self) -> FunctionSelector { + self.selector + } + + fn get_name(self) -> str { + self.name + } + + fn get_contract_address(self) -> AztecAddress { + self.target_contract + } + + fn get_is_static(self) -> bool { + self.is_static + } +} pub struct 
PrivateStaticVoidCallInterface { pub target_contract: AztecAddress, @@ -162,7 +208,27 @@ impl PrivateStaticVoidCallInterface { } } -impl CallInterface for PublicCallInterface {} +impl CallInterface for PublicCallInterface { + fn get_args(self) -> [Field] { + self.args + } + + fn get_selector(self) -> FunctionSelector { + self.selector + } + + fn get_name(self) -> str { + self.name + } + + fn get_contract_address(self) -> AztecAddress { + self.target_contract + } + + fn get_is_static(self) -> bool { + self.is_static + } +} pub struct PublicCallInterface { pub target_contract: AztecAddress, @@ -231,7 +297,27 @@ impl PublicCallInterface { } } -impl CallInterface for PublicVoidCallInterface {} +impl CallInterface for PublicVoidCallInterface { + fn get_args(self) -> [Field] { + self.args + } + + fn get_selector(self) -> FunctionSelector { + self.selector + } + + fn get_name(self) -> str { + self.name + } + + fn get_contract_address(self) -> AztecAddress { + self.target_contract + } + + fn get_is_static(self) -> bool { + self.is_static + } +} pub struct PublicVoidCallInterface { pub target_contract: AztecAddress, @@ -294,7 +380,27 @@ impl PublicVoidCallInterface { } } -impl CallInterface for PublicStaticCallInterface {} +impl CallInterface for PublicStaticCallInterface { + fn get_args(self) -> [Field] { + self.args + } + + fn get_selector(self) -> FunctionSelector { + self.selector + } + + fn get_name(self) -> str { + self.name + } + + fn get_contract_address(self) -> AztecAddress { + self.target_contract + } + + fn get_is_static(self) -> bool { + self.is_static + } +} pub struct PublicStaticCallInterface { pub target_contract: AztecAddress, @@ -338,7 +444,27 @@ impl PublicStaticCallInterface { } } -impl CallInterface for PublicStaticVoidCallInterface {} +impl CallInterface for PublicStaticVoidCallInterface { + fn get_args(self) -> [Field] { + self.args + } + + fn get_selector(self) -> FunctionSelector { + self.selector + } + + fn get_name(self) -> str { + self.name + } + + fn get_contract_address(self) -> AztecAddress { + self.target_contract + } + + fn get_is_static(self) -> bool { + self.is_static + } +} pub struct PublicStaticVoidCallInterface { target_contract: AztecAddress, diff --git a/noir-projects/aztec-nr/aztec/src/keys/secret_derivation.nr b/noir-projects/aztec-nr/aztec/src/keys/secret_derivation.nr index b49fa90a7838..93201448573c 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/secret_derivation.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/secret_derivation.nr @@ -11,7 +11,7 @@ pub fn derive_aes_secret(secret: Scalar, point: Point) -> [u8; 32] { } shared_secret_bytes_with_separator[32] = GENERATOR_INDEX__SYMMETRIC_KEY; - + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/10537): Replace use of sha256 sha256(shared_secret_bytes_with_separator) } diff --git a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr index 5d8aa114b09c..1b9577a0b63f 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr @@ -441,7 +441,7 @@ comptime fn generate_setup_payload( + 2 /* log_plaintext_length */ + 14 /* AES padding */; // Each field contains 31 bytes so the length in fields is computed as ceil(encrypted_log_byte_length / 31) - // --> we achieve rouding by adding 30 and then dividing without remainder + // --> we achieve rounding by adding 30 and then dividing without remainder let encrypted_log_field_length = (encrypted_log_byte_length + 30) / 31; ( 
@@ -661,7 +661,7 @@ comptime fn generate_finalization_payload( + 2 /* log_plaintext_length */ + 14 /* AES padding */; // Each field contains 31 bytes so the length in fields is computed as ceil(setup_log_byte_length / 31) - // --> we achieve rouding by adding 30 and then dividing without remainder + // --> we achieve rounding by adding 30 and then dividing without remainder let setup_log_field_length = (setup_log_byte_length + 30) / 31; let public_values_field_length = public_values_length * 32; let finalization_log_byte_length = diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/map.nr b/noir-projects/aztec-nr/aztec/src/state_vars/map.nr index 5e9febc29cb0..367f7eea984f 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/map.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/map.nr @@ -15,7 +15,11 @@ pub struct Map { impl Storage for Map where T: Serialize + Deserialize, -{} +{ + fn get_storage_slot(self) -> Field { + self.storage_slot + } +} impl Map { // docs:start:new diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/private_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/private_immutable.nr index 89d827f5951f..662f4274f373 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/private_immutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/private_immutable.nr @@ -25,7 +25,11 @@ pub struct PrivateImmutable { impl Storage for PrivateImmutable where T: Serialize + Deserialize, -{} +{ + fn get_storage_slot(self) -> Field { + self.storage_slot + } +} impl PrivateImmutable { // docs:start:new diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/private_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/private_mutable.nr index f721a30a7c2d..4dd7d13dd2f2 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/private_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/private_mutable.nr @@ -27,7 +27,11 @@ mod test; impl Storage for PrivateMutable where T: Serialize + Deserialize, -{} +{ + fn get_storage_slot(self) -> Field { + self.storage_slot + } +} impl PrivateMutable { // docs:start:new diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/private_set.nr b/noir-projects/aztec-nr/aztec/src/state_vars/private_set.nr index 471ede3d9d56..008c0c215cfc 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/private_set.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/private_set.nr @@ -26,7 +26,11 @@ pub struct PrivateSet { impl Storage for PrivateSet where T: Serialize + Deserialize, -{} +{ + fn get_storage_slot(self) -> Field { + self.storage_slot + } +} impl PrivateSet { // docs:start:new diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr index 4ed4f2bbb4d2..96809aa86869 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr @@ -19,7 +19,11 @@ pub struct PublicImmutable { impl Storage for PublicImmutable where T: Serialize + Deserialize, -{} +{ + fn get_storage_slot(self) -> Field { + self.storage_slot + } +} impl PublicImmutable { // docs:start:public_immutable_struct_new diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr index 0f69d4cd5280..cf9bf5890403 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr @@ -12,7 +12,11 @@ pub struct PublicMutable { impl Storage 
for PublicMutable where T: Serialize + Deserialize, -{} +{ + fn get_storage_slot(self) -> Field { + self.storage_slot + } +} impl PublicMutable { // docs:start:public_mutable_struct_new diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr index 3d940d3d4924..5e9a2f0a3348 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr @@ -39,7 +39,11 @@ global HASH_SEPARATOR: u32 = 2; impl Storage for SharedMutable where T: Serialize + Deserialize, -{} +{ + fn get_storage_slot(self) -> Field { + self.storage_slot + } +} // SharedMutable stores a value of type T that is: // - publicly known (i.e. unencrypted) diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/storage.nr b/noir-projects/aztec-nr/aztec/src/state_vars/storage.nr index a48c7de3749b..3539ce0c78cc 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/storage.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/storage.nr @@ -4,9 +4,7 @@ pub trait Storage where T: Serialize + Deserialize, { - fn get_storage_slot(self) -> Field { - self.storage_slot - } + fn get_storage_slot(self) -> Field; } // Struct representing an exportable storage variable in the contract diff --git a/noir-projects/aztec-nr/bootstrap.sh b/noir-projects/aztec-nr/bootstrap.sh index 2cd5d7233710..976ff04789d3 100755 --- a/noir-projects/aztec-nr/bootstrap.sh +++ b/noir-projects/aztec-nr/bootstrap.sh @@ -9,7 +9,7 @@ export NARGO=${NARGO:-../../noir/noir-repo/target/release/nargo} function test_cmds { i=0 - $NARGO test --list-tests --silence-warnings | while read -r package test; do + $NARGO test --list-tests | while read -r package test; do # We assume there are 8 txe's running. port=$((45730 + (i++ % ${NUM_TXES:-1}))) echo "noir-projects/scripts/run_test.sh aztec-nr $package $test $port" diff --git a/noir-projects/bootstrap.sh b/noir-projects/bootstrap.sh index e70da821e60b..4b98d4fa7fba 100755 --- a/noir-projects/bootstrap.sh +++ b/noir-projects/bootstrap.sh @@ -46,7 +46,7 @@ case "$cmd" in exit ;; "hash") - cache_content_hash .rebuild_patterns + cache_content_hash .rebuild_patterns ../noir/.rebuild_patterns exit ;; *) diff --git a/noir-projects/noir-contracts/bootstrap.sh b/noir-projects/noir-contracts/bootstrap.sh index 3d8b2b0f6463..4d7180f7dcb9 100755 --- a/noir-projects/noir-contracts/bootstrap.sh +++ b/noir-projects/noir-contracts/bootstrap.sh @@ -105,7 +105,7 @@ function compile { "^noir-projects/aztec-nr/" \ )" if ! cache_download contract-$contract_hash.tar.gz &> /dev/null; then - $NARGO compile --package $contract --silence-warnings --inliner-aggressiveness 0 + $NARGO compile --package $contract --inliner-aggressiveness 0 $TRANSPILER $json_path $json_path cache_upload contract-$contract_hash.tar.gz $json_path &> /dev/null fi @@ -144,7 +144,7 @@ function build { function test_cmds { i=0 - $NARGO test --list-tests --silence-warnings | while read -r package test; do + $NARGO test --list-tests | while read -r package test; do # We assume there are 8 txe's running. 
port=$((45730 + (i++ % ${NUM_TXES:-1}))) echo "noir-projects/scripts/run_test.sh noir-contracts $package $test $port" @@ -197,4 +197,4 @@ case "$cmd" in *) echo_stderr "Unknown command: $cmd" exit 1 -esac \ No newline at end of file +esac diff --git a/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr b/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr index b14258f9d0f0..a70d9ebeadaf 100644 --- a/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr @@ -253,7 +253,7 @@ contract AMM { let sender = context.msg_sender(); // Liquidity tokens are burned when liquidity is removed in order to reduce the total supply. However, we lack - // a function to privately burn, so we instead transfer the tokens into the AMM's public balance, and them have + // a function to privately burn, so we instead transfer the tokens into the AMM's public balance, and then have // the AMM publicly burn its own tokens. // TODO(#10287): consider adding a private burn liquidity_token.transfer_to_public(sender, context.this_address(), liquidity, nonce).call( diff --git a/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr index e7298d32e3c8..e29275432134 100644 --- a/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr @@ -95,7 +95,6 @@ contract TokenBridge { /// Claims the bridged tokens and makes them accessible in private. Note that recipient's address is not revealed /// but the amount is. Hence it's most likely possible to determine to which L1 deposit this claim corresponds to /// (unless there are multiple pending deposits of the same amount). - /// TODO(#8416): Consider creating a truly private claim flow. #[private] fn claim_private( recipient: AztecAddress, // recipient of the bridged tokens diff --git a/noir-projects/noir-protocol-circuits/.gitignore b/noir-projects/noir-protocol-circuits/.gitignore index 9a99d303b900..5782711af5db 100644 --- a/noir-projects/noir-protocol-circuits/.gitignore +++ b/noir-projects/noir-protocol-circuits/.gitignore @@ -2,4 +2,5 @@ Verifier.toml target crates/autogenerated /Nargo.toml -/private_kernel_reset_dimensions.json \ No newline at end of file +/private_kernel_reset_dimensions.json +install-state.gz \ No newline at end of file diff --git a/noir-projects/noir-protocol-circuits/.yarn/install-state.gz b/noir-projects/noir-protocol-circuits/.yarn/install-state.gz deleted file mode 100644 index 5c46fdd2fed8..000000000000 Binary files a/noir-projects/noir-protocol-circuits/.yarn/install-state.gz and /dev/null differ diff --git a/noir-projects/noir-protocol-circuits/bootstrap.sh b/noir-projects/noir-protocol-circuits/bootstrap.sh index b2ba4044009a..4298aea8f2ef 100755 --- a/noir-projects/noir-protocol-circuits/bootstrap.sh +++ b/noir-projects/noir-protocol-circuits/bootstrap.sh @@ -67,7 +67,7 @@ function compile { SECONDS=0 rm -f $json_path # TODO: --skip-brillig-constraints-check added temporarily for blobs build time. 
- local compile_cmd="$NARGO compile --package $name --silence-warnings --skip-brillig-constraints-check" + local compile_cmd="$NARGO compile --package $name --skip-brillig-constraints-check" echo_stderr "$compile_cmd" dump_fail "$compile_cmd" echo_stderr "Compilation complete for: $name (${SECONDS}s)" @@ -138,7 +138,7 @@ function test { CIRCUITS_HASH=$(cache_content_hash ../../noir/.rebuild_patterns "^noir-projects/$name") test_should_run $name-tests-$CIRCUITS_HASH || return 0 - RAYON_NUM_THREADS= $NARGO test --silence-warnings --skip-brillig-constraints-check + RAYON_NUM_THREADS= $NARGO test --skip-brillig-constraints-check cache_upload_flag $name-tests-$CIRCUITS_HASH } diff --git a/noir-projects/noir-protocol-circuits/crates/blob/Nargo.toml b/noir-projects/noir-protocol-circuits/crates/blob/Nargo.toml index da49758490e1..45b10bfc1ea5 100644 --- a/noir-projects/noir-protocol-circuits/crates/blob/Nargo.toml +++ b/noir-projects/noir-protocol-circuits/crates/blob/Nargo.toml @@ -5,5 +5,5 @@ authors = [""] compiler_version = ">=0.30.0" [dependencies] -bigint = {tag = "v0.4.2", git = "https://github.com/noir-lang/noir-bignum" } +bigint = { tag = "v0.5.0", git = "https://github.com/noir-lang/noir-bignum" } types = { path = "../types" } diff --git a/noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr b/noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr index d2f9d809332f..10ebc5c16941 100644 --- a/noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr +++ b/noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr @@ -227,67 +227,69 @@ fn barycentric_evaluate_blob_at_z(z: F, ys: [F; FIELDS_PER_BLOB]) -> F { __compute_partial_sums(fracs, ROOTS) }; - // We split off the first term to check the initial sum - - // partial_sums[0] <- (lhs[0] * rhs[0] + ... + lhs[7] * rhs[7]) - // => (lhs[0] * rhs[0] + ... + lhs[7] * rhs[7]) - partial_sums[0] == 0 - let lhs = [ - [ROOTS[0]], [ROOTS[1]], [ROOTS[2]], [ROOTS[3]], [ROOTS[4]], [ROOTS[5]], [ROOTS[6]], - [ROOTS[7]], - ]; - let rhs = [ - [fracs[0]], [fracs[1]], [fracs[2]], [fracs[3]], [fracs[4]], [fracs[5]], [fracs[6]], - [fracs[7]], - ]; - BigNum::evaluate_quadratic_expression( - lhs, - [[false], [false], [false], [false], [false], [false], [false], [false]], - rhs, - [[false], [false], [false], [false], [false], [false], [false], [false]], - [partial_sums[0]], - [true], - ); - for i in 1..NUM_PARTIAL_SUMS { - // Seeking: - // ___i*8 - 1 ___i*8 + 7 - // \ omega^i \ / y_k \ - // sum_out = / y_i . --------- + / omega^k . | --------- | - // /____ z - omega^i /____ \ z - omega^k / - // 0 k = i*8 - // ^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - // sum partial_sum - // - // ... that is: - // - // ___i*8 - 1 ___ 7 - // \ omega^i \ - // sum_out = / y_i . --------- + / lhs[j] . rhs[j] - // /____ z - omega^i /____ - // 0 j = 0 - // ^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^ - // sum partial_sum - // - - let mut lhs: [[F; 1]; 8] = [std::mem::zeroed(); 8]; - let mut rhs: [[F; 1]; 8] = [std::mem::zeroed(); 8]; - for j in 0..8 { - let k = i * 8 + j; - lhs[j] = [ROOTS[k]]; // omega^k - rhs[j] = [fracs[k]]; // y_k / (z - omega^k) - } - - let linear_terms = [partial_sums[i - 1], partial_sums[i]]; - - // partial_sums[i] <- partial_sums[i-1] + (lhs[8*i] * rhs[8*i] + ... + lhs[8*i + 7] * rhs[8*i + 7]) - // => (lhs[8*i] * rhs[8*i] + ... 
+ lhs[8*i + 7] * rhs[8*i + 7]) + partial_sums[i-1] - partial_sums[i] == 0 + if !std::runtime::is_unconstrained() { + // We split off the first term to check the initial sum + + // partial_sums[0] <- (lhs[0] * rhs[0] + ... + lhs[7] * rhs[7]) + // => (lhs[0] * rhs[0] + ... + lhs[7] * rhs[7]) - partial_sums[0] == 0 + let lhs = [ + [ROOTS[0]], [ROOTS[1]], [ROOTS[2]], [ROOTS[3]], [ROOTS[4]], [ROOTS[5]], [ROOTS[6]], + [ROOTS[7]], + ]; + let rhs = [ + [fracs[0]], [fracs[1]], [fracs[2]], [fracs[3]], [fracs[4]], [fracs[5]], [fracs[6]], + [fracs[7]], + ]; BigNum::evaluate_quadratic_expression( lhs, [[false], [false], [false], [false], [false], [false], [false], [false]], rhs, [[false], [false], [false], [false], [false], [false], [false], [false]], - linear_terms, - [false, true], + [partial_sums[0]], + [true], ); + for i in 1..NUM_PARTIAL_SUMS { + // Seeking: + // ___i*8 - 1 ___i*8 + 7 + // \ omega^i \ / y_k \ + // sum_out = / y_i . --------- + / omega^k . | --------- | + // /____ z - omega^i /____ \ z - omega^k / + // 0 k = i*8 + // ^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + // sum partial_sum + // + // ... that is: + // + // ___i*8 - 1 ___ 7 + // \ omega^i \ + // sum_out = / y_i . --------- + / lhs[j] . rhs[j] + // /____ z - omega^i /____ + // 0 j = 0 + // ^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^ + // sum partial_sum + // + + // partial_sums[i] <- partial_sums[i-1] + (lhs[8*i] * rhs[8*i] + ... + lhs[8*i + 7] * rhs[8*i + 7]) + // => (lhs[8*i] * rhs[8*i] + ... + lhs[8*i + 7] * rhs[8*i + 7]) + partial_sums[i-1] - partial_sums[i] == 0 + let mut lhs: [[F; 1]; 8] = [std::mem::zeroed(); 8]; + let mut rhs: [[F; 1]; 8] = [std::mem::zeroed(); 8]; + for j in 0..8 { + let k = i * 8 + j; + lhs[j] = [ROOTS[k]]; // omega^k + rhs[j] = [fracs[k]]; // y_k / (z - omega^k) + } + + let linear_terms = [partial_sums[i - 1], partial_sums[i]]; + + BigNum::evaluate_quadratic_expression( + lhs, + [[false], [false], [false], [false], [false], [false], [false], [false]], + rhs, + [[false], [false], [false], [false], [false], [false], [false], [false]], + linear_terms, + [false, true], + ); + } } let factor = compute_factor(z); @@ -317,14 +319,16 @@ fn compute_factor(z: F) -> F { // (z_pow_d - one) * (D_INV) - factor = 0 // z_pow_d * D_INV - D_INV - factor = 0 - BigNum::evaluate_quadratic_expression( - [[z_pow_d]], - [[false]], - [[D_INV]], - [[false]], - [factor, D_INV], - [true, true], - ); + if !std::runtime::is_unconstrained() { + BigNum::evaluate_quadratic_expression( + [[z_pow_d]], + [[false]], + [[D_INV]], + [[false]], + [factor, D_INV], + [true, true], + ); + } // This version doesn't work: // BigNum::evaluate_quadratic_expression( @@ -371,17 +375,19 @@ fn compute_fracs( __compute_fracs(z, ys, ROOTS) }; - for i in 0..FIELDS_PER_BLOB { - // frac <-- ys[i] / (z + neg_roots[i]) - // frac * (z + neg_roots[i]) - ys[i] = 0 - BigNum::evaluate_quadratic_expression( - [[fracs[i]]], - [[false]], - [[z, ROOTS[i].neg()]], - [[false, false]], - [ys[i]], - [true], - ); + if !std::runtime::is_unconstrained() { + for i in 0..FIELDS_PER_BLOB { + // frac <-- ys[i] / (z + neg_roots[i]) + // frac * (z + neg_roots[i]) - ys[i] = 0 + BigNum::evaluate_quadratic_expression( + [[fracs[i]]], + [[false]], + [[z, ROOTS[i].neg()]], + [[false, false]], + [ys[i]], + [true], + ); + } } fracs @@ -442,7 +448,7 @@ mod tests { field_to_bignum, }, blob_public_inputs::BlobCommitment, - unconstrained_config::{D, D_INV, F, LOG_FIELDS_PER_BLOB}, + unconstrained_config::{D, D_INV, F}, }; use bigint::{BigNum, 
fields::bls12_381Fr::BLS12_381_Fr_Params}; use types::{ @@ -451,24 +457,6 @@ mod tests { tests::{fixture_builder::FixtureBuilder, utils::pad_end}, }; - // Helper to return (z^d - 1)/d (unsafe - test only) - fn z_d_helper(challenge_z: F) -> F { - let mut t1 = unsafe { challenge_z.__mul(challenge_z) }; - let mut t2: F = BigNum::new(); - for _i in 0..LOG_FIELDS_PER_BLOB - 1 { - t2 = unsafe { t1.__mul(t1) }; - t1 = t2; - } - - let z_pow_d = t1; - - let one: F = BigNum::one(); - - t1 = unsafe { z_pow_d.__sub(one) }; - let factor = unsafe { t1.__mul(D_INV) }; - factor - } - #[test] unconstrained fn test_one_note() { let mut tx_data = FixtureBuilder::new(); @@ -500,7 +488,7 @@ mod tests { //* p(z).(z - 1) = --------- //* d // - let rhs = z_d_helper(challenge_z); + let rhs = super::compute_factor(challenge_z); let z_minus_1 = unsafe { challenge_z.__sub(BigNum::one()) }; let lhs = y.__mul(z_minus_1); assert_eq(lhs, rhs); diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr index cfeb15725611..5a6a911c4aba 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr @@ -275,6 +275,226 @@ pub fn append_tx_effects_for_blob( l2_to_l1_msgs: [Field; MAX_L2_TO_L1_MSGS_PER_TX], start_sponge_blob: SpongeBlob, ) -> SpongeBlob { + let (mut tx_effects_hash_input, offset) = get_tx_effects_hash_input( + combined, + revert_code, + transaction_fee, + all_public_data_update_requests, + l2_to_l1_msgs, + ); + + // NB: using start.absorb & returning start caused issues in ghost values appearing in + // base_rollup_inputs.start when using a fresh sponge. These only appeared when simulating via wasm. + let mut out_sponge = start_sponge_blob; + + // If we have an empty tx (usually a padding tx), we don't want to absorb anything + // An empty tx will only have 2 effects - revert code and fee - hence offset = 2 + if offset != 2 { + out_sponge.absorb(tx_effects_hash_input, offset); + } + + out_sponge +} + +fn get_tx_effects_hash_input( + combined: CombinedAccumulatedData, + revert_code: u8, + transaction_fee: Field, + all_public_data_update_requests: [PublicDataWrite; MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], + l2_to_l1_msgs: [Field; MAX_L2_TO_L1_MSGS_PER_TX], +) -> ([Field; TX_EFFECTS_BLOB_HASH_INPUT_FIELDS], u32) { + let mut tx_effects_hash_input = unsafe { + get_tx_effects_hash_input_helper( + combined, + revert_code, + transaction_fee, + all_public_data_update_requests, + l2_to_l1_msgs, + ) + }; + + let note_hashes = combined.note_hashes; + let nullifiers = combined.nullifiers; + + // Public writes are the concatenation of all non-empty user update requests and protocol update requests, then padded with zeroes. + // The incoming all_public_data_update_requests may have empty update requests in the middle, so we move those to the end of the array. 
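The compaction described in the comment above, keeping non-empty entries in order while pushing empty ones to the back, is a stable partition. A sketch in Rust (illustrative; the Noir helper get_all_update_requests_for_tx_effects is the real implementation, and modeling "empty" as the default value is an assumption of this sketch):

    // Stable compaction: non-empty items keep their relative order at the front,
    // empty (default-valued) items end up at the back.
    fn compact_to_front<T: Copy + Default + PartialEq>(items: &mut [T]) {
        let mut write = 0;
        for read in 0..items.len() {
            if items[read] != T::default() {
                items.swap(write, read);
                write += 1;
            }
        }
    }

    fn main() {
        let mut slots = [3u64, 0, 7, 0, 5, 0];
        compact_to_front(&mut slots);
        assert_eq!(slots, [3, 7, 5, 0, 0, 0]); // empties pushed to the end
    }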
+ let public_data_update_requests = + get_all_update_requests_for_tx_effects(all_public_data_update_requests); + let private_logs = combined.private_logs; + let unencrypted_logs = + combined.unencrypted_logs_hashes.map(|log: ScopedLogHash| silo_unencrypted_log_hash(log)); + let contract_class_logs = combined.contract_class_logs_hashes.map(|log: ScopedLogHash| { + silo_unencrypted_log_hash(log) + }); + + let mut offset = 0; + let mut array_len = 0; + + // NB: for publishing fields of blob data we use the first element of the blob to encode: + // TX_START_PREFIX | 0 | txlen[0] txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revert_code + // Two bytes are used to encode the number of fields appended here, given by 'offset' + // We only know the value once the appending is complete, hence we overwrite input[0] below + offset += 1; + + // TX FEE + // Using 29 bytes to encompass all reasonable fee lengths + assert_eq( + tx_effects_hash_input[offset], + field_from_bytes( + array_concat([TX_FEE_PREFIX, 0], transaction_fee.to_be_bytes::<29>()), + true, + ), + ); + offset += 1; + + // NB: The array_length function does NOT constrain we have a sorted left-packed array. + // We can use it because all inputs here come from the kernels which DO constrain left-packing. + // If that ever changes, we will have to constrain it by counting items differently. + // NOTE HASHES + array_len = array_length(note_hashes); + if array_len != 0 { + let notes_prefix = encode_blob_prefix(NOTES_PREFIX, array_len); + assert_eq(tx_effects_hash_input[offset], notes_prefix); + offset += 1; + + for j in 0..MAX_NOTE_HASHES_PER_TX { + if j < array_len { + assert_eq(tx_effects_hash_input[offset + j], note_hashes[j]); + } + } + offset += array_len; + } + + // NULLIFIERS + array_len = array_length(nullifiers); + if array_len != 0 { + let nullifiers_prefix = encode_blob_prefix(NULLIFIERS_PREFIX, array_len); + assert_eq(tx_effects_hash_input[offset], nullifiers_prefix); + offset += 1; + + for j in 0..MAX_NULLIFIERS_PER_TX { + if j < array_len { + assert_eq(tx_effects_hash_input[offset + j], nullifiers[j]); + } + } + offset += array_len; + } + + // L2 TO L1 MESSAGES + array_len = array_length(l2_to_l1_msgs); + if array_len != 0 { + let l2_to_l1_msgs_prefix = encode_blob_prefix(L2_L1_MSGS_PREFIX, array_len); + assert_eq(tx_effects_hash_input[offset], l2_to_l1_msgs_prefix); + offset += 1; + + for j in 0..MAX_L2_TO_L1_MSGS_PER_TX { + if j < array_len { + assert_eq(tx_effects_hash_input[offset + j], l2_to_l1_msgs[j]); + } + } + offset += array_len; + } + + // PUBLIC DATA UPDATE REQUESTS + array_len = array_length(public_data_update_requests); + if array_len != 0 { + let public_data_update_requests_prefix = + encode_blob_prefix(PUBLIC_DATA_UPDATE_REQUESTS_PREFIX, array_len * 2); + assert_eq(tx_effects_hash_input[offset], public_data_update_requests_prefix); + offset += 1; + for j in 0..MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX { + if j < array_len { + assert_eq( + tx_effects_hash_input[offset + j * 2], + public_data_update_requests[j].leaf_slot, + ); + assert_eq( + tx_effects_hash_input[offset + j * 2 + 1], + public_data_update_requests[j].value, + ); + } + } + offset += array_len * 2; + } + + // TODO(Miranda): squash 0s in a nested loop and add len prefix? 
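A short sketch of the 15-byte header layout described in the NB comment above (Rust, illustrative; the prefix constants are placeholders for the protocol's actual TX_START_PREFIX and REVERT_CODE_PREFIX, and the real code builds the value as a field element via field_from_bytes):

    // Pack TX_START_PREFIX (8B) | 0 | txlen_hi txlen_lo | 0 | REVERT_CODE_PREFIX | 0 | revert_code
    // big-endian into one integer; 15 bytes fit comfortably in a u128.
    fn first_blob_field(
        tx_start_prefix: u64,   // placeholder for the protocol's TX_START_PREFIX
        num_fields: u16,        // the final `offset`, known only after appending
        revert_code_prefix: u8, // placeholder for REVERT_CODE_PREFIX
        revert_code: u8,
    ) -> u128 {
        let mut bytes = [0u8; 15];
        bytes[..8].copy_from_slice(&tx_start_prefix.to_be_bytes());
        // bytes[8] is a zero separator
        bytes[9..11].copy_from_slice(&num_fields.to_be_bytes());
        // bytes[11] is a zero separator
        bytes[12] = revert_code_prefix;
        // bytes[13] is a zero separator
        bytes[14] = revert_code;
        bytes.iter().fold(0u128, |acc, &b| (acc << 8) | b as u128)
    }

This is also why the function can only assert tx_effects_hash_input[0] at the very end: the two length bytes depend on the final offset.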
+ // PRIVATE_LOGS + array_len = array_length(private_logs) * PRIVATE_LOG_SIZE_IN_FIELDS; + if array_len != 0 { + let private_logs_prefix = encode_blob_prefix(PRIVATE_LOGS_PREFIX, array_len); + assert_eq(tx_effects_hash_input[offset], private_logs_prefix); + offset += 1; + + for j in 0..MAX_PRIVATE_LOGS_PER_TX { + for k in 0..PRIVATE_LOG_SIZE_IN_FIELDS { + let index = offset + j * PRIVATE_LOG_SIZE_IN_FIELDS + k; + if index < array_len { + assert_eq(tx_effects_hash_input[index], private_logs[j].fields[k]); + } + } + } + offset += array_len; + } + + // TODO(#8954): When logs are refactored into fields, we will append the values here + // Currently appending the single log hash as an interim solution + // UNENCRYPTED LOGS + array_len = array_length(unencrypted_logs); + if array_len != 0 { + let unencrypted_logs_prefix = encode_blob_prefix(UNENCRYPTED_LOGS_PREFIX, array_len); + assert_eq(tx_effects_hash_input[offset], unencrypted_logs_prefix); + offset += 1; + + for j in 0..MAX_UNENCRYPTED_LOGS_PER_TX { + if j < array_len { + assert_eq(tx_effects_hash_input[offset + j], unencrypted_logs[j]); + } + } + offset += array_len; + } + + // CONTRACT CLASS LOGS + array_len = array_length(contract_class_logs); + if array_len != 0 { + let contract_class_logs_prefix = encode_blob_prefix(CONTRACT_CLASS_LOGS_PREFIX, array_len); + assert_eq(tx_effects_hash_input[offset], contract_class_logs_prefix); + offset += 1; + + for j in 0..MAX_CONTRACT_CLASS_LOGS_PER_TX { + if j < array_len { + assert_eq(tx_effects_hash_input[offset + j], contract_class_logs[j]); + } + } + offset += array_len; + } + + // Now we know the number of fields appended, we can assign the first value: + // TX_START_PREFIX | 0 | txlen[0] txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revert_code + // Start prefix is "tx_start".to_field() => 8 bytes + let prefix_bytes = TX_START_PREFIX.to_be_bytes::<8>(); + let length_bytes = (offset as Field).to_be_bytes::<2>(); + // REVERT CODE + assert_eq( + tx_effects_hash_input[0], + field_from_bytes( + array_concat( + prefix_bytes, + [0, length_bytes[0], length_bytes[1], 0, REVERT_CODE_PREFIX, 0, revert_code], + ), + true, + ), + ); + + (tx_effects_hash_input, offset) +} + +unconstrained fn get_tx_effects_hash_input_helper( + combined: CombinedAccumulatedData, + revert_code: u8, + transaction_fee: Field, + all_public_data_update_requests: [PublicDataWrite; MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], + l2_to_l1_msgs: [Field; MAX_L2_TO_L1_MSGS_PER_TX], +) -> [Field; TX_EFFECTS_BLOB_HASH_INPUT_FIELDS] { let mut tx_effects_hash_input = [0; TX_EFFECTS_BLOB_HASH_INPUT_FIELDS]; let note_hashes = combined.note_hashes; @@ -424,17 +644,7 @@ pub fn append_tx_effects_for_blob( true, ); - // NB: using start.absorb & returning start caused issues in ghost values appearing in - // base_rollup_inputs.start when using a fresh sponge. These only appeared when simulating via wasm. 
- let mut out_sponge = start_sponge_blob; - - // If we have an empty tx (usually a padding tx), we don't want to absorb anything - // An empty tx will only have 2 effects - revert code and fee - hence offset = 2 - if offset != 2 { - out_sponge.absorb(tx_effects_hash_input, offset); - } - - out_sponge + tx_effects_hash_input } fn get_all_update_requests_for_tx_effects( diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/log_hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/log_hash.nr index 54068a15e3fc..79d208a60831 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/log_hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/log_hash.nr @@ -22,9 +22,6 @@ impl OrderedValue for LogHash { fn value(self) -> Field { self.value } - fn counter(self) -> u32 { - self.counter - } } impl Eq for LogHash { @@ -83,9 +80,6 @@ impl OrderedValue for ScopedLogHash { fn value(self) -> Field { self.log_hash.value } - fn counter(self) -> u32 { - self.log_hash.counter - } } impl Eq for ScopedLogHash { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr index 0b842cc16251..eb88ed9024db 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr @@ -71,9 +71,6 @@ impl OrderedValue for ScopedNoteHash { fn value(self) -> Field { self.note_hash.value } - fn counter(self) -> u32 { - self.note_hash.counter - } } impl Eq for ScopedNoteHash { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr index eab73ba87a22..d376f34bebb4 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr @@ -23,9 +23,6 @@ impl OrderedValue for Nullifier { fn value(self) -> Field { self.value } - fn counter(self) -> u32 { - self.counter - } } impl Eq for Nullifier { @@ -102,9 +99,6 @@ impl OrderedValue for ScopedNullifier { fn value(self) -> Field { self.nullifier.value } - fn counter(self) -> u32 { - self.nullifier.counter - } } impl Eq for ScopedNullifier { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr index cdc559f353d0..a57a5e2f077d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr @@ -40,10 +40,6 @@ impl LeafPreimage for NullifierLeafPreimage { } impl IndexedTreeLeafPreimage for NullifierLeafPreimage { - fn get_key(self) -> Field { - self.nullifier - } - fn get_next_key(self) -> Field { self.next_nullifier } @@ -52,10 +48,6 @@ impl IndexedTreeLeafPreimage for NullifierLeafPreimage { (self.next_nullifier == 0) & (self.next_index == 0) } - fn as_leaf(self) -> Field { - self.hash() - } - fn update_pointers(self, next_key: Field, next_index: u32) -> Self { Self { nullifier: self.nullifier, next_nullifier: next_key, next_index } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect/mod.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect/mod.nr index d0025825da95..dff003c43fa6 100644 --- 
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect/mod.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect/mod.nr
index d0025825da95..dff003c43fa6 100644
--- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect/mod.nr
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect/mod.nr
@@ -14,12 +14,11 @@ pub trait RangeOrdered {
     fn counter_end(self) -> u32;
 }
 
-pub trait OrderedValue<T>
+pub trait OrderedValue<T>: Ordered
 where
     T: Eq,
 {
     fn value(self) -> T;
-    fn counter(self) -> u32;
 }
 
 pub trait Scoped
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr
index a8a6480fc23a..8b9d45514a72 100644
--- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr
@@ -31,7 +31,7 @@ pub global MAX_NULLIFIERS_PER_CALL: u32 = 16;
 pub global MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL: u32 = 5;
 pub global MAX_ENQUEUED_CALLS_PER_CALL: u32 = 16;
 pub global MAX_L2_TO_L1_MSGS_PER_CALL: u32 = 2;
-pub global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL: u32 = 64;
+pub global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL: u32 = 63;
 pub global MAX_PUBLIC_DATA_READS_PER_CALL: u32 = 64;
 pub global MAX_NOTE_HASH_READ_REQUESTS_PER_CALL: u32 = 16;
 pub global MAX_NULLIFIER_READ_REQUESTS_PER_CALL: u32 = 16;
@@ -771,3 +771,14 @@ pub global PROOF_TYPE_PG: u32 = 3;
 pub global PROOF_TYPE_AVM: u32 = 4;
 pub global PROOF_TYPE_ROLLUP_HONK: u32 = 5;
 pub global PROOF_TYPE_ROOT_ROLLUP_HONK: u32 = 6;
+
+mod test {
+    use crate::constants::{
+        MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+    };
+
+    #[test]
+    unconstrained fn test_constants() {
+        assert(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL <= MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX);
+    }
+}
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/data/public_data_tree_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/data/public_data_tree_leaf_preimage.nr
index 558c4169f01e..06b4112600d1 100644
--- a/noir-projects/noir-protocol-circuits/crates/types/src/data/public_data_tree_leaf_preimage.nr
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/data/public_data_tree_leaf_preimage.nr
@@ -1,6 +1,6 @@
 use crate::{
     data::public_data_tree_leaf::PublicDataTreeLeaf,
-    merkle_tree::leaf_preimage::IndexedTreeLeafPreimage,
+    merkle_tree::leaf_preimage::{IndexedTreeLeafPreimage, LeafPreimage},
     traits::{Empty, Hash},
 };
 
@@ -41,11 +41,17 @@ impl Hash for PublicDataTreeLeafPreimage {
     }
 }
 
-impl IndexedTreeLeafPreimage for PublicDataTreeLeafPreimage {
+impl LeafPreimage for PublicDataTreeLeafPreimage {
     fn get_key(self) -> Field {
         self.slot
     }
 
+    fn as_leaf(self) -> Field {
+        self.hash()
+    }
+}
+
+impl IndexedTreeLeafPreimage for PublicDataTreeLeafPreimage {
     fn get_next_key(self) -> Field {
         self.next_slot
     }
@@ -54,10 +60,6 @@ impl IndexedTreeLeafPreimage for PublicDataTreeLeafPreimage {
         (self.next_slot == 0) & (self.next_index == 0)
     }
 
-    fn as_leaf(self) -> Field {
-        self.hash()
-    }
-
     fn update_pointers(self, next_slot: Field, next_index: u32) -> Self {
         Self { slot: self.slot, value: self.value, next_slot, next_index }
    }
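The `PublicDataTreeLeafPreimage` change above mirrors the earlier `NullifierLeafPreimage` one: `get_key` and `as_leaf` move into a dedicated `LeafPreimage` impl, while `IndexedTreeLeafPreimage` (re-declared with a `LeafPreimage` supertrait in the leaf_preimage.nr hunk further down) keeps only the indexed-tree-specific methods. A hypothetical helper, not part of this diff, illustrating what the supertrait bound buys generic tree code:

```noir
// Hypothetical helper (not in this diff): with LeafPreimage as a supertrait
// of IndexedTreeLeafPreimage, generic indexed-tree code can call get_key and
// as_leaf without every implementor re-declaring them.
fn leaf_hash_and_key<Leaf>(leaf: Leaf) -> (Field, Field)
where
    Leaf: IndexedTreeLeafPreimage,
{
    (leaf.as_leaf(), leaf.get_key())
}
```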
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr
index 1ffc5782024b..2e73769fe244 100644
--- a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr
@@ -138,19 +138,19 @@ pub fn compute_l2_to_l1_hash(
     rollup_version_id: Field,
     chain_id: Field,
 ) -> Field {
-    let mut bytes: BoundedVec<u8, 160> = BoundedVec::new();
+    let mut bytes: [u8; 160] = std::mem::zeroed();
 
     let inputs =
         [contract_address.to_field(), rollup_version_id, recipient.to_field(), chain_id, content];
-    for i in 0..inputs.len() {
+    for i in 0..5 {
         // TODO are bytes be in fr.to_buffer() ?
         let item_bytes: [u8; 32] = inputs[i].to_be_bytes();
         for j in 0..32 {
-            bytes.push(item_bytes[j]);
+            bytes[32 * i + j] = item_bytes[j];
         }
     }
 
-    sha256_to_field(bytes.storage())
+    sha256_to_field(bytes)
 }
 
 pub fn silo_l2_to_l1_message(
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree/check_valid_low_leaf.nr b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree/check_valid_low_leaf.nr
index 8fbe3334ee4a..0bf69d0ad127 100644
--- a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree/check_valid_low_leaf.nr
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree/check_valid_low_leaf.nr
@@ -19,6 +19,7 @@ mod tests {
         indexed_tree::check_valid_low_leaf::assert_check_valid_low_leaf,
         leaf_preimage::IndexedTreeLeafPreimage,
     };
+    use crate::merkle_tree::leaf_preimage::LeafPreimage;
     use crate::traits::Empty;
 
     struct TestLeafPreimage {
@@ -38,11 +39,18 @@ mod tests {
         }
     }
 
-    impl IndexedTreeLeafPreimage for TestLeafPreimage {
+    impl LeafPreimage for TestLeafPreimage {
         fn get_key(self) -> Field {
             self.value
         }
 
+        fn as_leaf(self) -> Field {
+            self.value
+        }
+    }
+
+    impl IndexedTreeLeafPreimage for TestLeafPreimage {
+
         fn get_next_key(self) -> Field {
             self.next_value
         }
@@ -51,10 +59,6 @@
             (self.next_value == 0)
         }
 
-        fn as_leaf(self) -> Field {
-            self.value
-        }
-
         fn update_pointers(self, next_value: Field, _next_index: u32) -> Self {
             Self { value: self.value, next_value }
         }
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/leaf_preimage.nr
index c1192a03a0d0..9b5fcee268af 100644
--- a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/leaf_preimage.nr
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/leaf_preimage.nr
@@ -5,13 +5,9 @@ pub trait LeafPreimage {
     fn as_leaf(self) -> Field;
 }
 
-pub trait IndexedTreeLeafPreimage: Eq + Empty {
-    fn get_key(self) -> Field;
-
+pub trait IndexedTreeLeafPreimage: Eq + Empty + LeafPreimage {
     fn get_next_key(self) -> Field;
 
-    fn as_leaf(self) -> Field;
-
     fn points_to_infinity(self) -> bool;
 
     fn update_pointers(self, next_key: Field, next_index: u32) -> Self;
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/membership.nr b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/membership.nr
index 61bc479d2fcb..ac3ac7c26fca 100644
--- a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/membership.nr
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/membership.nr
@@ -124,10 +124,6 @@ mod tests {
     }
 
     impl IndexedTreeLeafPreimage for TestLeafPreimage {
-        fn get_key(self) -> Field {
-            self.value
-        }
-
         fn get_next_key(self) -> Field {
             self.next_value
         }
@@ -136,10 +132,6 @@
            (self.next_value == 0)
        }
 
-        fn as_leaf(self) -> Field {
-            pedersen_hash([self.value])
-        }
-
         fn update_pointers(self, next_value: Field, _next_index: u32) -> Self {
             Self { value: self.value, next_value }
         }
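On the hash.nr hunk above: the `BoundedVec` is replaced with a fixed `[u8; 160]` because the preimage layout is fully static, five 32-byte big-endian field encodings, so input `i` always lands at byte offsets `32*i..32*i + 32`. Restated as a standalone function for clarity (same logic as the new code, shown only as a sketch):

```noir
// Restates the new compute_l2_to_l1_hash packing: five field inputs, each
// serialized to 32 big-endian bytes, exactly fill the 160-byte buffer.
fn pack_l2_to_l1_preimage(inputs: [Field; 5]) -> [u8; 160] {
    let mut bytes: [u8; 160] = std::mem::zeroed();
    for i in 0..5 {
        let item_bytes: [u8; 32] = inputs[i].to_be_bytes();
        for j in 0..32 {
            bytes[32 * i + j] = item_bytes[j];
        }
    }
    bytes
}
```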
diff --git a/noir-projects/noir-protocol-circuits/package.json b/noir-projects/noir-protocol-circuits/package.json
index 2f5efb48850c..0fe327c2d053 100644
--- a/noir-projects/noir-protocol-circuits/package.json
+++ b/noir-projects/noir-protocol-circuits/package.json
@@ -4,5 +4,6 @@
   "main": "index.js",
   "dependencies": {
     "@iarna/toml": "^2.2.5"
-  }
+  },
+  "packageManager": "yarn@4.5.2+sha512.570504f67349ef26d2d86a768dc5ec976ead977aa086b0bb4237e97d5db7ae5c620f9f0e0edf3ea5047205063faff102bf2a2d778664a94eaaa1085ad483fe2e"
 }
diff --git a/noir-projects/scripts/check.sh b/noir-projects/scripts/check.sh
index d644d966ef69..81c97a8cbc16 100755
--- a/noir-projects/scripts/check.sh
+++ b/noir-projects/scripts/check.sh
@@ -3,7 +3,7 @@ set -eu
 
 cd $(dirname "$0")/../
 
-nargo check --program-dir ./aztec-nr --silence-warnings
-nargo check --program-dir ./noir-contracts --silence-warnings
-nargo check --program-dir ./noir-protocol-circuits --silence-warnings
-nargo check --program-dir ./mock-protocol-circuits --silence-warnings
+nargo check --program-dir ./aztec-nr
+nargo check --program-dir ./noir-contracts
+nargo check --program-dir ./noir-protocol-circuits
+nargo check --program-dir ./mock-protocol-circuits
diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml
index 33b300afae76..9c268316ab89 100644
--- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml
+++ b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml
@@ -1,4 +1,4 @@
 key_hash = "0x0000000000000000000000000000000000000000000000000000000000000000"
-proof = ["0x0000000000000000000000000000000000000000000000000000000000000040", "0x0000000000000000000000000000000000000000000000000000000000000011", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf", "0x00000000000000000000000000000000000000000000000b75c020998797da78", "0x0000000000000000000000000000000000000000000000005a107acb64952eca", "0x000000000000000000000000000000000000000000000000000031e97a575e9d", "0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4", "0x00000000000000000000000000000000000000000000000c410db10a01750aeb", "0x00000000000000000000000000000000000000000000000d722669117f9758a4", "0x000000000000000000000000000000000000000000000000000178cbf4206471", "0x000000000000000000000000000000000000000000000000e91b8a11e7842c38", "0x000000000000000000000000000000000000000000000007fd51009034b3357f", "0x000000000000000000000000000000000000000000000009889939f81e9c7402", "0x0000000000000000000000000000000000000000000000000000f94656a2ca48", "0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f", "0x0000000000000000000000000000000000000000000000093fe27776f50224bd", "0x000000000000000000000000000000000000000000000004a0c80c0da527a081", "0x0000000000000000000000000000000000000000000000000001b52c2020d746", "0x0000000000000000000000000000005a9bae947e1e91af9e4033d8d6aa6ed632", "0x000000000000000000000000000000000025e485e013446d4ac7981c88ba6ecc", "0x000000000000000000000000000000ff1e0496e30ab24a63b32b2d1120b76e62", "0x00000000000000000000000000000000001afe0a8a685d7cd85d1010e55d9d7c", "0x000000000000000000000000000000b0804efd6573805f991458295f510a2004", "0x00000000000000000000000000000000000c81a178016e2fe18605022d5a8b0e", "0x000000000000000000000000000000eba51e76eb1cfff60a53a0092a3c3dea47", "0x000000000000000000000000000000000022e7466247b533282f5936ac4e6c15", "0x00000000000000000000000000000071b1d76edf770edff98f00ff4deec264cd", "0x00000000000000000000000000000000001e48128e68794d8861fcbb2986a383",
"0x000000000000000000000000000000d3a2af4915ae6d86b097adc377fafda2d4", "0x000000000000000000000000000000000006359de9ca452dab3a4f1f8d9c9d98", "0x0000000000000000000000000000004ae614a40d61d28f36aaf03d00a064355d", "0x000000000000000000000000000000000010edd34b6c69cb31a6a833a1d040d2", "0x000000000000000000000000000000f4976ee83b95241474007fdc30b06ebdd7", "0x000000000000000000000000000000000019cd0d7e4577008a8335c6260be826", "0x0000000000000000000000000000004ae614a40d61d28f36aaf03d00a064355d", "0x000000000000000000000000000000000010edd34b6c69cb31a6a833a1d040d2", "0x000000000000000000000000000000f4976ee83b95241474007fdc30b06ebdd7", "0x000000000000000000000000000000000019cd0d7e4577008a8335c6260be826", "0x000000000000000000000000000000f968b227a358a305607f3efc933823d288", "0x00000000000000000000000000000000000eaf8adb390375a76d95e918b65e08", "0x000000000000000000000000000000bb34b4b447aae56f5e24f81c3acd6d547f", "0x00000000000000000000000000000000002175d012746260ebcfe339a91a81e1", "0x0000000000000000000000000000007376c0278bcc77156c008b389f6bd12e8f", "0x0000000000000000000000000000000000176ebed74bf3b57ca42a2fac842809", "0x000000000000000000000000000000fc4a601439d45b9ce23c9d9cc37cb21dd2", "0x000000000000000000000000000000000017dc6f8c53261c44cd0348a9970d88", "0x000000000000000000000000000000292042e9c3c101d8dd41b3acfc0db429a2", "0x0000000000000000000000000000000000114f6b3755c65915e12f13533cdd41", "0x0000000000000000000000000000007bb311a6bd5f4f3d543ca30a1a0201614f", "0x00000000000000000000000000000000000871a5a04dda33543cf59b3babf733", "0x03ecb02e48ba68818287222348fc537135b6426d4580ea31cdd540250a7a95fb", "0x2c779e44987737a835c92393388504ebf27da5db3438865f760cb56ee5856a06", "0x0ac17380181974c0082ef0fada047fc7906a94dc18e5be249dd29e52961522e0", "0x206c623397f11ed76a292c7c605fe56c1ae4f6babb6889786aaff461535cf79e", "0x11dfbe6d0977046fd72b55bdb9b41f983fc06ea6404cb36efdf1786d6f0dcbf7", "0x1081a4f21029bfcf4f8388257bf88d932963b2368c4fde96836fb6f023f16622", "0x2e981de982d368f04b0a49e537a05ed5583080c3eafed9617bd06e1214641be9", "0x2571d21e04b1d59935490270a4789e53042f18e15fc7b94dfd267c8deadfc74e", "0x0f70f3f5319d3564bcc6fad428552eefce8f341ced5187fa186d0f099315f225", "0x18f7ee010c96f63d18d2a277e5f5a8ef19b525907817d69616d24495ad804021", "0x1cf7e347bae24a2c1a93b5fd11ff79e68bd2c1bd32056df6d2e1582d17db1d8d", "0x01c451a702451ea9871b7e19d3e2052d8fa49a571ae46616453f27d324cfc907", "0x0614dbf2404cb5fef2e5408f1115502954097981a5ea67d1f80812fa247b1f93", "0x09884374bb354fdb4b118f4da44cd37e75ba7a655201f94cc4064acbdc35887d", "0x158211f68576bcafd791ff02039bb8246b1ba94d99f30a0f282565dd8d82ce0b", "0x0a5231e4039c7b7e0ad77d319958403997a33a0be31e71032733cd70067b14db", "0x2803fc0a9f60d9e202eb6caddcacfbb94a5a7df2695c2a0c909815093ca1864c", "0x0faa80f3cef0e86746df7518f865936959d65eb64ccb05bfb574ac5561683eae", "0x298544d77ee6f4cae7aaa4e9203a233fa18dab360d6f54daf9f8801802586d58", "0x0ba76e9cadf0380b82a3b5ec1c71b80295ce3caeec73aefecffed90f28c1b012", "0x0d965885fc07c4a140f50a83cb750c876191651ee47462d0931e0e67bc9ab5cb", "0x0aad443f96fbde49595f31a47d233ca83ed2bcda86801a8fd7a596ea666c7716", "0x298ef11d346c061b8899cb10c0cd7150aa2d6c554340cf2ee6aa76125e7d1e7f", "0x27518b0512f1efb3a79650941d3adb015b357c92a8c06e086be7b3e7666cea47", "0x0cd6b10a38d5af7f0a5fa97c0ffcba406e8a65a92fe050b3b79f3f1f45d723e2", "0x2f5e1210a841f9c6a7ee7d7f290479696387a58192cb775a64adf0dc225e3d4f", "0x2ffd390be1aecefc25137d01e5b7a85410254ae9e21e7067a9869217cb828ee1", "0x244a77246b5beb3e0e38c8dda39a516050303e83c051300d08bbe5450320b47f", 
"0x16ed38ff1019df1d168974833ccc96d8648d05c2823021d45e1d5599998a8aec", "0x24cbbf8ca1a5bb082c1081d9252a5e964ff7aae4d83e04468db55960b80a585f", "0x01b3a78bcc6e7529c6ded587b8958d903ef990421d5706afbef64a7976a51084", "0x2c3a013cdecd027b32c8c99c163dd822df374ef018aac8f3d1fb8d57335f9503", "0x0ad83466928e5d70e1e0bb26633a1e0c61399733c15fffafb22df18029e583d6", "0x095023de752480ca3edd408d3c34d732bd9836919e84931d3f42a978867bc53e", "0x283d221f0a2e6b6616bc3eda23e15bb8c8b459010014db7608e646c70ea1da2e", "0x1390c81536ca3d71228eeccf9844702aa3df012bbc3902fe30323f55ee8c7872", "0x0ef66f0302753884f05043080104589e1bf036566a9a00d4688d3c7910631508", "0x15eceeed97d9267597c028fafac72dba40f3ea441880bdd2202b30846a243044", "0x2b38e3f3b5193811df1bafd58432d5abd1c5f575a5d9c2a449682a0f2f035034", "0x2bbcc40d11d6a96c85a63162143ec53314573bb0af7c32d89b791394c80d14a5", "0x22d909e8fb5152b5b98f7cb3fc38f4ef7d7e8f2a857347da59ba2e979bd2ec96", "0x29e3f7a95437fe6ea1e72cd4c7c2fcf1892ab22ba69387f1a6a795e91bcc26f2", "0x02d8c8d031ada707d85693ed106135cecb7fe52de73eedbc54a91a2c856d0de1", "0x28020fb768c50bf52497832c04fb0e14cdb651b98b80864d0b68228ddfd922a0", "0x24d61051f82ff93eb42b5afdcad9e236c669f9f849bf811a853f1e9cc302f8ce", "0x250e89de38769cf70e6cb3685f99e39f44d8ebc1a468930f49e1d80342092b6a", "0x0ad45df677495b9199c57f620db716c543fb6437d574453b7721c2faee6316ba", "0x043d8715e16e3f2dc5a537355c0576254a36db25002f13958696ed13c24add46", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0bb8cb6a256f110f134a15b66b644eb49dc8e0239f92489408e6265a323bb513", "0x18f02f61683cf6a7492291a7041ee1b3cfa8dfb5aa40e2f6571d13750f0f459a", "0x0a982c10c93ae173751f4165e6456c9b85e2551bd503919acae78c801ccc0e1e", "0x0b6e6266bdc6d44d05ae76d2db249877a46ad978fc758654a3a6a7208d2725a6", 
"0x20de00721f30db8280aa6804c90f3fbe23477e1f746a8eff61c4c5b2af7e821c", "0x1321548f766ded3432627a50704d233455c61d8e59afec2883dbe2f381c033f9", "0x00c56c902b14aa59f6f59badf20f59c51b08aeb6afbdd97a5ac7b98b998b0a56", "0x1745c01290e99476fa655e5fcf2e5b3d24f42624cbd25a40f60af3f3e8489512", "0x08e915bebf8f6c86b332a681d35c57dcd0e0804f22e4e78884ab5edc0a41d6d0", "0x0c77b29e3a0c4368ce5231f94d13fa9b84a6e350cc8f2a57b3f6920a2bf0e879", "0x07ab821c4f07e1dd1cce43b9d9dbf4c9da2e22fea675a57ecf5a47a6992059a2", "0x0bcf85d1f4e34e7598a68bc09cfacf119b0afa1193ad4163096e44ca8eef7a22", "0x0c8a367da01b9097d24eb7b5471c6bd049f7938080bb8cc6fb92945fa934faf9", "0x2cda5caa3a93dcc134cf176f4ad6c79577d273bd2706da3fcaad92a9bc336ac8", "0x2c19cbfce048ef9bb9efb5b9875645b0bc7825aa1a05ad71dbbd3a34d1d76501", "0x28d7f11c2b6b7ff8717b79b098e5bd37eb39a8ea5d03b15339c86fb93e497d97", "0x21ba86c3baacb0fe4270c64c1a5f2861af2f5aa1d6391afe8214e1ce9eac8dfa", "0x0c5289ff2fe581ccf0caa97b4041d381e384c4aef0417edd4dafa7f64a3bfa6f", "0x0c744b0b301542ec4f334696945d7b89b137c5b9434bec277dc938c3dc9ca7aa", "0x006d246e7b0ad18157fa925981f097121fe34cc769128fd8993c4d15cfdf0aee", "0x00000000000000000000000000000022a7ebdc5723cfdb5d73c4c0392e43c407", "0x160d495d0b4462662665faccb1d65d81f0c3f006254a6082a0ceffc49483d76a", "0x24a5a2b9bef1abc3a818d2f27ca5cca7e1b3da34ea6056f338ac54638e26c3b0", "0x0a3dcf45eb6c14391ed8aeab11cb9460f85cb11a1ecb868ad75095796c0f5dac", "0x160c8087a99bf109389ca20d0ce624f71dd2e40fe64d62712e45669ed550fd61", "0x1a65519621040c52fdec5594d095add265c99210929eeb29a3a8180b480bb313", "0x09bf095c40c00dce181347b0d8eaa83ecd0f1f8fcb5dab34ffe19e219672e681", "0x10f3444d7a623e54ad2e882c436191975143217e9f8121c8770a3bbd3ec183ce", "0x0cd68d59322f30bdaf0a6a628f095f880ff716b157ef9a2d589f2cfc40b53e3d", "0x20732eab49adfd109626e0e84496a929a690377e98ee03f13f66ad4541949300", "0x28c850b8bc7f6ae88835bed342eda477e8d1f013a4db34b7ff33198e0a9f8e09", "0x14d234765f29b713df4d1291d452c73a5dc00458242df1be96e04ffb0a3c396e", "0x162572b68ab59291f430cf3113d4fcd3ddca59c4808c134345f6765693f8e1ce", "0x0bc37cb2ffd0f1691bf4d8c621d49f8c23ff58989d09d96c019cc6fc9a46e155", "0x0bc37cb2ffd0f1691bf4d8c621d49f8c23ff58989d09d96c019cc6fc9a46e155", "0x2aa58bd9f811c2970dcefde8c31c2c6db2349eed5161f80c0280a3acbd9f4f0d", "0x141198df3b9c1917d2e7a136ca3438fd9630719591c24388a0559873756d288b", "0x1975864b97d6ff0ace3d600020772ea32181c2f3e413ae67bac22e4a3bbcba9c", "0x2023d432e6630619c99c25a2f293f5ed78f373ee5212bff02b92f00a43bab0e0", "0x23cbaa3113b7265b926bc8811fccce701b3d4e4006ae6b66cefffd0e2f059fd4", "0x00000000000000000000000000000061e09b904168dcad01e82d26f8a21bb884", "0x000000000000000000000000000000000027a29682b86d1b09daa382868b245a", "0x000000000000000000000000000000efc6d7071b6c8cf492bc3ba72f4eda84c5", "0x000000000000000000000000000000000010860d04901949ad63a94863c77149", "0x00000000000000000000000000000036cacf503222db1b5d0edd58aafc1e74f9", "0x00000000000000000000000000000000000fef16ca13117c1f45f6df0782c273", "0x000000000000000000000000000000013704a507a82f6224d7c369d3d6c3929e", "0x00000000000000000000000000000000002887168ff50b7339d4d181d2a84cc0", "0x000000000000000000000000000000e57f500eab5f7d08ac3c93ce080dc1f15e", "0x00000000000000000000000000000000000c720872540cd88fec315c3e6a7625", "0x0000000000000000000000000000004b6897120d1858d29ac10cba52cf73dc44", "0x000000000000000000000000000000000019a9523d9a2481b5dbb1c23cb9686e", "0x00000000000000000000000000000052442fa24d3d5c3b9a608d8259757905a4", "0x00000000000000000000000000000000001c58b3787ffa3edd9f049c1c775a85", 
"0x000000000000000000000000000000f5737a5b8d278973356217e75bdc986ea2", "0x00000000000000000000000000000000001e0959f30c996ac7e3b265bac2de9d", "0x00000000000000000000000000000030122ef6c7a097b6b2ea6efa763797b8b4", "0x0000000000000000000000000000000000191344dd8af6c1c1c3beef933de271", "0x0000000000000000000000000000002f38961e4eb47676a42a5b59457e19a7b1", "0x000000000000000000000000000000000002936d1fa64f399018fbf60a1f2001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0f349727b02c8c3916e6904450b4db7d748c746d693f12acfd44757e349ae543", "0x109b539e615d399af085519623a8f0ea43fd3e672e8b3397deb033110b896ba7", "0x1d2d7546c079198782bc8c1ecf377c86567d90d05988acd013af254a84ccfca2", "0x11922ebe299248b1a194ff41750bbad0297b8e4db3944036983764ba171195a1", "0x14cb035cb0d920e42127d67ed6763fd0cdadeac07c0aa57ab5c038a501aca88c", "0x2e82b6bc3d9337d1f2c080ed47e935308317c159408ff3a054718e737f827a49", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000c883c136e90256f3059fb812416fe61171", "0x00000000000000000000000000000000002cbe86e3f824a75195f942b5de0abe", "0x000000000000000000000000000000ff2335151394b16f671dcfdc5ee78485f3", "0x00000000000000000000000000000000000fceb210dd93cd99b7bb225af47ae8", "0x0000000000000000000000000000001a95675079e3f1508e636cf69b7978db7f", "0x0000000000000000000000000000000000079f7a59ed7b4a3dda951fa8fbad98", "0x000000000000000000000000000000b15ed33e983574376efe5ae0c29c6bdd1f", "0x000000000000000000000000000000000004031f5fd23cf82aad9e5d0bfd0363"] +proof = ["0x0000000000000000000000000000000000000000000000000000000000000040", "0x0000000000000000000000000000000000000000000000000000000000000011", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf", "0x00000000000000000000000000000000000000000000000b75c020998797da78", "0x0000000000000000000000000000000000000000000000005a107acb64952eca", "0x000000000000000000000000000000000000000000000000000031e97a575e9d", "0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4", "0x00000000000000000000000000000000000000000000000c410db10a01750aeb", "0x00000000000000000000000000000000000000000000000d722669117f9758a4", "0x000000000000000000000000000000000000000000000000000178cbf4206471", "0x000000000000000000000000000000000000000000000000e91b8a11e7842c38", "0x000000000000000000000000000000000000000000000007fd51009034b3357f", "0x000000000000000000000000000000000000000000000009889939f81e9c7402", "0x0000000000000000000000000000000000000000000000000000f94656a2ca48", "0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f", "0x0000000000000000000000000000000000000000000000093fe27776f50224bd", "0x000000000000000000000000000000000000000000000004a0c80c0da527a081", "0x0000000000000000000000000000000000000000000000000001b52c2020d746", "0x0000000000000000000000000000005265060992033960e8e32bc3fc09c3dffe", 
"0x00000000000000000000000000000000001d477657d4c186192157174e1ae85d", "0x000000000000000000000000000000cd51c49ebd008943587ea23486b8b5589a", "0x000000000000000000000000000000000029cff3c57394ddf5cc5323399502b0", "0x000000000000000000000000000000c2ec36ea535b8aa21337d02d48df12dffd", "0x000000000000000000000000000000000005af47d2e8508f0b0f9a09c4344c5b", "0x0000000000000000000000000000008b0495a384458dc7891e7694abeba9c27f", "0x00000000000000000000000000000000000693f38a17576df05f40917378ee5b", "0x0000000000000000000000000000006234b1604de9892ab1a1dd7e5a12e5b64d", "0x0000000000000000000000000000000000027beb67a31282343d16798d7f6881", "0x0000000000000000000000000000007cf97aa50f47c9263dad2e4c2c94cb1243", "0x00000000000000000000000000000000002892ea06de7e07a8cb8b46e6292fa0", "0x00000000000000000000000000000079cf93b804469cfd1aed183baaeae73de8", "0x00000000000000000000000000000000000e59187557f6855bde567cb35ede86", "0x0000000000000000000000000000008bd253f9ec6d2aa0aa50326c8365f4771f", "0x0000000000000000000000000000000000094ed7be0bdab40ba71f0b5a713f46", "0x00000000000000000000000000000079cf93b804469cfd1aed183baaeae73de8", "0x00000000000000000000000000000000000e59187557f6855bde567cb35ede86", "0x0000000000000000000000000000008bd253f9ec6d2aa0aa50326c8365f4771f", "0x0000000000000000000000000000000000094ed7be0bdab40ba71f0b5a713f46", "0x000000000000000000000000000000c6fbd597bf32620b74384b4182361f42c4", "0x0000000000000000000000000000000000018edc9ac63e10a04a67e9a498a93a", "0x000000000000000000000000000000f84f1abee938aa614d378583f16052e4fd", "0x0000000000000000000000000000000000071410de8b0d772ec833ab10f01f9d", "0x000000000000000000000000000000183c61e378b1ca713479769b7b2e74d603", "0x000000000000000000000000000000000003f21807c5069ab301a6d44b1d2960", "0x0000000000000000000000000000000ba9d96216597d251984d0a302327cd6fb", "0x00000000000000000000000000000000001379e1299d694a37dbff2a7e451267", "0x0000000000000000000000000000000265b8a70e3adaf365ce6afab43132ce80", "0x000000000000000000000000000000000025e91bb4ccd7abf754509beb7d23b4", "0x0000000000000000000000000000004929d635d6bd6dd0a5af7c245c462d732c", "0x0000000000000000000000000000000000227dd7610f4731e1b44f25d9e78fe6", "0x2d12d5c5058c324f9bc322ea29a04784b54f8035f139416d5bcfd943b9926801", "0x035178addba56dda1c8d22cc57e110d872e4681288802f23e8121c50366d9800", "0x06de7f41c17a9eb137d2b70b0d0feb97d6f83112060a530cf5ab9a7517e9d78a", "0x0f273c12ada01ad0e8069add3d3b8a8faa0bdcb4dfc3623fd4ac16c1e552e12b", "0x074ebece0d875a3bb670ad14fd85b2f02457b8ed862c93b54066d9b669a183c3", "0x11b89f60144876fcb3dda5d7cc8635d59b4361768b9b25f1378b26b49828f725", "0x288ff447a67c1c9b141e2ac417f07ea586bc56bca2c1dcba4936b41441393038", "0x2d3ffc59491e07d005e69e1487027011a76eb3c6d524a9b4aa6bf1bc46b6349f", "0x1466ef0e63cae250079dc66e41e42dae283f52d8787ced3e2d17b4c7c9c7d898", "0x225b3258dc7d920544b3b6531b7af2ccadb1c70a0995acc4501b88cc5f0fb907", "0x1f0e4e558fdfd2cef659615cd3d60478d5c63cc747f825a946e7c603a39752a8", "0x27c3896812da6e9b849b3040c6ac87fac3d663b3115a76d88c303f57b644d990", "0x1a537570f364e5b1dd9af6c4b2c7d1112556c00a75ddd8315a1bb1b9096700d7", "0x2d3f559a00ab352c8786dc2020127e107ee08768268c6b0097b2d6176842f414", "0x09bc339c3880c79a593f7db4ddc73fccfeae63b9fa4c5d5cc2f4bb280517c6cf", "0x2f8bc578f20113073ff2be60b6da3495f89a42120bc52bbd253796874f9759fd", "0x26be7abb2371fc72625ec11ffab449c1778b80451fea16526e6248efd49de5b0", "0x21cf35a90e5a308b8aad543bfa4750f19bd8e13e266a096cd630a39a475c066d", "0x15a2722c132db7dd062ccc988626ee32930ed9f4cc2dc8f0f5b83acd01c7395f", 
"0x18fefba3a72aca38f11dd1b258e3dbc5a3af8baf0aeb2b7f95ba75db421254b5", "0x2135cfbb055b1dcc2629ef569fce3203458a4a93351da029888787fb69950e89", "0x2a9324753d1299472226591c7681740d5ac53600958ffe33e20e936e3e134504", "0x2b37b9147927b1c9bbc4ce18150ad33400889a5ccebf417b4b9776a4238a0bc5", "0x12155671f2b7ccc0c25ff0a5748ee1c222f193d6902c4a9d37981800d05300f7", "0x08f07b01a5989df1b57fb57ec34266d468586ffd77546656ffd945917f2f310e", "0x0ec1ee0e959b0f8d58af8c82c2d8259cea457d0cb98a9c3da29c7f534a6e217a", "0x22aef80d85ba0200f99435006647d80993dc3336dda00ef68b7cebdd8c3f37e5", "0x2b07beb48ee0be89ba61106738486f4bd7a27f066d98672afc0eddbe0f62fd7c", "0x031c53dd417a71e0a092e57e9cf05b34b1d6afd989ebee1c2a7440e6fd18e142", "0x19705da6d82f9c572e529f9ff53f611dc7cdc5353febbdd7d99baf68f5d7fb08", "0x02dff73237605fb60aaeec62c4d70e211dafd696323ebb4fdb23da1356edc7b7", "0x28123310ea392855c4c5ae09a808e76d9aaebd1a2be528cd9ff2b58f2ae2e2b5", "0x2fb6955e954aa2ab2ba684330098e5c96f95d36543e766c7059c36e2e92c86ec", "0x1c108db3f6cd00c92f834baf1a88d03ddfb799bc91f729102ab68665f14a0429", "0x27b7c4e8392389c0cb16829cf7edac4ec71b7a95fc9ac63cbc811526fff42949", "0x00bda672de3a14c5d3d109a7fd2b355ab9cd5948b25c89e7063219788cad2b5f", "0x2540ed01eca6e3fbe9b413fed252a8b51b905ef52d3c44b6e3f9d96d89cceb25", "0x0980154978657d2050da8f97f83d1e92756b6d8f6d9a2c2858cdc89b26fbf262", "0x0f16055808088e8b53714c3cd0446a4a913d0924d5b7911a2f044ebd42f6174e", "0x0fe63161bd27a029a308803a11e6483e0d93dd7a045830759e6d6dd2750fdebf", "0x1c7e932d81a1c04a013e3bba9b56058302669cff5eb72999e12b6cf1528c3297", "0x2ee5a9a008f39be5f918f1edda8581a645e8ec7a599f659c1d1d543b4b57a54c", "0x153937bfad1211834575747a5b7ba58c4773b9f6fbc94d40598a244be55d51fd", "0x2dbd25164c929288b7cdd48b7628b89d1d61317959c311a7737657c624a47184", "0x27c9b7aa6d6333bc40fa43c4c9a851900fee79281caba089586eb2d8269c3c54", "0x2becaa90c4e2bb8acbe6dda8a2a3b43d9d04467898ebb1dc3e14a83f81d32512", "0x2b912d1ba0a02db4c8233000999aff38a430681acb23d1bdde1fa0fa062a17f5", "0x2075cdc89c791cb2d6b84a5a963f3600c51b7b1b2fabeed10c738f55ab1fb4a1", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x1471a95eb0ded8389636686aa80dd03de1d6463b220aad6bf79ad09808a1d668", "0x182e36542540118bf3f916f5fa1a8e9815f8e9790aa4d409eb0d4dd853501264", "0x0c39aee6873d5cda3ab71fc6445f6e2442780dfb3d676f9e3d52c72c7e9d2212", "0x072f89165101953f3721c7026fb31f3ab483e28968f3b0fef9ed94c2ea762fcd", "0x01d45e6d3df87d2c189099593bfade85b1b3cc46f9197cd2fe86441975738b43", "0x0357669d3816688746b9956a15ee6932ece3f90452670d3170470b1b492d43ce", "0x0888ced17aa0b48253ae7281b047364a55b21de8cc6d3d95576e1c12d844e896", "0x22e1db5f3280b301e204a4f0fa197e885acdf1b4a7796a1fbc8cddc9745387fb", "0x2df0db1e5eee18402c47d12e0e6cdb01233bff44c531a10beaefa54fda09ed72", "0x1b2a1e808d4e97c016343cd684638f3dd868c6bba4310305194295bf3e3cc28e", "0x17424701ca23f7af5d1da3578075813a381ab149d8e0955f983101fb0dafd274", "0x1a94eb7d138fcc01534c8b595a1b26f92ed69e554afd1a9cca08fade4c079e8d", "0x2a4054c6234b357a39fcb5e5422014cb9e2c2e78471fc7c0de4ed34699ebf537", "0x1c32828ae686569702e809917a6c4c9b312ecc2a1f4ff46f2a9a8f2cb1b9d8f6", "0x30336f19d17ab0663f491e03cf93cdd00297ae6d355ebe964c739e09556a9917", "0x1a601b9ea1a9c906d50145eaf9a04a92c54e0e735ecb5d70646eeba2550af3a7", "0x23c58b060ae1d78bfb9ac877019853f5aa163bff15f7f5948fe2ee56b6337412", "0x25fb1879de2f781f9ca372272993747db0a8c297d0c4c8e42828a5a15002dcd0", "0x24dd8e2f31a2b4840f45153fa22b010892d26826ffd2a701494dac528054f7be", "0x0883817539bb697cc376c85a88ef1953caa44a920bf1da6e3995fac80612b5e8", "0x0000000000000000000000000000002c8a80ab07bba8ea2c44e6cc7a5b6cba32", "0x27825b9d5cb4ccf2fa78745def0028b976dca4f32df2ce69c75415e0f238f536", "0x2769a1f59949dd4653929cff29ec6a66e2da94318b59a4dbe8f227d9b8123093", "0x25999ee45d5bc32b4907e46aae89833a08201bc10709f15cd35da391a2edd67e", "0x2a344e28ea8cdeecce8522a2bc601ca6ef195c06dc977a6df77d280f8f5bb176", "0x0f20867f6a27630060f7b33c42e110d3ecc2a63c2595398f64071ea5032ec397", "0x0aab5753f49b34843dbaaaf495a2f47ff12322c79c94eee76bcbf3f865c30267", "0x2027172e4ced4b6f710cfbd7ba3b685be782735c53eae17afa2eda35825b3b9d", "0x0dc044704fe514ae924b05d6aba5b431afeae6e4b9cb456a789f3e8e8deb6051", "0x06b97bac55f6413c26abe689d9946f317ada969531245c0c899c69684bcc68c7", "0x0bb72a73b8c5d6d0cfed762f30040d42be18b9c945f86c42346867a38f2679c6", "0x0018b84c9bfbd6ee153769d648f55580a995baa3859d4783e8c4b514b45e8051", "0x1cb9cc9da1e96a946e3a9e1168eda85fa379def26a78fdbe515ab45d777df665", "0x0aceacf3dd99bb0fd30c2cdfc54633a7285a7aae048f2ce7a4baeb910835940c", "0x0aceacf3dd99bb0fd30c2cdfc54633a7285a7aae048f2ce7a4baeb910835940c", "0x1c0643f95e44ace2ac1ded3edf88730c40bc669faa78d8991aaab241609e29bd", "0x17c09c851d565f7dfea75315518f04a28487b8964b61b920f0419ffd2a0b0fbb", "0x06e6f5462be57045ead38ce7146d81216ddb07b4320b9fd2c308aa2369a10c66", "0x03302944c99c6a0559eb4cdbfae6429f2125b47e16eeee590c54da69574cd527", "0x0f46b7fa3b7c399f1f8992b32a0a900706c3b6f3db366be175081fff39f79899", "0x000000000000000000000000000000deab220098e0743b21ed93f6388e80a65d", "0x000000000000000000000000000000000003885559ebf4ab35d49b0bce4d79f7", "0x000000000000000000000000000000017e2de970aaa284197111fd9905c4a742", "0x00000000000000000000000000000000000b1345364238b08e3fe9707410c18f", "0x0000000000000000000000000000006d73dd0a654f2bd614c5776e04c0e60cd2", 
"0x00000000000000000000000000000000002e68275e03a839ecd487489154f294", "0x00000000000000000000000000000001e1f93d2b018f0a6b39ef1fd29a5e9710", "0x00000000000000000000000000000000001545df2edfad784f403a3ef4ae2865", "0x000000000000000000000000000000ff7ae4905cbb3d622172e48dec6f1b2620", "0x00000000000000000000000000000000001b9caac1ec4b6386a2cc38677bd826", "0x000000000000000000000000000000401072f183011dbf2761f1e506c156e297", "0x00000000000000000000000000000000002bf1ac2ef46cabb190b21d8463e1b3", "0x0000000000000000000000000000001aa0a85b68445babca19d465f16f3d0f44", "0x000000000000000000000000000000000026af9be6313c8e278673d7af6f4bf3", "0x000000000000000000000000000000721825342b3c04f9e6378b644348c801a6", "0x000000000000000000000000000000000015ba091e88e2ff46007a89d2d1afd5", "0x0000000000000000000000000000007344bce8d1d609356ad8f8ac0d34ec3da7", "0x00000000000000000000000000000000001a7ff303dbb62a1af6b632acbb3ef4", "0x0000000000000000000000000000000b4fe0012ff9655be51e69b0996b06179c", "0x00000000000000000000000000000000002d7ef4d382f16509894e2b6032af10", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x19f0c008fa095f42bad43fb28b2b4b47359270ded337b6d25391fdae9b391d75", "0x1c94592a94c4beb4a7e85554aa2bf80765155ad05e8936c25288cd8c4f67a3f9", "0x025138b970f04c9e9d196714071b05a4f20904fff74e69b30c548c975f248f31", "0x188fa91b4dac482ae0f2e44baf3866c1f9fd612d74bc04c96f5f565508f84b2b", "0x0fc936e564c2c8c68f65745643116c3fdb10b5181ae44d2de5bb1f6cec6ef3fe", "0x2583a70cd8b72a1a3e5968e2c88b95979d11cc06d4f80d2da250af7712721003", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000e714f5ed5ef689b2794f15772d4a946efa", "0x00000000000000000000000000000000002e59cd0cabcf055eeec66cce924421", "0x0000000000000000000000000000005d0309091e80086f656f824ad5ec932475", "0x00000000000000000000000000000000002066f6e6deee86ef9dea59eab0b755", "0x000000000000000000000000000000c8e88bb1d6ec1fc5434e7a032b1d6a5667", "0x000000000000000000000000000000000008013ea9ff1748d590acbf58e37dac", "0x000000000000000000000000000000ebc576489a7a4e90b23a50cfec950b755a", "0x00000000000000000000000000000000001874d0e5f8c341fd0b5e3ddd81eaa1"] public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] -verification_key = ["0x0000000000000000000000000000000000000000000000000000000000000040", "0x0000000000000000000000000000000000000000000000000000000000000011", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000003", "0x0000000000000000000000000000000000000000000000000000000000000004", "0x0000000000000000000000000000000000000000000000000000000000000005", "0x0000000000000000000000000000000000000000000000000000000000000006", 
"0x0000000000000000000000000000000000000000000000000000000000000007", "0x0000000000000000000000000000000000000000000000000000000000000008", "0x0000000000000000000000000000000000000000000000000000000000000009", "0x000000000000000000000000000000000000000000000000000000000000000a", "0x000000000000000000000000000000000000000000000000000000000000000b", "0x000000000000000000000000000000000000000000000000000000000000000c", "0x000000000000000000000000000000000000000000000000000000000000000d", "0x000000000000000000000000000000000000000000000000000000000000000e", "0x000000000000000000000000000000000000000000000000000000000000000f", "0x0000000000000000000000000000000000000000000000000000000000000010", "0x00000000000000000000000000000060e430ad1c23bfcf3514323aae3f206e84", "0x00000000000000000000000000000000001b5c3ff4c2458d8f481b1c068f27ae", "0x000000000000000000000000000000bb510ab2112def34980e4fc6998ad9dd16", "0x00000000000000000000000000000000000576e7c105b43e061e13cb877fefe1", "0x000000000000000000000000000000ced074785d11857b065d8199e6669a601c", "0x00000000000000000000000000000000000053b48a4098c1c0ae268f273952f7", "0x000000000000000000000000000000d1d4b26e941db8168cee8f6de548ae0fd8", "0x00000000000000000000000000000000001a9adf5a6dadc3d948bb61dfd63f4c", "0x0000000000000000000000000000009ce1faac6f8de6ebb18f1db17372c82ad5", "0x00000000000000000000000000000000002002681bb417184b2df070a16a3858", "0x000000000000000000000000000000161baa651a8092e0e84725594de5aba511", "0x00000000000000000000000000000000000be0064399c2a1efff9eb0cdcb2223", "0x0000000000000000000000000000008673be6fd1bdbe980a29d8c1ded54381e7", "0x000000000000000000000000000000000008a5158a7d9648cf1d234524c9fa0c", "0x0000000000000000000000000000002b4fce6e4b1c72062b296d49bca2aa4130", "0x00000000000000000000000000000000002e45a9eff4b6769e55fb710cded44f", "0x00000000000000000000000000000072b85bf733758b76bcf97333efb85a23e3", "0x000000000000000000000000000000000017da0ea508994fc82862715e4b5592", "0x00000000000000000000000000000094fa74695cf058dba8ff35aec95456c6c3", "0x0000000000000000000000000000000000211acddb851061c24b8f159e832bd1", "0x000000000000000000000000000000303b5e5c531384b9a792e11702ad3bcab0", "0x00000000000000000000000000000000000d336dff51a60b8833d5d7f6d4314c", "0x0000000000000000000000000000009f825dde88092070747180d581c342444a", "0x0000000000000000000000000000000000237fbd6511a03cca8cac01b555fe01", "0x0000000000000000000000000000007c313205159495df6d8de292079a4844ff", "0x000000000000000000000000000000000018facdfc468530dd45e8f7a1d38ce9", "0x0000000000000000000000000000000d1ce33446fc3dc4ab40ca38d92dac74e1", "0x00000000000000000000000000000000000852d8e3e0e8f4435af3e94222688b", "0x0000000000000000000000000000006c04ee19ec1dfec87ed47d6d04aa158de2", "0x000000000000000000000000000000000013240f97a584b45184c8ec31319b5f", "0x000000000000000000000000000000cefb5d240b07ceb4be26ea429b6dc9d9e0", "0x00000000000000000000000000000000002dad22022121d689f57fb38ca21349", "0x000000000000000000000000000000c9f189f2a91aeb664ce376d8b157ba98f8", "0x00000000000000000000000000000000002531a51ad54f124d58094b219818d2", "0x000000000000000000000000000000ef1e6db71809307f677677e62b4163f556", "0x0000000000000000000000000000000000272da4396fb2a7ee0638b9140e523d", "0x0000000000000000000000000000002e54c0244a7732c87bc4712a76dd8c83fb", "0x000000000000000000000000000000000007db77b3e04b7eba9643da57cbbe4d", "0x000000000000000000000000000000e0dfe1ddd7f74ae0d636c910c3e85830d8", "0x00000000000000000000000000000000000466fa9b57ec4664abd1505b490862", 
"0x0000000000000000000000000000009ee55ae8a32fe5384c79907067cc27192e", "0x00000000000000000000000000000000000799d0e465cec07ecb5238c854e830", "0x0000000000000000000000000000001d5910ad361e76e1c241247a823733c39f", "0x00000000000000000000000000000000002b03f2ccf7507564da2e6678bef8fe", "0x000000000000000000000000000000ee40d90bea71fba7a412dd61fcf34e8ceb", "0x0000000000000000000000000000000000140b0936c323fd2471155617b6af56", "0x0000000000000000000000000000002b90071823185c5ff8e440fd3d73b6fefc", "0x00000000000000000000000000000000002b6c10790a5f6631c87d652e059df4", "0x00000000000000000000000000000029a17181c7934fc3fdbd352eac5cb521b9", "0x00000000000000000000000000000000001f497cbf5284ff29a2d336e5991999", "0x000000000000000000000000000000072bd9c0c6beda1fdee6d4ff0432ba9e1b", "0x000000000000000000000000000000000013ea38a0bd2aa751a490a724fac818", "0x000000000000000000000000000000c599f63dcd3edd49f08ae5c3141c1e3493", "0x00000000000000000000000000000000002bdb36be0bea09950dd32a8ccf6fbc", "0x00000000000000000000000000000047f27f29724e7f19eba0340256a0bd4b7d", "0x00000000000000000000000000000000001c1c5ccf87a962129ca785f8f35120", "0x000000000000000000000000000000c5c71efdae00679bbe4a95096e012b1817", "0x000000000000000000000000000000000017a365de041e317817d0135f2b48e0", "0x0000000000000000000000000000008ae711ac402f7848d719c93a89ba8d39f1", "0x00000000000000000000000000000000002b6fb40ed8a1935226f4f9786a0499", "0x0000000000000000000000000000002f03a71501d83de1da5715a4e9462d6198", "0x00000000000000000000000000000000001644064443b8546f48eae693af47b8", "0x00000000000000000000000000000083763ab1b6e8fe269b2fe4c7b9c448c08d", "0x000000000000000000000000000000000021d7cc18c59676a8eeb47c0111c251", "0x000000000000000000000000000000b5f937153073e03ea7d51a996e0ebc2e6b", "0x000000000000000000000000000000000011ddd0e26457373eb06e0493177672", "0x000000000000000000000000000000c5f6eb9f6fc8fa99811a4a88c74a6d018b", "0x000000000000000000000000000000000025bcd07a0732c123567834f5109558", "0x000000000000000000000000000000aeb08a0b1a4442189448b4e97490568146", "0x000000000000000000000000000000000002a1744e4771705536a88f07e0f90f", "0x000000000000000000000000000000b938568293bd0724b0ea76c2ec34c4a829", "0x0000000000000000000000000000000000053296e8f3b9ad3af877dfa9c7c2a7", "0x000000000000000000000000000000f0ca1db6323996eba26bdc86dafef9d10b", "0x00000000000000000000000000000000001441a46c58af03d5645d52721d956a", "0x0000000000000000000000000000008bbf8f884013c66c28ba09c2fbd573b656", "0x0000000000000000000000000000000000206c391ca06fac27d1908e94570243", "0x0000000000000000000000000000002d4f5aaed88ba4f79612d53b804ca8f194", "0x00000000000000000000000000000000001674011c96392df08970fa6b7b4cb8", "0x0000000000000000000000000000009f88297c1729d76c4d9306853598c91325", "0x0000000000000000000000000000000000256f51adfcacc3c1e340be4d32d3e9", "0x0000000000000000000000000000000ab9955eec0d74eb799afed2a802b24d75", "0x00000000000000000000000000000000001fcbe43ea105b30d36ed0b21b03411", "0x000000000000000000000000000000d66b1d5433f1aa5305cd1edce7c22de466", "0x00000000000000000000000000000000002331546a256b8a3b751956806680d4", "0x000000000000000000000000000000292b512a940306c9b122a0feb4e9780af9", "0x00000000000000000000000000000000000904e02f0334f3a3f72e65ce82f34b", "0x000000000000000000000000000000778103226eff8f576eba0a0a2ffa134d57", "0x000000000000000000000000000000000001fe54f93991aa61a056f75e5137b0", "0x00000000000000000000000000000089058e539eb10c10fa13dd50e39517555d", "0x000000000000000000000000000000000009e91383ce6118cef78093d2c550fc", 
"0x000000000000000000000000000000d95e2e1bf12461b508cc8d63fee4a8613d", "0x00000000000000000000000000000000000ab28965260d651673541f10f2e02f", "0x0000000000000000000000000000008f6f361c7fe163277c1022dacf2a0f307b", "0x00000000000000000000000000000000001e47e6531c4ec673e9c18a9062c1df", "0x000000000000000000000000000000e55ba7b44de2c0927d4df995625325c68c", "0x000000000000000000000000000000000005d3e2113696d710431294256dab1a", "0x000000000000000000000000000000150b1462a7b440ef041dc9e7af7e1d1f15", "0x000000000000000000000000000000000017b03e1a64b4988e05ca4291f3ce6a", "0x0000000000000000000000000000008d494ccfb05451bea33fe43623c45a9d7b", "0x000000000000000000000000000000000001ed433638a57a66f8205daea09e50", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000f68b70e0e4b0cb9e2c7bd64fa4b75b32dd", "0x00000000000000000000000000000000001bcedd9106bdd4e13e0b751c672a83", "0x00000000000000000000000000000042fd857eb4bf620db08b0e181807df9f59", "0x00000000000000000000000000000000001ccfa89524772b4bd5b6bf6741d71f"] +verification_key = ["0x0000000000000000000000000000000000000000000000000000000000000040", "0x0000000000000000000000000000000000000000000000000000000000000011", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000003", "0x0000000000000000000000000000000000000000000000000000000000000004", "0x0000000000000000000000000000000000000000000000000000000000000005", "0x0000000000000000000000000000000000000000000000000000000000000006", "0x0000000000000000000000000000000000000000000000000000000000000007", "0x0000000000000000000000000000000000000000000000000000000000000008", "0x0000000000000000000000000000000000000000000000000000000000000009", "0x000000000000000000000000000000000000000000000000000000000000000a", "0x000000000000000000000000000000000000000000000000000000000000000b", "0x000000000000000000000000000000000000000000000000000000000000000c", "0x000000000000000000000000000000000000000000000000000000000000000d", "0x000000000000000000000000000000000000000000000000000000000000000e", "0x000000000000000000000000000000000000000000000000000000000000000f", "0x0000000000000000000000000000000000000000000000000000000000000010", "0x000000000000000000000000000000ea6f0ffc3623f62df87b74c4dcf518fd39", "0x0000000000000000000000000000000000173d770a8f5cb72b5b7df483f41e34", "0x00000000000000000000000000000078cbd4d4ee5fbdc3e18d02a1f70cbb5d91", "0x0000000000000000000000000000000000021f87cdc6ab8ffcec1ad655c4957e", "0x000000000000000000000000000000becb2c582944a105e60c8137244c7fbe50", "0x00000000000000000000000000000000001131c406ad5519e89c0595a6eb090a", "0x000000000000000000000000000000835914e338d4388cd6b377c6f29c3ae8d7", "0x0000000000000000000000000000000000090349ad561dca142627a600ee4277", "0x000000000000000000000000000000974f2b1de449db7000391c8aec96756b19", "0x00000000000000000000000000000000002073a05050310929610ff7d081bfef", "0x000000000000000000000000000000e1ea79efc9dc97e61ef4b380bf93f5d160", "0x00000000000000000000000000000000002e70a9d847019e5f99be0ce5849ef9", 
"0x0000000000000000000000000000003181bcdd733575d173aeb749a241457fee", "0x00000000000000000000000000000000002a99429ae3e7fc810959e114a4935e", "0x000000000000000000000000000000638591cb7a86b050e9c915fdf75fc0188d", "0x00000000000000000000000000000000000eec7be2a69ddc8b3c3a39729f2ff1", "0x000000000000000000000000000000a6e81418467d38b93104d402dea2d642c1", "0x00000000000000000000000000000000002497efe6b96c26dc4b5e015c627530", "0x000000000000000000000000000000f9ac5e0111dc36f9cc1e9a9905235a374d", "0x0000000000000000000000000000000000127f146a5df1de7c90d1a4a9a373fa", "0x000000000000000000000000000000bb0e4ad8256760164511649027827b2468", "0x00000000000000000000000000000000002c2b05a49b25c71848061f721b0e52", "0x000000000000000000000000000000d2e0cfa24df62aeb0b250c6b8ce4bce249", "0x0000000000000000000000000000000000177cfc8a9f03ba7a0fdf0b99899778", "0x000000000000000000000000000000bc3661650d53f9b24d923d8f404cb0bbc9", "0x00000000000000000000000000000000000c4032c3079594eb75a8449d3d5ce8", "0x00000000000000000000000000000054eb5fe796a0ca89441369b7c24301f851", "0x00000000000000000000000000000000001084d709650356d40f0158fd6da81f", "0x000000000000000000000000000000b59bdbe49ff8208baeb13b8cd40c72281f", "0x0000000000000000000000000000000000212700d6e138f55e8fe4d41aca557f", "0x000000000000000000000000000000d0d87076eba438e31effd2ea0f1b0e45d2", "0x00000000000000000000000000000000000628be7db7fa2e49440a4867f3c5c8", "0x000000000000000000000000000000c9f189f2a91aeb664ce376d8b157ba98f8", "0x00000000000000000000000000000000002531a51ad54f124d58094b219818d2", "0x000000000000000000000000000000ef1e6db71809307f677677e62b4163f556", "0x0000000000000000000000000000000000272da4396fb2a7ee0638b9140e523d", "0x0000000000000000000000000000002e54c0244a7732c87bc4712a76dd8c83fb", "0x000000000000000000000000000000000007db77b3e04b7eba9643da57cbbe4d", "0x000000000000000000000000000000e0dfe1ddd7f74ae0d636c910c3e85830d8", "0x00000000000000000000000000000000000466fa9b57ec4664abd1505b490862", "0x000000000000000000000000000000677bd789aa094b735f2abf3d9cfd032188", "0x0000000000000000000000000000000000236e982930a9984fd08a3edddf25a1", "0x000000000000000000000000000000c07a966aebd836d8a800f54b1c3bb5c36f", "0x00000000000000000000000000000000002ddf6475059b2e9451db5b8d857bff", "0x000000000000000000000000000000ee40d90bea71fba7a412dd61fcf34e8ceb", "0x0000000000000000000000000000000000140b0936c323fd2471155617b6af56", "0x0000000000000000000000000000002b90071823185c5ff8e440fd3d73b6fefc", "0x00000000000000000000000000000000002b6c10790a5f6631c87d652e059df4", "0x00000000000000000000000000000029a17181c7934fc3fdbd352eac5cb521b9", "0x00000000000000000000000000000000001f497cbf5284ff29a2d336e5991999", "0x000000000000000000000000000000072bd9c0c6beda1fdee6d4ff0432ba9e1b", "0x000000000000000000000000000000000013ea38a0bd2aa751a490a724fac818", "0x00000000000000000000000000000024894ab0070e4f72cf1fffafa8b39307c4", "0x000000000000000000000000000000000019f564c37de48cf28b308da5f2e2f8", "0x000000000000000000000000000000a08a6899213caaa89baca51a3598a07b99", "0x00000000000000000000000000000000000b5e594c4ae54f867a3bd8973689d8", "0x000000000000000000000000000000a3ba29ef6ca86e9f0d01acd1abac8ccd34", "0x0000000000000000000000000000000000155cad00fb78a200a9a95a2a220ce3", "0x00000000000000000000000000000038a5f38d32ca28822645e2c790fc26f029", "0x00000000000000000000000000000000000906f90f083e6f1121af20a5067a04", "0x000000000000000000000000000000d53e76dd5279fe31335ed32d80dcc5ca31", "0x00000000000000000000000000000000002cf52886dd6e8f1fd0adb60005c606", 
"0x0000000000000000000000000000002ddcbd934e2f9c7ff98d2e55c070cc2fe2", "0x000000000000000000000000000000000028db51dbb15364210353420c11faf9", "0x000000000000000000000000000000397594051f075d214bbf3f48b71c63f4b8", "0x0000000000000000000000000000000000047b66ca1874c32782c036fb9aefeb", "0x0000000000000000000000000000009e9fba95bbfa0e8726ec97a6d808caa2d4", "0x000000000000000000000000000000000004ade9dfa460ee9d37d6ccb02c1e20", "0x0000000000000000000000000000002b8e95ee572dc79770aa90657517801faf", "0x00000000000000000000000000000000001bb5b68de1c268c3d95913e3147c15", "0x0000000000000000000000000000007d6628253cf6c468d2fb28f77f36b15831", "0x000000000000000000000000000000000014d3535ebb73ddbddcfd2bbeb5c12d", "0x0000000000000000000000000000005f0f2c1ce2daf455b3f3d0098e1a74774c", "0x00000000000000000000000000000000000f65e16cb15a76fc408beabbd219e3", "0x000000000000000000000000000000a2f6f792ce9d9d148a330fabf682a0d0b7", "0x00000000000000000000000000000000000a243a33df583888f21721cfccf6c0", "0x000000000000000000000000000000cb6d5d306b6754ba07e397736489c614e2", "0x000000000000000000000000000000000005caedb54aad66bad31ff224151ec3", "0x000000000000000000000000000000f1e5fcf49bc8ac16b03b30c861fad85fe6", "0x00000000000000000000000000000000001fef824be2fa25332c2bca21d3126b", "0x0000000000000000000000000000000ab9955eec0d74eb799afed2a802b24d75", "0x00000000000000000000000000000000001fcbe43ea105b30d36ed0b21b03411", "0x000000000000000000000000000000d66b1d5433f1aa5305cd1edce7c22de466", "0x00000000000000000000000000000000002331546a256b8a3b751956806680d4", "0x0000000000000000000000000000002b1c1c2637db3f8fecd9d8bb38442cc468", "0x00000000000000000000000000000000000450f8716810dff987300c3bc10a89", "0x0000000000000000000000000000005db2bf83f8a194086a4cca39916b578faf", "0x000000000000000000000000000000000010005567f9eb3d3a97098baa0d71c6", "0x00000000000000000000000000000031e12e1ce3a444583203ea04c16ec69eb2", "0x0000000000000000000000000000000000103bcf2cf468d53c71d57b5c0ab312", "0x0000000000000000000000000000004207277f4116e0af5a9268b38a5d34910b", "0x00000000000000000000000000000000000c5d6e7a8b0b14d4ed8f51217ae8af", "0x00000000000000000000000000000083bc4ff48edd6aa66759994187f28dd173", "0x000000000000000000000000000000000017cb85a0f539b780ee6319982f5ba4", "0x00000000000000000000000000000012fb642de7b51efcce75a189bdf598f3b8", "0x000000000000000000000000000000000026fa70b6c942ddd3700064b48ba1ee", "0x000000000000000000000000000000eb0ab515191143e5a3c8bd587526486628", "0x0000000000000000000000000000000000132b76a71278e567595f3aaf837a72", "0x0000000000000000000000000000002c37ccc495848c2887f98bfbaca776ca39", "0x00000000000000000000000000000000002c6b2a0de0a3fefdfc4fb4f3b8381d", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000f68b70e0e4b0cb9e2c7bd64fa4b75b32dd", "0x00000000000000000000000000000000001bcedd9106bdd4e13e0b751c672a83", "0x00000000000000000000000000000042fd857eb4bf620db08b0e181807df9f59", "0x00000000000000000000000000000000001ccfa89524772b4bd5b6bf6741d71f"] diff --git a/noir/noir-repo/test_programs/execution_success/verify_rollup_honk_proof/Prover.toml b/noir/noir-repo/test_programs/execution_success/verify_rollup_honk_proof/Prover.toml index 4c4591c2f4de..559149c7ec69 100644 --- a/noir/noir-repo/test_programs/execution_success/verify_rollup_honk_proof/Prover.toml +++ 
b/noir/noir-repo/test_programs/execution_success/verify_rollup_honk_proof/Prover.toml @@ -1,4 +1,4 @@ key_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" -proof = ["0x0000000000000000000000000000000000000000000000000000000000001000", "0x000000000000000000000000000000000000000000000000000000000000001b", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf", "0x00000000000000000000000000000000000000000000000b75c020998797da78", "0x0000000000000000000000000000000000000000000000005a107acb64952eca", "0x000000000000000000000000000000000000000000000000000031e97a575e9d", "0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4", "0x00000000000000000000000000000000000000000000000c410db10a01750aeb", "0x00000000000000000000000000000000000000000000000d722669117f9758a4", "0x000000000000000000000000000000000000000000000000000178cbf4206471", "0x000000000000000000000000000000000000000000000000e91b8a11e7842c38", "0x000000000000000000000000000000000000000000000007fd51009034b3357f", "0x000000000000000000000000000000000000000000000009889939f81e9c7402", "0x0000000000000000000000000000000000000000000000000000f94656a2ca48", "0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f", "0x0000000000000000000000000000000000000000000000093fe27776f50224bd", "0x000000000000000000000000000000000000000000000004a0c80c0da527a081", "0x0000000000000000000000000000000000000000000000000001b52c2020d746", "0x00000000000000000000000000000000000000000000000198351784bcb51220", "0x0000000000000000000000000000000000000000000000010d0f8f1f7113655e", "0x000000000000000000000000000000000000000000000004112bf58ccdc9d1a6", "0x0000000000000000000000000000000000000000000000000002a60f71be5fae", "0x00000000000000000000000000000000000000000000000c277ca8f3f566b58e", "0x00000000000000000000000000000000000000000000000eb132e7fdc58ac690", "0x000000000000000000000000000000000000000000000009f620b17904c216fc", "0x0000000000000000000000000000000000000000000000000000418193e46832", "0x16cf7cc6f21f642a8118cfa0dc61f2537a91ecacc5f3668eabd665cc680e03fb", "0x18158b8584791bfbe5ee10a85fdd34c00d56a8a5d88129ffe078746088491b46", "0x000000000000000000000000000000f31a9f28cd3a9e1f6075aaf9ec8727208c", "0x00000000000000000000000000000000001233f898e5172cd4bc911b998709fe", "0x000000000000000000000000000000ca718983bbd788cb6bb027395398bb2eb4", "0x000000000000000000000000000000000005b1206cd018c3ccdef2d56d001872", "0x00000000000000000000000000000078e6f7d3378f422821fb4d4c068b216657", "0x000000000000000000000000000000000003b440ded66529bee75ef104f807dc", "0x000000000000000000000000000000803d9477bd05cdc528ae79ac9ac3063b59", "0x00000000000000000000000000000000001fbc07b498117324c534a1959454de", "0x000000000000000000000000000000186d9a95a47ff0ad2b0140d0d4617c19a2", "0x00000000000000000000000000000000001ae8fbb60548a48b872fc55c81ae2a", "0x0000000000000000000000000000003b01e2f9a4875516f4efaa4e8927193d77", "0x00000000000000000000000000000000001e49d5f5e233972d0ce5bd6affcbd2", "0x0000000000000000000000000000002a171861b26b214d20a7cb4d428a270b19", "0x000000000000000000000000000000000006d220e2a01128ecd1358cfd02bb10", "0x000000000000000000000000000000e45bf2a5a87cf3f64798f8279a77be8f0b", "0x00000000000000000000000000000000001fc24780eee3fa3aedc08349ec0a49", "0x0000000000000000000000000000002a171861b26b214d20a7cb4d428a270b19", "0x000000000000000000000000000000000006d220e2a01128ecd1358cfd02bb10", "0x000000000000000000000000000000e45bf2a5a87cf3f64798f8279a77be8f0b", 
"0x00000000000000000000000000000000001fc24780eee3fa3aedc08349ec0a49", "0x0000000000000000000000000000001d37b931273e72178e88ce50335e2dc83e", "0x00000000000000000000000000000000000511d63534ad2ccafc77415b1a29b0", "0x000000000000000000000000000000e7d019c73ebbc6451c01ac52f156ecf865", "0x0000000000000000000000000000000000268c3d97d581e919b815696da25cc3", "0x0000000000000000000000000000000d9599859b3bdcdb478812ee25d2f0eeee", "0x000000000000000000000000000000000026c9e7abbb73875fddb1ac55cdf1e2", "0x00000000000000000000000000000033c989fdb71acf8d4184c7a0d321e08d73", "0x00000000000000000000000000000000001c175d26699767e4c519b04251b5ee", "0x0000000000000000000000000000000f92946b5ba26bb09b66d65834d2f4cf5b", "0x000000000000000000000000000000000012b63a80baf4d982c5122a471bc42a", "0x0000000000000000000000000000006fa3c86e91550801f284938bfa104ca096", "0x00000000000000000000000000000000002c51726c2563a7b5d9dc69ea0263fc", "0x306287f56c48effd2f9699a46db9267c91263b6de06d7b7be9cfa4662dab37dd", "0x0001c67d74e8b02c88b9ac1213c831e0970dacda994bf5155a12512dc254c824", "0x1925a443b3d49b3a0d80d8ca41c6f32687b5dfaaa8f21a65517bc5db6bd1e47b", "0x251a4faf274456bbe0c7f69fa8f67e5a1c079622d6233f636500dfbcf9b621cb", "0x0e6a61c9e1bafd7067d9830443de3c05afc3dd79f45a195de5a90732a0868f45", "0x0a7245508b1615f792270dba05f387ada642988a9584ffb4030c4a90c7efcf12", "0x15863f2959c9d468cf756679c24ad8d8aeca54121684681e546e5e40a0b6ceb0", "0x0404eef3cb7467c0065e2ed7525a88d3d9b8d62fe0c441ed5fc2439722641f02", "0x08f4ec100b001fba3c773208d57cc0b6416ca1341e02129e717d93fc2e3e40c2", "0x0a49765410098eb75f5e725f163dbfa492e91b739f0cbcd6ac8a84898d0c4b55", "0x191eec3f8f395fb4c4fea8cd5e726d5a01b8f7de6d73b92af658a9826982effa", "0x27dadf71f0d9b4b09eb744718d20bed1185bf6fe9074c0e38a3574a9fef10468", "0x000daf66f8c35c1d3b887a3f974bde18e804ac7768fea8257518ae1303d441d8", "0x1622a82d0d30d00a9a5df5e5448ce64e575385413872f601a5d3f507eb07004a", "0x2d6191ae1a2657e766345dd4e60c43db5043bbda7b159b768dffa0371055901a", "0x2544bbdd4ce4b098eb4feddcaa7aa95a69f418f205d3e6716cf20d6d607b31fc", "0x27539990c10c0e5d05e07efa3c268354576cb00e2e50d27ae72204baa46a04f2", "0x1bf39a47211a4b4224dc726a437ea853097dfc7ada0f3e2d4e3f4d3a7edd1bc4", "0x15759bd32285dc0c51e46a0eeea80f755bfa77e18a65c62e241a2c7fbb8370b8", "0x2616a41128e14a226c459a93a19b13de581caa4f75c6368dbf7257a323fc84b9", "0x1bc5b5a9238571c7efeb9e992e4b410e2affdf015518126bc6b23ca2986d02c6", "0x08a03b8831139f22a329f22ab8a87eed1c9313db7d993718c6b1c0735e66a719", "0x2e3e957d6c4b250f0cdbdd00ada94bb53dc70104196176e9abe473647aa86a09", "0x1b6c77870100cd99fa8a85db4bdcab5da5c3505704cfbb2bda11463d25b6eea6", "0x00e1e81ba9dfbd1b7cd9192d5f0218c2eff9bf6f4dd859e07bdb163633ebf310", "0x2e7d519ecb078293b55adc1f9078e01f375a97dc2374725c9b178bb1df019aea", "0x0af9c92343c56476189c42b4c578a170c85111555b3fddaa74666a0f7f860ec1", "0x213b21f0891215c865fa84e0c1744765e3c0a36b5062c39e46b95704f478f746", "0x22f71989fdc01723d4e1cc854cbd9b482af30ad642f3a5dad765e016264bfd95", "0x0de2e531eab5853f2d7759b3e258fc0d5860d10678331b00d2612c4f794538e9", "0x26d4d98c23dccd50cb40b89ea899fcc23cd06e141582932d0acf1915c85a1c6c", "0x27094a91f28e55036e178acd6adeec53adb0a4b93e8365a57f237ca9d2e94405", "0x019662c5cbf555a9b96b65742647426a33c97b595e8516c433e742b49e22cabb", "0x2d32a500f4e75b3377432fb7febb00bf1fa6306a621f8801306ab293b12b370a", "0x2984ccbc3fa69b375e758005d2e668619e71c84e007fc9361305d17c046c18b6", "0x15a8c7880db4f1e48ba2f406d514eaa76df3c8e3db2964808f38e59ab63268e0", "0x014f53ec2db52091b3a6a2d445e4bd93ac9f5ed122ba701c5682c26c1f5bc4cb", 
"0x1cd8668cba6d841aaad54684e3613db68dfd0a57258f5e6d7207ca5aa91f8f7c", "0x2c198f396bd3785539b5d9b8ec33da835fae84caac528b587dbfa5d380378d28", "0x1912c183ff1b57a17d1cf630346d4547566384ab3ca09602bf98d83ead40ef52", "0x23b691d4c3445720248facd6dc688d2afa13277ed408405e833a053b7543bea3", "0x275b0dfd8d39f50c6d672e02e10a775102c54674ae0eeda61acca7bd339fe3d7", "0x1ec7eaa32c32988df48890320c31e49a6d8e1db47fe19aa936433045a6b20d3e", "0x07be4532de3a24a63bee613d6b42cbd294bcf8b0fa401d9bf838f7c5a6fd7a11", "0x030d7505ece9e71c5e2fc1717cdf463c41f2be0e368bb39e8d9337652e4848c9", "0x13759dca280cbe036507810ef0baf9d02389ce0772200ceba30957c4280504fb", "0x155b5775cf065eca5d4791b76503cf6864b1f089e09658f8d73bce95aad8b63d", "0x03408af42e89123440902eee1b218ac54893a05c499e544ab152f2dc4d2a7406", "0x0f095b86ec10064e8538959a22d69f1842d0cb92a8bfc9f63e9af82d9f518aa8", "0x2ecff3efe9ca5247b99ee0ff636cb7c667ca0752ca3bd7e019750c6a0824c05e", "0x15a1eb4c9f778792e854b14a65f461491346eb3b5afbc0f47a04f37df950c0c7", "0x1c45fe9bd5a8a577e734667d5d2f75aaf042288faadd45840c15fda952d683e6", "0x0b1916e811028364d7ab4726ce9ee4158df2f3d7ec6b6de78b5dc5eb0ca33271", "0x26198efbc197e8b8995c6663dc22091aa79a66b43551576aed3bfac59a9a633c", "0x1c600b24660c474a5e7d065773c8bf46861b86bfd35263b7dedb6844cee0c2e0", "0x0f70705ffbc0c80c20feffa4040d88bda32bce28c68cae7f09f1298fa4e918b3", "0x26a5e8599e18ca537978b64c6e36d14aaaf77b1e73969ecc89d74f72c5141a91", "0x0d6d63868e3e7095a8c26da8ecd37e232f9fc59096db96f17ed3934ecba34478", "0x2193d7b12212843e643dbbdcf3cedb55fa67bac5e65a2ce8583ea507c04cce6c", "0x2141f72eb19fa0b2490400bf9b436c73f14c60e77a16138ae4b0d39629f8ed7b", "0x2e25c471d729c4742747fe52fbaf000d3b958480ea93141abbc6bb389ad854c7", "0x0b2299402112e0205f74bb313786994ca83c7bd204cca59e6dfa3b147c21203b", "0x159c2236da8acc75769bb8138dce9c6ea319651e9abed3c12ddd8f554e292486", "0x03b25006c2280965d306ba2a24c81a09d72d2b0c5f7f214221c76f6501efc911", "0x241850ae454b281ca5be4b064855da6481ce150485770e9b273de8a29a64fbd4", "0x2d1b2592def2035dacc6a50dd657994d98b669c13c8c4fa7a5de32bb1ff2afee", "0x18d3529fbe8fb1864ea8f336dc7c3088c229cfa3221d97b86b7399e3580f25be", "0x0cbc1780a299eed3dec9a3fc932606e76a56a9e6007e4818eefb3acc0c97eff1", "0x2b7951293695c48e3a040fd16efd03e6d2805a2e7d14863b12d44e2f0be6e548", "0x2e7279731396111d824de2d6fd4daf6bde38241f466774c888cab684af0310c9", "0x02787750cc8668538916ed23c77ef9f29a69904a5160910786c5e150c842fc7d", "0x0eecf3012294c565cea1737dcdaa677702e519c37394587bfb67f648712da35d", "0x04390e489dd2324235647de594b8fa12b9265f02a085e7ef528b0cde68b0c785", "0x12c1b72615ed431b56834d7427e04977c1ef53a7a8e5c8bb709f6930faa43ac9", "0x0ff305bdb025415d21cf771039088b787ea6b4f673fb88baf0bac451fd1e4b92", "0x045142ff5fe9c320cad25dc9a3cafa4668636c80228abecb46eb54c820eb2e5e", "0x2a0d0e0bcbaf05661431062ae2d2bd21fae75caa81fe7731051b64e6fbbce173", "0x23c761d849f4fabddd42918bdb9acd36381667f3ea3610c7a1599ba6096ca70d", "0x24091599cb2e211ed9d108b9197a3827507232941dc23113dbcee7e3459ccbd9", "0x0302343a788ded8936bb590a8eefd4aec4d704c2c6c24d70a60e4870c75196a1", "0x291b653f0b3b851572d7c0db159f88659326d61d1bd265bb08556e0e2156b753", "0x2e6683095e6a8ecff419b9c1bda2fca70f2686d7458230fcdb0b061469f91dea", "0x0856b8f721b54e425fe9aa589ba710f2845e72fc351793738df49c40967d716f", "0x12ae3e5184a7fed794ac452f94d51221ba134cf35f35179b66349bc4f426f33f", "0x23c35f99e0892be80095db2623042be2742c2b138b1df1efe03bd899dd35937d", "0x036bd6bd2b5b631e928f002e91ca13c55c24ad1b915839582709d0aabb1a3082", "0x1fb1cfc6f64e0b2e0a1d9bafc1702a468197f4a2fe41b233e04203dd699131fb", 
"0x1cfa0be2485c7cc8baee2f61c8059b887b581e4eab15c23941d226e3c2c849e1", "0x1fb8e18786782eb1842c6f1f4eb4cf22e1b8c5f310a6346db75735387f80013d", "0x20921175a4d50b6f672caffce2f12040e6e3bc792c4f38cb51b477fa19f12364", "0x15b7206684270facba2d70bf47ffa4f7d449f65cff70f8021b6a06598d60f4ea", "0x06932a178b45439cdc4e2c0313aaa31c29f561dea1e200d2347e82e1dcb2985d", "0x1486c1622fec7d6e0f7f08b4185daa992d0515e1e7ff782944cda22ff0df6065", "0x15ef8cf4627592aabc6efc812e210d4dfc4da02e41e53320b61364bd21deceed", "0x0a7974b21e7412c8ebe6c76217e74810a2b81eeda730a263ac64fb3bba2ec025", "0x046f771fc1399865a01e60419d2c969279fe1675aa0eeeb6a439e23a46813c89", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x166c19d70d8289a27c0eb973c525449ff7c70f23b32c38664848d4e4f14c955e", "0x22a2a920418bc6b0e16bd4b1696403b50d767a2339f788c62d3501dfd6d49554", "0x1c98c74062a05e52635b8cd160f7a76ec9045086de6fd29a2fdb2d51263bcc79", "0x1a3fc6e17e3a842c75bec9c41eeeb86751d10593ac2f69e6613c6608ce1e81f5", "0x21b27032c51038cf5487c60cf185d961fdec079677d267de3584d0b10c725240", "0x2de258ec1715ebc899390c9b05df9d65bd4757b569e5089afe6fc5c82c644b6b", "0x17061178ce0a7677558f33c2d6e4df02abb874f0ce7c22ad6a4a970161e0067f", "0x0169fa33066295974bb138801aacb5394816f5e05f242777d01b61b26e9eb2a7", "0x083eb97ea0e9791b11e64d920061cdff4622e9667a1760a80daa0dccfcafa042", "0x14b2070214354b2a8d7bd85efd042b2694677e41f5eb086f661df62be9d2409b", "0x2ea8ff97b1f59d4140c261964679111a963887587083e2443fa501f5e04245a1", "0x0ea3f4b3a74a29e3b5915426dff09eecf626ccd235f440a0adb67dbee38b8a18", "0x0898eaf4c2a5b4fd444e4f6260ad50e8df5ee1122cfb24c92dc606e4c617e076", 
"0x2e999b837edd61563dd94622763ba78468dab492c60829b55ad0b7095657826b", "0x180b65b148ae698c1018f24bfb49d4a700f67c47743e00efd34664e55ef632bc", "0x1eb2e1145982710a3ec233db72a0e4503883915b5e40ca5176b690af47c9ad87", "0x0c02c12a4089148c7a6e04277d9fea54915a88bc9f8d82cf4d58678a134e0124", "0x1715e74ad1c0bb740556e1e0227ce90e02310f5b033d5432559413a654f66fc2", "0x2de1d99911bcca2220e64331bc24481772f6ce35eebb082d63b3eab29a9b757b", "0x29327816bcf7c5c7ab28e339320f894dde297645c27d0aa0cee46a5c40471d0b", "0x07de8fabb98b1082d45824659e4b0ea62df3b7b76f26edfd9d67105a04ad75f8", "0x2e5bea4efb41fc22121db6fe400d91132968c5d898c14a0568610578fb7d9d57", "0x2f12a17b5d6bc006672bf388fd786fe2a0b3a15c380147fa4b7bd2b695c8101f", "0x26f724ff2a50863698db3dcfff0886edd8983d0c79fb5c1e5f591c5da66b302e", "0x1d1c4729b0d36cbcd59d69128fe6107a5d2280cd4ac31cfabad0262b51f00fe3", "0x14b18cd70319121bbc9cad9171e55d538864f9101cdd171e44b4058519cef3f9", "0x079a0b9d12cdf0cfb34a3708a4850a998cbd9b5ec62ce3f1103adac2fdd8a820", "0x12304d32e34962cf30b9955972c350e359d6bf62d0de9c98c72d9f0ab51131bd", "0x1b6ff02390ab39e40e65e045040f6e777ebfdea61902709dff743594eaf89509", "0x0e6e12ee6f19e11a543e63c4402419d0f07b424aec46eb4b909e090e57a6e6ea", "0x0e586c602754d885e91f077f9f219c1935a25c1ff5656fe5b445bff7efbef1a0", "0x116eb62fe75014d073f3526bbd97269f41064cdec2fefaab38a0acb333dd01bd", "0x1282bd7d7cbf0b806f729e331c1a7ac0c2598e2d75d1d2e0d62f3177df456b65", "0x0a85412e6b0eec538dd5038b532fb9e8e78476f1c4c677ba4e1ab310bbe40881", "0x0a85412e6b0eec538dd5038b532fb9e8e78476f1c4c677ba4e1ab310bbe40881", "0x2ffadaf1e9bf16dacff62d3ffb2d0f531da05737784be3a2d8eb61cafeed5560", "0x2002a5977ca93eb87d175fe3becb116997eac4993bbb73ec86d9326e1870bf2d", "0x1b418c821f6658d3c049194500604a449a1f7dc0d4aba87ad2bf554beefcfca5", "0x26407c0e8e64c2d127867b79cd0eef3edd6331ff3ed73d690eb748d94e08a9da", "0x2d4d4d75b30f50f2c562a046416637321909ec5bbc8c42eeb821b9326f89082a", "0x0000000000000000000000000000004f3b54655d2514081bf5b44620a2321680", "0x0000000000000000000000000000000000115472529ac9d0d7f94b6e15c6577b", "0x0000000000000000000000000000008ab9aa27023aa3fc58f408ab70b61a95b4", "0x00000000000000000000000000000000001118c892f3db5267f7d6d914edc04b", "0x0000000000000000000000000000001e8e1a870c828d9f8dddb0bdd8717a1150", "0x00000000000000000000000000000000001543c08c73ce441069251bee8a7ccf", "0x0000000000000000000000000000008215f11fa2188558ff721f8375825fb83c", "0x00000000000000000000000000000000000e31ae1d3a8332ed8cd38a8b209732", "0x000000000000000000000000000000d3ec44f7b83edc73d68c8892508d1592bd", "0x000000000000000000000000000000000006c1040c255d7b7688c31b4510f23b", "0x000000000000000000000000000000ec0d8427da7f35dedc93a42151b733feda", "0x0000000000000000000000000000000000296be1164397e24a3a95191d8f6acc", "0x0000000000000000000000000000006ef8210a3d90676f69caa1fe5d8353c226", "0x000000000000000000000000000000000024da7d17ed96fd2936e4bf381c460b", "0x000000000000000000000000000000d77f23fcadf7a6c19b7abef79268be9d30", "0x000000000000000000000000000000000007633ab01f5031fce5f16dba2b8e85", "0x0000000000000000000000000000005373fa1bb2baf156400ff4e60e6d9e84a3", "0x0000000000000000000000000000000000027e546ad562d24dae1063cbdbc5b4", "0x000000000000000000000000000000215aace588221c522bbc3d37eeb6f5273a", "0x0000000000000000000000000000000000063836cba05de3aa7813f54ed551b0", "0x000000000000000000000000000000050ae581123a9f65c92e35fd8fa9701dbc", "0x00000000000000000000000000000000002410871be237d666ed18fbaa0d4b53", "0x000000000000000000000000000000cd56ffd0cd263e30b90a521fd5b596fafe", 
"0x000000000000000000000000000000000002c79b34816830ec1e23bb1c131378", "0x000000000000000000000000000000150243fc42bde77914c155525f502b52cc", "0x000000000000000000000000000000000018bb64e2202e775ba805ed061abbdd", "0x000000000000000000000000000000d6b80a8a854fca134bb93030711f047275", "0x00000000000000000000000000000000000a87725eff75afc2d27f36b70ce951", "0x0000000000000000000000000000008af1e797628750941961d1ef9244b50f60", "0x000000000000000000000000000000000028b2290830a86f3bcd9db3a086251a", "0x00000000000000000000000000000007c3e0411ed5f2f276d2a658a9771a18a2", "0x00000000000000000000000000000000000974acd5f5847e300968d89b5a9ca9", "0x000000000000000000000000000000cb629eb69673c03619a88d48ca9ae68970", "0x00000000000000000000000000000000001d114437273293fd65b36e8321e274", "0x000000000000000000000000000000ecb2df851d702c95bd599346fb56d68e3e", "0x00000000000000000000000000000000002653d2cd543961e9e83a93387e9b96", "0x000000000000000000000000000000ead451fa7ba7e312c96a74107c27ad9736", "0x000000000000000000000000000000000011b952a737bcee1489197a273fb677", "0x0000000000000000000000000000006829212925ff54efee074c43911a3bca56", "0x00000000000000000000000000000000001fcd76b6a7065773fff04246a0dde7", "0x0000000000000000000000000000009585201bb8a8aaca49f72e3ef81fd0182b", "0x00000000000000000000000000000000002e3e3c6039ddf04d350f64019bb10e", "0x000000000000000000000000000000e4b332d5cc93d838948795e32bce4f9048", "0x00000000000000000000000000000000000bbbc875dc13137229856beeaa8f3e", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x22206fcc5c3b0898156852484d5f37f85d35d8b8aeb565697dd199d09e7a5cec", "0x19d3624ed1ab8eb7cb2c0c611f105cac54fd99cb83d3e1a3118aaded389a1962", "0x195139b81a6b92e865e582abbd4bfffb20a58dabe86663c2970374b4c332a08b", "0x217c7b444d49a26539a8f4fa6986dfd99e6c54123b3176a328f982d53c93f780", "0x2aea9a445c7855ada0acd0217ae4ae246ce32e7d3184876be8facd5d6ad7cf21", "0x229635ed1530b6d4c1bcaab18683ee77de072256ff66e89ce73f0871e8629b6f", "0x0d2e7c3d34ec80dbf267d86a682f348de3efef15097b4800c0a3920b5ac828bf", "0x25bf71d3459cb3b36e082388eb0bcbed1c47aac10167983a8d746a9ae4eced96", "0x1ccaaea883ccd7ac517f8d79afcddb652e0ef1895649b1f3468390a80731e1b7", "0x0be5724df9d80b16038ec9bbea91bab1dd7b4c87653a86a9e9ad386406659a1f", "0x2b78eea73d09a89dc9d114e9b3f26af9ddebb6dd36abd2efbc39abfe905d1aa1", "0x17edf16afacd239c408db23dce3db2ca0bfc154217a345cbadc130994fe53ed1", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000807a729ac756815393e47d04c6246b8348", "0x000000000000000000000000000000000019f305937c8c47b7dacf5fab1aa8b4", "0x000000000000000000000000000000c0ed4e98aa74ce610873cabc4b95a00134", "0x00000000000000000000000000000000000d516bc6b3a02be3f087b76c12be88", "0x000000000000000000000000000000e45af0c8ba661e75785fd2471326778e10", "0x00000000000000000000000000000000001589091efbdf11f8afa9c2d54dad86", "0x000000000000000000000000000000ffe3819b65cb309abdc6119ee873a7046a", "0x00000000000000000000000000000000000f2d67fc9070b0b7363747463c12a6", "0x0000000000000000000000000000000000000000000000000000000000000004", "0x1259539942eec51d5c53a19e6a6509d0bb10652a0ae852c7a4d48691a07c2d04", "0x21327da46c2c2f5183a7bb7a555d692582cb43319d028003ae3314427b2e2cee", "0x11ae288d0ebed5c93eb043e8e9e0a5a8da2e73c9564c013f6f77e2c75d9b6afb", "0x149ea3efa38041e1c2a490c28743de7ef70ed1ceac3290a147dbdb7427a1e5f7", "0x0ff8f2dcfa13ab248c35e38b827eb564140acca19094bd3f6b43480c24b1179b", "0x1d362e1726dce5a89a0b1028b5ae19c553cc33de9ad99ffb4a6f4df311a4c861", "0x03d28762bf78921834d18558fc20481a6921d4a863426a993d36ba0d9147bb58", "0x2fc693fe8b1c2bf894dc0ea77917c0e114b69ae0fe5e4aec86d813f485c9cfe1", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", 
"0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x025f07864b94d3ac018dc7e250aa8cd4cf266e8a258bc88bfd0f06eb19638b9f", "0x2f16d6396da879ee6c9d00000ceb26718cb7f696535e7700c57a49ead54acb67", "0x000000000000000000000000000000e7d4d22c7924866dbd40a7d07c78f3d221", "0x000000000000000000000000000000000011d8a95d17e954a17361390291aa74"] +proof = ["0x0000000000000000000000000000000000000000000000000000000000001000", "0x000000000000000000000000000000000000000000000000000000000000001b", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf", "0x00000000000000000000000000000000000000000000000b75c020998797da78", "0x0000000000000000000000000000000000000000000000005a107acb64952eca", "0x000000000000000000000000000000000000000000000000000031e97a575e9d", "0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4", "0x00000000000000000000000000000000000000000000000c410db10a01750aeb", "0x00000000000000000000000000000000000000000000000d722669117f9758a4", "0x000000000000000000000000000000000000000000000000000178cbf4206471", "0x000000000000000000000000000000000000000000000000e91b8a11e7842c38", "0x000000000000000000000000000000000000000000000007fd51009034b3357f", "0x000000000000000000000000000000000000000000000009889939f81e9c7402", 
"0x0000000000000000000000000000000000000000000000000000f94656a2ca48", "0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f", "0x0000000000000000000000000000000000000000000000093fe27776f50224bd", "0x000000000000000000000000000000000000000000000004a0c80c0da527a081", "0x0000000000000000000000000000000000000000000000000001b52c2020d746", "0x00000000000000000000000000000000000000000000000b8746002cbbcd311d", "0x00000000000000000000000000000000000000000000000d84045a5cddfe424e", "0x0000000000000000000000000000000000000000000000088936c2d073d67c62", "0x000000000000000000000000000000000000000000000000000068c36330fc9a", "0x00000000000000000000000000000000000000000000000acb5cc273fdec4ef9", "0x00000000000000000000000000000000000000000000000c26692b23b10f94ff", "0x000000000000000000000000000000000000000000000006f3c82642dbffc654", "0x00000000000000000000000000000000000000000000000000004161bbdf59c2", "0x27077141b3b69eef68bdb60ba306990b34262f742a633f61f108a83fb1124027", "0x2fbebd1bc360ebf2bc23794e8105aff514e7e3aa74470a6a868adae73fd8e1cc", "0x00000000000000000000000000000065c6196a352c9951ee52aa570e726f366f", "0x00000000000000000000000000000000000eac9d2c7814fc13cfe594cb71344b", "0x0000000000000000000000000000004e4bd661854ff161aa8e5c6a060996831a", "0x00000000000000000000000000000000000aa6094cdaac9a33be9ca0a0ee0602", "0x0000000000000000000000000000002ad76268ce80801b402a58e96ac84066ea", "0x00000000000000000000000000000000000cf642819500a84b75396476d57473", "0x00000000000000000000000000000027988a8f8dda439138efc182ecf7822b65", "0x00000000000000000000000000000000002291dc80f736fb01b9ba68d49d078d", "0x0000000000000000000000000000009ff0931d03876326c74094d6c63a9db7e2", "0x00000000000000000000000000000000000eb3bbc61161d2047ae2a2a30bb7e7", "0x0000000000000000000000000000006af2209dce0dbcb1c02d14531610a63a87", "0x0000000000000000000000000000000000228f7864cfb58c999d53bba03fabd2", "0x0000000000000000000000000000002a171861b26b214d20a7cb4d428a270b19", "0x000000000000000000000000000000000006d220e2a01128ecd1358cfd02bb10", "0x000000000000000000000000000000e45bf2a5a87cf3f64798f8279a77be8f0b", "0x00000000000000000000000000000000001fc24780eee3fa3aedc08349ec0a49", "0x0000000000000000000000000000002a171861b26b214d20a7cb4d428a270b19", "0x000000000000000000000000000000000006d220e2a01128ecd1358cfd02bb10", "0x000000000000000000000000000000e45bf2a5a87cf3f64798f8279a77be8f0b", "0x00000000000000000000000000000000001fc24780eee3fa3aedc08349ec0a49", "0x0000000000000000000000000000008bf0c4055432b864ffb9f8d410675576d2", "0x00000000000000000000000000000000000a16292e62c9ed0c299e3b7b7f3530", "0x000000000000000000000000000000584b711e8d292f830cb608a4a05a227076", "0x00000000000000000000000000000000000d4fda7fbe28b5146d303e26aa9a8f", "0x000000000000000000000000000000d02d8266cf951520fa3852cbc5e1f9312e", "0x00000000000000000000000000000000001d2b2157f41fc5b03f9a5ac57b6783", "0x0000000000000000000000000000002059fbb0f8ee0c7aaa185260fd7c321fc3", "0x00000000000000000000000000000000001ee440dcbb2e4f24ed68334e45e527", "0x00000000000000000000000000000022dd9b5c2e8bd4aa74677b7a9af6d902d5", "0x00000000000000000000000000000000001897a238b24d0a1cd4a06f509e5a26", "0x00000000000000000000000000000065623e6a05bfabbbc2f1d64fe431788745", "0x00000000000000000000000000000000001c8ad93dccd171298381bd3a26936a", "0x025f5c37ea0e38a104ef32fdbfd3145a07fd306b50a75e59e2ebe31af3a52c05", "0x2e04f23af7236788b36112b8c1ae44032036b7dd2912123760f61278fc5ad3fc", "0x0b0af65835f3bd70dacb82baad996e5072a98ed8000575b667ff973b7734f65c", 
"0x24717a5fc222a95e324b80209496f4696ced88b6eb0bfc2c37e5dd3ce8e1a2a1", "0x2a149ea427afe59e5a2e037e1f097ad9ece9be5f692f9492242c1a93024b5653", "0x0f6e10b5a83dde62698e0a00cd2f6f5f5c7a27f6a2f4d554db895e6bc6136ef0", "0x18e6029412bbcf4b7beaeba809321e35950b2c12c1a3d73e9eeaec81cc001442", "0x0817e8ca0ea53812f70744488c49af40b35f0cb0d798300dabccdabb925d9b22", "0x0e8db3dfffe1501b381587e08a1ba003d98167a4f91d88f156b09f13a5dbdd47", "0x22d22d247c91efe1f297fc7b8c3b40eb2dcf815a2dc0142c2ccec80157e2420a", "0x19947dd6e7df9c6eee2a464131ac868fa85e12c569afe687ea8d0c62ffd59233", "0x1d2c914f77a56279a0c9d32db065f2dbd10f2f8d842dc0014d551031041f7aa7", "0x1d29745d02ec5f169e2a9db489e3944ed15ee4e8b629bb647dbbd38427d5029e", "0x09bf58b5921e8cb2372521f7e8a8502235f156606c3098ffbe987b48b2af4073", "0x05a7cc2a5940b521239644d7250513bff7645f16eb0eb0683e3929260f81cdb6", "0x2dab86b959d04873c610a1676e233cce79fce6fdd3b5a82e81f1f773012efa7e", "0x0a255443bc8d00f60335d90fc9cd705d7b06a303c690f1312ade848358881e75", "0x076648a5db79df2a48cea47faa78c8adc59847be294457d8e8db7f8326c7b339", "0x1a553cf1f9ef08ba1c162910806cc5bd4b01db74f2c4985786a854cde1f52441", "0x225165901686b64f9de109f03fe3c843b56a2eae98d375ea620300dfdcce443c", "0x171e7fddc3036c8856c695e490e190646aaafc76cf2ed30d983bfadae66d3bdf", "0x2817863863d702dea17af857ddde79f0f76aee8bc35b7ffb691c9e80d33cde61", "0x1612ff7f39c0146c68b35c620c105e0bd92b2af5a589410af5b183aa68021f1d", "0x0cac976fc3d15c7bdc57465aef613e8989b74e6e334153bd89904629e6faa866", "0x1255fe9e9f69c6251d8079a44998e888e46bc3876e03c7cb9c7e62ddbd99e677", "0x20b44b67b2b9e6a634870c3af4fc84398954c7f3fd8a953511b65d10e23c4d81", "0x08cbad1a18b57f06777bec129d814b85600d66b47b60000b647db7a0e90fec39", "0x1908598df043a822b0e01f30c1fd77784896ab94ea66c6aa2da6b05f5de07433", "0x0b0be106eace90ba40992ff3a5f57681558e405aa66785006f83a378139d6822", "0x1a44dd9394351a1727d7ad5f6975edf97807ac8cc7b5cb02ac00117153014965", "0x0b622689d0db488b7cc81e840c432e207223a4f1331bc134b1e614fc02e3cc4a", "0x070a24a894645bc8deaa11efee6a0f0e7f7f3248b3de515bda529bc0fa758064", "0x09c434c45dbbbf2773438282e34930de805859da6f4daffd7640897331b6962d", "0x2f31aadb6eca7198272529d7cdbdd71234eda2815cac236e2cd76a52142d6b2d", "0x0fcd848eb33c87bb74a18c180edbf59191c2175479fa724de7d0afc4efc6b951", "0x138e8285ce88659c4b79a30c05a738f4c00aeb0100a9ea1f26fdec90aba2ad75", "0x071ca2dce8bab66696b99edf1f5fa5023bea9e657525444cfe56d29ff7d81106", "0x0f7e018b0132e26b51b786c44ae718d258630faa273b3f3d089abbb71f4ad8bf", "0x1d5b6c5b9124f124c25b00dbcff98926d1de8f28b2755020c6039e5f7d170017", "0x10b670af86d16e5653babf56bc8c89c7c83961a4d5a808aac1dcce0788ffef76", "0x284629a1f1cb8f26390401f570a46e043b06eb245b313e5233de51fda18af292", "0x179caea1d3b52e9884a6d4862e860d0d0867002b15f5ae5efc00d21e4ffecd0a", "0x14286e9f7472a7b61c36f20d72384213508c0b1b7a538f86b58ad9dc44bb110b", "0x24dd004f49596ad62b68cfea222acf35265f76e99ba921e3e91a34850ec5d89c", "0x09aadee62531248ad35126722e4d98eb4f2796d25e8f2b9fc8d908508f1c6850", "0x2f36e98d9402e18e3ab004bb128ddbe09539d0f69656a05272bd0a97966da1fd", "0x2479f4a4677186790630040e24f0bf9a4b131b3c33cd2cd8b0821319224bb0db", "0x304971c815b4065d9dda595382f892e960d6f82f88de4445e57c627f63818b9a", "0x2f53fb025635bdecaa81dbfc096bda42a60a1b8a1e133422d8b1ab66713d093e", "0x22450012367334753f7cb2c27cc360540246865b476a61655676a6b831f2935e", "0x1a7395119ef24d3e90d317574317fa0132effd35c5b538d39548ff69c6e035b6", "0x28932e09d058c5883671a632a9213ef780aca7cd8336431e5302fc1b74ed37f6", "0x209a8ff5a11f1c328921ec2e0be429710b7abcb581cd0b13f187ce4d10c86c86", 
"0x24681e237c8b186c1fa2d889a76617e781364880b4cc069f1c6b64e729a92fc9", "0x0df482c84a01dd32e6744639a1982739850e72a4d153767142028026e4466c20", "0x0775a3c226714a03368384158395ef8ecf9fee9f3e5d2a23563a2e958be61e0e", "0x144f4b74b34652087138c8f5252a7a56632a0e3f6c96e37c638571d75673e9b8", "0x24ad0a0a8649ba88dec87c938a1911ffd4f9e45f22bf3ccb6b40d00a52307ce4", "0x074835ca1b4ac1e703ee952bae884b833a4597de0906b6d8cc85559c7d7d466b", "0x21c618e70de949dd894270be6bdac5e0fe0e364bd27fe2b45c54442601047bb5", "0x2776f41a215cc480e20cff7d80f37758e373b9a655b14cffbe0dd1ad20b0d3fa", "0x1909a023dff04cd83875e8621d1f639b34a2055e824cabdc91b8eacae28f1222", "0x02219cc97f57dddef9f31ef420a2d4ebff434e5b0c9a2d609becce8186e3c761", "0x115fbf5eeefe29e11f72b79794d5a588240a58d0a336ba1ef8c45be25852b2e0", "0x05c563283984ed7f9fca5bb7dfdc595fe05f80b096ed192e1754f70c8add451d", "0x22fda8d37d884c75e1d4f3bb62e8511c9475d4509753981d0e0a9b9ba004559e", "0x1ae7cc16832942b0cc37936bfc8ba48b0b7178ffd4907956592125f77e5f0acf", "0x069a2009413f8b7b13ad0e29ebb7d77e79962324860c373e5a4dc88184acf2cb", "0x1527b22cf81c5a5e061f872a8754483caea31c78ab8e823835bee1b95c2fbc2b", "0x22fa65608e96af14215b06c04df266b326b4e385c5d7f5b516494e12455677db", "0x26f5d12fb4e6c0a6d9506bac8b4ce6b6e53393e4aff865df56a56960ed08251d", "0x026c0a3e4964eaf24c0a92907b907b3fce8e5d450d2dad25260afe137e3ee3c8", "0x23dccaf45c2c563d4cbf408454a935cec82365ab40d755aa6f5cec347f26219c", "0x08f2866c2013554b4d4276c4a81343a215dcf83eb26cf6dddfe99ddf8d72aba9", "0x284b9052a96cb05f0fdb308f543e21a926c4f3c64014b3a6d242ccf7cf8f3f6a", "0x2ce884fb94718be5fffe21db4b6fc8fe2c6fe08d6f4e725dd29995154dd5f57e", "0x07d329b62f9800b4d89cb3112424c33e0cfebe8500a86d9ae17207062702b4fa", "0x1b533ad65f00e02c664ce16a1b64a3a8fb2ca8b8e1b107e4cc40fe564c5801dd", "0x266a555b60e3dafc89b2880adf1ff397be00c1decbc3c838f761dcd8f5d872a7", "0x11264a632f2e0b3e86fb30267586ab258e0ae2ab0bc49e814f7a38b1f3886a54", "0x0e57559b8cc8d54d3cfb830acbc43d84afdd74ed6857a1afa8f81387286a0a7c", "0x21862b599dbfbc49a35e88bc262b898a147791d061ca3dc7fa4af7fb4c732b88", "0x2b6c18561289ded21def5b9238757cf8b12970a9704094123093dcad1cc15c22", "0x2a1792b5e883ede1eab7443b35482240e0d17062eac4d8bde58c35859bcbc387", "0x17d85c38220545b7e7a9be06b153a19c8e1010a71b47f07fd62044188b353064", "0x256944755fcf0e24bb0f782d535f880a1cea528d6e0630d4ebefed1db9470829", "0x16222c2b9e472307c9dc2316fbb48b0d4c76012eb087541243ba9d320fd861f4", "0x1ceb17923fba82df59ceccee8d985ea828330e5c26e4be2f3321819f838cb19d", "0x026b8f46fda3b85e263af5c77048b09a5d6b50d95e405514fda136d8726194fd", "0x18479d53ed62b7c11c4f4dcc09c4d0411d5e14a1cd6f482159caa0d36c040606", "0x1c2cb816c03d52bd3892f7010c666eb9023df6e54a9aac31a72dfb850428e61d", "0x23e62fc8f3740ee827e86987e3520213848436f2afd149822af8a3c25b97c714", "0x1279a2b9c9c69b375a3fd59fbe6bc9df9f432f63ba75e53a2d75e907cf45657d", "0x117d5edc333b97386af6f374c71ee282d7f81f42df021e39f209d0c4800d34d8", "0x27d94cf61138e0a277ba01ec8565b62c5e41aa504bbd4baa0f2e68229fa2bcb0", "0x17ba4b7c3c093d1d26d8a077a4ce7643fa792fd9ece57c8747fdf4fa2ea30695", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0b8b3217f5fe9c72fede996a5ad69d3b6acf8c160105184c40ac6983527d39a5", "0x0d43cad1f9a07883084cefb87e84e8d3122252840da8d3d02ed7ce9cc7892ced", "0x243e81bb2ffb4d40e57cafe2d24d0782f6d736a8dd7fcf8a950c5d6bf49e7c80", "0x04d2cbb4f2166cd6309c4b717a32a1a0d644b6e8f88beb141389f8f9477844e6", "0x1640750281dfdc30cdc0dd5287a47bf9901b030954f9a141ca16edf1e0cd1ce2", "0x2a631c514cb15205ff3109482f046efadf1ba925b45f7204c167937e8193430f", "0x013403ff8ca923d1264c47607ef84d484e08f72a8c7427d51c60a2b5a273f2a7", "0x2018b376b5018120de35eae0b9b1374b5269132e36820c14d9c95821a91e75fc", "0x019733574899b830b08505234c08648f3e3af256b2fb190d3c0134f8828d7991", "0x1ff51f1afc62c39c9098607f3c290ee441277070f617f782dbf60bb42b2c5e1a", "0x052fd04ab2653dcbcc6d57ee2e34ef4d1503fe36856bac1fc095a0af39241322", "0x05f8daeeec7ca421d4dd9a824f5c7014640658a5acaebacdd67293ccb908fb7e", "0x07e14f3a38cbb8dc7b92aa2025c551dd311e0569b04b69cbcb12e08686f2fc8c", "0x12cdc04526438b8eba0532989f80c0a53d851c733bdaf70cafd1435604a17659", "0x2605dca63f3e1d0ede88c10b4ec1dbebd3d561bee9e99ba298b405641785c2fe", "0x0c07dbf94350f1cdc78ffe85c52724dc594e1e28741b0c22624eef4a09fb503a", "0x0e7e0b7389c14ba80bba5579ef1d2659f5651ea3ceda091fee86ff772501ac93", "0x24f96b8bb891c3bcdbba35142159064defad5a0c4095c907b6b5a4aab15c0fc5", "0x304c70bc0c0ff81063d98fe51881dc2505b25e8763697fb8b03d06215cc73dba", "0x0dd4f88ad194bf3ce2cc5303447f6f3a8cad99e1847e11b7e7508e5515381570", "0x1261f37c17ee6ec94384546d4381102d48d79047819c1e0e2bc457be5d9a6997", "0x2ce1bbeaff11c010147a78088c5586f8b09bf22365bc626c5e9661641d4bfefa", "0x21127a60a786987c12c10a4c1b094456833225c948a6d07d05ed5dca45ee88cc", "0x304b9d8048cc3a7e452da848e827714072f0d0050b6aae36dbbe2d1798e4e0f8", "0x1d70eb5e3cb06fe66fa9f7af0460a207e7efb41b4994e96c348aba91e1630754", "0x1a51c1c33d4fa7bf495998f66b7799a7626d54ee60c0cce19fdd302135603019", "0x2301dbd6fe2b6ceab41ba8cc0fb0c2c9d33752aeae58c4844e581198b68be3b8", "0x0ca95c1ef44e332e24b8b7dd0e25fd63c3dab0335af5efa463133f75a6756d5a", "0x09e4b7708f3ff28829cdd8adc0f476690837c2592a6ec3b13cb42420a3ac2e27", 
"0x2222f053f3f9fc30356852963c7c1aef009c52aa824754cf5c5f2ae783d01048", "0x268448a3962224f1aa3aa009e43ecadb522f99979b25aea949aa39aa556f0a49", "0x1d4fc7e52539f5d1b5101afbefa84ce847d27b55b760e7fe727dd4050c8422d8", "0x2e8100d836ac0ddd851b14dfd0bf0b7fa045d9d299682868236b2a3bfd0a6c93", "0x0b05d0e6eb22049804f13cf5acf3071aba55a43840d2373e8fb46979708b9f0a", "0x0b05d0e6eb22049804f13cf5acf3071aba55a43840d2373e8fb46979708b9f0a", "0x26fe9707cae606580f38fdb1aab8cbd5a1e6f43d855eef09cc99673e572edcd5", "0x154c5c0f01a58b2007b9d775f7f6e00e27ce4b6eb4d51a0b87b04d1acaf742b5", "0x20873c3e17f2a3332d8cf4f9e4897ac3a9373cdd1a13f3ffdfdbdcf26cd1692e", "0x1dbcbbc44c8c8d6059c139edfdf0ab7ea610c8da34c3157751c480b76a4a3968", "0x13999c5eac82ceb72ce430287a6852eb7bd918231aefa8ba4a31ab3ca168282f", "0x00000000000000000000000000000062b891458dd24b563ea32ce551e2dc1401", "0x000000000000000000000000000000000008e017337250089638d381fb588111", "0x000000000000000000000000000000f29511b503a94faa26818f1d66f10b0668", "0x00000000000000000000000000000000002197f37ce37f7fb449a1378d57d96e", "0x000000000000000000000000000000c1d451cc9694e9a647924b3cc14134b7f1", "0x00000000000000000000000000000000002e001fac22bba00cb0815c300ecb1f", "0x0000000000000000000000000000003ed149c42b948f547faf28e048d124f85c", "0x000000000000000000000000000000000017a23afc38dd04e83dff7e927858c1", "0x0000000000000000000000000000007841b62aa36e2e5b54bb997a1723ba590d", "0x00000000000000000000000000000000002dcc68212d68b60317dddfad80e137", "0x00000000000000000000000000000035ac5728bc439a89fd6f0d597b1fea2b98", "0x000000000000000000000000000000000011c5a2e823f6b5acf905499976e08b", "0x0000000000000000000000000000000e2e50ca6377fe4b229f6c20ca81759d4e", "0x00000000000000000000000000000000002494259436db67310e79bc15b7946c", "0x0000000000000000000000000000006f1eac56102cc4eb5346cdbbdafcbacfac", "0x0000000000000000000000000000000000072131e35c11d628e48b098d6df495", "0x000000000000000000000000000000e05bbbc5bdcd5cbccb8690c62efe5b0fc2", "0x000000000000000000000000000000000019fbffba4ac991536afae2f420f94c", "0x0000000000000000000000000000007bb016811148df04855f797de065112992", "0x00000000000000000000000000000000000bb79ee897f8ee1318ad700a37c8bb", "0x000000000000000000000000000000942b96f32d7bf93ee813378b6014a18741", "0x000000000000000000000000000000000010ce7a1322a0157c915d19597f5e97", "0x000000000000000000000000000000f177bf8da36647518042aba3f5728c86ba", "0x000000000000000000000000000000000015eb978cf35cc176d523fa02a452ec", "0x00000000000000000000000000000093cdce5ba76ddf9dad3b5927b591406ee7", "0x00000000000000000000000000000000000cadcba14b18e01600f046c63397cb", "0x000000000000000000000000000000135d610c5e71ed12990c6067ecdda28f5e", "0x0000000000000000000000000000000000277411552ac27755eed15b464e1518", "0x00000000000000000000000000000051e80ab382f0f58f93a03558ae1906dc87", "0x000000000000000000000000000000000013b9732b57127f5e070b27bb834ecb", "0x0000000000000000000000000000006a3fdefc289de1ecb4607c81dced47539a", "0x000000000000000000000000000000000017c2af98d008875356625e40fbc876", "0x000000000000000000000000000000e704dd534cd2c196fd7265ae7e10e2d4ee", "0x00000000000000000000000000000000000eae170af2248dfbd1c6c0938a7ed9", "0x000000000000000000000000000000eccf380f69533eae3f5088a1cd1f687403", "0x00000000000000000000000000000000002758e79ccd2f0d7a5f4cfc6431a29d", "0x000000000000000000000000000000026b9284913baa6a73a2ac170bdc1d2905", "0x000000000000000000000000000000000017213cce8b7bcc9e0a5c0c131bdf95", "0x000000000000000000000000000000e51c33e735abe76bfcfc7f74d70066435d", 
"0x00000000000000000000000000000000002423d304ea2398bb024c79173e4ab8", "0x0000000000000000000000000000003b1f3084ca16a46a5636f4df91db0c4072", "0x00000000000000000000000000000000002d109f44066ae8e8b3d171fe5dc857", "0x00000000000000000000000000000077bc221de09ded0fb646554102d2a5729a", "0x00000000000000000000000000000000001bace5a3765f10657fbda74713faf7", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x27fd867f34a134e8a1117139770610a49326fe0993e9cedb97f1c49bf4d0c37e", "0x1e13f5fe62ad39d934c53a89ca23fb82df706bef914b026d91760567d32fee3d", "0x1377749e187da3005944ee59d2b3e47b6e19aace08096f368fb410ef8ca4807e", "0x0b8a3021c1b8675d83dfc0498c8ca03f64c1356a5147d489bb2909c698831017", "0x1bd2cad03de053ca3c1654b9d5190583a91fc1ce8852832e885d1d057d69f3e4", "0x045ef1e8bc5e55555974cccb36454b0b4b96662d5002a3699ad7039c8e6b8f76", "0x084df6d4d71ebfe664fd82adb1745fab26af6f4fbeda691609ec7b03ac14fceb", "0x080aff2ead8b977685173fe16885d44b660f7a9d236479e69aa52aabf5dedf9b", "0x108f76cce50b85875eab4a1affe83f3d502ffe89f9ebb45717931941dd36374c", "0x2f6238f167a82b1fa51f0b586718668c219cf0c5b86a4f9ecd8f9ee8466cbdaa", "0x174a1f05bfb1af1cb02f356eb61993058f1a4acc4015eeeff6ab6bc861c0082e", "0x11264388c26c33ddd5da54a50961054e53d0b80692d8d1fa3cc3385096cfbd21", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000ef2c2c3734b9d7f1ca6d8a4f25e5b30045", "0x000000000000000000000000000000000024e1c390cdef8e388ac5b71c03914f", "0x000000000000000000000000000000f1f778afa1e6d8795e138992c04ac53d3e", 
"0x000000000000000000000000000000000027e4472a6bce2207caec3c24623515", "0x000000000000000000000000000000fc0b167a383cb223183bca4890ed77ba73", "0x000000000000000000000000000000000006d40d5de6da4d7d9b077bf64ae6d2", "0x000000000000000000000000000000c71af3057609219e48234375e3d1c46385", "0x000000000000000000000000000000000022371900aa96962e1043e3105ec801", "0x0000000000000000000000000000000000000000000000000000000000000004", "0x25a1742d9f07d048818f06aae510eeb61a2e7e41174ab8ee3c2bed8f14d6ac7b", "0x06bfca75d1c374ce279c3b91525022bae1f318f0c02e572edceb3a1c1b8295df", "0x18fa8d4d23b0bbb642d22a9579930c4aae36727cec64d950166db96041077926", "0x0f1f02058bd777782fd203c59484286b8c93eb06a2a4c3e7b60594fc26aeabab", "0x238575f4f83e25bdbff8b9f3aa32736e2c3eb408cf315d18e2a4e8136e010f7f", "0x0f43df640ed3a1abaef94d0874aa8ec8e8c927df1865864ec154fcb555064eb8", "0x1892dbf1a8806091a23b1e95546c151584dd48c122ba966404092a745ebb4812", "0x2b51eee8db7e2770c813c3c492c26dcc89aa517bf9f00b5fb4c0b474f00bd422", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", 
"0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c", "0x2ee66ccd9a0415f633e3bb1ff8ef493df665ff85d804c1ef65d34c3ae5827c47", "0x24ac27c84b886c7ae9e28be05e6f72bd7c7936eebd0596310e24000b966f72f8", "0x00000000000000000000000000000006612875c72874d6231dd30894c6266900", "0x000000000000000000000000000000000002a85ff4cf723963d93731dcfbeb37"] public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] verification_key = ["0x0000000000000000000000000000000000000000000000000000000000001000", "0x000000000000000000000000000000000000000000000000000000000000001b", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000003", "0x0000000000000000000000000000000000000000000000000000000000000004", "0x0000000000000000000000000000000000000000000000000000000000000005", "0x0000000000000000000000000000000000000000000000000000000000000006", "0x0000000000000000000000000000000000000000000000000000000000000007", "0x0000000000000000000000000000000000000000000000000000000000000008", "0x0000000000000000000000000000000000000000000000000000000000000009", "0x000000000000000000000000000000000000000000000000000000000000000a", "0x000000000000000000000000000000000000000000000000000000000000000b", "0x000000000000000000000000000000000000000000000000000000000000000c", "0x000000000000000000000000000000000000000000000000000000000000000d", "0x000000000000000000000000000000000000000000000000000000000000000e", "0x000000000000000000000000000000000000000000000000000000000000000f", "0x0000000000000000000000000000000000000000000000000000000000000010", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000011", "0x0000000000000000000000000000000000000000000000000000000000000012", "0x0000000000000000000000000000000000000000000000000000000000000013", "0x0000000000000000000000000000000000000000000000000000000000000014", "0x0000000000000000000000000000000000000000000000000000000000000015", "0x0000000000000000000000000000000000000000000000000000000000000016", "0x0000000000000000000000000000000000000000000000000000000000000017", "0x0000000000000000000000000000000000000000000000000000000000000018", 
"0x0000000000000000000000000000000000000000000000000000000000000019", "0x000000000000000000000000000000000000000000000000000000000000001a", "0x0000000000000000000000000000001f218b9483ed0942db15a28f417b248c6a", "0x0000000000000000000000000000000000120f46fb150fa5544f70413659ac49", "0x000000000000000000000000000000054bf69002abed9f9354db858292ee051b", "0x000000000000000000000000000000000003bd06a2c7606596f9eebda30dc708", "0x0000000000000000000000000000006c892b6234def66f6577959d064de4f749", "0x0000000000000000000000000000000000275343f0c8be74b6adaaf08488d71d", "0x000000000000000000000000000000826318e94744b1f7ff6776deffacebe17b", "0x00000000000000000000000000000000000f57fa00c8a7dafecb9ebaa52c523e", "0x000000000000000000000000000000032d7ef2b49315f1f09e67af355aebe815", "0x00000000000000000000000000000000000058cf4aba931d69238a327636e771", "0x00000000000000000000000000000091a26171db51087d9773ee271fe440fe4e", "0x0000000000000000000000000000000000014772ce2168f21ce8e217ac82596e", "0x000000000000000000000000000000d8197c7f95b3d7b116cb23b2be067742eb", "0x00000000000000000000000000000000001fb1a5cc1a1c33fa4699ade4a992f4", "0x000000000000000000000000000000ea89f3283423819ba34d92f1af1b60cf2a", "0x0000000000000000000000000000000000054f8cf376bda1948450747ce44405", "0x000000000000000000000000000000ac09001ff01875c762a7d467f333c9f0fa", "0x0000000000000000000000000000000000159abc799405ce9b72e9b8f0a58907", "0x000000000000000000000000000000870a9353bab217f6617fcea8b66efe55f5", "0x000000000000000000000000000000000016a23b3c4e2a9a54a68d8e187b4acd", "0x0000000000000000000000000000004e42c7133c8b802e7073cbdc76a95f54bc", "0x0000000000000000000000000000000000116d2ee2d6d93084d2f5576a9eff91", "0x0000000000000000000000000000000a9c29dd5c7e6b6bed485755ab7867129f", "0x0000000000000000000000000000000000182bf61f320d66dc49746f3f767b8a", "0x000000000000000000000000000000d9ad8b5943ead2741efdeb311b8305c9db", "0x000000000000000000000000000000000025043141e7a515687c6d80d0853b76", "0x0000000000000000000000000000003447e05ab4400999c9704862944ef03bb3", "0x00000000000000000000000000000000000d8d5f85133d06d56249c6463ac023", "0x000000000000000000000000000000a81a33be27f85f6030c2d794fc5ea7a0ec", "0x000000000000000000000000000000000025580b84ad2a823052effa19d61d88", "0x000000000000000000000000000000d398efe5afb73ff0ff0d5b4d919ebfeae4", "0x00000000000000000000000000000000000ce6de4173c77b111df4ae25bcf751", "0x000000000000000000000000000000d693725e4d29c3c1856f58d3657e4f8afd", "0x000000000000000000000000000000000000e860bbb23a26836fab3733b1a5d2", "0x00000000000000000000000000000004d908124a014f6dd4217600bbe017179a", "0x0000000000000000000000000000000000009639107ef0d7660211c3cb26fd83", "0x0000000000000000000000000000002c120d25144b16975f344bb8e0911f50aa", "0x0000000000000000000000000000000000025c989a5fbbc39a745dfa0c526d1b", "0x00000000000000000000000000000029c1683a4f2321682c17677f325c27de6a", "0x000000000000000000000000000000000017dcb48f840e14a56297e5e157ab5d", "0x0000000000000000000000000000006f8e97f2719363e73a919b60136cdf6fa7", "0x00000000000000000000000000000000001cc7ed8398842814b8d8b0a47fbff9", "0x00000000000000000000000000000074ba3ad568a253626720f6cf84a8048147", "0x000000000000000000000000000000000015a62e81e4d73a4bc690880130b274", "0x000000000000000000000000000000db0e9ab080d6e0b016d94b483c68a22b06", "0x00000000000000000000000000000000000530f0adac9e85a13bbe967daae7d1", "0x0000000000000000000000000000000a14cc5e4f5bd6610a377407c783d936d0", "0x00000000000000000000000000000000000550cdda8250517265285faa5a7517", 
"0x00000000000000000000000000000039d6a99d59ce1d827d7a6ae4ba48e2070a", "0x000000000000000000000000000000000027e18e542338c47beb02405dec6795", "0x000000000000000000000000000000e119c0de9378fef5e737a3e6c30e399742", "0x00000000000000000000000000000000002128ad7838a6a53762477930fd162b", "0x000000000000000000000000000000053d0020faf1ab4ecf00d41c2a01f4c504", "0x0000000000000000000000000000000000067729164ec966f4d8dec2642bfcca", "0x0000000000000000000000000000004e45ee83f5be6f212f96d9f88f62bc459a", "0x000000000000000000000000000000000020065b860db7e7f600ab625cda79c2", "0x00000000000000000000000000000093200fd4a93240ee6fff2d25065692e138", "0x0000000000000000000000000000000000244c194c60da2b4d787d5f98c136d1", "0x0000000000000000000000000000009354951db4088a11f00a3f56f9f54f0d7c", "0x000000000000000000000000000000000022dba94f08aaa5e6113bd537d354c3", "0x00000000000000000000000000000051931edd7fc2b2ff98d701d3072a909b41", "0x00000000000000000000000000000000001a026a2f8c1a2eb52a4d73daf3ab70", "0x000000000000000000000000000000a14701bcefa7c0c8d498abd548d689bc10", "0x00000000000000000000000000000000002da7bcb43520678e91f1c1422ee3fc", "0x000000000000000000000000000000deb2deb6c321a235675612f31bfd349dac", "0x000000000000000000000000000000000021315fa1cc6bb01b68f70de8e42f65", "0x00000000000000000000000000000013c727a1f60952056815260454d067900a", "0x000000000000000000000000000000000016d6e8d1ef7ebe3a30f3c8b0920807", "0x00000000000000000000000000000039d2e1e0191154f0059ed5412fc9c1cb1e", "0x000000000000000000000000000000000010435db71d51c501515cf705eb8277", "0x00000000000000000000000000000038222e1253633affb05f15fc206a5cbe07", "0x000000000000000000000000000000000003816781c9f9a9cca72015ee3348f4", "0x000000000000000000000000000000137f14764aed7a2559351dca3212a626ac", "0x00000000000000000000000000000000002d207af6743643d96947c9b1a259a6", "0x00000000000000000000000000000056089fbf1dc6b4af91ff8121303d83319d", "0x0000000000000000000000000000000000211ee2c4cf62fe3e7a9ce95faeb23e", "0x000000000000000000000000000000bbd960e0fdbaa3f47270679afd6bfcd349", "0x0000000000000000000000000000000000075069f76e6438da345431e1b2c58e", "0x000000000000000000000000000000ce72715195cd6f174603a8a614eb077e47", "0x00000000000000000000000000000000002724dd7694b7e169e5a264a6e02f9e", "0x000000000000000000000000000000a53ef0e9fc8df74a32e08c53a230c36836", "0x0000000000000000000000000000000000153ce938348613707a18a83d7511fe", "0x0000000000000000000000000000007234f68ff4ed021c91a2786b9b6cd75d1c", "0x000000000000000000000000000000000024a8faba9f2405c6660f1750c5cdcb", "0x0000000000000000000000000000002b80d6b55199d55c762fbaec636a525f3e", "0x0000000000000000000000000000000000217f0ffe90bea51c49a1b331adae9c", "0x0000000000000000000000000000000ed2d8303f6f9c83d86d5c13f3fb4e99dd", "0x000000000000000000000000000000000009c45c61155d314160b50b20e35c94", "0x0000000000000000000000000000009cf5a0abb80a3b2fe13b28fe8f315409b7", "0x0000000000000000000000000000000000188a2ffed7f8cbe84c7a0f1b018cd0", "0x00000000000000000000000000000085656153a6120eebdf3a488dccad95d00d", "0x000000000000000000000000000000000023cd626f5dde3ce81de0e7d4f74dc2", "0x000000000000000000000000000000d078687b7ffc17ed454fba23b4ecae0ec4", "0x00000000000000000000000000000000002501de6f063c457fccf291f449fe04", "0x0000000000000000000000000000009a4a3363201808e24813118bbaf3bd079b", "0x00000000000000000000000000000000002da4af07c13c8064fb9c44ff43a9cd", "0x000000000000000000000000000000cf91f0cb73295831bc93869fc19cdbad99", "0x00000000000000000000000000000000001ee5d0cdde02f62e468b034f988c56", 
"0x000000000000000000000000000000183a142ac675e313630847be47fe912b91", "0x00000000000000000000000000000000001558e16ed49142b74e4a2085b22287", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000c47f6850760801197f1d78c8b522e3e8ce", "0x00000000000000000000000000000000001c16df0851ab44fc5fc86fc32d901a", "0x0000000000000000000000000000003a206d1590ecddff13695a16efa7c66055", "0x000000000000000000000000000000000011d6dc438ccc1de6046419780f726b"] diff --git a/spartan/aztec-network/values/release-devnet.yaml b/spartan/aztec-network/values/release-devnet.yaml index a6bf3ce8c53a..16a9ca389ca5 100644 --- a/spartan/aztec-network/values/release-devnet.yaml +++ b/spartan/aztec-network/values/release-devnet.yaml @@ -18,11 +18,15 @@ bootNode: disabled: true proverAgent: - replicas: 1 + replicas: 10 + bb: + hardwareConcurrency: 31 + gke: + spotEnabled: true resources: requests: - memory: "4Gi" - cpu: "1" + memory: "116Gi" + cpu: "31" bot: followChain: "PENDING" diff --git a/spartan/metrics/.gitignore b/spartan/metrics/.gitignore new file mode 100644 index 000000000000..7d1010096142 --- /dev/null +++ b/spartan/metrics/.gitignore @@ -0,0 +1 @@ +values.yaml \ No newline at end of file diff --git a/spartan/metrics/install-kind.sh b/spartan/metrics/install-kind.sh index 3a9ecfb4ccf2..5b1c9ce79003 100755 --- a/spartan/metrics/install-kind.sh +++ b/spartan/metrics/install-kind.sh @@ -10,6 +10,10 @@ if helm ls --namespace metrics | grep -q metrics; then exit 0 fi +# Inject the Aztec Networks dashboard into values.yaml +DASHBOARD_JSON=$(jq -c '.' grafana_dashboards/aztec-dashboard-all-in-one.json) +DASHBOARD_JSON=$DASHBOARD_JSON yq e '.grafana.dashboards.default."aztec-networks".json = strenv(DASHBOARD_JSON)' values.tmp.yaml > values.yaml + helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts helm repo add grafana https://grafana.github.io/helm-charts helm repo add prometheus-community https://prometheus-community.github.io/helm-charts diff --git a/spartan/metrics/install-prod.sh b/spartan/metrics/install-prod.sh index a61cc2b8ef4f..edb5af00aa7a 100755 --- a/spartan/metrics/install-prod.sh +++ b/spartan/metrics/install-prod.sh @@ -3,4 +3,7 @@ set -eu cd "$(dirname "${BASH_SOURCE[0]}")" +DASHBOARD_JSON=$(jq -c '.' grafana_dashboards/aztec-dashboard-all-in-one.json) +DASHBOARD_JSON=$DASHBOARD_JSON yq e '.grafana.dashboards.default."aztec-networks".json = strenv(DASHBOARD_JSON)' values.tmp.yaml > values.yaml + helm upgrade metrics . 
-n metrics --values "./values/prod.yaml" --install --create-namespace $@ diff --git a/spartan/metrics/values.tmp.yaml b/spartan/metrics/values.tmp.yaml new file mode 100644 index 000000000000..f6eb0e506c15 --- /dev/null +++ b/spartan/metrics/values.tmp.yaml @@ -0,0 +1,166 @@ +opentelemetry-collector: + mode: deployment + + service: + enabled: true + + image: + repository: "otel/opentelemetry-collector-contrib" + + ports: + otlp-http: + enabled: true + containerPort: 4318 + servicePort: 4318 + hostPort: 4318 + protocol: TCP + otel-metrics: + enabled: true + containerPort: 8888 + servicePort: 8888 + hostPort: 8888 + protocol: TCP + aztec-metrics: + enabled: true + containerPort: 8889 + servicePort: 8889 + hostPort: 8889 + protocol: TCP + + presets: + kubernetesAttributes: + enabled: true + config: + extensions: + health_check: + endpoint: ${env:MY_POD_IP}:13133 + processors: + resource: + attributes: + - action: preserve + key: k8s.namespace.name + batch: {} + receivers: + otlp: + protocols: + http: + endpoint: ${env:MY_POD_IP}:4318 + grpc: + endpoint: ${env:MY_POD_IP}:4317 + service: + extensions: [health_check] + telemetry: + metrics: + address: ${env:MY_POD_IP}:8888 + pipelines: + logs: + receivers: + - otlp + processors: + - batch + exporters: + - otlphttp/logs + traces: + receivers: + - otlp + processors: + - batch + exporters: + - otlp/tempo + metrics: + receivers: + - otlp + processors: + - batch + exporters: + - prometheus + # - debug + +# Enable and configure the Loki subchart +# https://artifacthub.io/packages/helm/grafana/loki +# loki: +# Nothing set here, because we need to use values from the values directory; +# otherwise, things don't get overridden correctly. + +# Enable and configure the Tempo subchart +# https://artifacthub.io/packages/helm/grafana/tempo +tempo: + minio: + enabled: true + mode: standalone + rootUser: grafana-tempo + rootPassword: supersecret + buckets: + # Default Tempo storage bucket + - name: tempo-traces + policy: none + purge: false + traces: + otlp: + grpc: + enabled: true + http: + enabled: true + zipkin: + enabled: false + jaeger: + thriftHttp: + enabled: false + opencensus: + enabled: false + +prometheus: + server: + global: + evaluation_interval: 15s + scrape_interval: 15s + serverFiles: + prometheus.yml: + scrape_configs: + - job_name: otel-collector + static_configs: + - targets: ["metrics-opentelemetry-collector.metrics:8888"] + - job_name: aztec + static_configs: + - targets: ["metrics-opentelemetry-collector.metrics:8889"] + - job_name: "kube-state-metrics" + static_configs: + - targets: + ["metrics-kube-state-metrics.metrics.svc.cluster.local:8080"] + +# Enable and configure Grafana +# https://artifacthub.io/packages/helm/grafana/grafana +grafana: + datasources: + datasources.yaml: + apiVersion: 1 + datasources: + - name: Loki + type: loki + url: http://metrics-loki.metrics:3100 + - name: Tempo + type: tempo + url: http://metrics-tempo.metrics:3100 + - name: Prometheus + type: prometheus + uid: spartan-metrics-prometheus + isDefault: true + url: http://metrics-prometheus-server.metrics:80 + dashboardProviders: + dashboardproviders.yaml: + apiVersion: 1 + providers: + - name: "default" + orgId: 1 + folder: "" + type: file + disableDeletion: false + editable: true + options: + path: /var/lib/grafana/dashboards/default + dashboards: + default: + # unfortunately, we can't use the `file` helper here, so we have to inline the dashboard + # json. This is a limitation of Helm. 
+ # See the install scripts: we inject the dashboard json into a copy of this file, which is the + # version that actually gets helm installed. diff --git a/spartan/metrics/values.yaml b/spartan/metrics/values.yaml deleted file mode 100644 index df2ca87aa210..000000000000 --- a/spartan/metrics/values.yaml +++ /dev/null @@ -1,332 +0,0 @@ -opentelemetry-collector: - mode: deployment - - service: - enabled: true - - image: - repository: "otel/opentelemetry-collector-contrib" - - ports: - otlp-http: - enabled: true - containerPort: 4318 - servicePort: 4318 - hostPort: 4318 - protocol: TCP - otel-metrics: - enabled: true - containerPort: 8888 - servicePort: 8888 - hostPort: 8888 - protocol: TCP - aztec-metrics: - enabled: true - containerPort: 8889 - servicePort: 8889 - hostPort: 8889 - protocol: TCP - - presets: - kubernetesAttributes: - enabled: true - config: - extensions: - health_check: - endpoint: ${env:MY_POD_IP}:13133 - processors: - resource: - attributes: - - action: preserve - key: k8s.namespace.name - batch: {} - receivers: - otlp: - protocols: - http: - endpoint: ${env:MY_POD_IP}:4318 - grpc: - endpoint: ${env:MY_POD_IP}:4317 - service: - extensions: [health_check] - telemetry: - metrics: - address: ${env:MY_POD_IP}:8888 - pipelines: - logs: - receivers: - - otlp - processors: - - batch - exporters: - - otlphttp/logs - traces: - receivers: - - otlp - processors: - - batch - exporters: - - otlp/tempo - metrics: - receivers: - - otlp - processors: - - batch - exporters: - - prometheus - # - debug - -# Enable and configure the Loki subchart -# https://artifacthub.io/packages/helm/grafana/loki -# loki: -# Nothing set here, because we need to use values from the values directory; -# otherwise, things don't get overridden correctly. - -# Enable and configure the Tempo subchart -# https://artifacthub.io/packages/helm/grafana/tempo -tempo: - minio: - enabled: true - mode: standalone - rootUser: grafana-tempo - rootPassword: supersecret - buckets: - # Default Tempo storage bucket - - name: tempo-traces - policy: none - purge: false - traces: - otlp: - grpc: - enabled: true - http: - enabled: true - zipkin: - enabled: false - jaeger: - thriftHttp: - enabled: false - opencensus: - enabled: false - -prometheus: - server: - global: - evaluation_interval: 15s - scrape_interval: 15s - serverFiles: - prometheus.yml: - scrape_configs: - - job_name: otel-collector - static_configs: - - targets: ["metrics-opentelemetry-collector.metrics:8888"] - - job_name: aztec - static_configs: - - targets: ["metrics-opentelemetry-collector.metrics:8889"] - - job_name: "kube-state-metrics" - static_configs: - - targets: - ["metrics-kube-state-metrics.metrics.svc.cluster.local:8080"] - -# Enable and configure Grafana -# https://artifacthub.io/packages/helm/grafana/grafana -grafana: - datasources: - datasources.yaml: - apiVersion: 1 - datasources: - - name: Loki - type: loki - url: http://metrics-loki.metrics:3100 - - name: Tempo - type: tempo - url: http://metrics-tempo.metrics:3100 - - name: Prometheus - type: prometheus - uid: spartan-metrics-prometheus - isDefault: true - url: http://metrics-prometheus-server.metrics:80 - dashboardProviders: - dashboardproviders.yaml: - apiVersion: 1 - providers: - - name: "default" - orgId: 1 - folder: "" - type: file - disableDeletion: false - editable: true - options: - path: /var/lib/grafana/dashboards/default - dashboards: - default: - # unfortunately, we can't use the `file` helper here, so we have to inline the dashboard - # json. This is a limitation of Helm. 
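For reference, the injection the install scripts perform with `jq` and `yq` amounts to: validate and compact the dashboard JSON, set it under `grafana.dashboards.default."aztec-networks".json` in a copy of `values.tmp.yaml`, and write the result to `values.yaml`. Below is a minimal TypeScript sketch of that step, assuming the `js-yaml` package is available; the scripts themselves shell out to `jq` and `yq`, so this is illustrative only.

```typescript
// Sketch only: mirrors the jq/yq pipeline in install-kind.sh and install-prod.sh.
import { readFileSync, writeFileSync } from 'fs';
import * as yaml from 'js-yaml';

// jq -c '.' equivalent: parse (validating) and re-serialize compactly.
const dashboardJson = JSON.stringify(
  JSON.parse(readFileSync('grafana_dashboards/aztec-dashboard-all-in-one.json', 'utf8')),
);

// yq equivalent: set .grafana.dashboards.default."aztec-networks".json in a copy.
const values = yaml.load(readFileSync('values.tmp.yaml', 'utf8')) as any;
values.grafana.dashboards.default ??= {}; // the key holds only comments in the template
values.grafana.dashboards.default['aztec-networks'] = { json: dashboardJson };
writeFileSync('values.yaml', yaml.dump(values, { lineWidth: -1 }));
```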
- # See https://github.com/helm/helm/issues/1892 - spartan-dashboard: - json: | - { - "annotations": { - "list": [ - { - "builtIn": 1, - "datasource": { - "type": "grafana", - "uid": "-- Grafana --" - }, - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - "fiscalYearStartMonth": 0, - "graphTooltip": 0, - "id": 1, - "links": [], - "panels": [ - { - "datasource": { - "default": false, - "type": "prometheus", - "uid": "spartan-metrics-prometheus" - }, - "description": "", - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "series", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "barWidthFactor": 0.6, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 0 - }, - "id": 1, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "spartan-metrics-prometheus" - }, - "editorMode": "code", - "expr": "aztec_archiver_block_height", - "legendFormat": "__auto", - "range": true, - "refId": "A" - } - ], - "title": "L2 Block Height", - "type": "timeseries" - } - ], - "schemaVersion": 39, - "tags": [], - "templating": { - "list": [ - { - "current": { - "selected": false, - "text": "smoke", - "value": "smoke" - }, - "datasource": { - "type": "prometheus", - "uid": "spartan-metrics-prometheus" - }, - "definition": "label_values(k8s_namespace_name)", - "hide": 0, - "includeAll": false, - "multi": false, - "name": "Deployment", - "options": [], - "query": { - "qryType": 1, - "query": "label_values(k8s_namespace_name)", - "refId": "PrometheusVariableQueryEditor-VariableQuery" - }, - "refresh": 1, - "regex": "", - "skipUrlSync": false, - "sort": 1, - "type": "query" - } - ] - }, - "time": { - "from": "now-6h", - "to": "now" - }, - "timepicker": {}, - "timezone": "browser", - "title": "Spartan Deployments", - "uid": "ae01y5sn1bls0a", - "version": 1, - "weekStart": "" - } diff --git a/spartan/releases/README.md b/spartan/releases/README.md index ea41c68b9919..b368c4c723f6 100644 --- a/spartan/releases/README.md +++ b/spartan/releases/README.md @@ -48,9 +48,17 @@ To spare you a few keystrokes, you can use `npx aztec-spartan [start/stop/logs/u ## Node Configuration -You can tweak your validator in many different ways. Just edit the `.env` file directly and re-run `./aztec-spartan.sh`. +The `aztec-spartan.sh` script will set the following required variables on your behalf. 
You can of course override the variables set by the script by simply changing the `.env` file directly and re-running `./aztec-spartan.sh`. -Different env variables correspond to different components on the node. +| Variable | Description | +| ----- | ----- | +| ETHEREUM_HOST | URL to the Ethereum node your validator will connect to. For as long as we're on private networks, please use the value in `aztec-spartan.sh`| +| BOOTNODE_URL | URL to a bootnode that supplies L1 contract addresses and the ENR of the bootstrap nodes. | +| IMAGE | The docker image to run | + +In addition, the user is prompted to enter 1) an IP address and a P2P port to be used for the TCP and UDP addresses (defaults to 40400), 2) a port for your node (8080), 3) an Ethereum private key, 4) `COINBASE`, which is the Ethereum address associated with the private key (see the validation sketch after this section), and 5) a path to a local directory to store node data if you don't opt for a named volume. + +On a first run, the script will generate a p2p private key and store it in `$DATA_DIR/var/lib/aztec/p2p-private-key`. If you wish to change your p2p private key, you can pass it as a CLI arg using the `-pk` flag or update the `PEER_ID_PRIVATE_KEY` in the env file. ### Publisher and Archiver @@ -60,7 +68,7 @@ The Archiver's primary functions are data storage and retrieval (i.e. L1->L2 mes |Variable| Description| |----|-----| -|ETHEREUM_HOST| This is the URL to the L1 node your validator will connect to. For as long as we're on private networks, please use the value from above | +|ETHEREUM_HOST| This is the URL to the L1 node your validator will connect to. For as long as we're on private networks, please use the value in `aztec-spartan.sh`| |L1_CHAIN_ID | Chain ID of the L1 | | DATA_DIRECTORY | Optional dir to store archiver and world state data. If omitted will store in memory | | ARCHIVER_POLLING_INTERVAL_MS | The polling interval in ms for retrieving new L2 blocks and encrypted logs @@ -117,12 +125,6 @@ The P2P client coordinates peer-to-peer communication between Nodes. Please refer to the Epoch Proving Integration [Guide](https://hackmd.io/@aztec-network/epoch-proving-integration-guide) for info on how to setup your prover node. -During a governance upgrade, we'll announce details on the discord. At some point we'll also write AZIPs (Aztec Improvement Proposals) and post them to either the github or forum to collect feedback. - -We'll deploy the payload to the L1 and share the address of the payload with the sequencers on discord. - -To participate in the governance vote, sequencers must change the variable `GOVERNANCE_PROPOSER_PAYLOAD_ADDRESS` in the Sequencer Client to vote during the L2 slot they've been assigned sequencer duties. - ## Governance Upgrades During a governance upgrade, we'll announce details on the discord. At some point we'll also write AZIPs (Aztec Improvement Proposals) and post them to either the github or forum to collect feedback.
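Returning to the configuration prompts above: the script validates each prompted value before writing it to `.env`. Here is a minimal TypeScript sketch of the `COINBASE` check, matching the regex and default used in `aztec-spartan.sh`; the `resolveCoinbase` helper is illustrative only, as the script itself does this in a bash prompt loop.

```typescript
// Mirrors the COINBASE validation loop in aztec-spartan.sh: accept the
// default when the prompt is left empty, otherwise require a well-formed
// 20-byte hex Ethereum address.
const DEFAULT_COINBASE = '0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa';

function resolveCoinbase(input: string): string | undefined {
  const coinbase = input.trim() === '' ? DEFAULT_COINBASE : input.trim();
  return /^0x[a-fA-F0-9]{40}$/.test(coinbase) ? coinbase : undefined;
}

console.log(resolveCoinbase(''));       // the default address
console.log(resolveCoinbase('0x1234')); // undefined: too short, so the script re-prompts
```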
diff --git a/spartan/releases/testnet/aztec-spartan.sh b/spartan/releases/testnet/aztec-spartan.sh index 2b90bf5fc0ea..2d05a936d8f0 100755 --- a/spartan/releases/testnet/aztec-spartan.sh +++ b/spartan/releases/testnet/aztec-spartan.sh @@ -171,8 +171,8 @@ configure_environment() { # if the network is `troll-turtle` if [ "$NETWORK" = "troll-turtle" ]; then - BOOTNODE_URL="${BOOTNODE_URL:-http://34.82.213.6:8080}" - ETHEREUM_HOST="${ETHEREUM_HOST:-http://34.19.127.9:8545}" + BOOTNODE_URL="${BOOTNODE_URL:-http://34.82.108.83:8080}" + ETHEREUM_HOST="${ETHEREUM_HOST:-http://34.82.53.127:8545}" IMAGE="${IMAGE:-aztecprotocol/aztec:troll-turtle}" else # unknown network @@ -216,6 +216,20 @@ configure_environment() { done fi + if [ -n "$CLI_COINBASE" ]; then + COINBASE="$CLI_COINBASE" + else + while true; do + read -p "COINBASE (default: 0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa): " COINBASE + COINBASE=${COINBASE:-0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa} + if [[ "$COINBASE" =~ ^0x[a-fA-F0-9]{40}$ ]]; then + break + else + echo -e "${RED}Error: Invalid COINBASE address. Please enter a valid Ethereum address.${NC}" + fi + done + fi + if [ -n "$CLI_IP" ]; then IP="$CLI_IP" else @@ -250,7 +264,6 @@ configure_environment() { cat > .env << EOF P2P_UDP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} P2P_TCP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} -COINBASE=0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa VALIDATOR_DISABLED=false VALIDATOR_PRIVATE_KEY=${KEY} SEQ_PUBLISHER_PRIVATE_KEY=${KEY} diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 50730dbb56e7..5f9cd2f6b0ba 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -29,7 +29,7 @@ import { makeUnconstrainedFunctionWithMembershipProof, } from '@aztec/circuits.js/testing'; import { times } from '@aztec/foundation/collection'; -import { randomBytes, randomInt } from '@aztec/foundation/crypto'; +import { randomInt } from '@aztec/foundation/crypto'; import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js'; import { type L1Published } from './structs/published.js'; @@ -223,7 +223,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch }); it('returns undefined if tx is not found', async () => { - await expect(store.getTxEffect(new TxHash(Fr.random().toBuffer()))).resolves.toBeUndefined(); + await expect(store.getTxEffect(TxHash.random())).resolves.toBeUndefined(); }); it.each([ @@ -241,7 +241,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch }); it('returns undefined if tx is not found', async () => { - await expect(store.getTxEffect(new TxHash(Fr.random().toBuffer()))).resolves.toBeUndefined(); + await expect(store.getTxEffect(TxHash.random())).resolves.toBeUndefined(); }); }); @@ -645,7 +645,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch it('"txHash" filter param is ignored when "afterLog" is set', async () => { // Get random txHash - const txHash = new TxHash(randomBytes(TxHash.SIZE)); + const txHash = TxHash.random(); const afterLog = new LogId(1, 0, 0); const response = await store.getUnencryptedLogs({ txHash, afterLog }); diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 942c544b26f8..4a417906496a 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ 
b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -10,7 +10,7 @@ import { type WorldStateSynchronizer, mockTxForRollup, } from '@aztec/circuit-types'; -import { type ContractDataSource, EthAddress, Fr, MaxBlockNumber } from '@aztec/circuits.js'; +import { type ContractDataSource, EthAddress, Fr, GasFees, MaxBlockNumber } from '@aztec/circuits.js'; import { type P2P } from '@aztec/p2p'; import { type GlobalVariableBuilder } from '@aztec/sequencer-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -37,8 +37,9 @@ describe('aztec node', () => { p2p = mock(); globalVariablesBuilder = mock(); - merkleTreeOps = mock(); + globalVariablesBuilder.getCurrentBaseFees.mockResolvedValue(new GasFees(0, 0)); + merkleTreeOps = mock(); merkleTreeOps.findLeafIndices.mockImplementation((_treeId: MerkleTreeId, _value: any[]) => { return Promise.resolve([undefined]); }); @@ -99,14 +100,14 @@ describe('aztec node', () => { const doubleSpendWithExistingTx = txs[1]; lastBlockNumber += 1; - expect(await node.isValidTx(doubleSpendTx)).toBe(true); + expect(await node.isValidTx(doubleSpendTx)).toEqual({ result: 'valid' }); // We push a duplicate nullifier that was created in the same transaction doubleSpendTx.data.forRollup!.end.nullifiers.push(doubleSpendTx.data.forRollup!.end.nullifiers[0]); - expect(await node.isValidTx(doubleSpendTx)).toBe(false); + expect(await node.isValidTx(doubleSpendTx)).toEqual({ result: 'invalid', reason: ['Duplicate nullifier in tx'] }); - expect(await node.isValidTx(doubleSpendWithExistingTx)).toBe(true); + expect(await node.isValidTx(doubleSpendWithExistingTx)).toEqual({ result: 'valid' }); // We make a nullifier from `doubleSpendWithExistingTx` a part of the nullifier tree, so it gets rejected as double spend const doubleSpendNullifier = doubleSpendWithExistingTx.data.forRollup!.end.nullifiers[0].toBuffer(); @@ -116,7 +117,10 @@ describe('aztec node', () => { ); }); - expect(await node.isValidTx(doubleSpendWithExistingTx)).toBe(false); + expect(await node.isValidTx(doubleSpendWithExistingTx)).toEqual({ + result: 'invalid', + reason: ['Existing nullifier'], + }); lastBlockNumber = 0; }); @@ -124,12 +128,12 @@ describe('aztec node', () => { const tx = mockTxForRollup(0x10000); tx.data.constants.txContext.chainId = chainId; - expect(await node.isValidTx(tx)).toBe(true); + expect(await node.isValidTx(tx)).toEqual({ result: 'valid' }); // We make the chain id on the tx not equal to the configured chain id - tx.data.constants.txContext.chainId = new Fr(1n + chainId.value); + tx.data.constants.txContext.chainId = new Fr(1n + chainId.toBigInt()); - expect(await node.isValidTx(tx)).toBe(false); + expect(await node.isValidTx(tx)).toEqual({ result: 'invalid', reason: ['Incorrect chain id'] }); }); it('tests that the node correctly validates max block numbers', async () => { @@ -159,11 +163,14 @@ describe('aztec node', () => { lastBlockNumber = 3; // Default tx with no max block number should be valid - expect(await node.isValidTx(noMaxBlockNumberMetadata)).toBe(true); + expect(await node.isValidTx(noMaxBlockNumberMetadata)).toEqual({ result: 'valid' }); // Tx with max block number < current block number should be invalid - expect(await node.isValidTx(invalidMaxBlockNumberMetadata)).toBe(false); + expect(await node.isValidTx(invalidMaxBlockNumberMetadata)).toEqual({ + result: 'invalid', + reason: ['Invalid block number'], + }); // Tx with max block number >= current block number should be valid - expect(await 
node.isValidTx(validMaxBlockNumberMetadata)).toBe(true); + expect(await node.isValidTx(validMaxBlockNumberMetadata)).toEqual({ result: 'valid' }); }); }); }); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 545a6d13d7f0..4b882875a945 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -16,7 +16,6 @@ import { NullifierMembershipWitness, type NullifierWithBlockSource, P2PClientType, - type ProcessedTx, type ProverConfig, PublicDataWitness, PublicSimulationOutput, @@ -29,7 +28,7 @@ import { TxReceipt, type TxScopedL2Log, TxStatus, - type TxValidator, + type TxValidationResult, type WorldStateSynchronizer, tryStop, } from '@aztec/circuit-types'; @@ -52,8 +51,9 @@ import { type PrivateLog, type ProtocolContractAddresses, type PublicDataTreeLeafPreimage, + REGISTERER_CONTRACT_ADDRESS, } from '@aztec/circuits.js'; -import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; +import { computePublicDataTreeLeafSlot, siloNullifier } from '@aztec/circuits.js/hash'; import { EpochCache } from '@aztec/epoch-cache'; import { type L1ContractAddresses, createEthereumChain } from '@aztec/ethereum'; import { AztecAddress } from '@aztec/foundation/aztec-address'; @@ -63,17 +63,16 @@ import { DateProvider, Timer } from '@aztec/foundation/timer'; import { type AztecKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/lmdb'; import { SHA256Trunc, StandardTree, UnbalancedTree } from '@aztec/merkle-tree'; -import { - AggregateTxValidator, - DataTxValidator, - DoubleSpendTxValidator, - MetadataTxValidator, - type P2P, - TxProofValidator, - createP2PClient, -} from '@aztec/p2p'; +import { type P2P, createP2PClient } from '@aztec/p2p'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; -import { GlobalVariableBuilder, type L1Publisher, SequencerClient, createSlasherClient } from '@aztec/sequencer-client'; +import { + GlobalVariableBuilder, + type L1Publisher, + SequencerClient, + createSlasherClient, + createValidatorForAcceptingTxs, + getDefaultAllowedSetupFunctions, +} from '@aztec/sequencer-client'; import { PublicProcessorFactory } from '@aztec/simulator'; import { Attributes, type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -348,8 +347,25 @@ export class AztecNodeService implements AztecNode, Traceable { return Promise.resolve(this.l1ChainId); } - public getContractClass(id: Fr): Promise { - return this.contractDataSource.getContractClass(id); + public async getContractClass(id: Fr): Promise { + const klazz = await this.contractDataSource.getContractClass(id); + + // TODO(#10007): Remove this check. This is needed only because we're manually registering + // some contracts in the archiver so they are available to all nodes (see `registerCommonContracts` + // in `archiver/src/factory.ts`), but we still want clients to send the registration tx in order + // to emit the corresponding nullifier, which is now being checked. Note that this method + // is only called by the PXE to check if a contract is publicly registered. 
+ if (klazz) { + const classNullifier = siloNullifier(AztecAddress.fromNumber(REGISTERER_CONTRACT_ADDRESS), id); + const worldState = await this.#getWorldState('latest'); + const [index] = await worldState.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, [classNullifier.toBuffer()]); + this.log.debug(`Registration nullifier ${classNullifier} for contract class ${id} found at index ${index}`); + if (index === undefined) { + return undefined; + } + } + + return klazz; } public getContract(address: AztecAddress): Promise { @@ -400,15 +416,21 @@ export class AztecNodeService implements AztecNode, Traceable { */ public async sendTx(tx: Tx) { const timer = new Timer(); - this.log.info(`Received tx ${tx.getTxHash()}`); + const txHash = tx.getTxHash().toString(); - if (!(await this.isValidTx(tx))) { + const valid = await this.isValidTx(tx); + if (valid.result !== 'valid') { + const reason = valid.reason.join(', '); this.metrics.receivedTx(timer.ms(), false); + this.log.warn(`Invalid tx ${txHash}: ${reason}`, { txHash }); + // TODO(#10967): Throw when receiving an invalid tx instead of just returning + // throw new Error(`Invalid tx: ${reason}`); return; } await this.p2pClient!.sendTx(tx); this.metrics.receivedTx(timer.ms(), true); + this.log.info(`Received tx ${tx.getTxHash()}`, { txHash }); } public async getTxReceipt(txHash: TxHash): Promise { @@ -860,34 +882,19 @@ export class AztecNodeService implements AztecNode, Traceable { } } - public async isValidTx(tx: Tx, isSimulation: boolean = false): Promise { + public async isValidTx(tx: Tx, isSimulation: boolean = false): Promise { const blockNumber = (await this.blockSource.getBlockNumber()) + 1; const db = this.worldStateSynchronizer.getCommitted(); - // These validators are taken from the sequencer, and should match. - // The reason why `phases` and `gas` tx validator is in the sequencer and not here is because - // those tx validators are customizable by the sequencer. - const txValidators: TxValidator[] = [ - new DataTxValidator(), - new MetadataTxValidator(new Fr(this.l1ChainId), new Fr(blockNumber)), - new DoubleSpendTxValidator({ - getNullifierIndices: nullifiers => db.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers), - }), - ]; - - if (!isSimulation) { - txValidators.push(new TxProofValidator(this.proofVerifier)); - } - - const txValidator = new AggregateTxValidator(...txValidators); - - const [_, invalidTxs] = await txValidator.validateTxs([tx]); - if (invalidTxs.length > 0) { - this.log.warn(`Rejecting tx ${tx.getTxHash()} because of validation errors`); - - return false; - } + const verifier = isSimulation ? undefined : this.proofVerifier; + const validator = createValidatorForAcceptingTxs(db, this.contractDataSource, verifier, { + blockNumber, + l1ChainId: this.l1ChainId, + enforceFees: !!this.config.enforceFees, + setupAllowList: this.config.allowedInSetup ?? 
getDefaultAllowedSetupFunctions(), + gasFees: await this.getCurrentBaseFees(), + }); - return true; + return await validator.validateTx(tx); } public async setConfig(config: Partial): Promise { diff --git a/yarn-project/aztec.js/src/contract/sent_tx.test.ts b/yarn-project/aztec.js/src/contract/sent_tx.test.ts index c0f7daf8a57b..2d03462f19f3 100644 --- a/yarn-project/aztec.js/src/contract/sent_tx.test.ts +++ b/yarn-project/aztec.js/src/contract/sent_tx.test.ts @@ -1,4 +1,5 @@ import { type PXE, TxHash, type TxReceipt, TxStatus } from '@aztec/circuit-types'; +import { Fr } from '@aztec/circuits.js'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -12,7 +13,7 @@ describe('SentTx', () => { beforeEach(() => { pxe = mock(); - txHashPromise = Promise.resolve(TxHash.fromBigInt(1n)); + txHashPromise = Promise.resolve(new TxHash(new Fr(1n))); sentTx = new SentTx(pxe, txHashPromise); }); diff --git a/yarn-project/aztec.js/src/utils/cheat_codes.ts b/yarn-project/aztec.js/src/utils/cheat_codes.ts index 507460ca4d94..284632a677ba 100644 --- a/yarn-project/aztec.js/src/utils/cheat_codes.ts +++ b/yarn-project/aztec.js/src/utils/cheat_codes.ts @@ -256,7 +256,7 @@ export class AztecCheatCodes { * @returns The notes stored at the given slot */ public async loadPrivate(owner: AztecAddress, contract: AztecAddress, slot: Fr | bigint): Promise { - const extendedNotes = await this.pxe.getIncomingNotes({ + const extendedNotes = await this.pxe.getNotes({ owner, contractAddress: contract, storageSlot: new Fr(slot), diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index 48199b089275..7087fc3414b5 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -3,9 +3,9 @@ import { type EventMetadataDefinition, type ExtendedNote, type GetUnencryptedLogsResponse, - type IncomingNotesFilter, type L2Block, type LogFilter, + type NotesFilter, type PXE, type PXEInfo, type PrivateExecutionResult, @@ -134,8 +134,8 @@ export abstract class BaseWallet implements Wallet { getTxReceipt(txHash: TxHash): Promise { return this.pxe.getTxReceipt(txHash); } - getIncomingNotes(filter: IncomingNotesFilter): Promise { - return this.pxe.getIncomingNotes(filter); + getNotes(filter: NotesFilter): Promise { + return this.pxe.getNotes(filter); } getPublicStorageAt(contract: AztecAddress, storageSlot: Fr): Promise { return this.pxe.getPublicStorageAt(contract, storageSlot); diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 714b0362802d..559d5343028d 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -17,6 +17,7 @@ "build": "yarn clean && tsc -b", "start": "node --no-warnings ./dest/bin", "start:debug": "node --inspect=0.0.0.0:9221 --no-warnings ./dest/bin", + "start:sandbox": "ETHEREUM_HOST=http://0.0.0.0:8545/ && yarn start start --sandbox", "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", diff --git a/yarn-project/aztec/src/cli/aztec_start_action.ts b/yarn-project/aztec/src/cli/aztec_start_action.ts index 0e45a8914632..c45892eccc13 100644 --- a/yarn-project/aztec/src/cli/aztec_start_action.ts +++ b/yarn-project/aztec/src/cli/aztec_start_action.ts @@ -102,7 +102,7 @@ export async function aztecStart(options: any, userLog: LogFn, debugLogger: Logg installSignalHandlers(debugLogger.info, signalHandlers); if 
(Object.entries(services).length > 0) { - const rpcServer = createNamespacedSafeJsonRpcServer(services, debugLogger); + const rpcServer = createNamespacedSafeJsonRpcServer(services, false, debugLogger); const { port } = await startHttpRpcServer(rpcServer, { port: options.port }); debugLogger.info(`Aztec Server listening on port ${port}`); } } diff --git a/yarn-project/blob-sink/.eslintrc.cjs b/yarn-project/blob-sink/.eslintrc.cjs new file mode 100644 index 000000000000..e659927475c0 --- /dev/null +++ b/yarn-project/blob-sink/.eslintrc.cjs @@ -0,0 +1 @@ +module.exports = require('@aztec/foundation/eslint'); diff --git a/yarn-project/blob-sink/README.md b/yarn-project/blob-sink/README.md new file mode 100644 index 000000000000..649e8eab867a --- /dev/null +++ b/yarn-project/blob-sink/README.md @@ -0,0 +1,21 @@ +## Blob Sink + +An HTTP API that loosely emulates the https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars API. +We do not support all of the possible values of `block_id` (namely `genesis`, `head`, and `finalized`), as we are not using any of these values in our +blobs integration. + +## When is this used? + +This service will run alongside end-to-end tests to capture the blob transactions that are sent alongside a `propose` transaction. + +### Why? + +Once we make the transition to blob transactions, we will need to be able to query for blobs. One way to do this is to run an entire L1 execution layer and consensus layer pair alongside all of our e2e tests and inside the sandbox. But this is a bit much, so instead the blob sink can be used to store and request blobs, without needing to run an entire execution and consensus layer client pair. + +### Other Use Cases + +Blobs are only held in the L1 consensus layer for a period of ~3 weeks; the blob sink can be used to store blobs for longer. + +### How? + +The blob sink is a simple HTTP server that can be run alongside the e2e tests. It will store the blobs in a local file system and provide an API to query for them.
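To make the API shape concrete, here is a minimal sketch of a round trip against a locally running sink. It assumes the sink is on its default port (5052) and that a global `fetch` is available (Node 18+); the request and response shapes mirror the tests and `server.ts` below.

```typescript
import { Blob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

const BLOB_SINK_URL = 'http://localhost:5052'; // default port, per server.ts
const blockId = '0x1234';
const blob = Blob.fromFields([Fr.random(), Fr.random()]);

// Store a blob sidecar for a block (same payload shape the e2e tests use).
await fetch(`${BLOB_SINK_URL}/blob_sidecar`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ block_id: blockId, blobs: [{ index: 0, blob: blob.toBuffer() }] }),
});

// Fetch it back, beacon-API style; `indices` optionally filters to a subset.
const res = await fetch(`${BLOB_SINK_URL}/eth/v1/beacon/blob_sidecars/${blockId}?indices=0`);
const { data } = await res.json(); // [{ blob, index, kzg_commitment, kzg_proof }]
console.log(data[0].kzg_commitment);
```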
diff --git a/yarn-project/blob-sink/package.json b/yarn-project/blob-sink/package.json new file mode 100644 index 000000000000..7090bf995270 --- /dev/null +++ b/yarn-project/blob-sink/package.json @@ -0,0 +1,84 @@ +{ + "name": "@aztec/blob-sink", + "version": "0.1.0", + "type": "module", + "exports": { + ".": "./dest/index.js" + }, + "inherits": [ + "../package.common.json" + ], + "scripts": { + "build": "yarn clean && tsc -b", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "test": "HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} RAYON_NUM_THREADS=${RAYON_NUM_THREADS:-4} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests --maxWorkers=${JEST_MAX_WORKERS:-8}" + }, + "jest": { + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src", + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" + } + } + } + ] + }, + "extensionsToTreatAsEsm": [ + ".ts" + ], + "reporters": [ + "default" + ], + "testTimeout": 30000, + "setupFiles": [ + "../../foundation/src/jest/setup.mjs" + ] + }, + "dependencies": { + "@aztec/circuit-types": "workspace:^", + "@aztec/foundation": "workspace:^", + "@aztec/kv-store": "workspace:*", + "@aztec/telemetry-client": "workspace:*", + "express": "^4.21.1", + "source-map-support": "^0.5.21", + "tslib": "^2.4.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "@types/memdown": "^3.0.0", + "@types/node": "^18.7.23", + "@types/source-map-support": "^0.5.10", + "@types/supertest": "^6.0.2", + "jest": "^29.5.0", + "jest-mock-extended": "^3.0.3", + "supertest": "^7.0.0", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "types": "./dest/index.d.ts", + "engines": { + "node": ">=18" + } +} diff --git a/yarn-project/blob-sink/src/blob-sink.test.ts b/yarn-project/blob-sink/src/blob-sink.test.ts new file mode 100644 index 000000000000..3107b953d47c --- /dev/null +++ b/yarn-project/blob-sink/src/blob-sink.test.ts @@ -0,0 +1,134 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import request from 'supertest'; + +import { BlobSinkServer } from './server.js'; + +describe('BlobSinkService', () => { + let service: BlobSinkServer; + + beforeEach(async () => { + service = new BlobSinkServer({ + port: 0, // Using port 0 lets the OS assign a random available port + }); + await service.start(); + }); + + afterEach(async () => { + await service.stop(); + }); + + describe('should store and retrieve a blob sidecar', () => { + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const testFields2 = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blob2 = Blob.fromFields(testFields2); + const blockId = '0x1234'; + + beforeEach(async () => { + // Post the blob + const postResponse = await request(service.getApp()) + .post('/blob_sidecar') + .send({ + // eslint-disable-next-line camelcase + block_id: blockId, + blobs: [ + { + index: 0, + blob: blob.toBuffer(), + }, + { + index: 1, + blob: blob2.toBuffer(), + }, + ], + }); + + expect(postResponse.status).toBe(200); + }); 
+ + it('should retrieve the blob', async () => { + // Retrieve the blob + const getResponse = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}`); + + expect(getResponse.status).toBe(200); + + // Convert the response blob back to a Blob object and verify it matches + const retrievedBlobs = getResponse.body.data; + + const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); + const retrievedBlob2 = Blob.fromBuffer(Buffer.from(retrievedBlobs[1].blob, 'hex')); + expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + expect(retrievedBlob2.fieldsHash.toString()).toBe(blob2.fieldsHash.toString()); + expect(retrievedBlob2.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should retrieve specific indices', async () => { + // We can also request specific indices + const getWithIndices = await request(service.getApp()).get( + `/eth/v1/beacon/blob_sidecars/${blockId}?indices=0,1`, + ); + + expect(getWithIndices.status).toBe(200); + expect(getWithIndices.body.data.length).toBe(2); + + const retrievedBlobs = getWithIndices.body.data; + const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); + const retrievedBlob2 = Blob.fromBuffer(Buffer.from(retrievedBlobs[1].blob, 'hex')); + expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + expect(retrievedBlob2.fieldsHash.toString()).toBe(blob2.fieldsHash.toString()); + expect(retrievedBlob2.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should retrieve a single index', async () => { + const getWithIndices = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}?indices=1`); + + expect(getWithIndices.status).toBe(200); + expect(getWithIndices.body.data.length).toBe(1); + + const retrievedBlobs = getWithIndices.body.data; + const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); + expect(retrievedBlob.fieldsHash.toString()).toBe(blob2.fieldsHash.toString()); + expect(retrievedBlob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + }); + + it('should return an error if invalid indices are provided', async () => { + const blockId = '0x1234'; + + const response = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}?indices=word`); + expect(response.status).toBe(400); + expect(response.body.error).toBe('Invalid indices parameter'); + }); + + it('should return an error if the block ID is invalid (POST)', async () => { + const response = await request(service.getApp()).post('/blob_sidecar').send({ + // eslint-disable-next-line camelcase + block_id: undefined, + }); + + expect(response.status).toBe(400); + }); + + it('should return an error if the block ID is invalid (GET)', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/invalid-id'); + + expect(response.status).toBe(400); + }); + + it('should return 404 for non-existent blob', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/0x999999'); + + expect(response.status).toBe(404); + }); + + it('should reject negative block IDs', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/-123'); +
expect(response.status).toBe(400); + expect(response.body.error).toBe('Invalid block_id parameter'); + }); +}); diff --git a/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts new file mode 100644 index 000000000000..2636ec726d9b --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts @@ -0,0 +1,142 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import { BlobWithIndex } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export function describeBlobStore(getBlobStore: () => BlobStore) { + let blobStore: BlobStore; + + beforeEach(() => { + blobStore = getBlobStore(); + }); + + it('should store and retrieve a blob', async () => { + // Create a test blob with random fields + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blockId = '0x12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + + // Store the blob + await blobStore.addBlobSidecars(blockId, [blobWithIndex]); + + // Retrieve the blob + const retrievedBlobs = await blobStore.getBlobSidecars(blockId); + const [retrievedBlob] = retrievedBlobs!; + + // Verify the blob was retrieved and matches + expect(retrievedBlob).toBeDefined(); + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('Should allow requesting a specific index of blob', async () => { + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blockId = '0x12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + const blobWithIndex2 = new BlobWithIndex(blob, 1); + + await blobStore.addBlobSidecars(blockId, [blobWithIndex, blobWithIndex2]); + + const retrievedBlobs = await blobStore.getBlobSidecars(blockId, [0]); + const [retrievedBlob] = retrievedBlobs!; + + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + + const retrievedBlobs2 = await blobStore.getBlobSidecars(blockId, [1]); + const [retrievedBlob2] = retrievedBlobs2!; + + expect(retrievedBlob2.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('Differentiate between blockHash and slot', async () => { + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const testFieldsSlot = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blobSlot = Blob.fromFields(testFieldsSlot); + const blockId = '0x12345'; + const slot = '12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + const blobWithIndexSlot = new BlobWithIndex(blobSlot, 0); + + await blobStore.addBlobSidecars(blockId, [blobWithIndex]); + await blobStore.addBlobSidecars(slot, [blobWithIndexSlot]); + + const retrievedBlobs = await blobStore.getBlobSidecars(blockId, [0]); + const [retrievedBlob] = retrievedBlobs!; + + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + + const retrievedBlobs2 = await blobStore.getBlobSidecars(slot, [0]); + const [retrievedBlob2] = retrievedBlobs2!; + + 
expect(retrievedBlob2.blob.fieldsHash.toString()).toBe(blobSlot.fieldsHash.toString()); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blobSlot.commitment.toString('hex')); + }); + + it('should return undefined for non-existent blob', async () => { + const nonExistentBlob = await blobStore.getBlobSidecars('999999'); + expect(nonExistentBlob).toBeUndefined(); + }); + + it('should handle multiple blobs with different block IDs', async () => { + // Create two different blobs + const blob1 = Blob.fromFields([Fr.random(), Fr.random()]); + const blob2 = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + // Store both blobs + await blobStore.addBlobSidecars('1', [blobWithIndex1]); + await blobStore.addBlobSidecars('2', [blobWithIndex2]); + + // Retrieve and verify both blobs + const retrieved1 = await blobStore.getBlobSidecars('1'); + const retrieved2 = await blobStore.getBlobSidecars('2'); + const [retrievedBlob1] = retrieved1!; + const [retrievedBlob2] = retrieved2!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should overwrite blob when using same block ID', async () => { + // Create two different blobs + const originalBlob = Blob.fromFields([Fr.random()]); + const newBlob = Blob.fromFields([Fr.random(), Fr.random()]); + const blockId = '1'; + const originalBlobWithIndex = new BlobWithIndex(originalBlob, 0); + const newBlobWithIndex = new BlobWithIndex(newBlob, 0); + + // Store original blob + await blobStore.addBlobSidecars(blockId, [originalBlobWithIndex]); + + // Overwrite with new blob + await blobStore.addBlobSidecars(blockId, [newBlobWithIndex]); + + // Retrieve and verify it's the new blob + const retrievedBlobs = await blobStore.getBlobSidecars(blockId); + const [retrievedBlob] = retrievedBlobs!; + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(newBlob.commitment.toString('hex')); + expect(retrievedBlob.blob.commitment.toString('hex')).not.toBe(originalBlob.commitment.toString('hex')); + }); + + it('should handle multiple blobs with the same block ID', async () => { + const blob1 = Blob.fromFields([Fr.random()]); + const blob2 = Blob.fromFields([Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + await blobStore.addBlobSidecars('1', [blobWithIndex1, blobWithIndex2]); + const retrievedBlobs = await blobStore.getBlobSidecars('1'); + const [retrievedBlob1, retrievedBlob2] = retrievedBlobs!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); +} diff --git a/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts new file mode 100644 index 000000000000..8b523dbaef14 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts @@ -0,0 +1,8 @@ +import { openTmpStore } from '@aztec/kv-store/lmdb'; + +import { describeBlobStore } from './blob_store_test_suite.js'; +import { DiskBlobStore } from './disk_blob_store.js'; + +describe('DiskBlobStore', () => { + describeBlobStore(() => new DiskBlobStore(openTmpStore())); +}); diff --git 
a/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts new file mode 100644 index 000000000000..63e4dc10ab6e --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts @@ -0,0 +1,32 @@ +import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; + +import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export class DiskBlobStore implements BlobStore { + blobs: AztecMap; + + constructor(store: AztecKVStore) { + this.blobs = store.openMap('blobs'); + } + + public getBlobSidecars(blockId: string, indices?: number[]): Promise { + const blobBuffer = this.blobs.get(`${blockId}`); + if (!blobBuffer) { + return Promise.resolve(undefined); + } + + const blobsWithIndexes = BlobsWithIndexes.fromBuffer(blobBuffer); + if (indices) { + // If indices are provided, return the blobs at the specified indices + return Promise.resolve(blobsWithIndexes.getBlobsFromIndices(indices)); + } + // If no indices are provided, return all blobs + return Promise.resolve(blobsWithIndexes.blobs); + } + + public async addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise { + await this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer()); + return Promise.resolve(); + } +} diff --git a/yarn-project/blob-sink/src/blobstore/index.ts b/yarn-project/blob-sink/src/blobstore/index.ts new file mode 100644 index 000000000000..fd3901930cfc --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/index.ts @@ -0,0 +1,3 @@ +export * from './memory_blob_store.js'; +export * from './disk_blob_store.js'; +export * from './interface.js'; diff --git a/yarn-project/blob-sink/src/blobstore/interface.ts b/yarn-project/blob-sink/src/blobstore/interface.ts new file mode 100644 index 000000000000..27d7fac25c29 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/interface.ts @@ -0,0 +1,12 @@ +import { type BlobWithIndex } from '../types/index.js'; + +export interface BlobStore { + /** + * Get a blob by block id + */ + getBlobSidecars: (blockId: string, indices?: number[]) => Promise; + /** + * Add a blob to the store + */ + addBlobSidecars: (blockId: string, blobSidecars: BlobWithIndex[]) => Promise; +} diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts new file mode 100644 index 000000000000..2f13926cd1a4 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts @@ -0,0 +1,6 @@ +import { describeBlobStore } from './blob_store_test_suite.js'; +import { MemoryBlobStore } from './memory_blob_store.js'; + +describe('MemoryBlobStore', () => { + describeBlobStore(() => new MemoryBlobStore()); +}); diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts new file mode 100644 index 000000000000..efe013f9b01b --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts @@ -0,0 +1,25 @@ +import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export class MemoryBlobStore implements BlobStore { + private blobs: Map = new Map(); + + public getBlobSidecars(blockId: string, indices?: number[]): Promise { + const blobBuffer = this.blobs.get(blockId); + if (!blobBuffer) { + return Promise.resolve(undefined); + } + const blobsWithIndexes = BlobsWithIndexes.fromBuffer(blobBuffer); + if (indices) { + 
// If indices are provided, return the blobs at the specified indices + return Promise.resolve(blobsWithIndexes.getBlobsFromIndices(indices)); + } + // If no indices are provided, return all blobs + return Promise.resolve(blobsWithIndexes.blobs); + } + + public addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise { + this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer()); + return Promise.resolve(); + } +} diff --git a/yarn-project/blob-sink/src/config.ts b/yarn-project/blob-sink/src/config.ts new file mode 100644 index 000000000000..e18311f9f1d2 --- /dev/null +++ b/yarn-project/blob-sink/src/config.ts @@ -0,0 +1,7 @@ +import { type DataStoreConfig } from '@aztec/kv-store/config'; + +export interface BlobSinkConfig { + port?: number; + dataStoreConfig?: DataStoreConfig; + otelMetricsCollectorUrl?: string; +} diff --git a/yarn-project/blob-sink/src/factory.ts b/yarn-project/blob-sink/src/factory.ts new file mode 100644 index 000000000000..43a0df8e6c38 --- /dev/null +++ b/yarn-project/blob-sink/src/factory.ts @@ -0,0 +1,27 @@ +import { type AztecKVStore } from '@aztec/kv-store'; +import { createStore } from '@aztec/kv-store/lmdb'; +import { type TelemetryClient } from '@aztec/telemetry-client'; + +import { type BlobSinkConfig } from './config.js'; +import { BlobSinkServer } from './server.js'; + +// If data store settings are provided, the store is created and returned. +// Otherwise, undefined is returned and an in-memory store will be used. +async function getDataStoreConfig(config?: BlobSinkConfig): Promise { + if (!config?.dataStoreConfig) { + return undefined; + } + return await createStore('blob-sink', config.dataStoreConfig); +} + +/** + * Creates a blob sink service from the provided config. + */ +export async function createBlobSinkServer( + config?: BlobSinkConfig, + telemetry?: TelemetryClient, +): Promise { + const store = await getDataStoreConfig(config); + + return new BlobSinkServer(config, store, telemetry); +} diff --git a/yarn-project/blob-sink/src/index.ts b/yarn-project/blob-sink/src/index.ts new file mode 100644 index 000000000000..25844130c2f7 --- /dev/null +++ b/yarn-project/blob-sink/src/index.ts @@ -0,0 +1,3 @@ +export * from './server.js'; +export * from './config.js'; +export * from './factory.js'; diff --git a/yarn-project/blob-sink/src/metrics.ts b/yarn-project/blob-sink/src/metrics.ts new file mode 100644 index 000000000000..28e2b6308c07 --- /dev/null +++ b/yarn-project/blob-sink/src/metrics.ts @@ -0,0 +1,27 @@ +import { type Histogram, Metrics, type TelemetryClient, type UpDownCounter } from '@aztec/telemetry-client'; + +import { type BlobWithIndex } from './types/blob_with_index.js'; + +export class BlobSinkMetrics { + /** The number of blobs in the blob store */ + private objectsInBlobStore: UpDownCounter; + + /** Tracks blob size */ + private blobSize: Histogram; + + constructor(telemetry: TelemetryClient) { + const name = 'BlobSink'; + this.objectsInBlobStore = telemetry.getMeter(name).createUpDownCounter(Metrics.BLOB_SINK_OBJECTS_IN_BLOB_STORE, { + description: 'The current number of blobs in the blob store', + }); + + this.blobSize = telemetry.getMeter(name).createHistogram(Metrics.BLOB_SINK_BLOB_SIZE, { + description: 'The non-zero size of blobs in the blob store', + }); + } + + public recordBlobReceipt(blobs: BlobWithIndex[]) { + this.objectsInBlobStore.add(blobs.length); + blobs.forEach(b => this.blobSize.record(b.blob.getSize())); + } +} diff --git a/yarn-project/blob-sink/src/server.ts
b/yarn-project/blob-sink/src/server.ts new file mode 100644 index 000000000000..45c79f6991dd --- /dev/null +++ b/yarn-project/blob-sink/src/server.ts @@ -0,0 +1,170 @@ +import { Blob } from '@aztec/foundation/blob'; +import { type Logger, createLogger } from '@aztec/foundation/log'; +import { type AztecKVStore } from '@aztec/kv-store'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +import express, { type Express, type Request, type Response, json } from 'express'; +import { type Server } from 'http'; +import { z } from 'zod'; + +import { type BlobStore, DiskBlobStore } from './blobstore/index.js'; +import { MemoryBlobStore } from './blobstore/memory_blob_store.js'; +import { type BlobSinkConfig } from './config.js'; +import { BlobSinkMetrics } from './metrics.js'; +import { type PostBlobSidecarRequest, blockIdSchema, indicesSchema } from './types/api.js'; +import { BlobWithIndex } from './types/index.js'; + +/** + * Example usage: + * const service = new BlobSinkServer({ port: 5052 }); + * await service.start(); + * ... later ... + * await service.stop(); + */ +export class BlobSinkServer { + public readonly port: number; + + private app: Express; + private server: Server | null = null; + private blobStore: BlobStore; + private metrics: BlobSinkMetrics; + private log: Logger = createLogger('aztec:blob-sink'); + + constructor(config?: BlobSinkConfig, store?: AztecKVStore, telemetry: TelemetryClient = new NoopTelemetryClient()) { + this.port = config?.port ?? 5052; // 5052 is beacon chain default http port + this.app = express(); + + // Setup middleware + this.app.use(json({ limit: '1mb' })); // Increase the limit to allow for a blob to be sent + + this.metrics = new BlobSinkMetrics(telemetry); + + this.blobStore = store === undefined ?
new MemoryBlobStore() : new DiskBlobStore(store); + + // Setup routes + this.setupRoutes(); + } + + private setupRoutes() { + this.app.get('/eth/v1/beacon/blob_sidecars/:block_id', this.handleGetBlobSidecar.bind(this)); + this.app.post('/blob_sidecar', this.handlePostBlobSidecar.bind(this)); + } + + private async handleGetBlobSidecar(req: Request, res: Response) { + // eslint-disable-next-line camelcase + const { block_id } = req.params; + const { indices } = req.query; + + try { + // eslint-disable-next-line camelcase + const parsedBlockId = blockIdSchema.safeParse(block_id); + if (!parsedBlockId.success) { + res.status(400).json({ + error: 'Invalid block_id parameter', + }); + return; + } + + const parsedIndices = indicesSchema.safeParse(indices); + if (!parsedIndices.success) { + res.status(400).json({ + error: 'Invalid indices parameter', + }); + return; + } + + const blobs = await this.blobStore.getBlobSidecars(parsedBlockId.data.toString(), parsedIndices.data); + + if (!blobs) { + res.status(404).json({ error: 'Blob not found' }); + return; + } + + res.json({ + version: 'deneb', + data: blobs.map(blob => blob.toJSON()), + }); + } catch (error) { + if (error instanceof z.ZodError) { + res.status(400).json({ + error: 'Invalid block_id parameter', + details: error.errors, + }); + } else { + res.status(500).json({ + error: 'Internal server error', + }); + } + } + } + + private async handlePostBlobSidecar(req: Request, res: Response) { + // eslint-disable-next-line camelcase + const { block_id, blobs } = req.body; + + try { + // eslint-disable-next-line camelcase + const parsedBlockId = blockIdSchema.parse(block_id); + if (!parsedBlockId) { + res.status(400).json({ + error: 'Invalid block_id parameter', + }); + return; + } + + this.log.info(`Received blob sidecar for block ${parsedBlockId}`); + + const blobObjects: BlobWithIndex[] = this.parseBlobData(blobs); + + await this.blobStore.addBlobSidecars(parsedBlockId.toString(), blobObjects); + this.metrics.recordBlobReceipt(blobObjects); + + this.log.info(`Blob sidecar stored successfully for block ${parsedBlockId}`); + + res.json({ message: 'Blob sidecar stored successfully' }); + } catch (error) { + res.status(400).json({ + error: 'Invalid blob data', + }); + } + } + + private parseBlobData(blobs: PostBlobSidecarRequest['blobs']): BlobWithIndex[] { + return blobs.map(({ index, blob }) => new BlobWithIndex(Blob.fromBuffer(Buffer.from(blob.data)), index)); + } + + public start(): Promise { + return new Promise(resolve => { + this.server = this.app.listen(this.port, () => { + this.log.info(`Server is running on http://localhost:${this.port}`); + resolve(); + }); + }); + } + + public stop(): Promise { + this.log.info('Stopping blob sink'); + return new Promise((resolve, reject) => { + if (!this.server) { + resolve(); + this.log.info('Blob sink already stopped'); + return; + } + + this.server.close(err => { + if (err) { + reject(err); + return; + } + this.server = null; + this.log.info('Blob sink stopped'); + resolve(); + }); + }); + } + + public getApp(): Express { + return this.app; + } +} diff --git a/yarn-project/blob-sink/src/types/api.ts b/yarn-project/blob-sink/src/types/api.ts new file mode 100644 index 000000000000..cd408ecdedb5 --- /dev/null +++ b/yarn-project/blob-sink/src/types/api.ts @@ -0,0 +1,49 @@ +import { z } from 'zod'; + +export interface PostBlobSidecarRequest { + // eslint-disable-next-line camelcase + block_id: string; + blobs: Array<{ + index: number; + blob: { + type: string; + data: string; + }; + }>; +} + +export
diff --git a/yarn-project/blob-sink/src/types/api.ts b/yarn-project/blob-sink/src/types/api.ts new file mode 100644 index 000000000000..cd408ecdedb5 --- /dev/null +++ b/yarn-project/blob-sink/src/types/api.ts @@ -0,0 +1,49 @@ +import { z } from 'zod'; + +export interface PostBlobSidecarRequest { + // eslint-disable-next-line camelcase + block_id: string; + blobs: Array<{ + index: number; + blob: { + type: string; + data: string; + }; + }>; +} + +export const blockRootSchema = z + .string() + .regex(/^0x[0-9a-fA-F]{0,64}$/) + .max(66); +export const slotSchema = z.number().int().positive(); + +// Define the Zod schema for an array of numbers +export const indicesSchema = z.optional( + z + .string() + .refine(str => str.split(',').every(item => !isNaN(Number(item))), { + message: 'All items in the query must be valid numbers.', + }) + .transform(str => str.split(',').map(Number)), +); // Convert to an array of numbers + +// Validation schemas +// Block identifier. Can be one of: <hex encoded block root with 0x prefix>, <slot>. +// Note the spec https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars does allow for "head", "genesis", "finalized" as valid block ids, +// but we explicitly do not support these values. +export const blockIdSchema = blockRootSchema.or(slotSchema); + +export const postBlobSidecarSchema = z.object({ + // eslint-disable-next-line camelcase + block_id: blockIdSchema, + blobs: z.array( + z.object({ + index: z.number(), + blob: z.object({ + type: z.string(), + data: z.string(), + }), + }), + ), +}); diff --git a/yarn-project/blob-sink/src/types/blob_with_index.test.ts b/yarn-project/blob-sink/src/types/blob_with_index.test.ts new file mode 100644 index 000000000000..d29c6b98b887 --- /dev/null +++ b/yarn-project/blob-sink/src/types/blob_with_index.test.ts @@ -0,0 +1,31 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import { BlobWithIndex, BlobsWithIndexes } from './blob_with_index.js'; + +describe('BlobWithIndex Serde', () => { + it('should serialize and deserialize', () => { + const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobWithIndex = new BlobWithIndex(blob, 0); + const serialized = blobWithIndex.toBuffer(); + + const deserialized = BlobWithIndex.fromBuffer(serialized); + + expect(blobWithIndex).toEqual(deserialized); + }); +}); + +describe('BlobsWithIndexes Serde', () => { + it('should serialize and deserialize', () => { + const blobs = [ + new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 0), + new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 1), + ]; + const blobsWithIndexes = new BlobsWithIndexes(blobs); + + const serialized = blobsWithIndexes.toBuffer(); + const deserialized = BlobsWithIndexes.fromBuffer(serialized); + + expect(deserialized).toEqual(blobsWithIndexes); + }); +}); diff --git a/yarn-project/blob-sink/src/types/blob_with_index.ts b/yarn-project/blob-sink/src/types/blob_with_index.ts new file mode 100644 index 000000000000..60446f2ff160 --- /dev/null +++ b/yarn-project/blob-sink/src/types/blob_with_index.ts @@ -0,0 +1,51 @@ +import { Blob } from '@aztec/foundation/blob'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +/** Serializes an array of blobs with their indexes to be stored at a given block id */ +export class BlobsWithIndexes { + constructor(public blobs: BlobWithIndex[]) {} + + public toBuffer(): Buffer { + return serializeToBuffer(this.blobs.length, this.blobs); + } + + public static fromBuffer(buffer: Buffer | BufferReader): BlobsWithIndexes { + const reader = BufferReader.asReader(buffer); + return new BlobsWithIndexes(reader.readArray(reader.readNumber(), BlobWithIndex)); + } + + public getBlobsFromIndices(indices: number[]): BlobWithIndex[] { + return this.blobs.filter((_, index) => indices.includes(index)); + } +} + +/** We store blobs alongside their index in the block */ +export class BlobWithIndex { + constructor( + /**
The blob */ + public blob: Blob, + /** The index of the blob in the block */ + public index: number, + ) {} + + public toBuffer(): Buffer { + return serializeToBuffer([this.blob, this.index]); + } + + public static fromBuffer(buffer: Buffer | BufferReader): BlobWithIndex { + const reader = BufferReader.asReader(buffer); + return new BlobWithIndex(reader.readObject(Blob), reader.readNumber()); + } + + // Follows the structure the beacon node api expects + public toJSON(): { blob: string; index: number; kzg_commitment: string; kzg_proof: string } { + return { + blob: this.blob.toBuffer().toString('hex'), + index: this.index, + // eslint-disable-next-line camelcase + kzg_commitment: this.blob.commitment.toString('hex'), + // eslint-disable-next-line camelcase + kzg_proof: this.blob.proof.toString('hex'), + }; + } +} diff --git a/yarn-project/blob-sink/src/types/index.ts b/yarn-project/blob-sink/src/types/index.ts new file mode 100644 index 000000000000..396b8fc805ed --- /dev/null +++ b/yarn-project/blob-sink/src/types/index.ts @@ -0,0 +1 @@ +export * from './blob_with_index.js'; diff --git a/yarn-project/blob-sink/tsconfig.json b/yarn-project/blob-sink/tsconfig.json new file mode 100644 index 000000000000..535eabe58633 --- /dev/null +++ b/yarn-project/blob-sink/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "references": [ + { + "path": "../circuit-types" + }, + { + "path": "../foundation" + }, + { + "path": "../kv-store" + }, + { + "path": "../telemetry-client" + } + ], + "include": ["src"] +} diff --git a/yarn-project/bot/src/rpc.ts b/yarn-project/bot/src/rpc.ts index 3fe11e372e1a..cca9df44e9c1 100644 --- a/yarn-project/bot/src/rpc.ts +++ b/yarn-project/bot/src/rpc.ts @@ -9,7 +9,7 @@ import { type BotRunner } from './runner.js'; * @returns An JSON-RPC HTTP server */ export function createBotRunnerRpcServer(botRunner: BotRunner) { - createSafeJsonRpcServer(botRunner, BotRunnerApiSchema, botRunner.isHealthy.bind(botRunner)); + createSafeJsonRpcServer(botRunner, BotRunnerApiSchema, false, botRunner.isHealthy.bind(botRunner)); } export function getBotRunnerApiHandler(botRunner: BotRunner): ApiHandler { diff --git a/yarn-project/circuit-types/src/interfaces/archiver.test.ts b/yarn-project/circuit-types/src/interfaces/archiver.test.ts index 4824b95365a4..05ad80021174 100644 --- a/yarn-project/circuit-types/src/interfaces/archiver.test.ts +++ b/yarn-project/circuit-types/src/interfaces/archiver.test.ts @@ -100,12 +100,12 @@ describe('ArchiverApiSchema', () => { }); it('getTxEffect', async () => { - const result = await context.client.getTxEffect(new TxHash(Buffer.alloc(32, 1))); + const result = await context.client.getTxEffect(TxHash.fromBuffer(Buffer.alloc(32, 1))); expect(result!.data).toBeInstanceOf(TxEffect); }); it('getSettledTxReceipt', async () => { - const result = await context.client.getSettledTxReceipt(new TxHash(Buffer.alloc(32, 1))); + const result = await context.client.getSettledTxReceipt(TxHash.fromBuffer(Buffer.alloc(32, 1))); expect(result).toBeInstanceOf(TxReceipt); }); diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts index 3bc333609d67..a771e4a6dcc2 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts @@ -40,6 +40,7 @@ import { MerkleTreeId } from '../merkle_tree_id.js'; import { 
EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; import { PublicDataWitness } from '../public_data_witness.js'; import { SiblingPath } from '../sibling_path/sibling_path.js'; +import { type TxValidationResult } from '../tx/index.js'; import { PublicSimulationOutput } from '../tx/public_simulation_output.js'; import { Tx } from '../tx/tx.js'; import { TxHash } from '../tx/tx_hash.js'; @@ -293,9 +294,14 @@ describe('AztecNodeApiSchema', () => { expect(response).toBeInstanceOf(PublicSimulationOutput); }); - it('isValidTx', async () => { + it('isValidTx(valid)', async () => { + const response = await context.client.isValidTx(Tx.random(), true); + expect(response).toEqual({ result: 'valid' }); + }); + + it('isValidTx(invalid)', async () => { const response = await context.client.isValidTx(Tx.random()); - expect(response).toBe(true); + expect(response).toEqual({ result: 'invalid', reason: ['Invalid'] }); }); it('setConfig', async () => { @@ -559,9 +565,9 @@ class MockAztecNode implements AztecNode { expect(tx).toBeInstanceOf(Tx); return Promise.resolve(PublicSimulationOutput.random()); } - isValidTx(tx: Tx, _isSimulation?: boolean | undefined): Promise<boolean> { + isValidTx(tx: Tx, isSimulation?: boolean | undefined): Promise<TxValidationResult> { expect(tx).toBeInstanceOf(Tx); - return Promise.resolve(true); + return Promise.resolve(isSimulation ? { result: 'valid' } : { result: 'invalid', reason: ['Invalid'] }); } setConfig(config: Partial<SequencerConfig & ProverConfig>): Promise<void> { expect(config.coinbase).toBeInstanceOf(EthAddress); diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 17f7fe16cdb2..9a1de505eb68 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -38,7 +38,14 @@ import { MerkleTreeId } from '../merkle_tree_id.js'; import { EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; import { PublicDataWitness } from '../public_data_witness.js'; import { SiblingPath } from '../sibling_path/index.js'; -import { PublicSimulationOutput, Tx, TxHash, TxReceipt } from '../tx/index.js'; +import { + PublicSimulationOutput, + Tx, + TxHash, + TxReceipt, + type TxValidationResult, + TxValidationResultSchema, +} from '../tx/index.js'; import { TxEffect } from '../tx_effect.js'; import { type SequencerConfig, SequencerConfigSchema } from './configs.js'; import { type L2BlockNumber, L2BlockNumberSchema } from './l2_block_number.js'; @@ -395,7 +402,7 @@ export interface AztecNode * @param tx - The transaction to validate for correctness. * @param isSimulation - True if the transaction is a simulated one without generated proofs. (Optional) */ - isValidTx(tx: Tx, isSimulation?: boolean): Promise<boolean>; + isValidTx(tx: Tx, isSimulation?: boolean): Promise<TxValidationResult>; /** * Updates the configuration of this node.
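Callers that used to branch on the old boolean now have to inspect the discriminated union. A minimal sketch of the new calling convention, assuming `node` is any `AztecNode` client and `tx` a transaction already in scope:

```ts
// Sketch: consuming TxValidationResult instead of a boolean.
async function assertTxValid(node: AztecNode, tx: Tx): Promise<void> {
  const result = await node.isValidTx(tx);
  if (result.result !== 'valid') {
    // Narrowed to the 'invalid' | 'skipped' variants, both of which carry reasons.
    throw new Error(`Tx rejected (${result.result}): ${result.reason.join(', ')}`);
  }
}
```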
@@ -567,7 +574,7 @@ export const AztecNodeApiSchema: ApiSchemaFor<AztecNode> = { simulatePublicCalls: z.function().args(Tx.schema, optional(z.boolean())).returns(PublicSimulationOutput.schema), - isValidTx: z.function().args(Tx.schema, optional(z.boolean())).returns(z.boolean()), + isValidTx: z.function().args(Tx.schema, optional(z.boolean())).returns(TxValidationResultSchema), setConfig: z.function().args(SequencerConfigSchema.merge(ProverConfigSchema).partial()).returns(z.void()), diff --git a/yarn-project/circuit-types/src/interfaces/configs.ts b/yarn-project/circuit-types/src/interfaces/configs.ts index baafd3642945..32a6514d2eef 100644 --- a/yarn-project/circuit-types/src/interfaces/configs.ts +++ b/yarn-project/circuit-types/src/interfaces/configs.ts @@ -20,6 +20,10 @@ export interface SequencerConfig { maxTxsPerBlock?: number; /** The minimum number of txs to include in a block. */ minTxsPerBlock?: number; + /** The maximum L2 block gas. */ + maxL2BlockGas?: number; + /** The maximum DA block gas. */ + maxDABlockGas?: number; /** Recipient of block reward. */ coinbase?: EthAddress; /** Address to receive fees. */ @@ -53,6 +57,8 @@ export const SequencerConfigSchema = z.object({ transactionPollingIntervalMS: z.number().optional(), maxTxsPerBlock: z.number().optional(), minTxsPerBlock: z.number().optional(), + maxL2BlockGas: z.number().optional(), + maxDABlockGas: z.number().optional(), coinbase: schemas.EthAddress.optional(), feeRecipient: schemas.AztecAddress.optional(), acvmWorkingDirectory: z.string().optional(), @@ -60,6 +66,7 @@ allowedInSetup: z.array(AllowedElementSchema).optional(), maxBlockSizeInBytes: z.number().optional(), enforceFees: z.boolean().optional(), - gerousiaPayload: schemas.EthAddress.optional(), + governanceProposerPayload: schemas.EthAddress.optional(), maxL1TxInclusionTimeIntoSlot: z.number().optional(), + enforceTimeTable: z.boolean().optional(), }) satisfies ZodFor<SequencerConfig>; diff --git a/yarn-project/circuit-types/src/interfaces/pxe.test.ts b/yarn-project/circuit-types/src/interfaces/pxe.test.ts index ad3018dc8d65..88af066544c9 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.test.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.test.ts @@ -34,8 +34,8 @@ import { AuthWitness } from '../auth_witness.js'; import { type InBlock } from '../in_block.js'; import { L2Block } from '../l2_block.js'; import { ExtendedUnencryptedL2Log, type GetUnencryptedLogsResponse, type LogFilter } from '../logs/index.js'; -import { type IncomingNotesFilter } from '../notes/incoming_notes_filter.js'; import { ExtendedNote, UniqueNote } from '../notes/index.js'; +import { type NotesFilter } from '../notes/notes_filter.js'; import { PrivateExecutionResult } from '../private_execution_result.js'; import { type EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; import { SiblingPath } from '../sibling_path/sibling_path.js'; @@ -190,8 +190,8 @@ describe('PXESchema', () => { expect(result).toBeInstanceOf(Fr); }); - it('getIncomingNotes', async () => { - const result = await context.client.getIncomingNotes({ contractAddress: address }); + it('getNotes', async () => { + const result = await context.client.getNotes({ contractAddress: address }); expect(result).toEqual([expect.any(UniqueNote)]); }); @@ -409,7 +409,7 @@ class MockPXE implements PXE { expect(slot).toBeInstanceOf(Fr); return Promise.resolve(Fr.random()); } - getIncomingNotes(filter: IncomingNotesFilter): Promise<UniqueNote[]> { + getNotes(filter: NotesFilter): Promise<UniqueNote[]> { expect(filter.contractAddress).toEqual(this.address); return Promise.resolve([UniqueNote.random()]); }
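Call sites migrate mechanically from `getIncomingNotes` to `getNotes`; the filter fields still apply as an intersection. A sketch, where `pxe`, `receipt` and `contract` stand in for values a caller already has:

```ts
// Sketch: fetch the active notes a tx created for a given contract.
const activeNotes = await pxe.getNotes({
  txHash: receipt.txHash,
  contractAddress: contract.address,
  status: NoteStatus.ACTIVE,
});
```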
diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index d70216ef15c0..06669355b195 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -36,8 +36,8 @@ import { type LogFilter, LogFilterSchema, } from '../logs/index.js'; -import { type IncomingNotesFilter, IncomingNotesFilterSchema } from '../notes/incoming_notes_filter.js'; import { ExtendedNote, UniqueNote } from '../notes/index.js'; +import { type NotesFilter, NotesFilterSchema } from '../notes/notes_filter.js'; import { PrivateExecutionResult } from '../private_execution_result.js'; import { SiblingPath } from '../sibling_path/sibling_path.js'; import { Tx, TxHash, TxProvingResult, TxReceipt, TxSimulationResult } from '../tx/index.js'; @@ -232,11 +232,11 @@ export interface PXE { getPublicStorageAt(contract: AztecAddress, slot: Fr): Promise<Fr>; /** - * Gets incoming notes of accounts registered in this PXE based on the provided filter. + * Gets notes registered in this PXE based on the provided filter. * @param filter - The filter to apply to the notes. * @returns The requested notes. */ - getIncomingNotes(filter: IncomingNotesFilter): Promise<UniqueNote[]>; + getNotes(filter: NotesFilter): Promise<UniqueNote[]>; /** * Fetches an L1 to L2 message from the node. @@ -483,7 +483,7 @@ export const PXESchema: ApiSchemaFor<PXE> = { .args(TxHash.schema) .returns(z.union([inBlockSchemaFor(TxEffect.schema), z.undefined()])), getPublicStorageAt: z.function().args(schemas.AztecAddress, schemas.Fr).returns(schemas.Fr), - getIncomingNotes: z.function().args(IncomingNotesFilterSchema).returns(z.array(UniqueNote.schema)), + getNotes: z.function().args(NotesFilterSchema).returns(z.array(UniqueNote.schema)), getL1ToL2MembershipWitness: z .function() .args(schemas.AztecAddress, schemas.Fr, schemas.Fr) diff --git a/yarn-project/circuit-types/src/logs/get_logs_response.ts b/yarn-project/circuit-types/src/logs/get_logs_response.ts index 62b2ff6d833e..13aab991d0db 100644 --- a/yarn-project/circuit-types/src/logs/get_logs_response.ts +++ b/yarn-project/circuit-types/src/logs/get_logs_response.ts @@ -73,7 +73,7 @@ export class TxScopedL2Log { static fromBuffer(buffer: Buffer) { const reader = BufferReader.asReader(buffer); return new TxScopedL2Log( - TxHash.fromField(reader.readObject(Fr)), + reader.readObject(TxHash), reader.readNumber(), reader.readNumber(), reader.readBoolean(), diff --git a/yarn-project/circuit-types/src/logs/log_id.ts b/yarn-project/circuit-types/src/logs/log_id.ts index fe718fb3821c..ec1080799cec 100644 --- a/yarn-project/circuit-types/src/logs/log_id.ts +++ b/yarn-project/circuit-types/src/logs/log_id.ts @@ -34,7 +34,7 @@ export class LogId { static random() { return new LogId( - Math.floor(Math.random() * 1000), + Math.floor(Math.random() * 1000) + 1, Math.floor(Math.random() * 1000), Math.floor(Math.random() * 100), ); diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index 9909b1e179ed..54b55ca1e5d7 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -34,7 +34,7 @@ import { PublicExecutionRequest } from './public_execution_request.js'; import { PublicSimulationOutput, Tx, TxHash, TxSimulationResult, accumulatePrivateReturnValues } from './tx/index.js'; import { TxEffect } from './tx_effect.js'; -export const randomTxHash = (): TxHash => new
TxHash(randomBytes(32)); +export const randomTxHash = (): TxHash => TxHash.random(); export const mockPrivateExecutionResult = ( seed = 1, diff --git a/yarn-project/circuit-types/src/notes/extended_note.ts b/yarn-project/circuit-types/src/notes/extended_note.ts index cac982b01b88..85c8bda5ed21 100644 --- a/yarn-project/circuit-types/src/notes/extended_note.ts +++ b/yarn-project/circuit-types/src/notes/extended_note.ts @@ -1,7 +1,7 @@ import { AztecAddress, Fr } from '@aztec/circuits.js'; import { NoteSelector } from '@aztec/foundation/abi'; import { schemas } from '@aztec/foundation/schemas'; -import { BufferReader } from '@aztec/foundation/serialize'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { bufferToHex, hexToBuffer } from '@aztec/foundation/string'; import { z } from 'zod'; @@ -29,25 +29,25 @@ export class ExtendedNote { ) {} toBuffer(): Buffer { - return Buffer.concat([ - this.note.toBuffer(), - this.owner.toBuffer(), - this.contractAddress.toBuffer(), - this.storageSlot.toBuffer(), - this.noteTypeId.toBuffer(), - this.txHash.buffer, + return serializeToBuffer([ + this.note, + this.owner, + this.contractAddress, + this.storageSlot, + this.noteTypeId, + this.txHash, ]); } static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); - const note = Note.fromBuffer(reader); - const owner = AztecAddress.fromBuffer(reader); - const contractAddress = AztecAddress.fromBuffer(reader); - const storageSlot = Fr.fromBuffer(reader); + const note = reader.readObject(Note); + const owner = reader.readObject(AztecAddress); + const contractAddress = reader.readObject(AztecAddress); + const storageSlot = reader.readObject(Fr); const noteTypeId = reader.readObject(NoteSelector); - const txHash = new TxHash(reader.readBytes(TxHash.SIZE)); + const txHash = reader.readObject(TxHash); return new this(note, owner, contractAddress, storageSlot, noteTypeId, txHash); } @@ -124,14 +124,14 @@ export class UniqueNote extends ExtendedNote { } override toBuffer(): Buffer { - return Buffer.concat([ - this.note.toBuffer(), - this.owner.toBuffer(), - this.contractAddress.toBuffer(), - this.storageSlot.toBuffer(), - this.noteTypeId.toBuffer(), - this.txHash.buffer, - this.nonce.toBuffer(), + return serializeToBuffer([ + this.note, + this.owner, + this.contractAddress, + this.storageSlot, + this.noteTypeId, + this.txHash, + this.nonce, ]); } @@ -150,13 +150,13 @@ export class UniqueNote extends ExtendedNote { static override fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); - const note = Note.fromBuffer(reader); - const owner = AztecAddress.fromBuffer(reader); - const contractAddress = AztecAddress.fromBuffer(reader); - const storageSlot = Fr.fromBuffer(reader); + const note = reader.readObject(Note); + const owner = reader.readObject(AztecAddress); + const contractAddress = reader.readObject(AztecAddress); + const storageSlot = reader.readObject(Fr); const noteTypeId = reader.readObject(NoteSelector); - const txHash = new TxHash(reader.readBytes(TxHash.SIZE)); - const nonce = Fr.fromBuffer(reader); + const txHash = reader.readObject(TxHash); + const nonce = reader.readObject(Fr); return new this(note, owner, contractAddress, storageSlot, noteTypeId, txHash, nonce); } diff --git a/yarn-project/circuit-types/src/notes/index.ts b/yarn-project/circuit-types/src/notes/index.ts index d926d03e99a5..925b57252ea8 100644 --- a/yarn-project/circuit-types/src/notes/index.ts +++ 
b/yarn-project/circuit-types/src/notes/index.ts @@ -1,4 +1,4 @@ export * from './comparator.js'; export * from './extended_note.js'; -export * from './incoming_notes_filter.js'; +export * from './notes_filter.js'; export * from './note_status.js'; diff --git a/yarn-project/circuit-types/src/notes/incoming_notes_filter.ts b/yarn-project/circuit-types/src/notes/notes_filter.ts similarity index 82% rename from yarn-project/circuit-types/src/notes/incoming_notes_filter.ts rename to yarn-project/circuit-types/src/notes/notes_filter.ts index 0f2911c3a58b..f2ce6696aeba 100644 --- a/yarn-project/circuit-types/src/notes/incoming_notes_filter.ts +++ b/yarn-project/circuit-types/src/notes/notes_filter.ts @@ -7,10 +7,10 @@ import { TxHash } from '../tx/tx_hash.js'; import { NoteStatus } from './note_status.js'; /** - * A filter used to fetch incoming notes. + * A filter used to fetch notes. * @remarks This filter is applied as an intersection of all its params. */ -export type IncomingNotesFilter = { +export type NotesFilter = { /** Hash of a transaction from which to fetch the notes. */ txHash?: TxHash; /** The contract address the note belongs to. */ @@ -23,11 +23,11 @@ export type IncomingNotesFilter = { status?: NoteStatus; /** The siloed nullifier for the note. */ siloedNullifier?: Fr; - /** The scopes in which to get incoming notes from. This defaults to all scopes. */ + /** The scopes in which to get notes from. This defaults to all scopes. */ scopes?: AztecAddress[]; }; -export const IncomingNotesFilterSchema: ZodFor = z.object({ +export const NotesFilterSchema: ZodFor = z.object({ txHash: TxHash.schema.optional(), contractAddress: schemas.AztecAddress.optional(), storageSlot: schemas.Fr.optional(), diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index 4ac0babe6905..d4fea987a548 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -10,7 +10,7 @@ import { z } from 'zod'; import { ConsensusPayload } from './consensus_payload.js'; import { Gossipable } from './gossipable.js'; -import { SignatureDomainSeperator, getHashedSignaturePayloadEthSignedMessage } from './signature_utils.js'; +import { SignatureDomainSeparator, getHashedSignaturePayloadEthSignedMessage } from './signature_utils.js'; import { TopicType, createTopicString } from './topic_type.js'; export class BlockAttestationHash extends Buffer32 { @@ -65,7 +65,7 @@ export class BlockAttestation extends Gossipable { getSender() { if (!this.sender) { // Recover the sender from the attestation - const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeperator.blockAttestation); + const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeparator.blockAttestation); // Cache the sender for later use this.sender = recoverAddress(hashed, this.signature); } @@ -74,7 +74,7 @@ export class BlockAttestation extends Gossipable { } getPayload(): Buffer { - return this.payload.getPayloadToSign(SignatureDomainSeperator.blockAttestation); + return this.payload.getPayloadToSign(SignatureDomainSeparator.blockAttestation); } toBuffer(): Buffer { diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 207312ba4a1d..b65b0870833e 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -8,7 +8,7 @@ import 
{ BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { ConsensusPayload } from './consensus_payload.js'; import { Gossipable } from './gossipable.js'; import { - SignatureDomainSeperator, + SignatureDomainSeparator, getHashedSignaturePayload, getHashedSignaturePayloadEthSignedMessage, } from './signature_utils.js'; @@ -57,7 +57,7 @@ export class BlockProposal extends Gossipable { payload: ConsensusPayload, payloadSigner: (payload: Buffer32) => Promise<Signature>, ) { - const hashed = getHashedSignaturePayload(payload, SignatureDomainSeperator.blockProposal); + const hashed = getHashedSignaturePayload(payload, SignatureDomainSeparator.blockProposal); const sig = await payloadSigner(hashed); return new BlockProposal(payload, sig); @@ -68,7 +68,7 @@ */ getSender() { if (!this.sender) { - const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeperator.blockProposal); + const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeparator.blockProposal); // Cache the sender for later use this.sender = recoverAddress(hashed, this.signature); } @@ -77,7 +77,7 @@ } getPayload() { - return this.payload.getPayloadToSign(SignatureDomainSeperator.blockProposal); + return this.payload.getPayloadToSign(SignatureDomainSeparator.blockProposal); } toBuffer(): Buffer { diff --git a/yarn-project/circuit-types/src/p2p/consensus_payload.ts b/yarn-project/circuit-types/src/p2p/consensus_payload.ts index 37fedc508da7..b1e723690130 100644 --- a/yarn-project/circuit-types/src/p2p/consensus_payload.ts +++ b/yarn-project/circuit-types/src/p2p/consensus_payload.ts @@ -8,7 +8,7 @@ import { encodeAbiParameters, parseAbiParameters } from 'viem'; import { z } from 'zod'; import { TxHash } from '../tx/tx_hash.js'; -import { type Signable, type SignatureDomainSeperator } from './signature_utils.js'; +import { type Signable, type SignatureDomainSeparator } from './signature_utils.js'; export class ConsensusPayload implements Signable { private size: number | undefined; @@ -36,11 +36,11 @@ export class ConsensusPayload implements Signable { return [fields.header, fields.archive, fields.txHashes] as const; } - getPayloadToSign(domainSeperator: SignatureDomainSeperator): Buffer { + getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer { const abi = parseAbiParameters('uint8, (bytes32, bytes32, (uint256, uint256), bytes, bytes32[])'); const txArray = this.txHashes.map(tx => tx.toString()); const encodedData = encodeAbiParameters(abi, [ - domainSeperator, + domainSeparator, [ this.archive.toString(), this.header.hash().toString(), diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index 1e4e99ac0420..8896980843b8 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -7,7 +7,7 @@ import { TxHash } from '../tx/tx_hash.js'; import { BlockAttestation } from './block_attestation.js'; import { BlockProposal } from './block_proposal.js'; import { ConsensusPayload } from './consensus_payload.js'; -import { SignatureDomainSeperator, getHashedSignaturePayloadEthSignedMessage } from './signature_utils.js'; +import { SignatureDomainSeparator, getHashedSignaturePayloadEthSignedMessage } from './signature_utils.js'; export interface MakeConsensusPayloadOptions { signer?: Secp256k1Signer; @@ -17,7 +17,7 @@ }
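The Seperator-to-Separator rename is mechanical, but it is worth spelling out what the domain separator buys: it is the leading `uint8` in `getPayloadToSign`, so the same `ConsensusPayload` hashes to different digests for proposals and attestations, and a signature over one can never be replayed as the other. A sketch, assuming a `payload` instance in scope:

```ts
// Sketch: the two domains produce distinct signing digests for one payload.
const proposalDigest = getHashedSignaturePayload(payload, SignatureDomainSeparator.blockProposal);
const attestationDigest = getHashedSignaturePayload(payload, SignatureDomainSeparator.blockAttestation);
console.assert(proposalDigest.toString() !== attestationDigest.toString());
```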
const makeAndSignConsensusPayload = ( - domainSeperator: SignatureDomainSeperator, + domainSeparator: SignatureDomainSeparator, options?: MakeConsensusPayloadOptions, ) => { const { @@ -33,19 +33,19 @@ const makeAndSignConsensusPayload = ( txHashes, }); - const hash = getHashedSignaturePayloadEthSignedMessage(payload, domainSeperator); + const hash = getHashedSignaturePayloadEthSignedMessage(payload, domainSeparator); const signature = signer.sign(hash); return { payload, signature }; }; export const makeBlockProposal = (options?: MakeConsensusPayloadOptions): BlockProposal => { - const { payload, signature } = makeAndSignConsensusPayload(SignatureDomainSeperator.blockProposal, options); + const { payload, signature } = makeAndSignConsensusPayload(SignatureDomainSeparator.blockProposal, options); return new BlockProposal(payload, signature); }; // TODO(https://github.com/AztecProtocol/aztec-packages/issues/8028) export const makeBlockAttestation = (options?: MakeConsensusPayloadOptions): BlockAttestation => { - const { payload, signature } = makeAndSignConsensusPayload(SignatureDomainSeperator.blockAttestation, options); + const { payload, signature } = makeAndSignConsensusPayload(SignatureDomainSeparator.blockAttestation, options); return new BlockAttestation(payload, signature); }; diff --git a/yarn-project/circuit-types/src/p2p/signature_utils.ts b/yarn-project/circuit-types/src/p2p/signature_utils.ts index 25e20ded5d81..4c05ccb38231 100644 --- a/yarn-project/circuit-types/src/p2p/signature_utils.ts +++ b/yarn-project/circuit-types/src/p2p/signature_utils.ts @@ -1,13 +1,13 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { keccak256, makeEthSignDigest } from '@aztec/foundation/crypto'; -export enum SignatureDomainSeperator { +export enum SignatureDomainSeparator { blockProposal = 0, blockAttestation = 1, } export interface Signable { - getPayloadToSign(domainSeperator: SignatureDomainSeperator): Buffer; + getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer; } /** @@ -15,8 +15,8 @@ export interface Signable { * @param s - The `Signable` to sign * @returns The hashed payload for the signature of the `Signable` */ -export function getHashedSignaturePayload(s: Signable, domainSeperator: SignatureDomainSeperator): Buffer32 { - return Buffer32.fromBuffer(keccak256(s.getPayloadToSign(domainSeperator))); +export function getHashedSignaturePayload(s: Signable, domainSeparator: SignatureDomainSeparator): Buffer32 { + return Buffer32.fromBuffer(keccak256(s.getPayloadToSign(domainSeparator))); } /** @@ -26,8 +26,8 @@ export function getHashedSignaturePayload(s: Signable, domainSeperator: Signatur */ export function getHashedSignaturePayloadEthSignedMessage( s: Signable, - domainSeperator: SignatureDomainSeperator, + domainSeparator: SignatureDomainSeparator, ): Buffer32 { - const payload = getHashedSignaturePayload(s, domainSeperator); + const payload = getHashedSignaturePayload(s, domainSeparator); return makeEthSignDigest(payload); } diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index ddbc926647c5..b9c4c465333f 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -103,7 +103,7 @@ export function makeEmptyProcessedTx( clientProofOutput.constants = constants; return { - hash: new TxHash(Fr.ZERO.toBuffer()), + hash: new TxHash(Fr.ZERO), data: clientProofOutput, clientIvcProof: ClientIvcProof.empty(), avmProvingRequest: 
undefined, diff --git a/yarn-project/circuit-types/src/tx/tx.ts b/yarn-project/circuit-types/src/tx/tx.ts index 1e8a03107d19..00b8a8593e53 100644 --- a/yarn-project/circuit-types/src/tx/tx.ts +++ b/yarn-project/circuit-types/src/tx/tx.ts @@ -1,10 +1,12 @@ import { ClientIvcProof, + Fr, PrivateKernelTailCircuitPublicInputs, + PrivateLog, type PrivateToPublicAccumulatedData, type ScopedLogHash, } from '@aztec/circuits.js'; -import { type Buffer32 } from '@aztec/foundation/buffer'; +import { Buffer32 } from '@aztec/foundation/buffer'; import { arraySerializedSizeOfNonEmpty } from '@aztec/foundation/collection'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; @@ -65,7 +67,7 @@ export class Tx extends Gossipable { // Gossipable method override p2pMessageIdentifier(): Buffer32 { - return this.getTxHash(); + return new Buffer32(this.getTxHash().toBuffer()); } hasPublicCalls() { @@ -176,7 +178,7 @@ export class Tx extends Gossipable { if (!firstNullifier || firstNullifier.isZero()) { throw new Error(`Cannot get tx hash since first nullifier is missing`); } - return new TxHash(firstNullifier.toBuffer()); + return new TxHash(firstNullifier); } /** Returns the tx hash, or undefined if none is set. */ @@ -230,6 +232,20 @@ export class Tx extends Gossipable { ); } + /** + * Estimates the tx size based on its private effects. Note that the actual size of the tx + * after processing will probably be larger, as public execution would generate more data. + */ + getEstimatedPrivateTxEffectsSize() { + return ( + this.unencryptedLogs.getSerializedLength() + + this.contractClassLogs.getSerializedLength() + + this.data.getNonEmptyNoteHashes().length * Fr.SIZE_IN_BYTES + + this.data.getNonEmptyNullifiers().length * Fr.SIZE_IN_BYTES + + this.data.getNonEmptyPrivateLogs().length * PrivateLog.SIZE_IN_BYTES + ); + } + /** * Convenience function to get a hash out of a tx or a tx-like. * @param tx - Tx-like object. diff --git a/yarn-project/circuit-types/src/tx/tx_hash.ts b/yarn-project/circuit-types/src/tx/tx_hash.ts index f2477823ab2b..797733689ed7 100644 --- a/yarn-project/circuit-types/src/tx/tx_hash.ts +++ b/yarn-project/circuit-types/src/tx/tx_hash.ts @@ -1,32 +1,66 @@ import { Fr } from '@aztec/circuits.js'; -import { Buffer32 } from '@aztec/foundation/buffer'; import { schemas } from '@aztec/foundation/schemas'; +import { BufferReader } from '@aztec/foundation/serialize'; /** * A class representing hash of Aztec transaction. */ -export class TxHash extends Buffer32 { +export class TxHash { constructor( - /** The buffer containing the hash. */ - hash: Buffer, - ) { - super(hash); - } + /** A field representing the tx hash (tx hash is an output of poseidon hash hence it's a field). */ + public readonly hash: Fr, + ) {} /* * TxHashes are generated from the first nullifier of a transaction, which is a Fr. - * Using Buffer32.random() could potentially generate invalid TxHashes. * @returns A random TxHash. 
*/ - static override random() { - return new TxHash(Fr.random().toBuffer()); + static random() { + return new TxHash(Fr.random()); + } + + static fromBuffer(buffer: Uint8Array | BufferReader) { + const reader = BufferReader.asReader(buffer); + return new this(reader.readObject(Fr)); + } + + static fromString(str: string) { + return new TxHash(Fr.fromString(str)); + } + + static fromBigInt(value: bigint) { + return new TxHash(new Fr(value)); + } + + public toBuffer() { + return this.hash.toBuffer(); + } + + public toString() { + return this.hash.toString(); + } + + public toBigInt() { + return this.hash.toBigInt(); + } + + toJSON() { + return this.toString(); + } + + public equals(other: TxHash) { + return this.hash.equals(other.hash); } static get schema() { - return schemas.BufferHex.transform(value => new TxHash(value)); + return schemas.BufferHex.transform(value => new TxHash(Fr.fromBuffer(value))); } static zero() { - return new TxHash(Buffer32.ZERO.toBuffer()); + return new TxHash(Fr.ZERO); + } + + static get SIZE() { + return Fr.SIZE_IN_BYTES; } } diff --git a/yarn-project/circuit-types/src/tx/validator/empty_validator.ts b/yarn-project/circuit-types/src/tx/validator/empty_validator.ts index 2ea10e7a55ab..ccb15a050721 100644 --- a/yarn-project/circuit-types/src/tx/validator/empty_validator.ts +++ b/yarn-project/circuit-types/src/tx/validator/empty_validator.ts @@ -1,11 +1,7 @@ -import { type AnyTx, type TxValidator } from './tx_validator.js'; +import { type AnyTx, type TxValidationResult, type TxValidator } from './tx_validator.js'; export class EmptyTxValidator<T extends AnyTx = AnyTx> implements TxValidator<T> { - public validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[], skippedTxs: T[]]> { - return Promise.resolve([txs, [], []]); - } - - public validateTx(_tx: T): Promise<boolean> { - return Promise.resolve(true); + public validateTx(_tx: T): Promise<TxValidationResult> { + return Promise.resolve({ result: 'valid' }); } } diff --git a/yarn-project/circuit-types/src/tx/validator/tx_validator.ts b/yarn-project/circuit-types/src/tx/validator/tx_validator.ts index 040d764cf3d5..3928343efca5 100644 --- a/yarn-project/circuit-types/src/tx/validator/tx_validator.ts +++ b/yarn-project/circuit-types/src/tx/validator/tx_validator.ts @@ -1,9 +1,23 @@ +import { type ZodFor } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; + import { type ProcessedTx } from '../processed_tx.js'; import { type Tx } from '../tx.js'; export type AnyTx = Tx | ProcessedTx; +export type TxValidationResult = + | { result: 'valid' } + | { result: 'invalid'; reason: string[] } + | { result: 'skipped'; reason: string[] }; + export interface TxValidator<T extends AnyTx = AnyTx> { - validateTx(tx: T): Promise<boolean>; - validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[], skippedTxs?: T[]]>; + validateTx(tx: T): Promise<TxValidationResult>; } + +export const TxValidationResultSchema = z.discriminatedUnion('result', [ + z.object({ result: z.literal('valid'), reason: z.array(z.string()).optional() }), + z.object({ result: z.literal('invalid'), reason: z.array(z.string()) }), + z.object({ result: z.literal('skipped'), reason: z.array(z.string()) }), +]) satisfies ZodFor<TxValidationResult>; diff --git a/yarn-project/circuit-types/src/tx_effect.ts b/yarn-project/circuit-types/src/tx_effect.ts index 924f2e5bc95c..507827c44941 100644 --- a/yarn-project/circuit-types/src/tx_effect.ts +++ b/yarn-project/circuit-types/src/tx_effect.ts @@ -152,6 +152,11 @@ export class TxEffect { ]); } + /** Returns the size of this tx effect in bytes as serialized onto DA.
*/ + getDASize() { + return this.toBlobFields().length * Fr.SIZE_IN_BYTES; + } + /** * Deserializes the TxEffect object from a Buffer. * @param buffer - Buffer or BufferReader object to deserialize. @@ -550,6 +555,6 @@ export class TxEffect { } get txHash(): TxHash { - return new TxHash(this.nullifiers[0].toBuffer()); + return new TxHash(this.nullifiers[0]); } } diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 00cc1a9beace..80b30fd6343f 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -7,7 +7,7 @@ export const MAX_NULLIFIERS_PER_CALL = 16; export const MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL = 5; export const MAX_ENQUEUED_CALLS_PER_CALL = 16; export const MAX_L2_TO_L1_MSGS_PER_CALL = 2; -export const MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 64; +export const MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 63; export const MAX_PUBLIC_DATA_READS_PER_CALL = 64; export const MAX_NOTE_HASH_READ_REQUESTS_PER_CALL = 16; export const MAX_NULLIFIER_READ_REQUESTS_PER_CALL = 16; @@ -165,7 +165,7 @@ export const TOTAL_MANA_USED_LENGTH = 1; export const BLOCK_HEADER_LENGTH = 25; export const BLOCK_HEADER_LENGTH_BYTES = 648; export const PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 741; -export const PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = 867; +export const PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = 864; export const PRIVATE_CONTEXT_INPUTS_LENGTH = 40; export const FEE_RECIPIENT_LENGTH = 2; export const AGGREGATION_OBJECT_LENGTH = 16; @@ -215,7 +215,7 @@ export const MAX_PUBLIC_BYTECODE_SIZE_IN_BYTES = 96000; export const MAX_PUBLIC_CALLS_TO_UNIQUE_CONTRACT_CLASS_IDS = 21; export const AVM_PROOF_LENGTH_IN_FIELDS = 4155; export const AVM_PUBLIC_COLUMN_MAX_SIZE = 1024; -export const AVM_PUBLIC_INPUTS_FLATTENED_SIZE = 2915; +export const AVM_PUBLIC_INPUTS_FLATTENED_SIZE = 2912; export const MEM_TAG_FF = 0; export const MEM_TAG_U1 = 1; export const MEM_TAG_U8 = 2; @@ -242,11 +242,11 @@ export const START_NULLIFIER_EXISTS_OFFSET = 16; export const START_NULLIFIER_NON_EXISTS_OFFSET = 32; export const START_L1_TO_L2_MSG_EXISTS_WRITE_OFFSET = 48; export const START_SSTORE_WRITE_OFFSET = 64; -export const START_SLOAD_WRITE_OFFSET = 128; -export const START_EMIT_NOTE_HASH_WRITE_OFFSET = 192; -export const START_EMIT_NULLIFIER_WRITE_OFFSET = 208; -export const START_EMIT_L2_TO_L1_MSG_WRITE_OFFSET = 224; -export const START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET = 226; +export const START_SLOAD_WRITE_OFFSET = 127; +export const START_EMIT_NOTE_HASH_WRITE_OFFSET = 191; +export const START_EMIT_NULLIFIER_WRITE_OFFSET = 207; +export const START_EMIT_L2_TO_L1_MSG_WRITE_OFFSET = 223; +export const START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET = 225; export const DEFAULT_GAS_LIMIT = 1000000000; export const MAX_L2_GAS_PER_TX_PUBLIC_PORTION = 6000000; export const DEFAULT_TEARDOWN_GAS_LIMIT = 6000000; diff --git a/yarn-project/circuits.js/src/contract/interfaces/contract_instance.ts b/yarn-project/circuits.js/src/contract/interfaces/contract_instance.ts index a8ea957cfeb8..0d4d0092b8b5 100644 --- a/yarn-project/circuits.js/src/contract/interfaces/contract_instance.ts +++ b/yarn-project/circuits.js/src/contract/interfaces/contract_instance.ts @@ -24,7 +24,7 @@ export interface ContractInstance { contractClassId: Fr; /** Hash of the selector and arguments to the constructor. */ initializationHash: Fr; - /** Public keys associated with this intsance. */ + /** Public keys associated with this instance. 
*/ publicKeys: PublicKeys; } diff --git a/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts b/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts index 7624b088a5e4..aa996cae5a90 100644 --- a/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts +++ b/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts @@ -20,7 +20,7 @@ describe('BlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blob = new Blob(Array(400).fill(new Fr(3))); + const blob = Blob.fromFields(Array(400).fill(new Fr(3))); const converted = BlobPublicInputs.fromBlob(blob); expect(converted.z).toEqual(blob.challengeZ); expect(Buffer.from(converted.y.toString(16), 'hex')).toEqual(blob.evaluationY); @@ -55,7 +55,7 @@ describe('BlockBlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => new Blob(Array(400).fill(new Fr(i + 1)))); + const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => Blob.fromFields(Array(400).fill(new Fr(i + 1)))); const converted = BlockBlobPublicInputs.fromBlobs(blobs); converted.inner.forEach((blobPI, i) => { expect(blobPI.z).toEqual(blobs[i].challengeZ); diff --git a/yarn-project/circuits.js/src/structs/gas.ts b/yarn-project/circuits.js/src/structs/gas.ts index 7952b2cbbbda..956c2b5052d4 100644 --- a/yarn-project/circuits.js/src/structs/gas.ts +++ b/yarn-project/circuits.js/src/structs/gas.ts @@ -78,6 +78,11 @@ export class Gas { return new Gas(Math.ceil(this.daGas * scalar), Math.ceil(this.l2Gas * scalar)); } + /** Returns true if any of this instance's dimensions is greater than the corresponding dimension on the other. */ + gtAny(other: Gas) { + return this.daGas > other.daGas || this.l2Gas > other.l2Gas; + } + computeFee(gasFees: GasFees) { return GasDimensions.reduce( (acc, dimension) => acc.add(gasFees.get(dimension).mul(new Fr(this.get(dimension)))), diff --git a/yarn-project/cli/src/utils/inspect.ts b/yarn-project/cli/src/utils/inspect.ts index 80c87f4c79d0..a1a8148d5593 100644 --- a/yarn-project/cli/src/utils/inspect.ts +++ b/yarn-project/cli/src/utils/inspect.ts @@ -39,10 +39,10 @@ export async function inspectTx( log: LogFn, opts: { includeBlockInfo?: boolean; artifactMap?: ArtifactMap } = {}, ) { - const [receipt, effectsInBlock, incomingNotes] = await Promise.all([ + const [receipt, effectsInBlock, getNotes] = await Promise.all([ pxe.getTxReceipt(txHash), pxe.getTxEffect(txHash), - pxe.getIncomingNotes({ txHash, status: NoteStatus.ACTIVE_OR_NULLIFIED }), + pxe.getNotes({ txHash, status: NoteStatus.ACTIVE_OR_NULLIFIED }), ]); // Base tx data log(`Tx ${txHash.toString()}`); @@ -88,10 +88,10 @@ export async function inspectTx( const notes = effects.noteHashes; if (notes.length > 0) { log(' Created notes:'); - log(` Total: ${notes.length}. Incoming: ${incomingNotes.length}.`); - if (incomingNotes.length) { - log(' Incoming notes:'); - for (const note of incomingNotes) { + log(` Total: ${notes.length}.
Found: ${getNotes.length}.`); + if (getNotes.length) { + log(' Found notes:'); + for (const note of getNotes) { inspectNote(note, artifactMap, log); } } @@ -103,7 +103,7 @@ export async function inspectTx( if (nullifierCount > 0) { log(' Nullifiers:'); for (const nullifier of effects.nullifiers) { - const [note] = await pxe.getIncomingNotes({ siloedNullifier: nullifier }); + const [note] = await pxe.getNotes({ siloedNullifier: nullifier }); const deployed = deployNullifiers[nullifier.toString()]; const initialized = initNullifiers[nullifier.toString()]; const registered = classNullifiers[nullifier.toString()]; diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index b84ebb2f27e7..a9b1f9d2d686 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -32,6 +32,7 @@ "@aztec/aztec-node": "workspace:^", "@aztec/aztec.js": "workspace:^", "@aztec/bb-prover": "workspace:^", + "@aztec/blob-sink": "workspace:^", "@aztec/bot": "workspace:^", "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 7b61fc01c02b..de355e1fae62 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -71,6 +71,9 @@ config.l1RpcUrl = config.l1RpcUrl || 'http://127.0.0.1:8545'; const numberOfConsecutiveBlocks = 2; +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + describe('L1Publisher integration', () => { let publicClient: PublicClient; let walletClient: WalletClient; @@ -182,6 +185,7 @@ describe('L1Publisher integration', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: config.ethereumSlotDuration, + blobSinkUrl: BLOB_SINK_URL, }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts index c5f7b3134d9c..5317a5ff336f 100644 --- a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts +++ b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts @@ -244,9 +244,9 @@ describe('e2e_2_pxes', () => { .send() .wait(); await testContract.methods.sync_notes().simulate(); - const incomingNotes = await walletA.getIncomingNotes({ txHash: receipt.txHash }); - expect(incomingNotes).toHaveLength(1); - note = incomingNotes[0]; + const notes = await walletA.getNotes({ txHash: receipt.txHash }); + expect(notes).toHaveLength(1); + note = notes[0]; } // 3. 
Nullify the note diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts index 03aa384b6858..010eb706ef1a 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts @@ -98,9 +98,9 @@ describe('e2e_blacklist_token_contract mint', () => { // Trigger a note sync await asset.methods.sync_notes().simulate(); // 1 note should have been created containing `amount` of tokens - const visibleIncomingNotes = await wallets[0].getIncomingNotes({ txHash: receiptClaim.txHash }); - expect(visibleIncomingNotes.length).toBe(1); - expect(visibleIncomingNotes[0].note.items[0].toBigInt()).toBe(amount); + const visibleNotes = await wallets[0].getNotes({ txHash: receiptClaim.txHash }); + expect(visibleNotes.length).toBe(1); + expect(visibleNotes[0].note.items[0].toBigInt()).toBe(amount); }); }); diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 6a2ed0213763..4421e007e25a 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -207,7 +207,7 @@ describe('e2e_block_building', () => { // to pick up and validate the txs, so we may need to bump it to work on CI. Note that we need // at least 3s here so the archiver has time to loop once and sync, and the sequencer has at // least 1s to loop. - sequencer.sequencer.timeTable[SequencerState.WAITING_FOR_TXS] = 4; + sequencer.sequencer.timeTable[SequencerState.INITIALIZING_PROPOSAL] = 4; sequencer.sequencer.timeTable[SequencerState.CREATING_BLOCK] = 4; sequencer.sequencer.processTxTime = 1; diff --git a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts index 8918c3eb74a0..a56390821d85 100644 --- a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts +++ b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts @@ -172,12 +172,12 @@ describe('e2e_crowdfunding_and_claim', () => { // Get the notes emitted by the Crowdfunding contract and check that only 1 was emitted (the value note) await crowdfundingContract.withWallet(donorWallets[0]).methods.sync_notes().simulate(); - const incomingNotes = await donorWallets[0].getIncomingNotes({ txHash: donateTxReceipt.txHash }); - const notes = incomingNotes.filter(x => x.contractAddress.equals(crowdfundingContract.address)); - expect(notes!.length).toEqual(1); + const notes = await donorWallets[0].getNotes({ txHash: donateTxReceipt.txHash }); + const filteredNotes = notes.filter(x => x.contractAddress.equals(crowdfundingContract.address)); + expect(filteredNotes!.length).toEqual(1); // Set the value note in a format which can be passed to claim function - valueNote = processUniqueNote(notes![0]); + valueNote = processUniqueNote(filteredNotes![0]); } // 3) We claim the reward token via the Claim contract @@ -243,12 +243,12 @@ describe('e2e_crowdfunding_and_claim', () => { // Get the notes emitted by the Crowdfunding contract and check that only 1 was emitted (the value note) await crowdfundingContract.withWallet(donorWallets[0]).methods.sync_notes().simulate(); - const incomingNotes = await donorWallets[0].getIncomingNotes({ txHash: donateTxReceipt.txHash }); - const notes = incomingNotes.filter(x => x.contractAddress.equals(crowdfundingContract.address)); - 
expect(notes!.length).toEqual(1); + const notes = await donorWallets[0].getNotes({ txHash: donateTxReceipt.txHash }); + const filtered = notes.filter(x => x.contractAddress.equals(crowdfundingContract.address)); + expect(filtered!.length).toEqual(1); // Set the value note in a format which can be passed to claim function - const anotherDonationNote = processUniqueNote(notes![0]); + const anotherDonationNote = processUniqueNote(filtered![0]); // We create an unrelated pxe and wallet without access to the nsk_app that correlates to the npk_m specified in the proof note. let unrelatedWallet: AccountWallet; @@ -299,9 +299,9 @@ describe('e2e_crowdfunding_and_claim', () => { { const receipt = await inclusionsProofsContract.methods.create_note(owner, 5n).send().wait({ debug: true }); await inclusionsProofsContract.methods.sync_notes().simulate(); - const incomingNotes = await wallets[0].getIncomingNotes({ txHash: receipt.txHash }); - expect(incomingNotes.length).toEqual(1); - note = processUniqueNote(incomingNotes[0]); + const notes = await wallets[0].getNotes({ txHash: receipt.txHash }); + expect(notes.length).toEqual(1); + note = processUniqueNote(notes[0]); } // 3) Test the note was included diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts index 1223bcf39584..2700daebfbd5 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts @@ -1,3 +1,4 @@ +import { type AztecNodeService } from '@aztec/aztec-node'; import { AztecAddress, type AztecNode, @@ -76,7 +77,11 @@ describe('e2e_deploy_contract contract class registration', () => { // TODO(#10007) Remove this test as well. 
it('starts archiver with pre-registered common contracts', async () => { const classId = computeContractClassId(getContractClassFromArtifact(TokenContractArtifact)); - expect(await aztecNode.getContractClass(classId)).not.toBeUndefined(); + // The node checks the registration nullifier + expect(await aztecNode.getContractClass(classId)).toBeUndefined(); + // But the archiver does not + const archiver = (aztecNode as AztecNodeService).getContractDataSource(); + expect(await archiver.getContractClass(classId)).toBeDefined(); }); it('registers the contract class on the node', async () => { diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts index 5f2fe0781b4b..d0695e4200b0 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts @@ -1,5 +1,14 @@ import { getDeployedTestAccountsWallets } from '@aztec/accounts/testing'; -import { AztecAddress, type Logger, type PXE, type Wallet, createPXEClient, makeFetch } from '@aztec/aztec.js'; +import { + AztecAddress, + BatchCall, + type Logger, + type PXE, + type Wallet, + createPXEClient, + getContractClassFromArtifact, + makeFetch, +} from '@aztec/aztec.js'; import { CounterContract } from '@aztec/noir-contracts.js/Counter'; import { StatefulTestContract } from '@aztec/noir-contracts.js/StatefulTest'; import { TestContract } from '@aztec/noir-contracts.js/Test'; @@ -30,6 +39,16 @@ describe('e2e_deploy_contract deploy method', () => { logger.debug(`Calling public method on stateful test contract at ${contract.address.toString()}`); await contract.methods.increment_public_value(owner, 84).send().wait(); expect(await contract.methods.get_public_value(owner).simulate()).toEqual(84n); + expect(await pxe.isContractClassPubliclyRegistered(contract.instance.contractClassId)).toBeTrue(); + }); + + // TODO(#10007): Remove this test. Common contracts (ie token contracts) are only distinguished + // because we're manually adding them to the archiver to support provernet. + it('registers a contract class for a common contract', async () => { + const { id: tokenContractClass } = getContractClassFromArtifact(TokenContract.artifact); + expect(await pxe.isContractClassPubliclyRegistered(tokenContractClass)).toBeFalse(); + await TokenContract.deploy(wallet, wallet.getAddress(), 'TOKEN', 'TKN', 18n).send().deployed(); + expect(await pxe.isContractClassPubliclyRegistered(tokenContractClass)).toBeTrue(); }); it('publicly universally deploys and initializes a contract', async () => { @@ -89,9 +108,22 @@ describe('e2e_deploy_contract deploy method', () => { await expect(TestContract.deploy(wallet).prove(opts)).rejects.toThrow(/no function calls needed/i); }); - it.skip('publicly deploys and calls a public function in the same batched call', async () => { - // TODO(@spalladino): Requires being able to read a nullifier on the same tx it was emitted. 
- }); + it('publicly deploys and calls a public contract in the same batched call', async () => { + const owner = wallet.getAddress(); + // Create a contract instance and make the PXE aware of it + logger.debug(`Initializing deploy method`); + const deployMethod = StatefulTestContract.deploy(wallet, owner, owner, 42); + logger.debug(`Creating request/calls to register and deploy contract`); + const deploy = await deployMethod.request(); + logger.debug(`Getting an instance of the not-yet-deployed contract to batch calls to`); + const contract = await StatefulTestContract.at(deployMethod.getInstance().address, wallet); + + // Batch registration, deployment, and public call into same TX + logger.debug(`Creating public calls to run in same batch as deployment`); + const init = contract.methods.increment_public_value(owner, 84).request(); + logger.debug(`Deploying a contract and calling a public function in the same batched call`); + await new BatchCall(wallet, [...deploy.calls, init]).send().wait(); + }, 300_000); it.skip('publicly deploys and calls a public function in a tx in the same block', async () => { // TODO(@spalladino): Requires being able to read a nullifier on the same block it was emitted. diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index be5475755c05..b91d154f1dc9 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -2,7 +2,7 @@ import { type AztecNodeService } from '@aztec/aztec-node'; import { type SentTx, sleep } from '@aztec/aztec.js'; /* eslint-disable-next-line no-restricted-imports */ -import { BlockProposal, SignatureDomainSeperator, getHashedSignaturePayload } from '@aztec/circuit-types'; +import { BlockProposal, SignatureDomainSeparator, getHashedSignaturePayload } from '@aztec/circuit-types'; import { beforeAll, describe, it, jest } from '@jest/globals'; import fs from 'fs'; @@ -92,7 +92,7 @@ describe('e2e_p2p_reex', () => { const signer = (node as any).sequencer.sequencer.validatorClient.validationService.keyStore; const newProposal = new BlockProposal( proposal.payload, - await signer.signMessage(getHashedSignaturePayload(proposal.payload, SignatureDomainSeperator.blockProposal)), + await signer.signMessage(getHashedSignaturePayload(proposal.payload, SignatureDomainSeparator.blockProposal)), ); return (node as any).p2pClient.p2pService.propagate(newProposal); diff --git a/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts b/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts index eb0d5d8b964a..ebda8f38f1fb 100644 --- a/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts @@ -294,8 +294,8 @@ describe('e2e_pending_note_hashes_contract', () => { await deployedContract.methods.sync_notes().simulate(); - const incomingNotes = await wallet.getIncomingNotes({ txHash: txReceipt.txHash }); + const notes = await wallet.getNotes({ txHash: txReceipt.txHash }); - expect(incomingNotes.length).toBe(1); + expect(notes.length).toBe(1); }); }); diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 50857c56baf2..a618bc6ec2e5 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -360,13 +360,23 @@ describe('e2e_synching', () => { return; } - const { teardown, logger, deployL1ContractsValues, config, 
cheatCodes, aztecNode, sequencer, watcher, pxe } = - await setup(0, { - salt: SALT, - l1StartTime: START_TIME, - skipProtocolContracts: true, - assumeProvenThrough, - }); + const { + teardown, + logger, + deployL1ContractsValues, + config, + cheatCodes, + aztecNode, + sequencer, + watcher, + pxe, + blobSink, + } = await setup(0, { + salt: SALT, + l1StartTime: START_TIME, + skipProtocolContracts: true, + assumeProvenThrough, + }); await (aztecNode as any).stop(); await (sequencer as any).stop(); @@ -383,6 +393,7 @@ describe('e2e_synching', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, + blobSinkUrl: `http://localhost:${blobSink?.port ?? 5052}`, }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 929768285c77..42663cf4cc0e 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -14,6 +14,7 @@ import { type Wallet, } from '@aztec/aztec.js'; import { deployInstance, registerContractClass } from '@aztec/aztec.js/deployment'; +import { type BlobSinkServer, createBlobSinkServer } from '@aztec/blob-sink'; import { type DeployL1ContractsArgs, createL1Clients, getL1ContractsConfigEnvVars, l1Artifacts } from '@aztec/ethereum'; import { EthCheatCodesWithState, startAnvil } from '@aztec/ethereum/test'; import { asyncMap } from '@aztec/foundation/async-map'; @@ -29,6 +30,7 @@ import { type Anvil } from '@viem/anvil'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { copySync, removeSync } from 'fs-extra/esm'; import fs from 'fs/promises'; +import getPort from 'get-port'; import { tmpdir } from 'os'; import path, { join } from 'path'; import { type Hex, getContract } from 'viem'; @@ -53,6 +55,7 @@ export type SubsystemsContext = { watcher: AnvilTestWatcher; cheatCodes: CheatCodes; dateProvider: TestDateProvider; + blobSink: BlobSinkServer; directoryToCleanup?: string; }; @@ -254,6 +257,7 @@ async function teardown(context: SubsystemsContext | undefined) { await context.bbConfig?.cleanup(); await context.anvil.stop(); await context.watcher.stop(); + await context.blobSink.stop(); if (context.directoryToCleanup) { await fs.rm(context.directoryToCleanup, { recursive: true, force: true }); } @@ -278,6 +282,8 @@ async function setupFromFresh( ): Promise { logger.verbose(`Initializing state...`); + const blobSinkPort = await getPort(); + // Fetch the AztecNode config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. const aztecNodeConfig: AztecNodeConfig & SetupOptions = { ...getConfigEnvVars(), ...opts }; @@ -291,6 +297,17 @@ async function setupFromFresh( } else { aztecNodeConfig.dataDirectory = statePath; } + aztecNodeConfig.blobSinkUrl = `http://localhost:${blobSinkPort}`; + + // Setup blob sink service + const blobSink = await createBlobSinkServer({ + port: blobSinkPort, + dataStoreConfig: { + dataDirectory: aztecNodeConfig.dataDirectory, + dataStoreMapSizeKB: aztecNodeConfig.dataStoreMapSizeKB, + }, + }); + await blobSink.start(); // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. 
logger.verbose('Starting anvil...'); @@ -407,6 +424,7 @@ async function setupFromFresh( watcher, cheatCodes, dateProvider, + blobSink, directoryToCleanup, }; } @@ -420,12 +438,25 @@ async function setupFromState(statePath: string, logger: Logger): Promise Promise; }; @@ -382,6 +388,11 @@ export async function setup( return await setupWithRemoteEnvironment(publisherHdAccount!, config, logger, numberOfAccounts); } + // Blob sink service - blobs get posted here and served from here + const blobSinkPort = await getPort(); + const blobSink = await createBlobSinkServer({ port: blobSinkPort }); + config.blobSinkUrl = `http://127.0.0.1:${blobSinkPort}`; + const deployL1ContractsValues = opts.deployL1ContractsValues ?? (await setupL1Contracts(config.l1RpcUrl, publisherHdAccount!, logger, opts, chain)); @@ -494,6 +505,7 @@ export async function setup( await anvil?.stop(); await watcher.stop(); + await blobSink?.stop(); if (directoryToCleanup) { logger.verbose(`Cleaning up data directory at ${directoryToCleanup}`); @@ -514,6 +526,7 @@ export async function setup( sequencer, watcher, dateProvider, + blobSink, teardown, }; } @@ -683,7 +696,7 @@ export async function setupCanonicalFeeJuice(pxe: PXE) { .wait(); getLogger().info(`Fee Juice successfully setup. Portal address: ${feeJuicePortalAddress}`); } catch (error) { - getLogger().info(`Fee Juice might have already been setup.`); + getLogger().warn(`Fee Juice might have already been setup. Got error: ${inspect(error)}.`); } } diff --git a/yarn-project/end-to-end/src/flakey_e2e_inclusion_proofs_contract.test.ts b/yarn-project/end-to-end/src/flakey_e2e_inclusion_proofs_contract.test.ts index 7fc07cea2623..b93218fbd606 100644 --- a/yarn-project/end-to-end/src/flakey_e2e_inclusion_proofs_contract.test.ts +++ b/yarn-project/end-to-end/src/flakey_e2e_inclusion_proofs_contract.test.ts @@ -53,7 +53,7 @@ describe('e2e_inclusion_proofs_contract', () => { describe('proves note existence and its nullifier non-existence and nullifier non-existence failure case', () => { // Owner of a note let noteCreationBlockNumber: number; - let noteHashes, visibleIncomingNotes: ExtendedNote[]; + let noteHashes, visibleNotes: ExtendedNote[]; const value = 100n; let validNoteBlockNumber: any; @@ -65,13 +65,13 @@ describe('e2e_inclusion_proofs_contract', () => { ({ noteHashes } = receipt.debugInfo!); await contract.methods.sync_notes().simulate(); - visibleIncomingNotes = await wallets[0].getIncomingNotes({ txHash: receipt.txHash }); + visibleNotes = await wallets[0].getNotes({ txHash: receipt.txHash }); }); it('should return the correct values for creating a note', () => { expect(noteHashes.length).toBe(1); - expect(visibleIncomingNotes.length).toBe(1); - const [receivedValue, receivedOwner, _randomness] = visibleIncomingNotes[0].note.items; + expect(visibleNotes.length).toBe(1); + const [receivedValue, receivedOwner, _randomness] = visibleNotes[0].note.items; expect(receivedValue.toBigInt()).toBe(value); expect(receivedOwner).toEqual(owner.toField()); }); @@ -161,11 +161,11 @@ describe('e2e_inclusion_proofs_contract', () => { const { noteHashes } = receipt.debugInfo!; await contract.methods.sync_notes().simulate(); - const visibleIncomingNotes = await wallets[0].getIncomingNotes({ txHash: receipt.txHash }); + const visibleNotes = await wallets[0].getNotes({ txHash: receipt.txHash }); expect(noteHashes.length).toBe(1); - expect(visibleIncomingNotes.length).toBe(1); - const [receivedValue, receivedOwner, _randomness] = visibleIncomingNotes[0].note.items; + 
expect(visibleNotes.length).toBe(1); + const [receivedValue, receivedOwner, _randomness] = visibleNotes[0].note.items; expect(receivedValue.toBigInt()).toBe(value); expect(receivedOwner).toEqual(owner.toField()); } diff --git a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts index 0448544372a2..8a24dc364db7 100644 --- a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts +++ b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts @@ -111,7 +111,7 @@ describe('guides/dapp/testing', () => { it('checks private storage', async () => { // docs:start:private-storage await token.methods.sync_notes().simulate(); - const notes = await pxe.getIncomingNotes({ + const notes = await pxe.getNotes({ owner: owner.getAddress(), contractAddress: token.address, storageSlot: ownerSlot, diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 08932fbdb4a5..a8117b5a5dbc 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -21,6 +21,9 @@ { "path": "../bb-prover" }, + { + "path": "../blob-sink" + }, { "path": "../bot" }, diff --git a/yarn-project/ethereum/package.json b/yarn-project/ethereum/package.json index 87ecc3d321ea..5081cab89dc3 100644 --- a/yarn-project/ethereum/package.json +++ b/yarn-project/ethereum/package.json @@ -33,7 +33,6 @@ "@aztec/l1-artifacts": "workspace:^", "@viem/anvil": "^0.0.10", "dotenv": "^16.0.3", - "get-port": "^7.1.0", "tslib": "^2.4.0", "viem": "^2.7.15", "zod": "^3.23.8" diff --git a/yarn-project/ethereum/src/test/start_anvil.test.ts b/yarn-project/ethereum/src/test/start_anvil.test.ts index 8efdff4452b3..f04d55a26ff1 100644 --- a/yarn-project/ethereum/src/test/start_anvil.test.ts +++ b/yarn-project/ethereum/src/test/start_anvil.test.ts @@ -5,6 +5,15 @@ import { startAnvil } from './start_anvil.js'; describe('start_anvil', () => { it('starts anvil on a free port', async () => { const { anvil, rpcUrl } = await startAnvil(); + + const port = parseInt(new URL(rpcUrl).port); + expect(port).toBeLessThan(65536); + expect(port).toBeGreaterThan(1024); + expect(anvil.port).toEqual(port); + + const host = new URL(rpcUrl).hostname; + expect(anvil.host).toEqual(host); + const publicClient = createPublicClient({ transport: http(rpcUrl) }); const chainId = await publicClient.getChainId(); expect(chainId).toEqual(31337); diff --git a/yarn-project/ethereum/src/test/start_anvil.ts b/yarn-project/ethereum/src/test/start_anvil.ts index b8c287681b31..9c81b2e2832a 100644 --- a/yarn-project/ethereum/src/test/start_anvil.ts +++ b/yarn-project/ethereum/src/test/start_anvil.ts @@ -2,38 +2,47 @@ import { makeBackoff, retry } from '@aztec/foundation/retry'; import { fileURLToPath } from '@aztec/foundation/url'; import { type Anvil, createAnvil } from '@viem/anvil'; -import getPort from 'get-port'; import { dirname, resolve } from 'path'; /** * Ensures there's a running Anvil instance and returns the RPC URL. */ export async function startAnvil(l1BlockTime?: number): Promise<{ anvil: Anvil; rpcUrl: string }> { - let ethereumHostPort: number | undefined; - const anvilBinary = resolve(dirname(fileURLToPath(import.meta.url)), '../../', 'scripts/anvil_kill_wrapper.sh'); + let port: number | undefined; + // Start anvil. // We go via a wrapper script to ensure if the parent dies, anvil dies. 
const anvil = await retry( async () => { - ethereumHostPort = await getPort(); const anvil = createAnvil({ anvilBinary, - port: ethereumHostPort, + port: 0, blockTime: l1BlockTime, + stopTimeout: 1000, + }); + + // Listen to the anvil output to get the port. + const removeHandler = anvil.on('message', (message: string) => { + if (port === undefined && message.includes('Listening on')) { + port = parseInt(message.match(/Listening on ([^:]+):(\d+)/)![2]); + } }); await anvil.start(); + removeHandler(); + return anvil; }, 'Start anvil', makeBackoff([5, 5, 5]), ); - if (!ethereumHostPort) { + if (!port) { throw new Error('Failed to start anvil'); } - const rpcUrl = `http://127.0.0.1:${ethereumHostPort}`; - return { anvil, rpcUrl }; + // Monkeypatch the anvil instance to include the actually assigned port + Object.defineProperty(anvil, 'port', { value: port, writable: false }); + return { anvil, rpcUrl: `http://127.0.0.1:${port}` }; } diff --git a/yarn-project/foundation/src/blob/blob.test.ts b/yarn-project/foundation/src/blob/blob.test.ts index e4a5746ec06f..da4caa8fc74c 100644 --- a/yarn-project/foundation/src/blob/blob.test.ts +++ b/yarn-project/foundation/src/blob/blob.test.ts @@ -78,15 +78,19 @@ describe('blob', () => { // This test ensures that the Blob class correctly matches the c-kzg lib // The values here are used to test Noir's blob evaluation in noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr -> test_400 const blobItems = Array(400).fill(new Fr(3)); - const ourBlob = new Blob(blobItems); + const ourBlob = Blob.fromFields(blobItems); const blobItemsHash = poseidon2Hash(Array(400).fill(new Fr(3))); expect(blobItemsHash).toEqual(ourBlob.fieldsHash); - expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment); + + // We add zeros before getting commitment as we do not store the blob along with + // all of the zeros + const dataWithZeros = Buffer.concat([ourBlob.data], BYTES_PER_BLOB); + expect(blobToKzgCommitment(dataWithZeros)).toEqual(ourBlob.commitment); const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]); expect(z).toEqual(ourBlob.challengeZ); - const res = computeKzgProof(ourBlob.data, ourBlob.challengeZ.toBuffer()); + const res = computeKzgProof(dataWithZeros, ourBlob.challengeZ.toBuffer()); expect(res[0]).toEqual(ourBlob.proof); expect(res[1]).toEqual(ourBlob.evaluationY); @@ -112,8 +116,9 @@ describe('blob', () => { const blobItemsHash = poseidon2Hash(blobItems); const blobs = Blob.getBlobs(blobItems); blobs.forEach(ourBlob => { - // const ourBlob = new Blob(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash); + // const ourBlob = Blob.fromFields(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash); expect(blobItemsHash).toEqual(ourBlob.fieldsHash); + expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment); const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]); @@ -132,4 +137,11 @@ describe('blob', () => { expect(isValid).toBe(true); }); }); + + it('Should serialise and deserialise a blob', () => { + const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobBuffer = blob.toBuffer(); + const deserialisedBlob = Blob.fromBuffer(blobBuffer); + expect(blob.fieldsHash.equals(deserialisedBlob.fieldsHash)).toBe(true); + }); }); diff --git a/yarn-project/foundation/src/blob/index.ts b/yarn-project/foundation/src/blob/index.ts index 6c1651f4c56a..dddb124f1d31 100644 --- 
a/yarn-project/foundation/src/blob/index.ts +++ b/yarn-project/foundation/src/blob/index.ts @@ -3,7 +3,7 @@ import type { Blob as BlobBuffer } from 'c-kzg'; import { poseidon2Hash, sha256 } from '../crypto/index.js'; import { Fr } from '../fields/index.js'; -import { serializeToBuffer } from '../serialize/index.js'; +import { BufferReader, serializeToBuffer } from '../serialize/index.js'; // Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err: /* eslint-disable import/no-named-as-default-member */ @@ -36,48 +36,47 @@ export const VERSIONED_HASH_VERSION_KZG = 0x01; * A class to create, manage, and prove EVM blobs. */ export class Blob { - /** The blob to be broadcast on L1 in bytes form. */ - public readonly data: BlobBuffer; - /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ - public readonly fieldsHash: Fr; - /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */ - public readonly challengeZ: Fr; - /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ - public readonly evaluationY: Buffer; - /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ - public readonly commitment: Buffer; - /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ - public readonly proof: Buffer; - constructor( - /** All fields to be broadcast in the blob. */ - fields: Fr[], - /** If we want to broadcast more fields than fit into a blob, we hash those and used it as the fieldsHash across all blobs. - * This is much simpler and cheaper in the circuit to do, but MUST BE CHECKED before injecting here. - */ - multiBlobFieldsHash?: Fr, - ) { + /** The blob to be broadcast on L1 in bytes form. */ + public readonly data: BlobBuffer, + /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ + public readonly fieldsHash: Fr, + /** Challenge point z (= H(H(tx_effects), kzgCommitment). Used such that p(z) = y. */ + public readonly challengeZ: Fr, + /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ + public readonly evaluationY: Buffer, + /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ + public readonly commitment: Buffer, + /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ + public readonly proof: Buffer, + ) {} + + static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob { if (fields.length > FIELD_ELEMENTS_PER_BLOB) { throw new Error( `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`, ); } - this.data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB); + const dataWithoutZeros = serializeToBuffer(fields); + const data = Buffer.concat([dataWithoutZeros], BYTES_PER_BLOB); + + // This matches the output of SpongeBlob.squeeze() in the blob circuit - this.fieldsHash = multiBlobFieldsHash ? 
multiBlobFieldsHash : poseidon2Hash(fields); - this.commitment = Buffer.from(blobToKzgCommitment(this.data)); - this.challengeZ = poseidon2Hash([this.fieldsHash, ...this.commitmentToFields()]); - const res = computeKzgProof(this.data, this.challengeZ.toBuffer()); - if (!verifyKzgProof(this.commitment, this.challengeZ.toBuffer(), res[1], res[0])) { + const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields); + const commitment = Buffer.from(blobToKzgCommitment(data)); + const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]); + const res = computeKzgProof(data, challengeZ.toBuffer()); + if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) { throw new Error(`KZG proof did not verify.`); } - this.proof = Buffer.from(res[0]); - this.evaluationY = Buffer.from(res[1]); + const proof = Buffer.from(res[0]); + const evaluationY = Buffer.from(res[1]); + + return new Blob(dataWithoutZeros, fieldsHash, challengeZ, evaluationY, commitment, proof); } // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] commitmentToFields(): [Fr, Fr] { - return [new Fr(this.commitment.subarray(0, 31)), new Fr(this.commitment.subarray(31, 48))]; + return commitmentToFields(this.commitment); } // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers @@ -93,6 +92,49 @@ export class Blob { return hash; } + toBuffer(): Buffer { + return Buffer.from( + serializeToBuffer( + this.data.length, + this.data, + this.fieldsHash, + this.challengeZ, + this.evaluationY.length, + this.evaluationY, + this.commitment.length, + this.commitment, + this.proof.length, + this.proof, + ), + ); + } + + static fromBuffer(buf: Buffer | BufferReader): Blob { + const reader = BufferReader.asReader(buf); + return new Blob( + reader.readUint8Array(), + reader.readObject(Fr), + reader.readObject(Fr), + reader.readBuffer(), + reader.readBuffer(), + reader.readBuffer(), + ); + } + + /** + * Pad the blob data to its full size before posting + */ + get dataWithZeros(): BlobBuffer { + return Buffer.concat([this.data], BYTES_PER_BLOB); + } + + /** + * Get the size of the blob in bytes + */ + getSize() { + return this.data.length; + } + // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile: // * input[:32] - versioned_hash // * input[32:64] - z @@ -145,8 +187,13 @@ export class Blob { const res = []; for (let i = 0; i < numBlobs; i++) { const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? 
fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB; - res.push(new Blob(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash)); + res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash)); } return res; } } + +// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] +function commitmentToFields(commitment: Buffer): [Fr, Fr] { + return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))]; +} diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 4ce23b9946e4..d45d8d36f63d 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -100,6 +100,7 @@ export type EnvVar = | 'P2P_UDP_ANNOUNCE_ADDR' | 'P2P_UDP_LISTEN_ADDR' | 'PEER_ID_PRIVATE_KEY' + | 'PROVER_BLOB_SINK_URL' | 'PROOF_VERIFIER_L1_START_BLOCK' | 'PROOF_VERIFIER_POLL_INTERVAL_MS' | 'PROVER_AGENT_ENABLED' @@ -136,9 +137,12 @@ export type EnvVar = | 'REGISTRY_CONTRACT_ADDRESS' | 'ROLLUP_CONTRACT_ADDRESS' | 'SEQ_ALLOWED_SETUP_FN' + | 'SEQ_BLOB_SINK_URL' | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' | 'SEQ_MAX_TX_PER_BLOCK' | 'SEQ_MIN_TX_PER_BLOCK' + | 'SEQ_MAX_DA_BLOCK_GAS' + | 'SEQ_MAX_L2_BLOCK_GAS' | 'SEQ_PUBLISH_RETRY_INTERVAL_MS' | 'SEQ_PUBLISHER_PRIVATE_KEY' | 'SEQ_REQUIRED_CONFIRMATIONS' diff --git a/yarn-project/foundation/src/fields/point.ts b/yarn-project/foundation/src/fields/point.ts index 96135f47b500..e950998350ba 100644 --- a/yarn-project/foundation/src/fields/point.ts +++ b/yarn-project/foundation/src/fields/point.ts @@ -10,6 +10,7 @@ import { Fr } from './fields.js'; * Represents a Point on an elliptic curve with x and y coordinates. * The Point class provides methods for creating instances from different input types, * converting instances to various output formats, and checking the equality of points. + * TODO(#7386): Clean up this class. */ export class Point { static ZERO = new Point(Fr.ZERO, Fr.ZERO, false); diff --git a/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.ts b/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.ts index 54436d63efa8..8d680cada320 100644 --- a/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.ts +++ b/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.ts @@ -24,6 +24,11 @@ export class SafeJsonRpcServer { constructor( /** The proxy object to delegate requests to. 
*/ private readonly proxy: Proxy, + /** + * Return an HTTP 200 status code on errors, but include an error object + * as per the JSON RPC spec + */ + private http200OnError = false, /** Health check function */ private readonly healthCheck: StatusCheckFn = () => true, /** Logger */ @@ -105,9 +110,17 @@ export class SafeJsonRpcServer { ctx.status = 400; ctx.body = { jsonrpc, id, error: { code: -32601, message: `Method not found: ${method}` } }; } else { - const result = await this.proxy.call(method, params); - ctx.body = { jsonrpc, id, result }; ctx.status = 200; + try { + const result = await this.proxy.call(method, params); + ctx.body = { jsonrpc, id, result }; + } catch (err: any) { + if (this.http200OnError) { + ctx.body = { jsonrpc, id, error: { code: err.code || -32600, data: err.data, message: err.message } }; + } else { + throw err; + } + } } }); @@ -259,20 +272,22 @@ function makeAggregateHealthcheck(namedHandlers: NamespacedApiHandlers, log?: Lo */ export function createNamespacedSafeJsonRpcServer( handlers: NamespacedApiHandlers, + http200OnError = false, log = createLogger('json-rpc:server'), ): SafeJsonRpcServer { const proxy = new NamespacedSafeJsonProxy(handlers); const healthCheck = makeAggregateHealthcheck(handlers, log); - return new SafeJsonRpcServer(proxy, healthCheck, log); + return new SafeJsonRpcServer(proxy, http200OnError, healthCheck, log); } export function createSafeJsonRpcServer<T extends object>( handler: T, schema: ApiSchemaFor<T>, + http200OnError = false, healthCheck?: StatusCheckFn, ) { const proxy = new SafeJsonProxy(handler, schema); - return new SafeJsonRpcServer(proxy, http200OnError, healthCheck); } /** diff --git a/yarn-project/foundation/src/serialize/buffer_reader.ts b/yarn-project/foundation/src/serialize/buffer_reader.ts index 7abe3f59336f..84b2ea86277e 100644 --- a/yarn-project/foundation/src/serialize/buffer_reader.ts +++ b/yarn-project/foundation/src/serialize/buffer_reader.ts @@ -307,6 +307,20 @@ export class BufferReader { return this.readBytes(size); } + /** + * Reads a byte array from the current position of the reader and advances the index. + * The method first reads the size (number) of bytes to be read, and then returns + * a Uint8Array of that size containing the bytes. Useful for reading variable-length + * binary data encoded as (size, data) format. + * + * @returns A Uint8Array containing the read bytes. + */ + public readUint8Array(): Uint8Array { + const size = this.readNumber(); + this.#rangeCheck(size); + return this.readBytes(size); + } + /** * Reads and constructs a map object from the current buffer using the provided deserializer. * The method reads the number of entries in the map, followed by iterating through each key-value pair. 
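
Note on the new serialization surface: `readUint8Array` is the reading half of the length-prefixed (size, data) encoding that `serializeToBuffer` produces, and it is what `Blob.fromBuffer` above uses to recover the variable-length blob fields. A minimal round-trip sketch, assuming the `@aztec/foundation/serialize` exports shown in this diff (the variable names are illustrative):

    import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';

    // Write a length-prefixed byte array, as Blob.toBuffer does for data, commitment and proof.
    const bytes = Uint8Array.from([1, 2, 3, 4]);
    const serialized = serializeToBuffer(bytes.length, bytes);

    // Read it back: readUint8Array first reads the size prefix, then that many bytes.
    const reader = BufferReader.asReader(serialized);
    const roundTripped = reader.readUint8Array(); // Uint8Array of length 4
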
diff --git a/yarn-project/foundation/src/serialize/serialize.ts b/yarn-project/foundation/src/serialize/serialize.ts index 6698a7081e2b..fc2638ac3e74 100644 --- a/yarn-project/foundation/src/serialize/serialize.ts +++ b/yarn-project/foundation/src/serialize/serialize.ts @@ -109,6 +109,7 @@ export function deserializeField(buf: Buffer, offset = 0) { export type Bufferable = | boolean | Buffer + | Uint8Array | number | bigint | string diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 0db273a1bd68..4ab855f48476 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -142,6 +142,12 @@ export type P2P<T extends P2PClientType = P2PClientType.full> = P2PApi<T> & { */ getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined; + /** Returns an iterator over pending txs on the mempool. */ + iteratePendingTxs(): Iterable<Tx>; + + /** Returns the number of pending txs in the mempool. */ + getPendingTxCount(): number; + /** * Starts the p2p client. * @returns A promise signalling the completion of the block sync. @@ -460,6 +466,20 @@ export class P2PClient return Promise.resolve(this.getTxs('pending')); } + public getPendingTxCount(): number { + return this.txPool.getPendingTxHashes().length; + } + + public *iteratePendingTxs() { + const pendingTxHashes = this.txPool.getPendingTxHashes(); + for (const txHash of pendingTxHashes) { + const tx = this.txPool.getTxByHash(txHash); + if (tx) { + yield tx; + } + } + } + /** * Returns all transactions in the transaction pool. * @returns An array of Txs. diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts index 8a1b60a9ffd2..7cbf84dfcbec 100644 --- a/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts +++ b/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts @@ -1,7 +1,7 @@ import { BlockAttestation, ConsensusPayload, - SignatureDomainSeperator, + SignatureDomainSeparator, TxHash, getHashedSignaturePayloadEthSignedMessage, } from '@aztec/circuit-types'; @@ -37,7 +37,7 @@ export const mockAttestation = ( const header = makeHeader(1, 2, slot); const payload = new ConsensusPayload(header, archive, txs); - const hash = getHashedSignaturePayloadEthSignedMessage(payload, SignatureDomainSeperator.blockAttestation); + const hash = getHashedSignaturePayloadEthSignedMessage(payload, SignatureDomainSeparator.blockAttestation); const signature = signer.sign(hash); return new BlockAttestation(payload, signature); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts index dfc5df7f105b..cb3abd077e3f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts @@ -4,7 +4,7 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { AztecKVTxPool } from './aztec_kv_tx_pool.js'; import { describeTxPool } from './tx_pool_test_suite.js'; -describe('In-Memory TX pool', () => { +describe('KV TX pool', () => { let txPool: AztecKVTxPool; beforeEach(() => { txPool = new AztecKVTxPool(openTmpStore(), new NoopTelemetryClient()); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.test.ts index bd3156644456..194779508c8d 100--- a/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.test.ts +++ 
b/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.test.ts @@ -1,4 +1,4 @@ -import { type AnyTx, Tx, type TxHash, type TxValidator, mockTx } from '@aztec/circuit-types'; +import { type AnyTx, Tx, type TxHash, type TxValidationResult, type TxValidator, mockTx } from '@aztec/circuit-types'; import { AggregateTxValidator } from './aggregate_tx_validator.js'; @@ -6,57 +6,49 @@ describe('AggregateTxValidator', () => { it('allows txs that pass all validation', async () => { const txs = [mockTx(0), mockTx(1), mockTx(2), mockTx(3), mockTx(4)]; const agg = new AggregateTxValidator( - new TxDenyList([txs[0].getTxHash(), txs[1].getTxHash()], []), + new TxDenyList([txs[0].getTxHash(), txs[1].getTxHash(), txs[4].getTxHash()], []), new TxDenyList([txs[2].getTxHash(), txs[4].getTxHash()], []), ); - const validTxs = [txs[3]]; - const invalidTxs = [txs[0], txs[1], txs[2], txs[4]]; - const skippedTxs: AnyTx[] = []; - await expect(agg.validateTxs(txs)).resolves.toEqual([validTxs, invalidTxs, skippedTxs]); + await expect(agg.validateTx(txs[0])).resolves.toEqual({ result: 'invalid', reason: ['Denied'] }); + await expect(agg.validateTx(txs[1])).resolves.toEqual({ result: 'invalid', reason: ['Denied'] }); + await expect(agg.validateTx(txs[2])).resolves.toEqual({ result: 'invalid', reason: ['Denied'] }); + await expect(agg.validateTx(txs[3])).resolves.toEqual({ result: 'valid' }); + await expect(agg.validateTx(txs[4])).resolves.toEqual({ result: 'invalid', reason: ['Denied', 'Denied'] }); }); it('aggregate skipped txs ', async () => { const txs = [mockTx(0), mockTx(1), mockTx(2), mockTx(3), mockTx(4)]; const agg = new AggregateTxValidator( new TxDenyList([txs[0].getTxHash()], []), - new TxDenyList([], [txs[1].getTxHash(), txs[2].getTxHash()]), + new TxDenyList([txs[4].getTxHash()], [txs[1].getTxHash(), txs[2].getTxHash()]), new TxDenyList([], [txs[4].getTxHash()]), ); - const validTxs = [txs[3]]; - const invalidTxs = [txs[0]]; - const skippedTxs = [txs[1], txs[2], txs[4]]; - await expect(agg.validateTxs(txs)).resolves.toEqual([validTxs, invalidTxs, skippedTxs]); + await expect(agg.validateTx(txs[0])).resolves.toEqual({ result: 'invalid', reason: ['Denied'] }); + await expect(agg.validateTx(txs[1])).resolves.toEqual({ result: 'skipped', reason: ['Skipped'] }); + await expect(agg.validateTx(txs[2])).resolves.toEqual({ result: 'skipped', reason: ['Skipped'] }); + await expect(agg.validateTx(txs[3])).resolves.toEqual({ result: 'valid' }); + await expect(agg.validateTx(txs[4])).resolves.toEqual({ result: 'invalid', reason: ['Denied', 'Skipped'] }); }); class TxDenyList implements TxValidator<AnyTx> { denyList: Set<string>; skippedList: Set<string>; + constructor(deniedTxHashes: TxHash[], skippedTxHashes: TxHash[]) { this.denyList = new Set(deniedTxHashes.map(hash => hash.toString())); this.skippedList = new Set(skippedTxHashes.map(hash => hash.toString())); } - validateTxs(txs: AnyTx[]): Promise<[AnyTx[], AnyTx[], AnyTx[] | undefined]> { - const validTxs: AnyTx[] = []; - const invalidTxs: AnyTx[] = []; - const skippedTxs: AnyTx[] = []; - txs.forEach(tx => { - const txHash = Tx.getHash(tx).toString(); - if (this.skippedList.has(txHash)) { - skippedTxs.push(tx); - } else if (this.denyList.has(txHash)) { - invalidTxs.push(tx); - } else { - validTxs.push(tx); - } - }); - return Promise.resolve([validTxs, invalidTxs, skippedTxs.length ? 
skippedTxs : undefined]); - } - - validateTx(tx: AnyTx): Promise<boolean> { - return Promise.resolve(this.denyList.has(Tx.getHash(tx).toString())); + validateTx(tx: AnyTx): Promise<TxValidationResult> { + if (this.skippedList.has(Tx.getHash(tx).toString())) { + return Promise.resolve({ result: 'skipped', reason: ['Skipped'] }); + } + if (this.denyList.has(Tx.getHash(tx).toString())) { + return Promise.resolve({ result: 'invalid', reason: ['Denied'] }); + } + return Promise.resolve({ result: 'valid' }); } } }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.ts index 21bf24ddb8db..f7279c9b3879 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.ts @@ -1,4 +1,4 @@ -import { type ProcessedTx, type Tx, type TxValidator } from '@aztec/circuit-types'; +import { type ProcessedTx, type Tx, type TxValidationResult, type TxValidator } from '@aztec/circuit-types'; export class AggregateTxValidator<T extends Tx | ProcessedTx> implements TxValidator<T> { #validators: TxValidator<T>[]; @@ -10,27 +10,23 @@ export class AggregateTxValidator this.#validators = validators; } - async validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[], skippedTxs: T[]]> { - const invalidTxs: T[] = []; - const skippedTxs: T[] = []; - let txPool = txs; + async validateTx(tx: T): Promise<TxValidationResult> { + const aggregate: { result: string; reason?: string[] } = { result: 'valid', reason: [] }; for (const validator of this.#validators) { - const [valid, invalid, skipped] = await validator.validateTxs(txPool); - invalidTxs.push(...invalid); - skippedTxs.push(...(skipped ?? [])); - txPool = valid; - } - - return [txPool, invalidTxs, skippedTxs]; - } - - async validateTx(tx: T): Promise<boolean> { - for (const validator of this.#validators) { - const valid = await validator.validateTx(tx); - if (!valid) { - return false; + const result = await validator.validateTx(tx); + if (result.result === 'invalid') { + aggregate.result = 'invalid'; + aggregate.reason!.push(...result.reason); + } else if (result.result === 'skipped') { + if (aggregate.result === 'valid') { + aggregate.result = 'skipped'; + } + aggregate.reason!.push(...result.reason); } } - return true; + if (aggregate.result === 'valid') { + delete aggregate.reason; + } + return aggregate as TxValidationResult; } } diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.test.ts index 6b7f42859f62..894d0e970c72 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.test.ts @@ -1,4 +1,4 @@ -import { mockTx } from '@aztec/circuit-types'; +import { type Tx, mockTx } from '@aztec/circuit-types'; import { AztecAddress, Fr, FunctionSelector } from '@aztec/circuits.js'; import { DataTxValidator } from './data_validator.js'; @@ -21,9 +21,19 @@ describe('TxDataValidator', () => { validator = new DataTxValidator(); }); + const expectValid = async (txs: Tx[]) => { + for (const tx of txs) { + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'valid' }); + } + }; + + const expectInvalid = async (tx: Tx, reason: string) => { + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + }; + it('allows transactions with the correct data', async () => { - const txs = mockTxs(3); 
- await expect(validator.validateTxs(txs)).resolves.toEqual([txs, []]); + const [tx] = mockTxs(1); + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'valid' }); }); it('rejects txs with mismatch non revertible execution requests', async () => { @@ -33,7 +43,10 @@ describe('TxDataValidator', () => { badTxs[1].data.forPublic!.nonRevertibleAccumulatedData.publicCallRequests[1].contractAddress = AztecAddress.random(); - await expect(validator.validateTxs([...goodTxs, ...badTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs); + + await expectInvalid(badTxs[0], 'Incorrect execution request for public call'); + await expectInvalid(badTxs[1], 'Incorrect execution request for public call'); }); it('rejects txs with mismatch revertible execution requests', async () => { @@ -46,7 +59,12 @@ describe('TxDataValidator', () => { badTxs[3].data.forPublic!.revertibleAccumulatedData.publicCallRequests[0].isStaticCall = !badTxs[3].enqueuedPublicFunctionCalls[0].callContext.isStaticCall; - await expect(validator.validateTxs([...badTxs, ...goodTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs); + + await expectInvalid(badTxs[0], 'Incorrect execution request for public call'); + await expectInvalid(badTxs[1], 'Incorrect execution request for public call'); + await expectInvalid(badTxs[2], 'Incorrect execution request for public call'); + await expectInvalid(badTxs[3], 'Incorrect execution request for public call'); }); it('rejects txs with mismatch teardown execution requests', async () => { @@ -55,7 +73,10 @@ describe('TxDataValidator', () => { badTxs[0].data.forPublic!.publicTeardownCallRequest.contractAddress = AztecAddress.random(); badTxs[1].data.forPublic!.publicTeardownCallRequest.msgSender = AztecAddress.random(); - await expect(validator.validateTxs([...goodTxs, ...badTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs); + + await expectInvalid(badTxs[0], 'Incorrect teardown execution request'); + await expectInvalid(badTxs[1], 'Incorrect teardown execution request'); }); it('rejects txs with mismatch number of execution requests', async () => { @@ -66,6 +87,9 @@ describe('TxDataValidator', () => { // Having an extra enqueuedPublicFunctionCall. 
badTxs[1].enqueuedPublicFunctionCalls.push(execRequest); - await expect(validator.validateTxs([...badTxs, ...goodTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs); + + await expectInvalid(badTxs[0], 'Wrong number of execution requests for public calls'); + await expectInvalid(badTxs[1], 'Wrong number of execution requests for public calls'); }); }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.ts index 143713cc2801..ddc5d43ca87c 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.ts @@ -1,29 +1,14 @@ -import { Tx, type TxValidator } from '@aztec/circuit-types'; +import { Tx, type TxValidationResult, type TxValidator } from '@aztec/circuit-types'; import { createLogger } from '@aztec/foundation/log'; export class DataTxValidator implements TxValidator<Tx> { #log = createLogger('p2p:tx_validator:tx_data'); - validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[]]> { - const validTxs: Tx[] = []; - const invalidTxs: Tx[] = []; - for (const tx of txs) { - if (!this.#hasCorrectExecutionRequests(tx)) { - invalidTxs.push(tx); - continue; - } - - validTxs.push(tx); - } - - return Promise.resolve([validTxs, invalidTxs]); - } - - validateTx(tx: Tx): Promise<boolean> { + validateTx(tx: Tx): Promise<TxValidationResult> { return Promise.resolve(this.#hasCorrectExecutionRequests(tx)); } - #hasCorrectExecutionRequests(tx: Tx): boolean { + #hasCorrectExecutionRequests(tx: Tx): TxValidationResult { const callRequests = [ ...tx.data.getRevertiblePublicCallRequests(), ...tx.data.getNonRevertiblePublicCallRequests(), @@ -34,7 +19,7 @@ export class DataTxValidator implements TxValidator<Tx> { callRequests.length }. Got ${tx.enqueuedPublicFunctionCalls.length}.`, ); - return false; + return { result: 'invalid', reason: ['Wrong number of execution requests for public calls'] }; } const invalidExecutionRequestIndex = tx.enqueuedPublicFunctionCalls.findIndex( @@ -46,7 +31,7 @@ export class DataTxValidator implements TxValidator<Tx> { tx, )} because of incorrect execution requests for public call at index ${invalidExecutionRequestIndex}.`, ); - return false; + return { result: 'invalid', reason: ['Incorrect execution request for public call'] }; } const teardownCallRequest = tx.data.getTeardownPublicCallRequest(); @@ -55,10 +40,10 @@ export class DataTxValidator implements TxValidator<Tx> { (teardownCallRequest && !tx.publicTeardownFunctionCall.isForCallRequest(teardownCallRequest)); if (isInvalidTeardownExecutionRequest) { this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} because of incorrect teardown execution requests.`); - return false; + return { result: 'invalid', reason: ['Incorrect teardown execution request'] }; } - return true; + return { result: 'valid' }; } // TODO: Check logs. 
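
Note on the validator refactor: with the batched `validateTxs` removed, callers now validate one tx at a time and branch on the structured `TxValidationResult`. A hedged sketch of driving the validators in this diff (the `filterValidTxs` helper is illustrative and not part of the changeset):

    import { type AnyTx, type TxValidationResult } from '@aztec/circuit-types';
    import { AggregateTxValidator } from './aggregate_tx_validator.js';

    // Keep only the txs that every composed validator accepts.
    async function filterValidTxs<T extends AnyTx>(txs: T[], validator: AggregateTxValidator<T>): Promise<T[]> {
      const valid: T[] = [];
      for (const tx of txs) {
        const result: TxValidationResult = await validator.validateTx(tx);
        if (result.result === 'valid') {
          valid.push(tx);
        }
        // 'invalid' and 'skipped' results carry a reason array, e.g. ['Denied', 'Skipped'].
      }
      return valid;
    }
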
diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.test.ts index 7b0fbb139742..3a64e1fb6015 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.test.ts @@ -1,6 +1,6 @@ import { type AnyTx, mockTx, mockTxForRollup } from '@aztec/circuit-types'; -import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; +import { type MockProxy, mock } from 'jest-mock-extended'; import { DoubleSpendTxValidator, type NullifierSource } from './double_spend_validator.js'; @@ -8,25 +8,26 @@ describe('DoubleSpendTxValidator', () => { let txValidator: DoubleSpendTxValidator<AnyTx>; let nullifierSource: MockProxy<NullifierSource>; + const expectInvalid = async (tx: AnyTx, reason: string) => { + await expect(txValidator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + }; + beforeEach(() => { - nullifierSource = mock<NullifierSource>({ - getNullifierIndices: mockFn().mockImplementation(() => { - return Promise.resolve([undefined]); - }), - }); + nullifierSource = mock<NullifierSource>(); + nullifierSource.nullifiersExist.mockResolvedValue([]); txValidator = new DoubleSpendTxValidator(nullifierSource); }); it('rejects duplicates in non revertible data', async () => { const badTx = mockTxForRollup(); badTx.data.forRollup!.end.nullifiers[1] = badTx.data.forRollup!.end.nullifiers[0]; - await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); + await expectInvalid(badTx, 'Duplicate nullifier in tx'); }); it('rejects duplicates in revertible data', async () => { const badTx = mockTxForRollup(); badTx.data.forRollup!.end.nullifiers[1] = badTx.data.forRollup!.end.nullifiers[0]; - await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); + await expectInvalid(badTx, 'Duplicate nullifier in tx'); }); it('rejects duplicates across phases', async () => { @@ -36,19 +37,12 @@ describe('DoubleSpendTxValidator', () => { }); badTx.data.forPublic!.revertibleAccumulatedData.nullifiers[0] = badTx.data.forPublic!.nonRevertibleAccumulatedData.nullifiers[0]; - await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); - }); - - it('rejects duplicates across txs', async () => { - const firstTx = mockTxForRollup(1); - const secondTx = mockTxForRollup(2); - secondTx.data.forRollup!.end.nullifiers[0] = firstTx.data.forRollup!.end.nullifiers[0]; - await expect(txValidator.validateTxs([firstTx, secondTx])).resolves.toEqual([[firstTx], [secondTx]]); + await expectInvalid(badTx, 'Duplicate nullifier in tx'); }); it('rejects duplicates against history', async () => { const badTx = mockTx(); - nullifierSource.getNullifierIndices.mockReturnValueOnce(Promise.resolve([1n])); - await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); + nullifierSource.nullifiersExist.mockResolvedValue([true]); + await expectInvalid(badTx, 'Existing nullifier'); }); }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.ts index 9f735e197b02..7ec67bbbc39a 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.ts @@ -1,69 +1,33 @@ -import { type AnyTx, Tx, type TxValidator } from '@aztec/circuit-types'; +import { type AnyTx, Tx, type TxValidationResult, 
type TxValidator } from '@aztec/circuit-types'; import { createLogger } from '@aztec/foundation/log'; export interface NullifierSource { - getNullifierIndices: (nullifiers: Buffer[]) => Promise<(bigint | undefined)[]>; + nullifiersExist: (nullifiers: Buffer[]) => Promise<boolean[]>; } export class DoubleSpendTxValidator<T extends AnyTx> implements TxValidator<T> { #log = createLogger('p2p:tx_validator:tx_double_spend'); #nullifierSource: NullifierSource; - constructor(nullifierSource: NullifierSource, private readonly isValidatingBlock: boolean = true) { + constructor(nullifierSource: NullifierSource) { this.#nullifierSource = nullifierSource; } - async validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[]]> { - const validTxs: T[] = []; - const invalidTxs: T[] = []; - const thisBlockNullifiers = new Set<bigint>(); - - for (const tx of txs) { - if (!(await this.#uniqueNullifiers(tx, thisBlockNullifiers))) { - invalidTxs.push(tx); - continue; - } - - validTxs.push(tx); - } - - return [validTxs, invalidTxs]; - } - - validateTx(tx: T): Promise<boolean> { - return this.#uniqueNullifiers(tx, new Set()); - } - - async #uniqueNullifiers(tx: AnyTx, thisBlockNullifiers: Set<bigint>): Promise<boolean> { + async validateTx(tx: T): Promise<TxValidationResult> { const nullifiers = tx instanceof Tx ? tx.data.getNonEmptyNullifiers() : tx.txEffect.nullifiers; // Ditch this tx if it has repeated nullifiers const uniqueNullifiers = new Set(nullifiers); if (uniqueNullifiers.size !== nullifiers.length) { this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for emitting duplicate nullifiers`); - return false; - } - - if (this.isValidatingBlock) { - for (const nullifier of nullifiers) { - const nullifierBigInt = nullifier.toBigInt(); - if (thisBlockNullifiers.has(nullifierBigInt)) { - this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for repeating a nullifier in the same block`); - return false; - } - - thisBlockNullifiers.add(nullifierBigInt); - } + return { result: 'invalid', reason: ['Duplicate nullifier in tx'] }; } - const nullifierIndexes = await this.#nullifierSource.getNullifierIndices(nullifiers.map(n => n.toBuffer())); - - const hasDuplicates = nullifierIndexes.some(index => index !== undefined); - if (hasDuplicates) { - this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for repeating nullifiers present in state trees`); - return false; + if ((await this.#nullifierSource.nullifiersExist(nullifiers.map(n => n.toBuffer()))).some(Boolean)) { + this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for repeating a nullifier`); + return { result: 'invalid', reason: ['Existing nullifier'] }; } - return true; + return { result: 'valid' }; } } diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts index 211d4ad0e669..0dbf8964e713 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts @@ -1,4 +1,4 @@ -import { type AnyTx, mockTx, mockTxForRollup } from '@aztec/circuit-types'; +import { type AnyTx, type Tx, mockTx, mockTxForRollup } from '@aztec/circuit-types'; import { Fr, MaxBlockNumber } from '@aztec/circuits.js'; import { MetadataTxValidator } from './metadata_validator.js'; @@ -14,6 +14,14 @@ describe('MetadataTxValidator', () => { validator = new MetadataTxValidator(chainId, blockNumber); }); + const expectValid = async (tx: Tx) => { + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'valid' }); + }; + + const expectInvalid = async (tx: Tx, reason: string) => 
{ + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + }; + it('allows only transactions for the right chain', async () => { const goodTxs = [mockTx(1), mockTxForRollup(2)]; const badTxs = [mockTx(3), mockTxForRollup(4)]; @@ -26,7 +34,10 @@ describe('MetadataTxValidator', () => { tx.data.constants.txContext.chainId = chainId.add(new Fr(1)); }); - await expect(validator.validateTxs([...goodTxs, ...badTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs[0]); + await expectValid(goodTxs[1]); + await expectInvalid(badTxs[0], 'Incorrect chain id'); + await expectInvalid(badTxs[1], 'Incorrect chain id'); }); it.each([42, 43])('allows txs with valid max block number', async maxBlockNumber => { @@ -34,7 +45,7 @@ describe('MetadataTxValidator', () => { goodTx.data.constants.txContext.chainId = chainId; goodTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(true, new Fr(maxBlockNumber)); - await expect(validator.validateTxs([goodTx])).resolves.toEqual([[goodTx], []]); + await expectValid(goodTx); }); it('allows txs with unset max block number', async () => { @@ -42,13 +53,14 @@ describe('MetadataTxValidator', () => { goodTx.data.constants.txContext.chainId = chainId; goodTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(false, Fr.ZERO); - await expect(validator.validateTxs([goodTx])).resolves.toEqual([[goodTx], []]); + await expectValid(goodTx); }); it('rejects txs with lower max block number', async () => { const badTx = mockTxForRollup(1); badTx.data.constants.txContext.chainId = chainId; badTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(true, blockNumber.sub(new Fr(1))); - await expect(validator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); + + await expectInvalid(badTx, 'Invalid block number'); }); }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts index fe3194a454ed..aefde1dfd725 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts @@ -1,4 +1,4 @@ -import { type AnyTx, Tx, type TxValidator } from '@aztec/circuit-types'; +import { type AnyTx, Tx, type TxValidationResult, type TxValidator } from '@aztec/circuit-types'; import { type Fr } from '@aztec/circuits.js'; import { createLogger } from '@aztec/foundation/log'; @@ -7,28 +7,15 @@ export class MetadataTxValidator<T extends AnyTx> implements TxValidator<T> { constructor(private chainId: Fr, private blockNumber: Fr) {} - validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[]]> { - const validTxs: T[] = []; - const invalidTxs: T[] = []; - for (const tx of txs) { - if (!this.#hasCorrectChainId(tx)) { - invalidTxs.push(tx); - continue; - } - - if (!this.#isValidForBlockNumber(tx)) { - invalidTxs.push(tx); - continue; - } - - validTxs.push(tx); + validateTx(tx: T): Promise<TxValidationResult> { + const errors = []; + if (!this.#hasCorrectChainId(tx)) { + errors.push('Incorrect chain id'); } - - return Promise.resolve([validTxs, invalidTxs]); - } - - validateTx(tx: T): Promise<boolean> { - return Promise.resolve(this.#hasCorrectChainId(tx) && this.#isValidForBlockNumber(tx)); + if (!this.#isValidForBlockNumber(tx)) { + errors.push('Invalid block number'); + } + return Promise.resolve(errors.length > 0 ? 
{ result: 'invalid', reason: errors } : { result: 'valid' }); } #hasCorrectChainId(tx: T): boolean { diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/tx_proof_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/tx_proof_validator.ts index 172234ce3bc8..2bf3b1d45081 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/tx_proof_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/tx_proof_validator.ts @@ -1,4 +1,9 @@ -import { type ClientProtocolCircuitVerifier, Tx, type TxValidator } from '@aztec/circuit-types'; +import { + type ClientProtocolCircuitVerifier, + Tx, + type TxValidationResult, + type TxValidator, +} from '@aztec/circuit-types'; import { createLogger } from '@aztec/foundation/log'; export class TxProofValidator implements TxValidator<Tx> { @@ -6,23 +11,12 @@ export class TxProofValidator implements TxValidator<Tx> { constructor(private verifier: ClientProtocolCircuitVerifier) {} - async validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[]]> { - const validTxs: Tx[] = []; - const invalidTxs: Tx[] = []; - - for (const tx of txs) { - if (await this.verifier.verifyProof(tx)) { - validTxs.push(tx); - } else { - this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for invalid proof`); - invalidTxs.push(tx); - } + async validateTx(tx: Tx): Promise<TxValidationResult> { + if (!(await this.verifier.verifyProof(tx))) { + this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for invalid proof`); + return { result: 'invalid', reason: ['Invalid proof'] }; } - - return [validTxs, invalidTxs]; - } - - validateTx(tx: Tx): Promise<boolean> { - return this.verifier.verifyProof(tx); + this.#log.trace(`Accepted ${Tx.getHash(tx)} with valid proof`); + return { result: 'valid' }; } } diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 6b052af024a2..5b47bc91ca3e 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -6,17 +6,18 @@ import { type Gossipable, type L2BlockSource, MerkleTreeId, + P2PClientType, PeerErrorSeverity, type PeerInfo, type RawGossipMessage, TopicTypeMap, Tx, TxHash, + type TxValidationResult, type WorldStateSynchronizer, getTopicTypeForClientType, metricsTopicStrToLabels, } from '@aztec/circuit-types'; -import { P2PClientType } from '@aztec/circuit-types'; import { Fr } from '@aztec/circuits.js'; import { type EpochCache } from '@aztec/epoch-cache'; import { createLogger } from '@aztec/foundation/log'; @@ -73,14 +74,14 @@ import { GossipSubEvent } from '../types.js'; interface MessageValidator { validator: { - validateTx(tx: Tx): Promise<boolean>; + validateTx(tx: Tx): Promise<TxValidationResult>; }; severity: PeerErrorSeverity; } interface ValidationResult { name: string; - isValid: boolean; + isValid: TxValidationResult; severity: PeerErrorSeverity; } @@ -568,7 +569,7 @@ export class LibP2PService extends WithTracer implement return false; } - if (!validProof) { + if (validProof.result === 'invalid') { // If the proof is invalid, but the txHash is correct, then this is an active attack and we severely punish this.peerManager.penalizePeer(peerId, PeerErrorSeverity.LowToleranceError); return false; @@ -704,9 +705,10 @@ export class LibP2PService extends WithTracer implement }, doubleSpendValidator: { validator: new DoubleSpendTxValidator({ - getNullifierIndices: (nullifiers: Buffer[]) => { + nullifiersExist: async (nullifiers: Buffer[]) => { const merkleTree = this.worldStateSynchronizer.getCommitted(); - return 
merkleTree.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers); + const indices = await merkleTree.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers); + return indices.map(index => index !== undefined); }, }), severity: PeerErrorSeverity.HighToleranceError, @@ -725,8 +727,8 @@ export class LibP2PService extends WithTracer implement messageValidators: Record<string, MessageValidator>, ): Promise<boolean> { const validationPromises = Object.entries(messageValidators).map(async ([name, { validator, severity }]) => { - const isValid = await validator.validateTx(tx); - return { name, isValid, severity }; + const { result } = await validator.validateTx(tx); + return { name, isValid: result === 'valid', severity }; }); // A promise that resolves when all validations have been run @@ -767,16 +769,17 @@ export class LibP2PService extends WithTracer implement } const snapshotValidator = new DoubleSpendTxValidator({ - getNullifierIndices: (nullifiers: Buffer[]) => { + nullifiersExist: async (nullifiers: Buffer[]) => { const merkleTree = this.worldStateSynchronizer.getSnapshot( blockNumber - this.config.severePeerPenaltyBlockLength, ); - return merkleTree.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers); + const indices = await merkleTree.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers); + return indices.map(index => index !== undefined); }, }); const validSnapshot = await snapshotValidator.validateTx(tx); - if (!validSnapshot) { + if (validSnapshot.result !== 'valid') { this.peerManager.penalizePeer(peerId, PeerErrorSeverity.LowToleranceError); return false; } diff --git a/yarn-project/package.json b/yarn-project/package.json index 0a2e6fbc9eb5..68b32f53c60d 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -26,6 +26,7 @@ "aztec-node", "validator-client", "bb-prover", + "blob-sink", "bot", "builder", "pxe", diff --git a/yarn-project/prover-client/src/block_builder/light.ts b/yarn-project/prover-client/src/block_builder/light.ts index e56986b839a7..39d5b7a2185f 100644 --- a/yarn-project/prover-client/src/block_builder/light.ts +++ b/yarn-project/prover-client/src/block_builder/light.ts @@ -4,6 +4,7 @@ import { MerkleTreeId, type MerkleTreeWriteOperations, type ProcessedTx, + TxHash, makeEmptyProcessedTx, toNumBlobFields, } from '@aztec/circuit-types'; @@ -53,7 +54,7 @@ export class LightweightBlockBuilder implements BlockBuilder { this.numTxs = Math.max(2, txs.length); this.spongeBlobState = SpongeBlob.init(toNumBlobFields(txs)); for (const tx of txs) { - this.logger.debug(tx.hash.isZero() ? 'Adding padding tx to block' : 'Adding new tx to block', { + this.logger.debug(tx.hash.equals(TxHash.zero()) ? 
'Adding padding tx to block' : 'Adding new tx to block', { txHash: tx.hash.toString(), }); this.txs.push(tx); diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 9ec68ce94ce0..ee2740b5ebb1 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -5,7 +5,6 @@ import { type PublicExecutionRequest, type ServerCircuitProver, type Tx, - type TxValidator, } from '@aztec/circuit-types'; import { makeBloatedProcessedTx } from '@aztec/circuit-types/test'; import { @@ -195,7 +194,7 @@ export class TestContext { return { block, txs, msgs }; } - public async processPublicFunctions(txs: Tx[], maxTransactions: number, txValidator?: TxValidator) { + public async processPublicFunctions(txs: Tx[], maxTransactions: number) { const defaultExecutorImplementation = ( _stateManager: AvmPersistableStateManager, executionRequest: PublicExecutionRequest, @@ -220,7 +219,7 @@ export class TestContext { return await this.processPublicFunctionsWithMockExecutorImplementation( txs, maxTransactions, - txValidator, defaultExecutorImplementation, ); } @@ -244,7 +242,6 @@ export class TestContext { private async processPublicFunctionsWithMockExecutorImplementation( txs: Tx[], maxTransactions: number, - txValidator?: TxValidator, executorMock?: ( stateManager: AvmPersistableStateManager, executionRequest: PublicExecutionRequest, @@ -271,7 +268,7 @@ export class TestContext { if (executorMock) { simulateInternal.mockImplementation(executorMock); } - return await this.publicProcessor.process(txs, maxTransactions, txValidator); + return await this.publicProcessor.process(txs, { maxTransactions }); } } diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index 3db7c5ca4d2f..f434421dca3f 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -1,5 +1,4 @@ import { - EmptyTxValidator, type EpochProver, type EpochProvingJobState, type ForkMerkleTreeOperations, @@ -90,7 +89,6 @@ export class EpochProvingJob implements Traceable { await asyncPool(this.config.parallelBlockLimit, this.blocks, async block => { const globalVariables = block.header.globalVariables; - const txCount = block.body.numberOfTxsIncludingPadded; const txs = this.getTxs(block); const l1ToL2Messages = await this.getL1ToL2Messages(block); const previousHeader = await this.getBlockHeader(block.number - 1); @@ -112,7 +110,7 @@ export class EpochProvingJob implements Traceable { // Process public fns const db = await this.dbProvider.fork(block.number - 1); const publicProcessor = this.publicProcessorFactory.create(db, previousHeader, globalVariables, true); - const processed = await this.processTxs(publicProcessor, txs, txCount); + const processed = await this.processTxs(publicProcessor, txs); await this.prover.addTxs(processed); await db.close(); this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, { @@ -168,12 +166,8 @@ export class EpochProvingJob implements Traceable { return this.l1ToL2MessageSource.getL1ToL2Messages(BigInt(block.number)); } - private async processTxs( - publicProcessor: PublicProcessor, - txs: Tx[], - totalNumberOfTxs: number, - ): Promise<ProcessedTx[]> { - const [processedTxs, failedTxs] = await publicProcessor.process(txs, totalNumberOfTxs, new EmptyTxValidator()); + private async processTxs(publicProcessor: PublicProcessor, txs: Tx[]): Promise<ProcessedTx[]> { + 
const [processedTxs, failedTxs] = await publicProcessor.process(txs); if (failedTxs.length) { throw new Error( diff --git a/yarn-project/pxe/src/database/incoming_note_dao.test.ts b/yarn-project/pxe/src/database/incoming_note_dao.test.ts deleted file mode 100644 index 1df9103e08fe..000000000000 --- a/yarn-project/pxe/src/database/incoming_note_dao.test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { IncomingNoteDao } from './incoming_note_dao.js'; - -describe('Incoming Note DAO', () => { - it('convert to and from buffer', () => { - const note = IncomingNoteDao.random(); - const buf = note.toBuffer(); - expect(IncomingNoteDao.fromBuffer(buf)).toEqual(note); - }); -}); diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index ebf1c07991cd..ccff64cb2291 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -1,4 +1,4 @@ -import { type InBlock, type IncomingNotesFilter, MerkleTreeId, NoteStatus } from '@aztec/circuit-types'; +import { type InBlock, MerkleTreeId, NoteStatus, type NotesFilter } from '@aztec/circuit-types'; import { AztecAddress, BlockHeader, @@ -22,7 +22,7 @@ import { } from '@aztec/kv-store'; import { contractArtifactFromBuffer, contractArtifactToBuffer } from '@aztec/types/abi'; -import { IncomingNoteDao } from './incoming_note_dao.js'; +import { NoteDao } from './note_dao.js'; import { type PxeDatabase } from './pxe_database.js'; /** @@ -185,17 +185,17 @@ export class KVPxeDatabase implements PxeDatabase { return val?.map(b => Fr.fromBuffer(b)); } - async addNote(note: IncomingNoteDao, scope?: AztecAddress): Promise { + async addNote(note: NoteDao, scope?: AztecAddress): Promise { await this.addNotes([note], scope); } - async addNotes(incomingNotes: IncomingNoteDao[], scope: AztecAddress = AztecAddress.ZERO): Promise { + async addNotes(notes: NoteDao[], scope: AztecAddress = AztecAddress.ZERO): Promise { if (!(await this.#scopes.hasAsync(scope.toString()))) { await this.#addScope(scope); } return this.db.transactionAsync(async () => { - for (const dao of incomingNotes) { + for (const dao of notes) { // store notes by their index in the notes hash tree // this provides the uniqueness we need to store individual notes // and should also return notes in the order that they were created. @@ -217,7 +217,7 @@ export class KVPxeDatabase implements PxeDatabase { return this.db.transactionAsync(async () => { const notes = await toArray(this.#notes.valuesAsync()); for (const note of notes) { - const noteDao = IncomingNoteDao.fromBuffer(note); + const noteDao = NoteDao.fromBuffer(note); if (noteDao.l2BlockNumber > blockNumber) { const noteIndex = toBufferBE(noteDao.index, 32).toString('hex'); await this.#notes.delete(noteIndex); @@ -252,7 +252,7 @@ export class KVPxeDatabase implements PxeDatabase { ); const noteDaos = nullifiedNoteBuffers .filter(buffer => buffer != undefined) - .map(buffer => IncomingNoteDao.fromBuffer(buffer!)); + .map(buffer => NoteDao.fromBuffer(buffer!)); await this.db.transactionAsync(async () => { for (const dao of noteDaos) { @@ -286,7 +286,7 @@ export class KVPxeDatabase implements PxeDatabase { }); } - async getIncomingNotes(filter: IncomingNotesFilter): Promise { + async getNotes(filter: NotesFilter): Promise { const publicKey: PublicKey | undefined = filter.owner ? filter.owner.toAddressPoint() : undefined; filter.status = filter.status ?? 
NoteStatus.ACTIVE; @@ -348,7 +348,7 @@ export class KVPxeDatabase implements PxeDatabase { }); } - const result: IncomingNoteDao[] = []; + const result: NoteDao[] = []; for (const { ids, notes } of candidateNoteSources) { for (const id of ids) { const serializedNote = await notes.getAsync(id); @@ -356,7 +356,7 @@ export class KVPxeDatabase implements PxeDatabase { continue; } - const note = IncomingNoteDao.fromBuffer(serializedNote); + const note = NoteDao.fromBuffer(serializedNote); if (filter.contractAddress && !note.contractAddress.equals(filter.contractAddress)) { continue; } @@ -384,13 +384,13 @@ export class KVPxeDatabase implements PxeDatabase { return result; } - removeNullifiedNotes(nullifiers: InBlock[], accountAddressPoint: PublicKey): Promise { + removeNullifiedNotes(nullifiers: InBlock[], accountAddressPoint: PublicKey): Promise { if (nullifiers.length === 0) { return Promise.resolve([]); } return this.db.transactionAsync(async () => { - const nullifiedNotes: IncomingNoteDao[] = []; + const nullifiedNotes: NoteDao[] = []; for (const blockScopedNullifier of nullifiers) { const { data: nullifier, l2BlockNumber: blockNumber } = blockScopedNullifier; @@ -406,7 +406,7 @@ export class KVPxeDatabase implements PxeDatabase { continue; } const noteScopes = (await toArray(this.#notesToScope.getValuesAsync(noteIndex))) ?? []; - const note = IncomingNoteDao.fromBuffer(noteBuffer); + const note = NoteDao.fromBuffer(noteBuffer); if (!note.addressPoint.equals(accountAddressPoint)) { // tried to nullify someone else's note continue; @@ -445,7 +445,7 @@ export class KVPxeDatabase implements PxeDatabase { }); } - async addNullifiedNote(note: IncomingNoteDao): Promise { + async addNullifiedNote(note: NoteDao): Promise { const noteIndex = toBufferBE(note.index, 32).toString('hex'); await this.#nullifiedNotes.set(noteIndex, note.toBuffer()); @@ -563,7 +563,7 @@ export class KVPxeDatabase implements PxeDatabase { } async estimateSize(): Promise { - const incomingNotesSize = (await this.getIncomingNotes({})).reduce((sum, note) => sum + note.getSize(), 0); + const noteSize = (await this.getNotes({})).reduce((sum, note) => sum + note.getSize(), 0); const authWitsSize = (await toArray(this.#authWitnesses.valuesAsync())).reduce( (sum, value) => sum + value.length * Fr.SIZE_IN_BYTES, @@ -572,7 +572,7 @@ export class KVPxeDatabase implements PxeDatabase { const addressesSize = (await this.#completeAddresses.lengthAsync()) * CompleteAddress.SIZE_IN_BYTES; const treeRootsSize = Object.keys(MerkleTreeId).length * Fr.SIZE_IN_BYTES; - return incomingNotesSize + treeRootsSize + authWitsSize + addressesSize; + return noteSize + treeRootsSize + authWitsSize + addressesSize; } async setTaggingSecretsIndexesAsSender(indexedSecrets: IndexedTaggingSecret[]): Promise { diff --git a/yarn-project/pxe/src/database/note_dao.test.ts b/yarn-project/pxe/src/database/note_dao.test.ts new file mode 100644 index 000000000000..599519e310dc --- /dev/null +++ b/yarn-project/pxe/src/database/note_dao.test.ts @@ -0,0 +1,9 @@ +import { NoteDao } from './note_dao.js'; + +describe('Note DAO', () => { + it('convert to and from buffer', () => { + const note = NoteDao.random(); + const buf = note.toBuffer(); + expect(NoteDao.fromBuffer(buf)).toEqual(note); + }); +}); diff --git a/yarn-project/pxe/src/database/incoming_note_dao.ts b/yarn-project/pxe/src/database/note_dao.ts similarity index 69% rename from yarn-project/pxe/src/database/incoming_note_dao.ts rename to yarn-project/pxe/src/database/note_dao.ts index d4c4192c3a5f..b4cb311fd46f 
100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.ts +++ b/yarn-project/pxe/src/database/note_dao.ts @@ -8,40 +8,57 @@ import { type NoteData } from '@aztec/simulator/acvm'; import { type NoteInfo } from '../note_decryption_utils/index.js'; /** - * A note with contextual data which was decrypted as incoming. + * A Note Data Access Object, representing a note that was committed to the note hash tree, holding all of the + * information required to use it during execution and manage its state. */ -export class IncomingNoteDao implements NoteData { +export class NoteDao implements NoteData { constructor( - /** The note as emitted from the Noir contract. */ + // Note information + + /** The serialized content of the note, as will be returned in the getNotes oracle. */ public note: Note, - /** The contract address this note is created in. */ + /** The address of the contract that created the note (i.e. the address used by the kernel during siloing). */ public contractAddress: AztecAddress, - /** The specific storage location of the note on the contract. */ + /** + * The storage location of the note. This value is not used for anything in PXE, but we do index by storage slot + * since contracts typically make queries based on it. + * */ public storageSlot: Fr, - /** The note type identifier for the contract. */ - public noteTypeId: NoteSelector, - /** The hash of the tx the note was created in. */ - public txHash: TxHash, - /** The L2 block number in which the tx with this note was included. */ - public l2BlockNumber: number, - /** The L2 block hash in which the tx with this note was included. */ - public l2BlockHash: string, - /** The nonce of the note. */ + /** The kernel-provided nonce of the note, required to compute the uniqueNoteHash. */ public nonce: Fr, + + // Computed values /** - * A hash of the note. This is customizable by the app circuit. - * We can use this value to compute siloedNoteHash and uniqueSiloedNoteHash. + * The inner hash (non-unique, non-siloed) of the note. Each contract determines how the note content is hashed. Can + * be used alongside contractAddress and nonce to compute the uniqueNoteHash and the siloedNoteHash. */ public noteHash: Fr, /** - * The nullifier of the note (siloed by contract address). + * The nullifier of the note, siloed by contract address. * Note: Might be set as 0 if the note was added to PXE as nullified. */ public siloedNullifier: Fr, - /** The location of the relevant note in the note hash tree. */ + + // Metadata /** The hash of the tx in which this note was created. Knowing the tx hash allows for efficient node queries e.g. + * when searching for txEffects. + */ + public txHash: TxHash, + /** The L2 block number in which the tx with this note was included. Used for note management while processing + * reorgs.*/ + public l2BlockNumber: number, + /** The L2 block hash in which the tx with this note was included. Used for note management while processing + * reorgs.*/ + public l2BlockHash: string, + /** The index of the leaf in the global note hash tree the note is stored at */ public index: bigint, - /** The public key with which the note was encrypted. */ + /** The public key with which the note content was encrypted during delivery. */ public addressPoint: PublicKey, + + /** The note type identifier for the contract.
+ * TODO: remove + */ + public noteTypeId: NoteSelector, ) {} static fromPayloadAndNoteInfo( @@ -54,19 +71,19 @@ export class IncomingNoteDao implements NoteData { addressPoint: PublicKey, ) { const noteHashIndexInTheWholeTree = BigInt(dataStartIndexForTx + noteInfo.noteHashIndex); - return new IncomingNoteDao( + return new NoteDao( note, payload.contractAddress, payload.storageSlot, - payload.noteTypeId, - noteInfo.txHash, - l2BlockNumber, - l2BlockHash, noteInfo.nonce, noteInfo.noteHash, noteInfo.siloedNullifier, + noteInfo.txHash, + l2BlockNumber, + l2BlockHash, noteHashIndexInTheWholeTree, addressPoint, + payload.noteTypeId, ); } @@ -75,15 +92,15 @@ export class IncomingNoteDao implements NoteData { this.note, this.contractAddress, this.storageSlot, - this.noteTypeId, - this.txHash.buffer, - this.l2BlockNumber, - Fr.fromHexString(this.l2BlockHash), this.nonce, this.noteHash, this.siloedNullifier, + this.txHash, + this.l2BlockNumber, + Fr.fromHexString(this.l2BlockHash), this.index, this.addressPoint, + this.noteTypeId, ]); } @@ -93,29 +110,29 @@ export class IncomingNoteDao implements NoteData { const note = Note.fromBuffer(reader); const contractAddress = AztecAddress.fromBuffer(reader); const storageSlot = Fr.fromBuffer(reader); - const noteTypeId = reader.readObject(NoteSelector); - const txHash = reader.readObject(TxHash); - const l2BlockNumber = reader.readNumber(); - const l2BlockHash = Fr.fromBuffer(reader).toString(); const nonce = Fr.fromBuffer(reader); const noteHash = Fr.fromBuffer(reader); const siloedNullifier = Fr.fromBuffer(reader); + const txHash = reader.readObject(TxHash); + const l2BlockNumber = reader.readNumber(); + const l2BlockHash = Fr.fromBuffer(reader).toString(); const index = toBigIntBE(reader.readBytes(32)); const publicKey = Point.fromBuffer(reader); + const noteTypeId = reader.readObject(NoteSelector); - return new IncomingNoteDao( + return new NoteDao( note, contractAddress, storageSlot, - noteTypeId, - txHash, - l2BlockNumber, - l2BlockHash, nonce, noteHash, siloedNullifier, + txHash, + l2BlockNumber, + l2BlockHash, index, publicKey, + noteTypeId, ); } @@ -125,7 +142,7 @@ export class IncomingNoteDao implements NoteData { static fromString(str: string) { const hex = str.replace(/^0x/, ''); - return IncomingNoteDao.fromBuffer(Buffer.from(hex, 'hex')); + return NoteDao.fromBuffer(Buffer.from(hex, 'hex')); } /** @@ -141,30 +158,30 @@ export class IncomingNoteDao implements NoteData { static random({ note = Note.random(), contractAddress = AztecAddress.random(), - txHash = randomTxHash(), storageSlot = Fr.random(), - noteTypeId = NoteSelector.random(), nonce = Fr.random(), - l2BlockNumber = Math.floor(Math.random() * 1000), - l2BlockHash = Fr.random().toString(), noteHash = Fr.random(), siloedNullifier = Fr.random(), + txHash = randomTxHash(), + l2BlockNumber = Math.floor(Math.random() * 1000), + l2BlockHash = Fr.random().toString(), index = Fr.random().toBigInt(), addressPoint = Point.random(), - }: Partial = {}) { - return new IncomingNoteDao( + noteTypeId = NoteSelector.random(), + }: Partial = {}) { + return new NoteDao( note, contractAddress, storageSlot, - noteTypeId, - txHash, - l2BlockNumber, - l2BlockHash, nonce, noteHash, siloedNullifier, + txHash, + l2BlockNumber, + l2BlockHash, index, addressPoint, + noteTypeId, ); } } diff --git a/yarn-project/pxe/src/database/outgoing_note_dao.test.ts b/yarn-project/pxe/src/database/outgoing_note_dao.test.ts deleted file mode 100644 index 0c293ba13ebf..000000000000 --- 
a/yarn-project/pxe/src/database/outgoing_note_dao.test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { OutgoingNoteDao } from './outgoing_note_dao.js'; - -describe('Outgoing Note DAO', () => { - it('convert to and from buffer', () => { - const note = OutgoingNoteDao.random(); - const buf = note.toBuffer(); - expect(OutgoingNoteDao.fromBuffer(buf)).toEqual(note); - }); -}); diff --git a/yarn-project/pxe/src/database/outgoing_note_dao.ts b/yarn-project/pxe/src/database/outgoing_note_dao.ts index 211c66cea7b0..50fae33c5244 100644 --- a/yarn-project/pxe/src/database/outgoing_note_dao.ts +++ b/yarn-project/pxe/src/database/outgoing_note_dao.ts @@ -69,7 +69,7 @@ export class OutgoingNoteDao { this.contractAddress, this.storageSlot, this.noteTypeId, - this.txHash.buffer, + this.txHash, this.l2BlockNumber, Fr.fromHexString(this.l2BlockHash), this.nonce, @@ -85,7 +85,7 @@ export class OutgoingNoteDao { const contractAddress = AztecAddress.fromBuffer(reader); const storageSlot = Fr.fromBuffer(reader); const noteTypeId = reader.readObject(NoteSelector); - const txHash = new TxHash(reader.readBytes(TxHash.SIZE)); + const txHash = reader.readObject(TxHash); const l2BlockNumber = reader.readNumber(); const l2BlockHash = Fr.fromBuffer(reader).toString(); const nonce = Fr.fromBuffer(reader); diff --git a/yarn-project/pxe/src/database/pxe_database.ts b/yarn-project/pxe/src/database/pxe_database.ts index 6926b2c02afe..8a8b2f8cd61a 100644 --- a/yarn-project/pxe/src/database/pxe_database.ts +++ b/yarn-project/pxe/src/database/pxe_database.ts @@ -1,4 +1,4 @@ -import { type InBlock, type IncomingNotesFilter } from '@aztec/circuit-types'; +import { type InBlock, type NotesFilter } from '@aztec/circuit-types'; import { type BlockHeader, type CompleteAddress, @@ -12,7 +12,7 @@ import { type Fr } from '@aztec/foundation/fields'; import { type ContractArtifactDatabase } from './contracts/contract_artifact_db.js'; import { type ContractInstanceDatabase } from './contracts/contract_instance_db.js'; -import { type IncomingNoteDao } from './incoming_note_dao.js'; +import { type NoteDao } from './note_dao.js'; /** * A database interface that provides methods for retrieving, adding, and removing transactional data related to Aztec @@ -50,11 +50,11 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD popCapsule(): Promise; /** - * Gets incoming notes based on the provided filter. + * Gets notes based on the provided filter. * @param filter - The filter to apply to the notes. * @returns The requested notes. */ - getIncomingNotes(filter: IncomingNotesFilter): Promise; + getNotes(filter: NotesFilter): Promise; /** * Adds a note to DB. @@ -62,24 +62,24 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD * @param scope - The scope to add the note under. Currently optional. * @remark - Will create a database for the scope if it does not already exist. */ - addNote(note: IncomingNoteDao, scope?: AztecAddress): Promise; + addNote(note: NoteDao, scope?: AztecAddress): Promise; /** * Adds a nullified note to DB. * @param note - The note to add. */ - addNullifiedNote(note: IncomingNoteDao): Promise; + addNullifiedNote(note: NoteDao): Promise; /** * Adds an array of notes to DB. * This function is used to insert multiple notes to the database at once, * which can improve performance when dealing with large numbers of transactions. * - * @param incomingNotes - An array of notes which were decrypted as incoming. + * @param notes - An array of notes. 
* @param scope - The scope to add the notes under. Currently optional. * @remark - Will create a database for the scope if it does not already exist. */ - addNotes(incomingNotes: IncomingNoteDao[], scope?: AztecAddress): Promise; + addNotes(notes: NoteDao[], scope?: AztecAddress): Promise; /** * Remove nullified notes associated with the given account and nullifiers. @@ -88,7 +88,7 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD * @param account - A PublicKey instance representing the account for which the records are being removed. * @returns Removed notes. */ - removeNullifiedNotes(nullifiers: InBlock[], account: PublicKey): Promise; + removeNullifiedNotes(nullifiers: InBlock[], account: PublicKey): Promise; /** * Gets the most recently processed block number. diff --git a/yarn-project/pxe/src/database/pxe_database_test_suite.ts b/yarn-project/pxe/src/database/pxe_database_test_suite.ts index ac14fa97c539..78c7cf2f110d 100644 --- a/yarn-project/pxe/src/database/pxe_database_test_suite.ts +++ b/yarn-project/pxe/src/database/pxe_database_test_suite.ts @@ -1,4 +1,4 @@ -import { type IncomingNotesFilter, NoteStatus, randomTxHash } from '@aztec/circuit-types'; +import { NoteStatus, type NotesFilter, randomTxHash } from '@aztec/circuit-types'; import { AztecAddress, CompleteAddress, @@ -13,7 +13,7 @@ import { Fr, Point } from '@aztec/foundation/fields'; import { BenchmarkingContractArtifact } from '@aztec/noir-contracts.js/Benchmarking'; import { TestContractArtifact } from '@aztec/noir-contracts.js/Test'; -import { IncomingNoteDao } from './incoming_note_dao.js'; +import { NoteDao } from './note_dao.js'; import { type PxeDatabase } from './pxe_database.js'; /** @@ -78,9 +78,9 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { let owners: CompleteAddress[]; let contractAddresses: AztecAddress[]; let storageSlots: Fr[]; - let notes: IncomingNoteDao[]; + let notes: NoteDao[]; - const filteringTests: [() => IncomingNotesFilter, () => IncomingNoteDao[]][] = [ + const filteringTests: [() => NotesFilter, () => NoteDao[]][] = [ [() => ({}), () => notes], [ @@ -119,7 +119,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { storageSlots = Array.from({ length: 2 }).map(() => Fr.random()); notes = Array.from({ length: 10 }).map((_, i) => - IncomingNoteDao.random({ + NoteDao.random({ contractAddress: contractAddresses[i % contractAddresses.length], storageSlot: storageSlots[i % storageSlots.length], addressPoint: owners[i % owners.length].address.toAddressPoint(), @@ -135,7 +135,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { it.each(filteringTests)('stores notes in bulk and retrieves notes', async (getFilter, getExpected) => { await database.addNotes(notes); - const returnedNotes = await database.getIncomingNotes(getFilter()); + const returnedNotes = await database.getNotes(getFilter()); expect(returnedNotes.sort()).toEqual(getExpected().sort()); }); @@ -145,7 +145,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { await database.addNote(note); } - const returnedNotes = await database.getIncomingNotes(getFilter()); + const returnedNotes = await database.getNotes(getFilter()); expect(returnedNotes.sort()).toEqual(getExpected().sort()); }); @@ -166,9 +166,9 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { ); } - await expect( - database.getIncomingNotes({ ...getFilter(), status: NoteStatus.ACTIVE_OR_NULLIFIED }), - ).resolves.toEqual(getExpected()); + 
await expect(database.getNotes({ ...getFilter(), status: NoteStatus.ACTIVE_OR_NULLIFIED })).resolves.toEqual( + getExpected(), + ); }); it('skips nullified notes by default or when requesting active', async () => { @@ -184,8 +184,8 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { notesToNullify, ); - const actualNotesWithDefault = await database.getIncomingNotes({}); - const actualNotesWithActive = await database.getIncomingNotes({ status: NoteStatus.ACTIVE }); + const actualNotesWithDefault = await database.getNotes({}); + const actualNotesWithActive = await database.getNotes({ status: NoteStatus.ACTIVE }); expect(actualNotesWithDefault).toEqual(actualNotesWithActive); expect(actualNotesWithActive).toEqual(notes.filter(note => !notesToNullify.includes(note))); @@ -206,7 +206,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { ); await expect(database.unnullifyNotesAfter(98)).resolves.toEqual(undefined); - const result = await database.getIncomingNotes({ status: NoteStatus.ACTIVE, owner: owners[0].address }); + const result = await database.getNotes({ status: NoteStatus.ACTIVE, owner: owners[0].address }); expect(result.sort()).toEqual([...notesToNullify].sort()); }); @@ -224,7 +224,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { notesToNullify, ); - const result = await database.getIncomingNotes({ + const result = await database.getNotes({ status: NoteStatus.ACTIVE_OR_NULLIFIED, }); @@ -242,23 +242,23 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { await database.addNote(note, owners[1].address); } - const owner0IncomingNotes = await database.getIncomingNotes({ + const owner0Notes = await database.getNotes({ scopes: [owners[0].address], }); - expect(owner0IncomingNotes.sort()).toEqual(notes.slice(0, 5).sort()); + expect(owner0Notes.sort()).toEqual(notes.slice(0, 5).sort()); - const owner1IncomingNotes = await database.getIncomingNotes({ + const owner1Notes = await database.getNotes({ scopes: [owners[1].address], }); - expect(owner1IncomingNotes.sort()).toEqual(notes.slice(5).sort()); + expect(owner1Notes.sort()).toEqual(notes.slice(5).sort()); - const bothOwnerIncomingNotes = await database.getIncomingNotes({ + const bothOwnerNotes = await database.getNotes({ scopes: [owners[0].address, owners[1].address], }); - expect(bothOwnerIncomingNotes.sort()).toEqual(notes.sort()); + expect(bothOwnerNotes.sort()).toEqual(notes.sort()); }); it('a nullified note removes notes from all accounts in the pxe', async () => { @@ -266,12 +266,12 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { await database.addNote(notes[0], owners[1].address); await expect( - database.getIncomingNotes({ + database.getNotes({ scopes: [owners[0].address], }), ).resolves.toEqual([notes[0]]); await expect( - database.getIncomingNotes({ + database.getNotes({ scopes: [owners[1].address], }), ).resolves.toEqual([notes[0]]); @@ -290,12 +290,12 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { ).resolves.toEqual([notes[0]]); await expect( - database.getIncomingNotes({ + database.getNotes({ scopes: [owners[0].address], }), ).resolves.toEqual([]); await expect( - database.getIncomingNotes({ + database.getNotes({ scopes: [owners[1].address], }), ).resolves.toEqual([]); @@ -305,7 +305,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { await database.addNotes(notes, owners[0].address); await database.removeNotesAfter(5); - const result = await 
database.getIncomingNotes({ scopes: [owners[0].address] }); + const result = await database.getNotes({ scopes: [owners[0].address] }); expect(new Set(result)).toEqual(new Set(notes.slice(0, 6))); }); }); diff --git a/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts b/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts index b7435a4483b3..fde4074171d4 100644 --- a/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts +++ b/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts @@ -57,7 +57,7 @@ function getNullifierMembershipWitnessResolver(oracle: ProvingDataOracle) { return async (nullifier: Fr) => { const res = await oracle.getNullifierMembershipWitness(nullifier); if (!res) { - throw new Error(`Cannot find the leaf for nullifier ${nullifier.toBigInt()}.`); + throw new Error(`Cannot find the leaf for nullifier ${nullifier}.`); } const { index, siblingPath, leafPreimage } = res; diff --git a/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts index ca05f03cfa82..7e1f94abe03d 100644 --- a/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts +++ b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts @@ -3,7 +3,7 @@ import { type Fr } from '@aztec/foundation/fields'; import { type Logger } from '@aztec/foundation/log'; import { type AcirSimulator } from '@aztec/simulator/client'; -import { IncomingNoteDao } from '../database/incoming_note_dao.js'; +import { NoteDao } from '../database/note_dao.js'; import { type PxeDatabase } from '../database/pxe_database.js'; import { produceNoteDaosForKey } from './produce_note_daos_for_key.js'; @@ -37,15 +37,15 @@ export async function produceNoteDaos( dataStartIndexForTx: number, excludedIndices: Set, logger: Logger, -): Promise<{ incomingNote: IncomingNoteDao | undefined }> { +): Promise<{ note: NoteDao | undefined }> { if (!addressPoint) { throw new Error('addressPoint is undefined. 
Cannot create note.'); } - let incomingNote: IncomingNoteDao | undefined; + let note: NoteDao | undefined; if (addressPoint) { - incomingNote = await produceNoteDaosForKey( + note = await produceNoteDaosForKey( simulator, db, addressPoint, @@ -57,11 +57,11 @@ export async function produceNoteDaos( dataStartIndexForTx, excludedIndices, logger, - IncomingNoteDao.fromPayloadAndNoteInfo, + NoteDao.fromPayloadAndNoteInfo, ); } return { - incomingNote, + note, }; } diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 4bf9f3bccf56..b57ea54df95b 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -7,11 +7,11 @@ import { type FunctionCall, type GetUnencryptedLogsResponse, type InBlock, - type IncomingNotesFilter, L1EventPayload, type L2Block, type LogFilter, MerkleTreeId, + type NotesFilter, type PXE, type PXEInfo, type PrivateExecutionResult, @@ -71,8 +71,8 @@ import { inspect } from 'util'; import { type PXEServiceConfig } from '../config/index.js'; import { getPackageInfo } from '../config/package_info.js'; import { ContractDataOracle } from '../contract_data_oracle/index.js'; -import { IncomingNoteDao } from '../database/incoming_note_dao.js'; import { type PxeDatabase } from '../database/index.js'; +import { NoteDao } from '../database/note_dao.js'; import { KernelOracle } from '../kernel_oracle/index.js'; import { KernelProver } from '../kernel_prover/kernel_prover.js'; import { TestPrivateKernelProver } from '../kernel_prover/test/test_circuit_prover.js'; @@ -273,8 +273,8 @@ export class PXEService implements PXE { return await this.node.getPublicStorageAt(contract, slot, 'latest'); } - public async getIncomingNotes(filter: IncomingNotesFilter): Promise { - const noteDaos = await this.db.getIncomingNotes(filter); + public async getNotes(filter: NotesFilter): Promise { + const noteDaos = await this.db.getNotes(filter); const extendedNotes = noteDaos.map(async dao => { let owner = filter.owner; @@ -343,19 +343,19 @@ export class PXEService implements PXE { } await this.db.addNote( - new IncomingNoteDao( + new NoteDao( note.note, note.contractAddress, note.storageSlot, - note.noteTypeId, - note.txHash, - l2BlockNumber, - l2BlockHash, nonce, noteHash, siloedNullifier, + note.txHash, + l2BlockNumber, + l2BlockHash, index, owner.address.toAddressPoint(), + note.noteTypeId, ), scope, ); @@ -388,19 +388,19 @@ export class PXEService implements PXE { } await this.db.addNullifiedNote( - new IncomingNoteDao( + new NoteDao( note.note, note.contractAddress, note.storageSlot, - note.noteTypeId, - note.txHash, - l2BlockNumber, - l2BlockHash, nonce, noteHash, Fr.ZERO, // We are not able to derive + note.txHash, + l2BlockNumber, + l2BlockHash, index, note.owner.toAddressPoint(), + note.noteTypeId, ), ); } diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index f19f840a3439..137b4137d955 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -34,8 +34,8 @@ import { MessageLoadOracleInputs } from '@aztec/simulator/acvm'; import { type AcirSimulator, type DBOracle } from '@aztec/simulator/client'; import { type ContractDataOracle } from '../contract_data_oracle/index.js'; -import { type IncomingNoteDao } from '../database/incoming_note_dao.js'; import { type PxeDatabase } from '../database/index.js'; +import { type NoteDao } from '../database/note_dao.js'; import { 
produceNoteDaos } from '../note_decryption_utils/produce_note_daos.js'; import { getAcirSimulator } from '../simulator/index.js'; import { WINDOW_HALF_SIZE, getIndexedTaggingSecretsForTheWindow, getInitialIndexesMap } from './tagging_utils.js'; @@ -92,7 +92,7 @@ export class SimulatorOracle implements DBOracle { } async getNotes(contractAddress: AztecAddress, storageSlot: Fr, status: NoteStatus, scopes?: AztecAddress[]) { - const noteDaos = await this.db.getIncomingNotes({ + const noteDaos = await this.db.getNotes({ contractAddress, storageSlot, status, @@ -569,17 +569,17 @@ export class SimulatorOracle implements DBOracle { // Since we could have notes with the same index for different txs, we need // to keep track of them scoping by txHash const excludedIndices: Map> = new Map(); - const incomingNotes: IncomingNoteDao[] = []; + const notes: NoteDao[] = []; const txEffectsCache = new Map | undefined>(); for (const scopedLog of scopedLogs) { - const incomingNotePayload = scopedLog.isFromPublic + const notePayload = scopedLog.isFromPublic ? L1NotePayload.decryptAsIncomingFromPublic(scopedLog.logData, addressSecret) : L1NotePayload.decryptAsIncoming(PrivateLog.fromBuffer(scopedLog.logData), addressSecret); - if (incomingNotePayload) { - const payload = incomingNotePayload; + if (notePayload) { + const payload = notePayload; const txEffect = txEffectsCache.get(scopedLog.txHash.toString()) ?? (await this.aztecNode.getTxEffect(scopedLog.txHash)); @@ -594,13 +594,13 @@ export class SimulatorOracle implements DBOracle { if (!excludedIndices.has(scopedLog.txHash.toString())) { excludedIndices.set(scopedLog.txHash.toString(), new Set()); } - const { incomingNote } = await produceNoteDaos( + const { note } = await produceNoteDaos( // I don't like this at all, but we need a simulator to run `computeNoteHashAndOptionallyANullifier`. This generates // a chicken-and-egg problem due to this oracle requiring a simulator, which in turn requires this oracle. Furthermore, since jest doesn't allow // mocking ESM exports, we have to pollute the method even more by providing a simulator parameter so tests can inject a fake one. simulator ?? getAcirSimulator(this.db, this.aztecNode, this.keyStore, this.contractDataOracle), this.db, - incomingNotePayload ? recipient.toAddressPoint() : undefined, + notePayload ? 
recipient.toAddressPoint() : undefined, payload!, txEffect.data.txHash, txEffect.l2BlockNumber, @@ -611,12 +611,12 @@ export class SimulatorOracle implements DBOracle { this.log, ); - if (incomingNote) { - incomingNotes.push(incomingNote); + if (note) { + notes.push(note); } } } - return { incomingNotes }; + return { notes }; } /** @@ -629,10 +629,10 @@ export class SimulatorOracle implements DBOracle { recipient: AztecAddress, simulator?: AcirSimulator, ): Promise { - const { incomingNotes } = await this.#decryptTaggedLogs(logs, recipient, simulator); - if (incomingNotes.length) { - await this.db.addNotes(incomingNotes, recipient); - incomingNotes.forEach(noteDao => { + const { notes } = await this.#decryptTaggedLogs(logs, recipient, simulator); + if (notes.length) { + await this.db.addNotes(notes, recipient); + notes.forEach(noteDao => { this.log.verbose(`Added incoming note for contract ${noteDao.contractAddress} at slot ${noteDao.storageSlot}`, { contract: noteDao.contractAddress, slot: noteDao.storageSlot, @@ -644,7 +644,7 @@ export class SimulatorOracle implements DBOracle { public async removeNullifiedNotes(contractAddress: AztecAddress) { for (const recipient of await this.keyStore.getAccounts()) { - const currentNotesForRecipient = await this.db.getIncomingNotes({ contractAddress, owner: recipient }); + const currentNotesForRecipient = await this.db.getNotes({ contractAddress, owner: recipient }); const nullifiersToCheck = currentNotesForRecipient.map(note => note.siloedNullifier); const nullifierIndexes = await this.aztecNode.findNullifiersIndexesWithBlock('latest', nullifiersToCheck); diff --git a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts index e70072687536..e4e95f0ada55 100644 --- a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts +++ b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts @@ -32,9 +32,9 @@ import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import times from 'lodash.times'; -import { type IncomingNoteDao } from '../database/incoming_note_dao.js'; import { type PxeDatabase } from '../database/index.js'; import { KVPxeDatabase } from '../database/kv_pxe_database.js'; +import { type NoteDao } from '../database/note_dao.js'; import { ContractDataOracle } from '../index.js'; import { SimulatorOracle } from './index.js'; import { WINDOW_HALF_SIZE } from './tagging_utils.js'; @@ -461,13 +461,13 @@ describe('Simulator oracle', () => { describe('Process notes', () => { let addNotesSpy: any; - let getIncomingNotesSpy: any; + let getNotesSpy: any; let removeNullifiedNotesSpy: any; let simulator: MockProxy; beforeEach(() => { addNotesSpy = jest.spyOn(database, 'addNotes'); - getIncomingNotesSpy = jest.spyOn(database, 'getIncomingNotes'); + getNotesSpy = jest.spyOn(database, 'getNotes'); removeNullifiedNotesSpy = jest.spyOn(database, 'removeNullifiedNotes'); removeNullifiedNotesSpy.mockImplementation(() => Promise.resolve([])); simulator = mock(); @@ -483,7 +483,7 @@ describe('Simulator oracle', () => { afterEach(() => { addNotesSpy.mockReset(); - getIncomingNotesSpy.mockReset(); + getNotesSpy.mockReset(); removeNullifiedNotesSpy.mockReset(); simulator.computeNoteHashAndOptionallyANullifier.mockReset(); aztecNode.getTxEffect.mockReset(); @@ -642,10 +642,10 @@ describe('Simulator oracle', () => { await simulatorOracle.processTaggedLogs(taggedLogs, recipient.address, simulator); - // Check incoming + // Check notes { - 
const addedIncoming: IncomingNoteDao[] = addNotesSpy.mock.calls[0][0]; - expect(addedIncoming.map(dao => dao)).toEqual([ + const addedNotes: NoteDao[] = addNotesSpy.mock.calls[0][0]; + expect(addedNotes.map(dao => dao)).toEqual([ expect.objectContaining({ ...requests[0].snippetOfNoteDao, index: requests[0].indexWithinNoteHashTree }), expect.objectContaining({ ...requests[1].snippetOfNoteDao, index: requests[1].indexWithinNoteHashTree }), expect.objectContaining({ ...requests[2].snippetOfNoteDao, index: requests[2].indexWithinNoteHashTree }), @@ -655,7 +655,7 @@ describe('Simulator oracle', () => { // Check that every note has a different nonce. const nonceSet = new Set(); - addedIncoming.forEach(info => nonceSet.add(info.nonce.value)); + addedNotes.forEach(info => nonceSet.add(info.nonce.value)); expect(nonceSet.size).toBe(requests.length); } }); @@ -667,7 +667,7 @@ describe('Simulator oracle', () => { new MockNoteRequest(getRandomNoteLogPayload(Fr.random(), contractAddress), 12, 3, 2, recipient.address), ]; - getIncomingNotesSpy.mockResolvedValueOnce( + getNotesSpy.mockResolvedValueOnce( Promise.resolve(requests.map(request => ({ siloedNullifier: Fr.random(), ...request.snippetOfNoteDao }))), ); let requestedNullifier; diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 6721e3080b5b..fb874975daa3 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -64,6 +64,7 @@ "@types/node": "^18.7.23", "concurrently": "^7.6.0", "eslint": "^8.37.0", + "express": "^4.21.1", "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", "levelup": "^5.1.1", diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 6947f4101acf..14a416688933 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -14,7 +14,6 @@ import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { L1Publisher } from '../publisher/index.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; import { type SlasherClient } from '../slasher/index.js'; -import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; /** * Encapsulates the full sequencer and publisher. 
@@ -99,7 +98,7 @@ export class SequencerClient { l2BlockSource, l1ToL2MessageSource, publicProcessorFactory, - new TxValidatorFactory(worldStateSynchronizer.getCommitted(), contractDataSource, !!config.enforceFees), + contractDataSource, l1Constants, deps.dateProvider, telemetryClient, diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 10f714b6cf60..09064f254046 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -59,6 +59,16 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'The minimum number of txs to include in a block.', ...numberConfigHelper(1), }, + maxL2BlockGas: { + env: 'SEQ_MAX_L2_BLOCK_GAS', + description: 'The maximum L2 block gas.', + ...numberConfigHelper(10e9), + }, + maxDABlockGas: { + env: 'SEQ_MAX_DA_BLOCK_GAS', + description: 'The maximum DA block gas.', + ...numberConfigHelper(10e9), + }, coinbase: { env: 'COINBASE', parseEnv: (val: string) => EthAddress.fromString(val), diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index 35129eed538e..d5fc13c50efa 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ b/yarn-project/sequencer-client/src/index.ts @@ -1,8 +1,9 @@ export * from './client/index.js'; export * from './config.js'; export * from './publisher/index.js'; -export { Sequencer, SequencerState } from './sequencer/index.js'; +export * from './tx_validator/tx_validator_factory.js'; export * from './slasher/index.js'; +export { Sequencer, SequencerState, getDefaultAllowedSetupFunctions } from './sequencer/index.js'; // Used by the node to simulate public parts of transactions. Should these be moved to a shared library? // ISSUE(#9832) diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 367f2aa66779..d77efa57ca2a 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -24,6 +24,11 @@ export type PublisherConfig = L1TxUtilsConfig & { * The interval to wait between publish retries. */ l1PublishRetryIntervalMS: number; + + /** + * The URL of the blob sink. 
+ */ + blobSinkUrl?: string; }; export const getTxSenderConfigMappings: ( @@ -72,6 +77,11 @@ export const getPublisherConfigMappings: ( description: 'The interval to wait between publish retries.', }, ...l1TxUtilsConfigMappings, + blobSinkUrl: { + env: `${scope}_BLOB_SINK_URL`, + description: 'The URL of the blob sink.', + parseEnv: (val?: string) => val, + }, }); export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 64ac88119d50..689c03c71d16 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -13,6 +13,9 @@ import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { jest } from '@jest/globals'; +import express, { json } from 'express'; +import { type Server } from 'http'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type GetTransactionReceiptReturnType, @@ -68,6 +71,9 @@ class MockRollupContract { } } +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + describe('L1Publisher', () => { let rollupContractRead: MockProxy; let rollupContractWrite: MockProxy; @@ -85,11 +91,16 @@ describe('L1Publisher', () => { let blockHash: Buffer; let body: Buffer; + let mockBlobSinkServer: Server | undefined = undefined; + + // An l1 publisher with some private methods exposed let publisher: L1Publisher; const GAS_GUESS = 300_000n; beforeEach(() => { + mockBlobSinkServer = undefined; + l2Block = L2Block.random(42); header = l2Block.header.toBuffer(); @@ -112,6 +123,7 @@ describe('L1Publisher', () => { publicClient = mock(); l1TxUtils = mock(); const config = { + blobSinkUrl: BLOB_SINK_URL, l1RpcUrl: `http://127.0.0.1:8545`, l1ChainId: 1, publisherPrivateKey: `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`, @@ -138,19 +150,66 @@ describe('L1Publisher', () => { (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS); }); + const closeServer = (server: Server): Promise => { + return new Promise((resolve, reject) => { + server.close(err => { + if (err) { + reject(err); + return; + } + resolve(); + }); + }); + }; + + afterEach(async () => { + if (mockBlobSinkServer) { + await closeServer(mockBlobSinkServer); + mockBlobSinkServer = undefined; + } + }); + + // Run a mock blob sink in the background, and test that the correct data is sent to it + const runBlobSinkServer = (blobs: Blob[]) => { + const app = express(); + app.use(json({ limit: '10mb' })); + + app.post('/blob_sidecar', (req, res) => { + const blobsBuffers = req.body.blobs.map((b: { index: number; blob: { type: string; data: string } }) => + Blob.fromBuffer(Buffer.from(b.blob.data)), + ); + + expect(blobsBuffers).toEqual(blobs); + res.status(200).send(); + }); + + return new Promise(resolve => { + mockBlobSinkServer = app.listen(BLOB_SINK_PORT, () => { + // Resolve when the server is listening + resolve(); + }); + }); + }; + it('publishes and propose l2 block to l1', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - const result = await publisher.proposeL2Block(l2Block); + const kzg = Blob.getViemKzgInstance(); - 
expect(result).toEqual(true); + const expectedBlobs = Blob.getBlobs(l2Block.body.toBlobFields()); - const kzg = Blob.getViemKzgInstance(); + // Check the blobs were forwarded to the blob sink service + const sendToBlobSinkSpy = jest.spyOn(publisher as any, 'sendBlobsToBlobSink'); - const blobs = Blob.getBlobs(l2Block.body.toBlobFields()); + // Expect the blob sink server to receive the blobs + await runBlobSinkServer(expectedBlobs); - const blobInput = Blob.getEthBlobEvaluationInputs(blobs); + const result = await publisher.proposeL2Block(l2Block); + + expect(result).toEqual(true); + + const blobInput = Blob.getEthBlobEvaluationInputs(expectedBlobs); const args = [ { @@ -173,8 +232,14 @@ describe('L1Publisher', () => { data: encodeFunctionData({ abi: rollupContract.abi, functionName: 'propose', args }), }, { fixedGas: GAS_GUESS + L1Publisher.PROPOSE_GAS_GUESS }, - { blobs: blobs.map(b => b.data), kzg, maxFeePerBlobGas: 10000000000n }, + { blobs: expectedBlobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n }, ); + + expect(sendToBlobSinkSpy).toHaveBeenCalledTimes(1); + // If this does not return true, then the mocked server will have errored, and + // the expects that run there will have failed + const returnValuePromise = sendToBlobSinkSpy.mock.results[0].value; + expect(await returnValuePromise).toBe(true); }); it('does not retry if sending a propose tx fails', async () => { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index c27d47100319..727e57f51e70 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -3,7 +3,7 @@ import { type EpochProofClaim, type EpochProofQuote, type L2Block, - SignatureDomainSeperator, + SignatureDomainSeparator, type TxHash, getHashedSignaturePayload, } from '@aztec/circuit-types'; @@ -95,6 +95,8 @@ export type MinimalTransactionReceipt = { logs: any[]; /** Block number in which this tx was mined. */ blockNumber: bigint; + /** The block hash in which this tx was mined */ + blockHash: `0x${string}`; }; /** Arguments to the process method of the rollup contract */ @@ -175,6 +177,8 @@ export class L1Publisher { protected account: PrivateKeyAccount; protected ethereumSlotDuration: bigint; + private blobSinkUrl: string | undefined; + // @note - with blobs, the below estimate seems too large. // Total used for full block from int_l1_pub e2e test: 1m (of which 86k is 1x blob) // Total used for emptier block from above test: 429k (of which 84k is 1x blob) @@ -189,6 +193,7 @@ export class L1Publisher { ) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; this.ethereumSlotDuration = BigInt(config.ethereumSlotDuration); + this.blobSinkUrl = config.blobSinkUrl; this.metrics = new L1PublisherMetrics(client, 'L1Publisher'); const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey, l1Contracts } = config; @@ -593,16 +598,19 @@ export class L1Publisher { const consensusPayload = new ConsensusPayload(block.header, block.archive.root, txHashes ?? 
[]); - const digest = getHashedSignaturePayload(consensusPayload, SignatureDomainSeperator.blockAttestation); + const digest = getHashedSignaturePayload(consensusPayload, SignatureDomainSeparator.blockAttestation); + + const blobs = Blob.getBlobs(block.body.toBlobFields()); const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), blockHash: block.header.hash().toBuffer(), body: block.body.toBuffer(), - blobs: Blob.getBlobs(block.body.toBlobFields()), + blobs, attestations, txHashes: txHashes ?? [], }; + // Publish body and propose block (if not already published) if (this.interrupted) { this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); @@ -647,6 +655,12 @@ export class L1Publisher { }; this.log.verbose(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); this.metrics.recordProcessBlockTx(timer.ms(), stats); + + // Send the blobs to the blob sink + this.sendBlobsToBlobSink(receipt.blockHash, blobs).catch(_err => { + this.log.error('Failed to send blobs to blob sink'); + }); + return true; } @@ -661,7 +675,7 @@ export class L1Publisher { address: this.rollupContract.address, }, { - blobs: proposeTxArgs.blobs.map(b => b.data), + blobs: proposeTxArgs.blobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n, }, @@ -966,7 +980,7 @@ export class L1Publisher { }, {}, { - blobs: encodedData.blobs.map(b => b.data), + blobs: encodedData.blobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1056,7 +1070,7 @@ export class L1Publisher { fixedGas: gas, }, { - blobs: encodedData.blobs.map(b => b.data), + blobs: encodedData.blobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1095,7 +1109,7 @@ export class L1Publisher { }, { fixedGas: gas }, { - blobs: encodedData.blobs.map(b => b.data), + blobs: encodedData.blobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1137,6 +1151,7 @@ export class L1Publisher { gasPrice: receipt.effectiveGasPrice, logs: receipt.logs, blockNumber: receipt.blockNumber, + blockHash: receipt.blockHash, }; } @@ -1152,9 +1167,51 @@ export class L1Publisher { protected async sleepOrInterrupted() { await this.interruptibleSleep.sleep(this.sleepTimeMs); } + + /** + * Send blobs to the blob sink + * + * If a blob sink url is configured, then we send blobs to the blob sink + * - for now we use the blockHash as the identifier for the blobs; + * In the future this will move to be the beacon block id - which takes a bit more work + * to calculate and will need to be mocked in e2e tests + */ + protected async sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise { + // TODO(md): for now we are assuming the indexes of the blobs will be 0, 1, 2 + // When in reality they will not, but for testing purposes this is fine + if (!this.blobSinkUrl) { + this.log.verbose('No blob sink url configured'); + return false; + } + + this.log.verbose(`Sending ${blobs.length} blobs to blob sink`); + try { + const res = await fetch(`${this.blobSinkUrl}/blob_sidecar`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + // eslint-disable-next-line camelcase + block_id: blockHash, + blobs: blobs.map((b, i) => ({ blob: b.toBuffer(), index: i })), + }), + }); + + if (res.ok) { + return true; + } + + this.log.error('Failed to send 
blobs to blob sink', res.status); + return false; + } catch (err) { + this.log.error(`Error sending blobs to blob sink`, err); + return false; + } + } } -/** +/* * Returns cost of calldata usage in Ethereum. * @param data - Calldata. * @returns 4 for each zero byte, 16 for each nonzero. diff --git a/yarn-project/sequencer-client/src/sequencer/index.ts b/yarn-project/sequencer-client/src/sequencer/index.ts index 459a5cab42ff..316084b13f13 100644 --- a/yarn-project/sequencer-client/src/sequencer/index.ts +++ b/yarn-project/sequencer-client/src/sequencer/index.ts @@ -1,2 +1,3 @@ export * from './config.js'; export * from './sequencer.js'; +export * from './allowed.js'; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 60aa42086114..c012fbefb2ed 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -2,18 +2,17 @@ import { BlockAttestation, type BlockBuilder, BlockProposal, + Body, ConsensusPayload, type EpochProofQuote, type L1ToL2MessageSource, L2Block, type L2BlockSource, - MerkleTreeId, + type MerkleTreeId, type MerkleTreeReadOperations, type MerkleTreeWriteOperations, type Tx, TxHash, - type UnencryptedL2Log, - UnencryptedTxL2Logs, WorldStateRunningState, type WorldStateSynchronizer, mockEpochProofQuote as baseMockEpochProofQuote, @@ -22,21 +21,21 @@ import { } from '@aztec/circuit-types'; import { AztecAddress, + BlockHeader, type ContractDataSource, EthAddress, Fr, GasFees, - type GasSettings, GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, } from '@aztec/circuits.js'; +import { makeAppendOnlyTreeSnapshot } from '@aztec/circuits.js/testing'; import { DefaultL1ContractsConfig } from '@aztec/ethereum'; import { Buffer32 } from '@aztec/foundation/buffer'; import { times } from '@aztec/foundation/collection'; -import { randomBytes } from '@aztec/foundation/crypto'; import { Signature } from '@aztec/foundation/eth-signature'; +import { type Logger, createLogger } from '@aztec/foundation/log'; import { TestDateProvider } from '@aztec/foundation/timer'; -import { type Writeable } from '@aztec/foundation/types'; import { type P2P, P2PClientState } from '@aztec/p2p'; import { type BlockBuilderFactory } from '@aztec/prover-client/block-builder'; import { type PublicProcessor, type PublicProcessorFactory } from '@aztec/simulator'; @@ -49,7 +48,6 @@ import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import { type L1Publisher } from '../publisher/l1-publisher.js'; import { type SlasherClient } from '../slasher/index.js'; -import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; import { Sequencer } from './sequencer.js'; import { SequencerState } from './utils.js'; @@ -68,7 +66,13 @@ describe('sequencer', () => { let publicProcessorFactory: MockProxy; let lastBlockNumber: number; + let newBlockNumber: number; + let newSlotNumber: number; let hash: string; + let logger: Logger; + + let block: L2Block; + let globalVariables: GlobalVariables; let sequencer: TestSubject; @@ -87,13 +91,13 @@ describe('sequencer', () => { const archive = Fr.random(); const mockedSig = new Signature(Buffer32.fromField(Fr.random()), Buffer32.fromField(Fr.random()), 27); - const committee = [EthAddress.random()]; + const getSignatures = () => [mockedSig]; + const getAttestations = () => { const attestation = 
new BlockAttestation(new ConsensusPayload(block.header, archive, []), mockedSig); (attestation as any).sender = committee[0]; - return [attestation]; }; @@ -101,20 +105,43 @@ return new BlockProposal(new ConsensusPayload(block.header, archive, [TxHash.random()]), mockedSig); }; - let block: L2Block; - let mockedGlobalVariables: GlobalVariables; + const processTxs = async (txs: Tx[]) => { + return await Promise.all(txs.map(tx => makeProcessedTxFromPrivateOnlyTx(tx, Fr.ZERO, undefined, globalVariables))); + }; + + const mockPendingTxs = (txs: Tx[]) => { + p2p.getPendingTxCount.mockReturnValue(txs.length); + p2p.iteratePendingTxs.mockReturnValue(txs); + }; + + const makeBlock = async (txs: Tx[]) => { + const processedTxs = await processTxs(txs); + const body = new Body(processedTxs.map(tx => tx.txEffect)); + const header = BlockHeader.empty({ globalVariables: globalVariables }); + const archive = makeAppendOnlyTreeSnapshot(newBlockNumber + 1); + + block = new L2Block(archive, header, body); + return block; + }; + + const makeTx = (seed?: number) => { + const tx = mockTxForRollup(seed); + tx.data.constants.txContext.chainId = chainId; + return tx; + }; beforeEach(() => { lastBlockNumber = 0; + newBlockNumber = lastBlockNumber + 1; + newSlotNumber = newBlockNumber; hash = Fr.ZERO.toString(); + logger = createLogger('sequencer:test'); - block = L2Block.random(lastBlockNumber + 1); - - mockedGlobalVariables = new GlobalVariables( + globalVariables = new GlobalVariables( chainId, version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, + new Fr(newBlockNumber), + new Fr(newSlotNumber), Fr.ZERO, coinbase, feeRecipient, @@ -124,16 +151,17 @@ publisher = mock<L1Publisher>(); publisher.getSenderAddress.mockImplementation(() => EthAddress.random()); publisher.getCurrentEpochCommittee.mockResolvedValue(committee); - publisher.canProposeAtNextEthBlock.mockResolvedValue([ - block.header.globalVariables.slotNumber.toBigInt(), - block.header.globalVariables.blockNumber.toBigInt(), - ]); + publisher.canProposeAtNextEthBlock.mockResolvedValue([BigInt(newSlotNumber), BigInt(newBlockNumber)]); publisher.validateBlockForSubmission.mockResolvedValue(); + publisher.proposeL2Block.mockResolvedValue(true); globalVariableBuilder = mock<GlobalVariableBuilder>(); - merkleTreeOps = mock<MerkleTreeReadOperations>(); + globalVariableBuilder.buildGlobalVariables.mockResolvedValue(globalVariables); + blockBuilder = mock<BlockBuilder>(); + blockBuilder.setBlockCompleted.mockImplementation(() => Promise.resolve(block)); + merkleTreeOps = mock<MerkleTreeReadOperations>(); merkleTreeOps.findLeafIndices.mockImplementation((_treeId: MerkleTreeId, _value: any[]) => { return Promise.resolve([undefined]); }); @@ -155,14 +183,12 @@ }), }); - publicProcessor = mock<PublicProcessor>({ - process: async txs => [ - await Promise.all( - txs.map(tx => makeProcessedTxFromPrivateOnlyTx(tx, Fr.ZERO, undefined, block.header.globalVariables)), - ), - [], - [], - ], + publicProcessor = mock<PublicProcessor>(); + publicProcessor.process.mockImplementation(async txsIter => { + const txs = Array.from(txsIter); + const processed = await processTxs(txs); + logger.verbose(`Processed ${txs.length} txs`, { txHashes: txs.map(tx => tx.getTxHash()) }); + return [processed, [], []]; }); publicProcessorFactory = mock<PublicProcessorFactory>({ @@ -189,10 +215,9 @@ create: () => blockBuilder, }); - validatorClient = mock<ValidatorClient>({ - collectAttestations: mockFn().mockResolvedValue(getAttestations()), - createBlockProposal:
mockFn().mockResolvedValue(createBlockProposal()), - }); + validatorClient = mock<ValidatorClient>(); + validatorClient.collectAttestations.mockImplementation(() => Promise.resolve(getAttestations())); + validatorClient.createBlockProposal.mockImplementation(() => Promise.resolve(createBlockProposal())); const l1GenesisTime = BigInt(Math.floor(Date.now() / 1000)); const l1Constants = { l1GenesisTime, slotDuration, ethereumSlotDuration }; @@ -210,7 +235,7 @@ l2BlockSource, l1ToL2MessageSource, publicProcessorFactory, - new TxValidatorFactory(merkleTreeOps, contractSource, false), + contractSource, l1Constants, new TestDateProvider(), new NoopTelemetryClient(), @@ -219,29 +244,25 @@ }); it('builds a block out of a single tx', async () => { - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; + const tx = makeTx(); const txHash = tx.getTxHash(); - p2p.getPendingTxs.mockResolvedValueOnce([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + block = await makeBlock([tx]); + mockPendingTxs([tx]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, + globalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - // Ok, we have an issue that we never actually call the process L2 block + expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); }); it.each([ - { delayedState: SequencerState.WAITING_FOR_TXS }, + { delayedState: SequencerState.INITIALIZING_PROPOSAL }, // It would be nice to add the other states, but we would need to inject delays within the `work` loop ])('does not build a block if it does not have enough time left in the slot', async ({ delayedState }) => { // trick the sequencer into thinking that we are just too far into slot 1 @@ -249,14 +270,9 @@ Math.floor(Date.now() / 1000) - slotDuration * 1 - (sequencer.getTimeTable()[delayedState] + 1), ); - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; - - p2p.getPendingTxs.mockResolvedValueOnce([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + const tx = makeTx(); + mockPendingTxs([tx]); + block = await makeBlock([tx]); await expect(sequencer.doRealWork()).rejects.toThrow( expect.objectContaining({ @@ -270,15 +286,11 @@ }); it('builds a block when it is their turn', async () => { - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; + const tx = makeTx(); const txHash = tx.getTxHash(); - p2p.getPendingTxs.mockResolvedValue([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); + mockPendingTxs([tx]); + block = await makeBlock([tx]); // Not your turn!
publisher.canProposeAtNextEthBlock.mockRejectedValue(new Error()); @@ -302,206 +314,83 @@ describe('sequencer', () => { await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, + globalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); }); - it('builds a block out of several txs rejecting double spends', async () => { - const doubleSpendTxIndex = 1; - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; - txs.forEach(tx => { - tx.data.constants.txContext.chainId = chainId; - }); - const validTxHashes = txs.filter((_, i) => i !== doubleSpendTxIndex).map(tx => tx.getTxHash()); - - const doubleSpendTx = txs[doubleSpendTxIndex]; - - p2p.getPendingTxs.mockResolvedValueOnce(txs); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); - - // We make a nullifier from tx1 a part of the nullifier tree, so it gets rejected as double spend - const doubleSpendNullifier = doubleSpendTx.data.forRollup!.end.nullifiers[0].toBuffer(); - merkleTreeOps.findLeafIndices.mockImplementation((treeId: MerkleTreeId, value: any[]) => { - return Promise.resolve( - treeId === MerkleTreeId.NULLIFIER_TREE && value[0].equals(doubleSpendNullifier) ? [1n] : [undefined], - ); - }); - - await sequencer.doRealWork(); - - expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), - ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); - expect(p2p.deleteTxs).toHaveBeenCalledWith([doubleSpendTx.getTxHash()]); - }); - - it('builds a block out of several txs rejecting incorrect chain ids', async () => { - const invalidChainTxIndex = 1; - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; - txs.forEach(tx => { - tx.data.constants.txContext.chainId = chainId; - }); - const invalidChainTx = txs[invalidChainTxIndex]; - const validTxHashes = txs.filter((_, i) => i !== invalidChainTxIndex).map(tx => tx.getTxHash()); - - p2p.getPendingTxs.mockResolvedValueOnce(txs); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); - - // We make the chain id on the invalid tx not equal to the configured chain id - invalidChainTx.data.constants.txContext.chainId = new Fr(1n + chainId.value); - - await sequencer.doRealWork(); - - expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), - ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); - expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidChainTx.getTxHash()]); - }); - - it('builds a block out of several txs dropping the ones that go over max size', async () => { - const invalidTransactionIndex = 1; - - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; - txs.forEach(tx => { - tx.data.constants.txContext.chainId = chainId; - }); - const validTxHashes = txs.filter((_, i) => i !== invalidTransactionIndex).map(tx => 
tx.getTxHash()); - - p2p.getPendingTxs.mockResolvedValueOnce(txs); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); - - // We make txs[1] too big to fit - (txs[invalidTransactionIndex] as Writeable<Tx>).unencryptedLogs = UnencryptedTxL2Logs.random(2, 4); - (txs[invalidTransactionIndex].unencryptedLogs.functionLogs[0].logs[0] as Writeable<UnencryptedL2Log>).data = - randomBytes(1024 * 1022); - - await sequencer.doRealWork(); - - expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), - ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); - }); - - it('builds a block out of several txs skipping the ones not providing enough fee per gas', async () => { - const gasFees = new GasFees(10, 20); - mockedGlobalVariables.gasFees = gasFees; - - const txs = Array(5) - .fill(0) - .map((_, i) => mockTxForRollup(0x10000 * i)); - - const skippedTxIndexes = [1, 2]; - const validTxHashes: TxHash[] = []; - txs.forEach((tx, i) => { - tx.data.constants.txContext.chainId = chainId; - const maxFeesPerGas: Writeable<GasFees> = gasFees.clone(); - const feeToAdjust = i % 2 ? 'feePerDaGas' : 'feePerL2Gas'; - if (skippedTxIndexes.includes(i)) { - // maxFeesPerGas is less than gasFees. - maxFeesPerGas[feeToAdjust] = maxFeesPerGas[feeToAdjust].sub(new Fr(i + 1)); - } else { - // maxFeesPerGas is greater than or equal to gasFees. - maxFeesPerGas[feeToAdjust] = maxFeesPerGas[feeToAdjust].add(new Fr(i)); - validTxHashes.push(tx.getTxHash()); - } - (tx.data.constants.txContext.gasSettings as Writeable<GasSettings>).maxFeesPerGas = maxFeesPerGas; - }); - - p2p.getPendingTxs.mockResolvedValueOnce(txs); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + it('builds a block out of several txs rejecting invalid txs', async () => { + const txs = [makeTx(0x10000), makeTx(0x20000), makeTx(0x30000)]; + const validTxs = [txs[0], txs[2]]; + const invalidTx = txs[1]; + const validTxHashes = validTxs.map(tx => tx.getTxHash()); + + mockPendingTxs(txs); + block = await makeBlock([txs[0], txs[2]]); + publicProcessor.process.mockResolvedValue([ + await processTxs(validTxs), + [{ tx: invalidTx, error: new Error() }], + [], + ]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, + globalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); - // The txs are not included. But they are not dropped from the pool either.
- expect(p2p.deleteTxs).not.toHaveBeenCalled(); + expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidTx.getTxHash()]); }); it('builds a block once it reaches the minimum number of transactions', async () => { - const txs = times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); - - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); - + const txs = times(8, i => makeTx(i * 0x10000)); sequencer.updateConfig({ minTxsPerBlock: 4 }); // block is not built with 0 txs - p2p.getPendingTxs.mockResolvedValueOnce([]); - //p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 4)); + mockPendingTxs([]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); // block is not built with 3 txs - p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 3)); + mockPendingTxs(txs.slice(0, 3)); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); // block is built with 4 txs - p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 4)); - const txHashes = txs.slice(0, 4).map(tx => tx.getTxHash()); + const neededTxs = txs.slice(0, 4); + mockPendingTxs(neededTxs); + block = await makeBlock(neededTxs); await sequencer.doRealWork(); + expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), + globalVariables, + times(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr.zero), ); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), txHashes, undefined); + expect(publisher.proposeL2Block).toHaveBeenCalledWith( + block, + getSignatures(), + neededTxs.map(tx => tx.getTxHash()), + undefined, + ); }); it('builds a block that contains zero real transactions once flushed', async () => { - const txs = times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); - - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); + const txs = times(8, i => makeTx(i * 0x10000)); sequencer.updateConfig({ minTxsPerBlock: 4 }); // block is not built with 0 txs - p2p.getPendingTxs.mockResolvedValueOnce([]); + mockPendingTxs([]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); // block is not built with 3 txs - p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 3)); + mockPendingTxs(txs.slice(0, 3)); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); @@ -509,12 +398,15 @@ describe('sequencer', () => { sequencer.flush(); // block is built with 0 txs - p2p.getPendingTxs.mockResolvedValueOnce([]); + mockPendingTxs([]); + block = await makeBlock([]); + await sequencer.doRealWork(); + expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(1); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), + globalVariables, + times(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr.zero), ); 
expect(blockBuilder.addTxs).toHaveBeenCalledWith([]); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); @@ -522,27 +414,17 @@ describe('sequencer', () => { }); it('builds a block that contains less than the minimum number of transactions once flushed', async () => { - const txs = times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); - - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); + const txs = times(8, i => makeTx(i * 0x10000)); sequencer.updateConfig({ minTxsPerBlock: 4 }); // block is not built with 0 txs - p2p.getPendingTxs.mockResolvedValueOnce([]); + mockPendingTxs([]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); // block is not built with 3 txs - p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 3)); + mockPendingTxs(txs.slice(0, 3)); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); @@ -551,12 +433,14 @@ describe('sequencer', () => { // block is built with 3 txs const postFlushTxs = txs.slice(0, 3); - p2p.getPendingTxs.mockResolvedValueOnce(postFlushTxs); + mockPendingTxs(postFlushTxs); + block = await makeBlock(postFlushTxs); const postFlushTxHashes = postFlushTxs.map(tx => tx.getTxHash()); + await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(1); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, + globalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); @@ -565,31 +449,12 @@ describe('sequencer', () => { }); it('aborts building a block if the chain moves underneath it', async () => { - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; - - p2p.getPendingTxs.mockResolvedValueOnce([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + const tx = makeTx(); + mockPendingTxs([tx]); + block = await makeBlock([tx]); // This could practically be for any reason, e.g., could also be that we have entered a new slot. 
- publisher.validateBlockForSubmission - .mockResolvedValueOnce() - .mockResolvedValueOnce() - .mockRejectedValueOnce(new Error()); + publisher.validateBlockForSubmission.mockResolvedValueOnce().mockRejectedValueOnce(new Error('No block for you')); await sequencer.doRealWork(); @@ -597,19 +462,20 @@ describe('sequencer', () => { }); describe('proof quotes', () => { + let tx: Tx; let txHash: TxHash; let currentEpoch = 0n; - const setupForBlockNumber = (blockNumber: number) => { + const setupForBlockNumber = async (blockNumber: number) => { + newBlockNumber = blockNumber; + newSlotNumber = blockNumber; currentEpoch = BigInt(blockNumber) / BigInt(epochDuration); - // Create a new block and header - block = L2Block.random(blockNumber); - mockedGlobalVariables = new GlobalVariables( + globalVariables = new GlobalVariables( chainId, version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, + new Fr(blockNumber), + new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient, @@ -618,35 +484,31 @@ describe('sequencer', () => { worldState.status.mockResolvedValue({ state: WorldStateRunningState.IDLE, - syncedToL2Block: { number: block.header.globalVariables.blockNumber.toNumber() - 1, hash }, + syncedToL2Block: { number: blockNumber - 1, hash }, }); p2p.getStatus.mockResolvedValue({ - syncedToL2Block: { number: block.header.globalVariables.blockNumber.toNumber() - 1, hash }, state: P2PClientState.IDLE, + syncedToL2Block: { number: blockNumber - 1, hash }, }); - l2BlockSource.getBlockNumber.mockResolvedValue(block.header.globalVariables.blockNumber.toNumber() - 1); + l2BlockSource.getBlockNumber.mockResolvedValue(blockNumber - 1); - l1ToL2MessageSource.getBlockNumber.mockResolvedValue(block.header.globalVariables.blockNumber.toNumber() - 1); + l1ToL2MessageSource.getBlockNumber.mockResolvedValue(blockNumber - 1); - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); - - publisher.canProposeAtNextEthBlock.mockResolvedValue([ - block.header.globalVariables.slotNumber.toBigInt(), - block.header.globalVariables.blockNumber.toBigInt(), - ]); + globalVariableBuilder.buildGlobalVariables.mockResolvedValue(globalVariables); + publisher.canProposeAtNextEthBlock.mockResolvedValue([BigInt(newSlotNumber), BigInt(blockNumber)]); + publisher.claimEpochProofRight.mockResolvedValueOnce(true); publisher.getEpochForSlotNumber.mockImplementation((slotNumber: bigint) => Promise.resolve(slotNumber / BigInt(epochDuration)), ); - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; + tx = makeTx(); txHash = tx.getTxHash(); - p2p.getPendingTxs.mockResolvedValue([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); + mockPendingTxs([tx]); + block = await makeBlock([tx]); }; const mockEpochProofQuote = (opts: { epoch?: bigint; validUntilSlot?: bigint; fee?: number } = {}) => @@ -660,12 +522,11 @@ describe('sequencer', () => { it('submits a valid proof quote with a block', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); // The previous epoch can be claimed @@ -677,15 +538,14 @@ describe('sequencer', () => { it('submits a valid proof quote even without a block', async () => { const blockNumber = 
epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); // There are no txs! - p2p.getPendingTxs.mockResolvedValue([]); + mockPendingTxs([]); const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.claimEpochProofRight.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); // The previous epoch can be claimed @@ -698,12 +558,11 @@ describe('sequencer', () => { it('does not claim the epoch previous to the first', async () => { const blockNumber = 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote({ epoch: 0n }); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(undefined)); @@ -714,13 +573,12 @@ describe('sequencer', () => { it('does not submit a quote with an expired slot number', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const expiredSlotNumber = block.header.globalVariables.slotNumber.toBigInt() - 1n; const proofQuote = mockEpochProofQuote({ validUntilSlot: expiredSlotNumber }); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); // The previous epoch can be claimed @@ -732,12 +590,11 @@ describe('sequencer', () => { it('does not submit a valid quote if unable to claim epoch', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); publisher.getClaimableEpoch.mockResolvedValue(undefined); @@ -748,7 +605,7 @@ describe('sequencer', () => { it('does not submit an invalid quote', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote(); @@ -767,7 +624,7 @@ describe('sequencer', () => { it('selects the lowest cost valid quote', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); // Create 3 valid quotes with different fees. 
// And 3 invalid quotes with lower fees diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 44acbec00d3c..a953e51e1101 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -4,11 +4,9 @@ import { type L1ToL2MessageSource, type L2Block, type L2BlockSource, - type ProcessedTx, SequencerConfigSchema, Tx, type TxHash, - type TxValidator, type WorldStateSynchronizer, } from '@aztec/circuit-types'; import type { AllowedElement, Signature, WorldStateSynchronizerStatus } from '@aztec/circuit-types/interfaces'; @@ -17,7 +15,9 @@ import { AppendOnlyTreeSnapshot, BlockHeader, ContentCommitment, + type ContractDataSource, GENESIS_ARCHIVE_ROOT, + Gas, type GlobalVariables, StateReference, } from '@aztec/circuits.js'; @@ -39,7 +39,7 @@ import { type GlobalVariableBuilder } from '../global_variable_builder/global_bu import { type L1Publisher, VoteType } from '../publisher/l1-publisher.js'; import { prettyLogViemErrorMsg } from '../publisher/utils.js'; import { type SlasherClient } from '../slasher/slasher_client.js'; -import { type TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; +import { createValidatorsForBlockBuilding } from '../tx_validator/tx_validator_factory.js'; import { getDefaultAllowedSetupFunctions } from './allowed.js'; import { type SequencerConfig } from './config.js'; import { SequencerMetrics } from './metrics.js'; @@ -47,12 +47,6 @@ import { SequencerState, orderAttestations } from './utils.js'; export { SequencerState }; -export type ShouldProposeArgs = { - pendingTxsCount?: number; - validTxsCount?: number; - processedTxsCount?: number; -}; - export class SequencerTooSlowError extends Error { constructor( public readonly currentState: SequencerState, @@ -90,6 +84,7 @@ export class Sequencer { private state = SequencerState.STOPPED; private allowedInSetup: AllowedElement[] = getDefaultAllowedSetupFunctions(); private maxBlockSizeInBytes: number = 1024 * 1024; + private maxBlockGas: Gas = new Gas(10e9, 10e9); private processTxTime: number = 12; private metrics: SequencerMetrics; private isFlushing: boolean = false; @@ -112,7 +107,7 @@ export class Sequencer { private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, private publicProcessorFactory: PublicProcessorFactory, - private txValidatorFactory: TxValidatorFactory, + private contractDataSource: ContractDataSource, protected l1Constants: SequencerRollupConstants, private dateProvider: DateProvider, telemetry: TelemetryClient, @@ -149,6 +144,12 @@ export class Sequencer { if (config.minTxsPerBlock !== undefined) { this.minTxsPerBLock = config.minTxsPerBlock; } + if (config.maxDABlockGas !== undefined) { + this.maxBlockGas = new Gas(config.maxDABlockGas, this.maxBlockGas.l2Gas); + } + if (config.maxL2BlockGas !== undefined) { + this.maxBlockGas = new Gas(this.maxBlockGas.daGas, config.maxL2BlockGas); + } if (config.coinbase) { this._coinbase = config.coinbase; } @@ -179,7 +180,7 @@ export class Sequencer { // How late into the slot can we be to start working const initialTime = 2; - // How long it takes to validate the txs collected and get ready to start building + // How long it takes to get ready to start building const blockPrepareTime = 1; // How long it takes for attestations to travel across the p2p layer.
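The two new gas knobs in the `updateConfig` hunk above each overwrite one dimension of the sequencer's `Gas` limit while preserving the other, so they can be set independently. A small self-contained sketch of that merge semantics; the `Gas` class here is a stand-in for the one imported from `@aztec/circuits.js`, for illustration only:

```ts
// Stand-in for the Gas type from @aztec/circuits.js.
class Gas {
  constructor(public readonly daGas: number, public readonly l2Gas: number) {}
}

let maxBlockGas = new Gas(10e9, 10e9); // the default shown above

function applyConfig(config: { maxDABlockGas?: number; maxL2BlockGas?: number }) {
  if (config.maxDABlockGas !== undefined) {
    maxBlockGas = new Gas(config.maxDABlockGas, maxBlockGas.l2Gas);
  }
  if (config.maxL2BlockGas !== undefined) {
    maxBlockGas = new Gas(maxBlockGas.daGas, config.maxL2BlockGas);
  }
}

applyConfig({ maxDABlockGas: 5e9 }); // => Gas(5e9, 10e9): l2Gas left untouched
```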
@@ -218,9 +219,9 @@ export class Sequencer { [SequencerState.SYNCHRONIZING]: this.aztecSlotDuration, // We always want to allow the full slot to check if we are the proposer [SequencerState.PROPOSER_CHECK]: this.aztecSlotDuration, - // First transition towards building a block - [SequencerState.WAITING_FOR_TXS]: initialTime, - // We then validate the txs and prepare to start building the block + // How late we can start initializing a new block proposal + [SequencerState.INITIALIZING_PROPOSAL]: initialTime, + // When we start building a block [SequencerState.CREATING_BLOCK]: initialTime + blockPrepareTime, // We start collecting attestations after building the block [SequencerState.COLLECTING_ATTESTATIONS]: initialTime + blockPrepareTime + processTxsTime + blockValidationTime, @@ -323,25 +324,27 @@ export class Sequencer { void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt(), VoteType.GOVERNANCE); void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt(), VoteType.SLASHING); - if (!this.shouldProposeBlock(historicalHeader, {})) { + // Check the pool has enough txs to build a block + const pendingTxCount = this.p2pClient.getPendingTxCount(); + if (pendingTxCount < this.minTxsPerBLock && !this.isFlushing) { + this.log.verbose(`Not enough txs to propose block. Got ${pendingTxCount} min ${this.minTxsPerBLock}.`, { + slot, + blockNumber: newBlockNumber, + }); + await this.claimEpochProofRightIfAvailable(slot); return; } + this.setState(SequencerState.INITIALIZING_PROPOSAL, slot); this.log.verbose(`Preparing proposal for block ${newBlockNumber} at slot ${slot}`, { chainTipArchive: new Fr(chainTipArchive), blockNumber: newBlockNumber, slot, }); - this.setState(SequencerState.WAITING_FOR_TXS, slot); - - // Get txs to build the new block. - const pendingTxs = await this.p2pClient.getPendingTxs(); - - if (!this.shouldProposeBlock(historicalHeader, { pendingTxsCount: pendingTxs.length })) { - await this.claimEpochProofRightIfAvailable(slot); - return; - } + // We don't fetch exactly maxTxsPerBlock txs here because we may not need all of them if we hit a limit before, + // and also we may need to fetch more if we don't have enough valid txs. + const pendingTxs = this.p2pClient.iteratePendingTxs(); // If I created a "partial" header here that should make our job much easier. const proposalHeader = new BlockHeader( @@ -353,35 +356,12 @@ export class Sequencer { Fr.ZERO, ); - // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here. - // TODO: We should validate only the number of txs we need to speed up this process. - const allValidTxs = await this.takeValidTxs( - pendingTxs, - this.txValidatorFactory.validatorForNewTxs(newGlobalVariables, this.allowedInSetup), - ); - - // TODO: We are taking the size of the tx from private-land, but we should be doing this after running - // public functions. Only reason why we do it here now is because the public processor and orchestrator - // are set up such that they require knowing the total number of txs in advance. Still, main reason for - // exceeding max block size in bytes is contract class registration, which happens in private-land. This - // may break if we start emitting lots of log data from public-land. 
- const validTxs = this.takeTxsWithinMaxSize(allValidTxs); - - this.log.verbose( - `Collected ${validTxs.length} txs out of ${allValidTxs.length} valid txs out of ${pendingTxs.length} total pending txs for block ${newBlockNumber}`, - ); - - // Bail if we don't have enough valid txs - if (!this.shouldProposeBlock(historicalHeader, { validTxsCount: validTxs.length })) { - await this.claimEpochProofRightIfAvailable(slot); - return; - } - try { + // TODO(palla/txs) Is the note below still valid? We don't seem to be doing any rollback in there. // @note It is very important that the following function will FAIL and not just return early // if it has made any state changes. If not, we won't roll back the state, and you will // be in for a world of pain. - await this.buildBlockAndAttemptToPublish(validTxs, proposalHeader, historicalHeader); + await this.buildBlockAndAttemptToPublish(pendingTxs, proposalHeader, historicalHeader); } catch (err) { this.log.error(`Error assembling block`, err, { blockNumber: newBlockNumber, slot }); } @@ -469,64 +449,20 @@ this.state = proposedState; } - shouldProposeBlock(historicalHeader: BlockHeader | undefined, args: ShouldProposeArgs): boolean { - if (this.isFlushing) { - this.log.verbose(`Flushing all pending txs in new block`); - return true; - } - - // Compute time elapsed since the previous block - const lastBlockTime = historicalHeader?.globalVariables.timestamp.toNumber() || 0; - const currentTime = Math.floor(Date.now() / 1000); - const elapsedSinceLastBlock = currentTime - lastBlockTime; - this.log.debug( - `Last block mined at ${lastBlockTime} current time is ${currentTime} (elapsed ${elapsedSinceLastBlock})`, - ); - - // We need to have at least minTxsPerBLock txs. - if (args.pendingTxsCount !== undefined && args.pendingTxsCount < this.minTxsPerBLock) { - this.log.verbose( - `Not creating block because not enough txs in the pool (got ${args.pendingTxsCount} min ${this.minTxsPerBLock})`, - ); - return false; - } - - // Bail if we don't have enough valid txs - if (args.validTxsCount !== undefined && args.validTxsCount < this.minTxsPerBLock) { - this.log.verbose( - `Not creating block because not enough valid txs loaded from the pool (got ${args.validTxsCount} min ${this.minTxsPerBLock})`, - ); - return false; - } - - // TODO: This check should be processedTxs.length < this.minTxsPerBLock, so we don't publish a block with - // less txs than the minimum. But that'd cause the entire block to be aborted and retried. Instead, we should - // go back to the p2p pool and load more txs until we hit our minTxsPerBLock target. Only if there are no txs - we should bail.
- if (args.processedTxsCount === 0 && this.minTxsPerBLock > 0) { - this.log.verbose('No txs processed correctly to build block.'); - return false; - } - - return true; - } - /** * Build a block * * Shared between the sequencer and the validator for re-execution * - * @param validTxs - The valid transactions to construct the block from + * @param pendingTxs - The pending transactions to construct the block from * @param newGlobalVariables - The global variables for the new block * @param historicalHeader - The historical header of the parent - * @param interrupt - The interrupt callback, used to validate the block for submission and check if we should propose the block * @param opts - Whether to just validate the block as a validator, as opposed to building it as a proposal */ private async buildBlock( - validTxs: Tx[], + pendingTxs: Iterable<Tx>, newGlobalVariables: GlobalVariables, historicalHeader?: BlockHeader, - interrupt?: (processedTxs: ProcessedTx[]) => Promise<void>, opts: { validateOnly?: boolean } = {}, ) { const blockNumber = newGlobalVariables.blockNumber.toBigInt(); @@ -534,19 +470,9 @@ this.log.debug(`Requesting L1 to L2 messages from contract for block ${blockNumber}`); const l1ToL2Messages = await this.l1ToL2MessageSource.getL1ToL2Messages(blockNumber); + const msgCount = l1ToL2Messages.length; - this.log.verbose( - `Building block ${blockNumber} with ${validTxs.length} txs and ${l1ToL2Messages.length} messages`, - { - msgCount: l1ToL2Messages.length, - txCount: validTxs.length, - slot, - blockNumber, - }, - ); - - const numRealTxs = validTxs.length; - const blockSize = Math.max(2, numRealTxs); + this.log.verbose(`Building block ${blockNumber} for slot ${slot}`, { slot, blockNumber, msgCount }); // Sync to the previous block at least await this.worldState.syncImmediate(newGlobalVariables.blockNumber.toNumber() - 1); @@ -570,18 +496,30 @@ // We set the deadline for tx processing to the start of the CREATING_BLOCK phase, plus the expected time for tx processing. // Deadline is only set if enforceTimeTable is enabled. const processingEndTimeWithinSlot = this.timeTable[SequencerState.CREATING_BLOCK] + this.processTxTime; - const processingDeadline = this.enforceTimeTable + const deadline = this.enforceTimeTable ? new Date((this.getSlotStartTimestamp(slot) + processingEndTimeWithinSlot) * 1000) : undefined; - this.log.verbose(`Processing ${validTxs.length} txs`, { + this.log.verbose(`Processing pending txs`, { slot, slotStart: new Date(this.getSlotStartTimestamp(slot) * 1000), now: new Date(this.dateProvider.now()), - deadline: processingDeadline, + deadline, }); - const processingTxValidator = this.txValidatorFactory.validatorForProcessedTxs(publicProcessorFork); + + const validators = createValidatorsForBlockBuilding( + publicProcessorFork, + this.contractDataSource, + newGlobalVariables, + !!this.config.enforceFees, + this.allowedInSetup, + ); + + // REFACTOR: Public processor should just handle processing, one tx at a time. It should be responsibility + // of the sequencer to update world state and iterate over txs. We should refactor this along with unifying the + // publicProcessorFork and orchestratorFork, to avoid doing tree insertions twice when building the block.
+ const limits = { deadline, maxTransactions: this.maxTxsPerBlock, maxBlockSize: this.maxBlockSizeInBytes }; const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => - processor.process(validTxs, blockSize, processingTxValidator, processingDeadline), + processor.process(pendingTxs, limits, validators), ); if (failedTxs.length > 0) { @@ -609,8 +547,6 @@ const duration = Number(end - start) / 1_000; this.metrics.recordBlockBuilderTreeInsertions(duration); - await interrupt?.(processedTxs); - // All real transactions have been added, set the block as full and pad if needed const block = await blockBuilder.setBlockCompleted(); @@ -618,7 +554,7 @@ block, publicProcessorDuration, numMsgs: l1ToL2Messages.length, - numProcessedTxs: processedTxs.length, + numTxs: processedTxs.length, blockBuildingTimer, }; } finally { @@ -642,7 +578,7 @@ * @dev MUST throw instead of exiting early to ensure that world-state * is being rolled back if the block is dropped. * - * @param validTxs - The valid transactions to construct the block from + * @param pendingTxs - Iterable of pending transactions to construct the block from * @param proposalHeader - The partial header constructed for the proposal * @param historicalHeader - The historical header of the parent */ @@ -650,7 +586,7 @@ [Attributes.BLOCK_NUMBER]: proposalHeader.globalVariables.blockNumber.toNumber(), })) private async buildBlockAndAttemptToPublish( - validTxs: Tx[], + pendingTxs: Iterable<Tx>, proposalHeader: BlockHeader, historicalHeader: BlockHeader | undefined, ): Promise<void> { @@ -660,40 +596,19 @@ const blockNumber = newGlobalVariables.blockNumber.toNumber(); const slot = newGlobalVariables.slotNumber.toBigInt(); - this.metrics.recordNewBlock(blockNumber, validTxs.length); + // this.metrics.recordNewBlock(blockNumber, validTxs.length); const workTimer = new Timer(); this.setState(SequencerState.CREATING_BLOCK, slot); - /** - * BuildBlock is shared between the sequencer and the validator for re-execution - * We use the interrupt callback to validate the block for submission and check if we should propose the block - * - * If we fail, we throw an error in order to roll back - */ - const interrupt = async (processedTxs: ProcessedTx[]) => { - await this.publisher.validateBlockForSubmission(proposalHeader); - - if ( - !this.shouldProposeBlock(historicalHeader, { - validTxsCount: validTxs.length, - processedTxsCount: processedTxs.length, - }) - ) { - // TODO: Roll back changes to world state - throw new Error('Should not propose the block'); - } - }; - // Start collecting proof quotes for the previous epoch if needed in the background const proofQuotePromise = this.createProofClaimForPreviousEpoch(slot); try { - const buildBlockRes = await this.buildBlock(validTxs, newGlobalVariables, historicalHeader, interrupt); - const { block, publicProcessorDuration, numProcessedTxs, numMsgs, blockBuildingTimer } = buildBlockRes; + const buildBlockRes = await this.buildBlock(pendingTxs, newGlobalVariables, historicalHeader); + const { block, publicProcessorDuration, numTxs, numMsgs, blockBuildingTimer } = buildBlockRes; // TODO(@PhilWindle) We should probably periodically check for things like another // block being published before ours instead of just waiting on our block - await this.publisher.validateBlockForSubmission(block.header); const workDuration = workTimer.ms();
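Since `pendingTxs` is now an `Iterable<Tx>` fed by `iteratePendingTxs()`, the processor can pull transactions lazily and stop as soon as one of the limits in the `limits` object assembled above trips, instead of being handed a pre-sized array. A minimal sketch of such a pull-based loop; this is illustrative only, not the actual `PublicProcessor.process` implementation, and `processOne` is a hypothetical per-tx callback:

```ts
type Limits = { deadline?: Date; maxTransactions?: number; maxBlockSize?: number };

async function processWithLimits<T>(
  pendingTxs: Iterable<T>,
  limits: Limits,
  processOne: (tx: T) => Promise<{ ok: boolean; sizeInBytes: number }>,
): Promise<[included: T[], failed: T[]]> {
  const included: T[] = [];
  const failed: T[] = [];
  let totalSize = 0;
  for (const tx of pendingTxs) {
    // Stop pulling once we run out of time in the slot...
    if (limits.deadline && Date.now() >= limits.deadline.getTime()) break;
    // ...or once the block holds as many txs as allowed.
    if (limits.maxTransactions !== undefined && included.length >= limits.maxTransactions) break;
    const { ok, sizeInBytes } = await processOne(tx);
    if (!ok) {
      failed.push(tx); // reported separately, like failedTxs above
      continue;
    }
    if (limits.maxBlockSize !== undefined && totalSize + sizeInBytes > limits.maxBlockSize) continue;
    included.push(tx);
    totalSize += sizeInBytes;
  }
  return [included, failed];
}
```

Pulling lazily is what lets the diff below delete `takeValidTxs` and `takeTxsWithinMaxSize`: the size and count caps move into the processing loop itself.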
@@ -707,8 +622,8 @@ }; const blockHash = block.hash(); - const txHashes = validTxs.map(tx => tx.getTxHash()); - this.log.info(`Built block ${block.number} with hash ${blockHash}`, { + const txHashes = block.body.txEffects.map(tx => tx.txHash); + this.log.info(`Built block ${block.number} for slot ${slot} with ${numTxs} txs`, { blockHash, globalVariables: block.header.globalVariables.toInspect(), txHashes, @@ -734,14 +649,12 @@ await this.publishL2Block(block, attestations, txHashes, proofQuote); this.metrics.recordPublishedBlock(workDuration); this.log.info( - `Published rollup block ${ - block.number - } with ${numProcessedTxs} transactions and ${numMsgs} messages in ${Math.ceil(workDuration)}ms`, + `Published block ${block.number} with ${numTxs} txs and ${numMsgs} messages in ${Math.ceil(workDuration)}ms`, { blockNumber: block.number, blockHash: blockHash, slot, - txCount: numProcessedTxs, + txCount: txHashes.length, msgCount: numMsgs, duration: Math.ceil(workDuration), submitter: this.publisher.getSenderAddress().toString(), @@ -865,36 +778,6 @@ } } - protected async takeValidTxs<T extends Tx>(txs: T[], validator: TxValidator<T>): Promise<T[]> { - const [valid, invalid] = await validator.validateTxs(txs); - if (invalid.length > 0) { - this.log.debug(`Dropping invalid txs from the p2p pool ${Tx.getHashes(invalid).join(', ')}`); - await this.p2pClient.deleteTxs(Tx.getHashes(invalid)); - } - - return valid.slice(0, this.maxTxsPerBlock); - } - - protected takeTxsWithinMaxSize(txs: Tx[]): Tx[] { - const maxSize = this.maxBlockSizeInBytes; - let totalSize = 0; - - const toReturn: Tx[] = []; - for (const tx of txs) { - const txSize = tx.getSize() - tx.clientIvcProof.clientIvcProofBuffer.length; - if (totalSize + txSize > maxSize) { - this.log.debug( - `Dropping tx ${tx.getTxHash()} with estimated size ${txSize} due to exceeding ${maxSize} block size limit (currently at ${totalSize})`, - ); - continue; - } - toReturn.push(tx); - totalSize += txSize; - } - - return toReturn; - } - @trackSpan( 'Sequencer.claimEpochProofRightIfAvailable', slotNumber => ({ [Attributes.SLOT_NUMBER]: Number(slotNumber) }), diff --git a/yarn-project/sequencer-client/src/sequencer/utils.ts b/yarn-project/sequencer-client/src/sequencer/utils.ts index 5939b43c353c..af90e2f45ddd 100644 --- a/yarn-project/sequencer-client/src/sequencer/utils.ts +++ b/yarn-project/sequencer-client/src/sequencer/utils.ts @@ -19,9 +19,9 @@ export enum SequencerState { */ PROPOSER_CHECK = 'PROPOSER_CHECK', /** - * Polling the P2P module for txs to include in a block. Will move to CREATING_BLOCK if there are valid txs to include, or back to SYNCHRONIZING otherwise. + * Initializing the block proposal. Will move to CREATING_BLOCK if there are valid txs to include, or back to SYNCHRONIZING otherwise. */ - WAITING_FOR_TXS = 'WAITING_FOR_TXS', + INITIALIZING_PROPOSAL = 'INITIALIZING_PROPOSAL', /** * Creating a new L2 block. Includes processing public function calls and running rollup circuits. Will move to PUBLISHING_CONTRACT_DATA.
*/ diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts index 68a7c4f84276..07f67fdeb0bd 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts @@ -46,22 +46,19 @@ describe('GasTxValidator', () => { const validateTx = async (tx: Tx) => { const validator = new GasTxValidator(publicStateSource, feeJuiceAddress, enforceFees, gasFees); - return await validator.validateTxs([tx]); + return await validator.validateTx(tx); }; const expectValid = async (tx: Tx) => { - const result = await validateTx(tx); - expect(result).toEqual([[tx], [], []]); + await expect(validateTx(tx)).resolves.toEqual({ result: 'valid' }); }; - const expectInvalid = async (tx: Tx) => { - const result = await validateTx(tx); - expect(result).toEqual([[], [tx], []]); + const expectInvalid = async (tx: Tx, reason: string) => { + await expect(validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); }; - const expectSkipped = async (tx: Tx) => { - const result = await validateTx(tx); - expect(result).toEqual([[], [], [tx]]); + const expectSkipped = async (tx: Tx, reason: string) => { + await expect(validateTx(tx)).resolves.toEqual({ result: 'skipped', reason: [reason] }); }; it('allows fee paying txs if fee payer has enough balance', async () => { @@ -83,11 +80,11 @@ describe('GasTxValidator', () => { it('rejects txs if fee payer has not enough balance', async () => { mockBalance(feeLimit - 1n); - await expectInvalid(tx); + await expectInvalid(tx, 'Insufficient fee payer balance'); }); it('rejects txs if fee payer has zero balance', async () => { - await expectInvalid(tx); + await expectInvalid(tx, 'Insufficient fee payer balance'); }); it('rejects txs if fee payer claims balance outside setup', async () => { @@ -96,7 +93,7 @@ describe('GasTxValidator', () => { selector: FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'), args: [payer.toField(), new Fr(1n)], }); - await expectInvalid(tx); + await expectInvalid(tx, 'Insufficient fee payer balance'); }); it('allows txs with no fee payer if fees are not enforced', async () => { @@ -107,16 +104,16 @@ describe('GasTxValidator', () => { it('rejects txs with no fee payer if fees are enforced', async () => { enforceFees = true; tx.data.feePayer = AztecAddress.ZERO; - await expectInvalid(tx); + await expectInvalid(tx, 'Missing fee payer'); }); it('skips txs with not enough fee per da gas', async () => { gasFees.feePerDaGas = gasFees.feePerDaGas.add(new Fr(1)); - await expectSkipped(tx); + await expectSkipped(tx, 'Insufficient fee per gas'); }); it('skips txs with not enough fee per l2 gas', async () => { gasFees.feePerL2Gas = gasFees.feePerL2Gas.add(new Fr(1)); - await expectSkipped(tx); + await expectSkipped(tx, 'Insufficient fee per gas'); }); }); diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts index 8b4167543c9e..b5ee24df54c0 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts @@ -1,4 +1,4 @@ -import { type Tx, TxExecutionPhase, type TxValidator } from '@aztec/circuit-types'; +import { type Tx, TxExecutionPhase, type TxValidationResult, type TxValidator } from '@aztec/circuit-types'; import { type AztecAddress, type Fr, FunctionSelector, type GasFees } from 
'@aztec/circuits.js'; import { createLogger } from '@aztec/foundation/log'; import { computeFeePayerBalanceStorageSlot, getExecutionRequestsByPhase } from '@aztec/simulator'; @@ -27,25 +27,10 @@ export class GasTxValidator implements TxValidator<Tx> { this.#gasFees = gasFees; } - async validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[], skippedTxs: Tx[]]> { - const validTxs: Tx[] = []; - const invalidTxs: Tx[] = []; - const skippedTxs: Tx[] = []; - - for (const tx of txs) { - if (this.#shouldSkip(tx)) { - skippedTxs.push(tx); - } else if (await this.#validateTxFee(tx)) { - validTxs.push(tx); - } else { - invalidTxs.push(tx); - } + validateTx(tx: Tx): Promise<TxValidationResult> { + if (this.#shouldSkip(tx)) { + return Promise.resolve({ result: 'skipped', reason: ['Insufficient fee per gas'] }); } - - return [validTxs, invalidTxs, skippedTxs]; - } - - validateTx(tx: Tx): Promise<boolean> { return this.#validateTxFee(tx); } @@ -57,20 +42,22 @@ const notEnoughMaxFees = maxFeesPerGas.feePerDaGas.lt(this.#gasFees.feePerDaGas) || maxFeesPerGas.feePerL2Gas.lt(this.#gasFees.feePerL2Gas); + if (notEnoughMaxFees) { this.#log.warn(`Skipping transaction ${tx.getTxHash()} due to insufficient fee per gas`); } return notEnoughMaxFees; } - async #validateTxFee(tx: Tx): Promise<boolean> { + async #validateTxFee(tx: Tx): Promise<TxValidationResult> { const feePayer = tx.data.feePayer; // TODO(@spalladino) Eventually remove the is_zero condition as we should always charge fees to every tx if (feePayer.isZero()) { if (this.#enforceFees) { this.#log.warn(`Rejecting transaction ${tx.getTxHash()} due to missing fee payer`); + return { result: 'invalid', reason: ['Missing fee payer'] }; } else { - return true; + return { result: 'valid' }; } } @@ -98,13 +85,13 @@ const balance = claimFunctionCall ?
initialBalance.add(claimFunctionCall.args[2]) : initialBalance; if (balance.lt(feeLimit)) { - this.#log.info(`Rejecting transaction due to not enough fee payer balance`, { + this.#log.warn(`Rejecting transaction due to not enough fee payer balance`, { feePayer, balance: balance.toBigInt(), feeLimit: feeLimit.toBigInt(), }); - return false; + return { result: 'invalid', reason: ['Insufficient fee payer balance'] }; } - return true; + return { result: 'valid' }; } }
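The structured `TxValidationResult` returned above lets callers distinguish txs that can never be mined (`invalid`) from txs that merely cannot be mined right now (`skipped`, e.g. max fee per gas below the current block's fees). A hedged sketch of how a caller can fold over these results; the types are assumed from the shapes used above, and this is not the actual pool code:

```ts
type TxValidationResult = { result: 'valid' } | { result: 'invalid' | 'skipped'; reason: string[] };

async function partitionTxs<T>(
  txs: T[],
  validateTx: (tx: T) => Promise<TxValidationResult>,
): Promise<{ valid: T[]; invalid: T[]; skipped: T[] }> {
  const valid: T[] = [];
  const invalid: T[] = [];
  const skipped: T[] = [];
  for (const tx of txs) {
    const outcome = await validateTx(tx);
    if (outcome.result === 'valid') valid.push(tx);
    // 'invalid' txs are candidates for eviction from the pool;
    // 'skipped' txs can stay and be retried in a later block.
    else if (outcome.result === 'invalid') invalid.push(tx);
    else skipped.push(tx);
  }
  return { valid, invalid, skipped };
}
```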
diff --git a/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.test.ts b/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.test.ts new file mode 100644 index 000000000000..136ad73056ac --- /dev/null +++ b/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.test.ts @@ -0,0 +1,57 @@ +import { MerkleTreeId, type MerkleTreeReadOperations } from '@aztec/circuit-types'; +import { times } from '@aztec/foundation/collection'; + +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { NullifierCache } from './nullifier_cache.js'; + +describe('NullifierCache', () => { + let nullifierCache: NullifierCache; + let db: MockProxy<MerkleTreeReadOperations>; + let nullifiers: Buffer[]; + + beforeEach(() => { + db = mock<MerkleTreeReadOperations>(); + nullifierCache = new NullifierCache(db); + nullifiers = [Buffer.alloc(1, 1), Buffer.alloc(1, 2), Buffer.alloc(1, 3)]; + }); + + it('checks nullifier existence against cache', async () => { + nullifierCache.addNullifiers([nullifiers[0], nullifiers[1]]); + db.findLeafIndices.mockResolvedValue([]); + await expect(nullifierCache.nullifiersExist(nullifiers)).resolves.toEqual([true, true, false]); + }); + + it('checks nullifier existence against db', async () => { + db.findLeafIndices.mockResolvedValue([1n, 2n, undefined]); + await expect(nullifierCache.nullifiersExist(nullifiers)).resolves.toEqual([true, true, false]); + }); + + it('checks nullifier existence against db only on cache miss', async () => { + nullifierCache.addNullifiers([nullifiers[0]]); + db.findLeafIndices.mockResolvedValue([2n, undefined]); + const result = await nullifierCache.nullifiersExist(nullifiers); + expect(db.findLeafIndices).toHaveBeenCalledWith(MerkleTreeId.NULLIFIER_TREE, [nullifiers[1], nullifiers[2]]); + expect(result).toEqual([true, true, false]); + }); + + it('checks existence with several nullifiers', async () => { + // Split 60 nullifiers evenly across db, cache, or not found + const nullifiers = times(60, i => Buffer.alloc(1, i)); + const where = nullifiers.map((_, i) => + i % 3 === 0 ? ('db' as const) : i % 3 === 1 ? ('cache' as const) : ('none' as const), + ); + + // Add to the cache nullifiers flagged as cache + nullifierCache.addNullifiers(nullifiers.filter((_, i) => where[i] === 'cache')); + // The db should be queried only with nullifiers not in the cache, return true for half of them then + db.findLeafIndices.mockResolvedValue(times(40, i => (i % 2 === 0 ? BigInt(i) : undefined))); + + const result = await nullifierCache.nullifiersExist(nullifiers); + expect(db.findLeafIndices).toHaveBeenCalledWith( + MerkleTreeId.NULLIFIER_TREE, + nullifiers.filter((_, i) => where[i] !== 'cache'), + ); + expect(result).toEqual(times(60, i => where[i] !== 'none')); + }); +}); diff --git a/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.ts b/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.ts new file mode 100644 index 000000000000..3175f05e1d04 --- /dev/null +++ b/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.ts @@ -0,0 +1,29 @@ +import { MerkleTreeId, type MerkleTreeReadOperations } from '@aztec/circuit-types'; +import { type NullifierSource } from '@aztec/p2p'; + +/** + * Implements a nullifier source by checking a DB and an in-memory collection. + * Intended for validating transactions as they are added to a block. + */ +export class NullifierCache implements NullifierSource { + nullifiers: Set<string>; + + constructor(private db: MerkleTreeReadOperations) { + this.nullifiers = new Set(); + } + + public async nullifiersExist(nullifiers: Buffer[]): Promise<boolean[]> { + const cacheResults = nullifiers.map(n => this.nullifiers.has(n.toString())); + const toCheckDb = nullifiers.filter((_n, index) => !cacheResults[index]); + const dbHits = await this.db.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, toCheckDb); + + let dbIndex = 0; + return nullifiers.map((_n, index) => cacheResults[index] || dbHits[dbIndex++] !== undefined); + } + + public addNullifiers(nullifiers: Buffer[]) { + for (const nullifier of nullifiers) { + this.nullifiers.add(nullifier.toString()); + } + } +}
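One `NullifierCache` is meant to be shared across a single block build (the factory below returns it alongside the validators), so that nullifiers emitted by an earlier tx in the block are already visible when later txs are validated, before anything reaches the tree. A minimal usage sketch under that assumption; the workflow here is illustrative, using only the class's own API:

```ts
import { type MerkleTreeReadOperations } from '@aztec/circuit-types';
import { NullifierCache } from './nullifier_cache.js';

// txNullifiers: the nullifiers each candidate tx would emit, in block order.
async function sketchBlockBuild(db: MerkleTreeReadOperations, txNullifiers: Buffer[][]) {
  const cache = new NullifierCache(db);
  const included: Buffer[][] = [];
  for (const nullifiers of txNullifiers) {
    const exists = await cache.nullifiersExist(nullifiers);
    if (exists.some(Boolean)) continue; // double spend vs the tree or an earlier tx in this block
    cache.addNullifiers(nullifiers); // make them visible to the txs that follow
    included.push(nullifiers);
  }
  return included;
}
```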
diff --git a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts index 55a1d0ecb79a..53894c3e97c1 100644 --- a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts @@ -1,4 +1,4 @@ -import { mockTx } from '@aztec/circuit-types'; +import { type Tx, mockTx } from '@aztec/circuit-types'; import { type AztecAddress, type ContractDataSource, Fr, type FunctionSelector } from '@aztec/circuits.js'; import { makeAztecAddress, makeSelector } from '@aztec/circuits.js/testing'; @@ -15,6 +15,14 @@ describe('PhasesTxValidator', () => { let allowedSetupSelector1: FunctionSelector; let allowedSetupSelector2: FunctionSelector; + const expectValid = async (tx: Tx) => { + await expect(txValidator.validateTx(tx)).resolves.toEqual({ result: 'valid' }); + }; + + const expectInvalid = async (tx: Tx, reason: string) => { + await expect(txValidator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + }; + beforeEach(() => { allowedContractClass = Fr.random(); allowedContract = makeAztecAddress(); @@ -53,7 +61,7 @@ const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); + await expectValid(tx); }); it('allows setup functions on the contracts class allow list', async () => { @@ -70,13 +78,13 @@ } }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); + await expectValid(tx); }); it('rejects txs with setup functions not on the allow list', async () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); + await expectInvalid(tx, 'Setup function not on allow list'); }); it('rejects setup functions not on the contracts class list', async () => { @@ -92,7 +100,8 @@ return Promise.resolve(undefined); } }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); + + await expectInvalid(tx, 'Setup function not on allow list'); }); it('allows multiple setup functions on the allow list', async () => { @@ -100,13 +109,13 @@ patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); patchNonRevertibleFn(tx, 1, { address: allowedContract, selector: allowedSetupSelector2 }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); + await expectValid(tx); }); it('rejects if one setup functions is not on the allow list', async () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); + await expectInvalid(tx, 'Setup function not on allow list'); }); }); diff --git a/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts b/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts index d21b136a8284..2d885f68ce6e 100644 --- a/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts +++ b/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts @@ -3,6 +3,7 @@ import { type PublicExecutionRequest, Tx, TxExecutionPhase, + type TxValidationResult, type TxValidator, } from '@aztec/circuit-types'; import { type ContractDataSource } from '@aztec/circuits.js'; @@ -17,48 +18,36 @@ export class PhasesTxValidator implements TxValidator<Tx> { this.contractDataSource = new ContractsDataSourcePublicDB(contracts); } - async validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[]]> { - const validTxs: Tx[] = []; - const invalidTxs: Tx[] = []; - - for (const tx of txs) { + async validateTx(tx: Tx): Promise<TxValidationResult> { + try { // TODO(@spalladino): We add this just to handle public authwit-check calls during setup // which are needed for public FPC flows, but fail if the account contract hasn't been deployed yet, // which is what we're trying to do as part of the current txs. await this.contractDataSource.addNewContracts(tx); - if (await this.validateTx(tx)) { - validTxs.push(tx); - } else { - invalidTxs.push(tx); + if (!tx.data.forPublic) { + this.#log.debug(`Tx ${Tx.getHash(tx)} does not contain enqueued public functions. Skipping phases validation.`); + return { result: 'valid' }; } - await this.contractDataSource.removeNewContracts(tx); - } - - return Promise.resolve([validTxs, invalidTxs]); - } - - async validateTx(tx: Tx): Promise<boolean> { - if (!tx.data.forPublic) { - this.#log.debug(`Tx ${Tx.getHash(tx)} does not contain enqueued public functions.
Skipping phases validation.`); - return true; - } - - const setupFns = getExecutionRequestsByPhase(tx, TxExecutionPhase.SETUP); - for (const setupFn of setupFns) { - if (!(await this.isOnAllowList(setupFn, this.setupAllowList))) { - this.#log.warn( - `Rejecting tx ${Tx.getHash(tx)} because it calls setup function not on allow list: ${ - setupFn.callContext.contractAddress - }:${setupFn.callContext.functionSelector}`, - ); - - return false; + const setupFns = getExecutionRequestsByPhase(tx, TxExecutionPhase.SETUP); + for (const setupFn of setupFns) { + if (!(await this.isOnAllowList(setupFn, this.setupAllowList))) { + this.#log.warn( + `Rejecting tx ${Tx.getHash(tx)} because it calls setup function not on allow list: ${ + setupFn.callContext.contractAddress + }:${setupFn.callContext.functionSelector}`, + { allowList: this.setupAllowList }, + ); + + return { result: 'invalid', reason: ['Setup function not on allow list'] }; + } } - } - return true; + return { result: 'valid' }; + } finally { + await this.contractDataSource.removeNewContracts(tx); + } } async isOnAllowList(publicCall: PublicExecutionRequest, allowList: AllowedElement[]): Promise<boolean> { diff --git a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts index 59b6baab1cf6..500e446360ca 100644 --- a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts +++ b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts @@ -1,65 +1,107 @@ import { type AllowedElement, - MerkleTreeId, + type ClientProtocolCircuitVerifier, type MerkleTreeReadOperations, type ProcessedTx, type Tx, type TxValidator, } from '@aztec/circuit-types'; -import { type ContractDataSource, type GlobalVariables } from '@aztec/circuits.js'; +import { type AztecAddress, type ContractDataSource, Fr, type GasFees, type GlobalVariables } from '@aztec/circuits.js'; import { AggregateTxValidator, DataTxValidator, DoubleSpendTxValidator, MetadataTxValidator, - type NullifierSource, + TxProofValidator, } from '@aztec/p2p'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { readPublicState } from '@aztec/simulator'; import { GasTxValidator, type PublicStateSource } from './gas_validator.js'; +import { NullifierCache } from './nullifier_cache.js'; import { PhasesTxValidator } from './phases_validator.js'; -export class TxValidatorFactory { - nullifierSource: NullifierSource; - publicStateSource: PublicStateSource; - constructor( - private committedDb: MerkleTreeReadOperations, - private contractDataSource: ContractDataSource, - private enforceFees: boolean, - ) { - this.nullifierSource = { - getNullifierIndices: nullifiers => - this.committedDb - .findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers) - .then(x => x.filter(index => index !== undefined) as bigint[]), - }; +export function createValidatorForAcceptingTxs( + db: MerkleTreeReadOperations, + contractDataSource: ContractDataSource, + verifier: ClientProtocolCircuitVerifier | undefined, + data: { + blockNumber: number; + l1ChainId: number; + enforceFees: boolean; + setupAllowList: AllowedElement[]; + gasFees: GasFees; + }, +): TxValidator<Tx> { + const { blockNumber, l1ChainId, enforceFees, setupAllowList, gasFees } = data; + const validators: TxValidator<Tx>[] = [ + new DataTxValidator(), + new MetadataTxValidator(new Fr(l1ChainId), new Fr(blockNumber)), + new DoubleSpendTxValidator(new NullifierCache(db)), + new PhasesTxValidator(contractDataSource, setupAllowList), + new
GasTxValidator(new DatabasePublicStateSource(db), ProtocolContractAddress.FeeJuice, enforceFees, gasFees), + ]; - this.publicStateSource = { - storageRead: (contractAddress, slot) => { - return readPublicState(this.committedDb, contractAddress, slot); - }, - }; + if (verifier) { + validators.push(new TxProofValidator(verifier)); } - validatorForNewTxs(globalVariables: GlobalVariables, setupAllowList: AllowedElement[]): TxValidator<Tx> { - return new AggregateTxValidator( - new DataTxValidator(), - new MetadataTxValidator(globalVariables.chainId, globalVariables.blockNumber), - new DoubleSpendTxValidator(this.nullifierSource), - new PhasesTxValidator(this.contractDataSource, setupAllowList), - new GasTxValidator( - this.publicStateSource, - ProtocolContractAddress.FeeJuice, - this.enforceFees, - globalVariables.gasFees, - ), - ); - } + return new AggregateTxValidator(...validators); +} + +export function createValidatorsForBlockBuilding( + db: MerkleTreeReadOperations, + contractDataSource: ContractDataSource, + globalVariables: GlobalVariables, + enforceFees: boolean, + setupAllowList: AllowedElement[], +): { + preprocessValidator: TxValidator<Tx>; + postprocessValidator: TxValidator<ProcessedTx>; + nullifierCache: NullifierCache; +} { + const nullifierCache = new NullifierCache(db); + const publicStateSource = new DatabasePublicStateSource(db); + + return { + preprocessValidator: preprocessValidator( + nullifierCache, + publicStateSource, + contractDataSource, + enforceFees, + globalVariables, + setupAllowList, + ), + postprocessValidator: postprocessValidator(nullifierCache), + nullifierCache, + }; +} - validatorForProcessedTxs(fork: MerkleTreeReadOperations): TxValidator<ProcessedTx> { - return new DoubleSpendTxValidator({ - getNullifierIndices: nullifiers => fork.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers), - }); +class DatabasePublicStateSource implements PublicStateSource { + constructor(private db: MerkleTreeReadOperations) {} + + storageRead(contractAddress: AztecAddress, slot: Fr): Promise<Fr> { + return readPublicState(this.db, contractAddress, slot); } } + +function preprocessValidator( + nullifierCache: NullifierCache, + publicStateSource: PublicStateSource, + contractDataSource: ContractDataSource, + enforceFees: boolean, + globalVariables: GlobalVariables, + setupAllowList: AllowedElement[], +): TxValidator<Tx> { + // We don't include the TxProofValidator or the DataTxValidator here because they are already checked by the time we get to block building.
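+ // Ordering note (editorial): the validators below are listed from cheapest to most expensive.
+ // Metadata and double-spend checks are local (the double-spend check reads the shared
+ // NullifierCache), while the phases and gas checks need contract and public-state reads.
+ // Data and proof validation already ran in createValidatorForAcceptingTxs at pool admission.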
+ return new AggregateTxValidator( + new MetadataTxValidator(globalVariables.chainId, globalVariables.blockNumber), + new DoubleSpendTxValidator(nullifierCache), + new PhasesTxValidator(contractDataSource, setupAllowList), + new GasTxValidator(publicStateSource, ProtocolContractAddress.FeeJuice, enforceFees, globalVariables.gasFees), + ); +} + +function postprocessValidator(nullifierCache: NullifierCache): TxValidator<ProcessedTx> { + return new DoubleSpendTxValidator(nullifierCache); +} diff --git a/yarn-project/simulator/src/avm/errors.ts b/yarn-project/simulator/src/avm/errors.ts index 30c1fd4726d3..64007b356294 100644 --- a/yarn-project/simulator/src/avm/errors.ts +++ b/yarn-project/simulator/src/avm/errors.ts @@ -1,5 +1,5 @@ import { type FailingFunction, type NoirCallStack } from '@aztec/circuit-types'; -import { type AztecAddress, type Fr } from '@aztec/circuits.js'; +import { type AztecAddress, type Fr, type Point } from '@aztec/circuits.js'; import { ExecutionError } from '../common/errors.js'; import { type AvmContext } from './avm_context.js'; @@ -128,6 +128,26 @@ } } +/** + * Error is thrown when the supplied points length is not a multiple of 3. Specific for MSM opcode. + */ +export class MSMPointsLengthError extends AvmExecutionError { + constructor(pointsReadLength: number) { + super(`Points vector length should be a multiple of 3, was ${pointsReadLength}`); + this.name = 'MSMPointsLengthError'; + } +} + +/** + * Error is thrown when one of the supplied points does not lie on the Grumpkin curve. Specific for MSM opcode. + */ +export class MSMPointNotOnCurveError extends AvmExecutionError { + constructor(point: Point) { + super(`Point ${point.toString()} is not on the curve.`); + this.name = 'MSMPointNotOnCurveError'; + } +} + /** * Error is thrown when a static call attempts to alter some state */ diff --git a/yarn-project/simulator/src/avm/fixtures/index.ts b/yarn-project/simulator/src/avm/fixtures/index.ts index fce5fb7eff43..7990a0b2c3ba 100644 --- a/yarn-project/simulator/src/avm/fixtures/index.ts +++ b/yarn-project/simulator/src/avm/fixtures/index.ts @@ -55,7 +55,7 @@ export function initPersistableStateManager(overrides?: { overrides?.nullifiers || new NullifierManager(worldStateDB), overrides?.doMerkleOperations || false, overrides?.merkleTrees || mock(), - overrides?.txHash || new TxHash(new Fr(27).toBuffer()), + overrides?.txHash || new TxHash(new Fr(27)), ); } diff --git a/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.test.ts b/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.test.ts index 5af0702b10c3..8b963fe8670b 100644 --- a/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.test.ts @@ -1,8 +1,9 @@ -import { Fq, Fr } from '@aztec/circuits.js'; +import { Fq, Fr, Point } from '@aztec/circuits.js'; import { Grumpkin } from '@aztec/circuits.js/barretenberg'; import { type AvmContext } from '../avm_context.js'; import { Field, type MemoryValue, Uint1, Uint32 } from '../avm_memory_types.js'; +import { MSMPointNotOnCurveError, MSMPointsLengthError } from '../errors.js'; import { initContext } from '../fixtures/index.js'; import { MultiScalarMul } from './multi_scalar_mul.js'; @@ -127,4 +128,54 @@ expect(result).toEqual([expectedResult.x, expectedResult.y, new Fr(0n)]); }); + + it('Should throw an error if points length is not a multiple of 3', async () => { + const indirect = 0; + + // No need
to set up points nor scalars as it is expected to fail before any processing of them. + const pointsReadLength = 17; // Not multiple of 3 + const pointsOffset = 0; + const scalarsOffset = 20; + const pointsLengthOffset = 100; + const outputOffset = 120; + + context.machineState.memory.set(pointsLengthOffset, new Uint32(pointsReadLength)); + + await expect( + new MultiScalarMul(indirect, pointsOffset, scalarsOffset, outputOffset, pointsLengthOffset).execute(context), + ).rejects.toThrow(MSMPointsLengthError); + }); + + it('Should throw an error if a point is not on Grumpkin curve', async () => { + const indirect = 0; + const grumpkin = new Grumpkin(); + // We need to ensure points are actually on curve, so we just use the generator + // In future we could use a random point, for now we create an array of [G, 2G, NOT_ON_CURVE] + const points = Array.from({ length: 2 }, (_, i) => grumpkin.mul(grumpkin.generator(), new Fq(i + 1))); + points.push(new Point(new Fr(13), new Fr(14), false)); + + const scalars = [new Fq(5n), new Fq(3n), new Fq(1n)]; + const pointsReadLength = points.length * 3; // multiplied by 3 since we will store them as triplet in avm memory + const scalarsLength = scalars.length * 2; // multiplied by 2 since we will store them as lo and hi limbs in avm memory + // Transform the points and scalars into the format that we will write to memory + // We just store the x and y coordinates here, and handle the infinities when we write to memory + const storedScalars: Field[] = scalars.flatMap(s => [new Field(s.lo), new Field(s.hi)]); + // Points are stored as [x1, y1, inf1, x2, y2, inf2, ...] where the types are [Field, Field, Uint8, Field, Field, Uint8, ...] + const storedPoints: MemoryValue[] = points + .map(p => p.toFields()) + .flatMap(([x, y, inf]) => [new Field(x), new Field(y), new Uint1(inf.toNumber())]); + const pointsOffset = 0; + context.machineState.memory.setSlice(pointsOffset, storedPoints); + // Store scalars + const scalarsOffset = pointsOffset + pointsReadLength; + context.machineState.memory.setSlice(scalarsOffset, storedScalars); + // Store length of points to read + const pointsLengthOffset = scalarsOffset + scalarsLength; + context.machineState.memory.set(pointsLengthOffset, new Uint32(pointsReadLength)); + const outputOffset = pointsLengthOffset + 1; + + await expect( + new MultiScalarMul(indirect, pointsOffset, scalarsOffset, outputOffset, pointsLengthOffset).execute(context), + ).rejects.toThrow(MSMPointNotOnCurveError); + }); }); diff --git a/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.ts b/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.ts index 8939824c4e68..ecd648762358 100644 --- a/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.ts +++ b/yarn-project/simulator/src/avm/opcodes/multi_scalar_mul.ts @@ -3,7 +3,7 @@ import { Grumpkin } from '@aztec/circuits.js/barretenberg'; import { type AvmContext } from '../avm_context.js'; import { Field, TypeTag, Uint1 } from '../avm_memory_types.js'; -import { InstructionExecutionError } from '../errors.js'; +import { MSMPointNotOnCurveError, MSMPointsLengthError } from '../errors.js'; import { Opcode, OperandType } from '../serialization/instruction_serialization.js'; import { Addressing } from './addressing_mode.js'; import { Instruction } from './instruction.js'; @@ -44,7 +44,7 @@ export class MultiScalarMul extends Instruction { // Get the size of the unrolled (x, y , inf) points vector const pointsReadLength = memory.get(pointsLengthOffset).toNumber(); if (pointsReadLength % 3 !== 0) { - 
throw new InstructionExecutionError(`Points vector offset should be a multiple of 3, was ${pointsReadLength}`); + throw new MSMPointsLengthError(pointsReadLength); } // Get the unrolled (x, y, inf) representing the points @@ -76,7 +76,7 @@ export class MultiScalarMul extends Instruction { const isInf = pointsVector[3 * i + 2].toNumber() === 1; const p: Point = new Point(pointsVector[3 * i].toFr(), pointsVector[3 * i + 1].toFr(), isInf); if (!p.isOnGrumpkin()) { - throw new InstructionExecutionError(`Point ${p.toString()} is not on the curve.`); + throw new MSMPointNotOnCurveError(p); } grumpkinPoints.push(p); } diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index 12e8c814a2ab..c4ac6fafc9b0 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -23,7 +23,7 @@ import { computeUniqueNoteHash, siloNoteHash } from '@aztec/circuits.js/hash'; import { type FunctionAbi, type FunctionArtifact, type NoteSelector, countArgumentsSize } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; -import { applyStringFormatting, createLogger } from '@aztec/foundation/log'; +import { createLogger } from '@aztec/foundation/log'; import { type NoteData, toACVMWitness } from '../acvm/index.js'; import { type PackedValuesCache } from '../common/packed_values_cache.js'; @@ -544,10 +544,6 @@ export class ClientExecutionContext extends ViewDataOracle { ); } - public override debugLog(message: string, fields: Fr[]) { - this.log.verbose(`${applyStringFormatting(message, fields)}`, { module: `${this.log.module}:debug_log` }); - } - public getDebugFunctionName() { return this.db.getDebugFunctionName(this.contractAddress, this.callContext.functionSelector); } diff --git a/yarn-project/simulator/src/client/view_data_oracle.ts b/yarn-project/simulator/src/client/view_data_oracle.ts index d80b2d91182d..8f561371b4dd 100644 --- a/yarn-project/simulator/src/client/view_data_oracle.ts +++ b/yarn-project/simulator/src/client/view_data_oracle.ts @@ -290,6 +290,10 @@ export class ViewDataOracle extends TypedOracle { } public override debugLog(message: string, fields: Fr[]): void { + // TODO(#10558) Remove this check once the debug log is fixed + if (message.startsWith('Context.note_hashes, after pushing new note hash:')) { + return; + } this.log.verbose(`${applyStringFormatting(message, fields)}`, { module: `${this.log.module}:debug_log` }); } diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index d20435862881..b2696cb860c7 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -4,6 +4,7 @@ import { ProvingRequestType, SimulationError, type TreeInfo, + type Tx, type TxValidator, mockTx, } from '@aztec/circuit-types'; @@ -95,7 +96,7 @@ describe('public_processor', () => { it('process private-only txs', async function () { const tx = mockPrivateOnlyTx(); - const [processed, failed] = await processor.process([tx], 1); + const [processed, failed] = await processor.process([tx]); expect(processed.length).toBe(1); expect(processed[0].hash).toEqual(tx.getTxHash()); @@ -106,7 +107,7 @@ describe('public_processor', () => { it('runs a tx with enqueued public calls', async function () { const tx = 
mockTxWithPublicCalls(); - const [processed, failed] = await processor.process([tx], 1); + const [processed, failed] = await processor.process([tx]); expect(processed.length).toBe(1); expect(processed[0].hash).toEqual(tx.getTxHash()); @@ -122,7 +123,7 @@ mockedEnqueuedCallsResult.revertCode = RevertCode.APP_LOGIC_REVERTED; mockedEnqueuedCallsResult.revertReason = new SimulationError(`Failed`, []); - const [processed, failed] = await processor.process([tx], 1); + const [processed, failed] = await processor.process([tx]); expect(processed.length).toBe(1); expect(processed[0].hash).toEqual(tx.getTxHash()); @@ -135,7 +136,7 @@ publicTxSimulator.simulate.mockRejectedValue(new SimulationError(`Failed`, [])); const tx = mockTxWithPublicCalls(); - const [processed, failed] = await processor.process([tx], 1); + const [processed, failed] = await processor.process([tx]); expect(processed).toEqual([]); expect(failed.length).toBe(1); @@ -149,7 +150,7 @@ const txs = Array.from([1, 2, 3], seed => mockPrivateOnlyTx({ seed })); // We are passing 3 txs but only 2 can fit in the block - const [processed, failed] = await processor.process(txs, 2); + const [processed, failed] = await processor.process(txs, { maxTransactions: 2 }); expect(processed.length).toBe(2); expect(processed[0].hash).toEqual(txs[0].getTxHash()); @@ -159,13 +160,25 @@ expect(worldStateDB.commit).toHaveBeenCalledTimes(2); }); - it('does not send a transaction to the prover if validation fails', async function () { + it('does not send a transaction to the prover if pre validation fails', async function () { + const tx = mockPrivateOnlyTx(); + + const txValidator: MockProxy<TxValidator<Tx>> = mock(); + txValidator.validateTx.mockResolvedValue({ result: 'invalid', reason: ['Invalid'] }); + + const [processed, failed] = await processor.process([tx], {}, { preprocessValidator: txValidator }); + + expect(processed).toEqual([]); + expect(failed.length).toBe(1); + }); + + it('does not send a transaction to the prover if post validation fails', async function () { const tx = mockPrivateOnlyTx(); const txValidator: MockProxy<TxValidator<ProcessedTx>> = mock(); - txValidator.validateTxs.mockRejectedValue([[], [tx]]); + txValidator.validateTx.mockResolvedValue({ result: 'invalid', reason: ['Invalid'] }); - const [processed, failed] = await processor.process([tx], 1, txValidator); + const [processed, failed] = await processor.process([tx], {}, { postprocessValidator: txValidator }); expect(processed).toEqual([]); expect(failed.length).toBe(1); @@ -183,7 +196,7 @@ // We allocate a deadline of 1s, so only 2 txs should fit const deadline = new Date(Date.now() + 1000); - const [processed, failed] = await processor.process(txs, 3, undefined, deadline); + const [processed, failed] = await processor.process(txs, { deadline }); expect(processed.length).toBe(2); expect(processed[0].hash).toEqual(txs[0].getTxHash()); @@ -215,7 +228,7 @@ const txFee = privateGasUsed.computeFee(globalVariables.gasFees); - const [processed, failed] = await processor.process([tx], 1); + const [processed, failed] = await processor.process([tx]); expect(processed).toHaveLength(1); expect(processed[0].data.feePayer).toEqual(feePayer); @@ -239,7 +252,7 @@ } tx.data.gasUsed = privateGasUsed; - const [processed, failed] = await processor.process([tx], 1); + const
[processed, failed] = await processor.process([tx]); expect(processed).toEqual([]); expect(failed).toHaveLength(1); diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index 5a5fc91d0e07..eb3f743d963a 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -136,29 +136,132 @@ export class PublicProcessor implements Traceable { * @returns The list of processed txs with their circuit simulation outputs. */ public async process( - txs: Tx[], - maxTransactions = txs.length, - txValidator?: TxValidator<ProcessedTx>, - deadline?: Date, + txs: Iterable<Tx>, + limits: { + maxTransactions?: number; + maxBlockSize?: number; + maxBlockGas?: Gas; + deadline?: Date; + } = {}, + validators: { + preprocessValidator?: TxValidator<Tx>; + postprocessValidator?: TxValidator<ProcessedTx>; + nullifierCache?: { addNullifiers: (nullifiers: Buffer[]) => void }; + } = {}, ): Promise<[ProcessedTx[], FailedTx[], NestedProcessReturnValues[]]> { - // The processor modifies the tx objects in place, so we need to clone them. - txs = txs.map(tx => Tx.clone(tx)); + const { maxTransactions, maxBlockSize, deadline, maxBlockGas } = limits; + const { preprocessValidator, postprocessValidator, nullifierCache } = validators; const result: ProcessedTx[] = []; const failed: FailedTx[] = []; - let returns: NestedProcessReturnValues[] = []; - let totalGas = new Gas(0, 0); const timer = new Timer(); - for (const tx of txs) { - // only process up to the limit of the block - if (result.length >= maxTransactions) { + let totalSizeInBytes = 0; + let returns: NestedProcessReturnValues[] = []; + let totalPublicGas = new Gas(0, 0); + let totalBlockGas = new Gas(0, 0); + + for (const origTx of txs) { + // Only process up to the max tx limit + if (maxTransactions !== undefined && result.length >= maxTransactions) { + this.log.debug(`Stopping tx processing due to reaching the max tx limit.`); break; } + + // Bail if we've hit the deadline + if (deadline && this.dateProvider.now() > +deadline) { + this.log.warn(`Stopping tx processing due to timeout.`); + break; + } + + // Skip this tx if it'd exceed max block size + const txHash = origTx.getTxHash().toString(); + const preTxSizeInBytes = origTx.getEstimatedPrivateTxEffectsSize(); + if (maxBlockSize !== undefined && totalSizeInBytes + preTxSizeInBytes > maxBlockSize) { + this.log.warn(`Skipping processing of tx ${txHash} sized ${preTxSizeInBytes} bytes due to block size limit`, { + txHash, + sizeInBytes: preTxSizeInBytes, + totalSizeInBytes, + maxBlockSize, + }); + continue; + } + + // Skip this tx if its gas limit would exceed the block gas limit + const txGasLimit = origTx.data.constants.txContext.gasSettings.gasLimits; + if (maxBlockGas !== undefined && totalBlockGas.add(txGasLimit).gtAny(maxBlockGas)) { + this.log.warn(`Skipping processing of tx ${txHash} due to block gas limit`, { + txHash, + txGasLimit, + totalBlockGas, + maxBlockGas, + }); + continue; + } + + // The processor modifies the tx objects in place, so we need to clone them. + const tx = Tx.clone(origTx); + + // We validate the tx before processing it, to avoid unnecessary work.
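+ // Preprocess verdicts (editorial note): 'invalid' marks the tx as failed, 'skipped' drops it from
+ // this block without marking it failed, and 'valid' proceeds to simulation. Both rejection paths
+ // record an empty NestedProcessReturnValues entry before moving on to the next tx.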
+ if (preprocessValidator) { + const result = await preprocessValidator.validateTx(tx); + if (result.result === 'invalid') { + const reason = result.reason.join(', '); + this.log.warn(`Rejecting tx ${tx.getTxHash().toString()} due to pre-process validation failure: ${reason}`); + failed.push({ tx, error: new Error(`Tx failed preprocess validation: ${reason}`) }); + returns.push(new NestedProcessReturnValues([])); + continue; + } else if (result.result === 'skipped') { + const reason = result.reason.join(', '); + this.log.warn(`Skipping tx ${tx.getTxHash().toString()} due to pre-process validation: ${reason}`); + returns.push(new NestedProcessReturnValues([])); + continue; + } else { + this.log.trace(`Tx ${tx.getTxHash().toString()} is valid before processing.`); + } + } + try { - const [processedTx, returnValues] = await this.processTx(tx, txValidator, deadline); + const [processedTx, returnValues] = await this.processTx(tx, deadline); + + // If the actual size of this tx would exceed block size, skip it + const txSize = processedTx.txEffect.getDASize(); + if (maxBlockSize !== undefined && totalSizeInBytes + txSize > maxBlockSize) { + this.log.warn(`Skipping processed tx ${txHash} sized ${txSize} due to max block size.`, { + txHash, + sizeInBytes: txSize, + totalSizeInBytes, + maxBlockSize, + }); + continue; + } + + // Re-validate the transaction + if (postprocessValidator) { + // Only accept processed transactions that are not double-spends; + // public functions emitting nullifiers would pass the earlier check but fail here. + // Note that we're checking all nullifiers generated in the private execution twice, + // we could store the ones already checked and skip them here as an optimization. + // TODO(palla/txs): Can we get into this case? AVM validates this. We should be able to remove it. + const result = await postprocessValidator.validateTx(processedTx); + if (result.result !== 'valid') { + const reason = result.reason.join(', '); + this.log.error(`Rejecting tx ${processedTx.hash} after processing: ${reason}.`); + failed.push({ tx, error: new Error(`Tx failed post-process validation: ${reason}`) }); + continue; + } else { + this.log.trace(`Tx ${tx.getTxHash().toString()} is valid post processing.`); + } + } + + // Otherwise, commit tx state for the next tx to be processed + await this.commitTxState(processedTx); + nullifierCache?.addNullifiers(processedTx.txEffect.nullifiers.map(n => n.toBuffer())); result.push(processedTx); returns = returns.concat(returnValues); - totalGas = totalGas.add(processedTx.gasUsed.publicGas); + + totalPublicGas = totalPublicGas.add(processedTx.gasUsed.publicGas); + totalBlockGas = totalBlockGas.add(processedTx.gasUsed.totalGas); + totalSizeInBytes += txSize; } catch (err: any) { if (err?.name === 'PublicProcessorTimeoutError') { this.log.warn(`Stopping tx processing due to timeout.`); @@ -173,18 +276,22 @@ } const duration = timer.s(); - const rate = duration > 0 ? totalGas.l2Gas / duration : 0; - this.metrics.recordAllTxs(totalGas, rate); + const rate = duration > 0 ?
totalPublicGas.l2Gas / duration : 0; + this.metrics.recordAllTxs(totalPublicGas, rate); + + this.log.info(`Processed ${result.length} successful txs and ${failed.length} failed txs in ${duration}s`, { + duration, + rate, + totalPublicGas, + totalBlockGas, + totalSizeInBytes, + }); return [result, failed, returns]; } @trackSpan('PublicProcessor.processTx', tx => ({ [Attributes.TX_HASH]: tx.tryGetTxHash()?.toString() })) - private async processTx( - tx: Tx, - txValidator?: TxValidator<ProcessedTx>, - deadline?: Date, - ): Promise<[ProcessedTx, NestedProcessReturnValues[]]> { + private async processTx(tx: Tx, deadline?: Date): Promise<[ProcessedTx, NestedProcessReturnValues[]]> { const [time, [processedTx, returnValues]] = await elapsed(() => this.processTxWithinDeadline(tx, deadline)); this.log.verbose( @@ -208,20 +315,14 @@ }, ); + return [processedTx, returnValues ?? []]; + } + + private async commitTxState(processedTx: ProcessedTx, txValidator?: TxValidator<ProcessedTx>): Promise<void> { // Commit the state updates from this transaction + // TODO(palla/txs): It seems like this doesn't do anything...? await this.worldStateDB.commit(); - // Re-validate the transaction - if (txValidator) { - // Only accept processed transactions that are not double-spends, - // public functions emitting nullifiers would pass earlier check but fail here. - // Note that we're checking all nullifiers generated in the private execution twice, - // we could store the ones already checked and skip them here as an optimization. - const [_, invalid] = await txValidator.validateTxs([processedTx]); - if (invalid.length) { - throw new Error(`Transaction ${invalid[0].hash} invalid after processing public functions`); - } - } // Update the state so that the next tx in the loop has the correct .startState // NB: before this change, all .startStates were actually incorrect, but the issue was never caught because we either: // a) had only 1 tx with public calls per block, so this loop had len 1 @@ -255,8 +356,6 @@ ); const treeInsertionEnd = process.hrtime.bigint(); this.metrics.recordTreeInsertions(Number(treeInsertionEnd - treeInsertionStart) / 1_000); - - return [processedTx, returnValues ?? []]; } /** Processes the given tx within deadline. Returns timeout if deadline is hit. */ diff --git a/yarn-project/simulator/src/public/public_tx_context.ts b/yarn-project/simulator/src/public/public_tx_context.ts index 62dab19d16ec..0c376934a20a 100644 --- a/yarn-project/simulator/src/public/public_tx_context.ts +++ b/yarn-project/simulator/src/public/public_tx_context.ts @@ -459,5 +459,5 @@ function fetchTxHash(nonRevertibleAccumulatedData: PrivateToPublicAccumulatedDat if (!firstNullifier || firstNullifier.isZero()) { throw new Error(`Cannot get tx hash since first nullifier is missing`); } - return new TxHash(firstNullifier.toBuffer()); + return new TxHash(firstNullifier); } diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 5e2d33befe8d..d68b39399c6b 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -6,6 +6,9 @@ * @see {@link https://opentelemetry.io/docs/specs/semconv/general/metrics/ | OpenTelemetry Metrics} for naming conventions.
*/ +export const BLOB_SINK_OBJECTS_IN_BLOB_STORE = 'aztec.blob_sink.objects_in_blob_store'; +export const BLOB_SINK_BLOB_SIZE = 'aztec.blob_sink.blob_size'; + /** How long it takes to simulate a circuit */ export const CIRCUIT_SIMULATION_DURATION = 'aztec.circuit.simulation.duration'; export const CIRCUIT_SIMULATION_INPUT_SIZE = 'aztec.circuit.simulation.input_size'; diff --git a/yarn-project/txe/src/index.ts b/yarn-project/txe/src/index.ts index 47079c7f6a5d..1fab653ae0ab 100644 --- a/yarn-project/txe/src/index.ts +++ b/yarn-project/txe/src/index.ts @@ -109,7 +109,7 @@ } } -const TXEDispatcherApiChema: ApiSchemaFor<TXEDispatcher> = { +const TXEDispatcherApiSchema: ApiSchemaFor<TXEDispatcher> = { // eslint-disable-next-line camelcase resolve_foreign_call: z.function().args(TXEForeignCallInputSchema).returns(ForeignCallResultSchema), }; @@ -120,5 +120,5 @@ * @returns A TXE RPC server. */ export function createTXERpcServer(logger: Logger) { - return createSafeJsonRpcServer(new TXEDispatcher(logger), TXEDispatcherApiChema); + return createSafeJsonRpcServer(new TXEDispatcher(logger), TXEDispatcherApiSchema, true); } diff --git a/yarn-project/txe/src/node/txe_node.ts b/yarn-project/txe/src/node/txe_node.ts index a5c8570be2fa..8197627ce15e 100644 --- a/yarn-project/txe/src/node/txe_node.ts +++ b/yarn-project/txe/src/node/txe_node.ts @@ -20,6 +20,7 @@ import { TxHash, type TxReceipt, TxScopedL2Log, + type TxValidationResult, type UnencryptedL2Log, } from '@aztec/circuit-types'; import { @@ -149,7 +150,7 @@ const tag = log.fields[0]; const currentLogs = this.#logsByTags.get(tag.toString()) ?? []; const scopedLog = new TxScopedL2Log( - new TxHash(new Fr(blockNumber).toBuffer()), + new TxHash(new Fr(blockNumber)), this.#noteIndex, blockNumber, false, @@ -193,7 +194,7 @@ const currentLogs = this.#logsByTags.get(tag.toString()) ?? []; const scopedLog = new TxScopedL2Log( - new TxHash(new Fr(blockNumber).toBuffer()), + new TxHash(new Fr(blockNumber)), this.#noteIndex, blockNumber, true, @@ -557,7 +558,7 @@ * @param tx - The transaction to validate for correctness. * @param isSimulation - True if the transaction is a simulated one without generated proofs.
(Optional) */ - isValidTx(_tx: Tx, _isSimulation?: boolean): Promise<boolean> { + isValidTx(_tx: Tx, _isSimulation?: boolean): Promise<TxValidationResult> { throw new Error('TXE Node method isValidTx not implemented'); } diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index ec4c78185257..608263ea150b 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -649,7 +649,7 @@ export class TXE implements TypedOracle { // index = await (await this.trees.getLatest()).findLeafIndex(MerkleTreeId.NULLIFIER_TREE, Fr.random().toBuffer()); // console.log('INDEX OF RANDOM', index); - this.node.setTxEffect(blockNumber, new TxHash(new Fr(blockNumber).toBuffer()), txEffect); + this.node.setTxEffect(blockNumber, new TxHash(new Fr(blockNumber)), txEffect); this.node.setNullifiersIndexesWithBlock(blockNumber, txEffect.nullifiers); this.node.addNoteLogsByTags(this.blockNumber, this.privateLogs); this.node.addPublicLogsByTags(this.blockNumber, this.publicLogs); diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index e79fe5fa8c13..0772063a1f38 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -2,7 +2,7 @@ import { BlockAttestation, BlockProposal, ConsensusPayload, - SignatureDomainSeperator, + SignatureDomainSeparator, type TxHash, } from '@aztec/circuit-types'; import { type BlockHeader } from '@aztec/circuits.js'; @@ -43,7 +43,7 @@ export class ValidationService { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7961): check that the current validator is correct const buf = Buffer32.fromBuffer( - keccak256(proposal.payload.getPayloadToSign(SignatureDomainSeperator.blockAttestation)), + keccak256(proposal.payload.getPayloadToSign(SignatureDomainSeparator.blockAttestation)), ); const sig = await this.keyStore.signMessage(buf); return new BlockAttestation(proposal.payload, sig); } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index b7874b104cea..09c19eac82b5 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -1,11 +1,4 @@ -import { - type BlockAttestation, - type BlockProposal, - type L2Block, - type ProcessedTx, - type Tx, - type TxHash, -} from '@aztec/circuit-types'; +import { type BlockAttestation, type BlockProposal, type L2Block, type Tx, type TxHash } from '@aztec/circuit-types'; import { type BlockHeader, type GlobalVariables } from '@aztec/circuits.js'; import { type EpochCache } from '@aztec/epoch-cache'; import { Buffer32 } from '@aztec/foundation/buffer'; @@ -38,12 +31,11 @@ import { ValidatorMetrics } from './metrics.js'; * We reuse the sequencer's block building functionality for re-execution */ type BlockBuilderCallback = ( - txs: Tx[], + txs: Iterable<Tx>, globalVariables: GlobalVariables, historicalHeader?: BlockHeader, - interrupt?: (processedTxs: ProcessedTx[]) => Promise<void>, opts?: { validateOnly?: boolean }, -) => Promise<{ block: L2Block; publicProcessorDuration: number; numProcessedTxs: number; blockBuildingTimer: Timer }>; +) => Promise<{ block: L2Block; publicProcessorDuration: number; numTxs: number; blockBuildingTimer: Timer }>; export interface Validator { start(): Promise<void>; @@ -243,9 +235,7 @@ export class ValidatorClient extends WithTracer implements Validator { // Use the sequencer's block
building logic to re-execute the transactions const stopTimer = this.metrics.reExecutionTimer(); - const { block } = await this.blockBuilder(txs, header.globalVariables, undefined, undefined, { - validateOnly: true, - }); + const { block } = await this.blockBuilder(txs, header.globalVariables, undefined, { validateOnly: true }); stopTimer(); this.log.verbose(`Transaction re-execution complete`); diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index c4806a57b345..e1fa34cd008f 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -334,6 +334,32 @@ __metadata: languageName: node linkType: soft +"@aztec/blob-sink@workspace:^, @aztec/blob-sink@workspace:blob-sink": + version: 0.0.0-use.local + resolution: "@aztec/blob-sink@workspace:blob-sink" + dependencies: + "@aztec/circuit-types": "workspace:^" + "@aztec/foundation": "workspace:^" + "@aztec/kv-store": "workspace:*" + "@aztec/telemetry-client": "workspace:*" + "@jest/globals": "npm:^29.5.0" + "@types/jest": "npm:^29.5.0" + "@types/memdown": "npm:^3.0.0" + "@types/node": "npm:^18.7.23" + "@types/source-map-support": "npm:^0.5.10" + "@types/supertest": "npm:^6.0.2" + express: "npm:^4.21.1" + jest: "npm:^29.5.0" + jest-mock-extended: "npm:^3.0.3" + source-map-support: "npm:^0.5.21" + supertest: "npm:^7.0.0" + ts-node: "npm:^10.9.1" + tslib: "npm:^2.4.0" + typescript: "npm:^5.0.4" + zod: "npm:^3.23.8" + languageName: unknown + linkType: soft + "@aztec/bot@workspace:^, @aztec/bot@workspace:bot": version: 0.0.0-use.local resolution: "@aztec/bot@workspace:bot" @@ -531,6 +557,7 @@ __metadata: "@aztec/aztec-node": "workspace:^" "@aztec/aztec.js": "workspace:^" "@aztec/bb-prover": "workspace:^" + "@aztec/blob-sink": "workspace:^" "@aztec/bot": "workspace:^" "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" @@ -795,7 +822,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": +"@aztec/kv-store@workspace:*, @aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": version: 0.0.0-use.local resolution: "@aztec/kv-store@workspace:kv-store" dependencies: @@ -1188,6 +1215,7 @@ __metadata: "@types/node": "npm:^18.7.23" concurrently: "npm:^7.6.0" eslint: "npm:^8.37.0" + express: "npm:^4.21.1" jest: "npm:^29.5.0" jest-mock-extended: "npm:^3.0.3" levelup: "npm:^5.1.1" @@ -1240,7 +1268,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": +"@aztec/telemetry-client@workspace:*, @aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": version: 0.0.0-use.local resolution: "@aztec/telemetry-client@workspace:telemetry-client" dependencies: @@ -5713,6 +5741,18 @@ __metadata: languageName: node linkType: hard +"@types/superagent@npm:^8.1.0": + version: 8.1.9 + resolution: "@types/superagent@npm:8.1.9" + dependencies: + "@types/cookiejar": "npm:^2.1.5" + "@types/methods": "npm:^1.1.4" + "@types/node": "npm:*" + form-data: "npm:^4.0.0" + checksum: 10/6d9687b0bc3d693b900ef76000b02437a70879c3219b28606879c086d786bb1e48429813e72e32dd0aafc94c053a78a2aa8be67c45bc8e6b968ca62d6d5cc554 + languageName: node + linkType: hard + "@types/supertest@npm:^2.0.12": version: 2.0.16 resolution: "@types/supertest@npm:2.0.16" @@ -5722,6 +5762,16 @@ __metadata: languageName: node linkType: hard +"@types/supertest@npm:^6.0.2": + version: 6.0.2 + resolution: "@types/supertest@npm:6.0.2" + dependencies: + "@types/methods": "npm:^1.1.4" + 
"@types/superagent": "npm:^8.1.0" + checksum: 10/4b67fb2d1bfbb7ff0a7dfaaf190cdf2e0014522615fb2dc53c214bdac95b4ee42696dd1df13332c90a7765cc52934c9cc0c428bf0f9e8189167aef01042e7448 + languageName: node + linkType: hard + "@types/wrap-ansi@npm:^3.0.0": version: 3.0.0 resolution: "@types/wrap-ansi@npm:3.0.0" @@ -10699,7 +10749,7 @@ __metadata: languageName: node linkType: hard -"express@npm:^4.19.2": +"express@npm:^4.19.2, express@npm:^4.21.1": version: 4.21.1 resolution: "express@npm:4.21.1" dependencies: @@ -11072,6 +11122,17 @@ __metadata: languageName: node linkType: hard +"formidable@npm:^3.5.1": + version: 3.5.2 + resolution: "formidable@npm:3.5.2" + dependencies: + dezalgo: "npm:^1.0.4" + hexoid: "npm:^2.0.0" + once: "npm:^1.4.0" + checksum: 10/b9d87af44be8ba82f8f4955c240e65c559aedb84fecce6b294d97b256db66e6a20d50e799776fdf29ee46cb83857231d12c416c735696b18d3895b85620704f4 + languageName: node + linkType: hard + "forwarded@npm:0.2.0": version: 0.2.0 resolution: "forwarded@npm:0.2.0" @@ -11670,6 +11731,13 @@ __metadata: languageName: node linkType: hard +"hexoid@npm:^2.0.0": + version: 2.0.0 + resolution: "hexoid@npm:2.0.0" + checksum: 10/73d8e135bdd9326d0fa9ea05356741d48a3e67fbd3b2ce14c4f7b523a1cdabe70fa42f2c53447244886a0aecdf7873d4124abc30093a72d15188805f7a7ee406 + languageName: node + linkType: hard + "hmac-drbg@npm:^1.0.1": version: 1.0.1 resolution: "hmac-drbg@npm:1.0.1" @@ -18300,6 +18368,23 @@ __metadata: languageName: node linkType: hard +"superagent@npm:^9.0.1": + version: 9.0.2 + resolution: "superagent@npm:9.0.2" + dependencies: + component-emitter: "npm:^1.3.0" + cookiejar: "npm:^2.1.4" + debug: "npm:^4.3.4" + fast-safe-stringify: "npm:^2.1.1" + form-data: "npm:^4.0.0" + formidable: "npm:^3.5.1" + methods: "npm:^1.1.2" + mime: "npm:2.6.0" + qs: "npm:^6.11.0" + checksum: 10/d3c0c9051ceec84d5b431eaa410ad81bcd53255cea57af1fc66d683a24c34f3ba4761b411072a9bf489a70e3d5b586a78a0e6f2eac6a561067e7d196ddab0907 + languageName: node + linkType: hard + "supertest@npm:^6.3.3": version: 6.3.4 resolution: "supertest@npm:6.3.4" @@ -18310,6 +18395,16 @@ __metadata: languageName: node linkType: hard +"supertest@npm:^7.0.0": + version: 7.0.0 + resolution: "supertest@npm:7.0.0" + dependencies: + methods: "npm:^1.1.2" + superagent: "npm:^9.0.1" + checksum: 10/73bf2a37e13856a1b3e6a37b9df5cec8e506aa0360a5f5ecd989d1f4b0edf168883e306012e81e371d5252c17d4c7bef4ba30633dbf3877cbf52fc7af51cca9b + languageName: node + linkType: hard + "supports-color@npm:^2.0.0": version: 2.0.0 resolution: "supports-color@npm:2.0.0"