diff --git a/.circleci/config.yml b/.circleci/config.yml index 794d5a090b70..fb0ea6e4b9a7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -358,15 +358,15 @@ jobs: command: build yarn-project-prod | add_timestamps yarn-project-formatting: - machine: - image: ubuntu-2204:2023.07.2 - resource_class: large + docker: + - image: aztecprotocol/alpine-build-image + resource_class: small steps: - *checkout - *setup_env - run: name: Check Formatting - command: cond_run_container yarn-project formatting | add_timestamps + command: cond_spot_run_container yarn-project 8 formatting | add_timestamps yarn-project-tests: docker: diff --git a/aztec-up/.gitignore b/aztec-up/.gitignore new file mode 100644 index 000000000000..5fd438ece3e8 --- /dev/null +++ b/aztec-up/.gitignore @@ -0,0 +1,3 @@ +.terraform +.terraform* +.DS_Store \ No newline at end of file diff --git a/aztec-up/README.md b/aztec-up/README.md new file mode 100644 index 000000000000..630d0e0fc5c8 --- /dev/null +++ b/aztec-up/README.md @@ -0,0 +1,30 @@ +# The Aztec Installation Script + +``` +bash -i <(curl -s install.aztec.network) +``` + +That is all. + +This will install into `~/.aztec/bin` a collection of scripts to help running aztec containers, and will update +a users `PATH` variable in their shell startup script so they can be found. + +- `aztec` - The infrastructure container. +- `aztec-cli` - A command line tool for interacting with infrastructure. +- `aztec-nargo` - A build of `nargo` from `noir` that is guaranteed to be version aligned. Provides compiler, lsp and more. +- `aztec-sandbox` - A wrapper around docker-compose that launches services needed for sandbox testing. +- `aztec-up` - A tool to upgrade the aztec toolchain to the latest, or specific versions. + +After installed, you can use `aztec-up` to upgrade or install specific versions. + +``` +VERSION=master aztec-up +``` + +This will install the container built from master branch. + +``` +VERSION=v1.2.3 aztec-up +``` + +This will install tagged release version 1.2.3. diff --git a/aztec-up/bin/.aztec-run b/aztec-up/bin/.aztec-run new file mode 100755 index 000000000000..a02a3bafbd98 --- /dev/null +++ b/aztec-up/bin/.aztec-run @@ -0,0 +1,84 @@ +#!/usr/bin/env bash +# The script starts a Docker container passing any commands and arguments to the command running inside the container. +# It handles mounting paths into the container. +# It handles networking comms back to the host. +set -euo pipefail + +IMAGE=${1:-} +shift + +VERSION=${VERSION:-"latest"} + +# Any host bindings we might send to the container. +DOCKER_HOST_BINDS="" + +# Volumes to pass to the container. +DOCKER_VOLUME="-v $HOME:/root" + +# Colors. +y="\033[33m" +r="\033[0m" + +function warn { + echo -e "${y}$1${r}" +} + +if ! command -v docker &> /dev/null; then + warn "No docker found." + exit 1 +fi + +if [[ $PWD != ${HOME}* ]]; then + warn "Due to how we containerize our applications, we require your working directory to be somewhere within $HOME." + exit 1 +fi + +# Set up host.docker.internal alias on Linux, just like it is on mac. +UNAME=$(uname -s) +if [ "$UNAME" == "Linux" ]; then + if docker info 2>/dev/null | grep -q rootless; then + # We're in rootless docker. Probe for the host ip and use that. + ip=$(hostname -I | head | tr -d ' ') + warn "WARNING: Running within rootless docker. Using $ip as host ip. Ensure listening services are listening on this interface." 
+ DOCKER_HOST_BINDS="$DOCKER_HOST_BINDS --add-host host.docker.internal:$ip" + else + DOCKER_HOST_BINDS="$DOCKER_HOST_BINDS --add-host host.docker.internal:host-gateway" + fi +fi + +# Substitute any references to localhost with our host gateway. +# TODO: In node, we can hook the resolve override for localhost with host.docker.internal. +# Consider if we should just do that, but that wouldn't help e.g. nargo. +args=("$@") +for i in "${!args[@]}"; do + args[$i]=${args[$i]//localhost/host.docker.internal} +done + +# Check if it's either a filename or a directory that exists outside the HOME. +# If so, warn and exit. +for i in "${!args[@]}"; do + arg=${args[$i]} + if [[ -f "$arg" || -d "$arg" && $(realpath $arg) != ${HOME}* ]]; then + warn "Due to how we containerize our applications, paths outside of $HOME cannot be referenced." + exit 1 + fi +done + +DOCKER_ENV="" +for env in ${ENV_VARS_TO_INJECT:-}; do + # First substitute any reference to localhost with our host gateway. + env=${env//localhost/host.docker.internal} + # Inject into container. + DOCKER_ENV+="-e $env:${!env:-} " +done + +DOCKER_VOLUME="$DOCKER_VOLUME -v cache:/cache" + +docker run \ + -ti \ + --rm \ + --workdir "${PWD/$HOME/\/root}" \ + $DOCKER_HOST_BINDS \ + $DOCKER_ENV \ + $DOCKER_VOLUME \ + $IMAGE:$VERSION ${args[@]:-} diff --git a/aztec-up/bin/aztec b/aztec-up/bin/aztec new file mode 100755 index 000000000000..30ef8a66fabe --- /dev/null +++ b/aztec-up/bin/aztec @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +set -euo pipefail + +$(dirname $0)/.aztec-run aztecprotocol/aztec-sandbox $@ \ No newline at end of file diff --git a/aztec-up/bin/aztec-cli b/aztec-up/bin/aztec-cli new file mode 100755 index 000000000000..7d8b75f41469 --- /dev/null +++ b/aztec-up/bin/aztec-cli @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +# TODO: Make compile command always be wasm. Or put nargo in container. Or probe. +# TODO: Make unbox fail if trying to unbox outside of the cwd. +set -euo pipefail + +export ENV_VARS_TO_INJECT="PXE_URL PRIVATE_KEY DEBUG" +export PXE_URL=${PXE_URL:-"http://host.docker.internal:8080"} + +$(dirname $0)/.aztec-run aztecprotocol/cli $@ \ No newline at end of file diff --git a/aztec-up/bin/aztec-install b/aztec-up/bin/aztec-install new file mode 100755 index 000000000000..11eaada215d9 --- /dev/null +++ b/aztec-up/bin/aztec-install @@ -0,0 +1,163 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Colors +g="\033[32m" # Green +y="\033[33m" # Yellow +b="\033[34m" # Blue +p="\033[35m" # Purple +r="\033[0m" # Reset +bold="\033[1m" + +# Function to replace characters and add color +function print_colored() { + local b=$'\033[34m' # Blue + local y=$'\033[33m' # Yellow + local r=$'\033[0m' # Reset + echo "$1" | sed -E "s/(█+)/${b}\1${y}/g" +} + +function title() { + echo + print_colored " █████╗ ███████╗████████╗███████╗ ██████╗" + print_colored "██╔══██╗╚══███╔╝╚══██╔══╝██╔════╝██╔════╝" + print_colored "███████║ ███╔╝ ██║ █████╗ ██║" + print_colored "██╔══██║ ███╔╝ ██║ ██╔══╝ ██║" + print_colored "██║ ██║███████╗ ██║ ███████╗╚██████╗" + print_colored "╚═╝ ╚═╝╚══════╝ ╚═╝ ╚══════╝ ╚═════╝" + echo -e "${r}" + echo -e "Welcome to the ${bold}${b}Aztec${r} installer! Your journey into blockchain privacy begins... ${bold}${p}now${r}." + echo -e "We presently leverage docker to simplify releases of our complex project." 
+ echo -e "Please ensure it's installed for your platform: https://docs.docker.com/engine/install" + echo + if [ "$(uname -s)" == "Darwin" ]; then + echo -e "${y}WARNING: For best performance we recommend adjusting your default docker settings:" + echo -e " - Under general, enable VirtioFS." + echo -e " - Under resources, set CPUs to ~80-100% your maximum." + echo -e " - Under resources, set Memory to ~80% your maximum." + echo -e "You may receive a warning about your home directory being mounted into a container." + echo -e "This is requested so we can read and write project files, that is all." + echo -e "${r}" + fi + echo -e "This will install the following scripts and update your PATH if necessary:" + echo -e " ${bold}${g}aztec${r} - launches various infrastructure subsystems (sequencer, prover, pxe, etc)." + echo -e " ${bold}${g}aztec-cli${r} - a command line tool for interfacing and experimenting with infrastructure." + echo -e " ${bold}${g}aztec-nargo${r} - aztec's build of nargo, the noir compiler toolchain." + echo -e " ${bold}${g}aztec-sandbox${r} - a wrapper around docker-compose that launches services needed for sandbox testing." + echo -e " ${bold}${g}aztec-up${r} - a tool to upgrade the aztec toolchain to the latest, or specific versions." + echo + read -p "Do you wish to continue? (y/n)" -n 1 -r + echo + if [[ ! $REPLY =~ ^[Yy]$ ]]; then + exit 0 + fi +} + +function info { + echo -e "${g}$1${r}" +} + +function warn { + echo -e "${y}$1${r}" +} + +AZTEC_PATH=$HOME/.aztec +BIN_PATH=$AZTEC_PATH/bin + +# Define version if specified, otherwise set to "latest". +VERSION=${VERSION:-"latest"} +INSTALL_HOST=install.aztec.network.s3-website.eu-west-2.amazonaws.com + +[ -z "${SKIP_TITLE:-}" ] && title + +# Check if Docker is available. +if ! command -v docker &>/dev/null; then + warn "Docker is not installed. Please install Docker and try again." + exit 1 +fi + +# Check if Docker is running. +if ! docker info &>/dev/null; then + warn "Docker is not running. Please start Docker and try again." + exit 1 +fi + +if ! docker compose &>/dev/null && ! command -v docker-compose &>/dev/null; then + warn "WARNING: 'docker compose' not supported and docker-compose not found." + warn "Continuing installation, but aztec-sandbox will not work." +fi + +# Create a "hidden" `$HOME/.aztec` dir, so as not to clutter the user's cwd. +rm -f $BIN_PATH/* && mkdir -p $BIN_PATH + +# Download containers from dockerhub. Tag them as latest. +function pull_container { + docker pull aztecprotocol/$1:$VERSION + + # If not latest, retag to be latest so it runs from scripts. + if [ $VERSION != "latest" ]; then + docker tag aztecprotocol/$1:$VERSION aztecprotocol/$1:latest + fi +} + +if [ -z "${SKIP_PULL:-}" ]; then + info "Pulling aztec version $VERSION..." + pull_container aztec-sandbox + pull_container cli + pull_container noir +fi + +# Download the Docker Compose file. Used by aztec-start. +curl -fsSL http://$INSTALL_HOST/docker-compose.yml -o $BIN_PATH/docker-compose.yml + +function install_bin { + curl -fsSL http://$INSTALL_HOST/$1 -o $BIN_PATH/$1 + chmod +x $BIN_PATH/$1 + echo "Installed: $BIN_PATH/$1" +} + +info "Installing scripts in $BIN_PATH..." +install_bin .aztec-run +install_bin aztec +install_bin aztec-cli +install_bin aztec-sandbox +install_bin aztec-up +install_bin aztec-nargo + +function update_path_env_var { + TARGET_DIR="${1}" + # Check if the target directory is in the user's PATH. + if [[ ":$PATH:" != *":$TARGET_DIR:"* ]]; then + # Determine the user's shell. 
+ SHELL_PROFILE="" + case $SHELL in + */bash) + SHELL_PROFILE="$HOME/.bashrc" + ;; + */zsh) + SHELL_PROFILE="$HOME/.zshrc" + ;; + # Add other shells as needed + *) + echo "Unsupported shell: $SHELL" + return + ;; + esac + # Inform the user about the change and ask for confirmation + warn "The directory $TARGET_DIR is not in your PATH." + read -p "Add it to $SHELL_PROFILE to make the aztec binaries accessible? (y/n)" -n 1 -r + echo + if [[ $REPLY =~ ^[Yy]$ ]]; then + # Add the target directory to the user's PATH in their profile. + echo "export PATH=\$PATH:$TARGET_DIR" >> "$SHELL_PROFILE" + info "Done! Starting fresh shell..." + $SHELL + else + warn "Skipped updating PATH. You might need to add $TARGET_DIR to your PATH manually to use the binary." + fi + fi +} + +update_path_env_var $BIN_PATH + +info "Done!" \ No newline at end of file diff --git a/aztec-up/bin/aztec-nargo b/aztec-up/bin/aztec-nargo new file mode 100755 index 000000000000..5fdee793d7dc --- /dev/null +++ b/aztec-up/bin/aztec-nargo @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +set -euo pipefail + +$(dirname $0)/.aztec-run aztecprotocol/noir $@ \ No newline at end of file diff --git a/aztec-up/bin/aztec-sandbox b/aztec-up/bin/aztec-sandbox new file mode 100755 index 000000000000..1558bc49cb2f --- /dev/null +++ b/aztec-up/bin/aztec-sandbox @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Favour 'docker compose', falling back on docker-compose. +CMD="docker compose" +$CMD &>/dev/null || CMD="docker-compose" + +ARGS="-f $HOME/.aztec/bin/docker-compose.yml -p sandbox" + +# Function to be executed when SIGINT is received. +cleanup() { + $CMD $ARGS down +} + +# Set trap to catch SIGINT and call the cleanup function. +trap cleanup SIGINT + +# Change working dir, so relative volume mounts are in the right place. 
+cd ~/.aztec + +$CMD $ARGS up --force-recreate --remove-orphans \ No newline at end of file diff --git a/aztec-up/bin/aztec-up b/aztec-up/bin/aztec-up new file mode 100755 index 000000000000..736307e18273 --- /dev/null +++ b/aztec-up/bin/aztec-up @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +set -euo pipefail + +export SKIP_TITLE=1 +bash -i <(curl -s http://install.aztec.network) \ No newline at end of file diff --git a/aztec-up/bin/docker-compose.yml b/aztec-up/bin/docker-compose.yml new file mode 100644 index 000000000000..b861d729d163 --- /dev/null +++ b/aztec-up/bin/docker-compose.yml @@ -0,0 +1,35 @@ +version: "3" +services: + ethereum: + image: ghcr.io/foundry-rs/foundry@sha256:29ba6e34379e79c342ec02d437beb7929c9e254261e8032b17e187be71a2609f + command: > + ' + [ -n "$$FORK_URL" ] && ARGS="$$ARGS --fork-url $$FORK_URL"; + [ -n "$$FORK_BLOCK_NUMBER" ] && ARGS="$$ARGS --fork-block-number $$FORK_BLOCK_NUMBER"; + echo anvil -p 8545 --host 0.0.0.0 --chain-id 31337 --silent $$ARGS; + anvil -p 8545 --host 0.0.0.0 --chain-id 31337 --silent $$ARGS + ' + ports: + - "${SANDBOX_ANVIL_PORT:-8545}:8545" + environment: + FORK_URL: + FORK_BLOCK_NUMBER: + + aztec: + image: "aztecprotocol/aztec-sandbox" + ports: + - "${SANDBOX_AZTEC_NODE_PORT:-8079}:8079" + - "${SANDBOX_PXE_PORT:-8080}:8080" + environment: + DEBUG: # Loaded from the user shell if explicitly set + HOST_WORKDIR: "${PWD}" # Loaded from the user shell to show log files absolute path in host + ETHEREUM_HOST: http://ethereum:8545 + CHAIN_ID: 31337 + ARCHIVER_POLLING_INTERVAL_MS: 50 + P2P_BLOCK_CHECK_INTERVAL_MS: 50 + SEQ_TX_POLLING_INTERVAL_MS: 50 + WS_BLOCK_CHECK_INTERVAL_MS: 50 + PXE_BLOCK_POLLING_INTERVAL_MS: 50 + ARCHIVER_VIEM_POLLING_INTERVAL_MS: 500 + volumes: + - ./log:/usr/src/yarn-project/aztec-sandbox/log:rw diff --git a/aztec-up/deploy.sh b/aztec-up/deploy.sh new file mode 100755 index 000000000000..14f89b8e8db3 --- /dev/null +++ b/aztec-up/deploy.sh @@ -0,0 +1,23 @@ +set -e + +BRANCH=$1 + +export TF_VAR_BRANCH=$BRANCH + +# Downloads and installs `terraform` if it's not installed. +if [ ! 
-f /usr/local/bin/terraform ]; then + cd $HOME + TERRAFORM_VERSION=1.5.2 + curl -sSL https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip -o terraform.zip + sudo apt install -y unzip + unzip terraform.zip + sudo mv terraform /usr/local/bin/ + rm terraform.zip + cd - +fi + +echo "Initializing terraform" +terraform init -input=false -backend-config="key=aztec-sandbox-website/$BRANCH" + +echo "Applying terraform config" +terraform apply -input=false -auto-approve \ No newline at end of file diff --git a/aztec-up/terraform/main.tf b/aztec-up/terraform/main.tf new file mode 100644 index 000000000000..2465082e3fa4 --- /dev/null +++ b/aztec-up/terraform/main.tf @@ -0,0 +1,88 @@ +terraform { + backend "s3" { + bucket = "aztec-terraform" + region = "eu-west-2" + key = "aztec-up" + } + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.29.0" + } + } +} + +# Define provider and region +provider "aws" { + region = "eu-west-2" +} + +data "terraform_remote_state" "aztec2_iac" { + backend = "s3" + config = { + bucket = "aztec-terraform" + key = "aztec2/iac" + region = "eu-west-2" + } +} + +# Create the website S3 bucket +resource "aws_s3_bucket" "install_bucket" { + bucket = "install.aztec.network" +} + +resource "aws_s3_bucket_website_configuration" "website_bucket" { + bucket = aws_s3_bucket.install_bucket.id + + index_document { + suffix = "aztec-install" + } +} + +resource "aws_s3_bucket_public_access_block" "install_bucket_public_access" { + bucket = aws_s3_bucket.install_bucket.id + + block_public_acls = false + ignore_public_acls = false + block_public_policy = false + restrict_public_buckets = false +} + +resource "aws_s3_bucket_policy" "install_bucket_policy" { + bucket = aws_s3_bucket.install_bucket.id + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Principal = "*" + Action = "s3:GetObject" + Resource = "arn:aws:s3:::${aws_s3_bucket.install_bucket.id}/*" + } + ] + }) +} + +# Upload files to s3 bucket if changes were detected +resource "null_resource" "upload_public_directory" { + triggers = { + always_run = "${timestamp()}" + } + + provisioner "local-exec" { + command = "aws s3 sync ../bin s3://${aws_s3_bucket.install_bucket.id}/" + } +} + +resource "aws_route53_record" "subdomain_record" { + zone_id = data.terraform_remote_state.aztec2_iac.outputs.aws_route53_zone_id + name = "install.aztec.network" + type = "A" + + alias { + name = "${aws_s3_bucket_website_configuration.website_bucket.website_domain}" + zone_id = "${aws_s3_bucket.install_bucket.hosted_zone_id}" + evaluate_target_health = true + } +} diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 4ba3cac6d742..ea3392265464 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 4e4634542814f15d3703fe560874e08eda57e1fa - parent = 379b5adc259ac69b01e61b852172cdfc87cf9350 + commit = 86e6d6f1e48a0609de3f8e8f99007d45b4dbbdf4 + parent = 9c5443651faaed3dcb9fae36727337a34ce5922b method = merge cmdver = 0.4.6 diff --git a/barretenberg/acir_tests/Dockerfile.noir_acir_tests b/barretenberg/acir_tests/Dockerfile.noir_acir_tests index 87fdd8604a42..5131e94e2795 100644 --- a/barretenberg/acir_tests/Dockerfile.noir_acir_tests +++ b/barretenberg/acir_tests/Dockerfile.noir_acir_tests @@ -3,7 +3,7 @@ # So, it lives here. 
# This chains off the nargo build, and creates a container with a compiled set of acir tests. FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir -RUN apk add bash jq +RUN apt update && apt install -y jq && rm -rf /var/lib/apt/lists/* && apt-get clean ENV PATH="/usr/src/noir/target/release:${PATH}" WORKDIR /usr/src/noir/test_programs COPY . . diff --git a/barretenberg/acir_tests/bash_helpers/catch.sh b/barretenberg/acir_tests/bash_helpers/catch.sh index 888af3cbb44e..bc2025d4da51 100644 --- a/barretenberg/acir_tests/bash_helpers/catch.sh +++ b/barretenberg/acir_tests/bash_helpers/catch.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Handler for SIGCHLD, cleanup if child exit with error handle_sigchild() { diff --git a/barretenberg/acir_tests/bench_acir_tests.sh b/barretenberg/acir_tests/bench_acir_tests.sh index b338e82e30b2..a41b261d1cc3 100755 --- a/barretenberg/acir_tests/bench_acir_tests.sh +++ b/barretenberg/acir_tests/bench_acir_tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash TEST_NAMES=("$@") THREADS=(1 4 16 32 64) diff --git a/barretenberg/acir_tests/clone_test_vectors.sh b/barretenberg/acir_tests/clone_test_vectors.sh index c6fd6ef1bf16..b15814ab013f 100755 --- a/barretenberg/acir_tests/clone_test_vectors.sh +++ b/barretenberg/acir_tests/clone_test_vectors.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu TEST_SRC=${TEST_SRC:-../../noir/test_programs/acir_artifacts} diff --git a/barretenberg/acir_tests/gen_inner_proof_inputs.sh b/barretenberg/acir_tests/gen_inner_proof_inputs.sh index 64b87fe19db5..36137bde82ee 100755 --- a/barretenberg/acir_tests/gen_inner_proof_inputs.sh +++ b/barretenberg/acir_tests/gen_inner_proof_inputs.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Env var overrides: # BIN: to specify a different binary to test with (e.g. bb.js or bb.js-dev). set -eu diff --git a/barretenberg/acir_tests/run_acir_tests.sh b/barretenberg/acir_tests/run_acir_tests.sh index 84953d01c558..ee28c975113f 100755 --- a/barretenberg/acir_tests/run_acir_tests.sh +++ b/barretenberg/acir_tests/run_acir_tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Env var overrides: # BIN: to specify a different binary to test with (e.g. bb.js or bb.js-dev). # VERBOSE: to enable logging for each test. 
@@ -80,8 +80,8 @@ else # If parallel flag is set, run in parallel if [ -n "${PARALLEL:-}" ]; then test $TEST_NAME & - else - test $TEST_NAME + else + test $TEST_NAME fi done fi diff --git a/barretenberg/acir_tests/run_acir_tests_browser.sh b/barretenberg/acir_tests/run_acir_tests_browser.sh index 22830656250e..1c1f2ce0e08f 100755 --- a/barretenberg/acir_tests/run_acir_tests_browser.sh +++ b/barretenberg/acir_tests/run_acir_tests_browser.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -em cleanup() { diff --git a/barretenberg/bootstrap.sh b/barretenberg/bootstrap.sh index 504f19809820..c16fd294b6c2 100755 --- a/barretenberg/bootstrap.sh +++ b/barretenberg/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/barretenberg/cpp/bootstrap.sh b/barretenberg/cpp/bootstrap.sh index 25350c4fcce1..0b9295bafbd3 100755 --- a/barretenberg/cpp/bootstrap.sh +++ b/barretenberg/cpp/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Navigate to script folder diff --git a/barretenberg/cpp/format.sh b/barretenberg/cpp/format.sh index 0bf8bca805ca..ae314e96a6f1 100755 --- a/barretenberg/cpp/format.sh +++ b/barretenberg/cpp/format.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e if [ "$1" == "staged" ]; then diff --git a/barretenberg/cpp/scripts/bb-tests.sh b/barretenberg/cpp/scripts/bb-tests.sh index 459eb7565937..a3f322b2afaf 100755 --- a/barretenberg/cpp/scripts/bb-tests.sh +++ b/barretenberg/cpp/scripts/bb-tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script runs all test suites that have not been broken out into their own jobs for parallelisation. # Might be better to list exclusions here rather than inclusions as risky to maintain. set -eu diff --git a/barretenberg/cpp/scripts/benchmarks.sh b/barretenberg/cpp/scripts/benchmarks.sh index a7ec1dcb1d0e..8a547c29aba6 100755 --- a/barretenberg/cpp/scripts/benchmarks.sh +++ b/barretenberg/cpp/scripts/benchmarks.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Move above script dir. 
diff --git a/barretenberg/cpp/scripts/collect_coverage_information.sh b/barretenberg/cpp/scripts/collect_coverage_information.sh index b018e2514838..0a3231a304c6 100755 --- a/barretenberg/cpp/scripts/collect_coverage_information.sh +++ b/barretenberg/cpp/scripts/collect_coverage_information.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash if [ $# -ne 2 ]; then echo "Usage: $0 " diff --git a/barretenberg/cpp/scripts/collect_heap_information.sh b/barretenberg/cpp/scripts/collect_heap_information.sh index 1d25c5a791c4..a1b6bee078ee 100755 --- a/barretenberg/cpp/scripts/collect_heap_information.sh +++ b/barretenberg/cpp/scripts/collect_heap_information.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu PRESET=gperftools diff --git a/barretenberg/cpp/scripts/collect_profile_information.sh b/barretenberg/cpp/scripts/collect_profile_information.sh index df932c086bcd..ebc0249392e8 100755 --- a/barretenberg/cpp/scripts/collect_profile_information.sh +++ b/barretenberg/cpp/scripts/collect_profile_information.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # can also be 'xray-1thread' diff --git a/barretenberg/cpp/scripts/install-wasi-sdk.sh b/barretenberg/cpp/scripts/install-wasi-sdk.sh index 1da258d880bf..9d27d012a617 100755 --- a/barretenberg/cpp/scripts/install-wasi-sdk.sh +++ b/barretenberg/cpp/scripts/install-wasi-sdk.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu if [[ -d ./src/wasi-sdk-20.0 && -d ./src/wasi-sdk-20.0+threads ]]; then diff --git a/barretenberg/cpp/scripts/run_tests b/barretenberg/cpp/scripts/run_tests index 28d9f8cc4f8c..a67f03ee9921 100755 --- a/barretenberg/cpp/scripts/run_tests +++ b/barretenberg/cpp/scripts/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This is the default test runner which takes the as arguments: # 1. The number of ignition transcripts to download. # 2. The set of gtest binary names to run. diff --git a/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh b/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh index 0ac6dce1157b..34ee2ce171dc 100755 --- a/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh +++ b/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh @@ -1,7 +1,7 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is used to compare a suite of benchmarks between baseline (default: master) and -# the branch from which the script is run. Simply check out the branch of interest, ensure +# the branch from which the script is run. Simply check out the branch of interest, ensure # it is up to date with local master, and run the script. # Specify the benchmark suite and the "baseline" branch against which to compare @@ -43,7 +43,7 @@ BASELINE_RESULTS="$BENCH_RESULTS_DIR/results_baseline.json" echo -e "\nRunning $BENCH_TARGET in master.." bin/$BENCH_TARGET --benchmark_format=json > $BASELINE_RESULTS -# Call compare.py on the results (json) to get high level statistics. +# Call compare.py on the results (json) to get high level statistics. # See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. 
$BENCH_TOOLS_DIR/compare.py benchmarks $BASELINE_RESULTS $BRANCH_RESULTS diff --git a/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/compare_honk_to_plonk_ultra.sh b/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/compare_honk_to_plonk_ultra.sh index 1863327ae4ea..f97dbc0eaa74 100755 --- a/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/compare_honk_to_plonk_ultra.sh +++ b/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/compare_honk_to_plonk_ultra.sh @@ -1,7 +1,7 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is used to compare the results of honk_bench between baseline (master) and -# the branch from which the script is run. Simply check out the branch of interest, ensure +# the branch from which the script is run. Simply check out the branch of interest, ensure # it is up to date with local master, and run the script. echo -e '\nComparing Ultra Plonk/Honk benchmarks.' @@ -19,21 +19,21 @@ pip3 install --user -r $BUILD_DIR/_deps/benchmark-src/requirements.txt cd $BASE_DIR mkdir $BENCH_RESULTS_DIR -# +# echo -e '\nBuilding and running Standard benchmarks..' # rm -rf $BUILD_DIR -cmake --preset bench > /dev/null && cmake --build --preset bench --target ultra_plonk_bench +cmake --preset bench > /dev/null && cmake --build --preset bench --target ultra_plonk_bench cd build-bench PLONK_BENCH_RESULTS="$BENCH_RESULTS_DIR/plonk_bench.json" ./bin/ultra_plonk_bench --benchmark_format=json > $PLONK_BENCH_RESULTS cd .. -cmake --preset bench > /dev/null && cmake --build --preset bench --target ultra_honk_bench +cmake --preset bench > /dev/null && cmake --build --preset bench --target ultra_honk_bench cd build-bench HONK_BENCH_RESULTS="$BENCH_RESULTS_DIR/honk_bench.json" ./bin/ultra_honk_bench --benchmark_format=json > $HONK_BENCH_RESULTS -# Call compare.py on the results (json) to get high level statistics. +# Call compare.py on the results (json) to get high level statistics. # See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. $BENCH_TOOLS_DIR/compare.py benchmarks $PLONK_BENCH_RESULTS $HONK_BENCH_RESULTS diff --git a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp index 80d1ca652e93..ce322761ccb7 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp @@ -194,11 +194,6 @@ class GoblinUltra { { return { this->ecc_op_wire_1, this->ecc_op_wire_2, this->ecc_op_wire_3, this->ecc_op_wire_4 }; } - // The sorted concatenations of table and witness data needed for plookup. 
- RefVector get_sorted_polynomials() - { - return { this->sorted_1, this->sorted_2, this->sorted_3, this->sorted_4 }; - }; }; template class ShiftedEntities { @@ -290,11 +285,60 @@ class GoblinUltra { this->calldata_read_counts, this->lookup_inverses }; }; + + RefVector get_witness() + { + return { this->w_l, + this->w_r, + this->w_o, + this->w_4, + this->sorted_accum, + this->z_perm, + this->z_lookup, + this->ecc_op_wire_1, + this->ecc_op_wire_2, + this->ecc_op_wire_3, + this->ecc_op_wire_4, + this->calldata, + this->calldata_read_counts, + this->lookup_inverses }; + }; RefVector get_to_be_shifted() { return { this->table_1, this->table_2, this->table_3, this->table_4, this->w_l, this->w_r, this->w_o, this->w_4, this->sorted_accum, this->z_perm, this->z_lookup }; }; + RefVector get_precomputed() + { + return { this->q_m, + this->q_c, + this->q_l, + this->q_r, + this->q_o, + this->q_4, + this->q_arith, + this->q_sort, + this->q_elliptic, + this->q_aux, + this->q_lookup, + this->q_busread, + this->sigma_1, + this->sigma_2, + this->sigma_3, + this->sigma_4, + this->id_1, + this->id_2, + this->id_3, + this->id_4, + this->table_1, + this->table_2, + this->table_3, + this->table_4, + this->lagrange_first, + this->lagrange_last, + this->lagrange_ecc_op, + this->databus_id }; + } RefVector get_shifted() { return ShiftedEntities::get_all(); }; }; @@ -381,6 +425,29 @@ class GoblinUltra { } }; + /** + * @brief An owning container of polynomials. + * @warning When this was introduced it broke some of our design principles. + * - Execution trace builders don't handle "polynomials" because the interpretation of the execution trace + * columns as polynomials is a detail of the proving system, and trace builders are (sometimes in practice, + * always in principle) reusable for different proving protocols (e.g., Plonk and Honk). + * - Polynomial storage is handled by key classes. Polynomials aren't moved, but are accessed elsewhere by + * std::spans. + * + * We will consider revising this data model: TODO(https://github.com/AztecProtocol/barretenberg/issues/743) + */ + class AllPolynomials : public AllEntities { + public: + [[nodiscard]] AllValues get_row(const size_t row_idx) const + { + AllValues result; + for (auto [result_field, polynomial] : zip_view(result.get_all(), this->get_all())) { + result_field = polynomial[row_idx]; + } + return result; + } + }; + /** * @brief A container for the witness commitments. 
*/ @@ -488,7 +555,7 @@ class GoblinUltra { using VerifierCommitments = VerifierCommitments_; class FoldingParameters { public: - std::vector gate_separation_challenges; + std::vector gate_challenges; FF target_sum; }; diff --git a/barretenberg/cpp/src/barretenberg/flavor/ultra.hpp b/barretenberg/cpp/src/barretenberg/flavor/ultra.hpp index 065a70aed2f7..450571eaaba5 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/ultra.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/ultra.hpp @@ -207,6 +207,18 @@ class Ultra { }; }; + + RefVector get_precomputed() + { + return { q_m, q_c, q_l, q_r, q_o, q_4, q_arith, q_sort, + q_elliptic, q_aux, q_lookup, sigma_1, sigma_2, sigma_3, sigma_4, id_1, + id_2, id_3, id_4, table_1, table_2, table_3, table_4, lagrange_first, + lagrange_last + + }; + } + + RefVector get_witness() { return { w_l, w_r, w_o, w_4, sorted_accum, z_perm, z_lookup }; }; RefVector get_to_be_shifted() { return { table_1, table_2, table_3, table_4, w_l, w_r, w_o, w_4, sorted_accum, z_perm, z_lookup }; @@ -248,8 +260,8 @@ class Ultra { using VerificationKey = VerificationKey_>; /** - * @brief A field element for each entity of the flavor. These entities represent the prover polynomials evaluated - * at one point. + * @brief A field element for each entity of the flavor. These entities represent the prover polynomials + * evaluated at one point. */ class AllValues : public AllEntities { public: @@ -273,6 +285,29 @@ class Ultra { } }; + /** + * @brief An owning container of polynomials. + * @warning When this was introduced it broke some of our design principles. + * - Execution trace builders don't handle "polynomials" because the interpretation of the execution trace + * columns as polynomials is a detail of the proving system, and trace builders are (sometimes in practice, + * always in principle) reusable for different proving protocols (e.g., Plonk and Honk). + * - Polynomial storage is handled by key classes. Polynomials aren't moved, but are accessed elsewhere by + * std::spans. + * + * We will consider revising this data model: TODO(https://github.com/AztecProtocol/barretenberg/issues/743) + */ + class AllPolynomials : public AllEntities { + public: + [[nodiscard]] AllValues get_row(const size_t row_idx) const + { + AllValues result; + for (auto [result_field, polynomial] : zip_view(result.get_all(), this->get_all())) { + result_field = polynomial[row_idx]; + } + return result; + } + }; + /** * @brief A container for storing the partially evaluated multivariates produced by sumcheck. 
*/ @@ -323,32 +358,31 @@ class Ultra { z_lookup = "Z_LOOKUP"; sorted_accum = "SORTED_ACCUM"; - // The ones beginning with "__" are only used for debugging - q_c = "__Q_C"; - q_l = "__Q_L"; - q_r = "__Q_R"; - q_o = "__Q_O"; - q_4 = "__Q_4"; - q_m = "__Q_M"; - q_arith = "__Q_ARITH"; - q_sort = "__Q_SORT"; - q_elliptic = "__Q_ELLIPTIC"; - q_aux = "__Q_AUX"; - q_lookup = "__Q_LOOKUP"; - sigma_1 = "__SIGMA_1"; - sigma_2 = "__SIGMA_2"; - sigma_3 = "__SIGMA_3"; - sigma_4 = "__SIGMA_4"; - id_1 = "__ID_1"; - id_2 = "__ID_2"; - id_3 = "__ID_3"; - id_4 = "__ID_4"; - table_1 = "__TABLE_1"; - table_2 = "__TABLE_2"; - table_3 = "__TABLE_3"; - table_4 = "__TABLE_4"; - lagrange_first = "__LAGRANGE_FIRST"; - lagrange_last = "__LAGRANGE_LAST"; + q_c = "Q_C"; + q_l = "Q_L"; + q_r = "Q_R"; + q_o = "Q_O"; + q_4 = "Q_4"; + q_m = "Q_M"; + q_arith = "Q_ARITH"; + q_sort = "Q_SORT"; + q_elliptic = "Q_ELLIPTIC"; + q_aux = "Q_AUX"; + q_lookup = "Q_LOOKUP"; + sigma_1 = "SIGMA_1"; + sigma_2 = "SIGMA_2"; + sigma_3 = "SIGMA_3"; + sigma_4 = "SIGMA_4"; + id_1 = "ID_1"; + id_2 = "ID_2"; + id_3 = "ID_3"; + id_4 = "ID_4"; + table_1 = "TABLE_1"; + table_2 = "TABLE_2"; + table_3 = "TABLE_3"; + table_4 = "TABLE_4"; + lagrange_first = "LAGRANGE_FIRST"; + lagrange_last = "LAGRANGE_LAST"; }; }; @@ -357,11 +391,11 @@ class Ultra { VerifierCommitments(const std::shared_ptr& verification_key) { q_m = verification_key->q_m; + q_c = verification_key->q_c; q_l = verification_key->q_l; q_r = verification_key->q_r; q_o = verification_key->q_o; q_4 = verification_key->q_4; - q_c = verification_key->q_c; q_arith = verification_key->q_arith; q_sort = verification_key->q_sort; q_elliptic = verification_key->q_elliptic; @@ -386,7 +420,7 @@ class Ultra { class FoldingParameters { public: - std::vector gate_separation_challenges; + std::vector gate_challenges; FF target_sum; }; diff --git a/barretenberg/cpp/src/barretenberg/honk/utils/testing.hpp b/barretenberg/cpp/src/barretenberg/honk/utils/testing.hpp index 37c056609eef..dda239527410 100644 --- a/barretenberg/cpp/src/barretenberg/honk/utils/testing.hpp +++ b/barretenberg/cpp/src/barretenberg/honk/utils/testing.hpp @@ -9,18 +9,17 @@ namespace proof_system::honk { * function returns an array of data pointed to by the ProverPolynomials. 
*/ template -std::pair, Flavor::NUM_ALL_ENTITIES>, - typename Flavor::ProverPolynomials> -get_sequential_prover_polynomials(const size_t log_circuit_size, const size_t starting_value) +std::pair get_sequential_prover_polynomials( + const size_t log_circuit_size, const size_t starting_value) { using FF = typename Flavor::FF; using ProverPolynomials = typename Flavor::ProverPolynomials; using Polynomial = typename Flavor::Polynomial; - std::array, Flavor::NUM_ALL_ENTITIES> storage; + typename Flavor::AllPolynomials storage; size_t circuit_size = 1 << log_circuit_size; size_t value_idx = starting_value; - for (auto& polynomial : storage) { + for (auto& polynomial : storage.get_all()) { polynomial = Polynomial(circuit_size); for (auto& value : polynomial) { value = FF(value_idx++); @@ -28,7 +27,7 @@ get_sequential_prover_polynomials(const size_t log_circuit_size, const size_t st } ProverPolynomials prover_polynomials; - for (auto [prover_poly, storage_poly] : zip_view(prover_polynomials.get_all(), storage)) { + for (auto [prover_poly, storage_poly] : zip_view(prover_polynomials.get_all(), storage.get_all())) { prover_poly = storage_poly; } @@ -36,17 +35,16 @@ get_sequential_prover_polynomials(const size_t log_circuit_size, const size_t st } template -std::pair, Flavor::NUM_ALL_ENTITIES>, - typename Flavor::ProverPolynomials> -get_zero_prover_polynomials(const size_t log_circuit_size) +std::pair get_zero_prover_polynomials( + const size_t log_circuit_size) { using FF = typename Flavor::FF; using ProverPolynomials = typename Flavor::ProverPolynomials; using Polynomial = typename Flavor::Polynomial; - std::array, Flavor::NUM_ALL_ENTITIES> storage; + typename Flavor::AllPolynomials storage; size_t circuit_size = 1 << log_circuit_size; - for (auto& polynomial : storage) { + for (auto& polynomial : storage.get_all()) { polynomial = Polynomial(circuit_size); for (auto& value : polynomial) { value = FF(0); @@ -54,7 +52,7 @@ get_zero_prover_polynomials(const size_t log_circuit_size) } ProverPolynomials prover_polynomials; - for (auto [prover_poly, storage_poly] : zip_view(prover_polynomials.get_all(), storage)) { + for (auto [prover_poly, storage_poly] : zip_view(prover_polynomials.get_all(), storage.get_all())) { prover_poly = storage_poly; } diff --git a/barretenberg/cpp/src/barretenberg/honk/utils/testing.test.cpp b/barretenberg/cpp/src/barretenberg/honk/utils/testing.test.cpp index c4b5fda266c2..3e150e4a2d5d 100644 --- a/barretenberg/cpp/src/barretenberg/honk/utils/testing.test.cpp +++ b/barretenberg/cpp/src/barretenberg/honk/utils/testing.test.cpp @@ -10,8 +10,8 @@ TEST(HonkTestingUtils, ProverPolynomials) auto [storage, prover_polynomials] = proof_system::honk::get_sequential_prover_polynomials(/*log_circuit_size=*/2, /*starting_value=*/0); auto& first_polynomial = prover_polynomials.get_all()[0]; - EXPECT_EQ(storage[0][0], first_polynomial[0]); - EXPECT_EQ(storage[0][1], first_polynomial[1]); + EXPECT_EQ(storage.get_all()[0][0], first_polynomial[0]); + EXPECT_EQ(storage.get_all()[0][1], first_polynomial[1]); }; } // namespace barretenberg::test_testing_utils diff --git a/barretenberg/cpp/src/barretenberg/numeric/uint128/uint128.hpp b/barretenberg/cpp/src/barretenberg/numeric/uint128/uint128.hpp index 3dc320abe2d4..e229f9b7d237 100644 --- a/barretenberg/cpp/src/barretenberg/numeric/uint128/uint128.hpp +++ b/barretenberg/cpp/src/barretenberg/numeric/uint128/uint128.hpp @@ -5,6 +5,7 @@ #ifdef __i386__ #include "barretenberg/common/serialize.hpp" +#include namespace numeric { @@ -37,7 +38,7 @@ class 
alignas(32) uint128_t { constexpr ~uint128_t() = default; explicit constexpr operator bool() const { return static_cast(data[0]); }; - template explicit constexpr operator T() const { return static_cast(data[0]); }; + template explicit constexpr operator T() const { return static_cast(data[0]); }; [[nodiscard]] constexpr bool get_bit(uint64_t bit_index) const; [[nodiscard]] constexpr uint64_t get_msb() const; diff --git a/barretenberg/cpp/src/barretenberg/numeric/uint256/uint256.hpp b/barretenberg/cpp/src/barretenberg/numeric/uint256/uint256.hpp index d8cd9ef2f184..5ddaa9713aeb 100644 --- a/barretenberg/cpp/src/barretenberg/numeric/uint256/uint256.hpp +++ b/barretenberg/cpp/src/barretenberg/numeric/uint256/uint256.hpp @@ -14,6 +14,7 @@ #include "../uint128/uint128.hpp" #include "barretenberg/common/serialize.hpp" #include "barretenberg/common/throw_or_abort.hpp" +#include #include #include #include @@ -91,7 +92,7 @@ class alignas(32) uint256_t { explicit constexpr operator bool() const { return static_cast(data[0]); }; - template explicit constexpr operator T() const { return static_cast(data[0]); }; + template explicit constexpr operator T() const { return static_cast(data[0]); }; [[nodiscard]] constexpr bool get_bit(uint64_t bit_index) const; [[nodiscard]] constexpr uint64_t get_msb() const; diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/combiner.test.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/combiner.test.cpp index dce50d0220ca..68f22195659c 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/combiner.test.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/combiner.test.cpp @@ -33,9 +33,11 @@ TEST(Protogalaxy, CombinerOn2Instances) }; auto run_test = [&](bool is_random_input) { + // Combiner test on prover polynomisls containing random values, restricted to only the standard arithmetic + // relation. 
if (is_random_input) { std::vector> instance_data(NUM_INSTANCES); - std::array, NUM_INSTANCES> storage_arrays; + std::array storage_arrays; ProtoGalaxyProver prover; std::vector pow_betas = { FF(1), FF(2) }; @@ -46,6 +48,7 @@ TEST(Protogalaxy, CombinerOn2Instances) restrict_to_standard_arithmetic_relation(prover_polynomials); storage_arrays[idx] = std::move(storage); instance->prover_polynomials = prover_polynomials; + instance->instance_size = 2; instance_data[idx] = instance; } @@ -70,7 +73,7 @@ TEST(Protogalaxy, CombinerOn2Instances) EXPECT_EQ(result, expected_result); } else { std::vector> instance_data(NUM_INSTANCES); - std::array, NUM_INSTANCES> storage_arrays; + std::array storage_arrays; ProtoGalaxyProver prover; std::vector pow_betas = { FF(1), FF(2) }; @@ -81,6 +84,7 @@ TEST(Protogalaxy, CombinerOn2Instances) restrict_to_standard_arithmetic_relation(prover_polynomials); storage_arrays[idx] = std::move(storage); instance->prover_polynomials = prover_polynomials; + instance->instance_size = 2; instance_data[idx] = instance; } @@ -162,7 +166,7 @@ TEST(Protogalaxy, CombinerOn4Instances) auto run_test = [&]() { std::vector> instance_data(NUM_INSTANCES); - std::array, NUM_INSTANCES> storage_arrays; + std::array storage_arrays; ProtoGalaxyProver prover; std::vector pow_betas = { FF(1), FF(2) }; @@ -172,6 +176,7 @@ TEST(Protogalaxy, CombinerOn4Instances) /*log_circuit_size=*/1); storage_arrays[idx] = std::move(storage); instance->prover_polynomials = prover_polynomials; + instance->instance_size = 2; instance_data[idx] = instance; } diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/combiner_example_gen.py b/barretenberg/cpp/src/barretenberg/protogalaxy/combiner_example_gen.py index 906d1948847b..ac701d41e95f 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/combiner_example_gen.py +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/combiner_example_gen.py @@ -103,7 +103,6 @@ def compute_first_example(): row.q_l, row.q_r, row.q_o, row.q_c) accumulator += zeta_pow * relation_value zeta_pow *= zeta - return accumulator diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/folding_result.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/folding_result.hpp index 61118b8b6e14..171f9b38a781 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/folding_result.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/folding_result.hpp @@ -1,39 +1,18 @@ #pragma once #include "barretenberg/flavor/flavor.hpp" #include "barretenberg/relations/relation_parameters.hpp" +#include "barretenberg/sumcheck/instance/prover_instance.hpp" namespace proof_system::honk { -template struct ProverFoldingResult { - public: - using ProverPolynomials = typename Flavor::ProverPolynomials; - using FoldingParameters = typename Flavor::FoldingParameters; - ProverPolynomials folded_prover_polynomials; - // TODO(https://github.com/AztecProtocol/barretenberg/issues/656): turn folding data into a struct - std::vector folding_data; - FoldingParameters params; -}; - -template struct VerifierFoldingResult { - using FF = typename Flavor::FF; - using VerificationKey = typename Flavor::VerificationKey; - using FoldingParameters = typename Flavor::FoldingParameters; - std::vector folded_public_inputs; - std::shared_ptr folded_verification_key; - FoldingParameters parameters; -}; - /** - * @brief The aggregated result from the prover and verifier after a round of folding, used to create a new Instance. 
+ * @brief The result of running the Protogalaxy prover containing a new accumulator (relaxed instance) as well as the + * proof data to instantiate the verifier transcript. * * @tparam Flavor */ template struct FoldingResult { - using FF = typename Flavor::FF; - using ProverPolynomials = typename Flavor::ProverPolynomials; - using VerificationKey = typename Flavor::VerificationKey; - using FoldingParameters = typename Flavor::FoldingParameters; - ProverPolynomials folded_prover_polynomials; - std::vector folded_public_inputs; - std::shared_ptr verification_key; - FoldingParameters folding_parameters; + public: + std::shared_ptr> accumulator; + // TODO(https://github.com/AztecProtocol/barretenberg/issues/656): turn folding data into a struct + std::vector folding_data; }; } // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.cpp index b8543daa64ca..4a95f231d525 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.cpp @@ -1,91 +1,306 @@ #include "protogalaxy_prover.hpp" #include "barretenberg/flavor/flavor.hpp" namespace proof_system::honk { +template +void ProtoGalaxyProver_::finalise_and_send_instance(std::shared_ptr instance, + const std::string& domain_separator) +{ + instance->initialize_prover_polynomials(); -template void ProtoGalaxyProver_::prepare_for_folding() + const auto instance_size = static_cast(instance->instance_size); + const auto num_public_inputs = static_cast(instance->public_inputs.size()); + transcript->send_to_verifier(domain_separator + "_instance_size", instance_size); + transcript->send_to_verifier(domain_separator + "_public_input_size", num_public_inputs); + + for (size_t i = 0; i < instance->public_inputs.size(); ++i) { + auto public_input_i = instance->public_inputs[i]; + transcript->send_to_verifier(domain_separator + "_public_input_" + std::to_string(i), public_input_i); + } + transcript->send_to_verifier(domain_separator + "_pub_inputs_offset", + static_cast(instance->pub_inputs_offset)); + + auto& witness_commitments = instance->witness_commitments; + + // Commit to the first three wire polynomials of the instance + // We only commit to the fourth wire polynomial after adding memory recordss + witness_commitments.w_l = commitment_key->commit(instance->proving_key->w_l); + witness_commitments.w_r = commitment_key->commit(instance->proving_key->w_r); + witness_commitments.w_o = commitment_key->commit(instance->proving_key->w_o); + + auto wire_comms = witness_commitments.get_wires(); + auto commitment_labels = instance->commitment_labels; + auto wire_labels = commitment_labels.get_wires(); + for (size_t idx = 0; idx < 3; ++idx) { + transcript->send_to_verifier(domain_separator + "_" + wire_labels[idx], wire_comms[idx]); + } + + auto eta = transcript->get_challenge(domain_separator + "_eta"); + instance->compute_sorted_accumulator_polynomials(eta); + + // Commit to the sorted withness-table accumulator and the finalized (i.e. 
with memory records) fourth wire + // polynomial + witness_commitments.sorted_accum = commitment_key->commit(instance->prover_polynomials.sorted_accum); + witness_commitments.w_4 = commitment_key->commit(instance->prover_polynomials.w_4); + + transcript->send_to_verifier(domain_separator + "_" + commitment_labels.sorted_accum, + witness_commitments.sorted_accum); + transcript->send_to_verifier(domain_separator + "_" + commitment_labels.w_4, witness_commitments.w_4); + + auto [beta, gamma] = transcript->get_challenges(domain_separator + "_beta", domain_separator + "_gamma"); + instance->compute_grand_product_polynomials(beta, gamma); + + witness_commitments.z_perm = commitment_key->commit(instance->prover_polynomials.z_perm); + witness_commitments.z_lookup = commitment_key->commit(instance->prover_polynomials.z_lookup); + + transcript->send_to_verifier(domain_separator + "_" + commitment_labels.z_perm, + instance->witness_commitments.z_perm); + transcript->send_to_verifier(domain_separator + "_" + commitment_labels.z_lookup, + instance->witness_commitments.z_lookup); + + instance->alpha = transcript->get_challenge(domain_separator + "_alpha"); + + auto vk_view = instance->verification_key->get_all(); + auto labels = instance->commitment_labels.get_precomputed(); + for (size_t idx = 0; idx < labels.size(); idx++) { + transcript->send_to_verifier(domain_separator + "_" + labels[idx], vk_view[idx]); + } +} + +template +void ProtoGalaxyProver_::send_accumulator(std::shared_ptr instance, + const std::string& domain_separator) { - auto idx = 0; - for (auto it = instances.begin(); it != instances.end(); it++, idx++) { - auto instance = *it; - instance->initialize_prover_polynomials(); + const auto instance_size = static_cast(instance->instance_size); + const auto num_public_inputs = static_cast(instance->public_inputs.size()); + transcript->send_to_verifier(domain_separator + "_instance_size", instance_size); + transcript->send_to_verifier(domain_separator + "_public_input_size", num_public_inputs); - auto domain_separator = std::to_string(idx); - const auto circuit_size = static_cast(instance->proving_key->circuit_size); - const auto num_public_inputs = static_cast(instance->proving_key->num_public_inputs); + for (size_t i = 0; i < instance->public_inputs.size(); ++i) { + auto public_input_i = instance->public_inputs[i]; + transcript->send_to_verifier(domain_separator + "_public_input_" + std::to_string(i), public_input_i); + } - transcript->send_to_verifier(domain_separator + "_circuit_size", circuit_size); - transcript->send_to_verifier(domain_separator + "_public_input_size", num_public_inputs); - transcript->send_to_verifier(domain_separator + "_pub_inputs_offset", - static_cast(instance->pub_inputs_offset)); + transcript->send_to_verifier(domain_separator + "_eta", instance->relation_parameters.eta); + transcript->send_to_verifier(domain_separator + "_beta", instance->relation_parameters.beta); + transcript->send_to_verifier(domain_separator + "_gamma", instance->relation_parameters.gamma); + transcript->send_to_verifier(domain_separator + "_public_input_delta", + instance->relation_parameters.public_input_delta); + transcript->send_to_verifier(domain_separator + "_lookup_grand_product_delta", + instance->relation_parameters.lookup_grand_product_delta); - for (size_t i = 0; i < instance->proving_key->num_public_inputs; ++i) { - auto public_input_i = instance->public_inputs[i]; - transcript->send_to_verifier(domain_separator + "_public_input_" + std::to_string(i), public_input_i); - } + 
transcript->send_to_verifier(domain_separator + "_alpha", instance->alpha); - auto [eta, beta, gamma] = challenges_to_field_elements(transcript->get_challenges( - domain_separator + "_eta", domain_separator + "_beta", domain_separator + "_gamma")); + auto folding_parameters = instance->folding_parameters; + transcript->send_to_verifier(domain_separator + "_target_sum", folding_parameters.target_sum); + for (size_t idx = 0; idx < folding_parameters.gate_challenges.size(); idx++) { + transcript->send_to_verifier(domain_separator + "_gate_challenge_" + std::to_string(idx), + folding_parameters.gate_challenges[idx]); + } - instance->compute_sorted_accumulator_polynomials(eta); - instance->compute_grand_product_polynomials(beta, gamma); - instance->alpha = transcript->get_challenge(domain_separator + "_alpha"); + auto comm_view = instance->witness_commitments.get_all(); + auto witness_labels = instance->commitment_labels.get_witness(); + for (size_t idx = 0; idx < witness_labels.size(); idx++) { + transcript->send_to_verifier(domain_separator + "_" + witness_labels[idx], comm_view[idx]); } - fold_relation_parameters(instances); - fold_alpha(instances); + auto vk_view = instance->verification_key->get_all(); + auto vk_labels = instance->commitment_labels.get_precomputed(); + for (size_t idx = 0; idx < vk_labels.size(); idx++) { + transcript->send_to_verifier(domain_separator + "_" + vk_labels[idx], vk_view[idx]); + } +} + +template void ProtoGalaxyProver_::prepare_for_folding() +{ + auto idx = 0; + auto instance = instances[0]; + auto domain_separator = std::to_string(idx); + transcript->send_to_verifier(domain_separator + "is_accumulator", instance->is_accumulator); + if (instance->is_accumulator) { + send_accumulator(instance, domain_separator); + } else { + finalise_and_send_instance(instance, domain_separator); + } + idx++; + + for (auto it = instances.begin() + 1; it != instances.end(); it++, idx++) { + auto instance = *it; + auto domain_separator = std::to_string(idx); + finalise_and_send_instance(instance, domain_separator); + } } // TODO(#https://github.com/AztecProtocol/barretenberg/issues/689): finalise implementation this function template -ProverFoldingResult ProtoGalaxyProver_::fold_instances() +FoldingResult ProtoGalaxyProver_::fold_instances() { prepare_for_folding(); + // TODO(#https://github.com/AztecProtocol/barretenberg/issues/740): Handle the case where we are folding for the // first time and accumulator is 0 - // TODO(#https://github.com/AztecProtocol/barretenberg/issues/763): Fold alpha FF delta = transcript->get_challenge("delta"); auto accumulator = get_accumulator(); - auto instance_size = accumulator->prover_polynomials.get_polynomial_size(); - const auto log_instance_size = static_cast(numeric::get_msb(instance_size)); - auto deltas = compute_round_challenge_pows(log_instance_size, delta); + auto deltas = compute_round_challenge_pows(accumulator->log_instance_size, delta); auto perturbator = compute_perturbator(accumulator, deltas); - for (size_t idx = 0; idx <= log_instance_size; idx++) { + for (size_t idx = 0; idx <= accumulator->log_instance_size; idx++) { transcript->send_to_verifier("perturbator_" + std::to_string(idx), perturbator[idx]); } + assert(perturbator[0] == accumulator->folding_parameters.target_sum); + auto perturbator_challenge = transcript->get_challenge("perturbator_challenge"); + instances.next_gate_challenges = + update_gate_challenges(perturbator_challenge, accumulator->folding_parameters.gate_challenges, deltas); + const 
auto pow_betas_star = + compute_pow_polynomial_at_values(instances.next_gate_challenges, accumulator->instance_size); + + combine_relation_parameters(instances); + combine_alpha(instances); + auto combiner = compute_combiner(instances, pow_betas_star); - FF perturbator_challenge = transcript->get_challenge("perturbator_challenge"); auto compressed_perturbator = perturbator.evaluate(perturbator_challenge); - std::vector betas_star(log_instance_size); - betas_star[0] = 1; - auto betas = accumulator->folding_parameters.gate_separation_challenges; - for (size_t idx = 1; idx < log_instance_size; idx++) { - betas_star[idx] = betas[idx] + perturbator_challenge * deltas[idx - 1]; + auto combiner_quotient = compute_combiner_quotient(compressed_perturbator, combiner); + + for (size_t idx = ProverInstances::NUM; idx < ProverInstances::BATCHED_EXTENDED_LENGTH; idx++) { + transcript->send_to_verifier("combiner_quotient_" + std::to_string(idx), combiner_quotient.value_at(idx)); } + auto combiner_challenge = transcript->get_challenge("combiner_quotient_challenge"); - auto pow_betas_star = compute_pow_polynomial_at_values(betas_star, instance_size); + FoldingResult res; + res.accumulator = + compute_next_accumulator(instances, combiner_quotient, combiner_challenge, compressed_perturbator); + res.folding_data = transcript->proof_data; - auto combiner = compute_combiner(instances, pow_betas_star); - auto combiner_quotient = compute_combiner_quotient(compressed_perturbator, combiner); - for (size_t idx = ProverInstances::NUM; idx < combiner.size(); idx++) { - transcript->send_to_verifier("combiner_quotient_" + std::to_string(idx), combiner_quotient.value_at(idx)); + return res; +} +template +std::shared_ptr ProtoGalaxyProver_::compute_next_accumulator( + ProverInstances& instances, + Univariate& combiner_quotient, + const FF& challenge, + const FF& compressed_perturbator) +{ + auto combiner_quotient_at_challenge = combiner_quotient.evaluate(challenge); + + // Given the challenge \gamma, compute Z(\gamma) and {L_0(\gamma),L_1(\gamma)} + // TODO(https://github.com/AztecProtocol/barretenberg/issues/764): Generalize the vanishing polynomial formula + // and the computation of Lagrange basis for k instances + auto vanishing_polynomial_at_challenge = challenge * (challenge - FF(1)); + std::vector lagranges{ FF(1) - challenge, challenge }; + + auto next_accumulator = std::make_shared(); + + // Compute the next target sum and send the next folding parameters to the verifier + auto next_target_sum = + compressed_perturbator * lagranges[0] + vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + next_accumulator->folding_parameters = { instances.next_gate_challenges, next_target_sum }; + transcript->send_to_verifier("next_target_sum", next_target_sum); + for (size_t idx = 0; idx < instances.next_gate_challenges.size(); idx++) { + transcript->send_to_verifier("next_gate_challenge_" + std::to_string(idx), instances.next_gate_challenges[idx]); } - FF combiner_challenge = transcript->get_challenge("combiner_quotient_challenge"); - auto combiner_quotient_at_challenge = combiner_quotient.evaluate(combiner_challenge); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/764): Generalize these formulas as well as computation - // of Lagrange basis - auto vanishing_polynomial_at_challenge = combiner_challenge * (combiner_challenge - FF(1)); - auto lagrange_0_at_challenge = FF(1) - combiner_challenge; + // Allocate space, initialised to 0, for the prover polynomials of the next 
accumulator + AllPolynomials storage; + for (auto& polynomial : storage.get_all()) { + polynomial = typename Flavor::Polynomial(instances[0]->instance_size); + for (auto& value : polynomial) { + value = FF(0); + } + } + ProverPolynomials acc_prover_polynomials; + size_t poly_idx = 0; + auto prover_polynomial_pointers = acc_prover_polynomials.get_all(); + for (auto& polynomial : storage.get_all()) { + prover_polynomial_pointers[poly_idx] = polynomial; + poly_idx++; + } - auto new_target_sum = compressed_perturbator * lagrange_0_at_challenge + - vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + // Fold the prover polynomials + auto acc_poly_views = acc_prover_polynomials.get_all(); + for (size_t inst_idx = 0; inst_idx < ProverInstances::NUM; inst_idx++) { + auto inst_poly_views = instances[inst_idx]->prover_polynomials.get_all(); + for (auto [acc_poly_view, inst_poly_view] : zip_view(acc_poly_views, inst_poly_views)) { + for (size_t poly_idx = 0; poly_idx < inst_poly_view.size(); poly_idx++) { + (acc_poly_view)[poly_idx] += (inst_poly_view)[poly_idx] * lagranges[inst_idx]; + } + } + } + next_accumulator->prover_polynomials = acc_prover_polynomials; - ProverFoldingResult res; - res.params.target_sum = new_target_sum; - res.folding_data = transcript->proof_data; - return res; + // Fold the witness commtiments and send them to the verifier + auto witness_labels = next_accumulator->commitment_labels.get_witness(); + size_t comm_idx = 0; + for (auto& acc_comm : next_accumulator->witness_commitments.get_all()) { + acc_comm = Commitment::infinity(); + size_t inst_idx = 0; + for (auto& instance : instances) { + acc_comm = acc_comm + instance->witness_commitments.get_all()[comm_idx] * lagranges[inst_idx]; + inst_idx++; + } + transcript->send_to_verifier("next_" + witness_labels[comm_idx], acc_comm); + comm_idx++; + } + + // Fold public data ϕ from all instances to produce ϕ* and add it to the transcript. As part of the folding + // verification, the verifier will produce ϕ* as well and check it against what was sent by the prover. + + // Fold the public inputs and send to the verifier + next_accumulator->public_inputs = std::vector(instances[0]->public_inputs.size(), 0); + size_t el_idx = 0; + for (auto& el : next_accumulator->public_inputs) { + size_t inst = 0; + for (auto& instance : instances) { + el += instance->public_inputs[el_idx] * lagranges[inst]; + inst++; + } + transcript->send_to_verifier("next_public_input_" + std::to_string(el_idx), el); + el_idx++; + } + + // Evaluate the combined batching challenge α univariate at challenge to obtain next α and send it to the + // verifier + next_accumulator->alpha = instances.alpha.evaluate(challenge); + transcript->send_to_verifier("next_alpha", next_accumulator->alpha); + + // Evaluate each relation parameter univariate at challenge to obtain the folded relation parameters and send to + // the verifier + auto& combined_relation_parameters = instances.relation_parameters; + auto folded_relation_parameters = proof_system::RelationParameters{ + combined_relation_parameters.eta.evaluate(challenge), + combined_relation_parameters.beta.evaluate(challenge), + combined_relation_parameters.gamma.evaluate(challenge), + combined_relation_parameters.public_input_delta.evaluate(challenge), + combined_relation_parameters.lookup_grand_product_delta.evaluate(challenge), + }; + // TODO(https://github.com/AztecProtocol/barretenberg/issues/805): Add the relation parameters to the transcript + // together. 
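Note on the folding weights used throughout `compute_next_accumulator` (and mirrored by the verifier further down): every piece of public data is weighted by the Lagrange evaluations L_i(γ), and the vanishing polynomial Z(γ) enters the new target sum. As the TODO for issue 764 points out, the basis `{1 - γ, γ}` and `Z(γ) = γ(γ - 1)` above are hard-coded for two instances. Below is a minimal, self-contained sketch of the general k-instance computation over the domain {0, …, k−1}; the function name and the use of `double` as a stand-in field are illustrative assumptions, not the library's API.

```
// Illustrative sketch only: generalize the two-instance Lagrange/vanishing-polynomial
// computation to k instances over the evaluation domain {0, 1, ..., k-1}.
// "FF" is a stand-in numeric type; in barretenberg it would be the flavor's field.
#include <cstddef>
#include <iostream>
#include <vector>

template <typename FF> struct FoldingCoefficients {
    std::vector<FF> lagranges; // L_i(challenge) for i = 0..k-1
    FF vanishing;              // Z(challenge) = prod_i (challenge - i)
};

template <typename FF> FoldingCoefficients<FF> compute_folding_coefficients(FF challenge, size_t k)
{
    FoldingCoefficients<FF> out{ std::vector<FF>(k, FF(1)), FF(1) };
    for (size_t i = 0; i < k; i++) {
        out.vanishing *= (challenge - FF(i));
        for (size_t j = 0; j < k; j++) {
            if (j == i) {
                continue;
            }
            // L_i(X) = prod_{j != i} (X - j) / (i - j), evaluated at the challenge
            out.lagranges[i] *= (challenge - FF(j)) / (FF(i) - FF(j));
        }
    }
    return out;
}

int main()
{
    // For k = 2 this reproduces L_0 = 1 - gamma, L_1 = gamma and Z(gamma) = gamma * (gamma - 1).
    auto [lagranges, vanishing] = compute_folding_coefficients<double>(0.3, 2);
    std::cout << lagranges[0] << " " << lagranges[1] << " " << vanishing << "\n"; // 0.7 0.3 -0.21
}
```

With these coefficients, the folded public data is ϕ* = Σ_i L_i(γ)·ϕ_i and the next target sum is e* = F(a)·L_0(γ) + Z(γ)·K(γ), matching the two-instance expressions in this diff.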
+ transcript->send_to_verifier("next_eta", folded_relation_parameters.eta); + transcript->send_to_verifier("next_beta", folded_relation_parameters.beta); + transcript->send_to_verifier("next_gamma", folded_relation_parameters.gamma); + transcript->send_to_verifier("next_public_input_delta", folded_relation_parameters.public_input_delta); + transcript->send_to_verifier("next_lookup_grand_product_delta", + folded_relation_parameters.lookup_grand_product_delta); + next_accumulator->relation_parameters = folded_relation_parameters; + + // Fold the verification key and send it to the verifier as this is part of ϕ as well + auto acc_vk = std::make_shared(instances[0]->prover_polynomials.get_polynomial_size(), + instances[0]->public_inputs.size()); + auto labels = next_accumulator->commitment_labels.get_precomputed(); + size_t vk_idx = 0; + for (auto& vk : acc_vk->get_all()) { + size_t inst = 0; + vk = Commitment::infinity(); + for (auto& instance : instances) { + vk = vk + (instance->verification_key->get_all()[vk_idx]) * lagranges[inst]; + inst++; + } + transcript->send_to_verifier("next_" + labels[vk_idx], vk); + vk_idx++; + } + next_accumulator->verification_key = acc_vk; + + return next_accumulator; } + template class ProtoGalaxyProver_>; template class ProtoGalaxyProver_>; } // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp index 5e2b7b32ec67..9c881d82a891 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp @@ -24,6 +24,11 @@ template class ProtoGalaxyProver_ { using ProverPolynomials = typename Flavor::ProverPolynomials; using Relations = typename Flavor::Relations; using AlphaType = typename ProverInstances::AlphaType; + using VerificationKey = typename Flavor::VerificationKey; + using CommitmentKey = typename Flavor::CommitmentKey; + using WitnessCommitments = typename Flavor::WitnessCommitments; + using Commitment = typename Flavor::Commitment; + using AllPolynomials = typename Flavor::AllPolynomials; using BaseUnivariate = Univariate; // The length of ExtendedUnivariate is the largest length (==max_relation_degree + 1) of a univariate polynomial @@ -44,19 +49,48 @@ template class ProtoGalaxyProver_ { ProverInstances instances; std::shared_ptr transcript = std::make_shared(); + std::shared_ptr commitment_key; + ProtoGalaxyProver_() = default; - ProtoGalaxyProver_(ProverInstances insts) - : instances(insts){}; + ProtoGalaxyProver_(const std::vector>& insts, + const std::shared_ptr& commitment_key) + : instances(ProverInstances(insts)) + , commitment_key(std::move(commitment_key)){}; ~ProtoGalaxyProver_() = default; /** - * @brief Prior to folding we need to add all the public inputs to the transcript, labelled by their corresponding - * instance index, compute all the instance's polynomials and record the relation parameters involved in computing - * these polynomials in the transcript. - * + * @brief Prior to folding, we need to finalize the given instances and add all their public data ϕ to the + * transcript, labelled by their corresponding instance index for domain separation. + * TODO(https://github.com/AztecProtocol/barretenberg/issues/795):The rounds prior to actual proving/folding are + * common between decider and folding verifier and could be somehow shared so we do not duplicate code so much. 
*/ void prepare_for_folding(); + /** + * @brief Send the public data of an accumulator, i.e. a relaxed instance, to the verifier (ϕ in the paper). + * + * @param domain_separator separates the same type of data coming from difference instances by instance + * index + */ + void send_accumulator(std::shared_ptr, const std::string& domain_separator); + + /** + * @brief For each instance produced by a circuit, prior to folding, we need to complete the computation of its + * prover polynomials, commit to witnesses and generate the relation parameters as well as send the public data ϕ of + * an instance to the verifier. + * + * @param domain_separator separates the same type of data coming from difference instances by instance + * index + */ + void finalise_and_send_instance(std::shared_ptr, const std::string& domain_separator); + + /** + * @brief Run the folding prover protocol to produce a new accumulator and a folding proof to be verified by the + * folding verifier. + * + * TODO(https://github.com/AztecProtocol/barretenberg/issues/753): fold goblin polynomials + */ + FoldingResult fold_instances(); /** * @brief Given a vector \vec{\beta} of values, compute the pow polynomial on these values as defined in the paper. */ @@ -89,6 +123,20 @@ template class ProtoGalaxyProver_ { return pows; } + static std::vector update_gate_challenges(const FF perturbator_challenge, + const std::vector& gate_challenges, + const std::vector& round_challenges) + { + auto log_instance_size = gate_challenges.size(); + std::vector next_gate_challenges(log_instance_size); + next_gate_challenges[0] = 1; + + for (size_t idx = 1; idx < log_instance_size; idx++) { + next_gate_challenges[idx] = gate_challenges[idx] + perturbator_challenge * round_challenges[idx - 1]; + } + return next_gate_challenges; + } + // Returns the accumulator, which is the first element in ProverInstances. The accumulator is assumed to have the // FoldingParameters set and be the result of a previous round of folding. // TODO(https://github.com/AztecProtocol/barretenberg/issues/740): handle the case when the accumulator is empty @@ -191,14 +239,12 @@ template class ProtoGalaxyProver_ { { auto full_honk_evaluations = compute_full_honk_evaluations( accumulator->prover_polynomials, accumulator->alpha, accumulator->relation_parameters); - const auto betas = accumulator->folding_parameters.gate_separation_challenges; + const auto betas = accumulator->folding_parameters.gate_challenges; assert(betas.size() == deltas.size()); auto coeffs = construct_perturbator_coefficients(betas, deltas, full_honk_evaluations); return Polynomial(coeffs); } - ProverFoldingResult fold_instances(); - TupleOfTuplesOfUnivariates univariate_accumulators; /** @@ -243,7 +289,7 @@ template class ProtoGalaxyProver_ { ExtendedUnivariateWithRandomization compute_combiner(const ProverInstances& instances, const std::vector& pow_betas_star) { - size_t common_circuit_size = instances[0]->prover_polynomials.get_polynomial_size(); + size_t common_instance_size = instances[0]->instance_size; // Determine number of threads for multithreading. // Note: Multithreading is "on" for every round but we reduce the number of threads from the max available based @@ -251,14 +297,15 @@ template class ProtoGalaxyProver_ { // For now we use a power of 2 number of threads simply to ensure the round size is evenly divided. 
size_t max_num_threads = get_num_cpus_pow2(); // number of available threads (power of 2) size_t min_iterations_per_thread = 1 << 6; // min number of iterations for which we'll spin up a unique thread - size_t desired_num_threads = common_circuit_size / min_iterations_per_thread; + size_t desired_num_threads = common_instance_size / min_iterations_per_thread; size_t num_threads = std::min(desired_num_threads, max_num_threads); // fewer than max if justified num_threads = num_threads > 0 ? num_threads : 1; // ensure num threads is >= 1 - size_t iterations_per_thread = common_circuit_size / num_threads; // actual iterations per thread + size_t iterations_per_thread = common_instance_size / num_threads; // actual iterations per thread // Construct univariate accumulator containers; one per thread std::vector thread_univariate_accumulators(num_threads); for (auto& accum : thread_univariate_accumulators) { + // just normal relation lengths Utils::zero_univariates(accum); } @@ -341,21 +388,21 @@ template class ProtoGalaxyProver_ { } /** - * @brief Create folded (univariate) relation parameters. + * @brief Combine each relation parameter, in part, from all the instances into univariates, used in the computation + * of combiner. * @details For a given relation parameter type, extract that parameter from each instance, place the values in a * univariate (i.e., sum them against an appropriate univariate Lagrange basis) and then extended as needed during * the constuction of the combiner. */ - static void fold_relation_parameters(ProverInstances& instances) + static void combine_relation_parameters(ProverInstances& instances) { // array of parameters to be computed - auto& folded_parameters = instances.relation_parameters.to_fold; size_t param_idx = 0; - for (auto& folded_parameter : folded_parameters) { + for (auto& folded_parameter : instances.relation_parameters.to_fold) { Univariate tmp(0); size_t instance_idx = 0; for (auto& instance : instances) { - tmp.value_at(instance_idx) = instance->relation_parameters.to_fold[param_idx]; + tmp.value_at(instance_idx) = instance->relation_parameters.to_fold[param_idx].get(); instance_idx++; } folded_parameter.get() = tmp.template extend_to(); @@ -364,14 +411,15 @@ template class ProtoGalaxyProver_ { } /** - * @brief Create folded univariate for the relation batching parameter (alpha). + * @brief Combine the relation batching parameter (named alpha) from each instance into a univariate, used in the + * computation of combiner. * */ // TODO(https://github.com/AztecProtocol/barretenberg/issues/772): At the moment we have a single α per Instance, we // fold them and then we use the unique folded_α for each folded subrelation that is batched in the combiner. This // is obviously insecure. We need to generate α_i for each subrelation_i, fold them and then use folded_α_i when // batching the i-th folded subrelation in the combiner. 
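A concrete, hypothetical illustration of `combine_relation_parameters` (and of the `combine_alpha` helper declared just below, which follows the same pattern): with two instances whose `eta` values are 1 and 3, the restriction to the domain {0, 1} is (1, 3), and extending that degree-1 univariate simply continues the line — 1, 3, 5, 7, … — which is exactly the expectation checked by the `FoldChallenges` test later in this diff. A toy sketch of the extension step, with a plain array standing in for the `Univariate` class:

```
#include <array>
#include <cstddef>
#include <iostream>

// Linearly extend a univariate given by its values at {0, 1} to a longer evaluation domain,
// mirroring what the two-instance extend_to call above produces.
template <size_t EXTENDED> std::array<long, EXTENDED> extend_to(const std::array<long, 2>& evals)
{
    std::array<long, EXTENDED> out{};
    const long step = evals[1] - evals[0]; // constant difference of a degree-1 polynomial
    for (size_t i = 0; i < EXTENDED; i++) {
        out[i] = evals[0] + static_cast<long>(i) * step;
    }
    return out;
}

int main()
{
    // eta = 1 on instance 0 and eta = 3 on instance 1 extends to 1 3 5 7 9 11 13 15 17 19 21 23.
    for (long v : extend_to<12>({ 1, 3 })) {
        std::cout << v << " ";
    }
    std::cout << "\n";
}
```

The real extension works over the flavor's field and for general starting domains; this only shows the two-instance, linear case.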
- static void fold_alpha(ProverInstances& instances) + static void combine_alpha(ProverInstances& instances) { Univariate accumulated_alpha; size_t instance_idx = 0; @@ -381,6 +429,24 @@ template class ProtoGalaxyProver_ { } instances.alpha = accumulated_alpha.template extend_to(); } + + /** + * @brief Compute the next accumulator (ϕ*, ω*\vec{\beta*}, e*), send the public data ϕ* and the folding parameters + * (\vec{\beta*}, e*) to the verifier and return the complete accumulator + * + * @details At this stage, we assume that the instances have the same size and the same number of public parameter.s + * @param instances + * @param combiner_quotient polynomial K in the paper + * @param challenge + * @param compressed_perturbator + * + * TODO(https://github.com/AztecProtocol/barretenberg/issues/796): optimise the construction of the new accumulator + */ + std::shared_ptr compute_next_accumulator( + ProverInstances& instances, + Univariate& combiner_quotient, + const FF& challenge, + const FF& compressed_perturbator); }; extern template class ProtoGalaxyProver_>; diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp index 1aa99dea3792..7107d991228a 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp @@ -3,57 +3,148 @@ namespace proof_system::honk { template -void ProtoGalaxyVerifier_::prepare_for_folding(std::vector fold_data) +void ProtoGalaxyVerifier_::receive_accumulator(const std::shared_ptr& inst, + const std::string& domain_separator) +{ + inst->instance_size = transcript->template receive_from_prover(domain_separator + "_instance_size"); + inst->log_instance_size = static_cast(numeric::get_msb(inst->instance_size)); + inst->public_input_size = + transcript->template receive_from_prover(domain_separator + "_public_input_size"); + + for (size_t i = 0; i < inst->public_input_size; ++i) { + auto public_input_i = + transcript->template receive_from_prover(domain_separator + "_public_input_" + std::to_string(i)); + inst->public_inputs.emplace_back(public_input_i); + } + + auto eta = transcript->template receive_from_prover(domain_separator + "_eta"); + auto beta = transcript->template receive_from_prover(domain_separator + "_beta"); + auto gamma = transcript->template receive_from_prover(domain_separator + "_gamma"); + auto public_input_delta = transcript->template receive_from_prover(domain_separator + "_public_input_delta"); + auto lookup_grand_product_delta = + transcript->template receive_from_prover(domain_separator + "_lookup_grand_product_delta"); + inst->relation_parameters = + RelationParameters{ eta, beta, gamma, public_input_delta, lookup_grand_product_delta }; + inst->alpha = transcript->template receive_from_prover(domain_separator + "_alpha"); + + inst->folding_parameters.target_sum = + transcript->template receive_from_prover(domain_separator + "_target_sum"); + + inst->folding_parameters.gate_challenges = std::vector(inst->log_instance_size); + for (size_t idx = 0; idx < inst->log_instance_size; idx++) { + inst->folding_parameters.gate_challenges[idx] = + transcript->template receive_from_prover(domain_separator + "_gate_challenge_" + std::to_string(idx)); + } + auto comm_view = inst->witness_commitments.get_all(); + auto witness_labels = inst->commitment_labels.get_witness(); + for (size_t idx = 0; idx < witness_labels.size(); idx++) { + comm_view[idx] = + 
transcript->template receive_from_prover(domain_separator + "_" + witness_labels[idx]); + } + + inst->verification_key = std::make_shared(inst->instance_size, inst->public_input_size); + auto vk_view = inst->verification_key->get_all(); + auto vk_labels = inst->commitment_labels.get_precomputed(); + for (size_t idx = 0; idx < vk_labels.size(); idx++) { + vk_view[idx] = transcript->template receive_from_prover(domain_separator + "_" + vk_labels[idx]); + } +} + +template +void ProtoGalaxyVerifier_::receive_and_finalise_instance(const std::shared_ptr& inst, + const std::string& domain_separator) +{ + inst->instance_size = transcript->template receive_from_prover(domain_separator + "_instance_size"); + inst->log_instance_size = static_cast(numeric::get_msb(inst->instance_size)); + inst->public_input_size = + transcript->template receive_from_prover(domain_separator + "_public_input_size"); + + for (size_t i = 0; i < inst->public_input_size; ++i) { + auto public_input_i = + transcript->template receive_from_prover(domain_separator + "_public_input_" + std::to_string(i)); + inst->public_inputs.emplace_back(public_input_i); + } + + inst->pub_inputs_offset = + transcript->template receive_from_prover(domain_separator + "_pub_inputs_offset"); + + auto labels = inst->commitment_labels; + auto& witness_commitments = inst->witness_commitments; + witness_commitments.w_l = transcript->template receive_from_prover(domain_separator + "_" + labels.w_l); + witness_commitments.w_r = transcript->template receive_from_prover(domain_separator + "_" + labels.w_r); + witness_commitments.w_o = transcript->template receive_from_prover(domain_separator + "_" + labels.w_o); + + auto eta = transcript->get_challenge(domain_separator + "_eta"); + witness_commitments.sorted_accum = + transcript->template receive_from_prover(domain_separator + "_" + labels.sorted_accum); + witness_commitments.w_4 = transcript->template receive_from_prover(domain_separator + "_" + labels.w_4); + + auto [beta, gamma] = transcript->get_challenges(domain_separator + "_beta", domain_separator + "_gamma"); + witness_commitments.z_perm = + transcript->template receive_from_prover(domain_separator + "_" + labels.z_perm); + witness_commitments.z_lookup = + transcript->template receive_from_prover(domain_separator + "_" + labels.z_lookup); + + const FF public_input_delta = compute_public_input_delta( + inst->public_inputs, beta, gamma, inst->instance_size, inst->pub_inputs_offset); + const FF lookup_grand_product_delta = compute_lookup_grand_product_delta(beta, gamma, inst->instance_size); + inst->relation_parameters = + RelationParameters{ eta, beta, gamma, public_input_delta, lookup_grand_product_delta }; + + inst->alpha = transcript->get_challenge(domain_separator + "_alpha"); + + inst->verification_key = std::make_shared(inst->instance_size, inst->public_input_size); + auto vk_view = inst->verification_key->get_all(); + auto vk_labels = labels.get_precomputed(); + for (size_t idx = 0; idx < vk_labels.size(); idx++) { + vk_view[idx] = transcript->template receive_from_prover(domain_separator + "_" + vk_labels[idx]); + } +} + +// TODO(https://github.com/AztecProtocol/barretenberg/issues/795): The rounds prior to actual verifying are common +// between decider and folding verifier and could be somehow shared so we do not duplicate code so much. 
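For orientation before the verifier-side logic below — this is a summary in my own notation, not text from the diff — the checks in `verify_folding_proof` amount to re-deriving the next accumulator's folding parameters and public data and comparing them against what the prover sent:

```
e^* = F(a)\,L_0(\gamma) + Z(\gamma)\,K(\gamma), \qquad
\beta^*_0 = 1,\quad \beta^*_i = \beta_i + a\,\delta_{i-1}\ (i \ge 1), \qquad
\phi^* = \textstyle\sum_j L_j(\gamma)\,\phi_j
```

where F is the perturbator, K the combiner quotient, a the perturbator challenge, γ the combiner challenge, L_j the Lagrange basis on {0, …, k−1}, Z its vanishing polynomial, δ the round challenge powers, and ϕ_j the public data of instance j.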
+template +void ProtoGalaxyVerifier_::prepare_for_folding(const std::vector& fold_data) { transcript = std::make_shared(fold_data); auto index = 0; - for (auto it = verifier_instances.begin(); it != verifier_instances.end(); it++, index++) { + auto inst = instances[0]; + auto domain_separator = std::to_string(index); + inst->is_accumulator = transcript->template receive_from_prover(domain_separator + "is_accumulator"); + if (inst->is_accumulator) { + receive_accumulator(inst, domain_separator); + } else { + receive_and_finalise_instance(inst, domain_separator); + } + index++; + + for (auto it = instances.begin() + 1; it != instances.end(); it++, index++) { auto inst = *it; auto domain_separator = std::to_string(index); - inst->instance_size = transcript->template receive_from_prover(domain_separator + "_circuit_size"); - inst->public_input_size = - transcript->template receive_from_prover(domain_separator + "_public_input_size"); - inst->pub_inputs_offset = - transcript->template receive_from_prover(domain_separator + "_pub_inputs_offset"); - - for (size_t i = 0; i < inst->public_input_size; ++i) { - auto public_input_i = - transcript->template receive_from_prover(domain_separator + "_public_input_" + std::to_string(i)); - inst->public_inputs.emplace_back(public_input_i); - } - auto [eta, beta, gamma] = challenges_to_field_elements(transcript->get_challenges( - domain_separator + "_eta", domain_separator + "_beta", domain_separator + "_gamma")); - - const FF public_input_delta = compute_public_input_delta( - inst->public_inputs, beta, gamma, inst->instance_size, inst->pub_inputs_offset); - const FF lookup_grand_product_delta = compute_lookup_grand_product_delta(beta, gamma, inst->instance_size); - inst->relation_parameters = - RelationParameters{ eta, beta, gamma, public_input_delta, lookup_grand_product_delta }; - inst->alpha = transcript->get_challenge(domain_separator + "_alpha"); + receive_and_finalise_instance(inst, domain_separator); } } template -VerifierFoldingResult ProtoGalaxyVerifier_< - VerifierInstances>::fold_public_parameters(std::vector fold_data) +bool ProtoGalaxyVerifier_::verify_folding_proof(std::vector fold_data) { - using Flavor = typename VerifierInstances::Flavor; - prepare_for_folding(fold_data); - FF delta = transcript->get_challenge("delta"); + + auto delta = transcript->get_challenge("delta"); auto accumulator = get_accumulator(); - auto log_instance_size = static_cast(numeric::get_msb(accumulator->instance_size)); - auto deltas = compute_round_challenge_pows(log_instance_size, delta); - std::vector perturbator_coeffs(log_instance_size + 1); - for (size_t idx = 0; idx <= log_instance_size; idx++) { + auto deltas = compute_round_challenge_pows(accumulator->log_instance_size, delta); + + std::vector perturbator_coeffs(accumulator->log_instance_size + 1); + for (size_t idx = 0; idx <= accumulator->log_instance_size; idx++) { perturbator_coeffs[idx] = transcript->template receive_from_prover("perturbator_" + std::to_string(idx)); } + ASSERT(perturbator_coeffs[0] == accumulator->folding_parameters.target_sum); auto perturbator = Polynomial(perturbator_coeffs); FF perturbator_challenge = transcript->get_challenge("perturbator_challenge"); auto perturbator_at_challenge = perturbator.evaluate(perturbator_challenge); - // Thed degree of K(X) is dk - k - 1 = k(d - 1) - 1. Hence we need k(d - 1) evaluations to represent it. - std::array combiner_quotient_evals = {}; + // The degree of K(X) is dk - k - 1 = k(d - 1) - 1. Hence we need k(d - 1) evaluations to represent it. 
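The `assert`/`ASSERT` added on both sides above encode the fact that the perturbator's constant coefficient equals the accumulator's current target sum, F(0) = e, while its evaluation at the perturbator challenge becomes the "compressed perturbator" fed into the next target sum. A toy sketch of that check and evaluation (Horner's rule; `double` stands in for the field type, and the names are illustrative, not the library's API):

```
#include <cassert>
#include <iostream>
#include <vector>

// Evaluate a polynomial given by its coefficients c_0 + c_1 x + c_2 x^2 + ... via Horner's rule.
template <typename FF> FF evaluate(const std::vector<FF>& coeffs, FF x)
{
    FF acc(0);
    for (auto it = coeffs.rbegin(); it != coeffs.rend(); ++it) {
        acc = acc * x + *it;
    }
    return acc;
}

int main()
{
    std::vector<double> perturbator{ 7.0, 2.0, -1.0 }; // toy F(X) = 7 + 2X - X^2, so F(0) = 7
    double target_sum = 7.0;                           // the accumulator's current e
    assert(perturbator[0] == target_sum);              // mirrors the added consistency check
    // F evaluated at the perturbator challenge becomes the "compressed perturbator".
    std::cout << evaluate(perturbator, 0.5) << "\n";   // 7.75
}
```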
+ std::array combiner_quotient_evals; for (size_t idx = 0; idx < VerifierInstances::BATCHED_EXTENDED_LENGTH - VerifierInstances::NUM; idx++) { combiner_quotient_evals[idx] = transcript->template receive_from_prover( "combiner_quotient_" + std::to_string(idx + VerifierInstances::NUM)); @@ -64,14 +155,99 @@ VerifierFoldingResult ProtoGalaxyVerifier_< auto combiner_quotient_at_challenge = combiner_quotient.evaluate(combiner_challenge); auto vanishing_polynomial_at_challenge = combiner_challenge * (combiner_challenge - FF(1)); - auto lagrange_0_at_challenge = FF(1) - combiner_challenge; + auto lagranges = std::vector{ FF(1) - combiner_challenge, combiner_challenge }; + + // Compute next folding parameters and verify against the ones received from the prover + auto expected_next_target_sum = + perturbator_at_challenge * lagranges[0] + vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + auto next_target_sum = transcript->template receive_from_prover("next_target_sum"); + bool verified = (expected_next_target_sum == next_target_sum); + auto expected_betas_star = + update_gate_challenges(perturbator_challenge, accumulator->folding_parameters.gate_challenges, deltas); + for (size_t idx = 0; idx < accumulator->log_instance_size; idx++) { + auto beta_star = transcript->template receive_from_prover("next_gate_challenge_" + std::to_string(idx)); + verified = verified & (expected_betas_star[idx] == beta_star); + } + + // Compute ϕ and verify against the data received from the prover + WitnessCommitments acc_witness_commitments; + auto witness_labels = commitment_labels.get_witness(); + size_t comm_idx = 0; + for (auto& expected_comm : acc_witness_commitments.get_all()) { + expected_comm = Commitment::infinity(); + size_t inst = 0; + for (auto& instance : instances) { + expected_comm = expected_comm + instance->witness_commitments.get_all()[comm_idx] * lagranges[inst]; + inst++; + } + auto comm = transcript->template receive_from_prover("next_" + witness_labels[comm_idx]); + verified = verified & (comm == expected_comm); + comm_idx++; + } - auto new_target_sum = perturbator_at_challenge * lagrange_0_at_challenge + - vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + std::vector folded_public_inputs(instances[0]->public_inputs.size(), 0); + size_t el_idx = 0; + for (auto& expected_el : folded_public_inputs) { + size_t inst = 0; + for (auto& instance : instances) { + expected_el += instance->public_inputs[el_idx] * lagranges[inst]; + inst++; + } + auto el = transcript->template receive_from_prover("next_public_input" + std::to_string(el_idx)); + verified = verified & (el == expected_el); + el_idx++; + } + + auto expected_alpha = FF(0); + auto expected_parameters = proof_system::RelationParameters{}; + for (size_t inst_idx = 0; inst_idx < VerifierInstances::NUM; inst_idx++) { + auto instance = instances[inst_idx]; + expected_alpha += instance->alpha * lagranges[inst_idx]; + expected_parameters.eta += instance->relation_parameters.eta * lagranges[inst_idx]; + expected_parameters.beta += instance->relation_parameters.beta * lagranges[inst_idx]; + expected_parameters.gamma += instance->relation_parameters.gamma * lagranges[inst_idx]; + expected_parameters.public_input_delta += + instance->relation_parameters.public_input_delta * lagranges[inst_idx]; + expected_parameters.lookup_grand_product_delta += + instance->relation_parameters.lookup_grand_product_delta * lagranges[inst_idx]; + } + + auto next_alpha = transcript->template receive_from_prover("next_alpha"); + verified = 
verified & (next_alpha == expected_alpha); + info(verified); + auto next_eta = transcript->template receive_from_prover("next_eta"); + verified = verified & (next_eta == expected_parameters.eta); + info(verified); + + auto next_beta = transcript->template receive_from_prover("next_beta"); + verified = verified & (next_beta == expected_parameters.beta); + + auto next_gamma = transcript->template receive_from_prover("next_gamma"); + verified = verified & (next_gamma == expected_parameters.gamma); + + auto next_public_input_delta = transcript->template receive_from_prover("next_public_input_delta"); + verified = verified & (next_public_input_delta == expected_parameters.public_input_delta); + + auto next_lookup_grand_product_delta = + transcript->template receive_from_prover("next_lookup_grand_product_delta"); + verified = verified & (next_lookup_grand_product_delta == expected_parameters.lookup_grand_product_delta); + + auto acc_vk = std::make_shared(instances[0]->instance_size, instances[0]->public_input_size); + auto vk_labels = commitment_labels.get_precomputed(); + size_t vk_idx = 0; + for (auto& expected_vk : acc_vk->get_all()) { + size_t inst = 0; + expected_vk = Commitment::infinity(); + for (auto& instance : instances) { + expected_vk = expected_vk + instance->verification_key->get_all()[vk_idx] * lagranges[inst]; + inst++; + } + auto vk = transcript->template receive_from_prover("next_" + vk_labels[vk_idx]); + verified = verified & (vk == expected_vk); + vk_idx++; + } - VerifierFoldingResult res; - res.parameters.target_sum = new_target_sum; - return res; + return verified; } template class ProtoGalaxyVerifier_>; diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.hpp index 3c5a4ed8ef1e..c723532a5b9c 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.hpp @@ -12,17 +12,23 @@ template class ProtoGalaxyVerifier_ { using Flavor = typename VerifierInstances::Flavor; using Transcript = typename Flavor::Transcript; using FF = typename Flavor::FF; + using Commitment = typename Flavor::Commitment; using Instance = typename VerifierInstances::Instance; using VerificationKey = typename Flavor::VerificationKey; + using WitnessCommitments = typename Flavor::WitnessCommitments; + using CommitmentLabels = typename Flavor::CommitmentLabels; + + VerifierInstances instances; - VerifierInstances verifier_instances; std::shared_ptr transcript = std::make_shared(); + CommitmentLabels commitment_labels; + ProtoGalaxyVerifier_(VerifierInstances insts) - : verifier_instances(insts){}; + : instances(insts){}; ~ProtoGalaxyVerifier_() = default; /** - * @brief For a new round challenge δ at each iteration of the ProtoGalaxy protocol, compute the vector + * @brief Given a new round challenge δ for each iteration of the full ProtoGalaxy protocol, compute the vector * [δ, δ^2,..., δ^t] where t = logn and n is the size of the instance. 
*/ static std::vector compute_round_challenge_pows(size_t log_instance_size, FF round_challenge) @@ -35,21 +41,47 @@ template class ProtoGalaxyVerifier_ { return pows; } - std::shared_ptr get_accumulator() { return verifier_instances[0]; } + static std::vector update_gate_challenges(const FF perturbator_challenge, + const std::vector& gate_challenges, + const std::vector& round_challenges) + { + auto log_instance_size = gate_challenges.size(); + std::vector next_gate_challenges(log_instance_size); + next_gate_challenges[0] = 1; + + for (size_t idx = 1; idx < log_instance_size; idx++) { + next_gate_challenges[idx] = gate_challenges[idx] + perturbator_challenge * round_challenges[idx - 1]; + } + return next_gate_challenges; + } + + std::shared_ptr get_accumulator() { return instances[0]; } /** - * @brief Instatiate the VerifierInstances and the VerifierTranscript. + * @brief Instatiate the instances and the transcript. * * @param fold_data The data transmitted via the transcript by the prover. */ - void prepare_for_folding(std::vector fold_data); + void prepare_for_folding(const std::vector&); + + /** + * @brief Instantiatied the accumulator (i.e. the relaxed instance) from the transcript. + * + */ + void receive_accumulator(const std::shared_ptr&, const std::string&); + + /** + * @brief Process the public data ϕ for the Instances to be folded. + * + */ + void receive_and_finalise_instance(const std::shared_ptr&, const std::string&); /** - * @brief Run the folding protocol on the verifier side. + * @brief Run the folding protocol on the verifier side to establish whether the public data ϕ of the new + * accumulator, received from the prover is the same as that produced by the verifier. * - * TODO(https://github.com/AztecProtocol/barretenberg/issues/690): finalise the implementation of this function */ - VerifierFoldingResult fold_public_parameters(std::vector fold_data); + bool verify_folding_proof(std::vector); }; extern template class ProtoGalaxyVerifier_>; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.fuzzer.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.fuzzer.hpp index 18e2cbe129e9..5111ea71a548 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.fuzzer.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.fuzzer.hpp @@ -266,7 +266,7 @@ template class BigFieldBase { mask = (uint256_t(1) << mask_size) - 1; // Choose the bit range // Return instruction - return { .id = instruction_opcode, .arguments.element = Element(temp & mask) }; + return { .id = instruction_opcode, .arguments.element = Element(static_cast(temp & mask)) }; break; case OPCODE::RANDOMSEED: diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/instances.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/instances.hpp index 1fc658eae9cd..0fcc8d276149 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/instances.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/instances.hpp @@ -8,6 +8,7 @@ template struct ProverInstances_ { public: static_assert(NUM_ > 0, "Must have at least one prover instance"); using Flavor = Flavor_; + using FoldingParameters = typename Flavor::FoldingParameters; using FF = typename Flavor::FF; static constexpr size_t NUM = NUM_; using Instance = ProverInstance_; @@ -21,6 +22,7 @@ template struct ProverInstances_ { ArrayType _data; RelationParameters relation_parameters; AlphaType alpha; + std::vector next_gate_challenges; 
std::shared_ptr const& operator[](size_t idx) const { return _data[idx]; } typename ArrayType::iterator begin() { return _data.begin(); }; @@ -54,16 +56,15 @@ template struct ProverInstances_ { */ std::vector> row_to_univariates(size_t row_idx) const { - auto instance_polynomial_views = get_polynomial_views(); + auto insts_prover_polynomials_views = get_polynomials_views(); std::vector> results; - // Initialize to our amount of columns - results.resize(instance_polynomial_views[0].size()); + // Set the size corresponding to the number of rows in the execution trace + results.resize(insts_prover_polynomials_views[0].size()); size_t instance_idx = 0; - // Iterate instances - for (auto& get_all : instance_polynomial_views) { - // Iterate columns + // Iterate over the prover polynomials' views corresponding to each instance + for (auto& get_all : insts_prover_polynomials_views) { + // Iterate over all columns in the trace execution of an instance and extract their value at row_idx. for (auto [result, poly_ptr] : zip_view(results, get_all)) { - // Assign row for each instance result.evaluations[instance_idx] = (poly_ptr)[row_idx]; } instance_idx++; @@ -72,9 +73,10 @@ template struct ProverInstances_ { } private: - auto get_polynomial_views() const + // Returns a vector containing pointer views to the prover polynomials corresponding to each instance. + auto get_polynomials_views() const { - // As a practical measure, get the first instance's pointer view to deduce the vector type + // As a practical measure, get the first instance's view to deduce the vector type std::vector get_alls{ _data[0]->prover_polynomials.get_all() }; // complete the views, starting from the second item for (size_t i = 1; i < NUM; i++) { @@ -97,14 +99,10 @@ template struct VerifierInstances_ { std::shared_ptr const& operator[](size_t idx) const { return _data[idx]; } typename ArrayType::iterator begin() { return _data.begin(); }; typename ArrayType::iterator end() { return _data.end(); }; - VerifierInstances_(std::vector> vks) + + VerifierInstances_() { - ASSERT(vks.size() == NUM); - for (size_t idx = 0; idx < vks.size(); idx++) { - Instance inst; - inst.verification_key = std::move(vks[idx]); - _data[idx] = std::make_unique(inst); - } + std::generate(_data.begin(), _data.end(), []() { return std::make_unique(); }); }; }; } // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp index 8e1a6ede9498..4e5a5d661236 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp @@ -353,6 +353,9 @@ template void ProverInstance_::initialize_prover_polynomi size_t idx = i + pub_inputs_offset; public_inputs.emplace_back(public_wires_source[idx]); } + + instance_size = proving_key->circuit_size; + log_instance_size = static_cast(numeric::get_msb(instance_size)); } template void ProverInstance_::compute_sorted_accumulator_polynomials(FF eta) diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp index 5f12dc0c1dd1..ea09aa6fceb4 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp @@ -3,7 +3,6 @@ #include "barretenberg/flavor/goblin_ultra.hpp" #include "barretenberg/flavor/ultra.hpp" 
#include "barretenberg/proof_system/composer/composer_lib.hpp" -#include "barretenberg/protogalaxy/folding_result.hpp" #include "barretenberg/relations/relation_parameters.hpp" #include "barretenberg/srs/factories/file_crs_factory.hpp" @@ -28,6 +27,7 @@ template class ProverInstance_ { using ProverPolynomials = typename Flavor::ProverPolynomials; using Polynomial = typename Flavor::Polynomial; using WitnessCommitments = typename Flavor::WitnessCommitments; + using CommitmentLabels = typename Flavor::CommitmentLabels; public: std::shared_ptr proving_key; @@ -35,6 +35,7 @@ template class ProverInstance_ { ProverPolynomials prover_polynomials; WitnessCommitments witness_commitments; + CommitmentLabels commitment_labels; std::array sorted_polynomials; @@ -50,6 +51,9 @@ template class ProverInstance_ { std::vector recursive_proof_public_input_indices; // non-empty for the accumulated instances FoldingParameters folding_parameters; + bool is_accumulator = false; + size_t instance_size; + size_t log_instance_size; ProverInstance_(Circuit& circuit) { @@ -58,12 +62,6 @@ template class ProverInstance_ { compute_witness(circuit); } - ProverInstance_(FoldingResult result) - : verification_key(std::move(result.verification_key)) - , prover_polynomials(result.folded_prover_polynomials) - , public_inputs(result.folded_public_inputs) - , folding_parameters(result.folding_parameters){}; - ProverInstance_() = default; ~ProverInstance_() = default; diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/verifier_instance.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/verifier_instance.hpp index fb14bd32b8bf..06fdc47f264a 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/verifier_instance.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/verifier_instance.hpp @@ -8,14 +8,20 @@ template class VerifierInstance_ { using FF = typename Flavor::FF; using VerificationKey = typename Flavor::VerificationKey; using FoldingParameters = typename Flavor::FoldingParameters; + using WitnessCommitments = typename Flavor::WitnessCommitments; + using CommitmentLabels = typename Flavor::CommitmentLabels; std::shared_ptr verification_key; std::vector public_inputs; - size_t pub_inputs_offset; + size_t pub_inputs_offset = 0; size_t public_input_size; size_t instance_size; + size_t log_instance_size; RelationParameters relation_parameters; FF alpha; + bool is_accumulator = false; FoldingParameters folding_parameters; + WitnessCommitments witness_commitments; + CommitmentLabels commitment_labels; }; } // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/protogalaxy.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/protogalaxy.test.cpp index fac96d16cc68..caf1285e0b8a 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/protogalaxy.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/protogalaxy.test.cpp @@ -6,14 +6,18 @@ using namespace barretenberg; using namespace proof_system::honk; using Flavor = flavor::Ultra; +using VerificationKey = Flavor::VerificationKey; using Instance = ProverInstance_; using Instances = ProverInstances_; using ProtoGalaxyProver = ProtoGalaxyProver_; using FF = Flavor::FF; +using Affine = Flavor::Commitment; +using Projective = Flavor::GroupElement; using Builder = Flavor::CircuitBuilder; using Polynomial = typename Flavor::Polynomial; using ProverPolynomials = Flavor::ProverPolynomials; using RelationParameters = proof_system::RelationParameters; +using WitnessCommitments = 
typename Flavor::WitnessCommitments; const size_t NUM_POLYNOMIALS = Flavor::NUM_ALL_ENTITIES; namespace protogalaxy_tests { @@ -81,6 +85,26 @@ ProverPolynomials construct_ultra_full_polynomials(auto& input_polynomials) return full_polynomials; } +std::shared_ptr construct_ultra_verification_key(size_t instance_size, size_t num_public_inputs) +{ + auto verification_key = std::make_shared(instance_size, num_public_inputs); + auto vk_view = verification_key->get_all(); + for (auto& view : vk_view) { + view = Affine(Projective::random_element()); + } + return verification_key; +} + +WitnessCommitments construct_witness_commitments() +{ + WitnessCommitments wc; + auto w_view = wc.get_all(); + for (auto& view : w_view) { + view = Affine(Projective::random_element()); + } + return wc; +} + class ProtoGalaxyTests : public ::testing::Test { public: static void SetUpTestSuite() { barretenberg::srs::init_crs_factory("../srs_db/ignition"); } @@ -159,11 +183,9 @@ TEST_F(ProtoGalaxyTests, PerturbatorPolynomial) target_sum += full_honk_evals[i] * pow_beta[i]; } - auto accumulator = std::make_shared( - FoldingResult{ .folded_prover_polynomials = full_polynomials, - .folded_public_inputs = std::vector{}, - .verification_key = std::make_shared(), - .folding_parameters = { betas, target_sum } }); + auto accumulator = std::make_shared(); + accumulator->prover_polynomials = full_polynomials; + accumulator->folding_parameters = { betas, target_sum }; accumulator->relation_parameters = relation_parameters; accumulator->alpha = alpha; @@ -225,7 +247,7 @@ TEST_F(ProtoGalaxyTests, FoldChallenges) instance2->relation_parameters.eta = 3; Instances instances{ { instance1, instance2 } }; - ProtoGalaxyProver::fold_relation_parameters(instances); + ProtoGalaxyProver::combine_relation_parameters(instances); Univariate expected_eta{ { 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23 } }; EXPECT_EQ(instances.relation_parameters.eta, expected_eta); @@ -246,10 +268,67 @@ TEST_F(ProtoGalaxyTests, FoldAlpha) instance2->alpha = 4; Instances instances{ { instance1, instance2 } }; - ProtoGalaxyProver::fold_alpha(instances); + ProtoGalaxyProver::combine_alpha(instances); Univariate expected_alpha{ { 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26 } }; EXPECT_EQ(instances.alpha, expected_alpha); } +// TODO(https://github.com/AztecProtocol/barretenberg/issues/807): Have proper full folding testing (both failing and +// passing) and move creating a test accumulator in a separate function. 
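The `ComputeNewAccumulator` test that follows builds a dummy accumulator whose target sum is recomputed from scratch as e = Σ_{i<n} f_i · pow_β(i), where f_i are the full Honk relation evaluations and pow_β(i) = Π_j β_j^{i_j} over the bits of the row index i. A standalone sketch of that computation — the generic numeric type and function name are assumptions, not the library's helpers:

```
#include <cstddef>
#include <iostream>
#include <vector>

// Recompute a ProtoGalaxy-style target sum e = sum_i f_i * pow_beta(i), where pow_beta(i)
// multiplies beta_j for every set bit j of the row index i.
template <typename FF>
FF compute_target_sum(const std::vector<FF>& full_honk_evals, const std::vector<FF>& betas)
{
    const size_t instance_size = size_t{ 1 } << betas.size();
    FF target_sum(0);
    for (size_t i = 0; i < instance_size; i++) {
        FF pow_beta(1);
        for (size_t j = 0; j < betas.size(); j++) {
            if ((i >> j) & 1) {
                pow_beta = pow_beta * betas[j];
            }
        }
        target_sum = target_sum + full_honk_evals[i] * pow_beta;
    }
    return target_sum;
}

int main()
{
    // log_instance_size = 2, so 4 rows; values are arbitrary toy data.
    std::vector<double> evals{ 1, 2, 3, 4 };
    std::vector<double> betas{ 5, 7 };
    std::cout << compute_target_sum(evals, betas) << "\n"; // 1*1 + 2*5 + 3*7 + 4*35 = 172
}
```

The test's `compute_pow_polynomial_at_values` and target-sum loop compute the same quantity over the flavor's field.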
+TEST_F(ProtoGalaxyTests, ComputeNewAccumulator) +{ + const size_t log_instance_size(4); + const size_t instance_size(1 << log_instance_size); + + std::array, NUM_POLYNOMIALS> random_polynomials; + for (auto& poly : random_polynomials) { + poly = get_random_polynomial(instance_size); + } + auto full_polynomials = construct_ultra_full_polynomials(random_polynomials); + auto relation_parameters = proof_system::RelationParameters::get_random(); + auto alpha = FF::random_element(); + + auto full_honk_evals = + ProtoGalaxyProver::compute_full_honk_evaluations(full_polynomials, alpha, relation_parameters); + std::vector betas(log_instance_size); + for (size_t idx = 0; idx < log_instance_size; idx++) { + betas[idx] = FF::random_element(); + } + + // Construct pow(\vec{betas}) as in the paper + auto pow_beta = ProtoGalaxyProver::compute_pow_polynomial_at_values(betas, instance_size); + + // Compute the corresponding target sum and create a dummy accumulator + auto target_sum = FF(0); + for (size_t i = 0; i < instance_size; i++) { + target_sum += full_honk_evals[i] * pow_beta[i]; + } + + auto accumulator = std::make_shared(); + accumulator->witness_commitments = construct_witness_commitments(); + accumulator->instance_size = instance_size; + accumulator->log_instance_size = log_instance_size; + accumulator->prover_polynomials = full_polynomials; + accumulator->folding_parameters = { betas, target_sum }; + accumulator->relation_parameters = relation_parameters; + accumulator->alpha = alpha; + accumulator->is_accumulator = true; + accumulator->public_inputs = std::vector{ FF::random_element() }; + accumulator->verification_key = construct_ultra_verification_key(instance_size, 1); + + auto builder = typename Flavor::CircuitBuilder(); + auto composer = UltraComposer(); + builder.add_public_variable(FF(1)); + + auto instance = composer.create_instance(builder); + auto instances = std::vector>{ accumulator, instance }; + auto folding_prover = composer.create_folding_prover(instances, composer.commitment_key); + auto folding_verifier = composer.create_folding_verifier(); + + auto proof = folding_prover.fold_instances(); + auto res = folding_verifier.verify_folding_proof(proof.folding_data); + EXPECT_EQ(res, true); +} + } // namespace protogalaxy_tests \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.hpp index 0d4dd752d080..7daaab2e1d3d 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.hpp @@ -90,21 +90,17 @@ template class UltraComposer_ { */ MergeVerifier_ create_merge_verifier() { return MergeVerifier_(); } - ProtoGalaxyProver_ create_folding_prover(const std::vector>& instances) + ProtoGalaxyProver_ create_folding_prover(const std::vector>& instances, + const std::shared_ptr& commitment_key) { - ProverInstances insts(instances); - ProtoGalaxyProver_ output_state(insts); + ProtoGalaxyProver_ output_state(instances, commitment_key); return output_state; }; - ProtoGalaxyVerifier_ create_folding_verifier( - const std::vector>& instances) + ProtoGalaxyVerifier_ create_folding_verifier() { - std::vector> vks; - for (const auto& inst : instances) { - vks.emplace_back(inst->verification_key); - } - VerifierInstances insts(vks); + + auto insts = VerifierInstances(); ProtoGalaxyVerifier_ output_state(insts); return output_state; diff --git a/barretenberg/scripts/bindgen.sh 
b/barretenberg/scripts/bindgen.sh index 1a2034a213a3..e3080a848242 100755 --- a/barretenberg/scripts/bindgen.sh +++ b/barretenberg/scripts/bindgen.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu #find ./cpp/src -type f -name "c_bind*.hpp" | ./scripts/decls_json.py > exports.json diff --git a/barretenberg/sol/scripts/init.sh b/barretenberg/sol/scripts/init.sh index 147adc9ae219..70868e378083 100755 --- a/barretenberg/sol/scripts/init.sh +++ b/barretenberg/sol/scripts/init.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash PLONK_FLAVOUR="ultra" diff --git a/barretenberg/sol/scripts/run_fuzzer.sh b/barretenberg/sol/scripts/run_fuzzer.sh index e6e235a85e51..2c76ad1de435 100755 --- a/barretenberg/sol/scripts/run_fuzzer.sh +++ b/barretenberg/sol/scripts/run_fuzzer.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash PLONK_FLAVOUR=${1:-"ultra"} CIRCUIT_FLAVOUR=${2:-"blake"} diff --git a/barretenberg/ts/bootstrap.sh b/barretenberg/ts/bootstrap.sh index 8b2d5b76c667..2f0fa19bb81e 100755 --- a/barretenberg/ts/bootstrap.sh +++ b/barretenberg/ts/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/barretenberg/ts/scripts/cjs_postprocess.sh b/barretenberg/ts/scripts/cjs_postprocess.sh index ccfcfc2d8a20..8a805bcdd440 100755 --- a/barretenberg/ts/scripts/cjs_postprocess.sh +++ b/barretenberg/ts/scripts/cjs_postprocess.sh @@ -11,4 +11,6 @@ DIR="./dest/node-cjs" for FILE in $(find "$DIR" -name "*.js"); do # Use sed to replace 'import.meta.url' with '""' sed -i.bak 's/import\.meta\.url/""/g' "$FILE" && rm "$FILE.bak" + # Use sed to remove any lines postfixed // POSTPROCESS ESM ONLY + sed -i.bak '/\/\/ POSTPROCESS ESM ONLY$/d' "$FILE" && rm "$FILE.bak" done \ No newline at end of file diff --git a/barretenberg/ts/scripts/run_tests b/barretenberg/ts/scripts/run_tests index ed93c9a80916..0f0d2895cec9 100755 --- a/barretenberg/ts/scripts/run_tests +++ b/barretenberg/ts/scripts/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -xe $(aws ecr get-login --region us-east-2 --no-include-email) 2> /dev/null diff --git a/barretenberg/ts/src/barretenberg/index.ts b/barretenberg/ts/src/barretenberg/index.ts index 6019b24e88fa..6b00a9b752a1 100644 --- a/barretenberg/ts/src/barretenberg/index.ts +++ b/barretenberg/ts/src/barretenberg/index.ts @@ -65,7 +65,7 @@ export class BarretenbergSync extends BarretenbergApiSync { static getSingleton() { if (!barretenbergSyncSingleton) { - throw new Error('Initialise first via initSingleton().'); + throw new Error('First call BarretenbergSync.initSingleton() on @aztec/bb.js module.'); } return barretenbergSyncSingleton; } @@ -75,10 +75,8 @@ export class BarretenbergSync extends BarretenbergApiSync { } } -// If we're loading this module in a test environment, just init the singleton immediately for convienience. -if (process.env.NODE_ENV === 'test') { - // Need to ignore for cjs build. - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - await BarretenbergSync.initSingleton(); -} +// If we're in ESM environment, use top level await. CJS users need to call it manually. +// Need to ignore for cjs build. +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +await BarretenbergSync.initSingleton(); // POSTPROCESS ESM ONLY diff --git a/bootstrap.sh b/bootstrap.sh index 5a953eee6b59..ac402f07baaf 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Usage: # Bootstraps the repo. 
End to end tests should be runnable after a bootstrap: # ./bootstrap.sh diff --git a/bootstrap/bootstrap_test.sh b/bootstrap/bootstrap_test.sh index 805f0d0ef0ed..8e05b2650efd 100755 --- a/bootstrap/bootstrap_test.sh +++ b/bootstrap/bootstrap_test.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script takes the state of your current repository, and clones it inside of a docker container. # You likely don't have a fresh clone, and it's paramount that to test bootstrapping, we don't have any # intermediate build state in the context. diff --git a/bootstrap_docker.sh b/bootstrap_docker.sh index 6fc48b2b118b..6d086d1d3314 100755 --- a/bootstrap_docker.sh +++ b/bootstrap_docker.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script builds the projects listed in build_manifest.sh, terminating when it reaches PROJECT_NAME. # If run from within a project, it will build only that project, unless env var ONLY_TARGET=false. # diff --git a/build-system/.gitrepo b/build-system/.gitrepo index b1fdb9bd8727..5b99662494d6 100644 --- a/build-system/.gitrepo +++ b/build-system/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/build-system branch = master - commit = 4e1ea3fc2cdbbfdb97d21b0920eb681ede20c333 - parent = 379b5adc259ac69b01e61b852172cdfc87cf9350 + commit = 1f7b38d90be36c46b46ac52c5d63ce7a0d627ecf + parent = 9c5443651faaed3dcb9fae36727337a34ce5922b method = merge cmdver = 0.4.6 diff --git a/build-system/remote/bootstrap_build_instance.sh b/build-system/remote/bootstrap_build_instance.sh index 535bcdd7ce72..c911eb113b32 100644 --- a/build-system/remote/bootstrap_build_instance.sh +++ b/build-system/remote/bootstrap_build_instance.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null sudo apt update diff --git a/build-system/scripts/add_timestamps b/build-system/scripts/add_timestamps index 8b658925ba50..230d51fb6263 100755 --- a/build-system/scripts/add_timestamps +++ b/build-system/scripts/add_timestamps @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash while IFS= read -r line; do printf '%(%Y-%m-%d %H:%M:%S)T %s\n' -1 "$line" done diff --git a/build-system/scripts/build b/build-system/scripts/build index 6906a275149e..c82e6bde0a69 100755 --- a/build-system/scripts/build +++ b/build-system/scripts/build @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Builds a docker image and pushes it to it's repository. Leverages caches where possible. # Cached images include previous successfully built images (including multi-stages) built on this branch. diff --git a/build-system/scripts/build_local b/build-system/scripts/build_local index cdb9f3a60f3f..0bc99f17d4a2 100755 --- a/build-system/scripts/build_local +++ b/build-system/scripts/build_local @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Builds the PROJECTS in the given order. # Will terminate build at TARGET_PROJECT (if given). # Will only build TARGET_PROJECT if ONLY_TARGET given. 
diff --git a/build-system/scripts/calculate_content_hash b/build-system/scripts/calculate_content_hash index 28ae8ff2e629..7b7b68ade718 100755 --- a/build-system/scripts/calculate_content_hash +++ b/build-system/scripts/calculate_content_hash @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/calculate_image_tag b/build-system/scripts/calculate_image_tag index c273648287ce..f2a3cea871f4 100755 --- a/build-system/scripts/calculate_image_tag +++ b/build-system/scripts/calculate_image_tag @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Return a repositories build cache image tag based on content hash. # If the second argument is set: # It's used to suffix the tag with the given unique arch descriptor. diff --git a/build-system/scripts/calculate_image_uri b/build-system/scripts/calculate_image_uri index 8efd7ab4cdc4..c107647434c5 100755 --- a/build-system/scripts/calculate_image_uri +++ b/build-system/scripts/calculate_image_uri @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/calculate_rebuild_files b/build-system/scripts/calculate_rebuild_files index 9be1afd49124..49e076349559 100755 --- a/build-system/scripts/calculate_rebuild_files +++ b/build-system/scripts/calculate_rebuild_files @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/check_rebuild b/build-system/scripts/check_rebuild index 76f99c6540e2..df2291065e42 100755 --- a/build-system/scripts/check_rebuild +++ b/build-system/scripts/check_rebuild @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # If this script fails (nonzero exit), then the caller should rebuild. 
[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/clean_image_tags b/build-system/scripts/clean_image_tags index 832fdf4cda6d..9c4557e68c6b 100755 --- a/build-system/scripts/clean_image_tags +++ b/build-system/scripts/clean_image_tags @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -e @@ -20,7 +20,7 @@ fi # Collect all the commits ids in the repository and remove the remote for faster lookups # See warning in https://github.blog/2020-12-21-get-up-to-speed-with-partial-clone-and-shallow-clone/ -git config fetch.recurseSubmodules false +git config fetch.recurseSubmodules false git fetch --filter=tree:0 origin ORIGIN_URL=$(git remote get-url origin) git remote remove origin @@ -29,7 +29,7 @@ git remote remove origin # This happens for all commits tagged for PRs that then get squashed and merged IFS=$'\n' for TAG in $IMAGE_TAGS; do - if [[ $TAG =~ ^cache-[0-9a-fA-F]+-builder$ ]]; then + if [[ $TAG =~ ^cache-[0-9a-fA-F]+-builder$ ]]; then TAG_COMMIT=$(echo "$TAG" | cut -d '-' -f 2) if git cat-file -e $TAG_COMMIT; then echo "Commit for $TAG found" diff --git a/build-system/scripts/cond_run_compose b/build-system/scripts/cond_run_compose index ee412325a6a7..d1d14c6baca2 100755 --- a/build-system/scripts/cond_run_compose +++ b/build-system/scripts/cond_run_compose @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/cond_run_container b/build-system/scripts/cond_run_container index ca8d67fc0bdc..12badea45afa 100755 --- a/build-system/scripts/cond_run_container +++ b/build-system/scripts/cond_run_container @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Conditionally runs a script if the REPOSITORY content hash has changed and we haven't had a successful run. # # Arguments are: diff --git a/build-system/scripts/cond_run_script b/build-system/scripts/cond_run_script index c1ada5f8cf2a..1eb6b3695d92 100755 --- a/build-system/scripts/cond_run_script +++ b/build-system/scripts/cond_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Conditionally runs a script if the REPOSITORY content hash has changed and we haven't had a successful run. 
# # Arguments are: diff --git a/build-system/scripts/cond_spot_run_build b/build-system/scripts/cond_spot_run_build index 3333e5dec7c2..a2e2663de898 100755 --- a/build-system/scripts/cond_spot_run_build +++ b/build-system/scripts/cond_spot_run_build @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/cond_spot_run_compose b/build-system/scripts/cond_spot_run_compose index 51a4529471a4..56da3e3da4e8 100755 --- a/build-system/scripts/cond_spot_run_compose +++ b/build-system/scripts/cond_spot_run_compose @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/cond_spot_run_container b/build-system/scripts/cond_spot_run_container index 48876a65f3ba..5b2ebc71f32a 100755 --- a/build-system/scripts/cond_spot_run_container +++ b/build-system/scripts/cond_spot_run_container @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/cond_spot_run_script b/build-system/scripts/cond_spot_run_script index 4b1832127ac6..23a678fcd32f 100755 --- a/build-system/scripts/cond_spot_run_script +++ b/build-system/scripts/cond_spot_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Conditionally runs a script on a remote spot instance if the REPOSITORY content hash has changed and we haven't had a # successful run. # The TAG_POSTFIX is used by cond_spot_run_test whereby we use an image tag postfixed with JOB_NAME to identifify if @@ -19,7 +19,12 @@ CPUS=$2 ARCH=$3 shift 3 -BASE_TAG=$(calculate_image_tag $REPOSITORY) +MULTIARCH=$(query_manifest multiarch $REPOSITORY) +if [ "$MULTIARCH" == "host" ]; then + BASE_TAG=$(calculate_image_tag $REPOSITORY $ARCH) +else + BASE_TAG=$(calculate_image_tag $REPOSITORY) +fi SUCCESS_TAG=$BASE_TAG if [ -n "${TAG_POSTFIX:-}" ]; then diff --git a/build-system/scripts/cond_spot_run_test b/build-system/scripts/cond_spot_run_test index e8b8f71358d5..cc48ffc742ea 100755 --- a/build-system/scripts/cond_spot_run_test +++ b/build-system/scripts/cond_spot_run_test @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/create_ecr_manifest b/build-system/scripts/create_ecr_manifest index 5bda420039d9..fcc96d7651e4 100755 --- a/build-system/scripts/create_ecr_manifest +++ b/build-system/scripts/create_ecr_manifest @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script: # 1. Logs into ECR and ensures we have the given repository # 2. 
Computes the image uri of the cached images for the given repository given the list of architectures diff --git a/build-system/scripts/deploy b/build-system/scripts/deploy index ca43d5b9cfb3..a158175c4142 100755 --- a/build-system/scripts/deploy +++ b/build-system/scripts/deploy @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_dockerhub b/build-system/scripts/deploy_dockerhub index 606e0952b5d6..b8d925b07110 100755 --- a/build-system/scripts/deploy_dockerhub +++ b/build-system/scripts/deploy_dockerhub @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_ecr b/build-system/scripts/deploy_ecr index 3be534694c7a..c156f99e71e4 100755 --- a/build-system/scripts/deploy_ecr +++ b/build-system/scripts/deploy_ecr @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_global b/build-system/scripts/deploy_global index 28b70a286194..2267474706fe 100755 --- a/build-system/scripts/deploy_global +++ b/build-system/scripts/deploy_global @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Deployment script for global service (e.g. company website and metrics). [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_npm b/build-system/scripts/deploy_npm index a35d68c46504..16df5a156d04 100755 --- a/build-system/scripts/deploy_npm +++ b/build-system/scripts/deploy_npm @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_s3 b/build-system/scripts/deploy_s3 index caaa4d00bd97..87acbf004d9e 100755 --- a/build-system/scripts/deploy_s3 +++ b/build-system/scripts/deploy_s3 @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_service b/build-system/scripts/deploy_service index b014ca0d8230..83c451281417 100755 --- a/build-system/scripts/deploy_service +++ b/build-system/scripts/deploy_service @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_terraform b/build-system/scripts/deploy_terraform index ce2231853aaf..b81c4d3f4b2f 100755 --- a/build-system/scripts/deploy_terraform +++ b/build-system/scripts/deploy_terraform @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/dockerhub_login b/build-system/scripts/dockerhub_login index dae523fb43b7..50a2068377b5 100755 --- a/build-system/scripts/dockerhub_login +++ b/build-system/scripts/dockerhub_login @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Retries up to 3 times with 10 second intervals for i in $(seq 1 3); do diff --git a/build-system/scripts/ecr_login b/build-system/scripts/ecr_login index 54a8e6d36da4..331b49d40241 100755 --- a/build-system/scripts/ecr_login +++ b/build-system/scripts/ecr_login @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu REGION=${1:-$ECR_REGION} # Retries up to 3 times with 10 second intervals diff --git a/build-system/scripts/ensure_repo b/build-system/scripts/ensure_repo index 
b767976354f0..b71c0fabb2f1 100755 --- a/build-system/scripts/ensure_repo +++ b/build-system/scripts/ensure_repo @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Logs the shell into the ECR instance at the given region, establishes if the given repository exists, creates it if it # doesn't, and re-applies thie lifecycle policy (determines when images should be automatically deleted) if it does. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace diff --git a/build-system/scripts/ensure_terraform b/build-system/scripts/ensure_terraform index d7444aa4e4c4..ce53dba1b31a 100755 --- a/build-system/scripts/ensure_terraform +++ b/build-system/scripts/ensure_terraform @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Downloads and installs `terraform` if it's not installed. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/extract_repo b/build-system/scripts/extract_repo index c2579b5a46f1..6bdae1027f90 100755 --- a/build-system/scripts/extract_repo +++ b/build-system/scripts/extract_repo @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Given a repository, extracts the builds entire /usr/src dir to the given path. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/extract_tag_version b/build-system/scripts/extract_tag_version index d142458104d7..c0f7c3fd165f 100755 --- a/build-system/scripts/extract_tag_version +++ b/build-system/scripts/extract_tag_version @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script takes a repository name as variable, # then checks if the commit tag variable (if any) # is a valid semver & echoes that valid semver. @@ -28,7 +28,7 @@ if [[ "$COMMIT_TAG" == *"/"* ]]; then COMMIT_TAG_VERSION="${COMMIT_TAG#*/}" echo "Tag was made for: $REPO_NAME" >&2 echo "Version: $COMMIT_TAG_VERSION" >&2 - + # Check if REPO_NAME is equal to REPOSITORY if [[ "$REPO_NAME" != "$REPOSITORY" ]]; then echo "REPO_NAME ($REPO_NAME) does not match REPOSITORY ($REPOSITORY). Exiting..." >&2 diff --git a/build-system/scripts/image_exists b/build-system/scripts/image_exists index 0a4bee54d7af..0ad9d90ae69a 100755 --- a/build-system/scripts/image_exists +++ b/build-system/scripts/image_exists @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Returns true if the given image exists in the current ECR. aws ecr describe-images --region=$ECR_REGION --repository-name=$1 --image-ids=imageTag=$2 > /dev/null 2>&1 diff --git a/build-system/scripts/init_submodules b/build-system/scripts/init_submodules index a4a9cd62db67..0b89b089866b 100755 --- a/build-system/scripts/init_submodules +++ b/build-system/scripts/init_submodules @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # For a given repository, init any required submodules. 
[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/query_manifest b/build-system/scripts/query_manifest index 1fee0ee57593..a6e846503318 100755 --- a/build-system/scripts/query_manifest +++ b/build-system/scripts/query_manifest @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu CMD=$1 diff --git a/build-system/scripts/remote_run_script b/build-system/scripts/remote_run_script index e9ef84d723d4..8e1d8adf38c2 100755 --- a/build-system/scripts/remote_run_script +++ b/build-system/scripts/remote_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Copies the runner script to the remote instance, runs it giving it script and args to run. # The runner script checks out the repository first and runs setup-env. # diff --git a/build-system/scripts/remote_runner b/build-system/scripts/remote_runner index ce1a567a1989..6283050283c8 100755 --- a/build-system/scripts/remote_runner +++ b/build-system/scripts/remote_runner @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/request_spot b/build-system/scripts/request_spot index 35c4d650bac9..3d669fed1df2 100755 --- a/build-system/scripts/request_spot +++ b/build-system/scripts/request_spot @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/setup_env b/build-system/scripts/setup_env index 8e4058a1c532..3486e5f86530 100755 --- a/build-system/scripts/setup_env +++ b/build-system/scripts/setup_env @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script sets up the global build environment. This should be called before any other build scripts, # as the other build scripts assume these global variables are set. The global variables are written to # the file in $BASH_ENV, which means that any new bash shells launched within the lifetime of the machine diff --git a/build-system/scripts/should_deploy b/build-system/scripts/should_deploy index 53f295b191f2..283191f51f3b 100755 --- a/build-system/scripts/should_deploy +++ b/build-system/scripts/should_deploy @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Returns success if we are expected to do a deployment. # Right now, that's only if we're master. set -eu diff --git a/build-system/scripts/should_release b/build-system/scripts/should_release index 5473fc6da765..701b8a375dbe 100755 --- a/build-system/scripts/should_release +++ b/build-system/scripts/should_release @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Returns true if we are expected to proceed with a release job. # Specifically if we have a commit tag, are master, or are being forced to release. # This script should be used at the start of all release steps to early out PR runs. diff --git a/build-system/scripts/spot_run_script b/build-system/scripts/spot_run_script index 7a2377973d5f..d5e77e35712d 100755 --- a/build-system/scripts/spot_run_script +++ b/build-system/scripts/spot_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Runs a test script on a remote spot instance. Arguments are: # 1. NAME: Used to identify spot jobs. # 2. CPUS: Number of cpus on spot request. 
diff --git a/build-system/scripts/tag_remote_image b/build-system/scripts/tag_remote_image index bc7675b237c6..0bb9e2a390f0 100755 --- a/build-system/scripts/tag_remote_image +++ b/build-system/scripts/tag_remote_image @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu @@ -34,7 +34,7 @@ if [ "$EXISTING_TAG_MANIFEST" != "$NEW_TAG_MANIFEST" ]; then --image-tag $NEW_TAG \ --image-manifest "$EXISTING_TAG_MANIFEST" 2>&1) TAG_EXIT_CODE=$? - + # If we failed to tag due to too many tags on this image, then clean some of them up and try again if [ $TAG_EXIT_CODE -ne 0 ] && $(echo $TAG_RESULT | grep -q LimitExceededException); then echo "Failed to tag due to limit exceeded. Starting tag cleanup." diff --git a/build-system/scripts/untag_remote_image b/build-system/scripts/untag_remote_image index f79fee729bc5..aa913c1200bf 100755 --- a/build-system/scripts/untag_remote_image +++ b/build-system/scripts/untag_remote_image @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash REPOSITORY=$1 TAG=$2 aws ecr batch-delete-image --region=$ECR_REGION --repository-name $REPOSITORY --image-ids imageTag=$2 > /dev/null \ No newline at end of file diff --git a/build-system/scripts/upload_logs_to_s3 b/build-system/scripts/upload_logs_to_s3 index ede934edae59..25c7fe2e50a6 100755 --- a/build-system/scripts/upload_logs_to_s3 +++ b/build-system/scripts/upload_logs_to_s3 @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Uploads to S3 the contents of the log file mounted on the end-to-end container, # which contains log entries with an associated event and metrics for it. diff --git a/build-system/start_interactive b/build-system/start_interactive index 2ed1f2df13a4..64e244997827 100755 --- a/build-system/start_interactive +++ b/build-system/start_interactive @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Starts an interactive shell with the build system initialised. # Good for playing around with build system on development machines. 
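Nearly every script in this section swaps `#!/bin/bash` for `#!/usr/bin/env bash`. The `env` form resolves bash from `PATH`, so the scripts keep working on systems where bash is not at `/bin/bash` (e.g. NixOS) or where the system bash is old (macOS ships bash 3.2 at `/bin/bash`, while a newer Homebrew bash sits earlier on `PATH`). A trivial way to see which interpreter actually runs:

```
#!/usr/bin/env bash
# With the env form, the interpreter is whichever bash appears first on PATH,
# not necessarily /bin/bash.
echo "Running under: $(command -v bash)"
echo "Bash version:  $BASH_VERSION"
```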
diff --git a/build_manifest.yml b/build_manifest.yml index 588c3c70a93e..43bce1a4071d 100644 --- a/build_manifest.yml +++ b/build_manifest.yml @@ -216,4 +216,4 @@ docs: yellow-paper: buildDir: yellow-paper rebuildPatterns: - - ^yellow-paper/ \ No newline at end of file + - ^yellow-paper/ diff --git a/circuits/cpp/bootstrap.sh b/circuits/cpp/bootstrap.sh index 6a64d541b7be..440bf1e44c6f 100755 --- a/circuits/cpp/bootstrap.sh +++ b/circuits/cpp/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/circuits/cpp/format.sh b/circuits/cpp/format.sh index e12e84ab27a3..54047bd6def1 100755 --- a/circuits/cpp/format.sh +++ b/circuits/cpp/format.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu if [ "${1:-}" == "staged" ]; then diff --git a/circuits/cpp/scripts/build_run_tests_docker_local b/circuits/cpp/scripts/build_run_tests_docker_local index 19eb1784f710..f861fecbfa85 100755 --- a/circuits/cpp/scripts/build_run_tests_docker_local +++ b/circuits/cpp/scripts/build_run_tests_docker_local @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/circuits/cpp/scripts/collect_coverage_information.sh b/circuits/cpp/scripts/collect_coverage_information.sh index cac9393a9b65..45ad2764a225 100755 --- a/circuits/cpp/scripts/collect_coverage_information.sh +++ b/circuits/cpp/scripts/collect_coverage_information.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Check that the correct number of args have been provided if [ $# -ne 2 ]; then @@ -67,7 +67,7 @@ if [ ${#non_empty_profiles[@]} -gt 1 ]; then additional_objects+="-object $WORKING_DIRECTORY/bin/${non_empty_profile_base}_tests " done object_string=${additional_objects#"-object"} - + # Output the coverage report into `all_tests_coverage_report` folder rm -rf "$WORKING_DIRECTORY/all_tests_coverage_report" mkdir "$WORKING_DIRECTORY/all_tests_coverage_report" diff --git a/circuits/cpp/scripts/run_tests b/circuits/cpp/scripts/run_tests index dd7d7e213889..62fbd90153b8 100755 --- a/circuits/cpp/scripts/run_tests +++ b/circuits/cpp/scripts/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/circuits/cpp/scripts/run_tests_local b/circuits/cpp/scripts/run_tests_local index d65ff5f358f8..4c12807414eb 100755 --- a/circuits/cpp/scripts/run_tests_local +++ b/circuits/cpp/scripts/run_tests_local @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/circuits/cpp/scripts/tidy.sh b/circuits/cpp/scripts/tidy.sh index 3283fb89b62e..7faab2658c6a 100755 --- a/circuits/cpp/scripts/tidy.sh +++ b/circuits/cpp/scripts/tidy.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Run clang-tidy on all C++ source files diff --git a/docs/deploy_netlify.sh b/docs/deploy_netlify.sh index 82b447119f67..24c7c1f3845d 100755 --- a/docs/deploy_netlify.sh +++ b/docs/deploy_netlify.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/docs/docs/dev_docs/tutorials/writing_token_contract.md b/docs/docs/dev_docs/tutorials/writing_token_contract.md index 18e16c113d69..27cb44bd8f1f 100644 --- a/docs/docs/dev_docs/tutorials/writing_token_contract.md +++ b/docs/docs/dev_docs/tutorials/writing_token_contract.md @@ -210,7 +210,7 @@ Just below the contract definition, add the following 
imports: #include_code imports /yarn-project/noir-contracts/src/contracts/token_contract/src/main.nr rust -We are importing the Option type, items from the `value_note` library to help manage private value storage, note utilities, context (for managing private and public execution contexts), `state_vars` for helping manage state, `types` for data manipulation and `oracle` for help passing data from the private to public execution context. We also import the `auth` [library](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/aztec-nr/aztec/src/auth.nr) to handle token authorizations from [Account Contracts](../../concepts/foundation/accounts/main). Check out the Account Contract with AuthWitness [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/noir-contracts/src/contracts/schnorr_single_key_account_contract/src/main.nr). +We are importing the Option type, note utilities, context (for managing private and public execution contexts), `state_vars` for helping manage state, `compute_selector` for helping with calling public functions from private functions, and `types` for data manipulation. We also import the `authwit` [library](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/aztec-nr/aztec/src/auth.nr) to handle token authorizations from [Account Contracts](../../concepts/foundation/accounts/main). Check out the Account Contract with AuthWitness [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/noir-contracts/src/contracts/schnorr_single_key_account_contract/src/main.nr). [SafeU120](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/aztec-nr/safe-math/src/safe_u120.nr) is a library to do safe math operations on unsigned integers that protects against overflows and underflows. @@ -309,7 +309,7 @@ First, storage is initialized. Then it checks whether the calling contract (`con ##### Authorizing token spends -If the `msg_sender` is **NOT** the same as the account to debit from, the function checks that the account has authorized the `msg_sender` contract to debit tokens on its behalf. This check is done by computing the function selector that needs to be authorized (in this case, the `shield` function), computing the hash of the message that the account contract has approved. This is a hash of the contract that is approved to spend (`context.msg_sender`), the token contract that can be spent from (`context.this_address()`), the `selector`, the account to spend from (`from.address`), the `amount`, the `secret_hash` and a `nonce` to prevent multiple spends. This hash is passed to `assert_valid_public_message_for` to ensure that the Account Contract has approved tokens to be spent on it's behalf. +If the `msg_sender` is **NOT** the same as the account to debit from, the function checks that the account has authorized the `msg_sender` contract to debit tokens on its behalf. This check is done by computing the function selector that needs to be authorized (in this case, the `shield` function), computing the hash of the message that the account contract has approved. This is a hash of the contract that is approved to spend (`context.msg_sender`), the token contract that can be spent from (`context.this_address()`), the `selector`, the account to spend from (`from.address`), the `amount`, the `secret_hash` and a `nonce` to prevent multiple spends. 
This hash is passed to `assert_current_call_valid_authwit_public` to ensure that the Account Contract has approved tokens to be spent on its behalf. If the `msg_sender` is the same as the account to debit tokens from, the authorization check is bypassed and the function proceeds to update the account's `public_balance` and adds a new `TransparentNote` to the `pending_shields`. @@ -360,7 +360,7 @@ The function returns `1` to indicate successful execution. This private function enables un-shielding of private `ValueNote`s stored in `balances` to any Aztec account's `public_balance`. -After initializing storage, the function checks that the `msg_sender` is authorized to spend tokens. See [the Authorizing token spends section](#authorizing-token-spends) above for more detail--the only difference being that `assert_valid_message_for` is modified to work specifically in the private context. After the authorization check, the sender's private balance is decreased using the `decrement` helper function for the `value_note` library. Then it stages a public function call on this contract ([`_increase_public_balance`](#_increase_public_balance)) to be executed in the [public execution phase](#execution-contexts) of transaction execution. `_increase_public_balance` is marked as an `internal` function, so can only be called by this token contract. +After initializing storage, the function checks that the `msg_sender` is authorized to spend tokens. See [the Authorizing token spends section](#authorizing-token-spends) above for more detail--the only difference being that `assert_current_call_valid_authwit` is modified to work specifically in the private context. After the authorization check, the sender's private balance is decreased using the `decrement` helper function from the `value_note` library. Then it stages a public function call on this contract ([`_increase_public_balance`](#_increase_public_balance)) to be executed in the [public execution phase](#execution-contexts) of transaction execution. `_increase_public_balance` is marked as an `internal` function, so can only be called by this token contract. The function returns `1` to indicate successful execution. @@ -370,7 +370,7 @@ The function returns `1` to indicate successful execution. This private function enables private token transfers between Aztec accounts. -After initializing storage, the function checks that the `msg_sender` is authorized to spend tokens. See [the Authorizing token spends section](#authorizing-token-spends) above for more detail--the only difference being that `assert_valid_message_for` is modified to work specifically in the private context. After authorization, the function gets the current balances for the sender and recipient and decrements and increments them, respectively, using the `value_note` helper functions. +After initializing storage, the function checks that the `msg_sender` is authorized to spend tokens. See [the Authorizing token spends section](#authorizing-token-spends) above for more detail--the only difference being that `assert_current_call_valid_authwit` is modified to work specifically in the private context. After authorization, the function gets the current balances for the sender and recipient and decrements and increments them, respectively, using the `value_note` helper functions.

#include_code transfer /yarn-project/noir-contracts/src/contracts/token_contract/src/main.nr rust diff --git a/docs/scripts/build.sh b/docs/scripts/build.sh index e06ae5f2aa99..eb63a05887d5 100755 --- a/docs/scripts/build.sh +++ b/docs/scripts/build.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eo pipefail # Helper function for building packages in yarn project @@ -10,7 +10,7 @@ build_package() { (cd "yarn-project/$package_name" && $build_command) } -# Build script. If run on Netlify, first it needs to compile all yarn-projects +# Build script. If run on Netlify, first it needs to compile all yarn-projects # that are involved in typedoc in order to generate their type information. if [ -n "$NETLIFY" ]; then # Move to project root diff --git a/iac/mainnet-fork/scripts/run_nginx_anvil.sh b/iac/mainnet-fork/scripts/run_nginx_anvil.sh index 4b701f132560..38788424ed74 100755 --- a/iac/mainnet-fork/scripts/run_nginx_anvil.sh +++ b/iac/mainnet-fork/scripts/run_nginx_anvil.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eum pipefail diff --git a/iac/mainnet-fork/scripts/wait_for_fork b/iac/mainnet-fork/scripts/wait_for_fork index 326582c25aff..831e06723e26 100755 --- a/iac/mainnet-fork/scripts/wait_for_fork +++ b/iac/mainnet-fork/scripts/wait_for_fork @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e # When destroying and applying mainnet fork terraform, it may not be diff --git a/l1-contracts/bootstrap.sh b/l1-contracts/bootstrap.sh index db2a71b8c75d..f776b6072dc2 100755 --- a/l1-contracts/bootstrap.sh +++ b/l1-contracts/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/l1-contracts/scripts/ci_deploy_contracts.sh b/l1-contracts/scripts/ci_deploy_contracts.sh index 128a1702f31d..aaedb6f2f7d5 100755 --- a/l1-contracts/scripts/ci_deploy_contracts.sh +++ b/l1-contracts/scripts/ci_deploy_contracts.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash export ETHEREUM_HOST=https://$DEPLOY_TAG-mainnet-fork.aztec.network:8545/$FORK_API_KEY diff --git a/l1-contracts/scripts/deploy_contracts.sh b/l1-contracts/scripts/deploy_contracts.sh index 6223a0719a96..0bd743e15672 100755 --- a/l1-contracts/scripts/deploy_contracts.sh +++ b/l1-contracts/scripts/deploy_contracts.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Sets up defaults then runs the E2E Setup script to perform contract deployments diff --git a/noir/.github/actions/docs/build-status/script.sh b/noir/.github/actions/docs/build-status/script.sh index 0b282557cf2c..2e86de6c173f 100755 --- a/noir/.github/actions/docs/build-status/script.sh +++ b/noir/.github/actions/docs/build-status/script.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash BRANCH_NAME=$(echo "$BRANCH_NAME" | sed -e "s#refs/[^/]*/##") DEPLOY_STATUS=$(curl -X GET "https://api.netlify.com/api/v1/sites/$SITE_ID/deploys?branch=$BRANCH_NAME" | jq -r '.[] | select(.created_at != null) | .state' | head -1) diff --git a/noir/Dockerfile b/noir/Dockerfile index ac818cb8bd2a..de63983db3e9 100644 --- a/noir/Dockerfile +++ b/noir/Dockerfile @@ -1,15 +1,10 @@ -FROM rust:alpine3.17 -RUN apk update \ - && apk upgrade \ - && apk add --no-cache \ - build-base \ - bash +FROM rust:bookworm WORKDIR /usr/src/noir COPY . . RUN ./scripts/bootstrap_native.sh -# When running the container, mount the current working directory to /project. 
-FROM alpine:3.17 +# When running the container, mount the users home directory to /root +FROM ubuntu:lunar COPY --from=0 /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo -WORKDIR /project +WORKDIR /root ENTRYPOINT ["/usr/src/noir/target/release/nargo"] \ No newline at end of file diff --git a/noir/bootstrap.sh b/noir/bootstrap.sh index bf672ac0ad2e..5ebe7ade0901 100755 --- a/noir/bootstrap.sh +++ b/noir/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0") diff --git a/noir/compiler/integration-tests/scripts/codegen-verifiers.sh b/noir/compiler/integration-tests/scripts/codegen-verifiers.sh index 13667038728f..4518141fc130 100644 --- a/noir/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/noir/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash self_path=$(dirname "$(readlink -f "$0")") diff --git a/noir/scripts/bootstrap_native.sh b/noir/scripts/bootstrap_native.sh index 693a9d9678e5..b5e004106ff7 100755 --- a/noir/scripts/bootstrap_native.sh +++ b/noir/scripts/bootstrap_native.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. diff --git a/noir/scripts/bootstrap_packages.sh b/noir/scripts/bootstrap_packages.sh index 5fce26750371..1363acf51a6e 100755 --- a/noir/scripts/bootstrap_packages.sh +++ b/noir/scripts/bootstrap_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. diff --git a/noir/scripts/install_wasm-bindgen.sh b/noir/scripts/install_wasm-bindgen.sh index 5e9f91275063..c6e85bac50b2 100755 --- a/noir/scripts/install_wasm-bindgen.sh +++ b/noir/scripts/install_wasm-bindgen.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. diff --git a/noir/test_programs/gates_report.sh b/noir/test_programs/gates_report.sh index e06e6812e9d0..4192c5813767 100755 --- a/noir/test_programs/gates_report.sh +++ b/noir/test_programs/gates_report.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e # These tests are incompatible with gas reporting diff --git a/noir/test_programs/rebuild.sh b/noir/test_programs/rebuild.sh index dfc3dc5c967a..fd3e4478d627 100755 --- a/noir/test_programs/rebuild.sh +++ b/noir/test_programs/rebuild.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e process_dir() { diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh index 7456d4e1cbca..4a7b1bbe0150 100755 --- a/scripts/ci/assemble_e2e_benchmark.sh +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Grabs the log files uploaded in yarn-project/end-to-end/scripts/upload_logs_to_s3.sh # that contain representative benchmarks, extracts whatever metrics are interesting, # and assembles a single file that shows the current state of the repository. 
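assemble_e2e_benchmark.sh stitches the per-test benchmark logs into one file and, on pull requests, compares against the most recent master commit that has data (the hunk just below adjusts that logic). A rough sketch of the walk-back under assumed names: `BUCKET_NAME` and `CIRCLE_PULL_REQUEST` come from the CI environment, and the `benchmarks-v1/master/<sha>.json` key layout is purely illustrative, not the script's real path scheme.

```
#!/usr/bin/env bash
# Sketch: find the newest master ancestor with benchmark data in S3.
# BUCKET_NAME/CIRCLE_PULL_REQUEST come from CI; the key layout is an assumption.
set -euo pipefail

PR_NUMBER=${CIRCLE_PULL_REQUEST##*/}
COMMIT=$(curl -s "https://api.github.com/repos/AztecProtocol/aztec-packages/pulls/$PR_NUMBER" | jq -r '.base.sha')

for _ in $(seq 1 50); do
  if aws s3 ls "s3://$BUCKET_NAME/benchmarks-v1/master/$COMMIT.json" >/dev/null 2>&1; then
    BASE_COMMIT_HASH=$COMMIT
    break
  fi
  COMMIT=$(git rev-parse "$COMMIT^")  # step back to the parent commit
done

echo "Comparing against: ${BASE_COMMIT_HASH:-<no benchmark data found>}"
```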
@@ -70,7 +70,7 @@ if [ -n "${BENCHMARK_LATEST_FILE:-}" ]; then aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/${BENCHMARK_LATEST_FILE}" fi -# If on a pull request, get the data from the most recent commit on master where it's available, +# If on a pull request, get the data from the most recent commit on master where it's available, # generate a markdown comment, and post it on the pull request if [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then MASTER_COMMIT_HASH=$(curl -s "https://api.github.com/repos/AztecProtocol/aztec-packages/pulls/${CIRCLE_PULL_REQUEST##*/}" | jq -r '.base.sha') @@ -88,11 +88,9 @@ if [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then done set -e - if [ -z "${BASE_COMMIT_HASH:-}" ]; then + if [ -z "${BASE_COMMIT_HASH:-}" ]; then echo "No base commit data found" fi (yarn-project/scripts/run_script.sh workspace @aztec/scripts bench-comment && echo "commented on pr $CIRCLE_PULL_REQUEST") || echo "failed commenting on pr" fi - - diff --git a/scripts/ci/store_test_benchmark_logs b/scripts/ci/store_test_benchmark_logs index 3554aa746d2d..c9ae07fc96a4 100755 --- a/scripts/ci/store_test_benchmark_logs +++ b/scripts/ci/store_test_benchmark_logs @@ -1,5 +1,5 @@ -#!/bin/bash -# Script for storing barretenberg benchmark results. +#!/usr/bin/env bash +# Script for storing barretenberg benchmark results. # Uploads results to the AztecProtocol/benchmark-archive repository. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace @@ -36,7 +36,7 @@ cat /tmp/csv/new.csv \ # If there actually were any logs, update the information in the benchmark repository if [ -s /tmp/csv/trimmed.csv ]; then cd /tmp - + git clone --depth 1 https://$AZTEC_GITHUB_TOKEN:@github.com/AztecProtocol/benchmark-archive cd benchmark-archive diff --git a/scripts/git-subrepo/note/init-test b/scripts/git-subrepo/note/init-test index a20854de88ac..810dae14a4db 100755 --- a/scripts/git-subrepo/note/init-test +++ b/scripts/git-subrepo/note/init-test @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -ex cat $0 # Show this script in the output diff --git a/scripts/git-subrepo/note/recreate-rebase-conflict.sh b/scripts/git-subrepo/note/recreate-rebase-conflict.sh index 8e1103e575d8..6dc0a51e300d 100644 --- a/scripts/git-subrepo/note/recreate-rebase-conflict.sh +++ b/scripts/git-subrepo/note/recreate-rebase-conflict.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu set -x diff --git a/scripts/git-subrepo/note/subtree-rebase-fail-example/test.bash b/scripts/git-subrepo/note/subtree-rebase-fail-example/test.bash index df8b818cee61..9118236848ad 100755 --- a/scripts/git-subrepo/note/subtree-rebase-fail-example/test.bash +++ b/scripts/git-subrepo/note/subtree-rebase-fail-example/test.bash @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -ex @@ -24,6 +24,6 @@ git clone repo1 repo3 git subrepo clone ../repo2 subrepo bash git rebase -i HEAD^ - git log -p + git log -p ls ) diff --git a/scripts/git-subrepo/note/test-subrepo-push.sh b/scripts/git-subrepo/note/test-subrepo-push.sh index afceb5efa92f..13b76e47fc82 100644 --- a/scripts/git-subrepo/note/test-subrepo-push.sh +++ b/scripts/git-subrepo/note/test-subrepo-push.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -ex diff --git a/scripts/git-subrepo/note/test.sh b/scripts/git-subrepo/note/test.sh index fae278795d1c..d27548f0937f 100755 --- a/scripts/git-subrepo/note/test.sh +++ b/scripts/git-subrepo/note/test.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -x diff --git a/scripts/git_subrepo.sh b/scripts/git_subrepo.sh index 
df9743a2bd5f..969789cdcc8c 100755 --- a/scripts/git_subrepo.sh +++ b/scripts/git_subrepo.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu SCRIPT_DIR=$(dirname "$(realpath "$0")") @@ -11,10 +11,10 @@ fi # git subrepo is quite nice, but has one flaw in our workflow: # We frequently squash commits in PRs, and we might update the .gitrepo file -# with a parent commit that later does not exist. +# with a parent commit that later does not exist. # A backup heuristic is used to later find the squashed commit's parent -# using the .gitrepo file's git history. This might be brittle -# in the face of e.g. a .gitrepo whitespace change, but it's a fallback, +# using the .gitrepo file's git history. This might be brittle +# in the face of e.g. a .gitrepo whitespace change, but it's a fallback, # we only have this issue in master, and the file should only be edited # generally by subrepo commands. SUBREPO_PATH="${2:-}" diff --git a/scripts/migrate_barretenberg_branch.sh b/scripts/migrate_barretenberg_branch.sh index 8cf396867c30..36c128a62320 100755 --- a/scripts/migrate_barretenberg_branch.sh +++ b/scripts/migrate_barretenberg_branch.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Usage: ./this.sh diff --git a/scripts/tmux-splits b/scripts/tmux-splits index 296cdb366325..6b1e28f85595 100755 --- a/scripts/tmux-splits +++ b/scripts/tmux-splits @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Launches tmux with 1 window that has 2 panes running, from top to bottom: # - aztec.js # - a shell in end-to-end. If a test is given as an argument, the test will be run here automatically. diff --git a/yarn-project/Dockerfile b/yarn-project/Dockerfile index 257d93af9112..696c60fbe1c0 100644 --- a/yarn-project/Dockerfile +++ b/yarn-project/Dockerfile @@ -6,8 +6,6 @@ # Any subsequent build steps needed to support downstream containers should be done in those containers build files. FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-base as builder -RUN apk add bash perl - # Copy in the entire workspace. COPY . . diff --git a/yarn-project/Dockerfile.prod b/yarn-project/Dockerfile.prod index 95e4479137b7..16f65db47408 100644 --- a/yarn-project/Dockerfile.prod +++ b/yarn-project/Dockerfile.prod @@ -3,8 +3,8 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project AS yarn-project # Need new arch specific image. -FROM node:18.19.0-alpine AS builder -RUN apk add bash jq --no-cache +FROM node:18.19.0 AS builder +RUN apt update && apt install -y jq && rm -rf /var/lib/apt/lists/* && apt-get clean COPY --from=yarn-project /usr/src /usr/src WORKDIR /usr/src/yarn-project ARG COMMIT_TAG="" @@ -18,5 +18,32 @@ RUN yarn workspaces focus @aztec/cli @aztec/aztec-sandbox @aztec/aztec-faucet -- RUN rm -rf /usr/src/noir/target # Create fresh minimal size image. -FROM node:18.19.0-alpine -COPY --from=builder /usr/src /usr/src \ No newline at end of file +# Installs our specific version of node, stripping out the unnecessary. +# We could probably just apt install nodejs, but it's both a different version, and seemingly a bit slower. +# We could also use distroless, to get us about 20mb off, but meh. It's actually useful to shell into containers. 
+#FROM gcr.io/distroless/nodejs18-debian12 +FROM ubuntu:lunar +# RUN apt update && apt install -y nodejs && rm -rf /var/lib/apt/lists/* && apt-get clean +RUN apt update && apt install -y curl && rm -rf /var/lib/apt/lists/* && apt-get clean +ENV NODE_VERSION=18.19.0 +RUN ARCH= && \ + dpkgArch="$(dpkg --print-architecture)" && \ + case "${dpkgArch##*-}" in \ + amd64) ARCH='x64';; \ + arm64) ARCH='arm64';; \ + *) echo "unsupported architecture"; exit 1 ;; \ + esac && \ + curl -fsSLO --compressed "https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-$ARCH.tar.gz" && \ + tar zxf "node-v$NODE_VERSION-linux-$ARCH.tar.gz" -C /usr --strip-components=1 --no-same-owner \ + --exclude "*/share/*" \ + --exclude "*/bin/corepack" \ + --exclude "*/bin/npx" \ + --exclude "*/bin/npm" \ + --exclude "*/corepack/*" \ + --exclude "*/npm/man/*" \ + --exclude "*/npm/docs/*" \ + --exclude "*/include/*" && \ + rm "node-v$NODE_VERSION-linux-$ARCH.tar.gz" && \ + node --version +COPY --from=builder /usr/src /usr/src +ENTRYPOINT ["/usr/bin/node"] \ No newline at end of file diff --git a/yarn-project/aztec-node/terraform/servicediscovery-drain.sh b/yarn-project/aztec-node/terraform/servicediscovery-drain.sh index 1fa02e92d911..b8d6c3015196 100755 --- a/yarn-project/aztec-node/terraform/servicediscovery-drain.sh +++ b/yarn-project/aztec-node/terraform/servicediscovery-drain.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ $# -ne 1 ] && echo "Usage: $0 " && exit 1 diff --git a/yarn-project/aztec-nr/.gitrepo b/yarn-project/aztec-nr/.gitrepo index 0930abc8a304..6fc3eca0c440 100644 --- a/yarn-project/aztec-nr/.gitrepo +++ b/yarn-project/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 4e873ce0d34cad3743be460f5ed703350435a974 + commit = a3aa6bd5d8f2ad9d44e3d9340550203fbe38c4d6 method = merge cmdver = 0.4.6 - parent = 379b5adc259ac69b01e61b852172cdfc87cf9350 + parent = 9c5443651faaed3dcb9fae36727337a34ce5922b diff --git a/yarn-project/aztec-sandbox/Dockerfile b/yarn-project/aztec-sandbox/Dockerfile index 2f9644563e02..afdfb2094e18 100644 --- a/yarn-project/aztec-sandbox/Dockerfile +++ b/yarn-project/aztec-sandbox/Dockerfile @@ -1,8 +1,7 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod AS yarn-project-prod +ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/aztec-sandbox/dest/bin/index.js"] +EXPOSE 8079 8080 # The version has been updated in yarn-project-prod. # Adding COMMIT_TAG here to rebuild versioned image. 
-ARG COMMIT_TAG="" - -ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/aztec-sandbox/dest/bin/index.js"] -EXPOSE 8079 8080 +ARG COMMIT_TAG="" \ No newline at end of file diff --git a/yarn-project/aztec-sandbox/src/bin/index.ts b/yarn-project/aztec-sandbox/src/bin/index.ts index 71cf982e0468..72a9413454d8 100644 --- a/yarn-project/aztec-sandbox/src/bin/index.ts +++ b/yarn-project/aztec-sandbox/src/bin/index.ts @@ -10,6 +10,7 @@ import { NoirCommit } from '@aztec/noir-compiler/versions'; import { BootstrapNode, getP2PConfigEnvVars } from '@aztec/p2p'; import { GrumpkinScalar, PXEService, createPXERpcServer } from '@aztec/pxe'; +import { resolve as dnsResolve } from 'dns'; import { readFileSync } from 'fs'; import http from 'http'; import { dirname, resolve } from 'path'; @@ -30,8 +31,19 @@ enum SandboxMode { P2PBootstrap = 'p2p-bootstrap', } +/** + * If we can successfully resolve 'host.docker.internal', then we are running in a container, and we should treat + * localhost as being host.docker.internal. + */ +function getLocalhost() { + return new Promise(resolve => + dnsResolve('host.docker.internal', err => (err ? resolve('localhost') : resolve('host.docker.internal'))), + ); +} + +const LOCALHOST = await getLocalhost(); const { - AZTEC_NODE_URL = 'http://localhost:8079', + AZTEC_NODE_URL = `http://${LOCALHOST}:8079`, AZTEC_NODE_PORT = 8079, PXE_PORT = 8080, MODE = 'sandbox', diff --git a/yarn-project/aztec.js/src/api/init.ts b/yarn-project/aztec.js/src/api/init.ts index 2b5203c9d0b0..9654b9c80427 100644 --- a/yarn-project/aztec.js/src/api/init.ts +++ b/yarn-project/aztec.js/src/api/init.ts @@ -1 +1,10 @@ -export { init as initAztecJs } from '@aztec/foundation/crypto'; +import { init } from '@aztec/foundation/crypto'; + +/** + * This should only be needed to be called in CJS environments that don't have top level await. + * Initializes any asynchronous subsystems required to use the library. + * At time of writing, this is just our foundation crypto lib. + */ +export async function initAztecJs() { + await init(); +} diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index 38f1bc8970f2..353457cd6f51 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -16,6 +16,8 @@ * import { AztecAddress } from '@aztec/aztec.js/aztec_address'; * import { EthAddress } from '@aztec/aztec.js/eth_address'; * ``` + * + * TODO: Ultimately reimplement this mega exporter by mega exporting a granular api (then deprecate it). */ export { WaitOpts, @@ -118,7 +120,7 @@ export { // External devs will almost certainly have their own methods of doing these things. // If we want to use them in our own "aztec.js consuming code", import them from foundation as needed. 
export { ContractArtifact, FunctionArtifact, encodeArguments } from '@aztec/foundation/abi'; -export { sha256, init } from '@aztec/foundation/crypto'; +export { sha256 } from '@aztec/foundation/crypto'; export { DebugLogger, createDebugLogger, onLog } from '@aztec/foundation/log'; export { retry, retryUntil } from '@aztec/foundation/retry'; export { sleep } from '@aztec/foundation/sleep'; @@ -127,6 +129,7 @@ export { fileURLToPath } from '@aztec/foundation/url'; export { to2Fields, toBigInt } from '@aztec/foundation/serialize'; export { toBigIntBE } from '@aztec/foundation/bigint-buffer'; export { makeFetch } from '@aztec/foundation/json-rpc/client'; +export { FieldsOf } from '@aztec/foundation/types'; export { DeployL1Contracts, @@ -135,4 +138,7 @@ export { deployL1Contracts, } from '@aztec/ethereum'; -export { FieldsOf } from '@aztec/foundation/types'; +// Start of section that exports public api via granular api. +// Here you *can* do `export *` as the granular api defacto exports things explicitly. +// This entire index file will be deprecated at some point after we're satisfied. +export * from './api/init.js'; diff --git a/yarn-project/bootstrap.sh b/yarn-project/bootstrap.sh index 6f9acecd7db2..ff01dcb5e80e 100755 --- a/yarn-project/bootstrap.sh +++ b/yarn-project/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/yarn-project/boxes/bootstrap.sh b/yarn-project/boxes/bootstrap.sh index 340bc0e8972e..9d1b562be368 100755 --- a/yarn-project/boxes/bootstrap.sh +++ b/yarn-project/boxes/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # should be run from yarn-project/boxes original_path=$(pwd) @@ -7,16 +7,16 @@ original_path=$(pwd) for dir in *; do # Check if it's a directory if [ -d "${dir}" ]; then - + cd "${dir}" # Run the compile command echo "Running compile command inside ${dir}..." # Runs ts-node command from "../cli" to use latest "compile" code. this uses the yarn command to use the subpackage ts-node dep yarn compile:local - + # Change back to the original directory cd "${original_path}" - + fi done diff --git a/yarn-project/boxes/run_tests b/yarn-project/boxes/run_tests index 24a089b203c0..c358a9366eeb 100755 --- a/yarn-project/boxes/run_tests +++ b/yarn-project/boxes/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is used to run an e2e type test in CI (see .circleci/config.yml). # It pulls images and runs docker-compose, which has the test as the entrypoint. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace diff --git a/yarn-project/canary/scripts/cond_run_script b/yarn-project/canary/scripts/cond_run_script index 32834cef2f1d..84c03bd4574a 100755 --- a/yarn-project/canary/scripts/cond_run_script +++ b/yarn-project/canary/scripts/cond_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Conditionally runs a script if any dependent code has changed between # the last successful run and the present commit. 
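The canary cond_run_script described just above follows the same gate used throughout build-system/scripts: hash the relevant inputs, skip the command if a success marker for that hash already exists, otherwise run it and record success. A loose sketch of the pattern follows; the helpers named mirror scripts that appear in this diff, but their exact signatures and the success-tag shape shown here are assumptions.

```
#!/usr/bin/env bash
# Sketch of the conditional-run gate. Helper signatures and tag shape are illustrative.
set -euo pipefail

REPOSITORY=$1
shift

CONTENT_HASH=$(calculate_content_hash "$REPOSITORY")  # content hash of the repo's rebuild inputs
SUCCESS_TAG="$CONTENT_HASH-${JOB_NAME:-local}"         # assumed shape of the success marker

if image_exists "$REPOSITORY" "$SUCCESS_TAG"; then
  echo "Already succeeded for content hash $CONTENT_HASH, skipping."
  exit 0
fi

"$@"                                                   # run the requested command
tag_remote_image "$REPOSITORY" "$CONTENT_HASH" "$SUCCESS_TAG"  # record success for this hash
```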
# diff --git a/yarn-project/canary/scripts/extract_packages.sh b/yarn-project/canary/scripts/extract_packages.sh index 24499e233750..78255e01b249 100755 --- a/yarn-project/canary/scripts/extract_packages.sh +++ b/yarn-project/canary/scripts/extract_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash FILE=$1 diff --git a/yarn-project/canary/scripts/run_tests b/yarn-project/canary/scripts/run_tests index 21894dbedb08..6bde0b451a47 100755 --- a/yarn-project/canary/scripts/run_tests +++ b/yarn-project/canary/scripts/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is used to run an e2e test in CI (see config.yml and cond_spot_run_tests). # It sets a few environment variables used inside the docker-compose.yml, pulls images, and runs docker-compose. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace diff --git a/yarn-project/canary/scripts/update_packages.sh b/yarn-project/canary/scripts/update_packages.sh index c8fe7740bb7a..085dfd217bdd 100755 --- a/yarn-project/canary/scripts/update_packages.sh +++ b/yarn-project/canary/scripts/update_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu DIST_TAG=$1 diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts index df34d06d3058..9b8afc328e2c 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts @@ -1,5 +1,3 @@ -import { init } from '@aztec/foundation/crypto'; - import { createCipheriv, createDecipheriv, randomBytes } from 'crypto'; import { Aes128 } from './index.js'; @@ -7,8 +5,7 @@ import { Aes128 } from './index.js'; describe('aes128', () => { let aes128!: Aes128; - beforeAll(async () => { - await init(); + beforeAll(() => { aes128 = new Aes128(); }); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts index 671c019291b0..154ab39075fb 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts @@ -1,4 +1,3 @@ -import { init } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { GrumpkinScalar, Point } from '../../../index.js'; @@ -9,8 +8,7 @@ const debug = createDebugLogger('bb:grumpkin_test'); describe('grumpkin', () => { let grumpkin!: Grumpkin; - beforeAll(async () => { - await init(); + beforeAll(() => { grumpkin = new Grumpkin(); }); diff --git a/yarn-project/cli/Dockerfile b/yarn-project/cli/Dockerfile index dfffe9b18436..dc70ce119cad 100644 --- a/yarn-project/cli/Dockerfile +++ b/yarn-project/cli/Dockerfile @@ -1,17 +1,6 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod AS yarn-project-prod - ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/cli/dest/bin/index.js"] # The version has been updated in yarn-project-prod. # Adding COMMIT_TAG here to rebuild versioned image. -ARG COMMIT_TAG="" - -# Setup cache volume. -ENV XDG_CACHE_HOME /cache -RUN mkdir /cache && chmod 777 /cache -VOLUME [ "/cache" ] - -# Run as non-root user. 
-RUN corepack enable -RUN addgroup -S aztec && adduser -S aztec -G aztec -USER aztec +ARG COMMIT_TAG="" \ No newline at end of file diff --git a/yarn-project/cli/src/cmds/add_note.ts b/yarn-project/cli/src/cmds/add_note.ts index 643400343709..33ad5a52f80a 100644 --- a/yarn-project/cli/src/cmds/add_note.ts +++ b/yarn-project/cli/src/cmds/add_note.ts @@ -3,7 +3,7 @@ import { DebugLogger } from '@aztec/foundation/log'; import { ExtendedNote, Note, TxHash } from '@aztec/types'; import { createCompatibleClient } from '../client.js'; -import { parseFields } from '../utils.js'; +import { parseFields } from '../parse_args.js'; /** * diff --git a/yarn-project/cli/src/index.ts b/yarn-project/cli/src/index.ts index 6f44a6ca8b31..08fd0c44b5ec 100644 --- a/yarn-project/cli/src/index.ts +++ b/yarn-project/cli/src/index.ts @@ -1,9 +1,9 @@ -import { initAztecJs } from '@aztec/aztec.js/init'; import { DebugLogger, LogFn } from '@aztec/foundation/log'; import { fileURLToPath } from '@aztec/foundation/url'; import { addNoirCompilerCommanderActions } from '@aztec/noir-compiler/cli'; import { Command, Option } from 'commander'; +import { resolve as dnsResolve } from 'dns'; import { readFileSync } from 'fs'; import { dirname, resolve } from 'path'; @@ -21,9 +21,20 @@ import { parsePublicKey, parseSaltFromHexString, parseTxHash, -} from './utils.js'; +} from './parse_args.js'; -const { ETHEREUM_HOST = 'http://localhost:8545', PRIVATE_KEY, API_KEY } = process.env; +/** + * If we can successfully resolve 'host.docker.internal', then we are running in a container, and we should treat + * localhost as being host.docker.internal. + */ +function getLocalhost() { + return new Promise(resolve => + dnsResolve('host.docker.internal', err => (err ? resolve('localhost') : resolve('host.docker.internal'))), + ); +} + +const LOCALHOST = await getLocalhost(); +const { ETHEREUM_HOST = `http://${LOCALHOST}:8545`, PRIVATE_KEY, API_KEY } = process.env; /** * Returns commander program that defines the CLI. @@ -42,7 +53,7 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { const pxeOption = new Option('-u, --rpc-url ', 'URL of the PXE') .env('PXE_URL') - .default('http://localhost:8080') + .default(`http://${LOCALHOST}:8080`) .makeOptionMandatory(true); const createPrivateKeyOption = (description: string, mandatory: boolean) => @@ -51,8 +62,6 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .argParser(parsePrivateKey) .makeOptionMandatory(mandatory); - program.hook('preAction', initAztecJs); - program .command('deploy-l1-contracts') .description('Deploys all necessary Ethereum contracts for Aztec.') diff --git a/yarn-project/cli/src/parse_args.ts b/yarn-project/cli/src/parse_args.ts new file mode 100644 index 000000000000..e2641005cd27 --- /dev/null +++ b/yarn-project/cli/src/parse_args.ts @@ -0,0 +1,248 @@ +import { FunctionSelector } from '@aztec/aztec.js/abi'; +import { AztecAddress } from '@aztec/aztec.js/aztec_address'; +import { EthAddress } from '@aztec/aztec.js/eth_address'; +import { Fr, GrumpkinScalar, Point } from '@aztec/aztec.js/fields'; +import { LogId } from '@aztec/aztec.js/log_id'; +import { TxHash } from '@aztec/aztec.js/tx_hash'; + +import { InvalidArgumentError } from 'commander'; + +/** + * Removes the leading 0x from a hex string. If no leading 0x is found the string is returned unchanged. 
+ * @param hex - A hex string + * @returns A new string with leading 0x removed + */ +const stripLeadingHex = (hex: string) => { + if (hex.length > 2 && hex.startsWith('0x')) { + return hex.substring(2); + } + return hex; +}; + +/** + * Parses a hex encoded string to an Fr integer to be used as salt + * @param str - Hex encoded string + * @returns A integer to be used as salt + */ +export function parseSaltFromHexString(str: string): Fr { + const hex = stripLeadingHex(str); + + // ensure it's a hex string + if (!hex.match(/^[0-9a-f]+$/i)) { + throw new InvalidArgumentError('Invalid hex string'); + } + + // pad it so that we may read it as a buffer. + // Buffer needs _exactly_ two hex characters per byte + const padded = hex.length % 2 === 1 ? '0' + hex : hex; + + // finally, turn it into an integer + return Fr.fromBuffer(Buffer.from(padded, 'hex')); +} + +/** + * Parses an AztecAddress from a string. + * @param address - A serialized Aztec address + * @returns An Aztec address + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseAztecAddress(address: string): AztecAddress { + try { + return AztecAddress.fromString(address); + } catch { + throw new InvalidArgumentError(`Invalid address: ${address}`); + } +} + +/** + * Parses an Ethereum address from a string. + * @param address - A serialized Ethereum address + * @returns An Ethereum address + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseEthereumAddress(address: string): EthAddress { + try { + return EthAddress.fromString(address); + } catch { + throw new InvalidArgumentError(`Invalid address: ${address}`); + } +} + +/** + * Parses an AztecAddress from a string. + * @param address - A serialized Aztec address + * @returns An Aztec address + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseOptionalAztecAddress(address: string): AztecAddress | undefined { + if (!address) { + return undefined; + } + return parseAztecAddress(address); +} + +/** + * Parses an optional log ID string into a LogId object. + * + * @param logId - The log ID string to parse. + * @returns The parsed LogId object, or undefined if the log ID is missing or empty. + */ +export function parseOptionalLogId(logId: string): LogId | undefined { + if (!logId) { + return undefined; + } + return LogId.fromString(logId); +} + +/** + * Parses a selector from a string. + * @param selector - A serialized selector. + * @returns A selector. + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseOptionalSelector(selector: string): FunctionSelector | undefined { + if (!selector) { + return undefined; + } + try { + return FunctionSelector.fromString(selector); + } catch { + throw new InvalidArgumentError(`Invalid selector: ${selector}`); + } +} + +/** + * Parses a string into an integer or returns undefined if the input is falsy. + * + * @param value - The string to parse into an integer. + * @returns The parsed integer, or undefined if the input string is falsy. + * @throws If the input is not a valid integer. + */ +export function parseOptionalInteger(value: string): number | undefined { + if (!value) { + return undefined; + } + const parsed = Number(value); + if (!Number.isInteger(parsed)) { + throw new InvalidArgumentError('Invalid integer.'); + } + return parsed; +} + +/** + * Parses a TxHash from a string. 
+ * @param txHash - A transaction hash + * @returns A TxHash instance + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseTxHash(txHash: string): TxHash { + try { + return TxHash.fromString(txHash); + } catch { + throw new InvalidArgumentError(`Invalid transaction hash: ${txHash}`); + } +} + +/** + * Parses an optional TxHash from a string. + * Calls parseTxHash internally. + * @param txHash - A transaction hash + * @returns A TxHash instance, or undefined if the input string is falsy. + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseOptionalTxHash(txHash: string): TxHash | undefined { + if (!txHash) { + return undefined; + } + return parseTxHash(txHash); +} + +/** + * Parses a public key from a string. + * @param publicKey - A public key + * @returns A Point instance + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parsePublicKey(publicKey: string): Point { + try { + return Point.fromString(publicKey); + } catch (err) { + throw new InvalidArgumentError(`Invalid public key: ${publicKey}`); + } +} + +/** + * Parses a partial address from a string. + * @param address - A partial address + * @returns A Fr instance + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parsePartialAddress(address: string): Fr { + try { + return Fr.fromString(address); + } catch (err) { + throw new InvalidArgumentError(`Invalid partial address: ${address}`); + } +} + +/** + * Parses a private key from a string. + * @param privateKey - A string + * @returns A private key + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parsePrivateKey(privateKey: string): GrumpkinScalar { + try { + const value = GrumpkinScalar.fromString(privateKey); + // most likely a badly formatted key was passed + if (value.isZero()) { + throw new Error('Private key must not be zero'); + } + + return value; + } catch (err) { + throw new InvalidArgumentError(`Invalid private key: ${privateKey}`); + } +} + +/** + * Parses a field from a string. + * @param field - A string representing the field. + * @returns A field. + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseField(field: string): Fr { + try { + const isHex = field.startsWith('0x') || field.match(new RegExp(`^[0-9a-f]{${Fr.SIZE_IN_BYTES * 2}}$`, 'i')); + if (isHex) { + return Fr.fromString(field); + } + + if (['true', 'false'].includes(field)) { + return new Fr(field === 'true'); + } + + const isNumber = +field || field === '0'; + if (isNumber) { + return new Fr(BigInt(field)); + } + + const isBigInt = field.endsWith('n'); + if (isBigInt) { + return new Fr(BigInt(field.replace(/n$/, ''))); + } + + return new Fr(BigInt(field)); + } catch (err) { + throw new InvalidArgumentError(`Invalid field: ${field}`); + } +} + +/** + * Parses an array of strings to Frs. + * @param fields - An array of strings representing the fields. + * @returns An array of Frs. 
+ */ +export function parseFields(fields: string[]): Fr[] { + return fields.map(parseField); +} diff --git a/yarn-project/cli/src/test/utils.test.ts b/yarn-project/cli/src/test/utils.test.ts index e465138083b2..d0c9ff1b7ef8 100644 --- a/yarn-project/cli/src/test/utils.test.ts +++ b/yarn-project/cli/src/test/utils.test.ts @@ -5,7 +5,8 @@ import { InvalidArgumentError } from 'commander'; import { MockProxy, mock } from 'jest-mock-extended'; import { encodeArgs } from '../encoding.js'; -import { getTxSender, parseSaltFromHexString, stripLeadingHex } from '../utils.js'; +import { parseSaltFromHexString } from '../parse_args.js'; +import { getTxSender, stripLeadingHex } from '../utils.js'; import { mockContractArtifact } from './mocks.js'; describe('CLI Utils', () => { diff --git a/yarn-project/cli/src/utils.ts b/yarn-project/cli/src/utils.ts index ed8dd3cc0179..40867e8fbb09 100644 --- a/yarn-project/cli/src/utils.ts +++ b/yarn-project/cli/src/utils.ts @@ -1,11 +1,7 @@ -import { type ContractArtifact, type FunctionArtifact, FunctionSelector } from '@aztec/aztec.js/abi'; +import { type ContractArtifact, type FunctionArtifact } from '@aztec/aztec.js/abi'; import { AztecAddress } from '@aztec/aztec.js/aztec_address'; -import { EthAddress } from '@aztec/aztec.js/eth_address'; import { type L1ContractArtifactsForDeployment } from '@aztec/aztec.js/ethereum'; -import { Fr, GrumpkinScalar, Point } from '@aztec/aztec.js/fields'; import { type PXE } from '@aztec/aztec.js/interfaces/pxe'; -import { LogId } from '@aztec/aztec.js/log_id'; -import { TxHash } from '@aztec/aztec.js/tx_hash'; import { DebugLogger, LogFn } from '@aztec/foundation/log'; import { CommanderError, InvalidArgumentError } from 'commander'; @@ -182,234 +178,6 @@ export const stripLeadingHex = (hex: string) => { return hex; }; -/** - * Parses a hex encoded string to an Fr integer to be used as salt - * @param str - Hex encoded string - * @returns A integer to be used as salt - */ -export function parseSaltFromHexString(str: string): Fr { - const hex = stripLeadingHex(str); - - // ensure it's a hex string - if (!hex.match(/^[0-9a-f]+$/i)) { - throw new InvalidArgumentError('Invalid hex string'); - } - - // pad it so that we may read it as a buffer. - // Buffer needs _exactly_ two hex characters per byte - const padded = hex.length % 2 === 1 ? '0' + hex : hex; - - // finally, turn it into an integer - return Fr.fromBuffer(Buffer.from(padded, 'hex')); -} - -/** - * Parses an AztecAddress from a string. - * @param address - A serialized Aztec address - * @returns An Aztec address - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseAztecAddress(address: string): AztecAddress { - try { - return AztecAddress.fromString(address); - } catch { - throw new InvalidArgumentError(`Invalid address: ${address}`); - } -} - -/** - * Parses an Ethereum address from a string. - * @param address - A serialized Ethereum address - * @returns An Ethereum address - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseEthereumAddress(address: string): EthAddress { - try { - return EthAddress.fromString(address); - } catch { - throw new InvalidArgumentError(`Invalid address: ${address}`); - } -} - -/** - * Parses an AztecAddress from a string. - * @param address - A serialized Aztec address - * @returns An Aztec address - * @throws InvalidArgumentError if the input string is not valid. 
- */ -export function parseOptionalAztecAddress(address: string): AztecAddress | undefined { - if (!address) { - return undefined; - } - return parseAztecAddress(address); -} - -/** - * Parses an optional log ID string into a LogId object. - * - * @param logId - The log ID string to parse. - * @returns The parsed LogId object, or undefined if the log ID is missing or empty. - */ -export function parseOptionalLogId(logId: string): LogId | undefined { - if (!logId) { - return undefined; - } - return LogId.fromString(logId); -} - -/** - * Parses a selector from a string. - * @param selector - A serialized selector. - * @returns A selector. - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseOptionalSelector(selector: string): FunctionSelector | undefined { - if (!selector) { - return undefined; - } - try { - return FunctionSelector.fromString(selector); - } catch { - throw new InvalidArgumentError(`Invalid selector: ${selector}`); - } -} - -/** - * Parses a string into an integer or returns undefined if the input is falsy. - * - * @param value - The string to parse into an integer. - * @returns The parsed integer, or undefined if the input string is falsy. - * @throws If the input is not a valid integer. - */ -export function parseOptionalInteger(value: string): number | undefined { - if (!value) { - return undefined; - } - const parsed = Number(value); - if (!Number.isInteger(parsed)) { - throw new InvalidArgumentError('Invalid integer.'); - } - return parsed; -} - -/** - * Parses a TxHash from a string. - * @param txHash - A transaction hash - * @returns A TxHash instance - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseTxHash(txHash: string): TxHash { - try { - return TxHash.fromString(txHash); - } catch { - throw new InvalidArgumentError(`Invalid transaction hash: ${txHash}`); - } -} - -/** - * Parses an optional TxHash from a string. - * Calls parseTxHash internally. - * @param txHash - A transaction hash - * @returns A TxHash instance, or undefined if the input string is falsy. - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseOptionalTxHash(txHash: string): TxHash | undefined { - if (!txHash) { - return undefined; - } - return parseTxHash(txHash); -} - -/** - * Parses a public key from a string. - * @param publicKey - A public key - * @returns A Point instance - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parsePublicKey(publicKey: string): Point { - try { - return Point.fromString(publicKey); - } catch (err) { - throw new InvalidArgumentError(`Invalid public key: ${publicKey}`); - } -} - -/** - * Parses a partial address from a string. - * @param address - A partial address - * @returns A Fr instance - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parsePartialAddress(address: string): Fr { - try { - return Fr.fromString(address); - } catch (err) { - throw new InvalidArgumentError(`Invalid partial address: ${address}`); - } -} - -/** - * Parses a private key from a string. - * @param privateKey - A string - * @returns A private key - * @throws InvalidArgumentError if the input string is not valid. 
- */ -export function parsePrivateKey(privateKey: string): GrumpkinScalar { - try { - const value = GrumpkinScalar.fromString(privateKey); - // most likely a badly formatted key was passed - if (value.isZero()) { - throw new Error('Private key must not be zero'); - } - - return value; - } catch (err) { - throw new InvalidArgumentError(`Invalid private key: ${privateKey}`); - } -} - -/** - * Parses a field from a string. - * @param field - A string representing the field. - * @returns A field. - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseField(field: string): Fr { - try { - const isHex = field.startsWith('0x') || field.match(new RegExp(`^[0-9a-f]{${Fr.SIZE_IN_BYTES * 2}}$`, 'i')); - if (isHex) { - return Fr.fromString(field); - } - - if (['true', 'false'].includes(field)) { - return new Fr(field === 'true'); - } - - const isNumber = +field || field === '0'; - if (isNumber) { - return new Fr(BigInt(field)); - } - - const isBigInt = field.endsWith('n'); - if (isBigInt) { - return new Fr(BigInt(field.replace(/n$/, ''))); - } - - return new Fr(BigInt(field)); - } catch (err) { - throw new InvalidArgumentError(`Invalid field: ${field}`); - } -} - -/** - * Parses an array of strings to Frs. - * @param fields - An array of strings representing the fields. - * @returns An array of Frs. - */ -export function parseFields(fields: string[]): Fr[] { - return fields.map(parseField); -} - /** * Updates a file in place atomically. * @param filePath - Path to file diff --git a/yarn-project/deploy_npm.sh b/yarn-project/deploy_npm.sh index 04f6ab468415..f092e6fe1060 100755 --- a/yarn-project/deploy_npm.sh +++ b/yarn-project/deploy_npm.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/yarn-project/end-to-end/scripts/setup_canary.sh b/yarn-project/end-to-end/scripts/setup_canary.sh index 2d4cd7fa2ef4..14d7e176652b 100755 --- a/yarn-project/end-to-end/scripts/setup_canary.sh +++ b/yarn-project/end-to-end/scripts/setup_canary.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu TARGET_PKGS_FILE=$1 diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index 9c7cfdfc1545..72b064bc9030 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -84,9 +84,8 @@ export const browserTestSuite = (setup: () => Server, pageLogger: AztecJs.DebugL }); it('Loads Aztec.js in the browser', async () => { - const generatePublicKeyExists = await page.evaluate(async () => { - const { generatePublicKey, init } = window.AztecJs; - await init(); + const generatePublicKeyExists = await page.evaluate(() => { + const { generatePublicKey } = window.AztecJs; return typeof generatePublicKey === 'function'; }); expect(generatePublicKeyExists).toBe(true); diff --git a/yarn-project/foundation/src/abi/abi_coder.ts b/yarn-project/foundation/src/abi/abi_coder.ts index 6971b423757c..a702e65153a5 100644 --- a/yarn-project/foundation/src/abi/abi_coder.ts +++ b/yarn-project/foundation/src/abi/abi_coder.ts @@ -1,4 +1,4 @@ -import { ABIType } from '@aztec/foundation/abi'; +import { type ABIType } from './abi.js'; /** * Get the size of an ABI type in field elements. 
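For context on how the relocated CLI helpers are meant to be consumed: every `parse*` function added in `parse_args.ts` above throws commander's `InvalidArgumentError`, which is how commander's custom argument and option processors signal bad input. A minimal sketch of wiring a couple of them into a command — the command name, flags, and import path here are illustrative only, not part of this diff:

```typescript
import { Command } from 'commander';

// Hypothetical relative path; the real location depends on where this file lives.
import { parseAztecAddress, parseOptionalInteger } from './parse_args.js';

const program = new Command('example-cli');

program
  .command('get-balance')
  // commander invokes the processor with the raw string and converts a thrown
  // InvalidArgumentError into a friendly usage error.
  .argument('<address>', 'Account to query', parseAztecAddress)
  .option('-b, --block <number>', 'Block number to query at', parseOptionalInteger)
  .action((address, options) => {
    console.log(`address=${address.toString()} block=${options.block ?? 'latest'}`);
  });

program.parse(process.argv);
```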
diff --git a/yarn-project/foundation/src/abi/decoder.ts b/yarn-project/foundation/src/abi/decoder.ts index 9ea69388f76f..cd37ba361e57 100644 --- a/yarn-project/foundation/src/abi/decoder.ts +++ b/yarn-project/foundation/src/abi/decoder.ts @@ -1,5 +1,5 @@ -import { ABIParameter, ABIType, ABIVariable, FunctionArtifact } from '@aztec/foundation/abi'; -import { Fr } from '@aztec/foundation/fields'; +import { Fr } from '../fields/index.js'; +import { ABIParameter, type ABIType, ABIVariable, FunctionArtifact } from './abi.js'; /** * The type of our decoded ABI. diff --git a/yarn-project/foundation/src/abi/encoder.ts b/yarn-project/foundation/src/abi/encoder.ts index 2e4b1844060d..a4db8e24230a 100644 --- a/yarn-project/foundation/src/abi/encoder.ts +++ b/yarn-project/foundation/src/abi/encoder.ts @@ -1,6 +1,6 @@ -import { ABIType, FunctionAbi, isAddressStruct } from '@aztec/foundation/abi'; - import { Fr } from '../fields/index.js'; +import { ABIType, FunctionAbi } from './abi.js'; +import { isAddressStruct } from './utils.js'; /** * Encodes arguments for a function call. diff --git a/yarn-project/foundation/src/abi/function_selector.ts b/yarn-project/foundation/src/abi/function_selector.ts index df53ef9a1fb8..de0b879cf6b9 100644 --- a/yarn-project/foundation/src/abi/function_selector.ts +++ b/yarn-project/foundation/src/abi/function_selector.ts @@ -1,9 +1,11 @@ -import { ABIParameter, decodeFunctionSignature } from '@aztec/foundation/abi'; import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { keccak } from '@aztec/foundation/crypto'; -import { Fr } from '@aztec/foundation/fields'; import { BufferReader } from '@aztec/foundation/serialize'; +import { keccak } from '../crypto/keccak/index.js'; +import { Fr } from '../fields/index.js'; +import { ABIParameter } from './abi.js'; +import { decodeFunctionSignature } from './decoder.js'; + /** * A function selector is the first 4 bytes of the hash of a function signature. */ diff --git a/yarn-project/foundation/src/abi/utils.ts b/yarn-project/foundation/src/abi/utils.ts index b2ee62d2dd54..d7d15a4d94ac 100644 --- a/yarn-project/foundation/src/abi/utils.ts +++ b/yarn-project/foundation/src/abi/utils.ts @@ -1,4 +1,4 @@ -import { ABIType } from './abi.js'; +import { type ABIType } from './abi.js'; /** * Returns whether the ABI type is an Aztec or Ethereum Address defined in Aztec.nr. 
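The `foundation/src/abi` hunks above all follow the same pattern: package self-imports (`@aztec/foundation/abi`) become relative `./abi.js` paths, and imports used only in type positions gain the inline `type` modifier. A small, hypothetical sketch of why the modifier matters — type-only imports are erased from the emitted JavaScript, so they cannot reintroduce a runtime require cycle between these sibling modules (the helper below is illustrative, not code from this diff):

```typescript
// Erased by tsc: no runtime import of abi.js is emitted for a type-only binding.
import { type ABIType } from './abi.js';
// A value import survives compilation and is actually loaded at runtime.
import { Fr } from '../fields/index.js';

// Illustrative helper, assuming ABIType's `kind` discriminant and Fr.SIZE_IN_BYTES.
export function describeAbiType(abiType: ABIType): string {
  return `kind=${abiType.kind}, one field element is ${Fr.SIZE_IN_BYTES} bytes`;
}
```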
diff --git a/yarn-project/foundation/src/eth-address/index.ts b/yarn-project/foundation/src/eth-address/index.ts index 2571f09790c2..76587ecab263 100644 --- a/yarn-project/foundation/src/eth-address/index.ts +++ b/yarn-project/foundation/src/eth-address/index.ts @@ -1,4 +1,5 @@ -import { keccak256String, randomBytes } from '../crypto/index.js'; +import { keccak256String } from '../crypto/keccak/index.js'; +import { randomBytes } from '../crypto/random/index.js'; import { Fr } from '../fields/index.js'; import { BufferReader } from '../serialize/index.js'; diff --git a/yarn-project/foundation/src/fields/fields.ts b/yarn-project/foundation/src/fields/fields.ts index 591b57f6892d..90600643cfe5 100644 --- a/yarn-project/foundation/src/fields/fields.ts +++ b/yarn-project/foundation/src/fields/fields.ts @@ -1,5 +1,5 @@ import { toBigIntBE, toBufferBE } from '../bigint-buffer/index.js'; -import { randomBytes } from '../crypto/index.js'; +import { randomBytes } from '../crypto/random/index.js'; import { BufferReader } from '../serialize/buffer_reader.js'; const ZERO_BUFFER = Buffer.alloc(32); diff --git a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh index 1c4bd5e2ba9d..37a07c35e17f 100755 --- a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh +++ b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail; target_dir=./generated diff --git a/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts b/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts index f651eecd9960..7dff5ecb7b1f 100644 --- a/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts +++ b/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts @@ -26,7 +26,7 @@ export function addNoirCompilerCommanderActions(program: Command, log: LogFn = ( .option('-o, --outdir ', 'Output folder for the binary artifacts, relative to the project path', 'target') .option('-ts, --typescript ', 'Optional output folder for generating typescript wrappers', undefined) .option('-i, --interface ', 'Optional output folder for generating an Aztec.nr contract interface', undefined) - .option('-c --compiler ', 'Which compiler to use. Either nargo or wasm. Defaults to nargo', 'wasm') + .option('-c --compiler ', 'Which compiler to use. 
Either nargo or wasm.', 'wasm') .description('Compiles the Noir Source in the target project') .action(async (projectPath: string, options: Options) => { diff --git a/yarn-project/noir-contracts/scripts/catch.sh b/yarn-project/noir-contracts/scripts/catch.sh index 8a0a894b93ff..87b485eb3f30 100644 --- a/yarn-project/noir-contracts/scripts/catch.sh +++ b/yarn-project/noir-contracts/scripts/catch.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Handler for SIGCHLD, cleanup if child exit with error, used by nargo_test.sh and compile.sh handle_sigchld() { diff --git a/yarn-project/noir-contracts/scripts/compile.sh b/yarn-project/noir-contracts/scripts/compile.sh index bc217a6156b9..055485220e05 100755 --- a/yarn-project/noir-contracts/scripts/compile.sh +++ b/yarn-project/noir-contracts/scripts/compile.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail; diff --git a/yarn-project/noir-contracts/scripts/compile_all.sh b/yarn-project/noir-contracts/scripts/compile_all.sh index 32de820a630c..7445dbbc854a 100755 --- a/yarn-project/noir-contracts/scripts/compile_all.sh +++ b/yarn-project/noir-contracts/scripts/compile_all.sh @@ -1,3 +1,3 @@ -#!/bin/bash +#!/usr/bin/env bash echo "Compiling all contracts" ./scripts/compile.sh $(./scripts/get_all_contracts.sh) diff --git a/yarn-project/noir-contracts/scripts/get_all_contracts.sh b/yarn-project/noir-contracts/scripts/get_all_contracts.sh index caaf81c1a136..1311a4862a41 100755 --- a/yarn-project/noir-contracts/scripts/get_all_contracts.sh +++ b/yarn-project/noir-contracts/scripts/get_all_contracts.sh @@ -1,3 +1,3 @@ -#!/bin/bash +#!/usr/bin/env bash # Utility to get the names of all contracts echo $(ls -d src/contracts/*_contract/Nargo.toml | sed -r "s/src\\/contracts\\/(.+)_contract\\/Nargo.toml/\\1/") \ No newline at end of file diff --git a/yarn-project/noir-contracts/scripts/get_all_libraries.sh b/yarn-project/noir-contracts/scripts/get_all_libraries.sh index f1913a46cafd..8fbe7bb1b19e 100755 --- a/yarn-project/noir-contracts/scripts/get_all_libraries.sh +++ b/yarn-project/noir-contracts/scripts/get_all_libraries.sh @@ -1,3 +1,3 @@ -#!/bin/bash +#!/usr/bin/env bash # Utility to get the names of all noir libraries located in ../aztec-nr echo $(ls -d ../aztec-nr/*/Nargo.toml | sed -r "s/..\\/aztec-nr\\/(.+)\\/Nargo.toml/\\1/") \ No newline at end of file diff --git a/yarn-project/noir-contracts/scripts/install_noir.sh b/yarn-project/noir-contracts/scripts/install_noir.sh index 325e9a56620d..b1105d6ad469 100755 --- a/yarn-project/noir-contracts/scripts/install_noir.sh +++ b/yarn-project/noir-contracts/scripts/install_noir.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to install noirup and the latest aztec nargo set -eu diff --git a/yarn-project/noir-contracts/scripts/install_noirup.sh b/yarn-project/noir-contracts/scripts/install_noirup.sh index 11ba9b15d312..025237333cdb 100755 --- a/yarn-project/noir-contracts/scripts/install_noirup.sh +++ b/yarn-project/noir-contracts/scripts/install_noirup.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to install noirup and the latest nargo set -eu diff --git a/yarn-project/noir-contracts/scripts/nargo_check.sh b/yarn-project/noir-contracts/scripts/nargo_check.sh index 10d9d14c2c1b..45209d4ee430 100644 --- a/yarn-project/noir-contracts/scripts/nargo_check.sh +++ b/yarn-project/noir-contracts/scripts/nargo_check.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Check nargo version matches the expected one nargo_check() { diff --git 
a/yarn-project/noir-contracts/scripts/nargo_test.sh b/yarn-project/noir-contracts/scripts/nargo_test.sh index 8468c19f4928..0666714388df 100755 --- a/yarn-project/noir-contracts/scripts/nargo_test.sh +++ b/yarn-project/noir-contracts/scripts/nargo_test.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Tests noir contracts, if multiple are provided, then they are testing in parallel, bubbling any testing errors # diff --git a/yarn-project/noir-contracts/scripts/nargo_test_ci.sh b/yarn-project/noir-contracts/scripts/nargo_test_ci.sh index 5a4458d4da50..d835f9723247 100755 --- a/yarn-project/noir-contracts/scripts/nargo_test_ci.sh +++ b/yarn-project/noir-contracts/scripts/nargo_test_ci.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Runs tests scripts for all contracts, then for all libraries. ./scripts/nargo_test.sh CONTRACT $(./scripts/get_all_contracts.sh) diff --git a/yarn-project/noir-contracts/scripts/types.sh b/yarn-project/noir-contracts/scripts/types.sh index ebbc24bdc5fc..3e2410e4004a 100755 --- a/yarn-project/noir-contracts/scripts/types.sh +++ b/yarn-project/noir-contracts/scripts/types.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Example: # - this script will automatically be run when running `yarn noir:build` diff --git a/yarn-project/noir-contracts/scripts/types_all.sh b/yarn-project/noir-contracts/scripts/types_all.sh index 5cbb3ce9d918..38081dc6efe6 100755 --- a/yarn-project/noir-contracts/scripts/types_all.sh +++ b/yarn-project/noir-contracts/scripts/types_all.sh @@ -1,3 +1,3 @@ -#!/bin/bash +#!/usr/bin/env bash # Run the types script for all files ./scripts/types.sh $(./scripts/get_all_contracts.sh) diff --git a/yarn-project/noir-contracts/src/scripts/compile.sh b/yarn-project/noir-contracts/src/scripts/compile.sh index 551f7a748694..adedfaeba4f0 100755 --- a/yarn-project/noir-contracts/src/scripts/compile.sh +++ b/yarn-project/noir-contracts/src/scripts/compile.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Example: diff --git a/yarn-project/p2p-bootstrap/terraform/servicediscovery-drain.sh b/yarn-project/p2p-bootstrap/terraform/servicediscovery-drain.sh index 1fa02e92d911..b8d6c3015196 100755 --- a/yarn-project/p2p-bootstrap/terraform/servicediscovery-drain.sh +++ b/yarn-project/p2p-bootstrap/terraform/servicediscovery-drain.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ $# -ne 1 ] && echo "Usage: $0 " && exit 1 diff --git a/yarn-project/scripts/run_script.sh b/yarn-project/scripts/run_script.sh index a3347411e85e..a06cf4eb42e1 100755 --- a/yarn-project/scripts/run_script.sh +++ b/yarn-project/scripts/run_script.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Downloads the image that contains the built scripts package and executes the given command in it. 
[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/yarn-project/scripts/version_packages.sh b/yarn-project/scripts/version_packages.sh index a708cb7a4ed0..bf631bd8ac16 100755 --- a/yarn-project/scripts/version_packages.sh +++ b/yarn-project/scripts/version_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu if [ -n "$COMMIT_TAG" ]; then diff --git a/yarn-project/types/src/tx/tx_hash.ts b/yarn-project/types/src/tx/tx_hash.ts index 138aa6dbe0fd..00003456ee38 100644 --- a/yarn-project/types/src/tx/tx_hash.ts +++ b/yarn-project/types/src/tx/tx_hash.ts @@ -1,4 +1,3 @@ -import { assertMemberLength } from '@aztec/circuits.js'; import { deserializeBigInt, serializeBigInt } from '@aztec/foundation/serialize'; /** @@ -21,7 +20,9 @@ export class TxHash { */ public buffer: Buffer, ) { - assertMemberLength(this, 'buffer', TxHash.SIZE); + if (buffer.length !== TxHash.SIZE) { + throw new Error(`Expected buffer to have length ${TxHash.SIZE} but was ${buffer.length}`); + } } /** diff --git a/yarn-project/yarn-project-base/Dockerfile b/yarn-project/yarn-project-base/Dockerfile index ee9c4f4f6b7f..96be5e824c3b 100644 --- a/yarn-project/yarn-project-base/Dockerfile +++ b/yarn-project/yarn-project-base/Dockerfile @@ -47,8 +47,8 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/bb.js as bb.js FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir as noir FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-packages as noir-packages -FROM node:18.19.0-alpine -RUN apk update && apk add --no-cache bash jq curl +FROM node:18.19.0 +RUN apt update && apt install -y jq curl perl && rm -rf /var/lib/apt/lists/* && apt-get clean # Copy L1 contracts. COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts diff --git a/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx b/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx index 4f13ec9bd982..2c1cc93dd04a 100644 --- a/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx +++ b/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx @@ -710,7 +710,7 @@ Type cast - **aOffset**: memory offset of word to cast - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = cast(M[aOffset])` -- **Details**: Cast a word in memory based on the `dst-tag` specified in the bytecode. Truncates when casting to a smaller type, left-zero-pads when casting to a larger type. See [here](./state-model#cast-and-tag-conversions) for more details. +- **Details**: Cast a word in memory based on the `dst-tag` specified in the bytecode. Truncates (`M[dstOffset] = M[aOffset] mod 2^dstsize`) when casting to a smaller type, left-zero-pads when casting to a larger type. See [here](./state-model#cast-and-tag-conversions) for more details. - **Tag updates**: `T[dstOffset] = dst-tag` - **Bit-size**: 96 diff --git a/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js b/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js index 844a89c369bd..19acb3759edc 100644 --- a/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js +++ b/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js @@ -279,7 +279,7 @@ const INSTRUCTION_SET_RAW = [ ], "Expression": "`M[dstOffset] = cast(M[aOffset])`", "Summary": "Type cast", - "Details": "Cast a word in memory based on the `dst-tag` specified in the bytecode. Truncates when casting to a smaller type, left-zero-pads when casting to a larger type. 
See [here](./state-model#cast-and-tag-conversions) for more details.", + "Details": "Cast a word in memory based on the `dst-tag` specified in the bytecode. Truncates (`M[dstOffset] = M[aOffset] mod 2^dstsize`) when casting to a smaller type, left-zero-pads when casting to a larger type. See [here](./state-model#cast-and-tag-conversions) for more details.", "Tag checks": "", "Tag updates": "`T[dstOffset] = dst-tag`", },
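The clarified CAST entry pins down the truncating case as a reduction modulo `2^dstsize`. A tiny illustrative sketch of that rule in isolation — `castTruncate` is a hypothetical helper, not the AVM implementation:

```typescript
// Truncating cast per the rule above: keep the source word modulo 2^dstBits.
const castTruncate = (value: bigint, dstBits: bigint): bigint => value % (1n << dstBits);

// Casting a 33-bit value down to a 32-bit tag drops the high bits:
console.log(castTruncate(0x1_0000_00ffn, 32n).toString(16)); // "ff"
// Casting up to a wider tag leaves the value unchanged (left-zero-pad):
console.log(castTruncate(0xffn, 64n).toString(16)); // "ff"
```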