Merge branch 'master' into ir/11-28-feat_enqueued_call_side_effect_trace
dbanks12 committed Dec 4, 2024
2 parents 30dda53 + c2c1744 commit 17d4962
Showing 255 changed files with 6,340 additions and 6,626 deletions.
1 change: 1 addition & 0 deletions .github/.gitignore
@@ -0,0 +1 @@
.secrets
23 changes: 18 additions & 5 deletions .github/workflows/ci.yml
@@ -444,7 +444,7 @@ jobs:
timeout-minutes: 40
run: earthly-ci --no-output ./+barretenberg-acir-tests-bb-ultra-honk

bb-acir-tests-bb-mega-honk:
bb-acir-tests-bb-client-ivc:
needs: [noir-build-acir-tests, build, configure]
runs-on: ${{ needs.configure.outputs.username }}-x86
if: needs.configure.outputs.barretenberg == 'true' || needs.configure.outputs.noir == 'true'
@@ -453,11 +453,11 @@ jobs:
with: { ref: "${{ env.GIT_COMMIT }}" }
- uses: ./.github/ci-setup-action
with:
concurrency_key: barretenberg-acir-tests-bb-mega-honk-x86
- name: "BB Native Acir Tests (Megahonk)"
concurrency_key: barretenberg-acir-tests-bb-client-ivc-x86
- name: "BB Native Acir Tests (ClientIVC)"
working-directory: ./barretenberg/
timeout-minutes: 40
run: earthly-ci --no-output ./+barretenberg-acir-tests-bb-mega-honk
run: earthly-ci --no-output ./+barretenberg-acir-tests-bb-client-ivc

bb-acir-tests-sol:
needs: [noir-build-acir-tests, build, configure]
@@ -875,6 +875,19 @@ jobs:
timeout-minutes: 40
run: earthly-ci -P --no-output +test --box=${{ matrix.box }} --browser=${{ matrix.browser }} --mode=cache

rough-rhino-installer:
needs: [configure]
runs-on: ${{ needs.configure.outputs.username }}-x86
steps:
- uses: actions/checkout@v4
with: { ref: "${{ github.event.pull_request.head.sha }}" }
- uses: ./.github/ci-setup-action
with:
concurrency_key: rough-rhino-installer
- name: Rough Rhino Installer Helper Script
working-directory: ./spartan/releases/rough-rhino
run: earthly-ci +test-all

protocol-circuits-gates-report:
needs: [build, configure]
if: needs.configure.outputs.non-docs == 'true' && needs.configure.outputs.non-barretenberg-cpp == 'true'
@@ -969,7 +982,7 @@ jobs:
- bb-acir-tests-bb
- bb-acir-tests-bb-ultra-plonk
- bb-acir-tests-bb-ultra-honk
- bb-acir-tests-bb-mega-honk
- bb-acir-tests-bb-client-ivc
- bb-acir-tests-sol
- bb-acir-tests-sol-honk
- bb-acir-tests-bb-js
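The new rough-rhino-installer job only runs the Earthly test target for the installer helper scripts. A rough local equivalent (a sketch assuming a plain earthly install rather than the earthly-ci wrapper used on the CI runners):

  cd spartan/releases/rough-rhino
  earthly +test-all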
94 changes: 63 additions & 31 deletions .github/workflows/network-deploy.yml
@@ -1,18 +1,17 @@
name: Aztec Network EKS Deployment

# Manual triggering of this workflow is intentionally disabled
# Helm deployments do not support lock files
# Without a lockfile, manual triggering can lead to corrupted or partial deployments
name: Aztec Network Deployment

on:
push:
branches:
- staging
- production
pull_request:
branches:
- staging
- production
workflow_dispatch:
inputs:
namespace:
description: The namespace to deploy to, e.g. smoke
required: true
values_file:
description: The values file to use, e.g. 1-validators.yaml
required: true
aztec_docker_image:
description: The Aztec Docker image to use, e.g. aztecprotocol/aztec:da809c58290f9590836f45ec59376cbf04d3c4ce-x86_64
required: true

jobs:
network_deployment:
@@ -24,34 +23,67 @@ jobs:

# Set up a variable based on the branch name
env:
NAMESPACE: ${{ github.ref == 'refs/heads/production' && 'production' || 'staging' }}
AZTEC_DOCKER_IMAGE: ${{ inputs.aztec_docker_image }}
NAMESPACE: ${{ inputs.namespace }}
VALUES_FILE: ${{ inputs.values_file }}
CHART_PATH: ./spartan/aztec-network
CLUSTER_NAME: aztec-gke
REGION: us-west1-a
TF_STATE_BUCKET: aztec-terraform
GKE_CLUSTER_CONTEXT: gke_testnet-440309_us-west1-a_aztec-gke

steps:
# Step 1: Check out the repository's code
- name: Checkout code
uses: actions/checkout@v3

# Step 2: Configure AWS credentials using GitHub Secrets
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
- name: Authenticate to Google Cloud
uses: google-github-actions/auth@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
credentials_json: ${{ secrets.GCP_SA_KEY }}

- name: Set up Cloud SDK
uses: google-github-actions/setup-gcloud@v2

# Step 3: Set up Kubernetes context for AWS EKS
- name: Configure kubectl with EKS cluster
- name: Install GKE Auth Plugin
run: |
aws eks update-kubeconfig --region us-east-1 --name spartan
gcloud components install gke-gcloud-auth-plugin --quiet
# Step 4: Install Helm
- name: Install Helm
- name: Configure kubectl with GKE cluster
run: |
curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash
gcloud container clusters get-credentials ${{ env.CLUSTER_NAME }} --region ${{ env.REGION }}
# Step 5: Apply Helm Chart
- name: Deploy Helm chart
- name: Ensure Terraform state bucket exists
run: |
helm dependency update ${{ env.CHART_PATH }}
helm upgrade --install ${{ env.NAMESPACE }} ${{ env.CHART_PATH }} --namespace ${{ env.NAMESPACE }} --set network.public=true --atomic --create-namespace --timeout 20m
if ! gsutil ls gs://${{ env.TF_STATE_BUCKET }} >/dev/null 2>&1; then
echo "Creating GCS bucket for Terraform state..."
gsutil mb -l us-east4 gs://${{ env.TF_STATE_BUCKET }}
gsutil versioning set on gs://${{ env.TF_STATE_BUCKET }}
else
echo "Terraform state bucket already exists"
fi
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
terraform_version: "1.5.0" # Specify your desired version

- name: Terraform Init
working-directory: ./spartan/terraform/deploy-release
run: |
terraform init \
-backend-config="bucket=${{ env.TF_STATE_BUCKET }}" \
-backend-config="prefix=network-deploy/${{ env.REGION }}/${{ env.CLUSTER_NAME }}/${{ env.NAMESPACE }}/terraform.tfstate" \
- name: Terraform Plan
working-directory: ./spartan/terraform/deploy-release
run: |
terraform plan \
-var="release_name=${{ env.NAMESPACE }}" \
-var="values_file=${{ env.VALUES_FILE }}" \
-var="gke_cluster_context=${{ env.GKE_CLUSTER_CONTEXT }}" \
-var="aztec_docker_image=${{ env.AZTEC_DOCKER_IMAGE }}" \
-out=tfplan
- name: Terraform Apply
working-directory: ./spartan/terraform/deploy-release
run: terraform apply -auto-approve tfplan
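With the push triggers removed, a deployment is driven entirely by the workflow_dispatch inputs, and Terraform state is isolated per cluster and namespace via the backend prefix. A minimal sketch of dispatching the workflow with the GitHub CLI (the image tag below is a placeholder, not a real release):

  gh workflow run network-deploy.yml \
    -f namespace=smoke \
    -f values_file=1-validators.yaml \
    -f aztec_docker_image=aztecprotocol/aztec:<commit-sha>-x86_64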
86 changes: 86 additions & 0 deletions .github/workflows/network-test.yml
@@ -0,0 +1,86 @@
name: Aztec Network Test

on:
workflow_dispatch:
inputs:
namespace:
description: The namespace to deploy to, e.g. smoke
required: true
test:
description: The test to run, e.g. spartan/smoke.test.ts
required: true
aztec_e2e_docker_image:
description: The Aztec E2E Docker image to use, e.g. aztecprotocol/end-to-end:da809c58290f9590836f45ec59376cbf04d3c4ce-x86_64
required: true

jobs:
network_test:
runs-on: ubuntu-latest

env:
TEST_DOCKER_IMAGE: ${{ inputs.aztec_e2e_docker_image }}
NAMESPACE: ${{ inputs.namespace }}
TEST: ${{ inputs.test }}
CHART_PATH: ./spartan/aztec-network
CLUSTER_NAME: aztec-gke
REGION: us-west1-a
PROJECT_ID: testnet-440309
GKE_CLUSTER_CONTEXT: gke_testnet-440309_us-west1-a_aztec-gke

steps:
- name: Checkout code
uses: actions/checkout@v3

- name: Authenticate to Google Cloud
uses: google-github-actions/auth@v2
with:
credentials_json: ${{ secrets.GCP_SA_KEY }}

- name: Set up Cloud SDK
uses: google-github-actions/setup-gcloud@v2
with:
install_components: gke-gcloud-auth-plugin

- name: Configure kubectl with GKE cluster
run: |
gcloud container clusters get-credentials ${{ env.CLUSTER_NAME }} --region ${{ env.REGION }}
- name: Run test
run: |
# Find 3 free ports between 9000 and 10000
FREE_PORTS=$(comm -23 <(seq 9000 10000 | sort) <(ss -Htan | awk '{print $4}' | cut -d':' -f2 | sort -u) | shuf | head -n 3)
# Extract the free ports from the list
PXE_PORT=$(echo $FREE_PORTS | awk '{print $1}')
ANVIL_PORT=$(echo $FREE_PORTS | awk '{print $2}')
METRICS_PORT=$(echo $FREE_PORTS | awk '{print $3}')
export GRAFANA_PASSWORD=$(kubectl get secrets -n metrics metrics-grafana -o jsonpath='{.data.admin-password}' | base64 --decode)
gcloud config set project ${{ env.PROJECT_ID }}
GCLOUD_CONFIG_DIR=$(gcloud info --format='value(config.paths.global_config_dir)')
echo "gcloud config dir: [$GCLOUD_CONFIG_DIR]"
docker run --rm --network=host \
-v ~/.kube:/root/.kube \
-v $GCLOUD_CONFIG_DIR:/root/.config/gcloud \
-e K8S=gcloud \
-e CLUSTER_NAME=${{ env.CLUSTER_NAME }} \
-e REGION=${{ env.REGION }} \
-e INSTANCE_NAME=${{ env.NAMESPACE }} \
-e SPARTAN_DIR="/usr/src/spartan" \
-e NAMESPACE=${{ env.NAMESPACE }} \
-e HOST_PXE_PORT=$PXE_PORT \
-e CONTAINER_PXE_PORT=8081 \
-e HOST_ETHEREUM_PORT=$ANVIL_PORT \
-e CONTAINER_ETHEREUM_PORT=8545 \
-e HOST_METRICS_PORT=$METRICS_PORT \
-e CONTAINER_METRICS_PORT=80 \
-e GRAFANA_PASSWORD=$GRAFANA_PASSWORD \
-e DEBUG="aztec:*" \
-e LOG_JSON=1 \
-e LOG_LEVEL=debug \
${{ env.TEST_DOCKER_IMAGE }} ${{ env.TEST }}
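The test job picks three unused host ports, reads the Grafana admin password out of the metrics namespace, and runs the end-to-end image against the target network with the kubeconfig and gcloud config mounted in. A sketch of dispatching it by hand with the GitHub CLI (placeholder inputs):

  gh workflow run network-test.yml \
    -f namespace=smoke \
    -f test=spartan/smoke.test.ts \
    -f aztec_e2e_docker_image=aztecprotocol/end-to-end:<commit-sha>-x86_64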
16 changes: 14 additions & 2 deletions .github/workflows/publish-aztec-packages.yml
@@ -100,13 +100,13 @@ jobs:
with:
concurrency_key: build-aztec
dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}"
- name: Build & Push Aztec x86_64
- name: Build & Push Aztec and End-to-End x86_64
timeout-minutes: 40
run: |
earthly-ci \
--no-output \
--push \
./yarn-project+export-aztec-arch \
./yarn-project+export-images-arch \
--DIST_TAG=${{ env.GIT_COMMIT }} \
--ARCH=x86_64
@@ -312,6 +312,18 @@ jobs:
--VERSION=$VERSION \
--DRY_RUN=${{ (github.event.inputs.publish == 'false') && '1' || '0' }}
- name: Publish spartan NPM package
run: |
DEPLOY_TAG=${{ env.DEPLOY_TAG }}
VERSION=${DEPLOY_TAG#aztec-packages-v}
earthly-ci \
--no-output \
--secret NPM_TOKEN=${{ env.NPM_TOKEN }} \
./spartan/releases/rough-rhino+publish-npm \
--DIST_TAG=latest \
--VERSION=$VERSION \
--DRY_RUN=${{ (github.event.inputs.publish == 'false') && '1' || '0' }}
publish-aztec-up:
needs: [configure, publish-manifests]
runs-on: ubuntu-latest
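The new spartan publish step derives the NPM version by stripping the aztec-packages-v prefix from the deploy tag using POSIX parameter expansion, for example (illustrative tag only):

  DEPLOY_TAG=aztec-packages-v0.65.0
  VERSION=${DEPLOY_TAG#aztec-packages-v}   # VERSION is now 0.65.0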
12 changes: 0 additions & 12 deletions aztec-up/terraform/main.tf
@@ -108,18 +108,6 @@ resource "null_resource" "upload_public_directory" {
}
}

# resource "aws_route53_record" "subdomain_record" {
# zone_id = data.terraform_remote_state.aztec2_iac.outputs.aws_route53_zone_id
# name = "install.aztec.network"
# type = "A"

# alias {
# name = aws_s3_bucket_website_configuration.website_bucket.website_domain
# zone_id = aws_s3_bucket.install_bucket.hosted_zone_id
# evaluate_target_health = true
# }
# }

resource "aws_cloudfront_distribution" "install" {
origin {
domain_name = aws_s3_bucket.install_bucket.website_endpoint
4 changes: 2 additions & 2 deletions barretenberg/.gitrepo
@@ -6,7 +6,7 @@
[subrepo]
remote = https://github.com/AztecProtocol/barretenberg
branch = master
commit = 0fe0a5d6dff43547aaec45256440982184e93bb0
parent = da1470d074f4884e61b51e450a661432c6f0a10f
commit = 3195a1b30b3bcfd635f9b4a899c49cb517283685
parent = 94e6e1a954911b81e6af85edff55c64f13595b20
method = merge
cmdver = 0.4.6
16 changes: 7 additions & 9 deletions barretenberg/Earthfile
@@ -27,9 +27,9 @@ barretenberg-acir-tests-bb:
ENV VERBOSE=1

# Fold and verify an ACIR program stack using ClientIvc
RUN FLOW=fold_and_verify_program ./run_acir_tests.sh fold_basic
RUN INPUT_TYPE=compiletime_stack FLOW=prove_and_verify_client_ivc ./run_acir_tests.sh fold_basic
# Fold and verify an ACIR program stack using ClientIvc, then natively verify the ClientIVC proof.
RUN FLOW=prove_then_verify_client_ivc ./run_acir_tests.sh fold_basic
RUN INPUT_TYPE=compiletime_stack FLOW=prove_then_verify_client_ivc ./run_acir_tests.sh fold_basic
# Fold and verify an ACIR program stack using ClientIvc, recursively verify as part of the Tube circuit and produce and verify a Honk proof
RUN FLOW=prove_then_verify_tube ./run_acir_tests.sh fold_basic
# Run 1_mul through native bb build, all_cmds flow, to test all cli args.
@@ -79,7 +79,7 @@ barretenberg-acir-tests-bb-ultra-honk:
# Construct and verify a UltraHonk proof for a single program that recursively verifies a Honk proof
RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh verify_honk_proof

barretenberg-acir-tests-bb-mega-honk:
barretenberg-acir-tests-bb-client-ivc:
FROM ../build-images/+from-registry

COPY ./cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb
@@ -92,12 +92,10 @@ barretenberg-acir-tests-bb-mega-honk:
ENV TEST_SRC /usr/src/acir_artifacts
ENV VERBOSE=1

# Construct and separately verify a MegaHonk proof for all acir programs
RUN FLOW=prove_then_verify_mega_honk ./run_acir_tests.sh
# Construct and verify a MegaHonk proof for a single arbitrary program
RUN FLOW=prove_and_verify_mega_honk ./run_acir_tests.sh 6_array
# Construct and verify a MegaHonk proof for all ACIR programs using the new witness stack workflow
RUN FLOW=prove_and_verify_mega_honk_program ./run_acir_tests.sh
# Construct and verify a ClientIVC proof for a single arbitrary program
RUN FLOW=prove_and_verify_client_ivc ./run_acir_tests.sh 6_array
# Construct and separately verify a ClientIVC proof for all acir programs
RUN FLOW=prove_then_verify_client_ivc CLIENT_IVC_SKIPS=true ./run_acir_tests.sh

barretenberg-acir-tests-sol:
FROM ../build-images/+from-registry
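The MegaHonk flows are replaced by ClientIVC flows covering both the one-shot prove-and-verify path and the split prove-then-verify path. Run locally from barretenberg/acir_tests, the targets above amount to (a sketch assuming the ACIR artifacts and a native assert-enabled bb build are already in place):

  cd barretenberg/acir_tests
  FLOW=prove_and_verify_client_ivc ./run_acir_tests.sh 6_array
  FLOW=prove_then_verify_client_ivc CLIENT_IVC_SKIPS=true ./run_acir_tests.sh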
2 changes: 2 additions & 0 deletions barretenberg/acir_tests/flows/fold_and_verify_program.sh
@@ -1,6 +1,8 @@
#!/bin/sh
set -eu

# this flow is deprecated. currently it is bb.js only. for bb it is replaced by:
# prove_and_verify --scheme client_ivc --input-type compiletime_stack
VFLAG=${VERBOSE:+-v}

$BIN fold_and_verify_program $VFLAG -c $CRS_PATH -b ./target/program.json
9 changes: 9 additions & 0 deletions barretenberg/acir_tests/flows/prove_and_verify_client_ivc.sh
@@ -0,0 +1,9 @@
#!/bin/sh
set -eu

VFLAG=${VERBOSE:+-v}
INFLAG=${INPUT_TYPE=compiletime_stack}

FLAGS="-c $CRS_PATH -b ./target/program.json $VFLAG --scheme client_ivc --input_type $INFLAG"

$BIN prove_and_verify $FLAGS
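With the defaults above, and assuming $BIN points at the native bb binary, the flow expands to roughly this single invocation (a sketch; add -v for verbose output):

  bb prove_and_verify -c $CRS_PATH -b ./target/program.json --scheme client_ivc --input_type compiletime_stack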
5 changes: 5 additions & 0 deletions barretenberg/acir_tests/flows/prove_and_verify_mega_honk.sh
@@ -3,4 +3,9 @@ set -eu

VFLAG=${VERBOSE:+-v}

# this flow is deprecated. currently it is bb.js only. for bb it is replaced by:
# prove_and_verify --scheme client_ivc --input-type compiletime_stack
# NB: In general, it is not meaningful to produce a MegaHonk proof on its own since
# the MegaHonk proof does not attest to the correctness of every possible kind
# of gate that could appear in a Mega execution trace.
$BIN prove_and_verify_mega_honk $VFLAG -c $CRS_PATH -b ./target/program.json